Merge branch 'release/2.10' into 'stable'

Release/2.10

See merge request pleroma/pleroma!4413

Commit 3b99bbd955: 712 changed files with 12876 additions and 2393 deletions
@@ -1,4 +1,4 @@
image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.14.5-otp-25
image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15.8-otp-26

variables: &global_variables
  # Only used for the release

@@ -20,7 +20,6 @@ workflow:
    - if: $CI_COMMIT_TAG
    - if: $CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS
      when: never
    - if: $CI_COMMIT_BRANCH

# Default artifacts configuration
.default_artifacts: &default_artifacts

@@ -81,7 +80,7 @@ check-changelog:
    - amd64
  artifacts: *default_artifacts

build-1.14.5-otp-25:
build-1.15.8-otp-26:
  extends:
    - .build_changes_policy
    - .using-ci-base

@@ -89,12 +88,12 @@ build-1.14.5-otp-25:
  script:
    - mix compile --force

build-1.17.1-otp-26:
build-1.18.3-otp-27:
  extends:
    - .build_changes_policy
    - .using-ci-base
  stage: build
  image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.17.1-otp-26
  image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.18.3-otp-27
  script:
    - mix compile --force

@@ -133,7 +132,7 @@ benchmark:
    - mix ecto.migrate
    - mix pleroma.load_testing

unit-testing-1.14.5-otp-25:
unit-testing-1.15.8-otp-26:
  extends:
    - .build_changes_policy
    - .using-ci-base

@@ -145,10 +144,25 @@ unit-testing-1.14.5-otp-25:
    - name: postgres:13-alpine
      alias: postgres
      command: ["postgres", "-c", "fsync=off", "-c", "synchronous_commit=off", "-c", "full_page_writes=off"]
  before_script: &testing_before_script
    - echo $MIX_ENV
    - rm -rf _build/*/lib/pleroma
    # Create a non-root user for running tests
    - useradd -m -s /bin/bash testuser
    # Install dependencies as root first
    - mix deps.get
    # Set proper ownership for everything
    - chown -R testuser:testuser .
    - chown -R testuser:testuser /root/.mix || true
    - chown -R testuser:testuser /root/.hex || true
    # Create user-specific directories
    - su testuser -c "HOME=/home/testuser mix local.hex --force"
    - su testuser -c "HOME=/home/testuser mix local.rebar --force"
  script: &testing_script
    - mix ecto.create
    - mix ecto.migrate
    - mix pleroma.test_runner --cover --preload-modules
    # Run tests as non-root user
    - su testuser -c "HOME=/home/testuser mix ecto.create"
    - su testuser -c "HOME=/home/testuser mix ecto.migrate"
    - su testuser -c "HOME=/home/testuser mix pleroma.test_runner --cover --preload-modules"
  coverage: '/^Line total: ([^ ]*%)$/'
  artifacts:
    expire_in: 30 days

@@ -157,14 +171,15 @@ unit-testing-1.14.5-otp-25:
      coverage_format: cobertura
      path: coverage.xml

unit-testing-1.17.1-otp-26:
unit-testing-1.18.3-otp-27:
  extends:
    - .build_changes_policy
    - .using-ci-base
  stage: test
  image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.17.1-otp-26
  image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.18.3-otp-27
  cache: *testing_cache_policy
  services: *testing_services
  before_script: *testing_before_script
  script: *testing_script

formatting-1.15:

@@ -288,7 +303,7 @@ stop_review_app:
amd64:
  stage: release
  image:
    name: hexpm/elixir-amd64:1.17.3-erlang-26.2.5.6-ubuntu-focal-20241011
    name: hexpm/elixir-amd64:1.17.3-erlang-27.3.4.2-ubuntu-noble-20250716
  only: &release-only
    - stable@pleroma/pleroma
    - develop@pleroma/pleroma

@@ -315,7 +330,7 @@ amd64:
    VIX_COMPILATION_MODE: PLATFORM_PROVIDED_LIBVIPS
    DEBIAN_FRONTEND: noninteractive
  before_script: &before-release
    - apt-get update && apt-get install -y cmake libmagic-dev libvips-dev erlang-dev git
    - apt-get update && apt-get install -y cmake libmagic-dev libvips-dev erlang-dev git build-essential
    - echo "import Config" > config/prod.secret.exs
    - mix local.hex --force
    - mix local.rebar --force

@@ -331,7 +346,7 @@ amd64-musl:
  artifacts: *release-artifacts
  only: *release-only
  image:
    name: hexpm/elixir-amd64:1.17.3-erlang-26.2.5.6-alpine-3.17.9
    name: hexpm/elixir-amd64:1.17.3-erlang-27.3.4.2-alpine-3.22.1
  tags:
    - amd64
  cache: *release-cache

@@ -375,7 +390,7 @@ arm64:
  tags:
    - arm
  image:
    name: hexpm/elixir-arm64:1.17.3-erlang-26.2.5.6-ubuntu-focal-20241011
    name: hexpm/elixir-arm64:1.17.3-erlang-27.3.4.2-ubuntu-noble-20250716
  cache: *release-cache
  variables: *release-variables
  before_script: *before-release

@@ -388,7 +403,7 @@ arm64-musl:
  tags:
    - arm
  image:
    name: hexpm/elixir-arm64:1.17.3-erlang-26.2.5.6-alpine-3.17.9
    name: hexpm/elixir-arm64:1.17.3-erlang-27.3.4.2-alpine-3.22.1
  cache: *release-cache
  variables: *release-variables
  before_script: *before-release-musl
CHANGELOG.md
@@ -4,9 +4,113 @@ All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).

## 2.10

### Security

- Admin API: Fixed self-revocation vulnerability where admins could accidentally revoke their own admin status via the single-user permission endpoint
- Fix bypass of the `restrict_unauthenticated` setting by requesting local Activities

### Changed

- Add a new activity actor/type index, which greatly speeds up retrieval of rare types (like "Listen")
- Use separate schemas for muted/blocked accounts lists
- Docs: Restore the DB schema before data to avoid long restore times
- Return 404 with a better error message instead of 400 when receiving an activity for a deactivated user
- Deleting an instance queues individual jobs for each user that needs to be deleted from the server
- Update Dockerfile to use Elixir 1.17.3, Erlang 26.2.5.6, and Alpine 3.17.9 to match CI release builds
- Docs RUM index: Add the OTP install command, update the index size expectation, and recommend VACUUM FULL
- Support the new Mastodon API for endorsed accounts
- Allow the FediIndex crawler bot by default
- Update the Cowboy, Gun, and Plug family of dependencies
- Hashtag searches return real results based on the words in your query
- Support the `quoted_status_id` parameter in the post creation request
- Use the Mastodon-compatible route for the quotes list and param for the quotes count
- Updated the example Nginx configuration
- The Oban notifier was changed to Oban.Notifiers.PG for performance and scalability benefits (see the configuration sketch after this list)
- Updated the relayd/httpd config files to be on par with the Nginx one
- Order the favourites and reblogs lists from newest to oldest
- Update Pleroma-FE to 2.9.2
- Updated the Postgrex library to 0.20.0
- Improved the logic for determining whether a server is unreachable
- Relax `alsoKnownAs` requirements to any URI, not necessarily HTTP(S)
- Redirect /users/:nickname.rss to /users/:nickname/feed.rss instead of .atom
- Add `write:scrobbles` and `read:scrobbles` scopes for scrobbling
- Change the scrobble external link param name to use snake case
- Allow "invisible" and "ellipsis" classes for span tags to match Mastodon behavior
- Change the SMTP example to use the Mua adapter, which works with OTP > 25
- Updated Tesla to 1.15.3
- Truncate the length of Rich Media title and description fields
- Don't require an Accept header for WebFinger queries and default to JSON
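
The Oban notifier change above ships as a new default in `config/config.exs` (part of this merge request). A minimal sketch of the resulting setting, only relevant if you had overridden the notifier yourself:

```elixir
# New default in this release; assumes no custom Oban notifier override.
config :pleroma, Oban,
  notifier: Oban.Notifiers.PG
```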

### Added

- Support the Dislike activity, as sent by Mitra and Friendica, by changing it into a thumbs-down EmojiReact
- Support Mitra-style emoji likes
- Added a way to upload new packs from a URL or ZIP file via the Admin API
- Add `duration` to the block endpoint, which makes the block expire
- Expose markup configuration in InstanceView
- Allow filtering users by the `accepts_chat_messages` capability
- Add `timelines_access` to InstanceView
- Implement language detection with fastText
- Added MRF.QuietReply, which prevents replies to public posts from being published to the timelines
- Oban.Plugins.Lazarus to help recover stuck jobs after an unclean shutdown of Pleroma
- Add `/api/v1/pleroma/outgoing_follow_requests`
- Allow users to select their preferred frontend
- Provide the full replies collection in ActivityPub objects
- Allow anonymizing reports sent to remote servers (see the configuration sketch after this list)
- Add the `only_reblogs` parameter to the account statuses API to filter for reblogs/reposts only
- Allow setting a custom User-Agent for fetching rich media content
- Scrubber: Allow the `quote-inline` class in `<p>` tags used by Mastodon quotes
- Add `base_urls` to the `/api/v1/instance` Pleroma metadata, which provides the base URLs for media_proxy and uploads when configured
- Stream marker updates
- Make the Terms of Service panel behaviour configurable
- Support translation providers (DeepL, LibreTranslate)
- Support the Mozhi translation provider
- Support the translateLocally translation provider
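
A minimal configuration sketch for three of the additions above (anonymized remote reports, fastText language detection, and the DeepL translation provider). The option names come from `config/config.exs` and `config/description.exs` in this merge request; the values are placeholders and the exact nesting of the provider-specific keys is an assumption:

```elixir
# Placeholder values; provider-specific key nesting is assumed from config/description.exs.
config :pleroma, :activitypub,
  anonymize_reporter: true,
  anonymize_reporter_local_nickname: "lain"

config :pleroma, Pleroma.Language.LanguageDetector,
  provider: Pleroma.Language.LanguageDetector.Fasttext,
  model: "/usr/share/fasttext/lid.176.bin"

config :pleroma, Pleroma.Language.Translation,
  provider: Pleroma.Language.Translation.Deepl,
  base_url: "https://api-free.deepl.com",
  api_key: "YOUR_API_KEY"
```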

### Fixed

- AP C2S: Reject interactions with statuses not visible to the Actor
- Fix AssignAppUser migration OOM
- Fix fetching public keys with authorized fetch enabled
- Fix building the "captcha" library with OpenBSD make
- Use JSON for DeepL API requests
- Elixir 1.18: Fixed warnings and new deprecations
- Fix endorsement state display in the relationship view
- Fix the publisher when publishing to a list of users
- Fix reports being rejected when the activity had an empty CC or TO field (instead of not having them at all)
- Set PATH in the FreeBSD rc script to avoid failures starting the service
- Improved performance of status search queries using the default GIN index
- Use an end-of-string anchor in the regex for local `get_by_nickname`
- Respect `restrict_unauthenticated` in /api/v1/accounts/lookup (see the configuration sketch after this list)
- MastodonAPI: Reject interactions with statuses not visible to the user
- Fix ModerationLog FunctionClauseError for unknown actions
- MRF InlineQuotePolicy: Don't inline the quoted post URL in Mastodon quote posts
- Fix NodeInfo content-type
- Normalize Actor images from an array of URLs to a string
- Add `update` to `@notification_types`
- Replaced deprecated flags and functions, renamed the service to fit other service files
- Allow pinning/unpinning chats
- Fix federation issue where Public visibility information in the cc field was lost when sent to remote servers, causing posts to appear with inconsistent visibility across instances
- OpenBSD relayd: Fix the IPv6 example
- Fix release builds
- `remote_url` links to the unproxied URL
- Send push notifications for statuses from subscribed accounts
- Backport [Elixir PR 14242](https://github.com/elixir-lang/elixir/pull/14242) fixing a racy mkdir and missing error handling when creating parent directories
- Transmogrifier: convert "as:Public" to the full w3.org URL
- Update the voters count in remote polls when refreshing
- Fix sometimes-incorrect URI percent-encoding
- Fix the HTTP client making invalid requests due to missing percent-encoding processing and validation
- ObjectView: Do not leak the unsanitized internal representation of non-Create/non-Undo Activities on fetches
- Fix WebFinger for split-domain setups
- Enforce an exact domain match for WebFinger resolution
- MastodonAPI: Fix misattribution of statuses when fetched via a non-Announce Activity ID
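
Several of the fixes above tighten enforcement of the existing `restrict_unauthenticated` setting. For reference, a sketch of a fully restricted setup using the long-documented key layout (not introduced by this release):

```elixir
# Hides timelines, profiles, and activities from unauthenticated visitors.
config :pleroma, :restrict_unauthenticated,
  timelines: %{local: true, federated: true},
  profiles: %{local: true, remote: true},
  activities: %{local: true, remote: true}
```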

## 2.9.1

### Security

- Fix authorization checks for C2S Update activities to prevent unauthorized modifications of other users' content
- Fix content-type spoofing vulnerability that could allow users to upload ActivityPub objects as attachments
- Reject cross-domain redirects when fetching ActivityPub objects to prevent bypassing domain-based security controls

@@ -16,27 +120,33 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
|||
- Validate Content-Type headers when fetching remote ActivityPub objects to prevent spoofing attacks.
|
||||
|
||||
### Changed
|
||||
|
||||
- Include `pl-fe` in available frontends
|
||||
|
||||
### Fixed
|
||||
|
||||
- Remove trailing ` from the end of line 75, which caused issues when copy-pasting
|
||||
|
||||
## 2.9.0
|
||||
|
||||
### Security
|
||||
|
||||
- Require HTTP signatures (if enabled) for routes used by both C2S and S2S AP API
|
||||
- Fix several spoofing vectors
|
||||
|
||||
### Changed
|
||||
- Performance: Use 301 (permanent) redirect instead of 302 (temporary) when redirecting small images in media proxy. This allows browsers to cache the redirect response.
|
||||
|
||||
- Performance: Use 301 (permanent) redirect instead of 302 (temporary) when redirecting small images in media proxy. This allows browsers to cache the redirect response.
|
||||
|
||||
### Added
|
||||
|
||||
- Include "published" in actor view
|
||||
- Link to exported outbox/followers/following collections in backup actor.json
|
||||
- Hashtag following
|
||||
- Allow to specify post language
|
||||
|
||||
### Fixed
|
||||
|
||||
- Verify a local Update sent through AP C2S so users can only update their own objects
|
||||
- Fix Mastodon incoming edits with inlined "likes"
|
||||
- Allow incoming "Listen" activities
|
||||
|
|
@ -46,11 +156,13 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
|||
- Fix blurhash generation crashes
|
||||
|
||||
### Removed
|
||||
|
||||
- Retire MRFs DNSRBL, FODirectReply, and QuietReply
|
||||
|
||||
## 2.8.0
|
||||
|
||||
### Changed
|
||||
|
||||
- Metadata: Do not include .atom feed links for remote accounts
|
||||
- Bumped `fast_html` to v2.3.0, which notably allows using a system-installed lexbor by passing the `WITH_SYSTEM_LEXBOR=1` environment variable at build time
|
||||
- Dedupe upload filter now uses a three-level sharding directory structure
|
||||
|
|
@ -71,6 +183,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
|||
- Worker configuration is no longer available. This only affects custom max_retries values for a couple Oban queues.
|
||||
|
||||
### Added
|
||||
|
||||
- Add metadata provider for ActivityPub alternate links
|
||||
- Added support for argon2 passwords and their conversion for migration from Akkoma fork to upstream.
|
||||
- Respect :restrict_unauthenticated for hashtag rss/atom feeds
|
||||
|
|
@ -88,6 +201,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
|||
- Include session scopes in TokenView
|
||||
|
||||
### Fixed
|
||||
|
||||
- Verify a local Update sent through AP C2S so users can only update their own objects
|
||||
- Fixed malformed follow requests that cause them to appear stuck pending due to the recipient being unable to process them.
|
||||
- Fix incoming Block activities being rejected
|
||||
|
|
@ -105,14 +219,17 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
|||
- Make vapid_config return empty array, fixing preloading for instances without push notifications configured
|
||||
|
||||
### Removed
|
||||
|
||||
- Remove stub for /api/v1/accounts/:id/identity_proofs (deprecated by Mastodon 3.5.0)
|
||||
|
||||
## 2.7.1
|
||||
|
||||
### Changed
|
||||
|
||||
- Accept `application/activity+json` for requests to `/.well-known/nodeinfo`
|
||||
|
||||
### Fixed
|
||||
|
||||
- Truncate remote user fields, avoids them getting rejected
|
||||
- Improve the `FollowValidator` to successfully process incoming activities with an errant `cc` field.
|
||||
- Resolved edge case where the API can report you are following a user but the relationship is not fully established.
|
||||
|
|
@ -122,16 +239,18 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
|||
## 2.7.0
|
||||
|
||||
### Security
|
||||
|
||||
- HTTP Security: By default, don't allow unsafe-eval. The setting needs to be changed to allow Flash emulation.
|
||||
- Fix webfinger spoofing.
|
||||
- Use proper workers for fetching pins instead of an ad-hoc task, fixing a potential fetch loop
|
||||
|
||||
### Changed
|
||||
|
||||
- Update to Phoenix 1.7
|
||||
- Elixir Logger configuration is no longer permitted through AdminFE and ConfigDB
|
||||
- Refactor the user backups code and improve test coverage
|
||||
- Invalid activities delivered to the inbox will be rejected with a 400 Bad Request
|
||||
- Support Bandit as an alternative to Cowboy for the HTTP server.
|
||||
- Support Bandit as an alternative to Cowboy for the HTTP server.
|
||||
- Update Bandit to 1.5.2
|
||||
- Replace eblurhash with rinpatch_blurhash. This also removes a dependency on ImageMagick.
|
||||
- Elixir 1.13 is the minimum required version.
|
||||
|
|
@ -170,6 +289,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
|||
- Refactor the Mastodon /api/v1/streaming websocket handler to use Phoenix.Socket.Transport
|
||||
|
||||
### Added
|
||||
|
||||
- Uploader: Add support for uploading attachments using IPFS
|
||||
- Add NSFW-detecting MRF
|
||||
- Add DNSRBL MRF
|
||||
|
|
@ -215,6 +335,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
|||
- Support honk-style attachment summaries as alt-text.
|
||||
|
||||
### Fixed
|
||||
|
||||
- Fix Emoji object IDs not always being valid
|
||||
- Remove checking ImageMagick's commands for Pleroma.Upload.Filter.AnalyzeMetadata
|
||||
- Ensure that StripLocation actually removes everything resembling GPS data from PNGs
|
||||
|
|
@ -257,7 +378,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
|||
- Fix Optimistic Inbox for failed signatures
|
||||
- MediaProxy Preview failures prevented when encountering certain video files
|
||||
- pleroma_ctl: Use realpath(1) instead of readlink(1)
|
||||
- ReceiverWorker: Make sure non-{:ok, _} is returned as {:error, …}
|
||||
- ReceiverWorker: Make sure non-{:ok, \_} is returned as {:error, …}
|
||||
- Harden Rich Media parsing against very slow or malicious URLs
|
||||
- Rich Media Preview cache eviction when the activity is updated.
|
||||
- Parsing of RichMedia TTLs for Amazon URLs when query parameters are nil
|
||||
|
|
@ -269,32 +390,41 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
|||
- Fix validate_webfinger when running a different domain for Webfinger
|
||||
|
||||
### Removed
|
||||
|
||||
- Mastodon API: Remove deprecated GET /api/v1/statuses/:id/card endpoint https://github.com/mastodon/mastodon/pull/11213
|
||||
- Removed support for multiple federator modules as we only support ActivityPub
|
||||
|
||||
## 2.6.2
|
||||
|
||||
### Security
|
||||
|
||||
- MRF StealEmojiPolicy: Sanitize shortcodes (thanks to Hazel K for the report)
|
||||
|
||||
## 2.6.1
|
||||
|
||||
### Changed
|
||||
|
||||
- Document the maximum supported version of Erlang & Elixir
|
||||
|
||||
### Added
|
||||
|
||||
- [docs] add frontends management documentation
|
||||
|
||||
### Fixed
|
||||
|
||||
- TwitterAPI: Return proper error when healthcheck is disabled
|
||||
- Fix eblurhash and elixir-captcha not using system cflags
|
||||
|
||||
## 2.6.0
|
||||
|
||||
### Security
|
||||
|
||||
- Preload: Make generated JSON html-safe. It already was html-safe because it only consists of config data that is base64 encoded, but this will keep it safe if that ever changes.
|
||||
- CommonAPI: Prevent users from accessing media of other users by creating a status with reused attachment ID
|
||||
- Disable XML entity resolution completely to fix a dos vulnerability
|
||||
|
||||
### Added
|
||||
|
||||
- Support for Image activities, namely from Hubzilla
|
||||
- Add OAuth scope descriptions
|
||||
- Allow lang attribute in status text
|
||||
|
|
@ -305,6 +435,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
|||
- Add unified streaming endpoint
|
||||
|
||||
### Fixed
|
||||
|
||||
- rel="me" was missing its cache
|
||||
- MediaProxy responses now return a sandbox CSP header
|
||||
- Filter context activities using Visibility.visible_for_user?
|
||||
|
|
@ -326,6 +457,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
|||
- Show more informative errors when profile exceeds char limits
|
||||
|
||||
### Removed
|
||||
|
||||
- BREAKING: Support for passwords generated with `crypt(3)` (Gnu Social migration artifact)
|
||||
- remove BBS/SSH feature, replaced by an external bridge.
|
||||
- Remove a few unused indexes.
|
||||
|
|
@ -335,56 +467,67 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
|||
## 2.5.4
|
||||
|
||||
## Security
|
||||
|
||||
- Fix XML External Entity (XXE) loading vulnerability allowing to fetch arbitrary files from the server's filesystem
|
||||
|
||||
## 2.5.3
|
||||
|
||||
### Security
|
||||
|
||||
- Emoji pack loader sanitizes pack names
|
||||
- Reduced permissions of config files and directories, distros requiring greater permissions like group-read need to pre-create the directories
|
||||
|
||||
## 2.5.5
|
||||
|
||||
## Security
|
||||
|
||||
- Prevent users from accessing media of other users by creating a status with reused attachment ID
|
||||
|
||||
## 2.5.4
|
||||
|
||||
## Security
|
||||
|
||||
- Fix XML External Entity (XXE) loading vulnerability allowing to fetch arbitrary files from the server's filesystem
|
||||
|
||||
## 2.5.3
|
||||
|
||||
### Security
|
||||
|
||||
- Emoji pack loader sanitizes pack names
|
||||
- Reduced permissions of config files and directories, distros requiring greater permissions like group-read need to pre-create the directories
|
||||
|
||||
## 2.5.2
|
||||
|
||||
### Security
|
||||
|
||||
- `/proxy` endpoint now sets a Content-Security-Policy (sandbox)
|
||||
- WebSocket endpoint now respects unauthenticated restrictions for streams of public posts
|
||||
- OEmbed HTML tags are now filtered
|
||||
|
||||
### Changed
|
||||
|
||||
- docs: Be more explicit about the level of compatibility of OTP releases
|
||||
- Set default background worker timeout to 15 minutes
|
||||
|
||||
### Fixed
|
||||
|
||||
- Atom/RSS formatting (HTML truncation, published, missing summary)
|
||||
- Remove `static_fe` pipeline for `/users/:nickname/feed`
|
||||
- Stop oban from retrying if validating errors occur when processing incoming data
|
||||
- Make sure object refetching as used by already received polls follows MRF rules
|
||||
|
||||
### Removed
|
||||
|
||||
- BREAKING: Support for passwords generated with `crypt(3)` (Gnu Social migration artifact)
|
||||
|
||||
## 2.5.1
|
||||
|
||||
### Added
|
||||
|
||||
- Allow customizing instance languages
|
||||
|
||||
### Fixed
|
||||
|
||||
- Security: uploading HTTP endpoint can no longer create directories in the upload dir (internal APIs, like backup, still can do it.)
|
||||
- ~ character in urls in Markdown posts are handled properly
|
||||
- Exiftool upload filter will now ignore SVG files
|
||||
|
|
@ -405,6 +548,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
|||
- Quack, the logging backend that pushes to Slack channels
|
||||
|
||||
### Changed
|
||||
|
||||
- **Breaking:** Elixir >=1.11 is now required (was >= 1.9)
|
||||
- Allow users to remove their emails if instance does not need email to register
|
||||
- Uploadfilter `Pleroma.Upload.Filter.Exiftool` has been renamed to `Pleroma.Upload.Filter.Exiftool.StripLocation`
|
||||
|
|
@ -415,6 +559,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
|||
- CSP now includes wasm-unsafe-eval
|
||||
|
||||
### Added
|
||||
|
||||
- `activeMonth` and `activeHalfyear` fields in NodeInfo usage.users object
|
||||
- Experimental support for Finch. Put `config :tesla, :adapter, {Tesla.Adapter.Finch, name: MyFinch}` in your secrets file to use it. Reverse Proxy will still use Hackney.
|
||||
- `ForceMentionsInPostContent` MRF policy
|
||||
|
|
@ -436,6 +581,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
|||
- Possibility to discover users like `user@example.org`, while Pleroma is working on `pleroma.example.org`. Additional configuration required.
|
||||
|
||||
### Fixed
|
||||
|
||||
- Subscription(Bell) Notifications: Don't create from Pipeline Ingested replies
|
||||
- Handle Reject for already-accepted Follows properly
|
||||
- Display OpenGraph data on alternative notice routes.
|
||||
|
|
@ -458,6 +604,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
|||
## 2.4.5 - 2022-11-27
|
||||
|
||||
## Fixed
|
||||
|
||||
- Image `class` attributes not being scrubbed, allowing to exploit frontend special classes [!3792](https://git.pleroma.social/pleroma/pleroma/-/merge_requests/3792)
|
||||
- Delete report notifs when demoting from superuser [!3642](https://git.pleroma.social/pleroma/pleroma/-/merge_requests/3642)
|
||||
- Validate `mediaType` only by its format rather than using a list [!3597](https://git.pleroma.social/pleroma/pleroma/-/merge_requests/3597)
|
||||
|
|
@ -472,17 +619,20 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
|||
## 2.4.4 - 2022-08-19
|
||||
|
||||
### Security
|
||||
|
||||
- Streaming API sessions will now properly disconnect if the corresponding token is revoked
|
||||
|
||||
## 2.4.3 - 2022-05-06
|
||||
|
||||
### Security
|
||||
|
||||
- Private `/objects/` and `/activities/` leaking if cached by authenticated user
|
||||
- SweetXML library DTD bomb
|
||||
|
||||
## 2.4.2 - 2022-01-10
|
||||
|
||||
### Fixed
|
||||
|
||||
- Federation issues caused by HTTP pool checkout timeouts
|
||||
- Compatibility with Elixir 1.13
|
||||
|
||||
|
|
@ -493,12 +643,15 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
|||
## 2.4.1 - 2021-08-29
|
||||
|
||||
### Changed
|
||||
|
||||
- Make `mix pleroma.database set_text_search_config` run concurrently and indefinitely
|
||||
|
||||
### Added
|
||||
|
||||
- AdminAPI: Missing configuration description for StealEmojiPolicy
|
||||
|
||||
### Fixed
|
||||
|
||||
- MastodonAPI: Stream out Create activities
|
||||
- MRF ObjectAgePolicy: Fix pattern matching on "published"
|
||||
- TwitterAPI: Make `change_password` and `change_email` require params on body instead of query
|
||||
|
|
@ -537,6 +690,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
|||
- Pinned posts federation
|
||||
|
||||
### Fixed
|
||||
|
||||
- Don't crash so hard when email settings are invalid.
|
||||
- Checking activated Upload Filters for required commands.
|
||||
- Remote users can no longer reappear after being deleted.
|
||||
|
|
@ -554,6 +708,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
|||
- Fixed some Markdown issues, including trailing slash in links.
|
||||
|
||||
### Removed
|
||||
|
||||
- **Breaking**: Remove deprecated `/api/qvitter/statuses/notifications/read` (replaced by `/api/v1/pleroma/notifications/read`)
|
||||
|
||||
## [2.3.0] - 2021-03-01
|
||||
|
|
@ -672,6 +827,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
|||
## [2.2.1] - 2020-12-22
|
||||
|
||||
### Changed
|
||||
|
||||
- Updated Pleroma FE
|
||||
|
||||
### Fixed
|
||||
|
|
@ -724,7 +880,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
|||
- **Breaking:** `Pleroma.Workers.Cron.ClearOauthTokenWorker` setting from Oban `:crontab` (moved to scheduled jobs).
|
||||
- **Breaking:** `Pleroma.Workers.Cron.PurgeExpiredActivitiesWorker` setting from Oban `:crontab` (moved to scheduled jobs).
|
||||
- Removed `:managed_config` option. In practice, it was accidentally removed with 2.0.0 release when frontends were
|
||||
switched to a new configuration mechanism, however it was not officially removed until now.
|
||||
switched to a new configuration mechanism, however it was not officially removed until now.
|
||||
|
||||
### Added
|
||||
|
||||
|
|
@ -755,8 +911,10 @@ switched to a new configuration mechanism, however it was not officially removed
|
|||
|
||||
1. Install libmagic and development headers (`libmagic-dev` on Ubuntu/Debian, `file-dev` on Alpine Linux)
|
||||
2. Run database migrations (inside Pleroma directory):
|
||||
- OTP: `./bin/pleroma_ctl migrate`
|
||||
- From Source: `mix ecto.migrate`
|
||||
|
||||
- OTP: `./bin/pleroma_ctl migrate`
|
||||
- From Source: `mix ecto.migrate`
|
||||
|
||||
3. Restart Pleroma
|
||||
|
||||
## [2.1.2] - 2020-09-17
|
||||
|
|
@ -784,6 +942,7 @@ switched to a new configuration mechanism, however it was not officially removed
|
|||
## [2.1.1] - 2020-09-08
|
||||
|
||||
### Security
|
||||
|
||||
- Fix possible DoS in Mastodon API user search due to an error in match clauses, leading to an infinite recursion and subsequent OOM with certain inputs.
|
||||
- Fix metadata leak for accounts and statuses on private instances.
|
||||
- Fix possible DoS in Admin API search using an atom leak vulnerability. Authentication with admin rights was required to exploit.
|
||||
|
|
@ -794,6 +953,7 @@ switched to a new configuration mechanism, however it was not officially removed
|
|||
- Improved error message when cmake is not available at build stage.
|
||||
|
||||
### Added
|
||||
|
||||
- Rich media failure tracking (along with `:failure_backoff` option).
|
||||
|
||||
<details>
|
||||
|
|
@ -803,6 +963,7 @@ switched to a new configuration mechanism, however it was not officially removed
|
|||
</details>
|
||||
|
||||
### Fixed
|
||||
|
||||
- Default HTTP adapter not respecting pool setting, leading to possible OOM.
|
||||
- Fixed uploading webp images when the Exiftool Upload Filter is enabled by skipping them
|
||||
- Mastodon API: Search parameter `following` now correctly returns the followings rather than the followers
|
||||
|
|
@ -908,6 +1069,7 @@ switched to a new configuration mechanism, however it was not officially removed
|
|||
</details>
|
||||
|
||||
### Fixed
|
||||
|
||||
- Fix list pagination and other list issues.
|
||||
- Support pagination in conversations API
|
||||
- **Breaking**: SimplePolicy `:reject` and `:accept` allow deletions again
|
||||
|
|
@ -928,9 +1090,11 @@ switched to a new configuration mechanism, however it was not officially removed
|
|||
## [2.0.7] - 2020-06-13
|
||||
|
||||
### Security
|
||||
|
||||
- Fix potential DoSes exploiting atom leaks in rich media parser and the `UserAllowListPolicy` MRF policy
|
||||
|
||||
### Fixed
|
||||
|
||||
- CSP: not allowing images/media from every host when mediaproxy is disabled
|
||||
- CSP: not adding mediaproxy base url to image/media hosts
|
||||
- StaticFE missing the CSS file
|
||||
|
|
@ -942,28 +1106,36 @@ switched to a new configuration mechanism, however it was not officially removed
|
|||
## [2.0.6] - 2020-06-09
|
||||
|
||||
### Security
|
||||
|
||||
- CSP: harden `image-src` and `media-src` when MediaProxy is used
|
||||
|
||||
### Fixed
|
||||
|
||||
- AP C2S: Fix pagination in inbox/outbox
|
||||
- Various compilation errors on OTP 23
|
||||
- Mastodon API streaming: Repeats from muted threads not being filtered
|
||||
|
||||
### Changed
|
||||
|
||||
- Various database performance improvements
|
||||
|
||||
### Upgrade notes
|
||||
|
||||
1. Run database migrations (inside Pleroma directory):
|
||||
- OTP: `./bin/pleroma_ctl migrate`
|
||||
- From Source: `mix ecto.migrate`
|
||||
|
||||
- OTP: `./bin/pleroma_ctl migrate`
|
||||
- From Source: `mix ecto.migrate`
|
||||
|
||||
2. Restart Pleroma
|
||||
|
||||
## [2.0.5] - 2020-05-13
|
||||
|
||||
### Security
|
||||
|
||||
- Fix possible private status leaks in Mastodon Streaming API
|
||||
|
||||
### Fixed
|
||||
|
||||
- Crashes when trying to block a user if block federation is disabled
|
||||
- Not being able to start the instance without `erlang-eldap` installed
|
||||
- Users with bios over the limit getting rejected
|
||||
|
|
@ -976,9 +1148,11 @@ switched to a new configuration mechanism, however it was not officially removed
|
|||
## [2.0.4] - 2020-05-10
|
||||
|
||||
### Security
|
||||
|
||||
- AP C2S: Fix a potential DoS by creating nonsensical objects that break timelines
|
||||
|
||||
### Fixed
|
||||
|
||||
- Peertube user lookups not working
|
||||
- `InsertSkeletonsForDeletedUsers` migration failing on some instances
|
||||
- Healthcheck reporting the number of memory currently used, rather than allocated in total
|
||||
|
|
@ -990,6 +1164,7 @@ switched to a new configuration mechanism, however it was not officially removed
|
|||
#### Apache only
|
||||
|
||||
1. Remove the following line from your config:
|
||||
|
||||
```
|
||||
SSLCertificateFile /etc/letsencrypt/live/${servername}/cert.pem
|
||||
```
|
||||
|
|
@ -1001,11 +1176,13 @@ switched to a new configuration mechanism, however it was not officially removed
|
|||
## [2.0.3] - 2020-05-02
|
||||
|
||||
### Security
|
||||
|
||||
- Disallow re-registration of previously deleted users, which allowed viewing direct messages addressed to them
|
||||
- Mastodon API: Fix `POST /api/v1/follow_requests/:id/authorize` allowing to force a follow from a local user even if they didn't request to follow
|
||||
- CSP: Sandbox uploads
|
||||
|
||||
### Fixed
|
||||
|
||||
- Notifications from blocked domains
|
||||
- Potential federation issues with Mastodon versions before 3.0.0
|
||||
- HTTP Basic Authentication permissions issue
|
||||
|
|
@ -1016,6 +1193,7 @@ switched to a new configuration mechanism, however it was not officially removed
|
|||
- `blob:` urls not being allowed by CSP
|
||||
|
||||
### Added
|
||||
|
||||
- NodeInfo: ObjectAgePolicy settings to the `federation` list.
|
||||
- Follow request notifications
|
||||
<details>
|
||||
|
|
@ -1027,19 +1205,24 @@ switched to a new configuration mechanism, however it was not officially removed
|
|||
|
||||
1. Restart Pleroma
|
||||
2. Run database migrations (inside Pleroma directory):
|
||||
- OTP: `./bin/pleroma_ctl migrate`
|
||||
- From Source: `mix ecto.migrate`
|
||||
3. Reset status visibility counters (inside Pleroma directory):
|
||||
- OTP: `./bin/pleroma_ctl refresh_counter_cache`
|
||||
- From Source: `mix pleroma.refresh_counter_cache`
|
||||
|
||||
- OTP: `./bin/pleroma_ctl migrate`
|
||||
- From Source: `mix ecto.migrate`
|
||||
|
||||
3. Reset status visibility counters (inside Pleroma directory):
|
||||
|
||||
- OTP: `./bin/pleroma_ctl refresh_counter_cache`
|
||||
- From Source: `mix pleroma.refresh_counter_cache`
|
||||
|
||||
## [2.0.2] - 2020-04-08
|
||||
|
||||
### Added
|
||||
|
||||
- Support for Funkwhale's `Audio` activity
|
||||
- Admin API: `PATCH /api/pleroma/admin/users/:nickname/update_credentials`
|
||||
|
||||
### Fixed
|
||||
|
||||
- Blocked/muted users still generating push notifications
|
||||
- Input textbox for bio ignoring newlines
|
||||
- OTP: Inability to use PostgreSQL databases with SSL
|
||||
|
|
@ -1047,13 +1230,17 @@ switched to a new configuration mechanism, however it was not officially removed
|
|||
- Incorrect URL for Funkwhale channels
|
||||
|
||||
### Upgrade notes
|
||||
|
||||
1. Restart Pleroma
|
||||
|
||||
## [2.0.1] - 2020-03-15
|
||||
|
||||
### Security
|
||||
|
||||
- Static-FE: Fix remote posts not being sanitized
|
||||
|
||||
### Fixed
|
||||
|
||||
- Rate limiter crashes when there is no explicitly specified ip in the config
|
||||
- 500 errors when no `Accept` header is present if Static-FE is enabled
|
||||
- Instance panel not being updated immediately due to wrong `Cache-Control` headers
|
||||
|
|
@ -1064,24 +1251,33 @@ switched to a new configuration mechanism, however it was not officially removed
|
|||
- Mastodon Streaming API: hashtag timelines not working
|
||||
|
||||
### Changed
|
||||
|
||||
- BBCode and Markdown formatters will no longer return any `\n` and only use `<br/>` for newlines
|
||||
- Mastodon API: Allow registration without email if email verification is not enabled
|
||||
|
||||
### Upgrade notes
|
||||
|
||||
#### Nginx only
|
||||
|
||||
1. Remove `proxy_ignore_headers Cache-Control;` and `proxy_hide_header Cache-Control;` from your config.
|
||||
|
||||
#### Everyone
|
||||
|
||||
1. Run database migrations (inside Pleroma directory):
|
||||
- OTP: `./bin/pleroma_ctl migrate`
|
||||
- From Source: `mix ecto.migrate`
|
||||
|
||||
- OTP: `./bin/pleroma_ctl migrate`
|
||||
- From Source: `mix ecto.migrate`
|
||||
|
||||
2. Restart Pleroma
|
||||
|
||||
## [2.0.0] - 2019-03-08
|
||||
|
||||
### Security
|
||||
|
||||
- Mastodon API: Fix being able to request enormous amount of statuses in timelines leading to DoS. Now limited to 40 per request.
|
||||
|
||||
### Removed
|
||||
|
||||
- **Breaking**: Removed 1.0+ deprecated configurations `Pleroma.Upload, :strip_exif` and `:instance, :dedupe_media`
|
||||
- **Breaking**: OStatus protocol support
|
||||
- **Breaking**: MDII uploader
|
||||
|
|
@ -1093,6 +1289,7 @@ switched to a new configuration mechanism, however it was not officially removed
|
|||
</details>
|
||||
|
||||
### Changed
|
||||
|
||||
- **Breaking:** Pleroma won't start if it detects unapplied migrations
|
||||
- **Breaking:** Elixir >=1.8 is now required (was >= 1.7)
|
||||
- **Breaking:** `Pleroma.Plugs.RemoteIp` and `:rate_limiter` enabled by default. Please ensure your reverse proxy forwards the real IP!
|
||||
|
|
@ -1142,6 +1339,7 @@ switched to a new configuration mechanism, however it was not officially removed
|
|||
</details>
|
||||
|
||||
### Added
|
||||
|
||||
- `:chat_limit` option to limit chat characters.
|
||||
- `cleanup_attachments` option to remove attachments along with statuses. Does not affect duplicate files and attachments without status. Enabling this will increase load to database when deleting statuses on larger instances.
|
||||
- Refreshing poll results for remote polls
|
||||
|
|
@ -1209,6 +1407,7 @@ switched to a new configuration mechanism, however it was not officially removed
|
|||
</details>
|
||||
|
||||
### Fixed
|
||||
|
||||
- Report emails now include functional links to profiles of remote user accounts
|
||||
- Not being able to log in to some third-party apps when logged in to MastoFE
|
||||
- MRF: `Delete` activities being exempt from MRF policies
|
||||
|
|
@ -1228,7 +1427,9 @@ switched to a new configuration mechanism, however it was not officially removed
|
|||
</details>
|
||||
|
||||
## [1.1.9] - 2020-02-10
|
||||
|
||||
### Fixed
|
||||
|
||||
- OTP: Inability to set the upload limit (again)
|
||||
- Not being able to pin polls
|
||||
- Streaming API: incorrect handling of reblog mutes
|
||||
|
|
@ -1236,98 +1437,132 @@ switched to a new configuration mechanism, however it was not officially removed
|
|||
- OpenGraph provider: html entities in descriptions
|
||||
|
||||
## [1.1.8] - 2020-01-10
|
||||
|
||||
### Fixed
|
||||
|
||||
- Captcha generation issues
|
||||
- Returned Kocaptcha endpoint to configuration
|
||||
- Captcha validity is now 5 minutes
|
||||
|
||||
## [1.1.7] - 2019-12-13
|
||||
|
||||
### Fixed
|
||||
|
||||
- OTP: Inability to set the upload limit
|
||||
- OTP: Inability to override node name/distribution type to run 2 Pleroma instances on the same machine
|
||||
|
||||
### Added
|
||||
|
||||
- Integrated captcha provider
|
||||
|
||||
### Changed
|
||||
|
||||
- Captcha enabled by default
|
||||
- Default Captcha provider changed from `Pleroma.Captcha.Kocaptcha` to `Pleroma.Captcha.Native`
|
||||
- Better `Cache-Control` header for static content
|
||||
|
||||
### Bundled Pleroma-FE Changes
|
||||
|
||||
#### Added
|
||||
|
||||
- Icons in the navigation panel
|
||||
|
||||
#### Fixed
|
||||
|
||||
- Improved support for unauthenticated viewing of private instances
|
||||
|
||||
#### Removed
|
||||
|
||||
- Whitespace hack on empty post content
|
||||
|
||||
## [1.1.6] - 2019-11-19
|
||||
|
||||
### Fixed
|
||||
|
||||
- Not being able to log into to third party apps when the browser is logged into mastofe
|
||||
- Email confirmation not being required even when enabled
|
||||
- Mastodon API: conversations API crashing when one status is malformed
|
||||
|
||||
### Bundled Pleroma-FE Changes
|
||||
|
||||
#### Added
|
||||
|
||||
- About page
|
||||
- Meme arrows
|
||||
|
||||
#### Fixed
|
||||
|
||||
- Image modal not closing unless clicked outside of image
|
||||
- Attachment upload spinner not being centered
|
||||
- Showing follow counters being 0 when they are actually hidden
|
||||
|
||||
## [1.1.5] - 2019-11-09
|
||||
|
||||
### Fixed
|
||||
|
||||
- Polls having different numbers in timelines/notifications/poll API endpoints due to cache desynchronization
|
||||
- Pleroma API: OAuth token endpoint not being found when ".json" suffix is appended
|
||||
|
||||
### Changed
|
||||
|
||||
- Frontend bundle updated to [044c9ad0](https://git.pleroma.social/pleroma/pleroma-fe/commit/044c9ad0562af059dd961d50961a3880fca9c642)
|
||||
|
||||
## [1.1.4] - 2019-11-01
|
||||
|
||||
### Fixed
|
||||
|
||||
- Added a migration that fills up empty user.info fields to prevent breakage after previous unsafe migrations.
|
||||
- Failure to migrate from pre-1.0.0 versions
|
||||
- Mastodon API: Notification stream not including follow notifications
|
||||
|
||||
## [1.1.3] - 2019-10-25
|
||||
|
||||
### Fixed
|
||||
|
||||
- Blocked users showing up in notifications collapsed as if they were muted
|
||||
- `pleroma_ctl` not working on Debian's default shell
|
||||
|
||||
## [1.1.2] - 2019-10-18
|
||||
|
||||
### Fixed
|
||||
|
||||
- `pleroma_ctl` trying to connect to a running instance when generating the config, which of course doesn't exist.
|
||||
|
||||
## [1.1.1] - 2019-10-18
|
||||
|
||||
### Fixed
|
||||
|
||||
- One of the migrations between 1.0.0 and 1.1.0 wiping user info of the relay user because of unexpected behavior of postgresql's `jsonb_set`, resulting in inability to post in the default configuration. If you were affected, please run the following query in postgres console, the relay user will be recreated automatically:
|
||||
|
||||
```
|
||||
delete from users where ap_id = 'https://your.instance.hostname/relay';
|
||||
```
|
||||
|
||||
- Bad user search matches
|
||||
|
||||
## [1.1.0] - 2019-10-14
|
||||
|
||||
**Breaking:** The stable branch has been changed from `master` to `stable`. If you want to keep using 1.0, the `release/1.0` branch will receive security updates for 6 months after 1.1 release.
|
||||
|
||||
**OTP Note:** `pleroma_ctl` in 1.0 defaults to `master` and doesn't support specifying arbitrary branches, making `./pleroma_ctl update` fail. To fix this, fetch a version of `pleroma_ctl` from 1.1 using the command below and proceed with the update normally:
|
||||
|
||||
```
|
||||
curl -Lo ./bin/pleroma_ctl 'https://git.pleroma.social/pleroma/pleroma/raw/develop/rel/files/bin/pleroma_ctl'
|
||||
```
|
||||
|
||||
### Security
|
||||
|
||||
- Mastodon API: respect post privacy in `/api/v1/statuses/:id/{favourited,reblogged}_by`
|
||||
|
||||
### Removed
|
||||
|
||||
- **Breaking:** GNU Social API with Qvitter extensions support
|
||||
- Emoji: Remove longfox emojis.
|
||||
- Remove `Reply-To` header from report emails for admins.
|
||||
- ActivityPub: The `/objects/:uuid/likes` endpoint.
|
||||
|
||||
### Changed
|
||||
|
||||
- **Breaking:** Configuration: A setting to explicitly disable the mailer was added, defaulting to true, if you are using a mailer add `config :pleroma, Pleroma.Emails.Mailer, enabled: true` to your config
|
||||
- **Breaking:** Configuration: `/media/` is now removed when `base_url` is configured, append `/media/` to your `base_url` config to keep the old behaviour if desired
|
||||
- **Breaking:** `/api/pleroma/notifications/read` is moved to `/api/v1/pleroma/notifications/read` and now supports `max_id` and responds with Mastodon API entities.
|
||||
|
|
@ -1341,10 +1576,11 @@ curl -Lo ./bin/pleroma_ctl 'https://git.pleroma.social/pleroma/pleroma/raw/devel
|
|||
- Mastodon API: `pleroma.thread_muted` key in the Status entity
|
||||
- AdminAPI: Add "godmode" while fetching user statuses (i.e. admin can see private statuses)
|
||||
- Improve digest email template
|
||||
– Pagination: (optional) return `total` alongside with `items` when paginating
|
||||
– Pagination: (optional) return `total` alongside with `items` when paginating
|
||||
- The `Pleroma.FlakeId` module has been replaced with the `flake_id` library.
|
||||
|
||||
### Fixed
|
||||
|
||||
- Following from Osada
|
||||
- Favorites timeline doing database-intensive queries
|
||||
- Metadata rendering errors resulting in the entire page being inaccessible
|
||||
|
|
@ -1377,6 +1613,7 @@ curl -Lo ./bin/pleroma_ctl 'https://git.pleroma.social/pleroma/pleroma/raw/devel
|
|||
- Reverse Proxy limiting `max_body_length` was incorrectly defined and only checked `Content-Length` headers which may not be sufficient in some circumstances
|
||||
|
||||
### Added
|
||||
|
||||
- Expiring/ephemeral activities. All activities can have expires_at value set, which controls when they should be deleted automatically.
|
||||
- Mastodon API: in post_status, the expires_in parameter lets you set the number of seconds until an activity expires. It must be at least one hour.
|
||||
- Mastodon API: all status JSON responses contain a `pleroma.expires_at` item which states when an activity will expire. The value is only shown to the user who created the activity. To everyone else it's empty.
|
||||
|
|
@ -1420,24 +1657,33 @@ curl -Lo ./bin/pleroma_ctl 'https://git.pleroma.social/pleroma/pleroma/raw/devel
|
|||
- Reverse Proxy: Do not retry failed requests to limit pressure on the peer
|
||||
|
||||
### Changed
|
||||
|
||||
- Configuration: Filter.AnonymizeFilename added ability to retain file extension with custom text
|
||||
- Admin API: changed json structure for saving config settings.
|
||||
- RichMedia: parsers and their order are configured in `rich_media` config.
|
||||
- RichMedia: add the rich media ttl based on image expiration time.
|
||||
|
||||
## [1.0.7] - 2019-09-26
|
||||
|
||||
### Fixed
|
||||
|
||||
- Broken federation on Erlang 22 (previous versions of hackney http client were using an option that got deprecated)
|
||||
|
||||
### Changed
|
||||
|
||||
- ActivityPub: The first page in inboxes/outboxes is no longer embedded.
|
||||
|
||||
## [1.0.6] - 2019-08-14
|
||||
|
||||
### Fixed
|
||||
|
||||
- MRF: fix use of unserializable keyword lists in describe() implementations
|
||||
- ActivityPub S2S: POST requests are now signed with `(request-target)` pseudo-header.
|
||||
|
||||
## [1.0.5] - 2019-08-13
|
||||
|
||||
### Fixed
|
||||
|
||||
- Mastodon API: follower/following counters not being nullified, when `hide_follows`/`hide_followers` is set
|
||||
- Mastodon API: `muted` in the Status entity, using author's account to determine if the thread was muted
|
||||
- Mastodon API: return the actual profile URL in the Account entity's `url` property when appropriate
|
||||
|
|
@ -1448,6 +1694,7 @@ curl -Lo ./bin/pleroma_ctl 'https://git.pleroma.social/pleroma/pleroma/raw/devel
|
|||
- Fix internal server error when using the healthcheck API.
|
||||
|
||||
### Added
|
||||
|
||||
- **Breaking:** MRF describe API, which adds support for exposing configuration information about MRF policies to NodeInfo.
|
||||
Custom modules will need to be updated by adding, at the very least, `def describe, do: {:ok, %{}}` to the MRF policy modules.
|
||||
- Relays: Added a task to list relay subscriptions.
|
||||
|
|
@ -1459,21 +1706,28 @@ curl -Lo ./bin/pleroma_ctl 'https://git.pleroma.social/pleroma/pleroma/raw/devel
|
|||
- Configuration: `federation_incoming_replies_max_depth` option
|
||||
|
||||
### Removed
|
||||
|
||||
- Federation: Remove `likes` from objects.
|
||||
- **Breaking:** ActivityPub: The `accept_blocks` configuration setting.
|
||||
|
||||
## [1.0.4] - 2019-08-01
|
||||
|
||||
### Fixed
|
||||
|
||||
- Invalid SemVer version generation, when the current branch does not have commits ahead of tag/checked out on a tag
|
||||
|
||||
## [1.0.3] - 2019-07-31
|
||||
|
||||
### Security
|
||||
|
||||
- OStatus: eliminate the possibility of a protocol downgrade attack.
|
||||
- OStatus: prevent following locked accounts, bypassing the approval process.
|
||||
- TwitterAPI: use CommonAPI to handle remote follows instead of OStatus.
|
||||
|
||||
## [1.0.2] - 2019-07-28
|
||||
|
||||
### Fixed
|
||||
|
||||
- Not being able to pin unlisted posts
|
||||
- Mastodon API: represent poll IDs as strings
|
||||
- MediaProxy: fix matching filenames
|
||||
|
|
@ -1484,19 +1738,25 @@ curl -Lo ./bin/pleroma_ctl 'https://git.pleroma.social/pleroma/pleroma/raw/devel
|
|||
- ActivityPub S2S: remote user deletions now work the same as local user deletions.
|
||||
|
||||
### Changed
|
||||
|
||||
- Configuration: OpenGraph and TwitterCard providers enabled by default
|
||||
- Configuration: Filter.AnonymizeFilename added ability to retain file extension with custom text
|
||||
|
||||
## [1.0.1] - 2019-07-14
|
||||
|
||||
### Security
|
||||
|
||||
- OStatus: fix an object spoofing vulnerability.
|
||||
|
||||
## [1.0.0] - 2019-06-29
|
||||
|
||||
### Security
|
||||
|
||||
- Mastodon API: Fix display names not being sanitized
|
||||
- Rich media: Do not crawl private IP ranges
|
||||
|
||||
### Added
|
||||
|
||||
- Digest email for inactive users
|
||||
- Add a generic settings store for frontends / clients to use.
|
||||
- Explicit addressing option for posting.
|
||||
|
|
@ -1561,6 +1821,7 @@ curl -Lo ./bin/pleroma_ctl 'https://git.pleroma.social/pleroma/pleroma/raw/devel
|
|||
- Configuration: default syslog tag "Pleroma" is now lowercased to "pleroma"
|
||||
|
||||
### Changed
|
||||
|
||||
- **Breaking:** bind to 127.0.0.1 instead of 0.0.0.0 by default
|
||||
- **Breaking:** Configuration: move from Pleroma.Mailer to Pleroma.Emails.Mailer
|
||||
- Thread containment / test for complete visibility will be skipped by default.
|
||||
|
|
@ -1602,6 +1863,7 @@ curl -Lo ./bin/pleroma_ctl 'https://git.pleroma.social/pleroma/pleroma/raw/devel
|
|||
- Rich Media: crawl only https URLs.
|
||||
|
||||
### Fixed
|
||||
|
||||
- Follow requests don't get 'stuck' anymore.
|
||||
- Added an FTS index on objects. Running `vacuum analyze` and setting a larger `work_mem` is recommended.
|
||||
- Followers counter not being updated when a follower is blocked
|
||||
|
|
@ -1637,31 +1899,48 @@ curl -Lo ./bin/pleroma_ctl 'https://git.pleroma.social/pleroma/pleroma/raw/devel
|
|||
- MRF: Simple policy now properly delists imported or relayed statuses
|
||||
|
||||
## Removed
|
||||
|
||||
- Configuration: `config :pleroma, :fe` in favor of the more flexible `config :pleroma, :frontend_configurations`
|
||||
|
||||
## [0.9.99999] - 2019-05-31
|
||||
|
||||
### Security
|
||||
|
||||
- Mastodon API: Fix lists leaking private posts
|
||||
|
||||
## [0.9.9999] - 2019-04-05
|
||||
|
||||
### Security
|
||||
|
||||
- Mastodon API: Fix content warnings skipping HTML sanitization
|
||||
|
||||
## [0.9.999] - 2019-03-13
|
||||
|
||||
Frontend changes only.
|
||||
|
||||
### Added
|
||||
|
||||
- Added floating action button for posting status on mobile
|
||||
|
||||
### Changed
|
||||
|
||||
- Changed user-settings icon to a pencil
|
||||
|
||||
### Fixed
|
||||
|
||||
- Keyboard shortcuts activating when typing a message
|
||||
- Gaps when scrolling down on a timeline after showing new
|
||||
|
||||
## [0.9.99] - 2019-03-08
|
||||
|
||||
### Changed
|
||||
|
||||
- Update the frontend to the 0.9.99 tag
|
||||
|
||||
### Fixed
|
||||
|
||||
- Sign the date header in federation to fix Mastodon federation.
|
||||
|
||||
## [0.9.9] - 2019-02-22
|
||||
|
||||
This is our first stable release.
|
||||
|
|
|
|||
|
|
@@ -1,10 +1,10 @@
# https://hub.docker.com/r/hexpm/elixir/tags
ARG ELIXIR_IMG=hexpm/elixir
ARG ELIXIR_VER=1.14.5
ARG ERLANG_VER=25.3.2.14
ARG ELIXIR_VER=1.17.3
ARG ERLANG_VER=26.2.5.6
ARG ALPINE_VER=3.17.9

FROM ${ELIXIR_IMG}:${ELIXIR_VER}-erlang-${ERLANG_VER}-alpine-${ALPINE_VER} as build
FROM ${ELIXIR_IMG}:${ELIXIR_VER}-erlang-${ERLANG_VER}-alpine-${ALPINE_VER} AS build

COPY . .

@@ -15,6 +15,7 @@ RUN apk add git gcc g++ musl-dev make cmake file-dev vips-dev &&\
    echo "import Config" > config/prod.secret.exs &&\
    mix local.hex --force &&\
    mix local.rebar --force &&\
    mix deps.clean --all &&\
    mix deps.get --only prod &&\
    mkdir release &&\
    mix release --path release

ci/elixir-1.18.3-otp-27/Dockerfile (new file)
@@ -0,0 +1,8 @@
FROM elixir:1.18.3-otp-27

# Single RUN statement, otherwise intermediate images are created
# https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#run
RUN apt-get update &&\
    apt-get install -y libmagic-dev cmake libimage-exiftool-perl ffmpeg &&\
    mix local.hex --force &&\
    mix local.rebar --force

ci/elixir-1.18.3-otp-27/build_and_push.sh (new executable file)
@@ -0,0 +1 @@
docker buildx build --platform linux/amd64,linux/arm64 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.18.3-otp-27 --push .
|
|
@@ -48,7 +48,7 @@ config :pleroma, ecto_repos: [Pleroma.Repo]

config :pleroma, Pleroma.Repo,
  telemetry_event: [Pleroma.Repo.Instrumenter],
  migration_lock: nil
  migration_lock: :pg_advisory_lock

config :pleroma, Pleroma.Captcha,
  enabled: true,

@@ -194,7 +194,6 @@ config :pleroma, :instance,
  account_approval_required: false,
  federating: true,
  federation_incoming_replies_max_depth: 100,
  federation_reachability_timeout_days: 7,
  allow_relay: true,
  public: true,
  quarantined_instances: [],

@@ -307,6 +306,7 @@ config :pleroma, :frontend_configurations,
    collapseMessageWithSubject: false,
    disableChat: false,
    greentext: false,
    embeddedToS: true,
    hideFilteredStatuses: false,
    hideMutedPosts: false,
    hidePostStats: false,

@@ -364,7 +364,9 @@ config :pleroma, :activitypub,
  note_replies_output_limit: 5,
  sign_object_fetches: true,
  authorized_fetch_mode: false,
  client_api_enabled: false
  client_api_enabled: false,
  anonymize_reporter: false,
  anonymize_reporter_local_nickname: ""

config :pleroma, :streamer,
  workers: 3,

@@ -589,6 +591,7 @@ config :pleroma, Pleroma.User,
# value or it cannot enforce uniqueness.
config :pleroma, Oban,
  repo: Pleroma.Repo,
  notifier: Oban.Notifiers.PG,
  log: false,
  queues: [
    activity_expiration: 10,

@@ -599,7 +602,7 @@ config :pleroma, Oban,
    search_indexing: [limit: 10, paused: true],
    slow: 5
  ],
  plugins: [{Oban.Plugins.Pruner, max_age: 900}],
  plugins: [Oban.Plugins.Lazarus, {Oban.Plugins.Pruner, max_age: 900}],
  crontab: [
    {"0 0 * * 0", Pleroma.Workers.Cron.DigestEmailsWorker},
    {"0 0 * * *", Pleroma.Workers.Cron.NewUsersDigestWorker},
|
|
|
|||
|
|
@ -1261,6 +1261,7 @@ config :pleroma, :config_description, [
background: "/static/aurora_borealis.jpg",
collapseMessageWithSubject: false,
greentext: false,
embeddedToS: true,
hideFilteredStatuses: false,
hideMutedPosts: false,
hidePostStats: false,

@ -1312,6 +1313,12 @@ config :pleroma, :config_description, [
type: :boolean,
description: "Enables green text on lines prefixed with the > character"
},
%{
key: :embeddedToS,
label: "Embedded ToS panel",
type: :boolean,
description: "Hide Terms of Service panel decorations on About and Registration pages"
},
%{
key: :hideFilteredStatuses,
label: "Hide Filtered Statuses",
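Since `embeddedToS` is a per-frontend flag, an instance that wants to hide the panel would toggle it under `:frontend_configurations`. A minimal sketch (the `pleroma_fe` key mirrors the surrounding examples; the value shown is an assumption, not a recommended default):

```elixir
# Sketch: hide the embedded Terms of Service panel in pleroma-fe.
config :pleroma, :frontend_configurations,
  pleroma_fe: %{
    embeddedToS: false
  }
```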
@ -1790,6 +1797,23 @@ config :pleroma, :config_description, [
key: :client_api_enabled,
type: :boolean,
description: "Allow client to server ActivityPub interactions"
},
%{
key: :anonymize_reporter,
type: :boolean,
label: "Anonymize local reports",
description:
"If true, replace local reporters with the designated local user for the copy to be sent to remote servers"
},
%{
key: :anonymize_reporter_local_nickname,
type: :string,
label: "Anonymized reporter",
description:
"The nickname of the designated local user that replaces the actual reporter in the copy to be sent to remote servers",
suggestions: [
"lain"
]
}
]
},

@ -2107,6 +2131,11 @@ config :pleroma, :config_description, [
description:
"Amount of milliseconds after which the HTTP request is forcibly terminated.",
suggestions: [5_000]
},
%{
key: :user_agent,
type: :string,
description: "Custom User-Agent header to be used when fetching rich media content."
}
]
},

@ -3304,6 +3333,12 @@ config :pleroma, :config_description, [
description:
"A map containing available frontends and parameters for their installation.",
children: frontend_options
},
%{
key: :pickable,
type: {:list, :string},
description:
"A list containing all frontends users can pick as their preference, format is :name/:ref, e.g pleroma-fe/stable."
}
]
},

@ -3500,5 +3535,87 @@ config :pleroma, :config_description, [
suggestion: [100_000]
}
]
},
%{
group: :pleroma,
key: Pleroma.Language.LanguageDetector,
type: :group,
description: "Language detection providers",
children: [
%{
key: :provider,
type: :module,
suggestions: {:list_behaviour_implementations, Pleroma.Language.LanguageDetector.Provider}
},
%{
group: {:subgroup, Pleroma.Language.LanguageDetector.Fasttext},
key: :model,
label: "fastText language detection model",
type: :string,
suggestions: ["/usr/share/fasttext/lid.176.bin"]
}
]
},
%{
group: :pleroma,
key: Pleroma.Language.Translation,
type: :group,
description: "Translation providers",
children: [
%{
key: :provider,
type: :module,
suggestions: {:list_behaviour_implementations, Pleroma.Language.Translation.Provider}
},
%{
group: {:subgroup, Pleroma.Language.Translation.Deepl},
key: :base_url,
label: "DeepL base URL",
type: :string,
suggestions: ["https://api-free.deepl.com", "https://api.deepl.com"]
},
%{
group: {:subgroup, Pleroma.Language.Translation.Deepl},
key: :api_key,
label: "DeepL API Key",
type: :string,
suggestions: ["YOUR_API_KEY"]
},
%{
group: {:subgroup, Pleroma.Language.Translation.Libretranslate},
key: :base_url,
label: "LibreTranslate instance URL",
type: :string,
suggestions: ["https://libretranslate.com"]
},
%{
group: {:subgroup, Pleroma.Language.Translation.Libretranslate},
key: :api_key,
label: "LibreTranslate API Key",
type: :string,
suggestions: ["YOUR_API_KEY"]
},
%{
group: {:subgroup, Pleroma.Language.Translation.TranslateLocally},
key: :intermediary_language,
label:
"translateLocally intermediary language (used when direct source->target model is not available)",
type: :string,
suggestions: ["en"]
},
%{
group: {:subgroup, Pleroma.Language.Translation.Mozhi},
key: :base_url,
label: "Mozhi instance URL",
type: :string
},
%{
group: {:subgroup, Pleroma.Language.Translation.Mozhi},
key: :engine,
label: "Engine used for Mozhi",
type: :string,
suggestions: ["libretranslate"]
}
]
}
]
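Read together, the new description entries suggest a runtime configuration roughly like the following. This is a hedged sketch assembled from the suggestions above; the model path and API key are placeholders, and the subgroup keys are assumed to map onto per-module config blocks:

```elixir
# Sketch: fastText-based language detection plus DeepL-backed translation.
config :pleroma, Pleroma.Language.LanguageDetector,
  provider: Pleroma.Language.LanguageDetector.Fasttext

config :pleroma, Pleroma.Language.LanguageDetector.Fasttext,
  model: "/usr/share/fasttext/lid.176.bin"

config :pleroma, Pleroma.Language.Translation,
  provider: Pleroma.Language.Translation.Deepl

config :pleroma, Pleroma.Language.Translation.Deepl,
  base_url: "https://api-free.deepl.com",
  api_key: "YOUR_API_KEY"
```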
@ -156,6 +156,7 @@ config :pleroma, Pleroma.User.Backup, config_impl: Pleroma.UnstubbedConfigMock
config :pleroma, Pleroma.Uploaders.S3, ex_aws_impl: Pleroma.Uploaders.S3.ExAwsMock
config :pleroma, Pleroma.Uploaders.S3, config_impl: Pleroma.UnstubbedConfigMock
config :pleroma, Pleroma.Upload, config_impl: Pleroma.UnstubbedConfigMock
config :pleroma, Pleroma.Language.LanguageDetector, config_impl: Pleroma.StaticStubbedConfigMock
config :pleroma, Pleroma.ScheduledActivity, config_impl: Pleroma.UnstubbedConfigMock
config :pleroma, Pleroma.Web.RichMedia.Helpers, config_impl: Pleroma.StaticStubbedConfigMock
config :pleroma, Pleroma.Uploaders.IPFS, config_impl: Pleroma.UnstubbedConfigMock

@ -169,6 +170,10 @@ config :pleroma, Pleroma.Upload.Filter.Mogrify, config_impl: Pleroma.StaticStubb
config :pleroma, Pleroma.Upload.Filter.Mogrify, mogrify_impl: Pleroma.MogrifyMock

config :pleroma, Pleroma.Signature, http_signatures_impl: Pleroma.StubbedHTTPSignaturesMock
config :pleroma, Pleroma.Web.ActivityPub.Publisher, signature_impl: Pleroma.SignatureMock

config :pleroma, Pleroma.Web.ActivityPub.Publisher,
transmogrifier_impl: Pleroma.Web.ActivityPub.TransmogrifierMock

peer_module =
if String.to_integer(System.otp_release()) >= 25 do
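Because `Pleroma.Language.LanguageDetector` now reads its configuration through `Pleroma.StaticStubbedConfigMock`, an async test can override a single value per process with Mox. A hedged sketch for illustration only: the config path and provider module are assumptions, and only the `get/1` callback is stubbed here:

```elixir
# In a test module using `Pleroma.DataCase, async: true`
import Mox

test "uses the stubbed language detection provider" do
  Pleroma.StaticStubbedConfigMock
  |> stub(:get, fn
    [Pleroma.Language.LanguageDetector, :provider] ->
      Pleroma.Language.LanguageDetector.Fasttext

    path ->
      # fall back to the frozen snapshot for everything else
      Pleroma.Test.StaticConfig.get(path)
  end)

  # ... exercise code that consults the language detector configuration ...
end
```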
@ -2,28 +2,60 @@

## Backup

1. Stop the Pleroma service.
2. Go to the working directory of Pleroma (default is `/opt/pleroma`)
3. Run `sudo -Hu postgres pg_dump -d <pleroma_db> --format=custom -f </path/to/backup_location/pleroma.pgdump>` (make sure the postgres user has write access to the destination file)
1. Stop the Pleroma service:
```
# sudo systemctl stop pleroma
```
2. Go to the working directory of Pleroma (default is `/opt/pleroma`).
3. Run (make sure the postgres user has write access to the destination file):
```
# sudo -Hu postgres pg_dump -d <pleroma_db> -v --format=custom --compress=9 -f </path/to/backup_location/pleroma.pgdump>
```
4. Copy `pleroma.pgdump`, `config/prod.secret.exs`, `config/setup_db.psql` (if still available) and the `uploads` folder to your backup destination. If you have other modifications, copy those changes too.
5. Restart the Pleroma service.
5. Restart the Pleroma service:
```
# sudo systemctl start pleroma
```

## Restore/Move

1. Optionally reinstall Pleroma (either on the same server or on another server if you want to move servers).
2. Stop the Pleroma service.
3. Go to the working directory of Pleroma (default is `/opt/pleroma`)
2. Stop the Pleroma service:
```
# sudo systemctl stop pleroma
```
3. Go to the working directory of Pleroma (default is `/opt/pleroma`).
4. Copy the above mentioned files back to their original position.
5. Drop the existing database and user if restoring in-place. `sudo -Hu postgres psql -c 'DROP DATABASE <pleroma_db>;';` `sudo -Hu postgres psql -c 'DROP USER <pleroma_db>;'`
6. Restore the database schema and pleroma postgres role with the original `setup_db.psql` if you have it: `sudo -Hu postgres psql -f config/setup_db.psql`.
5. Drop the existing database and user if restoring in-place:
```
# sudo -Hu postgres dropdb <pleroma_db>
# sudo -Hu postgres dropuser <pleroma_user>
```
6. Restore the database schema and pleroma database user with the original `setup_db.psql` if you have it:
```
# sudo -Hu postgres psql -f config/setup_db.psql
```

Alternatively, run the `mix pleroma.instance gen` task again. You can ignore most of the questions, but make the database user, name, and password the same as found in your backup of `config/prod.secret.exs`. Then run the restoration of the pleroma role and schema with of the generated `config/setup_db.psql` as instructed above. You may delete the `config/generated_config.exs` file as it is not needed.
Alternatively, run the `mix pleroma.instance gen` task again. You can ignore most of the questions, but make the database user, name, and password the same as found in your backup of `config/prod.secret.exs`. Then run the restoration of the pleroma user and schema with the generated `config/setup_db.psql` as instructed above. You may delete the `config/generated_config.exs` file as it is not needed.

7. Now restore the Pleroma instance's data into the empty database schema: `sudo -Hu postgres pg_restore -d <pleroma_db> -v -1 </path/to/backup_location/pleroma.pgdump>`
8. If you installed a newer Pleroma version, you should run `mix ecto.migrate`[^1]. This task performs database migrations, if there were any.
9. Restart the Pleroma service.
10. Run `sudo -Hu postgres vacuumdb --all --analyze-in-stages`. This will quickly generate the statistics so that postgres can properly plan queries.
11. If setting up on a new server configure Nginx by using the `installation/pleroma.nginx` config sample or reference the Pleroma installation guide for your OS which contains the Nginx configuration instructions.
7. Now restore the Pleroma instance's schema into the empty database schema:
```
# sudo -Hu postgres pg_restore -d <pleroma_db> -v -s -1 </path/to/backup_location/pleroma.pgdump>
```
8. Now restore the Pleroma instance's data into the database:
```
# sudo -Hu postgres pg_restore -d <pleroma_db> -v -a -1 --disable-triggers </path/to/backup_location/pleroma.pgdump>
```
9. If you installed a newer Pleroma version, you should run `mix ecto.migrate`[^1]. This task performs database migrations, if there were any.
10. Generate the statistics so that PostgreSQL can properly plan queries:
```
# sudo -Hu postgres vacuumdb -v --all --analyze-in-stages
```
11. Restart the Pleroma service:
```
# sudo systemctl start pleroma
```
12. If setting up on a new server, configure Nginx by using your original configuration or by using the `installation/pleroma.nginx` config sample or reference the Pleroma installation guide for your OS which contains the Nginx configuration instructions.

[^1]: Prefix with `MIX_ENV=prod` to run it using the production config file.
@ -32,10 +64,26 @@
1. Optionally you can remove the users of your instance. This will trigger delete requests for their accounts and posts. Note that this is 'best effort' and doesn't mean that all traces of your instance will be gone from the fediverse.
    * You can do this from the admin-FE where you can select all local users and delete the accounts using the *Moderate multiple users* dropdown.
    * You can also list local users and delete them individually using the CLI tasks for [Managing users](./CLI_tasks/user.md).
2. Stop the Pleroma service `systemctl stop pleroma`
3. Disable pleroma from systemd `systemctl disable pleroma`
2. Stop the Pleroma service:
```
# systemctl stop pleroma
```
3. Disable pleroma from systemd:
```
# systemctl disable pleroma
```
4. Remove the files and folders you created during installation (see installation guide). This includes the pleroma, nginx and systemd files and folders.
5. Reload nginx now that the configuration is removed `systemctl reload nginx`
6. Remove the database and database user `sudo -Hu postgres psql -c 'DROP DATABASE <pleroma_db>;';` `sudo -Hu postgres psql -c 'DROP USER <pleroma_db>;'`
7. Remove the system user `userdel pleroma`
8. Remove the dependencies that you don't need anymore (see installation guide). Make sure you don't remove packages that are still needed for other software that you have running!
5. Reload nginx now that the configuration is removed:
```
# systemctl reload nginx
```
6. Remove the database and database user:
```
# sudo -Hu postgres dropdb <pleroma_db>
# sudo -Hu postgres dropuser <pleroma_user>
```
7. Remove the system user:
```
# userdel -r pleroma
```
8. Remove the dependencies that you don't need anymore (see installation guide). **Make sure you don't remove packages that are still needed for other software that you have running!**
@ -28,6 +28,7 @@ Feel free to contact us to be added to this list!
### AndStatus
- Homepage: <http://andstatus.org/>
- Source Code: <https://github.com/andstatus/andstatus/>
- Contact: [@AndStatus@mastodon.social](https://mastodon.social/@AndStatus)
- Platforms: Android
- Features: MastoAPI, ActivityPub (Client-to-Server)

@ -40,8 +41,8 @@ Feel free to contact us to be added to this list!

### Fedilab
- Homepage: <https://fedilab.app/>
- Source Code: <https://framagit.org/tom79/fedilab/>
- Contact: [@fedilab@framapiaf.org](https://framapiaf.org/users/fedilab)
- Source Code: <https://codeberg.org/tom79/Fedilab>
- Contact: [@apps@toot.fedilab.app](https://toot.fedilab.app/@apps)
- Platforms: Android
- Features: MastoAPI, Streaming Ready, Moderation, Text Formatting

@ -51,8 +52,8 @@ Feel free to contact us to be added to this list!
- Features: MastoAPI, No Streaming

### Husky
- Source code: <https://git.mentality.rip/FWGS/Husky>
- Contact: [@Husky@enigmatic.observer](https://enigmatic.observer/users/Husky)
- Source code: <https://github.com/captainepoch/husky>
- Contact: [@husky@stereophonic.space](https://stereophonic.space/users/husky)
- Platforms: Android
- Features: MastoAPI, No Streaming, Emoji Reactions, Text Formatting, FE Stickers

@ -65,7 +66,7 @@ Feel free to contact us to be added to this list!
### Tusky
- Homepage: <https://tuskyapp.github.io/>
- Source Code: <https://github.com/tuskyapp/Tusky>
- Contact: [@ConnyDuck@mastodon.social](https://mastodon.social/users/ConnyDuck)
- Contact: [@Tusky@mastodon.social](https://mastodon.social/@Tusky)
- Platforms: Android
- Features: MastoAPI, No Streaming

@ -76,10 +77,10 @@ Feel free to contact us to be added to this list!
- Platform: Android
- Features: MastoAPI, No Streaming

### Indigenous
- Homepage: <https://indigenous.realize.be/>
- Source Code: <https://github.com/swentel/indigenous-android/>
- Contact: [@swentel@realize.be](https://realize.be)
### IndiePass
- Homepage: <https://indiepass.app/>
- Source Code: <https://github.com/IndiePass/indiepass-android>
- Contact: [@marksuth@mastodon.social](https://mastodon.social/@marksuth)
- Platforms: Android
- Features: MastoAPI, No Streaming
@ -733,13 +733,11 @@ An example for SMTP adapter:

```elixir
config :pleroma, Pleroma.Emails.Mailer,
enabled: true,
adapter: Swoosh.Adapters.SMTP,
adapter: Swoosh.Adapters.Mua,
relay: "smtp.gmail.com",
username: "YOUR_USERNAME@gmail.com",
password: "YOUR_SMTP_PASSWORD",
auth: [username: "YOUR_USERNAME@gmail.com", password: "YOUR_SMTP_PASSWORD"],
port: 465,
ssl: true,
auth: :always
protocol: :ssl
```

An example for Mua adapter:

@ -906,21 +904,31 @@ config :logger, :console,

### RUM indexing for full text search

!!! warning
    It is recommended to use PostgreSQL v11 or newer. We have seen some minor issues with lower PostgreSQL versions.

* `rum_enabled`: If RUM indexes should be used. Defaults to `false`.

RUM indexes are an alternative indexing scheme that is not included in PostgreSQL by default. While they may eventually be mainlined, for now they have to be installed as a PostgreSQL extension from https://github.com/postgrespro/rum.
RUM indexes are an alternative indexing scheme that is not included in PostgreSQL by default. While they may eventually be mainlined, for now they have to be installed as a PostgreSQL extension from [https://github.com/postgrespro/rum](https://github.com/postgrespro/rum).

Their advantage over the standard GIN indexes is that they allow efficient ordering of search results by timestamp, which makes search queries a lot faster on larger servers, by one or two orders of magnitude. They take up around 3 times as much space as GIN indexes.
Their advantage over the standard GIN indexes is that they allow efficient ordering of search results by timestamp, which makes search queries a lot faster on larger servers, by one or two orders of magnitude. They take up around 3-4 times as much space as GIN indexes.

To enable them, both the `rum_enabled` flag has to be set and the following special migration has to be run:

`mix ecto.migrate --migrations-path priv/repo/optional_migrations/rum_indexing/`
* Source install:
    - Stop Pleroma
    - `mix ecto.migrate --migrations-path priv/repo/optional_migrations/rum_indexing/`
* OTP install:
    - Stop Pleroma
    - `pleroma_ctl migrate --migrations-path priv/repo/optional_migrations/rum_indexing/`

This will probably take a long time.

!!! note
    It is recommended to `VACUUM FULL` the objects table after the migration has completed, to do that run:
    ```
    # sudo -Hu postgres vacuumdb --full --analyze -t objects <pleroma DB name>
    ```

Now you can start Pleroma back up.

## Alternative client protocols

### BBS / SSH access
|
|
@ -16,7 +16,9 @@ location /proxy {
|
|||
```
|
||||
Also add the following on top of the configuration, outside of the `server` block:
|
||||
```
|
||||
proxy_cache_path /tmp/pleroma-media-cache levels=1:2 keys_zone=pleroma_media_cache:10m max_size=10g inactive=720m use_temp_path=off;
|
||||
# Note: The cache directory must exist and be writable by nginx.
|
||||
# If nginx runs in a chroot, create it inside the chroot.
|
||||
proxy_cache_path /var/tmp/pleroma-media-cache levels=1:2 keys_zone=pleroma_media_cache:10m max_size=10g inactive=720m use_temp_path=off;
|
||||
```
|
||||
If you came here from one of the installation guides, take a look at the example configuration `/installation/pleroma.nginx`, where this part is already included.
|
||||
|
||||
|
|
|
|||
|
|
@ -70,6 +70,8 @@ The `/api/v1/pleroma/admin/*` path is backwards compatible with `/api/pleroma/ad
- `nicknames`
- Response: Array of user nicknames

## `POST /api/v1/pleroma/admin/users`

### Create a user

- Method: `POST`

@ -81,7 +83,7 @@ The `/api/v1/pleroma/admin/*` path is backwards compatible with `/api/pleroma/ad
`password`
}
]
- Response: User’s nickname
- Response: Array of user objects

## `POST /api/v1/pleroma/admin/users/follow`
@ -66,9 +66,9 @@ Returned data:
"username": "somenick",
...
},
"id" : "1",
"unread" : 2,
"last_message" : {...}, // The last message in that chat
"id": "1",
"unread": 2,
"last_message": {...}, // The last message in that chat
"updated_at": "2020-04-21T15:11:46.000Z"
}
```

@ -93,8 +93,8 @@ Returned data:
"username": "somenick",
...
},
"id" : "1",
"unread" : 0,
"id": "1",
"unread": 0,
"updated_at": "2020-04-21T15:11:46.000Z"
}
```

@ -111,7 +111,7 @@ The modified chat message

### Getting a list of Chats

`GET /api/v1/pleroma/chats`
`GET /api/v2/pleroma/chats`

This will return a list of chats that you have been involved in, sorted by their
last update (so new chats will be at the top).

@ -119,6 +119,7 @@ last update (so new chats will be at the top).
Parameters:

- with_muted: Include chats from muted users (boolean).
- pinned: Include only pinned chats (boolean).

Returned data:

@ -130,16 +131,16 @@ Returned data:
"username": "somenick",
...
},
"id" : "1",
"unread" : 2,
"last_message" : {...}, // The last message in that chat
"id": "1",
"unread": 2,
"last_message": {...}, // The last message in that chat
"updated_at": "2020-04-21T15:11:46.000Z"
}
]
```

The recipient of messages that are sent to this chat is given by their AP ID.
No pagination is implemented for now.
The usual pagination options are implemented.

### Getting the messages for a Chat

@ -226,6 +227,32 @@ Deleting a chat message for given Chat id works like this:

Returned data is the deleted message.

### Pinning a chat

Pinning a chat works like this:

`POST /api/v1/pleroma/chats/:id/pin`

Returned data:

```json
{
"account": {
"id": "someflakeid",
"username": "somenick",
...
},
"id": "1",
"unread": 0,
"updated_at": "2020-04-21T15:11:46.000Z",
"pinned": true,
}
```

To unpin a pinned chat, use:

`POST /api/v1/pleroma/chats/:id/unpin`

### Notifications

There's a new `pleroma:chat_mention` notification, which has this form. It is not given out in the notifications endpoint by default, you need to explicitly request it with `include_types[]=pleroma:chat_mention`:
@ -39,11 +39,13 @@ Has these additional fields under the `pleroma` object:
- `emoji_reactions`: A list with emoji / reaction maps. The format is `{name: "☕", count: 1, me: true}`. Contains no information about the reacting users, for that use the `/statuses/:id/reactions` endpoint.
- `parent_visible`: If the parent of this post is visible to the user or not.
- `pinned_at`: a datetime (iso8601) when status was pinned, `null` otherwise.
- `quotes_count`: the count of status quotes.
- `non_anonymous`: true if the source post specifies the poll results are not anonymous. Currently only implemented by Smithereen.
- `bookmark_folder`: the ID of the folder bookmark is stored within (if any).
- `list_id`: the ID of the list the post is addressed to (if any, only returned to author).

Has these additional fields under the `poll.pleroma` object:

- `non_anonymous`: true if the source post specifies the poll results are not anonymous. Currently only implemented by Smithereen.

The `GET /api/v1/statuses/:id/source` endpoint additionally has the following attributes:

- `content_type`: The content type of the status source.

@ -88,6 +90,7 @@ The `id` parameter can also be the `nickname` of the user. This only works in th
- `only_media`: include only statuses with media attached
- `with_muted`: include statuses/reactions from muted accounts
- `exclude_reblogs`: exclude reblogs
- `only_reblogs`: include only reblogs
- `exclude_replies`: exclude replies
- `exclude_visibilities`: exclude visibilities

@ -97,6 +100,9 @@ Endpoints which accept `with_relationships` parameter:
- `/api/v1/accounts/:id/followers`
- `/api/v1/accounts/:id/following`
- `/api/v1/mutes`
- `/api/v1/blocks`
- `/api/v1/search`
- `/api/v2/search`

Has these additional fields under the `pleroma` object:
@ -671,6 +671,7 @@ Audio scrobbling in Pleroma is **deprecated**.
"artist": "Some Artist",
"album": "Some Album",
"length": 180000,
"external_link": "https://www.last.fm/music/Some+Artist/_/Some+Title",
"created_at": "2019-09-28T12:40:45.000Z"
}
]

@ -683,6 +684,7 @@ Audio scrobbling in Pleroma is **deprecated**.
### Creates a new Listen activity for an account
* Method `POST`
* Authentication: required
* OAuth scope: `write:scrobbles`
* Params:
* `title`: the title of the media playing
* `album`: the album of the media playing [optional]
@ -1 +1,7 @@
This section contains notes and guidelines for developers.

- [Setting up a Pleroma development environment](setting_up_pleroma_dev.md)
- [Setting up a Gitlab Runner](setting_up_a_gitlab_runner.md)
- [Authentication & Authorization](authentication_authorization.md)
- [ActivityPub Extensions](ap_extensions.md)
- [Mox Testing Guide](mox_testing.md)

485 docs/development/mox_testing.md Normal file
@ -0,0 +1,485 @@
# Using Mox for Testing in Pleroma
|
||||
|
||||
## Introduction
|
||||
|
||||
This guide explains how to use [Mox](https://hexdocs.pm/mox/Mox.html) for testing in Pleroma and how to migrate existing tests from Mock/meck to Mox. Mox is a library for defining concurrent mocks in Elixir that offers several key advantages:
|
||||
|
||||
- **Async-safe testing**: Mox supports concurrent testing with `async: true`
|
||||
- **Explicit contract through behaviors**: Enforces implementation of behavior callbacks
|
||||
- **No module redefinition**: Avoids runtime issues caused by redefining modules
|
||||
- **Expectations scoped to the current process**: Prevents test state from leaking between tests
|
||||
|
||||
## Why Migrate from Mock/meck to Mox?
|
||||
|
||||
### Problems with Mock/meck
|
||||
|
||||
1. **Not async-safe**: Tests using Mock/meck cannot safely run with `async: true`, which slows down the test suite
|
||||
2. **Global state**: Mocked functions are global, leading to potential cross-test contamination
|
||||
3. **No explicit contract**: No guarantee that mocked functions match the actual implementation
|
||||
4. **Module redefinition**: Can lead to hard-to-debug runtime issues
|
||||
|
||||
### Benefits of Mox
|
||||
|
||||
1. **Async-safe testing**: Tests can run concurrently with `async: true`, significantly speeding up the test suite
|
||||
2. **Process isolation**: Expectations are set per process, preventing leakage between tests
|
||||
3. **Explicit contracts via behaviors**: Ensures mocks implement all required functions
|
||||
4. **Compile-time checks**: Prevents mocking non-existent functions
|
||||
5. **No module redefinition**: Mocks are defined at compile time, not runtime
|
||||
|
||||
## Existing Mox Setup in Pleroma
|
||||
|
||||
Pleroma already has a basic Mox setup in the `Pleroma.DataCase` module, which handles some common mocking scenarios automatically. Here's what's included:
|
||||
|
||||
### Default Mox Configuration
|
||||
|
||||
The `setup` function in `DataCase` does the following:
|
||||
|
||||
1. Sets up Mox for either async or non-async tests
|
||||
2. Verifies all mock expectations on test exit
|
||||
3. Stubs common dependencies with their real implementations
|
||||
|
||||
```elixir
|
||||
# From test/support/data_case.ex
|
||||
setup tags do
|
||||
setup_multi_process_mode(tags)
|
||||
setup_streamer(tags)
|
||||
stub_pipeline()
|
||||
|
||||
Mox.verify_on_exit!()
|
||||
|
||||
:ok
|
||||
end
|
||||
```
|
||||
|
||||
### Async vs. Non-Async Test Setup
|
||||
|
||||
Pleroma configures Mox differently depending on whether your test is async or not:
|
||||
|
||||
```elixir
|
||||
def setup_multi_process_mode(tags) do
|
||||
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Pleroma.Repo)
|
||||
|
||||
if tags[:async] do
|
||||
# For async tests, use process-specific mocks and stub CachexMock with NullCache
|
||||
Mox.stub_with(Pleroma.CachexMock, Pleroma.NullCache)
|
||||
Mox.set_mox_private()
|
||||
else
|
||||
# For non-async tests, use global mocks and stub CachexMock with CachexProxy
|
||||
Ecto.Adapters.SQL.Sandbox.mode(Pleroma.Repo, {:shared, self()})
|
||||
|
||||
Mox.set_mox_global()
|
||||
Mox.stub_with(Pleroma.CachexMock, Pleroma.CachexProxy)
|
||||
clear_cachex()
|
||||
end
|
||||
|
||||
:ok
|
||||
end
|
||||
```
|
||||
|
||||
### Default Pipeline Stubs
|
||||
|
||||
Pleroma automatically stubs several core components with their real implementations:
|
||||
|
||||
```elixir
|
||||
def stub_pipeline do
|
||||
Mox.stub_with(Pleroma.Web.ActivityPub.SideEffectsMock, Pleroma.Web.ActivityPub.SideEffects)
|
||||
Mox.stub_with(Pleroma.Web.ActivityPub.ObjectValidatorMock, Pleroma.Web.ActivityPub.ObjectValidator)
|
||||
Mox.stub_with(Pleroma.Web.ActivityPub.MRFMock, Pleroma.Web.ActivityPub.MRF)
|
||||
Mox.stub_with(Pleroma.Web.ActivityPub.ActivityPubMock, Pleroma.Web.ActivityPub.ActivityPub)
|
||||
Mox.stub_with(Pleroma.Web.FederatorMock, Pleroma.Web.Federator)
|
||||
Mox.stub_with(Pleroma.ConfigMock, Pleroma.Config)
|
||||
Mox.stub_with(Pleroma.StaticStubbedConfigMock, Pleroma.Test.StaticConfig)
|
||||
Mox.stub_with(Pleroma.StubbedHTTPSignaturesMock, Pleroma.Test.HTTPSignaturesProxy)
|
||||
end
|
||||
```
|
||||
|
||||
This means that by default, these mocks will behave like their real implementations unless you explicitly override them with expectations in your tests.
|
||||
|
||||
### Understanding Config Mock Types
|
||||
|
||||
Pleroma has three different Config mock implementations, each with a specific purpose and different characteristics regarding async test safety:
|
||||
|
||||
#### 1. ConfigMock
|
||||
|
||||
- Defined in `test/support/mocks.ex` as `Mox.defmock(Pleroma.ConfigMock, for: Pleroma.Config.Getting)`
|
||||
- It's stubbed with the real `Pleroma.Config` by default in `DataCase`: `Mox.stub_with(Pleroma.ConfigMock, Pleroma.Config)`
|
||||
- This means it falls back to the normal configuration behavior unless explicitly overridden
|
||||
- Used for general mocking of configuration in tests where you want most config to behave normally
|
||||
- ⚠️ **NOT ASYNC-SAFE**: Since it's stubbed with the real `Pleroma.Config`, it modifies global application state
|
||||
- Can not be used in tests with `async: true`
|
||||
|
||||
#### 2. StaticStubbedConfigMock
|
||||
|
||||
- Defined in `test/support/mocks.ex` as `Mox.defmock(Pleroma.StaticStubbedConfigMock, for: Pleroma.Config.Getting)`
|
||||
- It's stubbed with `Pleroma.Test.StaticConfig` (defined in `test/test_helper.exs`)
|
||||
- `Pleroma.Test.StaticConfig` creates a completely static configuration snapshot at the start of the test run:
|
||||
```elixir
|
||||
defmodule Pleroma.Test.StaticConfig do
|
||||
@moduledoc """
|
||||
This module provides a Config that is completely static, built at startup time from the environment.
|
||||
It's safe to use in testing as it will not modify any state.
|
||||
"""
|
||||
|
||||
@behaviour Pleroma.Config.Getting
|
||||
@config Application.get_all_env(:pleroma)
|
||||
|
||||
def get(path, default \\ nil) do
|
||||
get_in(@config, path) || default
|
||||
end
|
||||
end
|
||||
```
|
||||
- Configuration is frozen at startup time and doesn't change during the test run
|
||||
- ✅ **ASYNC-SAFE**: Never modifies global state since it uses a frozen snapshot of the configuration
|
||||
|
||||
#### 3. UnstubbedConfigMock
|
||||
|
||||
- Defined in `test/support/mocks.ex` as `Mox.defmock(Pleroma.UnstubbedConfigMock, for: Pleroma.Config.Getting)`
|
||||
- Unlike the other two mocks, it's not automatically stubbed with any implementation in `DataCase`
|
||||
- Starts completely "unstubbed" and requires tests to explicitly set expectations or stub it
|
||||
- The most commonly used configuration mock in the test suite
|
||||
- Often aliased as `ConfigMock` in individual test files: `alias Pleroma.UnstubbedConfigMock, as: ConfigMock`
|
||||
- Set as the default config implementation in `config/test.exs`: `config :pleroma, :config_impl, Pleroma.UnstubbedConfigMock`
|
||||
- Offers maximum flexibility for tests that need precise control over configuration values
|
||||
- ✅ **ASYNC-SAFE**: Safe if used with `expect()` to set up test-specific expectations (since expectations are process-scoped)
|
||||
|
||||
#### Configuring Components to Use Specific Mocks
|
||||
|
||||
In `config/test.exs`, different components can be configured to use different configuration mocks:
|
||||
|
||||
```elixir
|
||||
# Components using UnstubbedConfigMock
|
||||
config :pleroma, Pleroma.Upload, config_impl: Pleroma.UnstubbedConfigMock
|
||||
config :pleroma, Pleroma.User.Backup, config_impl: Pleroma.UnstubbedConfigMock
|
||||
config :pleroma, Pleroma.Uploaders.S3, config_impl: Pleroma.UnstubbedConfigMock
|
||||
|
||||
# Components using StaticStubbedConfigMock (async-safe)
|
||||
config :pleroma, Pleroma.Language.LanguageDetector, config_impl: Pleroma.StaticStubbedConfigMock
|
||||
config :pleroma, Pleroma.Web.RichMedia.Helpers, config_impl: Pleroma.StaticStubbedConfigMock
|
||||
config :pleroma, Pleroma.Web.Plugs.HTTPSecurityPlug, config_impl: Pleroma.StaticStubbedConfigMock
|
||||
```
|
||||
|
||||
This allows different parts of the application to use the most appropriate configuration mocking strategy based on their specific needs.
|
||||
|
||||
#### When to Use Each Config Mock Type
|
||||
|
||||
- **ConfigMock**: ⚠️ For non-async tests only, when you want most configuration to behave normally with occasional overrides
|
||||
- **StaticStubbedConfigMock**: ✅ For async tests where modifying global state would be problematic and a static configuration is sufficient
|
||||
- **UnstubbedConfigMock**: ⚠️ Use carefully in async tests; set specific expectations rather than stubbing with implementations that modify global state
|
||||
|
||||
#### Summary of Async Safety
|
||||
|
||||
| Mock Type | Async-Safe? | Best Use Case |
|
||||
|-----------|-------------|--------------|
|
||||
| ConfigMock | ❌ No | Non-async tests that need minimal configuration overrides |
|
||||
| StaticStubbedConfigMock | ✅ Yes | Async tests that need configuration values without modification |
|
||||
| UnstubbedConfigMock | ⚠️ Depends | Any test with careful usage; set expectations rather than stubbing |
|
||||
|
||||
## Configuration in Async Tests
|
||||
|
||||
### Understanding `clear_config` Limitations
|
||||
|
||||
The `clear_config` helper is commonly used in Pleroma tests to modify configuration for specific tests. However, it's important to understand that **`clear_config` is not async-safe** and should not be used in tests with `async: true`.
|
||||
|
||||
Here's why:
|
||||
|
||||
```elixir
|
||||
# Implementation of clear_config in test/support/helpers.ex
|
||||
defmacro clear_config(config_path, temp_setting) do
|
||||
quote do
|
||||
clear_config(unquote(config_path)) do
|
||||
Config.put(unquote(config_path), unquote(temp_setting))
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
defmacro clear_config(config_path, do: yield) do
|
||||
quote do
|
||||
initial_setting = Config.fetch(unquote(config_path))
|
||||
|
||||
unquote(yield)
|
||||
|
||||
on_exit(fn ->
|
||||
case initial_setting do
|
||||
:error ->
|
||||
Config.delete(unquote(config_path))
|
||||
|
||||
{:ok, value} ->
|
||||
Config.put(unquote(config_path), value)
|
||||
end
|
||||
end)
|
||||
|
||||
:ok
|
||||
end
|
||||
end
|
||||
```
|
||||
|
||||
The issue is that `clear_config`:
|
||||
1. Modifies the global application environment
|
||||
2. Uses `on_exit` to restore the original value after the test
|
||||
3. Can lead to race conditions when multiple async tests modify the same configuration
|
||||
|
||||
### Async-Safe Configuration Approaches
|
||||
|
||||
When writing async tests with Mox, use these approaches instead of `clear_config`:
|
||||
|
||||
1. **Dependency Injection with Module Attributes**:
|
||||
```elixir
|
||||
# In your module
|
||||
@config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
|
||||
|
||||
def some_function do
|
||||
value = @config_impl.get([:some, :config])
|
||||
# ...
|
||||
end
|
||||
```
|
||||
|
||||
2. **Mock the Config Module**:
|
||||
```elixir
|
||||
# In your test
|
||||
Pleroma.ConfigMock
|
||||
|> expect(:get, fn [:some, :config] -> "test_value" end)
|
||||
```
|
||||
|
||||
3. **Use Test-Specific Implementations**:
|
||||
```elixir
|
||||
# Define a test-specific implementation
|
||||
defmodule TestConfig do
|
||||
def get([:some, :config]), do: "test_value"
|
||||
def get(_), do: nil
|
||||
end
|
||||
|
||||
# In your test
|
||||
Mox.stub_with(Pleroma.ConfigMock, TestConfig)
|
||||
```
|
||||
|
||||
4. **Pass Configuration as Arguments**:
|
||||
```elixir
|
||||
# Refactor functions to accept configuration as arguments
|
||||
def some_function(config \\ nil) do
|
||||
config = config || Pleroma.Config.get([:some, :config])
|
||||
# ...
|
||||
end
|
||||
|
||||
# In your test
|
||||
some_function("test_value")
|
||||
```
|
||||
|
||||
By using these approaches, you can safely run tests with `async: true` without worrying about configuration conflicts.
|
||||
|
||||
## Setting Up Mox in Pleroma
|
||||
|
||||
### Step 1: Define a Behavior
|
||||
|
||||
Start by defining a behavior for the module you want to mock. This specifies the contract that both the real implementation and mocks must follow.
|
||||
|
||||
```elixir
|
||||
# In your implementation module (e.g., lib/pleroma/uploaders/s3.ex)
|
||||
defmodule Pleroma.Uploaders.S3.ExAwsAPI do
|
||||
@callback request(op :: ExAws.Operation.t()) :: {:ok, ExAws.Operation.t()} | {:error, term()}
|
||||
end
|
||||
```
|
||||
|
||||
### Step 2: Make Your Implementation Configurable
|
||||
|
||||
Modify your module to use a configurable implementation. This allows for dependency injection and easier testing.
|
||||
|
||||
```elixir
|
||||
# In your implementation module
|
||||
@ex_aws_impl Application.compile_env(:pleroma, [__MODULE__, :ex_aws_impl], ExAws)
|
||||
@config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
|
||||
|
||||
def put_file(%Pleroma.Upload{} = upload) do
|
||||
# Use @ex_aws_impl instead of ExAws directly
|
||||
case @ex_aws_impl.request(op) do
|
||||
{:ok, _} ->
|
||||
{:ok, {:file, s3_name}}
|
||||
|
||||
error ->
|
||||
Logger.error("#{__MODULE__}: #{inspect(error)}")
|
||||
error
|
||||
end
|
||||
end
|
||||
```
|
||||
|
||||
### Step 3: Define the Mock in test/support/mocks.ex
|
||||
|
||||
Add your mock definition in the central mocks file:
|
||||
|
||||
```elixir
|
||||
# In test/support/mocks.ex
|
||||
Mox.defmock(Pleroma.Uploaders.S3.ExAwsMock, for: Pleroma.Uploaders.S3.ExAwsAPI)
|
||||
```
|
||||
|
||||
### Step 4: Configure the Mock in Test Environment
|
||||
|
||||
In your test configuration (e.g., `config/test.exs`), specify which mock implementation to use:
|
||||
|
||||
```elixir
|
||||
config :pleroma, Pleroma.Uploaders.S3, ex_aws_impl: Pleroma.Uploaders.S3.ExAwsMock
|
||||
config :pleroma, Pleroma.Uploaders.S3, config_impl: Pleroma.UnstubbedConfigMock
|
||||
```
|
||||
|
||||
## Writing Tests with Mox
|
||||
|
||||
### Setting Up Your Test
|
||||
|
||||
```elixir
|
||||
defmodule Pleroma.Uploaders.S3Test do
|
||||
use Pleroma.DataCase, async: true # Note: async: true is now possible!
|
||||
|
||||
alias Pleroma.Uploaders.S3
|
||||
alias Pleroma.Uploaders.S3.ExAwsMock
|
||||
alias Pleroma.UnstubbedConfigMock, as: ConfigMock
|
||||
|
||||
import Mox # Import Mox functions
|
||||
|
||||
# Note: verify_on_exit! is already called in DataCase setup
|
||||
# so you don't need to add it explicitly in your test module
|
||||
end
|
||||
```
|
||||
|
||||
### Setting Expectations with Mox
|
||||
|
||||
Mox uses an explicit expectation system. Here's how to use it:
|
||||
|
||||
```elixir
|
||||
# Basic expectation for a function call
|
||||
ExAwsMock
|
||||
|> expect(:request, fn _req -> {:ok, %{status_code: 200}} end)
|
||||
|
||||
# Expectation for multiple calls with same response
|
||||
ExAwsMock
|
||||
|> expect(:request, 3, fn _req -> {:ok, %{status_code: 200}} end)
|
||||
|
||||
# Expectation with specific arguments
|
||||
ExAwsMock
|
||||
|> expect(:request, fn %{bucket: "test_bucket"} -> {:ok, %{status_code: 200}} end)
|
||||
|
||||
# Complex configuration mocking
|
||||
ConfigMock
|
||||
|> expect(:get, fn key ->
|
||||
[
|
||||
{Pleroma.Upload, [uploader: Pleroma.Uploaders.S3, base_url: "https://s3.amazonaws.com"]},
|
||||
{Pleroma.Uploaders.S3, [bucket: "test_bucket"]}
|
||||
]
|
||||
|> get_in(key)
|
||||
end)
|
||||
```
|
||||
|
||||
### Understanding Mox Modes in Pleroma
|
||||
|
||||
Pleroma's DataCase automatically configures Mox differently based on whether your test is async or not:
|
||||
|
||||
1. **Async tests** (`async: true`):
|
||||
- Uses `Mox.set_mox_private()` - expectations are scoped to the current process
|
||||
- Stubs `Pleroma.CachexMock` with `Pleroma.NullCache`
|
||||
- Each test process has its own isolated mock expectations
|
||||
|
||||
2. **Non-async tests** (`async: false`):
|
||||
- Uses `Mox.set_mox_global()` - expectations are shared across processes
|
||||
- Stubs `Pleroma.CachexMock` with `Pleroma.CachexProxy`
|
||||
- Mock expectations can be set in one process and called from another
|
||||
|
||||
Choose the appropriate mode based on your test requirements. For most tests, async mode is preferred for better performance.
|
||||
|
||||
## Migrating from Mock/meck to Mox
|
||||
|
||||
Here's a step-by-step guide for migrating existing tests from Mock/meck to Mox:
|
||||
|
||||
### 1. Identify the Module to Mock
|
||||
|
||||
Look for `with_mock` or `test_with_mock` calls in your tests:
|
||||
|
||||
```elixir
|
||||
# Old approach with Mock
|
||||
with_mock ExAws, request: fn _ -> {:ok, :ok} end do
|
||||
assert S3.put_file(file_upload) == {:ok, {:file, "test_folder/image-tet.jpg"}}
|
||||
end
|
||||
```
|
||||
|
||||
### 2. Define a Behavior for the Module
|
||||
|
||||
Create a behavior that defines the functions you want to mock:
|
||||
|
||||
```elixir
|
||||
defmodule Pleroma.Uploaders.S3.ExAwsAPI do
|
||||
@callback request(op :: ExAws.Operation.t()) :: {:ok, ExAws.Operation.t()} | {:error, term()}
|
||||
end
|
||||
```
|
||||
|
||||
### 3. Update Your Implementation to Use a Configurable Dependency
|
||||
|
||||
```elixir
|
||||
# Old
|
||||
def put_file(%Pleroma.Upload{} = upload) do
|
||||
case ExAws.request(op) do
|
||||
# ...
|
||||
end
|
||||
end
|
||||
|
||||
# New
|
||||
@ex_aws_impl Application.compile_env(:pleroma, [__MODULE__, :ex_aws_impl], ExAws)
|
||||
|
||||
def put_file(%Pleroma.Upload{} = upload) do
|
||||
case @ex_aws_impl.request(op) do
|
||||
# ...
|
||||
end
|
||||
end
|
||||
```
|
||||
|
||||
### 4. Define the Mock in mocks.ex
|
||||
|
||||
```elixir
|
||||
Mox.defmock(Pleroma.Uploaders.S3.ExAwsMock, for: Pleroma.Uploaders.S3.ExAwsAPI)
|
||||
```
|
||||
|
||||
### 5. Configure the Test Environment
|
||||
|
||||
```elixir
|
||||
config :pleroma, Pleroma.Uploaders.S3, ex_aws_impl: Pleroma.Uploaders.S3.ExAwsMock
|
||||
```
|
||||
|
||||
### 6. Update Your Tests to Use Mox
|
||||
|
||||
```elixir
|
||||
# Old (with Mock)
|
||||
test_with_mock "save file", ExAws, request: fn _ -> {:ok, :ok} end do
|
||||
assert S3.put_file(file_upload) == {:ok, {:file, "test_folder/image-tet.jpg"}}
|
||||
assert_called(ExAws.request(:_))
|
||||
end
|
||||
|
||||
# New (with Mox)
|
||||
test "save file" do
|
||||
ExAwsMock
|
||||
|> expect(:request, fn _req -> {:ok, %{status_code: 200}} end)
|
||||
|
||||
assert S3.put_file(file_upload) == {:ok, {:file, "test_folder/image-tet.jpg"}}
|
||||
end
|
||||
```
|
||||
|
||||
### 7. Enable Async Testing
|
||||
|
||||
Now you can safely enable `async: true` in your test module:
|
||||
|
||||
```elixir
|
||||
use Pleroma.DataCase, async: true
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
1. **Always define behaviors**: They serve as contracts and documentation
|
||||
2. **Keep mocks in a central location**: Use test/support/mocks.ex for all mock definitions
|
||||
3. **Use verify_on_exit!**: This is already set up in DataCase, ensuring all expected calls were made
|
||||
4. **Use specific expectations**: Be as specific as possible with your expectations
|
||||
5. **Enable async: true**: Take advantage of Mox's concurrent testing capability
|
||||
6. **Don't over-mock**: Only mock external dependencies that are difficult to test directly
|
||||
7. **Leverage existing stubs**: Use the default stubs provided by DataCase when possible
|
||||
8. **Avoid clear_config in async tests**: Use dependency injection and mocking instead
|
||||
|
||||
## Example: Complete Migration
|
||||
|
||||
For a complete example of migrating a test from Mock/meck to Mox, you can refer to commit `90a47ca050c5839e8b4dc3bac315dc436d49152d` in the Pleroma repository, which shows how the S3 uploader tests were migrated.
|
||||
|
||||
## Conclusion
|
||||
|
||||
Migrating tests from Mock/meck to Mox provides significant benefits for the Pleroma test suite, including faster test execution through async testing, better isolation between tests, and more robust mocking through explicit contracts. By following this guide, you can successfully migrate existing tests and write new tests using Mox.
|
||||
|
|
@ -14,7 +14,7 @@ Note: This article is potentially outdated because at this time we may not have

- PostgreSQL 11.0 or newer (Ubuntu 16.04 only provides 9.5, so get a newer version from [here](https://www.postgresql.org/download/linux/ubuntu/))
- `postgresql-contrib` 11.0 or newer (same as above)
- Elixir 1.14 or newer ([do NOT install it from the Debian repositories!!! Install it from here!](https://elixir-lang.org/install.html#unix-and-unix-like), or install [asdf](https://github.com/asdf-vm/asdf) as the pleroma user)
- Elixir 1.15 or newer ([do NOT install it from the Debian repositories!!! Install it from here!](https://elixir-lang.org/install.html#unix-and-unix-like), or install [asdf](https://github.com/asdf-vm/asdf) as the pleroma user)
- `erlang-dev`
- `erlang-nox`
- `git`
@ -1,8 +1,8 @@
## Required dependencies

* PostgreSQL >=11.0
* Elixir >=1.14.0 <1.17
* Erlang OTP >=23.0.0 (supported: <27)
* Elixir >=1.15.0 <1.19
* Erlang OTP >=23.0.0 (supported: <28)
* git
* file / libmagic
* gcc or clang
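Before building, it can help to confirm the installed toolchain falls inside these ranges; a small informational sketch you can paste into `iex`:

```elixir
# Prints the Elixir and OTP versions; compare against the ranges listed above.
IO.puts("Elixir #{System.version()} on OTP #{System.otp_release()}")
```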
@ -1,25 +1,29 @@
# Installing on OpenBSD

This guide describes the installation and configuration of pleroma (and the required software to run it) on a single OpenBSD 6.6 server.
{! backend/installation/otp_vs_from_source_source.include !}

This guide describes the installation and configuration of Pleroma (and the required software to run it) on a single OpenBSD 7.7 server.

For any additional information regarding commands and configuration files mentioned here, check the man pages [online](https://man.openbsd.org/) or directly on your server with the man command.

{! backend/installation/generic_dependencies.include !}

## Installation

### Preparing the system
#### Required software

To install them, run the following command (with doas or as root):
To install required packages, run the following command:

```
pkg_add elixir gmake git postgresql-server postgresql-contrib cmake ffmpeg ImageMagick libvips
# pkg_add elixir gmake git postgresql-server postgresql-contrib cmake libmagic libvips
```

Pleroma requires a reverse proxy, OpenBSD has relayd in base (and is used in this guide) and packages/ports are available for nginx (www/nginx) and apache (www/apache-httpd). Independently of the reverse proxy, [acme-client(1)](https://man.openbsd.org/acme-client) can be used to get a certificate from Let's Encrypt.
Pleroma requires a reverse proxy. OpenBSD has relayd in base (and it is used in this guide), and packages/ports are available for nginx (www/nginx) and apache (www/apache-httpd).
Independently of the reverse proxy, [acme-client(1)](https://man.openbsd.org/acme-client) can be used to get a certificate from Let's Encrypt.

#### Optional software

Per [`docs/installation/optional/media_graphics_packages.md`](../installation/optional/media_graphics_packages.md):
* ImageMagick
* ffmpeg
* exiftool

@ -27,234 +31,351 @@ Per [`docs/installation/optional/media_graphics_packages.md`](../installation/op
To install the above:

```
pkg_add ImageMagick ffmpeg p5-Image-ExifTool
# pkg_add ImageMagick ffmpeg p5-Image-ExifTool
```
|
||||
#### Creating the pleroma user
|
||||
Pleroma will be run by a dedicated user, \_pleroma. Before creating it, insert the following lines in login.conf:
|
||||
For more information read [`docs/installation/optional/media_graphics_packages.md`](../installation/optional/media_graphics_packages.md):
|
||||
|
||||
### PostgreSQL
|
||||
|
||||
Switch to the \_postgresql user and initialize PostgreSQL:
|
||||
|
||||
```
|
||||
# su _postgresql
|
||||
$ initdb -D /var/postgresql/data -U postgres --encoding=utf-8 --lc-collate=C
|
||||
```
|
||||
|
||||
Running PostgreSQL in a different directory than `/var/postgresql/data` requires changing the `daemon_flags` variable in the `/etc/rc.d/postgresql` script.
|
||||
|
||||
For security reasons it is recommended to change the authentication method for `local` and `host` connections with the localhost address to `scram-sha-256`.<br>
|
||||
Do not forget to set a password for the `postgres` user before doing so, otherwise you won't be able to log back in unless you change the authentication method back to `trust`.<br>
|
||||
Changing the password hashing algorithm is not needed.<br>
|
||||
For more information [read](https://www.postgresql.org/docs/16/auth-pg-hba-conf.html) the PostgreSQL documentation.
|
||||
|
||||
Enable and start the postgresql service:
|
||||
|
||||
```
|
||||
# rcctl enable postgresql
|
||||
# rcctl start postgresql
|
||||
```
|
||||
|
||||
To check that PostgreSQL started properly and didn't fail right after starting, run `# rcctl check postgresql` which should return `postgresql(ok)`.
|
||||
|
||||
### Configuring Pleroma
|
||||
|
||||
Pleroma will be run by a dedicated \_pleroma user. Before creating it, insert the following lines in `/etc/login.conf`:
|
||||
|
||||
```
|
||||
pleroma:\
|
||||
:datasize-max=1536M:\
|
||||
:datasize-cur=1536M:\
|
||||
:openfiles-max=4096
|
||||
:datasize=1536M:\
|
||||
:openfiles-max=4096:\
|
||||
:openfiles-cur=1024:\
|
||||
:setenv=LC_ALL=en_US.UTF-8,VIX_COMPILATION_MODE=PLATFORM_PROVIDED_LIBVIPS,MIX_ENV=prod:\
|
||||
:tc=daemon:
|
||||
```
|
||||
This creates a "pleroma" login class and sets higher values than default for datasize and openfiles (see [login.conf(5)](https://man.openbsd.org/login.conf)), this is required to avoid having pleroma crash some time after starting.
|
||||
|
||||
Create the \_pleroma user, assign it the pleroma login class and create its home directory (/home/\_pleroma/): `useradd -m -L pleroma _pleroma`
|
||||
This creates a "pleroma" login class and sets higher values than default for datasize and openfiles (see [login.conf(5)](https://man.openbsd.org/login.conf)), this is required to avoid having Pleroma crash some time after starting.
|
||||
|
||||
#### Clone pleroma's directory
|
||||
Enter a shell as the \_pleroma user. As root, run `su _pleroma -;cd`. Then clone the repository with `git clone -b stable https://git.pleroma.social/pleroma/pleroma.git`. Pleroma is now installed in /home/\_pleroma/pleroma/, it will be configured and started at the end of this guide.
|
||||
|
||||
#### PostgreSQL
|
||||
Start a shell as the \_postgresql user (as root run `su _postgresql -` then run the `initdb` command to initialize postgresql:
|
||||
You will need to specify pgdata directory to the default (/var/postgresql/data) with the `-D <path>` and set the user to postgres with the `-U <username>` flag. This can be done as follows:
|
||||
Create the \_pleroma user, assign it the pleroma login class and create its home directory (/home/\_pleroma/):
|
||||
|
||||
```
|
||||
initdb -D /var/postgresql/data -U postgres
|
||||
# useradd -m -L pleroma _pleroma
|
||||
```
|
||||
If you are not using the default directory, you will have to update the `datadir` variable in the /etc/rc.d/postgresql script.
|
||||
|
||||
When this is done, enable postgresql so that it starts on boot and start it. As root, run:
|
||||
Switch to the _pleroma user:
|
||||
|
||||
```
|
||||
rcctl enable postgresql
|
||||
rcctl start postgresql
|
||||
# su -l _pleroma
|
||||
```
|
||||
|
||||
Clone the Pleroma repository:
|
||||
|
||||
```
|
||||
$ git clone -b stable https://git.pleroma.social/pleroma/pleroma.git
|
||||
$ cd pleroma
|
||||
```
|
||||
|
||||
Pleroma is now installed in /home/\_pleroma/pleroma/. To configure it run:
|
||||
|
||||
```
|
||||
$ mix deps.get
|
||||
$ MIX_ENV=prod mix pleroma.instance gen # You will be asked a few questions here.
|
||||
$ cp config/generated_config.exs config/prod.secret.exs
|
||||
```
|
||||
|
||||
Note: Answer yes when asked to install Hex and rebar3. This step might take some time as Pleroma gets compiled first.
|
||||
|
||||
Create the Pleroma database:
|
||||
|
||||
```
|
||||
$ psql -U postgres -f config/setup_db.psql
|
||||
```
|
||||
|
||||
Apply database migrations:
|
||||
|
||||
```
|
||||
$ MIX_ENV=prod mix ecto.migrate
|
||||
```
|
||||
|
||||
Note: You will need to run this step again when updating your instance to a newer version with `git pull` or `git checkout tags/NEW_VERSION`.
|
||||
|
||||
As \_pleroma in /home/\_pleroma/pleroma, you can now run `MIX_ENV=prod mix phx.server` to start your instance.
|
||||
In another SSH session or a tmux window, check that it is working properly by running `ftp -MVo - http://127.0.0.1:4000/api/v1/instance`, you should get json output.
|
||||
Double-check that the *uri* value near the bottom is your instance's domain name and the instance *title* are correct.
|
||||
|
||||
### Configuring acme-client
|
||||
|
||||
acme-client is used to get SSL/TLS certificates from Let's Encrypt.
|
||||
Insert the following configuration in `/etc/acme-client.conf` and replace `example.tld` with your domain:
|
||||
|
||||
```
|
||||
#
|
||||
# $OpenBSD: acme-client.conf,v 1.5 2023/05/10 07:34:57 tb Exp $
|
||||
#
|
||||
|
||||
authority letsencrypt {
|
||||
api url "https://acme-v02.api.letsencrypt.org/directory"
|
||||
account key "/etc/acme/letsencrypt-privkey.pem"
|
||||
}
|
||||
|
||||
domain example.tld {
|
||||
# Adds alternative names to the certificate. Useful when serving media on another domain. Comma or space separated list.
|
||||
# alternative names { }
|
||||
|
||||
domain key "/etc/ssl/private/example.tld.key"
|
||||
domain certificate "/etc/ssl/example.tld_cert-only.crt"
|
||||
domain full chain certificate "/etc/ssl/example.tld.crt"
|
||||
sign with letsencrypt
|
||||
}
|
||||
```
|
||||
|
||||
Check the configuration:
|
||||
|
||||
```
|
||||
# acme-client -n
|
||||
```
|
||||
|
||||
### Configuring the Web server
|
||||
|
||||
Pleroma supports two Web servers:
|
||||
|
||||
* nginx (recommended for most users)
|
||||
* OpenBSD's httpd and relayd (ONLY for advanced users, media proxy cache is NOT supported and will NOT work properly)
|
||||
|
||||
#### nginx
|
||||
|
||||
Since nginx is not installed by default, install it by running:
|
||||
|
||||
```
|
||||
# pkg_add nginx
|
||||
```
|
||||
|
||||
Add the following to `/etc/nginx/nginx.conf`, within the `server {}` block listening on port 80 and change `server_name`, as follows:
|
||||
|
||||
```
|
||||
http {
|
||||
...
|
||||
|
||||
server {
|
||||
...
|
||||
server_name localhost; # Replace with your domain
|
||||
|
||||
location /.well-known/acme-challenge {
|
||||
rewrite ^/\.well-known/acme-challenge/(.*) /$1 break;
|
||||
root /var/www/acme;
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Start the nginx service and acquire certificates:
|
||||
|
||||
```
|
||||
# rcctl start nginx
|
||||
# acme-client example.tld
|
||||
```
|
||||
|
||||
Add certificate auto-renewal by adding acme-client to `/etc/weekly.local`, replacing `example.tld` with your domain:
|
||||
|
||||
```
|
||||
# echo "acme-client example.tld && rcctl reload nginx" >> /etc/weekly.local
|
||||
```
|
||||
|
||||
OpenBSD's default nginx configuration does not contain an include directive, which is typically used for multiple sites.
|
||||
Therefore, you will need to first create the required directories as follows:
|
||||
|
||||
```
|
||||
# mkdir /etc/nginx/sites-available
|
||||
# mkdir /etc/nginx/sites-enabled
|
||||
```
|
||||
|
||||
Next add the `include` directive to `/etc/nginx/nginx.conf`, within the `http {}` block, as follows:
|
||||
|
||||
```
|
||||
http {
|
||||
...
|
||||
|
||||
server {
|
||||
...
|
||||
}
|
||||
|
||||
include /etc/nginx/sites-enabled/*;
|
||||
}
|
||||
```
|
||||
|
||||
As root, copy `/home/_pleroma/pleroma/installation/pleroma.nginx` to `/etc/nginx/sites-available/pleroma.nginx`.
|
||||
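For example (the source path assumes the clone location used earlier in this guide):

```
# cp /home/_pleroma/pleroma/installation/pleroma.nginx /etc/nginx/sites-available/pleroma.nginx
```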
|
||||
Edit the default `/etc/nginx/sites-available/pleroma.nginx` settings and replace `example.tld` with your domain (a sketch of the edited parts follows this list):
|
||||
|
||||
* Uncomment the location block for `~ /\.well-known/acme-challenge` in the server block listening on port 80
|
||||
- add `rewrite ^/\.well-known/acme-challenge/(.*) /$1 break;` above the `root` location
|
||||
- change the `root` location to `/var/www/acme;`
|
||||
* Change `ssl_trusted_certificate` to `/etc/ssl/example.tld_cert-only.crt`
|
||||
* Change `ssl_certificate` to `/etc/ssl/example.tld.crt`
|
||||
* Change `ssl_certificate_key` to `/etc/ssl/private/example.tld.key`
|
||||
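A minimal sketch of the edited parts of `pleroma.nginx` (surrounding directives and exact line placement follow whatever is in your copy of the file; `example.tld` stands in for your domain):

```
server {
    listen 80;
    server_name example.tld;

    location ~ /\.well-known/acme-challenge {
        rewrite ^/\.well-known/acme-challenge/(.*) /$1 break;
        root /var/www/acme;
    }
    ...
}

server {
    ...
    ssl_trusted_certificate /etc/ssl/example.tld_cert-only.crt;
    ssl_certificate /etc/ssl/example.tld.crt;
    ssl_certificate_key /etc/ssl/private/example.tld.key;
    ...
}
```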
|
||||
Remove the following `location {}` block from `/etc/nginx/nginx.conf`, which was previously added for acquiring certificates, and change `server_name` back to `localhost`:
|
||||
|
||||
```
|
||||
http {
|
||||
...
|
||||
|
||||
server {
|
||||
...
|
||||
server_name example.tld; # Change back to localhost
|
||||
|
||||
# Delete this block
|
||||
location /.well-known/acme-challenge {
|
||||
rewrite ^/\.well-known/acme-challenge/(.*) /$1 break;
|
||||
root /var/www/acme;
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Symlink the Pleroma configuration to the enabled sites:
|
||||
|
||||
```
|
||||
# ln -s /etc/nginx/sites-available/pleroma.nginx /etc/nginx/sites-enabled
|
||||
```
|
||||
|
||||
Check nginx configuration syntax by running:
|
||||
|
||||
```
|
||||
# nginx -t
|
||||
```
|
||||
|
||||
Note: If the above command complains about a `conflicting server name`, check again that the `location {}` block for acquiring certificates has been removed from `/etc/nginx/nginx.conf` and that the `server_name` has been reverted back to `localhost`.
|
||||
After doing so run `# nginx -t` again.
|
||||
|
||||
If the configuration is correct, you can now enable and reload the nginx service:
|
||||
|
||||
```
|
||||
# rcctl enable nginx
|
||||
# rcctl reload nginx
|
||||
```
|
||||
To check that it started properly and didn't fail right after starting, you can run `ps aux | grep postgres`; there should be multiple lines of output.
|
||||
|
||||
#### httpd
|
||||
httpd will have three functions:
|
||||
|
||||
***Skip this section when using nginx***
|
||||
|
||||
httpd will have two functions:
|
||||
|
||||
* redirect requests trying to reach the instance over http to the https URL
|
||||
* serve a robots.txt file
|
||||
* get Let's Encrypt certificates, with acme-client
|
||||
|
||||
Insert the following config in httpd.conf:
|
||||
As root, copy `/home/_pleroma/pleroma/installation/openbsd/httpd.conf` to `/etc/httpd.conf`, or modify the existing one.
|
||||
|
||||
Edit the `/etc/httpd.conf` settings and change the following (see the sketch after this list):
|
||||
|
||||
* `<ipaddr>` with your instance's IPv4 address
|
||||
* All occurrences of `example.tld` with your instance's domain name
|
||||
* When using IPv6 also change:
|
||||
- Uncomment the `ext_inet6="<ip6addr>"` line near the beginning of the file and change `<ip6addr>` to your instance's IPv6 address
|
||||
- Uncomment the line starting with `listen on $ext_inet6` in the `server` block
|
||||
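A sketch of the edited macros and listen lines, using placeholder addresses (`203.0.113.10` and `2001:db8::10` are examples only; keep the IPv6 lines commented if you do not use IPv6):

```
ext_inet="203.0.113.10"
ext_inet6="2001:db8::10"

server "example.tld" {
    listen on $ext_inet port 80
    listen on $ext_inet6 port 80
    listen on 127.0.0.1 port 80
    ...
}
```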
|
||||
Check the configuration by running:
|
||||
```
|
||||
# $OpenBSD: httpd.conf,v 1.17 2017/04/16 08:50:49 ajacoutot Exp $
|
||||
|
||||
ext_inet="<IPv4 address>"
|
||||
ext_inet6="<IPv6 address>"
|
||||
|
||||
server "default" {
|
||||
listen on $ext_inet port 80 # Comment to disable listening on IPv4
|
||||
listen on $ext_inet6 port 80 # Comment to disable listening on IPv6
|
||||
listen on 127.0.0.1 port 80 # Do NOT comment this line
|
||||
|
||||
log syslog
|
||||
directory no index
|
||||
|
||||
location "/.well-known/acme-challenge/*" {
|
||||
root "/acme"
|
||||
request strip 2
|
||||
}
|
||||
|
||||
location "/robots.txt" { root "/htdocs/local/" }
|
||||
location "/*" { block return 302 "https://$HTTP_HOST$REQUEST_URI" }
|
||||
}
|
||||
|
||||
types {
|
||||
}
|
||||
```
|
||||
Do not forget to change *<IPv4/6 address\>* to your server's address(es). If httpd should only listen on one protocol family, comment out one of the first two *listen* options.
|
||||
|
||||
Create the /var/www/htdocs/local/ folder and write the content of your robots.txt in /var/www/htdocs/local/robots.txt.
|
||||
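For example, as root (the robots.txt content below is only a permissive placeholder; adjust it to your own policy):

```
# mkdir -p /var/www/htdocs/local
# printf 'User-agent: *\nDisallow:\n' > /var/www/htdocs/local/robots.txt
```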
Check the configuration with `httpd -n`; if it is OK, enable and start httpd (as root):
|
||||
```
|
||||
rcctl enable httpd
|
||||
rcctl start httpd
|
||||
# httpd -n
|
||||
```
|
||||
|
||||
#### acme-client
|
||||
acme-client is used to get SSL/TLS certificates from Let's Encrypt.
|
||||
Insert the following configuration in /etc/acme-client.conf:
|
||||
```
|
||||
#
|
||||
# $OpenBSD: acme-client.conf,v 1.4 2017/03/22 11:14:14 benno Exp $
|
||||
#
|
||||
If the configuration is correct, enable and start the `httpd` service:
|
||||
|
||||
authority letsencrypt-<domain name> {
|
||||
#agreement url "https://letsencrypt.org/documents/LE-SA-v1.2-November-15-2017.pdf"
|
||||
api url "https://acme-v02.api.letsencrypt.org/directory"
|
||||
account key "/etc/acme/letsencrypt-privkey-<domain name>.pem"
|
||||
}
|
||||
```
|
||||
# rcctl enable httpd
|
||||
# rcctl start httpd
|
||||
```
|
||||
|
||||
domain <domain name> {
|
||||
domain key "/etc/ssl/private/<domain name>.key"
|
||||
domain certificate "/etc/ssl/<domain name>.crt"
|
||||
domain full chain certificate "/etc/ssl/<domain name>.fullchain.pem"
|
||||
sign with letsencrypt-<domain name>
|
||||
challengedir "/var/www/acme/"
|
||||
}
|
||||
```
|
||||
Replace *<domain name\>* with the domain name you'll use for your instance. As root, run `acme-client -n` to check the config, then `acme-client -ADv <domain name>` to create the account and domain keys, and request a certificate for the first time.
|
||||
Make acme-client run every day by adding it to /etc/daily.local. As root, run the following command: `echo "acme-client <domain name>" >> /etc/daily.local`.
|
||||
Acquire certificate:
|
||||
|
||||
Relayd will look for certificates and keys based on the address it listens on (see the next part); the easiest way to make them available to relayd is to create symlinks. As root, run:
|
||||
```
|
||||
ln -s /etc/ssl/<domain name>.fullchain.pem /etc/ssl/<IP address>.crt
|
||||
ln -s /etc/ssl/private/<domain name>.key /etc/ssl/private/<IP address>.key
|
||||
# acme-client example.tld
|
||||
```
|
||||
This will have to be done for each IPv4 and IPv6 address relayd listens on.
|
||||
|
||||
#### relayd
|
||||
|
||||
***Skip this section when using nginx***
|
||||
|
||||
relayd will be used as the reverse proxy sitting in front of pleroma.
|
||||
Insert the following configuration in /etc/relayd.conf:
|
||||
|
||||
As root, copy `/home/_pleroma/pleroma/installation/openbsd/relayd.conf` to `/etc/relayd.conf`, or modify the existing one.
|
||||
|
||||
Edit `/etc/relayd.conf` settings and change:
|
||||
|
||||
* `<ipaddr>` with your instance's IPv4 address
|
||||
* All occurrences of `example.tld` with your instance's domain name
|
||||
* When using IPv6 also change:
|
||||
- Uncomment the `ext_inet6="<ip6addr>"` line near the beginning of the file and change `<ip6addr>` to your instance's IPv6 address
|
||||
- Uncomment the line starting with `listen on $ext_inet6` in the `relay wwwtls` block
|
||||
|
||||
Check the configuration by running:
|
||||
```
|
||||
# $OpenBSD: relayd.conf,v 1.4 2018/03/23 09:55:06 claudio Exp $
|
||||
|
||||
ext_inet="<IPv4 address>"
|
||||
ext_inet6="<IPv6 address>"
|
||||
|
||||
table <pleroma_server> { 127.0.0.1 }
|
||||
table <httpd_server> { 127.0.0.1 }
|
||||
|
||||
http protocol plerup { # Protocol for upstream pleroma server
|
||||
#tcp { nodelay, sack, socket buffer 65536, backlog 128 } # Uncomment and adjust as you see fit
|
||||
tls ciphers "ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305"
|
||||
tls ecdhe secp384r1
|
||||
|
||||
# Forward some paths to the local server (as pleroma won't respond to them as you might want)
|
||||
pass request quick path "/robots.txt" forward to <httpd_server>
|
||||
|
||||
# Append a bunch of headers
|
||||
match request header append "X-Forwarded-For" value "$REMOTE_ADDR" # This header and the next one are not strictly required by pleroma but adding them won't hurt
|
||||
match request header append "X-Forwarded-By" value "$SERVER_ADDR:$SERVER_PORT"
|
||||
|
||||
match response header append "X-XSS-Protection" value "1; mode=block"
|
||||
match response header append "X-Permitted-Cross-Domain-Policies" value "none"
|
||||
match response header append "X-Frame-Options" value "DENY"
|
||||
match response header append "X-Content-Type-Options" value "nosniff"
|
||||
match response header append "Referrer-Policy" value "same-origin"
|
||||
match response header append "X-Download-Options" value "noopen"
|
||||
match response header append "Content-Security-Policy" value "default-src 'none'; base-uri 'self'; form-action 'self'; img-src 'self' data: https:; media-src 'self' https:; style-src 'self' 'unsafe-inline'; font-src 'self'; script-src 'self'; connect-src 'self' wss://CHANGEME.tld; upgrade-insecure-requests;" # Modify "CHANGEME.tld" and set your instance's domain here
|
||||
match request header append "Connection" value "upgrade"
|
||||
#match response header append "Strict-Transport-Security" value "max-age=31536000; includeSubDomains" # Uncomment this only after you get HTTPS working.
|
||||
|
||||
# If you do not want remote frontends to be able to access your Pleroma backend server, comment these lines
|
||||
match response header append "Access-Control-Allow-Origin" value "*"
|
||||
match response header append "Access-Control-Allow-Methods" value "POST, PUT, DELETE, GET, PATCH, OPTIONS"
|
||||
match response header append "Access-Control-Allow-Headers" value "Authorization, Content-Type, Idempotency-Key"
|
||||
match response header append "Access-Control-Expose-Headers" value "Link, X-RateLimit-Reset, X-RateLimit-Limit, X-RateLimit-Remaining, X-Request-Id"
|
||||
# Stop commenting lines here
|
||||
}
|
||||
|
||||
relay wwwtls {
|
||||
listen on $ext_inet port https tls # Comment to disable listening on IPv4
|
||||
listen on $ext_inet6 port https tls # Comment to disable listening on IPv6
|
||||
|
||||
protocol plerup
|
||||
|
||||
forward to <pleroma_server> port 4000 check http "/" code 200
|
||||
forward to <httpd_server> port 80 check http "/robots.txt" code 200
|
||||
}
|
||||
```
|
||||
Again, change *<IPv4/6 address\>* to your server's address(es) and comment one of the two *listen* options if needed. Also change *wss://CHANGEME.tld* to *wss://<your instance's domain name\>*.
|
||||
Check the configuration with `relayd -n`; if it is OK, enable and start relayd (as root):
|
||||
```
|
||||
rcctl enable relayd
|
||||
rcctl start relayd
|
||||
# relayd -n
|
||||
```
|
||||
|
||||
##### (Strongly recommended) serve media on another domain
|
||||
If the configuration is correct, enable and start the `relayd` service:
|
||||
|
||||
```
|
||||
# rcctl enable relayd
|
||||
# rcctl start relayd
|
||||
```
|
||||
|
||||
Add certificate auto-renewal by adding acme-client to `/etc/weekly.local`, replacing `example.tld` with your domain:
|
||||
|
||||
```
|
||||
# echo "acme-client example.tld && rcctl reload relayd" >> /etc/weekly.local
|
||||
```
|
||||
|
||||
#### (Strongly recommended) serve media on another domain
|
||||
|
||||
Refer to the [Hardening your instance](../configuration/hardening.md) document for how to serve media on another domain. We STRONGLY RECOMMEND doing this to minimize attack vectors.
|
||||
|
||||
#### pf
|
||||
Enabling and configuring pf is highly recommended.
|
||||
In /etc/pf.conf, insert the following configuration:
|
||||
### Starting pleroma at boot
|
||||
|
||||
Copy the startup script and make sure it's executable:
|
||||
|
||||
```
|
||||
# Macros
|
||||
if="<network interface>"
|
||||
authorized_ssh_clients="any"
|
||||
|
||||
# Skip traffic on loopback interface
|
||||
set skip on lo
|
||||
|
||||
# Default behavior
|
||||
set block-policy drop
|
||||
block in log all
|
||||
pass out quick
|
||||
|
||||
# Security features
|
||||
match in all scrub (no-df random-id)
|
||||
block in log from urpf-failed
|
||||
|
||||
# Rules
|
||||
pass in quick on $if inet proto icmp to ($if) icmp-type { echoreq unreach paramprob trace } # ICMP
|
||||
pass in quick on $if inet6 proto icmp6 to ($if) icmp6-type { echoreq unreach paramprob timex toobig } # ICMPv6
|
||||
pass in quick on $if proto tcp to ($if) port { http https } # relayd/httpd
|
||||
pass in quick on $if proto tcp from $authorized_ssh_clients to ($if) port ssh
|
||||
```
|
||||
Replace *<network interface\>* with your server's network interface name (which you can get with ifconfig). Consider replacing the content of the authorized\_ssh\_clients macro with, for example, your home IP address, to avoid SSH connection attempts from bots.
|
||||
|
||||
Check pf's configuration by running `pfctl -nf /etc/pf.conf`, load it with `pfctl -f /etc/pf.conf` and enable pf at boot with `rcctl enable pf`.
|
||||
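The same steps as commands, run as root:

```
# pfctl -nf /etc/pf.conf
# pfctl -f /etc/pf.conf
# rcctl enable pf
```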
|
||||
#### Configure and start pleroma
|
||||
Enter a shell as \_pleroma (as root: `su -l _pleroma`) and change to pleroma's installation directory (`cd ~/pleroma/`).
|
||||
|
||||
Then follow the main installation guide (a condensed command sketch follows this list):
|
||||
|
||||
* run `mix deps.get`
|
||||
* run `MIX_ENV=prod mix pleroma.instance gen` and enter your instance's information when asked
|
||||
* copy config/generated\_config.exs to config/prod.secret.exs. The default values should be sufficient but you should edit it and check that everything seems OK.
|
||||
* exit your current shell back to a root one and run `psql -U postgres -f /home/_pleroma/pleroma/config/setup_db.psql` to set up the database.
|
||||
* return to a \_pleroma shell in pleroma's installation directory (`su -l _pleroma; cd ~/pleroma`) and run `MIX_ENV=prod mix ecto.migrate`
|
||||
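A condensed sketch of that sequence (`$` marks a \_pleroma shell in ~/pleroma, `#` a root shell):

```
$ mix deps.get
$ MIX_ENV=prod mix pleroma.instance gen
$ cp config/generated_config.exs config/prod.secret.exs
# psql -U postgres -f /home/_pleroma/pleroma/config/setup_db.psql
$ MIX_ENV=prod mix ecto.migrate
```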
|
||||
As \_pleroma in /home/\_pleroma/pleroma, you can now run `LC_ALL=en_US.UTF-8 MIX_ENV=prod mix phx.server` to start your instance.
|
||||
In another SSH session/tmux window, check that it is working properly by running `ftp -MVo - http://127.0.0.1:4000/api/v1/instance`; you should get JSON output. Double-check that the *uri* value is your instance's domain name.
|
||||
|
||||
##### Starting pleroma at boot
|
||||
An rc script to automatically start pleroma at boot hasn't been written yet; it can be run in a tmux session (tmux is in base).
|
||||
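A minimal sketch of running it under tmux as \_pleroma (the session name is arbitrary):

```
$ tmux new -s pleroma
$ cd ~/pleroma
$ LC_ALL=en_US.UTF-8 MIX_ENV=prod mix phx.server
```

Detach with Ctrl-b d and re-attach later with `tmux attach -t pleroma`.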
|
||||
|
||||
#### Create administrative user
|
||||
|
||||
If your instance is up and running, you can create your first user with administrative rights with the following command as the \_pleroma user.
|
||||
```
|
||||
LC_ALL=en_US.UTF-8 MIX_ENV=prod mix pleroma.user new <username> <your@emailaddress> --admin
|
||||
# cp /home/_pleroma/pleroma/installation/openbsd/rc.d/pleroma /etc/rc.d/pleroma
|
||||
# chmod 555 /etc/rc.d/pleroma
|
||||
```
|
||||
|
||||
#### Further reading
|
||||
Enable and start the pleroma service:
|
||||
|
||||
```
|
||||
# rcctl enable pleroma
|
||||
# rcctl start pleroma
|
||||
```
|
||||
|
||||
### Create administrative user
|
||||
|
||||
If your instance is up and running, you can create your first user with administrative rights with the following commands as the \_pleroma user:
|
||||
|
||||
```
|
||||
$ cd pleroma
|
||||
$ MIX_ENV=prod mix pleroma.user new <username> <your@emailaddress> --admin
|
||||
```
|
||||
|
||||
### Further reading
|
||||
|
||||
{! backend/installation/further_reading.include !}
|
||||
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@ Note: This article is potentially outdated because at this time we may not have
|
|||
|
||||
You will need:
|
||||
* Your own domain
|
||||
* An OpenBSD 6.3 server
|
||||
* An OpenBSD 7.5 server
|
||||
* A working understanding of Unix systems
|
||||
|
||||
Commands prefixed with '#' must be run as the `root` user. This is
|
||||
|
|
@ -18,7 +18,7 @@ Matrix-kanava #pleroma:libera.chat ovat hyviä paikkoja löytää apua
|
|||
|
||||
Install the required software:
|
||||
|
||||
`# pkg_add git elixir gmake postgresql-server-10.3 postgresql-contrib-10.3 cmake ffmpeg ImageMagick libvips`
|
||||
`# pkg_add git elixir gmake postgresql-server postgresql-contrib cmake libmagic libvips`
|
||||
|
||||
#### Optional software
|
||||
|
||||
|
|
|
|||
|
|
@ -16,7 +16,7 @@ Note: the packages are not required with the current default settings of Pleroma
|
|||
|
||||
It is required for the following Pleroma features:
|
||||
|
||||
* `Pleroma.Upload.Filters.Mogrify`, `Pleroma.Upload.Filters.Mogrifun` upload filters (related config: `Plaroma.Upload/filters` in `config/config.exs`)
|
||||
* `Pleroma.Upload.Filters.Mogrify`, `Pleroma.Upload.Filters.Mogrifun` upload filters (related config: `Pleroma.Upload/filters` in `config/config.exs`)
|
||||
* Media preview proxy for still images (related config: `media_preview_proxy/enabled` in `config/config.exs`)
|
||||
|
||||
## `ffmpeg`
|
||||
|
|
@ -33,5 +33,5 @@ It is required for the following Pleroma features:
|
|||
|
||||
It is required for the following Pleroma features:
|
||||
|
||||
* `Pleroma.Upload.Filters.Exiftool.StripLocation` upload filter (related config: `Plaroma.Upload/filters` in `config/config.exs`)
|
||||
* `Pleroma.Upload.Filters.Exiftool.ReadDescription` upload filter (related config: `Plaroma.Upload/filters` in `config/config.exs`)
|
||||
* `Pleroma.Upload.Filters.Exiftool.StripLocation` upload filter (related config: `Pleroma.Upload/filters` in `config/config.exs`)
|
||||
* `Pleroma.Upload.Filters.Exiftool.ReadDescription` upload filter (related config: `Pleroma.Upload/filters` in `config/config.exs`)
|
||||
|
|
|
|||
|
|
@ -24,4 +24,6 @@ command=/usr/local/bin/elixir
|
|||
command_args="--erl \"-detached\" -S /usr/local/bin/mix phx.server"
|
||||
procname="*beam.smp"
|
||||
|
||||
PATH="${PATH}:/usr/local/sbin:/usr/local/bin"
|
||||
|
||||
run_rc_command "$1"
|
||||
|
|
|
|||
|
|
@ -2,20 +2,21 @@
|
|||
# Default httpd.conf file for Pleroma on OpenBSD
|
||||
# Simple installation instructions
|
||||
# 1. Place file in /etc
|
||||
# 2. Replace <IPv4 address> with your public IP address
|
||||
# 3. If using IPv6, uncomment IPv6 lines and replace <IPv6 address> with your public IPv6 address
|
||||
# 4. Check file using 'doas httpd -n'
|
||||
# 5. Enable and start httpd:
|
||||
# 2. Replace <ipaddr> with your public IP address
|
||||
# 3. If using IPv6, uncomment IPv6 lines and replace <ip6addr> with your public IPv6 address
|
||||
# 4. Replace all occurrences of example.tld with your instance's domain name.
|
||||
# 5. Check file using 'doas httpd -n'
|
||||
# 6. Enable and start httpd:
|
||||
# # doas rcctl enable httpd
|
||||
# # doas rcctl start httpd
|
||||
#
|
||||
|
||||
ext_inet="<IPv4 address>"
|
||||
#ext_inet6="<IPv6 address>"
|
||||
ext_inet="<ipaddr>"
|
||||
#ext_inet6="<ip6addr>"
|
||||
|
||||
server "default" {
|
||||
server "example.tld" {
|
||||
listen on $ext_inet port 80 # Comment to disable listening on IPv4
|
||||
# listen on $ext_inet6 port 80 # Comment to disable listening on IPv6
|
||||
#listen on $ext_inet6 port 80 # Comment to disable listening on IPv6
|
||||
listen on 127.0.0.1 port 80 # Do NOT comment this line
|
||||
|
||||
log syslog
|
||||
|
|
@ -26,10 +27,18 @@ server "default" {
|
|||
request strip 2
|
||||
}
|
||||
|
||||
location "/robots.txt" { root "/htdocs/local/" }
|
||||
location "/*" { block return 302 "https://$HTTP_HOST$REQUEST_URI" }
|
||||
location "/*" { block return 301 "https://$HTTP_HOST$REQUEST_URI" }
|
||||
}
|
||||
|
||||
# Example of serving a basic static website besides Pleroma using the example configuration in relayd
|
||||
#server "site.example.tld" {
|
||||
# listen on 127.0.0.1 port 8080
|
||||
#
|
||||
# location "/*" {
|
||||
# root "/website"
|
||||
# }
|
||||
#}
|
||||
|
||||
types {
|
||||
include "/usr/share/misc/mime.types"
|
||||
}
|
||||
|
|
|
|||
|
|
@ -4,15 +4,16 @@
|
|||
#
|
||||
# Simple installation instructions:
|
||||
# 1. Install Pleroma per wiki instructions
|
||||
# 2. Place this pleromad file in /etc/rc.d
|
||||
# 2. Place this pleroma file in /etc/rc.d
|
||||
# 3. Enable and start Pleroma
|
||||
# # doas rcctl enable pleromad
|
||||
# # doas rcctl start pleromad
|
||||
# # doas rcctl enable pleroma
|
||||
# # doas rcctl start pleroma
|
||||
#
|
||||
|
||||
daemon="/usr/local/bin/elixir"
|
||||
daemon_flags="--detached -S /usr/local/bin/mix phx.server"
|
||||
daemon_flags="--erl \"-detached\" -S /usr/local/bin/mix phx.server"
|
||||
daemon_user="_pleroma"
|
||||
daemon_execdir="/home/_pleroma/pleroma"
|
||||
|
||||
. /etc/rc.d/rc.subr
|
||||
|
||||
|
|
@ -23,10 +24,6 @@ rc_check() {
|
|||
pgrep -q -U _pleroma -f "phx.server"
|
||||
}
|
||||
|
||||
rc_start() {
|
||||
${rcexec} "cd pleroma; ${daemon} ${daemon_flags}"
|
||||
}
|
||||
|
||||
rc_stop() {
|
||||
pkill -q -U _pleroma -f "phx.server"
|
||||
}
|
||||
|
|
@ -3,9 +3,10 @@
|
|||
# Simple installation instructions:
|
||||
# 1. Place in /etc
|
||||
# 2. Replace <ipaddr> with your public IPv4 address
|
||||
# 3. If using IPv6i, uncomment IPv6 lines and replace <ip6addr> with your public IPv6 address
|
||||
# 4. Check file using 'doas relayd -n'
|
||||
# 5. Reload/start relayd
|
||||
# 3. If using IPv6, uncomment IPv6 lines and replace <ip6addr> with your public IPv6 address
|
||||
# 4. Replace all occurrences of example.tld with your instance's domain
|
||||
# 5. Check file using 'doas relayd -n'
|
||||
# 6. Reload/start relayd
|
||||
# # doas rcctl enable relayd
|
||||
# # doas rcctl start relayd
|
||||
#
|
||||
|
|
@ -14,31 +15,66 @@ ext_inet="<ipaddr>"
|
|||
#ext_inet6="<ip6addr>"
|
||||
|
||||
table <pleroma_server> { 127.0.0.1 }
|
||||
table <httpd_server> { 127.0.0.1 }
|
||||
|
||||
http protocol plerup { # Protocol for upstream pleroma server
|
||||
# Uncomment when you want to serve other services than Pleroma.
|
||||
# In this example tables are used only as way to differentiate between Pleroma and other services.
|
||||
# Feel free to rename "httpd_server" everywhere to fit your setup.
|
||||
#table <httpd_server> { 127.0.0.1 }
|
||||
|
||||
http protocol pleroma { # Protocol for upstream Pleroma server
|
||||
#tcp { nodelay, sack, socket buffer 65536, backlog 128 } # Uncomment and adjust as you see fit
|
||||
tls ciphers "ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA0-POLY1305"
|
||||
tls ecdhe secp384r1
|
||||
tls ciphers "ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:!aNULL:!eNULL:!EXPORT:!DES:!MD5:!PSK:!RC4"
|
||||
tls ecdhe "X25519,P-256,P-384,secp521r1" # relayd default+secp521r1
|
||||
|
||||
# Forward some paths to the local server (as pleroma won't respond to them as you might want)
|
||||
pass request quick path "/robots.txt" forward to <httpd_server>
|
||||
return error
|
||||
|
||||
# Append a bunch of headers
|
||||
match request header append "X-Forwarded-For" value "$REMOTE_ADDR" # This two header and the next one are not strictl required by pleroma but adding them won't hurt
|
||||
match request header append "X-Forwarded-By" value "$SERVER_ADDR:$SERVER_PORT"
|
||||
# When serving multiple services with different certificates, specify multiple "tls keypair" keywords
|
||||
# and add forwards to those services before the block keyword near the bottom of the protocol and relay configurations.
|
||||
# The string in quotes must match the fullchain certificate file created by acme-client without the extension.
|
||||
# For example:
|
||||
# tls keypair "pleroma.example.tld"
|
||||
# tls keypair "example.tld"
|
||||
tls keypair "example.tld"
|
||||
|
||||
match request header append "X-Forwarded-For" value "$REMOTE_ADDR"
|
||||
match request header append "Connection" value "upgrade"
|
||||
|
||||
# When hosting Pleroma on a subdomain, replace example.tld accordingly (not the base domain).
|
||||
# From the above example, "example.tld" should be replaced with "pleroma.example.tld" instead.
|
||||
pass request quick header "Host" value "example.tld" forward to <pleroma_server>
|
||||
|
||||
# Uncomment when serving media uploads on a different (sub)domain.
|
||||
# Keep media proxy disabled, as it will NOT work under relayd/httpd. If you want to also setup media proxy, use nginx instead.
|
||||
#pass request quick header "Host" value "media.example.tld" forward to <pleroma_server>
|
||||
|
||||
# When serving multiple services, add the forwards here.
|
||||
# Example:
|
||||
#pass request quick header "Host" value "example.tld" forward to <httpd_server>
|
||||
|
||||
block
|
||||
}
|
||||
|
||||
relay wwwtls {
|
||||
listen on $ext_inet port https tls # Comment to disable listening on IPv4
|
||||
# listen on $ext_inet6 port https tls # Comment to disable listening on IPv6
|
||||
|
||||
protocol plerup
|
||||
protocol pleroma
|
||||
|
||||
forward to <pleroma_server> port 4000 check http "/" code 200
|
||||
forward to <httpd_server> port 80 check http "/robots.txt" code 200
|
||||
forward to <pleroma_server> port 4000 check tcp timeout 500 # Adjust timeout accordingly when relayd returns 502 while Pleroma is running without problems.
|
||||
|
||||
# When serving multiple services, add the forwards here.
|
||||
# Example:
|
||||
#forward to <httpd_server> port 8080
|
||||
}
|
||||
|
||||
# Uncomment relay block to enable IPv6
|
||||
#relay wwwtls6 {
|
||||
# listen on $ext_inet6 port https tls
|
||||
|
||||
# protocol pleroma
|
||||
|
||||
# forward to <pleroma_server> port 4000 check tcp timeout 500 # Adjust timeout accordingly when relayd returns 502 while Pleroma is running without problems.
|
||||
|
||||
# # When serving multiple services, add the forwards here.
|
||||
# # Example:
|
||||
# #forward to <httpd_server> port 8080
|
||||
#}
|
||||
|
|
|
|||
|
|
@ -6,7 +6,9 @@
|
|||
# 3. Copy this file to /etc/nginx/sites-available/ and then add a symlink to it
|
||||
# in /etc/nginx/sites-enabled/ and run 'nginx -s reload' or restart nginx.
|
||||
|
||||
proxy_cache_path /tmp/pleroma-media-cache levels=1:2 keys_zone=pleroma_media_cache:10m max_size=10g
|
||||
# Note: The cache directory must exist and be writable by nginx.
|
||||
# If nginx runs in a chroot, create it inside the chroot.
|
||||
proxy_cache_path /var/tmp/pleroma-media-cache levels=1:2 keys_zone=pleroma_media_cache:10m max_size=10g
|
||||
inactive=720m use_temp_path=off;
|
||||
|
||||
# this is explicitly IPv4 since Pleroma.Web.Endpoint binds on IPv4 only
|
||||
|
|
@ -41,8 +43,21 @@ ssl_session_cache shared:ssl_session_cache:10m;
|
|||
server {
|
||||
server_name example.tld;
|
||||
|
||||
listen 443 ssl http2;
|
||||
listen [::]:443 ssl http2;
|
||||
listen 443 ssl;
|
||||
listen [::]:443 ssl;
|
||||
http2 on;
|
||||
|
||||
# Optional HTTP/3 support
|
||||
# Note: requires you open UDP port 443
|
||||
#
|
||||
# listen 443 quic reuseport;
|
||||
# listen [::]:443 quic reuseport;
|
||||
# http3 on;
|
||||
# quic_retry on;
|
||||
# ssl_early_data on;
|
||||
# quic_gso on;
|
||||
# add_header Alt-Svc 'h3=":443"; ma=86400';
|
||||
|
||||
ssl_session_timeout 1d;
|
||||
ssl_session_cache shared:MozSSL:10m; # about 40000 sessions
|
||||
ssl_session_tickets off;
|
||||
|
|
@ -67,8 +82,14 @@ server {
|
|||
gzip_http_version 1.1;
|
||||
gzip_types text/plain text/css application/json application/javascript text/xml application/xml application/xml+rss text/javascript application/activity+json application/atom+xml;
|
||||
|
||||
# the nginx default is 1m, not enough for large media uploads
|
||||
# Nginx media upload limitation
|
||||
# Ensure that this value matches or exceeds your Pleroma upload limit:
|
||||
#
|
||||
# config :pleroma, :instance,
|
||||
# upload_limit: 16_000_000
|
||||
#
|
||||
client_max_body_size 16m;
|
||||
|
||||
ignore_invalid_headers off;
|
||||
|
||||
proxy_http_version 1.1;
|
||||
|
|
@ -94,7 +115,9 @@ server {
|
|||
# proxy_pass http://phoenix/notice/$1;
|
||||
# }
|
||||
|
||||
location ~ ^/(media|proxy) {
|
||||
# Remove this location if you choose to use a dedicated subdomain
|
||||
# for mediaproxy
|
||||
location /proxy {
|
||||
proxy_cache pleroma_media_cache;
|
||||
slice 1m;
|
||||
proxy_cache_key $host$uri$is_args$args$slice_range;
|
||||
|
|
@ -106,4 +129,95 @@ server {
|
|||
chunked_transfer_encoding on;
|
||||
proxy_pass http://phoenix;
|
||||
}
|
||||
|
||||
# Nginx can serve the local file uploads directly reducing work for
|
||||
# the backend. Make sure to change this to a "deny all" if you use
|
||||
# a dedicated subdomain. It will break access to uploads that have already
|
||||
# federated if you are converting an existing installation, so weigh the risks
|
||||
# carefully.
|
||||
#
|
||||
# location /media/ {
|
||||
# alias /var/lib/pleroma/uploads/; # <-- make sure this is correct for your deployment
|
||||
# allow all;
|
||||
# add_header X-Content-Type-Options "nosniff";
|
||||
# add_header Content-Security-Policy "sandbox";
|
||||
# }
|
||||
|
||||
}
|
||||
|
||||
# It is strongly recommended that you host your media and the mediaproxy on a dedicated subdomain for security reasons.
|
||||
# The following Pleroma settings will be required to enable this capability:
|
||||
#
|
||||
# config :pleroma, :media_proxy,
|
||||
# base_url: "https://media.example.tld/"
|
||||
#
|
||||
# # Assuming default media upload deployment (e.g., not S3 which will require a different domain anyway) --
|
||||
# config :pleroma, Pleroma.Upload,
|
||||
# base_url: "https://media.example.tld/media/",
|
||||
#
|
||||
# config :pleroma, Pleroma.Uploaders.Local, uploads: "/var/lib/pleroma/uploads"
|
||||
#
|
||||
# And then uncomment and configure the following server.
|
||||
# Make sure your certificate was issued to support both domains or use a dedicated certificate:
|
||||
#
|
||||
# server {
|
||||
# server_name media.example.tld;
|
||||
#
|
||||
# listen 443 ssl;
|
||||
# listen [::]:443 ssl;
|
||||
# http2 on;
|
||||
#
|
||||
# # Optional HTTP/3 support
|
||||
# # Note: requires you open UDP port 443
|
||||
# #
|
||||
# # listen 443 quic reuseport;
|
||||
# # listen [::]:443 quic reuseport;
|
||||
# # http3 on;
|
||||
# # quic_retry on;
|
||||
# # ssl_early_data on;
|
||||
# # quic_gso on;
|
||||
# # add_header Alt-Svc 'h3=":443"; ma=86400';
|
||||
#
|
||||
# ssl_session_timeout 1d;
|
||||
# ssl_session_cache shared:MozSSL:10m; # about 40000 sessions
|
||||
# ssl_session_tickets off;
|
||||
#
|
||||
# ssl_trusted_certificate /etc/letsencrypt/live/example.tld/chain.pem;
|
||||
# ssl_certificate /etc/letsencrypt/live/example.tld/fullchain.pem;
|
||||
# ssl_certificate_key /etc/letsencrypt/live/example.tld/privkey.pem;
|
||||
#
|
||||
# ssl_protocols TLSv1.2 TLSv1.3;
|
||||
# ssl_ciphers "ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:!aNULL:!eNULL:!EXPORT:!DES:!MD5:!PSK:!RC4";
|
||||
# ssl_prefer_server_ciphers off;
|
||||
# # In case of an old server with an OpenSSL version of 1.0.2 or below,
|
||||
# # leave only prime256v1 or comment out the following line.
|
||||
# ssl_ecdh_curve X25519:prime256v1:secp384r1:secp521r1;
|
||||
# ssl_stapling on;
|
||||
# ssl_stapling_verify on;
|
||||
#
|
||||
# proxy_http_version 1.1;
|
||||
# proxy_set_header Upgrade $http_upgrade;
|
||||
# proxy_set_header Connection "upgrade";
|
||||
# proxy_set_header Host $http_host;
|
||||
# proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
#
|
||||
# location /media/ { # <-- make sure this path matches your Pleroma.Upload :base_url
|
||||
# alias /var/lib/pleroma/uploads/; # <-- make sure this is correct for your deployment
|
||||
# allow all;
|
||||
# add_header X-Content-Type-Options "nosniff";
|
||||
# add_header Content-Security-Policy "sandbox";
|
||||
# }
|
||||
#
|
||||
# location /proxy {
|
||||
# proxy_cache pleroma_media_cache;
|
||||
# slice 1m;
|
||||
# proxy_cache_key $host$uri$is_args$args$slice_range;
|
||||
# proxy_set_header Range $slice_range;
|
||||
# proxy_cache_valid 200 206 301 304 1h;
|
||||
# proxy_cache_lock on;
|
||||
# proxy_ignore_client_abort on;
|
||||
# proxy_buffering on;
|
||||
# chunked_transfer_encoding on;
|
||||
# proxy_pass http://phoenix;
|
||||
# }
|
||||
# }
|
||||
|
|
|
|||
|
|
@ -26,7 +26,11 @@ defmodule Mix.Pleroma do
|
|||
Application.put_env(:phoenix, :serve_endpoints, false, persistent: true)
|
||||
|
||||
unless System.get_env("DEBUG") do
|
||||
Logger.remove_backend(:console)
|
||||
try do
|
||||
Logger.remove_backend(:console)
|
||||
catch
|
||||
:exit, _ -> :ok
|
||||
end
|
||||
end
|
||||
|
||||
adapter = Application.get_env(:tesla, :adapter)
|
||||
|
|
|
|||
|
|
@ -271,7 +271,7 @@ defmodule Mix.Tasks.Pleroma.Instance do
|
|||
[config_dir, psql_dir, static_dir, uploads_dir]
|
||||
|> Enum.reject(&File.exists?/1)
|
||||
|> Enum.each(fn dir ->
|
||||
File.mkdir_p!(dir)
|
||||
Pleroma.Backports.mkdir_p!(dir)
|
||||
File.chmod!(dir, 0o700)
|
||||
end)
|
||||
|
||||
|
|
|
|||
|
|
@ -22,7 +22,7 @@ defmodule Mix.Tasks.Pleroma.RobotsTxt do
|
|||
static_dir = Pleroma.Config.get([:instance, :static_dir], "instance/static/")
|
||||
|
||||
if !File.exists?(static_dir) do
|
||||
File.mkdir_p!(static_dir)
|
||||
Pleroma.Backports.mkdir_p!(static_dir)
|
||||
end
|
||||
|
||||
robots_txt_path = Path.join(static_dir, "robots.txt")
|
||||
|
|
|
|||
|
|
@ -4,7 +4,9 @@ defmodule Mix.Tasks.Pleroma.TestRunner do
|
|||
use Mix.Task
|
||||
|
||||
def run(args \\ []) do
|
||||
case System.cmd("mix", ["test"] ++ args, into: IO.stream(:stdio, :line)) do
|
||||
case System.cmd("mix", ["test", "--warnings-as-errors"] ++ args,
|
||||
into: IO.stream(:stdio, :line)
|
||||
) do
|
||||
{_, 0} ->
|
||||
:ok
|
||||
|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@ defmodule Pleroma.Activity.Queries do
|
|||
Contains queries for Activity.
|
||||
"""
|
||||
|
||||
import Ecto.Query, only: [from: 2, where: 3]
|
||||
import Ecto.Query, only: [from: 2]
|
||||
|
||||
@type query :: Ecto.Queryable.t() | Pleroma.Activity.t()
|
||||
|
||||
|
|
@ -70,22 +70,6 @@ defmodule Pleroma.Activity.Queries do
|
|||
)
|
||||
end
|
||||
|
||||
@spec by_object_in_reply_to_id(query, String.t(), keyword()) :: query
|
||||
def by_object_in_reply_to_id(query, in_reply_to_id, opts \\ []) do
|
||||
query =
|
||||
if opts[:skip_preloading] do
|
||||
Activity.with_joined_object(query)
|
||||
else
|
||||
Activity.with_preloaded_object(query)
|
||||
end
|
||||
|
||||
where(
|
||||
query,
|
||||
[activity, object: o],
|
||||
fragment("(?)->>'inReplyTo' = ?", o.data, ^to_string(in_reply_to_id))
|
||||
)
|
||||
end
|
||||
|
||||
@spec by_type(query, String.t()) :: query
|
||||
def by_type(query \\ Activity, activity_type) do
|
||||
from(
|
||||
|
|
|
|||
|
|
@ -43,9 +43,6 @@ defmodule Pleroma.Application do
|
|||
# every time the application is restarted, so we disable module
|
||||
# conflicts at runtime
|
||||
Code.compiler_options(ignore_module_conflict: true)
|
||||
# Disable warnings_as_errors at runtime, it breaks Phoenix live reload
|
||||
# due to protocol consolidation warnings
|
||||
Code.compiler_options(warnings_as_errors: false)
|
||||
Pleroma.Telemetry.Logger.attach()
|
||||
Config.Holder.save_default()
|
||||
Pleroma.HTML.compile_scrubbers()
|
||||
|
|
@ -56,7 +53,10 @@ defmodule Pleroma.Application do
|
|||
Pleroma.Web.Plugs.HTTPSecurityPlug.warn_if_disabled()
|
||||
end
|
||||
|
||||
Pleroma.ApplicationRequirements.verify!()
|
||||
if Config.get(:env) != :test do
|
||||
Pleroma.ApplicationRequirements.verify!()
|
||||
end
|
||||
|
||||
load_custom_modules()
|
||||
Pleroma.Docs.JSON.compile()
|
||||
limiters_setup()
|
||||
|
|
@ -68,26 +68,11 @@ defmodule Pleroma.Application do
|
|||
Finch.start_link(name: MyFinch)
|
||||
end
|
||||
|
||||
if adapter == Tesla.Adapter.Gun do
|
||||
if version = Pleroma.OTPVersion.version() do
|
||||
[major, minor] =
|
||||
version
|
||||
|> String.split(".")
|
||||
|> Enum.map(&String.to_integer/1)
|
||||
|> Enum.take(2)
|
||||
|
||||
if (major == 22 and minor < 2) or major < 22 do
|
||||
raise "
|
||||
!!!OTP VERSION WARNING!!!
|
||||
You are using gun adapter with OTP version #{version}, which doesn't support correct handling of unordered certificates chains. Please update your Erlang/OTP to at least 22.2.
|
||||
"
|
||||
end
|
||||
else
|
||||
raise "
|
||||
!!!OTP VERSION WARNING!!!
|
||||
To support correct handling of unordered certificates chains - OTP version must be > 22.2.
|
||||
"
|
||||
end
|
||||
# Disable warnings_as_errors at runtime, it breaks Phoenix live reload
|
||||
# due to protocol consolidation warnings
|
||||
# :warnings_as_errors is deprecated via Code.compiler_options/2 since 1.18
|
||||
if Version.compare(System.version(), "1.18.0") == :lt do
|
||||
Code.compiler_options(warnings_as_errors: false)
|
||||
end
|
||||
|
||||
# Define workers and child supervisors to be supervised
|
||||
|
|
@ -169,7 +154,8 @@ defmodule Pleroma.Application do
|
|||
limit: 500_000
|
||||
),
|
||||
build_cachex("rel_me", limit: 2500),
|
||||
build_cachex("host_meta", default_ttl: :timer.minutes(120), limit: 5000)
|
||||
build_cachex("host_meta", default_ttl: :timer.minutes(120), limit: 5_000),
|
||||
build_cachex("translations", default_ttl: :timer.hours(24), limit: 5_000)
|
||||
]
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -189,7 +189,40 @@ defmodule Pleroma.ApplicationRequirements do
|
|||
false
|
||||
end
|
||||
|
||||
if Enum.all?([preview_proxy_commands_status | filter_commands_statuses], & &1) do
|
||||
language_detector_commands_status =
|
||||
if Pleroma.Language.LanguageDetector.missing_dependencies() == [] do
|
||||
true
|
||||
else
|
||||
Logger.error(
|
||||
"The following dependencies required by the currently enabled " <>
|
||||
"language detection provider are not installed: " <>
|
||||
inspect(Pleroma.Language.LanguageDetector.missing_dependencies())
|
||||
)
|
||||
|
||||
false
|
||||
end
|
||||
|
||||
translation_commands_status =
|
||||
if Pleroma.Language.Translation.missing_dependencies() == [] do
|
||||
true
|
||||
else
|
||||
Logger.error(
|
||||
"The following dependencies required by the currently enabled " <>
|
||||
"translation provider are not installed: " <>
|
||||
inspect(Pleroma.Language.Translation.missing_dependencies())
|
||||
)
|
||||
|
||||
false
|
||||
end
|
||||
|
||||
if Enum.all?(
|
||||
[
|
||||
preview_proxy_commands_status,
|
||||
language_detector_commands_status,
|
||||
translation_commands_status | filter_commands_statuses
|
||||
],
|
||||
& &1
|
||||
) do
|
||||
:ok
|
||||
else
|
||||
{:error,
|
||||
|
|
|
|||
72
lib/pleroma/backports.ex
Normal file
|
|
@ -0,0 +1,72 @@
|
|||
# Copyright 2012 Plataformatec
|
||||
# Copyright 2021 The Elixir Team
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
defmodule Pleroma.Backports do
|
||||
import File, only: [dir?: 1]
|
||||
|
||||
# <https://github.com/elixir-lang/elixir/pull/14242>
|
||||
# To be removed when we require Elixir 1.19
|
||||
@doc """
|
||||
Tries to create the directory `path`.
|
||||
|
||||
Missing parent directories are created. Returns `:ok` if successful, or
|
||||
`{:error, reason}` if an error occurs.
|
||||
|
||||
Typical error reasons are:
|
||||
|
||||
* `:eacces` - missing search or write permissions for the parent
|
||||
directories of `path`
|
||||
* `:enospc` - there is no space left on the device
|
||||
* `:enotdir` - a component of `path` is not a directory
|
||||
|
||||
"""
|
||||
@spec mkdir_p(Path.t()) :: :ok | {:error, File.posix() | :badarg}
|
||||
def mkdir_p(path) do
|
||||
do_mkdir_p(IO.chardata_to_string(path))
|
||||
end
|
||||
|
||||
defp do_mkdir_p("/") do
|
||||
:ok
|
||||
end
|
||||
|
||||
defp do_mkdir_p(path) do
|
||||
parent = Path.dirname(path)
|
||||
|
||||
if parent == path do
|
||||
:ok
|
||||
else
|
||||
case do_mkdir_p(parent) do
|
||||
:ok ->
|
||||
case :file.make_dir(path) do
|
||||
{:error, :eexist} ->
|
||||
if dir?(path), do: :ok, else: {:error, :enotdir}
|
||||
|
||||
other ->
|
||||
other
|
||||
end
|
||||
|
||||
e ->
|
||||
e
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@doc """
|
||||
Same as `mkdir_p/1`, but raises a `File.Error` exception in case of failure.
|
||||
Otherwise `:ok`.
|
||||
"""
|
||||
@spec mkdir_p!(Path.t()) :: :ok
|
||||
def mkdir_p!(path) do
|
||||
case mkdir_p(path) do
|
||||
:ok ->
|
||||
:ok
|
||||
|
||||
{:error, reason} ->
|
||||
raise File.Error,
|
||||
reason: reason,
|
||||
action: "make directory (with -p)",
|
||||
path: IO.chardata_to_string(path)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -25,6 +25,8 @@ defmodule Pleroma.Chat do
|
|||
belongs_to(:user, User, type: FlakeId.Ecto.CompatType)
|
||||
field(:recipient, :string)
|
||||
|
||||
field(:pinned, :boolean)
|
||||
|
||||
timestamps()
|
||||
end
|
||||
|
||||
|
|
@ -94,4 +96,16 @@ defmodule Pleroma.Chat do
|
|||
order_by: [desc: c.updated_at]
|
||||
)
|
||||
end
|
||||
|
||||
def pin(%__MODULE__{} = chat) do
|
||||
chat
|
||||
|> cast(%{pinned: true}, [:pinned])
|
||||
|> Repo.update()
|
||||
end
|
||||
|
||||
def unpin(%__MODULE__{} = chat) do
|
||||
chat
|
||||
|> cast(%{pinned: false}, [:pinned])
|
||||
|> Repo.update()
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -302,7 +302,7 @@ defmodule Pleroma.ConfigDB do
|
|||
end
|
||||
|
||||
def to_elixir_types(%{"tuple" => entity}) do
|
||||
Enum.reduce(entity, {}, &Tuple.append(&2, to_elixir_types(&1)))
|
||||
Enum.reduce(entity, {}, &Tuple.insert_at(&2, tuple_size(&2), to_elixir_types(&1)))
|
||||
end
|
||||
|
||||
def to_elixir_types(entity) when is_map(entity) do
|
||||
|
|
|
|||
|
|
@ -21,7 +21,8 @@ defmodule Pleroma.Constants do
|
|||
"pleroma_internal",
|
||||
"generator",
|
||||
"rules",
|
||||
"language"
|
||||
"language",
|
||||
"voters"
|
||||
]
|
||||
)
|
||||
|
||||
|
|
@ -100,6 +101,7 @@ defmodule Pleroma.Constants do
|
|||
"Add",
|
||||
"Remove",
|
||||
"Like",
|
||||
"Dislike",
|
||||
"Announce",
|
||||
"Undo",
|
||||
"Flag",
|
||||
|
|
@ -115,6 +117,7 @@ defmodule Pleroma.Constants do
|
|||
"Flag",
|
||||
"Follow",
|
||||
"Like",
|
||||
"Dislike",
|
||||
"EmojiReact",
|
||||
"Announce"
|
||||
]
|
||||
|
|
@ -130,6 +133,13 @@ defmodule Pleroma.Constants do
|
|||
do: ~r/^[^[:cntrl:] ()<>@,;:\\"\/\[\]?=]+\/[^[:cntrl:] ()<>@,;:\\"\/\[\]?=]+(; .*)?$/
|
||||
)
|
||||
|
||||
# List of allowed chars in the path segment of a URI
|
||||
# unreserved, sub-delims, ":", "@" and "/" allowed as the separator in path
|
||||
# https://datatracker.ietf.org/doc/html/rfc3986
|
||||
const(uri_path_allowed_reserved_chars,
|
||||
do: ~c"!$&'()*+,;=/:@"
|
||||
)
|
||||
|
||||
const(upload_object_types, do: ["Document", "Image"])
|
||||
|
||||
const(activity_json_canonical_mime_type,
|
||||
|
|
|
|||
|
|
@ -225,6 +225,97 @@ defmodule Pleroma.Emoji.Pack do
|
|||
end
|
||||
end
|
||||
|
||||
def download_zip(name, opts \\ %{}) do
|
||||
with :ok <- validate_not_empty([name]),
|
||||
:ok <- validate_new_pack(name),
|
||||
{:ok, archive_data} <- fetch_archive_data(opts),
|
||||
pack_path <- path_join_name_safe(emoji_path(), name),
|
||||
:ok <- create_pack_dir(pack_path),
|
||||
:ok <- safe_unzip(archive_data, pack_path) do
|
||||
ensure_pack_json(pack_path, archive_data, opts)
|
||||
else
|
||||
{:error, :empty_values} -> {:error, "Pack name cannot be empty"}
|
||||
{:error, reason} when is_binary(reason) -> {:error, reason}
|
||||
_ -> {:error, "Could not process pack"}
|
||||
end
|
||||
end
|
||||
|
||||
defp create_pack_dir(pack_path) do
|
||||
case File.mkdir_p(pack_path) do
|
||||
:ok -> :ok
|
||||
{:error, _} -> {:error, "Could not create the pack directory"}
|
||||
end
|
||||
end
|
||||
|
||||
defp safe_unzip(archive_data, pack_path) do
|
||||
case SafeZip.unzip_data(archive_data, pack_path) do
|
||||
{:ok, _} -> :ok
|
||||
{:error, reason} when is_binary(reason) -> {:error, reason}
|
||||
_ -> {:error, "Could not unzip pack"}
|
||||
end
|
||||
end
|
||||
|
||||
defp validate_new_pack(name) do
|
||||
pack_path = path_join_name_safe(emoji_path(), name)
|
||||
|
||||
if File.exists?(pack_path) do
|
||||
{:error, "Pack already exists, refusing to import #{name}"}
|
||||
else
|
||||
:ok
|
||||
end
|
||||
end
|
||||
|
||||
defp fetch_archive_data(%{url: url}) do
|
||||
case Pleroma.HTTP.get(url) do
|
||||
{:ok, %{status: 200, body: data}} -> {:ok, data}
|
||||
_ -> {:error, "Could not download pack"}
|
||||
end
|
||||
end
|
||||
|
||||
defp fetch_archive_data(%{file: %Plug.Upload{path: path}}) do
|
||||
case File.read(path) do
|
||||
{:ok, data} -> {:ok, data}
|
||||
_ -> {:error, "Could not read the uploaded pack file"}
|
||||
end
|
||||
end
|
||||
|
||||
defp fetch_archive_data(_) do
|
||||
{:error, "Neither file nor URL was present in the request"}
|
||||
end
|
||||
|
||||
defp ensure_pack_json(pack_path, archive_data, opts) do
|
||||
pack_json_path = Path.join(pack_path, "pack.json")
|
||||
|
||||
if not File.exists?(pack_json_path) do
|
||||
create_pack_json(pack_path, pack_json_path, archive_data, opts)
|
||||
end
|
||||
|
||||
:ok
|
||||
end
|
||||
|
||||
defp create_pack_json(pack_path, pack_json_path, archive_data, opts) do
|
||||
emoji_map =
|
||||
Pleroma.Emoji.Loader.make_shortcode_to_file_map(
|
||||
pack_path,
|
||||
Map.get(opts, :exts, [".png", ".gif", ".jpg"])
|
||||
)
|
||||
|
||||
archive_sha = :crypto.hash(:sha256, archive_data) |> Base.encode16()
|
||||
|
||||
pack_json = %{
|
||||
pack: %{
|
||||
license: Map.get(opts, :license, ""),
|
||||
homepage: Map.get(opts, :homepage, ""),
|
||||
description: Map.get(opts, :description, ""),
|
||||
src: Map.get(opts, :url),
|
||||
src_sha256: archive_sha
|
||||
},
|
||||
files: emoji_map
|
||||
}
|
||||
|
||||
File.write!(pack_json_path, Jason.encode!(pack_json, pretty: true))
|
||||
end
|
||||
|
||||
@spec download(String.t(), String.t(), String.t()) :: {:ok, t()} | {:error, atom()}
|
||||
def download(name, url, as) do
|
||||
uri = url |> String.trim() |> URI.parse()
|
||||
|
|
@ -488,7 +579,7 @@ defmodule Pleroma.Emoji.Pack do
|
|||
with true <- String.contains?(file_path, "/"),
|
||||
path <- Path.dirname(file_path),
|
||||
false <- File.exists?(path) do
|
||||
File.mkdir_p!(path)
|
||||
Pleroma.Backports.mkdir_p!(path)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -536,7 +627,7 @@ defmodule Pleroma.Emoji.Pack do
|
|||
emoji_path = emoji_path()
|
||||
# Create the directory first if it does not exist. This is probably the first request made
|
||||
# with the API so it should be sufficient
|
||||
with {:create_dir, :ok} <- {:create_dir, File.mkdir_p(emoji_path)},
|
||||
with {:create_dir, :ok} <- {:create_dir, Pleroma.Backports.mkdir_p(emoji_path)},
|
||||
{:ls, {:ok, results}} <- {:ls, File.ls(emoji_path)} do
|
||||
{:ok, Enum.sort(results)}
|
||||
else
|
||||
|
|
@ -561,7 +652,7 @@ defmodule Pleroma.Emoji.Pack do
|
|||
end
|
||||
|
||||
defp unzip(archive, pack_info, remote_pack, local_pack) do
|
||||
with :ok <- File.mkdir_p!(local_pack.path) do
|
||||
with :ok <- Pleroma.Backports.mkdir_p!(local_pack.path) do
|
||||
files = Enum.map(remote_pack["files"], fn {_, path} -> path end)
|
||||
# Fallback cannot contain a pack.json file
|
||||
files = if pack_info[:fallback], do: files, else: ["pack.json" | files]
|
||||
|
|
|
|||
|
|
@ -157,6 +157,16 @@ defmodule Pleroma.FollowingRelationship do
|
|||
|> Repo.all()
|
||||
end
|
||||
|
||||
def get_outgoing_follow_requests(%User{id: id}) do
|
||||
__MODULE__
|
||||
|> join(:inner, [r], f in assoc(r, :following))
|
||||
|> where([r], r.state == ^:follow_pending)
|
||||
|> where([r], r.follower_id == ^id)
|
||||
|> where([r, f], f.is_active == true)
|
||||
|> select([r, f], f)
|
||||
|> Repo.all()
|
||||
end
|
||||
|
||||
def following?(%User{id: follower_id}, %User{id: followed_id}) do
|
||||
__MODULE__
|
||||
|> where(follower_id: ^follower_id, following_id: ^followed_id, state: ^:follow_accept)
|
||||
|
|
|
|||
|
|
@ -66,7 +66,7 @@ defmodule Pleroma.Frontend do
|
|||
|
||||
def unzip(zip, dest) do
|
||||
File.rm_rf!(dest)
|
||||
File.mkdir_p!(dest)
|
||||
Pleroma.Backports.mkdir_p!(dest)
|
||||
|
||||
case Pleroma.SafeZip.unzip_data(zip, dest) do
|
||||
{:ok, _} -> :ok
|
||||
|
|
@ -90,7 +90,7 @@ defmodule Pleroma.Frontend do
|
|||
defp install_frontend(frontend_info, source, dest) do
|
||||
from = frontend_info["build_dir"] || "dist"
|
||||
File.rm_rf!(dest)
|
||||
File.mkdir_p!(dest)
|
||||
Pleroma.Backports.mkdir_p!(dest)
|
||||
File.cp_r!(Path.join([source, from]), dest)
|
||||
:ok
|
||||
end
|
||||
|
|
|
|||
|
|
@ -22,14 +22,18 @@ defmodule Pleroma.Gopher.Server do
|
|||
def init([ip, port]) do
|
||||
Logger.info("Starting gopher server on #{port}")
|
||||
|
||||
:ranch.start_listener(
|
||||
:gopher,
|
||||
100,
|
||||
:ranch_tcp,
|
||||
[ip: ip, port: port],
|
||||
__MODULE__.ProtocolHandler,
|
||||
[]
|
||||
)
|
||||
{:ok, _pid} =
|
||||
:ranch.start_listener(
|
||||
:gopher,
|
||||
:ranch_tcp,
|
||||
%{
|
||||
num_acceptors: 100,
|
||||
max_connections: 100,
|
||||
socket_opts: [ip: ip, port: port]
|
||||
},
|
||||
__MODULE__.ProtocolHandler,
|
||||
[]
|
||||
)
|
||||
|
||||
{:ok, %{ip: ip, port: port}}
|
||||
end
|
||||
|
|
@ -43,13 +47,13 @@ defmodule Pleroma.Gopher.Server.ProtocolHandler do
|
|||
alias Pleroma.Web.ActivityPub.ActivityPub
|
||||
alias Pleroma.Web.ActivityPub.Visibility
|
||||
|
||||
def start_link(ref, socket, transport, opts) do
|
||||
pid = spawn_link(__MODULE__, :init, [ref, socket, transport, opts])
|
||||
def start_link(ref, transport, opts) do
|
||||
pid = spawn_link(__MODULE__, :init, [ref, transport, opts])
|
||||
{:ok, pid}
|
||||
end
|
||||
|
||||
def init(ref, socket, transport, [] = _Opts) do
|
||||
:ok = :ranch.accept_ack(ref)
|
||||
def init(ref, transport, opts \\ []) do
|
||||
{:ok, socket} = :ranch.handshake(ref, opts)
|
||||
loop(socket, transport)
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -130,4 +130,66 @@ defmodule Pleroma.Hashtag do
|
|||
end
|
||||
|
||||
def get_recipients_for_activity(_activity), do: []
|
||||
|
||||
def search(query, options \\ []) do
|
||||
limit = Keyword.get(options, :limit, 20)
|
||||
offset = Keyword.get(options, :offset, 0)
|
||||
|
||||
search_terms =
|
||||
query
|
||||
|> String.downcase()
|
||||
|> String.trim()
|
||||
|> String.split(~r/\s+/)
|
||||
|> Enum.filter(&(&1 != ""))
|
||||
|> Enum.map(&String.trim_leading(&1, "#"))
|
||||
|> Enum.filter(&(&1 != ""))
|
||||
|
||||
if Enum.empty?(search_terms) do
|
||||
[]
|
||||
else
|
||||
# Use PostgreSQL's ANY operator with array for efficient multi-term search
|
||||
# This is much more efficient than multiple OR clauses
|
||||
search_patterns = Enum.map(search_terms, &"%#{&1}%")
|
||||
|
||||
# Create ranking query that prioritizes exact matches and closer matches
|
||||
# Use a subquery to properly handle computed columns in ORDER BY
|
||||
base_query =
|
||||
from(ht in Hashtag,
|
||||
where: fragment("LOWER(?) LIKE ANY(?)", ht.name, ^search_patterns),
|
||||
select: %{
|
||||
name: ht.name,
|
||||
# Ranking: exact matches get highest priority (0)
|
||||
# then prefix matches (1), then contains (2)
|
||||
match_rank:
|
||||
fragment(
|
||||
"""
|
||||
CASE
|
||||
WHEN LOWER(?) = ANY(?) THEN 0
|
||||
WHEN LOWER(?) LIKE ANY(?) THEN 1
|
||||
ELSE 2
|
||||
END
|
||||
""",
|
||||
ht.name,
|
||||
^search_terms,
|
||||
ht.name,
|
||||
^Enum.map(search_terms, &"#{&1}%")
|
||||
),
|
||||
# Secondary sort by name length (shorter names first)
|
||||
name_length: fragment("LENGTH(?)", ht.name)
|
||||
}
|
||||
)
|
||||
|
||||
from(result in subquery(base_query),
|
||||
order_by: [
|
||||
asc: result.match_rank,
|
||||
asc: result.name_length,
|
||||
asc: result.name
|
||||
],
|
||||
limit: ^limit,
|
||||
offset: ^offset
|
||||
)
|
||||
|> Repo.all()
|
||||
|> Enum.map(& &1.name)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -105,20 +105,30 @@ defmodule Pleroma.HTTP do
|
|||
end
|
||||
|
||||
defp adapter_middlewares(Tesla.Adapter.Gun, extra_middleware) do
|
||||
[Tesla.Middleware.FollowRedirects, Pleroma.Tesla.Middleware.ConnectionPool] ++
|
||||
default_middleware() ++
|
||||
[Pleroma.Tesla.Middleware.ConnectionPool] ++
|
||||
extra_middleware
|
||||
end
|
||||
|
||||
defp adapter_middlewares({Tesla.Adapter.Finch, _}, extra_middleware) do
|
||||
[Tesla.Middleware.FollowRedirects] ++ extra_middleware
|
||||
end
|
||||
|
||||
defp adapter_middlewares(_, extra_middleware) do
|
||||
if Pleroma.Config.get(:env) == :test do
|
||||
# Emulate redirects in test env, which are handled by adapters in other environments
|
||||
[Tesla.Middleware.FollowRedirects]
|
||||
else
|
||||
extra_middleware
|
||||
# A lot of tests are written expecting unencoded URLs
|
||||
# and the burden of fixing that is high. Also it makes
|
||||
# them hard to read. Tests will opt-in when we want to validate
|
||||
# the encoding is being done correctly.
|
||||
cond do
|
||||
Pleroma.Config.get(:env) == :test and Pleroma.Config.get(:test_url_encoding) ->
|
||||
default_middleware()
|
||||
|
||||
Pleroma.Config.get(:env) == :test ->
|
||||
# Emulate redirects in test env, which are handled by adapters in other environments
|
||||
[Tesla.Middleware.FollowRedirects]
|
||||
|
||||
# Hackney and Finch
|
||||
true ->
|
||||
default_middleware() ++ extra_middleware
|
||||
end
|
||||
end
|
||||
|
||||
defp default_middleware,
|
||||
do: [Tesla.Middleware.FollowRedirects, Pleroma.Tesla.Middleware.EncodeUrl]
|
||||
end
|
||||
|
|
|
|||
|
|
@ -16,7 +16,12 @@ defmodule Pleroma.HTTP.AdapterHelper.Hackney do
|
|||
|
||||
config_opts = Pleroma.Config.get([:http, :adapter], [])
|
||||
|
||||
url_encoding =
|
||||
Keyword.new()
|
||||
|> Keyword.put(:path_encode_fun, fn path -> path end)
|
||||
|
||||
@defaults
|
||||
|> Keyword.merge(url_encoding)
|
||||
|> Keyword.merge(config_opts)
|
||||
|> Keyword.merge(connection_opts)
|
||||
|> add_scheme_opts(uri)
|
||||
|
|
|
|||
|
|
@ -15,25 +15,7 @@ defmodule Pleroma.Instances do
|
|||
|
||||
defdelegate set_unreachable(url_or_host, unreachable_since \\ nil), to: Instance
|
||||
|
||||
defdelegate get_consistently_unreachable, to: Instance
|
||||
|
||||
def set_consistently_unreachable(url_or_host),
|
||||
do: set_unreachable(url_or_host, reachability_datetime_threshold())
|
||||
|
||||
def reachability_datetime_threshold do
|
||||
federation_reachability_timeout_days =
|
||||
Pleroma.Config.get([:instance, :federation_reachability_timeout_days], 0)
|
||||
|
||||
if federation_reachability_timeout_days > 0 do
|
||||
NaiveDateTime.add(
|
||||
NaiveDateTime.utc_now(),
|
||||
-federation_reachability_timeout_days * 24 * 3600,
|
||||
:second
|
||||
)
|
||||
else
|
||||
~N[0000-01-01 00:00:00]
|
||||
end
|
||||
end
|
||||
defdelegate get_unreachable, to: Instance
|
||||
|
||||
def host(url_or_host) when is_binary(url_or_host) do
|
||||
if url_or_host =~ ~r/^http/i do
|
||||
|
|
@ -42,4 +24,21 @@ defmodule Pleroma.Instances do
|
|||
url_or_host
|
||||
end
|
||||
end
|
||||
|
||||
@doc "Schedules reachability checks for all unreachable instances"
|
||||
def check_all_unreachable do
|
||||
get_unreachable()
|
||||
|> Enum.each(fn {domain, _} ->
|
||||
Pleroma.Workers.ReachabilityWorker.new(%{"domain" => domain})
|
||||
|> Oban.insert()
|
||||
end)
|
||||
end
|
||||
|
||||
@doc "Deletes all users and activities for unreachable instances"
|
||||
def delete_all_unreachable do
|
||||
get_unreachable()
|
||||
|> Enum.each(fn {domain, _} ->
|
||||
Instance.delete(domain)
|
||||
end)
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -9,7 +9,6 @@ defmodule Pleroma.Instances.Instance do
|
|||
alias Pleroma.Instances.Instance
|
||||
alias Pleroma.Maps
|
||||
alias Pleroma.Repo
|
||||
alias Pleroma.User
|
||||
alias Pleroma.Workers.DeleteWorker
|
||||
|
||||
use Ecto.Schema
|
||||
|
|
@ -51,7 +50,7 @@ defmodule Pleroma.Instances.Instance do
|
|||
|> cast(params, [:software_name, :software_version, :software_repository])
|
||||
end
|
||||
|
||||
def filter_reachable([]), do: %{}
|
||||
def filter_reachable([]), do: []
|
||||
|
||||
def filter_reachable(urls_or_hosts) when is_list(urls_or_hosts) do
|
||||
hosts =
|
||||
|
|
@ -68,19 +67,15 @@ defmodule Pleroma.Instances.Instance do
|
|||
)
|
||||
|> Map.new(& &1)
|
||||
|
||||
reachability_datetime_threshold = Instances.reachability_datetime_threshold()
|
||||
|
||||
for entry <- Enum.filter(urls_or_hosts, &is_binary/1) do
|
||||
host = host(entry)
|
||||
unreachable_since = unreachable_since_by_host[host]
|
||||
|
||||
if !unreachable_since ||
|
||||
NaiveDateTime.compare(unreachable_since, reachability_datetime_threshold) == :gt do
|
||||
{entry, unreachable_since}
|
||||
if is_nil(unreachable_since) do
|
||||
entry
|
||||
end
|
||||
end
|
||||
|> Enum.filter(& &1)
|
||||
|> Map.new(& &1)
|
||||
end
|
||||
|
||||
def reachable?(url_or_host) when is_binary(url_or_host) do
|
||||
|
|
@ -88,7 +83,7 @@ defmodule Pleroma.Instances.Instance do
|
|||
from(i in Instance,
|
||||
where:
|
||||
i.host == ^host(url_or_host) and
|
||||
i.unreachable_since <= ^Instances.reachability_datetime_threshold(),
|
||||
not is_nil(i.unreachable_since),
|
||||
select: true
|
||||
)
|
||||
)
|
||||
|
|
@ -97,9 +92,16 @@ defmodule Pleroma.Instances.Instance do
|
|||
def reachable?(url_or_host) when is_binary(url_or_host), do: true
|
||||
|
||||
def set_reachable(url_or_host) when is_binary(url_or_host) do
|
||||
%Instance{host: host(url_or_host)}
|
||||
|> changeset(%{unreachable_since: nil})
|
||||
|> Repo.insert(on_conflict: {:replace, [:unreachable_since]}, conflict_target: :host)
|
||||
host = host(url_or_host)
|
||||
|
||||
result =
|
||||
%Instance{host: host}
|
||||
|> changeset(%{unreachable_since: nil})
|
||||
|> Repo.insert(on_conflict: {:replace, [:unreachable_since]}, conflict_target: :host)
|
||||
|
||||
Pleroma.Workers.ReachabilityWorker.delete_jobs_for_host(host)
|
||||
|
||||
result
|
||||
end
|
||||
|
||||
def set_reachable(_), do: {:error, nil}
|
||||
|
|
@ -132,11 +134,9 @@ defmodule Pleroma.Instances.Instance do
|
|||
|
||||
def set_unreachable(_, _), do: {:error, nil}
|
||||
|
||||
def get_consistently_unreachable do
|
||||
reachability_datetime_threshold = Instances.reachability_datetime_threshold()
|
||||
|
||||
def get_unreachable do
|
||||
from(i in Instance,
|
||||
where: ^reachability_datetime_threshold > i.unreachable_since,
|
||||
where: not is_nil(i.unreachable_since),
|
||||
order_by: i.unreachable_since,
|
||||
select: {i.host, i.unreachable_since}
|
||||
)
|
||||
|
|
@ -296,20 +296,14 @@ defmodule Pleroma.Instances.Instance do
|
|||
Deletes all users from an instance in a background task, thus also deleting
|
||||
all of those users' activities and notifications.
|
||||
"""
|
||||
def delete_users_and_activities(host) when is_binary(host) do
|
||||
def delete(host) when is_binary(host) do
|
||||
DeleteWorker.new(%{"op" => "delete_instance", "host" => host})
|
||||
|> Oban.insert()
|
||||
end
|
||||
|
||||
def perform(:delete_instance, host) when is_binary(host) do
|
||||
User.Query.build(%{nickname: "@#{host}"})
|
||||
|> Repo.chunk_stream(100, :batches)
|
||||
|> Stream.each(fn users ->
|
||||
users
|
||||
|> Enum.each(fn user ->
|
||||
User.perform(:delete, user)
|
||||
end)
|
||||
end)
|
||||
|> Stream.run()
|
||||
@doc "Schedules reachability check for instance"
|
||||
def check_unreachable(domain) when is_binary(domain) do
|
||||
Pleroma.Workers.ReachabilityWorker.new(%{"domain" => domain})
|
||||
|> Oban.insert()
|
||||
end
|
||||
end
|
||||
|
|
|
|||
59 lib/pleroma/language/language_detector.ex (new file)
@@ -0,0 +1,59 @@
|
|||
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.Language.LanguageDetector do
|
||||
import Pleroma.EctoType.ActivityPub.ObjectValidators.LanguageCode,
|
||||
only: [good_locale_code?: 1]
|
||||
|
||||
@words_threshold 4
|
||||
@config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
|
||||
|
||||
def configured? do
|
||||
provider = get_provider()
|
||||
|
||||
!!provider and provider.configured?()
|
||||
end
|
||||
|
||||
def missing_dependencies do
|
||||
provider = get_provider()
|
||||
|
||||
if provider do
|
||||
provider.missing_dependencies()
|
||||
else
|
||||
[]
|
||||
end
|
||||
end
|
||||
|
||||
# Strip tags from text, etc.
|
||||
defp prepare_text(text) do
|
||||
text
|
||||
|> Floki.parse_fragment!()
|
||||
|> Floki.filter_out(
|
||||
".h-card, .mention, .hashtag, .u-url, .quote-inline, .recipients-inline, code, pre"
|
||||
)
|
||||
|> Floki.text()
|
||||
end
|
||||
|
||||
def detect(text) do
|
||||
provider = get_provider()
|
||||
|
||||
text = prepare_text(text)
|
||||
word_count = text |> String.split(~r/\s+/) |> Enum.count()
|
||||
|
||||
if word_count < @words_threshold or !provider or !provider.configured?() do
|
||||
nil
|
||||
else
|
||||
with language <- provider.detect(text),
|
||||
true <- good_locale_code?(language) do
|
||||
language
|
||||
else
|
||||
_ -> nil
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
defp get_provider do
|
||||
@config_impl.get([__MODULE__, :provider])
|
||||
end
|
||||
end
|
||||
47 lib/pleroma/language/language_detector/fasttext.ex (new file)
@@ -0,0 +1,47 @@
|
|||
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.Language.LanguageDetector.Fasttext do
|
||||
import Pleroma.Web.Utils.Guards, only: [not_empty_string: 1]
|
||||
|
||||
alias Pleroma.Language.LanguageDetector.Provider
|
||||
|
||||
@behaviour Provider
|
||||
|
||||
@impl Provider
|
||||
def missing_dependencies do
|
||||
if Pleroma.Utils.command_available?("fasttext") do
|
||||
[]
|
||||
else
|
||||
["fasttext"]
|
||||
end
|
||||
end
|
||||
|
||||
@impl Provider
|
||||
def configured?, do: not_empty_string(get_model())
|
||||
|
||||
@impl Provider
|
||||
def detect(text) do
|
||||
text_path = Path.join(System.tmp_dir!(), "fasttext-#{Ecto.UUID.generate()}")
|
||||
|
||||
File.write(text_path, text |> String.replace(~r/\s+/, " "))
|
||||
|
||||
detected_language =
|
||||
case System.cmd("fasttext", ["predict", get_model(), text_path]) do
|
||||
{"__label__" <> language, _} ->
|
||||
language |> String.trim()
|
||||
|
||||
_ ->
|
||||
nil
|
||||
end
|
||||
|
||||
File.rm(text_path)
|
||||
|
||||
detected_language
|
||||
end
|
||||
|
||||
defp get_model do
|
||||
Pleroma.Config.get([__MODULE__, :model])
|
||||
end
|
||||
end
|
||||
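A configuration sketch inferred from the Config.get calls above (get_provider/0 in LanguageDetector and get_model/0 in Fasttext); the model path is a placeholder, not a value shipped by this merge request:

# config/config.exs (illustrative only)
config :pleroma, Pleroma.Language.LanguageDetector,
  provider: Pleroma.Language.LanguageDetector.Fasttext

config :pleroma, Pleroma.Language.LanguageDetector.Fasttext,
  model: "/usr/share/fasttext/lid.176.bin"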
11 lib/pleroma/language/language_detector/provider.ex (new file)
@@ -0,0 +1,11 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Language.LanguageDetector.Provider do
  @callback missing_dependencies() :: [String.t()]

  @callback configured?() :: boolean()

  @callback detect(text :: String.t()) :: String.t() | nil
end
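For reference, a minimal sketch of a module implementing the behaviour above (a hypothetical example, not part of this merge request):

defmodule MyApp.Language.StaticDetector do
  @behaviour Pleroma.Language.LanguageDetector.Provider

  @impl true
  def missing_dependencies, do: []

  @impl true
  def configured?, do: true

  @impl true
  def detect(text) do
    # Naive heuristic purely for illustration: Cyrillic text is guessed as "ru".
    if String.match?(text, ~r/\p{Cyrillic}/u), do: "ru", else: "en"
  end
end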
127 lib/pleroma/language/translation.ex (new file)
@@ -0,0 +1,127 @@
|
|||
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.Language.Translation do
|
||||
@cachex Pleroma.Config.get([:cachex, :provider], Cachex)
|
||||
|
||||
def configured? do
|
||||
provider = get_provider()
|
||||
|
||||
!!provider and provider.configured?()
|
||||
end
|
||||
|
||||
def missing_dependencies do
|
||||
provider = get_provider()
|
||||
|
||||
if provider do
|
||||
provider.missing_dependencies()
|
||||
else
|
||||
[]
|
||||
end
|
||||
end
|
||||
|
||||
def translate(text, source_language, target_language) do
|
||||
cache_key = get_cache_key(text, source_language, target_language)
|
||||
|
||||
case @cachex.get(:translations_cache, cache_key) do
|
||||
{:ok, nil} ->
|
||||
provider = get_provider()
|
||||
|
||||
result =
|
||||
if !configured?() do
|
||||
{:error, :not_found}
|
||||
else
|
||||
provider.translate(text, source_language, target_language)
|
||||
|> scrub_html()
|
||||
end
|
||||
|
||||
store_result(result, cache_key)
|
||||
|
||||
result
|
||||
|
||||
{:ok, result} ->
|
||||
{:ok, result}
|
||||
|
||||
{:error, error} ->
|
||||
{:error, error}
|
||||
end
|
||||
end
|
||||
|
||||
def supported_languages(type) when type in [:source, :target] do
|
||||
provider = get_provider()
|
||||
|
||||
cache_key = "#{type}_languages/#{provider.name()}"
|
||||
|
||||
case @cachex.get(:translations_cache, cache_key) do
|
||||
{:ok, nil} ->
|
||||
result =
|
||||
if !configured?() do
|
||||
{:error, :not_found}
|
||||
else
|
||||
provider.supported_languages(type)
|
||||
end
|
||||
|
||||
store_result(result, cache_key)
|
||||
|
||||
result
|
||||
|
||||
{:ok, result} ->
|
||||
{:ok, result}
|
||||
|
||||
{:error, error} ->
|
||||
{:error, error}
|
||||
end
|
||||
end
|
||||
|
||||
def languages_matrix do
|
||||
provider = get_provider()
|
||||
|
||||
cache_key = "languages_matrix/#{provider.name()}"
|
||||
|
||||
case @cachex.get(:translations_cache, cache_key) do
|
||||
{:ok, nil} ->
|
||||
result =
|
||||
if !configured?() do
|
||||
{:error, :not_found}
|
||||
else
|
||||
provider.languages_matrix()
|
||||
end
|
||||
|
||||
store_result(result, cache_key)
|
||||
|
||||
result
|
||||
|
||||
{:ok, result} ->
|
||||
{:ok, result}
|
||||
|
||||
{:error, error} ->
|
||||
{:error, error}
|
||||
end
|
||||
end
|
||||
|
||||
defp get_provider, do: Pleroma.Config.get([__MODULE__, :provider])
|
||||
|
||||
defp get_cache_key(text, source_language, target_language) do
|
||||
"#{source_language}/#{target_language}/#{content_hash(text)}"
|
||||
end
|
||||
|
||||
defp store_result({:ok, result}, cache_key) do
|
||||
@cachex.put(:translations_cache, cache_key, result)
|
||||
end
|
||||
|
||||
defp store_result(_, _), do: nil
|
||||
|
||||
defp content_hash(text), do: :crypto.hash(:sha256, text) |> Base.encode64()
|
||||
|
||||
defp scrub_html({:ok, %{content: content} = result}) when is_binary(content) do
|
||||
scrubbers = Pleroma.Config.get([:markup, :scrub_policy])
|
||||
|
||||
content =
  content
  |> Pleroma.HTML.filter_tags(scrubbers)
|
||||
|
||||
{:ok, %{result | content: content}}
|
||||
end
|
||||
|
||||
defp scrub_html(result), do: result
|
||||
end
|
||||
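translate/3, supported_languages/1 and languages_matrix/0 above all follow the same cache-aside shape. A standalone sketch of that pattern using Cachex directly; the cache name is taken from the module above, the rest is illustrative:

defmodule CacheAsideSketch do
  @cache :translations_cache

  def fetch(key, compute_fun) do
    case Cachex.get(@cache, key) do
      {:ok, nil} ->
        # Miss: compute, store only successful results, return the raw result.
        result = compute_fun.()

        case result do
          {:ok, value} -> Cachex.put(@cache, key, value)
          _ -> :noop
        end

        result

      {:ok, value} ->
        {:ok, value}

      {:error, _} = error ->
        error
    end
  end
end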
121 lib/pleroma/language/translation/deepl.ex (new file)
@@ -0,0 +1,121 @@
|
|||
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.Language.Translation.Deepl do
|
||||
import Pleroma.Web.Utils.Guards, only: [not_empty_string: 1]
|
||||
|
||||
alias Pleroma.Language.Translation.Provider
|
||||
|
||||
use Provider
|
||||
|
||||
@behaviour Provider
|
||||
|
||||
@name "DeepL"
|
||||
|
||||
@impl Provider
|
||||
def configured?, do: not_empty_string(base_url()) and not_empty_string(api_key())
|
||||
|
||||
@impl Provider
|
||||
def translate(content, source_language, target_language) do
|
||||
endpoint =
|
||||
base_url()
|
||||
|> URI.merge("/v2/translate")
|
||||
|> URI.to_string()
|
||||
|
||||
case Pleroma.HTTP.post(
|
||||
endpoint,
|
||||
Jason.encode!(%{
|
||||
text: [content],
|
||||
source_lang: source_language |> String.upcase(),
|
||||
target_lang: target_language,
|
||||
tag_handling: "html"
|
||||
}),
|
||||
[
|
||||
{"Content-Type", "application/json"},
|
||||
{"Authorization", "DeepL-Auth-Key #{api_key()}"}
|
||||
]
|
||||
) do
|
||||
{:ok, %{status: 429}} ->
|
||||
{:error, :too_many_requests}
|
||||
|
||||
{:ok, %{status: 456}} ->
|
||||
{:error, :quota_exceeded}
|
||||
|
||||
{:ok, %{status: 200} = res} ->
|
||||
%{
|
||||
"translations" => [
|
||||
%{"text" => content, "detected_source_language" => detected_source_language}
|
||||
]
|
||||
} = Jason.decode!(res.body)
|
||||
|
||||
{:ok,
|
||||
%{
|
||||
content: content,
|
||||
detected_source_language: detected_source_language,
|
||||
provider: @name
|
||||
}}
|
||||
|
||||
_ ->
|
||||
{:error, :internal_server_error}
|
||||
end
|
||||
end
|
||||
|
||||
@impl Provider
|
||||
def supported_languages(type) when type in [:source, :target] do
|
||||
endpoint =
|
||||
base_url()
|
||||
|> URI.merge("/v2/languages")
|
||||
|> URI.to_string()
|
||||
|
||||
case Pleroma.HTTP.post(
|
||||
endpoint <> "?" <> URI.encode_query(%{type: type}),
|
||||
"",
|
||||
[
|
||||
{"Content-Type", "application/x-www-form-urlencoded"},
|
||||
{"Authorization", "DeepL-Auth-Key #{api_key()}"}
|
||||
]
|
||||
) do
|
||||
{:ok, %{status: 200} = res} ->
|
||||
languages =
|
||||
Jason.decode!(res.body)
|
||||
|> Enum.map(fn %{"language" => language} -> language |> String.downcase() end)
|
||||
|> Enum.map(fn language ->
|
||||
if String.contains?(language, "-") do
|
||||
[language, language |> String.split("-") |> Enum.at(0)]
|
||||
else
|
||||
language
|
||||
end
|
||||
end)
|
||||
|> List.flatten()
|
||||
|> Enum.uniq()
|
||||
|
||||
{:ok, languages}
|
||||
|
||||
_ ->
|
||||
{:error, :internal_server_error}
|
||||
end
|
||||
end
|
||||
|
||||
@impl Provider
|
||||
def languages_matrix do
|
||||
with {:ok, source_languages} <- supported_languages(:source),
|
||||
{:ok, target_languages} <- supported_languages(:target) do
|
||||
{:ok,
|
||||
Map.new(source_languages, fn language -> {language, target_languages -- [language]} end)}
|
||||
else
|
||||
{:error, error} -> {:error, error}
|
||||
end
|
||||
end
|
||||
|
||||
@impl Provider
|
||||
def name, do: @name
|
||||
|
||||
defp base_url do
|
||||
Pleroma.Config.get([__MODULE__, :base_url])
|
||||
end
|
||||
|
||||
defp api_key do
|
||||
Pleroma.Config.get([__MODULE__, :api_key])
|
||||
end
|
||||
end
|
||||
93 lib/pleroma/language/translation/libretranslate.ex (new file)
@@ -0,0 +1,93 @@
|
|||
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.Language.Translation.Libretranslate do
|
||||
import Pleroma.Web.Utils.Guards, only: [not_empty_string: 1]
|
||||
|
||||
alias Pleroma.Language.Translation.Provider
|
||||
|
||||
use Provider
|
||||
|
||||
@behaviour Provider
|
||||
|
||||
@name "LibreTranslate"
|
||||
|
||||
@impl Provider
|
||||
def configured?, do: not_empty_string(base_url()) and not_empty_string(api_key())
|
||||
|
||||
@impl Provider
|
||||
def translate(content, source_language, target_language) do
|
||||
case Pleroma.HTTP.post(
|
||||
base_url() <> "/translate",
|
||||
Jason.encode!(%{
|
||||
q: content,
|
||||
source: source_language |> String.upcase(),
|
||||
target: target_language,
|
||||
format: "html",
|
||||
api_key: api_key()
|
||||
}),
|
||||
[
|
||||
{"Content-Type", "application/json"}
|
||||
]
|
||||
) do
|
||||
{:ok, %{status: 429}} ->
|
||||
{:error, :too_many_requests}
|
||||
|
||||
{:ok, %{status: 403}} ->
|
||||
{:error, :quota_exceeded}
|
||||
|
||||
{:ok, %{status: 200} = res} ->
|
||||
%{
|
||||
"translatedText" => content
|
||||
} = Jason.decode!(res.body)
|
||||
|
||||
{:ok,
|
||||
%{
|
||||
content: content,
|
||||
detected_source_language: source_language,
|
||||
provider: @name
|
||||
}}
|
||||
|
||||
_ ->
|
||||
{:error, :internal_server_error}
|
||||
end
|
||||
end
|
||||
|
||||
@impl Provider
|
||||
def supported_languages(_) do
|
||||
case Pleroma.HTTP.get(base_url() <> "/languages") do
|
||||
{:ok, %{status: 200} = res} ->
|
||||
languages =
|
||||
Jason.decode!(res.body)
|
||||
|> Enum.map(fn %{"code" => code} -> code end)
|
||||
|
||||
{:ok, languages}
|
||||
|
||||
_ ->
|
||||
{:error, :internal_server_error}
|
||||
end
|
||||
end
|
||||
|
||||
@impl Provider
|
||||
def languages_matrix do
|
||||
with {:ok, source_languages} <- supported_languages(:source),
|
||||
{:ok, target_languages} <- supported_languages(:target) do
|
||||
{:ok,
|
||||
Map.new(source_languages, fn language -> {language, target_languages -- [language]} end)}
|
||||
else
|
||||
{:error, error} -> {:error, error}
|
||||
end
|
||||
end
|
||||
|
||||
@impl Provider
|
||||
def name, do: @name
|
||||
|
||||
defp base_url do
|
||||
Pleroma.Config.get([__MODULE__, :base_url])
|
||||
end
|
||||
|
||||
defp api_key do
|
||||
Pleroma.Config.get([__MODULE__, :api_key], "")
|
||||
end
|
||||
end
|
||||
109 lib/pleroma/language/translation/mozhi.ex (new file)
@@ -0,0 +1,109 @@
|
|||
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2024 Pleroma Authors <https://pleroma.social/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.Language.Translation.Mozhi do
|
||||
import Pleroma.Web.Utils.Guards, only: [not_empty_string: 1]
|
||||
|
||||
alias Pleroma.Language.Translation.Provider
|
||||
|
||||
use Provider
|
||||
|
||||
@behaviour Provider
|
||||
|
||||
@name "Mozhi"
|
||||
|
||||
@impl Provider
|
||||
def configured?, do: not_empty_string(base_url()) and not_empty_string(engine())
|
||||
|
||||
@impl Provider
|
||||
def translate(content, source_language, target_language) do
|
||||
endpoint =
|
||||
base_url()
|
||||
|> URI.merge("/api/translate")
|
||||
|> URI.to_string()
|
||||
|
||||
case Pleroma.HTTP.get(
|
||||
endpoint <>
|
||||
"?" <>
|
||||
URI.encode_query(%{
|
||||
engine: engine(),
|
||||
text: content,
|
||||
from: source_language,
|
||||
to: target_language
|
||||
}),
|
||||
[{"Accept", "application/json"}]
|
||||
) do
|
||||
{:ok, %{status: 200} = res} ->
|
||||
%{
|
||||
"translated-text" => content,
|
||||
"source_language" => source_language
|
||||
} = Jason.decode!(res.body)
|
||||
|
||||
{:ok,
|
||||
%{
|
||||
content: content,
|
||||
detected_source_language: source_language,
|
||||
provider: @name
|
||||
}}
|
||||
|
||||
_ ->
|
||||
{:error, :internal_server_error}
|
||||
end
|
||||
end
|
||||
|
||||
@impl Provider
|
||||
def supported_languages(type) when type in [:source, :target] do
|
||||
path =
|
||||
case type do
|
||||
:source -> "/api/source_languages"
|
||||
:target -> "/api/target_languages"
|
||||
end
|
||||
|
||||
endpoint =
|
||||
base_url()
|
||||
|> URI.merge(path)
|
||||
|> URI.to_string()
|
||||
|
||||
case Pleroma.HTTP.get(
|
||||
endpoint <>
|
||||
"?" <>
|
||||
URI.encode_query(%{
|
||||
engine: engine()
|
||||
}),
|
||||
[{"Accept", "application/json"}]
|
||||
) do
|
||||
{:ok, %{status: 200} = res} ->
|
||||
languages =
|
||||
Jason.decode!(res.body)
|
||||
|> Enum.map(fn %{"Id" => language} -> language end)
|
||||
|
||||
{:ok, languages}
|
||||
|
||||
_ ->
|
||||
{:error, :internal_server_error}
|
||||
end
|
||||
end
|
||||
|
||||
@impl Provider
|
||||
def languages_matrix do
|
||||
with {:ok, source_languages} <- supported_languages(:source),
|
||||
{:ok, target_languages} <- supported_languages(:target) do
|
||||
{:ok,
|
||||
Map.new(source_languages, fn language -> {language, target_languages -- [language]} end)}
|
||||
else
|
||||
{:error, error} -> {:error, error}
|
||||
end
|
||||
end
|
||||
|
||||
@impl Provider
|
||||
def name, do: @name
|
||||
|
||||
defp base_url do
|
||||
Pleroma.Config.get([__MODULE__, :base_url])
|
||||
end
|
||||
|
||||
defp engine do
|
||||
Pleroma.Config.get([__MODULE__, :engine])
|
||||
end
|
||||
end
|
||||
40 lib/pleroma/language/translation/provider.ex (new file)
@@ -0,0 +1,40 @@
|
|||
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.Language.Translation.Provider do
|
||||
alias Pleroma.Language.Translation.Provider
|
||||
|
||||
@callback missing_dependencies() :: [String.t()]
|
||||
|
||||
@callback configured?() :: boolean()
|
||||
|
||||
@callback translate(
|
||||
content :: String.t(),
|
||||
source_language :: String.t(),
|
||||
target_language :: String.t()
|
||||
) ::
|
||||
{:ok,
|
||||
%{
|
||||
content: String.t(),
|
||||
detected_source_language: String.t(),
|
||||
provider: String.t()
|
||||
}}
|
||||
| {:error, atom()}
|
||||
|
||||
@callback supported_languages(type :: :source | :target) ::
|
||||
{:ok, [String.t()]} | {:error, atom()}
|
||||
|
||||
@callback languages_matrix() :: {:ok, map()} | {:error, atom()}
|
||||
|
||||
@callback name() :: String.t()
|
||||
|
||||
defmacro __using__(_opts) do
|
||||
quote do
|
||||
@impl Provider
|
||||
def missing_dependencies, do: []
|
||||
|
||||
defoverridable missing_dependencies: 0
|
||||
end
|
||||
end
|
||||
end
|
||||
129 lib/pleroma/language/translation/translate_locally.ex (new file)
@@ -0,0 +1,129 @@
|
|||
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2024 Pleroma Authors <https://pleroma.social/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.Language.Translation.TranslateLocally do
|
||||
alias Pleroma.Language.Translation.Provider
|
||||
|
||||
use Provider
|
||||
|
||||
@behaviour Provider
|
||||
|
||||
@name "translateLocally"
|
||||
|
||||
@impl Provider
|
||||
def missing_dependencies do
|
||||
if Pleroma.Utils.command_available?("translateLocally") do
|
||||
[]
|
||||
else
|
||||
["translateLocally"]
|
||||
end
|
||||
end
|
||||
|
||||
@impl Provider
|
||||
def configured?, do: is_map(models())
|
||||
|
||||
@impl Provider
|
||||
def translate(content, source_language, target_language) do
|
||||
model =
|
||||
models()
|
||||
|> Map.get(source_language, %{})
|
||||
|> Map.get(target_language)
|
||||
|
||||
models =
|
||||
if model do
|
||||
[model]
|
||||
else
|
||||
[
|
||||
models()
|
||||
|> Map.get(source_language, %{})
|
||||
|> Map.get(intermediary_language()),
|
||||
models()
|
||||
|> Map.get(intermediary_language(), %{})
|
||||
|> Map.get(target_language)
|
||||
]
|
||||
end
|
||||
|
||||
translated_content =
|
||||
Enum.reduce(models, content, fn model, content ->
|
||||
text_path = Path.join(System.tmp_dir!(), "translateLocally-#{Ecto.UUID.generate()}")
|
||||
|
||||
File.write(text_path, content)
|
||||
|
||||
translated_content =
|
||||
case System.cmd("translateLocally", ["-m", model, "-i", text_path, "--html"]) do
|
||||
{content, _} -> content
|
||||
_ -> nil
|
||||
end
|
||||
|
||||
File.rm(text_path)
|
||||
|
||||
translated_content
|
||||
end)
|
||||
|
||||
{:ok,
|
||||
%{
|
||||
content: translated_content,
|
||||
detected_source_language: source_language,
|
||||
provider: @name
|
||||
}}
|
||||
end
|
||||
|
||||
@impl Provider
|
||||
def supported_languages(:source) do
|
||||
languages =
|
||||
languages_matrix()
|
||||
|> elem(1)
|
||||
|> Map.keys()
|
||||
|
||||
{:ok, languages}
|
||||
end
|
||||
|
||||
@impl Provider
|
||||
def supported_languages(:target) do
|
||||
languages =
|
||||
languages_matrix()
|
||||
|> elem(1)
|
||||
|> Map.values()
|
||||
|> List.flatten()
|
||||
|> Enum.uniq()
|
||||
|
||||
{:ok, languages}
|
||||
end
|
||||
|
||||
@impl Provider
|
||||
def languages_matrix do
|
||||
languages =
|
||||
models()
|
||||
|> Map.to_list()
|
||||
|> Enum.map(fn {key, value} -> {key, Map.keys(value)} end)
|
||||
|> Enum.into(%{})
|
||||
|
||||
matrix =
|
||||
if intermediary_language() do
|
||||
languages
|
||||
|> Map.to_list()
|
||||
|> Enum.map(fn {key, value} ->
|
||||
with_intermediary =
|
||||
(((value ++ languages[intermediary_language()])
|
||||
|> Enum.uniq()) --
|
||||
[key])
|
||||
|> Enum.sort()
|
||||
|
||||
{key, with_intermediary}
|
||||
end)
|
||||
|> Enum.into(%{})
|
||||
else
|
||||
languages
|
||||
end
|
||||
|
||||
{:ok, matrix}
|
||||
end
|
||||
|
||||
@impl Provider
|
||||
def name, do: @name
|
||||
|
||||
defp models, do: Pleroma.Config.get([__MODULE__, :models])
|
||||
|
||||
defp intermediary_language, do: Pleroma.Config.get([__MODULE__, :intermediary_language])
|
||||
end
|
||||
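A configuration sketch inferred from models/0 and intermediary_language/0 above; the language pairs and model identifiers are placeholders:

# config/config.exs (illustrative only)
config :pleroma, Pleroma.Language.Translation.TranslateLocally,
  models: %{
    "en" => %{"de" => "en-de-tiny", "fr" => "en-fr-tiny"},
    "de" => %{"en" => "de-en-tiny"},
    "fr" => %{"en" => "fr-en-tiny"}
  },
  intermediary_language: "en"

When a direct source-to-target model is missing, translate/3 above pivots through the intermediary language, so only the pairs into and out of "en" need dedicated models in this sketch.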
|
|
@ -575,6 +575,12 @@ defmodule Pleroma.ModerationLog do
|
|||
"@#{actor_nickname} requested account backup for @#{user_nickname}"
|
||||
end
|
||||
|
||||
def get_log_entry_message(%ModerationLog{data: data}) do
|
||||
actor_name = get_in(data, ["actor", "nickname"]) || "unknown"
|
||||
action = data["action"] || "unknown"
|
||||
"@#{actor_name} performed action #{action}"
|
||||
end
|
||||
|
||||
defp nicknames_to_string(nicknames) do
|
||||
nicknames
|
||||
|> Enum.map(&"@#{&1}")
|
||||
|
|
|
|||
|
|
@ -74,6 +74,7 @@ defmodule Pleroma.Notification do
|
|||
reblog
|
||||
poll
|
||||
status
|
||||
update
|
||||
}
|
||||
|
||||
def changeset(%Notification{} = notification, attrs) do
|
||||
|
|
@ -281,10 +282,15 @@ defmodule Pleroma.Notification do
|
|||
select: n.id
|
||||
)
|
||||
|
||||
Multi.new()
|
||||
|> Multi.update_all(:ids, query, set: [seen: true, updated_at: NaiveDateTime.utc_now()])
|
||||
|> Marker.multi_set_last_read_id(user, "notifications")
|
||||
|> Repo.transaction()
|
||||
{:ok, %{marker: marker}} =
|
||||
Multi.new()
|
||||
|> Multi.update_all(:ids, query, set: [seen: true, updated_at: NaiveDateTime.utc_now()])
|
||||
|> Marker.multi_set_last_read_id(user, "notifications")
|
||||
|> Repo.transaction()
|
||||
|
||||
Streamer.stream(["user", "user:notification"], marker)
|
||||
|
||||
{:ok, %{marker: marker}}
|
||||
end
|
||||
|
||||
@spec read_one(User.t(), String.t()) ::
|
||||
|
|
@ -525,9 +531,7 @@ defmodule Pleroma.Notification do
|
|||
%Activity{data: %{"type" => "Create"}} = activity,
|
||||
local_only
|
||||
) do
|
||||
notification_enabled_ap_ids =
|
||||
[]
|
||||
|> Utils.maybe_notify_subscribers(activity)
|
||||
notification_enabled_ap_ids = Utils.get_notified_subscribers(activity)
|
||||
|
||||
potential_receivers =
|
||||
User.get_users_from_set(notification_enabled_ap_ids, local_only: local_only)
|
||||
|
|
|
|||
|
|
@ -126,7 +126,7 @@ defmodule Pleroma.Object do
|
|||
Logger.debug("Backtrace: #{inspect(Process.info(:erlang.self(), :current_stacktrace))}")
|
||||
end
|
||||
|
||||
def normalize(_, options \\ [fetch: false, id_only: false])
|
||||
def normalize(_, options \\ [fetch: false])
|
||||
|
||||
# If we pass an Activity to Object.normalize(), we can try to use the preloaded object.
|
||||
# Use this whenever possible, especially when walking graphs in an O(N) loop!
|
||||
|
|
@ -155,9 +155,6 @@ defmodule Pleroma.Object do
|
|||
|
||||
def normalize(ap_id, options) when is_binary(ap_id) do
|
||||
cond do
|
||||
Keyword.get(options, :id_only) ->
|
||||
ap_id
|
||||
|
||||
Keyword.get(options, :fetch) ->
|
||||
case Fetcher.fetch_object_from_id(ap_id, options) do
|
||||
{:ok, object} -> object
|
||||
|
|
@ -401,28 +398,6 @@ defmodule Pleroma.Object do
|
|||
String.starts_with?(id, Pleroma.Web.Endpoint.url() <> "/")
|
||||
end
|
||||
|
||||
def replies(object, opts \\ []) do
|
||||
object = Object.normalize(object, fetch: false)
|
||||
|
||||
query =
|
||||
Object
|
||||
|> where(
|
||||
[o],
|
||||
fragment("(?)->>'inReplyTo' = ?", o.data, ^object.data["id"])
|
||||
)
|
||||
|> order_by([o], asc: o.id)
|
||||
|
||||
if opts[:self_only] do
|
||||
actor = object.data["actor"]
|
||||
where(query, [o], fragment("(?)->>'actor' = ?", o.data, ^actor))
|
||||
else
|
||||
query
|
||||
end
|
||||
end
|
||||
|
||||
def self_replies(object, opts \\ []),
|
||||
do: replies(object, Keyword.put(opts, :self_only, true))
|
||||
|
||||
def tags(%Object{data: %{"tag" => tags}}) when is_list(tags), do: tags
|
||||
|
||||
def tags(_), do: []
|
||||
|
|
|
|||
|
|
@ -4,7 +4,6 @@
|
|||
|
||||
defmodule Pleroma.Object.Fetcher do
|
||||
alias Pleroma.HTTP
|
||||
alias Pleroma.Instances
|
||||
alias Pleroma.Maps
|
||||
alias Pleroma.Object
|
||||
alias Pleroma.Object.Containment
|
||||
|
|
@ -19,8 +18,6 @@ defmodule Pleroma.Object.Fetcher do
|
|||
require Logger
|
||||
require Pleroma.Constants
|
||||
|
||||
@mix_env Mix.env()
|
||||
|
||||
@spec reinject_object(struct(), map()) :: {:ok, Object.t()} | {:error, any()}
|
||||
defp reinject_object(%Object{data: %{}} = object, new_data) do
|
||||
Logger.debug("Reinjecting object #{new_data["id"]}")
|
||||
|
|
@ -152,10 +149,6 @@ defmodule Pleroma.Object.Fetcher do
|
|||
{:ok, body} <- get_object(id),
|
||||
{:ok, data} <- safe_json_decode(body),
|
||||
:ok <- Containment.contain_origin_from_id(id, data) do
|
||||
if not Instances.reachable?(id) do
|
||||
Instances.set_reachable(id)
|
||||
end
|
||||
|
||||
{:ok, data}
|
||||
else
|
||||
{:scheme, _} ->
|
||||
|
|
@ -178,13 +171,8 @@ defmodule Pleroma.Object.Fetcher do
|
|||
def fetch_and_contain_remote_object_from_id(_id),
|
||||
do: {:error, "id must be a string"}
|
||||
|
||||
defp check_crossdomain_redirect(final_host, original_url)
|
||||
|
||||
# Handle the common case in tests where responses don't include URLs
|
||||
if @mix_env == :test do
|
||||
defp check_crossdomain_redirect(nil, _) do
|
||||
{:cross_domain_redirect, false}
|
||||
end
|
||||
defp check_crossdomain_redirect(final_host, _original_url) when is_nil(final_host) do
|
||||
{:cross_domain_redirect, false}
|
||||
end
|
||||
|
||||
defp check_crossdomain_redirect(final_host, original_url) do
|
||||
|
|
|
|||
|
|
@ -5,6 +5,7 @@
|
|||
defmodule Pleroma.Object.Updater do
|
||||
require Pleroma.Constants
|
||||
|
||||
alias Pleroma.Maps
|
||||
alias Pleroma.Object
|
||||
alias Pleroma.Repo
|
||||
|
||||
|
|
@ -115,6 +116,7 @@ defmodule Pleroma.Object.Updater do
|
|||
# Choices are the same, but counts are different
|
||||
to_be_updated
|
||||
|> Map.put(key, updated_object[key])
|
||||
|> Maps.put_if_present("votersCount", updated_object["votersCount"])
|
||||
else
|
||||
# Choices (or vote type) have changed, do not allow this
|
||||
_ -> to_be_updated
|
||||
|
|
|
|||
|
|
@ -1,28 +0,0 @@
|
|||
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.OTPVersion do
|
||||
@spec version() :: String.t() | nil
|
||||
def version do
|
||||
# OTP Version https://erlang.org/doc/system_principles/versions.html#otp-version
|
||||
[
|
||||
Path.join(:code.root_dir(), "OTP_VERSION"),
|
||||
Path.join([:code.root_dir(), "releases", :erlang.system_info(:otp_release), "OTP_VERSION"])
|
||||
]
|
||||
|> get_version_from_files()
|
||||
end
|
||||
|
||||
@spec get_version_from_files([Path.t()]) :: String.t() | nil
|
||||
def get_version_from_files([]), do: nil
|
||||
|
||||
def get_version_from_files([path | paths]) do
|
||||
if File.exists?(path) do
|
||||
path
|
||||
|> File.read!()
|
||||
|> String.replace(~r/\r|\n|\s/, "")
|
||||
else
|
||||
get_version_from_files(paths)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -95,13 +95,30 @@ defmodule Pleroma.Pagination do
|
|||
offset: :integer,
|
||||
limit: :integer,
|
||||
skip_extra_order: :boolean,
|
||||
skip_order: :boolean
|
||||
skip_order: :boolean,
|
||||
order_asc: :boolean
|
||||
}
|
||||
|
||||
changeset = cast({%{}, param_types}, params, Map.keys(param_types))
|
||||
changeset.changes
|
||||
end
|
||||
|
||||
defp order_statement(query, table_binding, :asc) do
|
||||
order_by(
|
||||
query,
|
||||
[{u, table_position(query, table_binding)}],
|
||||
fragment("? asc nulls last", u.id)
|
||||
)
|
||||
end
|
||||
|
||||
defp order_statement(query, table_binding, :desc) do
|
||||
order_by(
|
||||
query,
|
||||
[{u, table_position(query, table_binding)}],
|
||||
fragment("? desc nulls last", u.id)
|
||||
)
|
||||
end
|
||||
|
||||
defp restrict(query, :min_id, %{min_id: min_id}, table_binding) do
|
||||
where(query, [{q, table_position(query, table_binding)}], q.id > ^min_id)
|
||||
end
|
||||
|
|
@ -119,19 +136,16 @@ defmodule Pleroma.Pagination do
|
|||
defp restrict(%{order_bys: [_ | _]} = query, :order, %{skip_extra_order: true}, _), do: query
|
||||
|
||||
defp restrict(query, :order, %{min_id: _}, table_binding) do
|
||||
order_by(
|
||||
query,
|
||||
[{u, table_position(query, table_binding)}],
|
||||
fragment("? asc nulls last", u.id)
|
||||
)
|
||||
order_statement(query, table_binding, :asc)
|
||||
end
|
||||
|
||||
defp restrict(query, :order, _options, table_binding) do
|
||||
order_by(
|
||||
query,
|
||||
[{u, table_position(query, table_binding)}],
|
||||
fragment("? desc nulls last", u.id)
|
||||
)
|
||||
defp restrict(query, :order, %{max_id: _}, table_binding) do
|
||||
order_statement(query, table_binding, :desc)
|
||||
end
|
||||
|
||||
defp restrict(query, :order, options, table_binding) do
|
||||
dir = if options[:order_asc], do: :asc, else: :desc
|
||||
order_statement(query, table_binding, dir)
|
||||
end
|
||||
|
||||
defp restrict(query, :offset, %{offset: offset}, _table_binding) do
|
||||
|
|
@ -151,11 +165,9 @@ defmodule Pleroma.Pagination do
|
|||
|
||||
defp restrict(query, _, _, _), do: query
|
||||
|
||||
defp enforce_order(result, %{min_id: _}) do
|
||||
result
|
||||
|> Enum.reverse()
|
||||
end
|
||||
|
||||
defp enforce_order(result, %{min_id: _, order_asc: true}), do: result
|
||||
defp enforce_order(result, %{min_id: _}), do: Enum.reverse(result)
|
||||
defp enforce_order(result, %{max_id: _, order_asc: true}), do: Enum.reverse(result)
|
||||
defp enforce_order(result, _), do: result
|
||||
|
||||
defp table_position(%Ecto.Query{} = query, binding_name) do
|
||||
|
|
|
|||
|
|
@ -3,6 +3,8 @@
|
|||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.ReverseProxy do
|
||||
alias Pleroma.Utils.URIEncoding
|
||||
|
||||
@range_headers ~w(range if-range)
|
||||
@keep_req_headers ~w(accept accept-encoding cache-control if-modified-since) ++
|
||||
~w(if-unmodified-since if-none-match) ++ @range_headers
|
||||
|
|
@ -155,9 +157,12 @@ defmodule Pleroma.ReverseProxy do
|
|||
end
|
||||
|
||||
defp request(method, url, headers, opts) do
|
||||
Logger.debug("#{__MODULE__} #{method} #{url} #{inspect(headers)}")
|
||||
method = method |> String.downcase() |> String.to_existing_atom()
|
||||
|
||||
url = maybe_encode_url(url)
|
||||
|
||||
Logger.debug("#{__MODULE__} #{method} #{url} #{inspect(headers)}")
|
||||
|
||||
case client().request(method, url, headers, "", opts) do
|
||||
{:ok, code, headers, client} when code in @valid_resp_codes ->
|
||||
{:ok, code, downcase_headers(headers), client}
|
||||
|
|
@ -449,4 +454,18 @@ defmodule Pleroma.ReverseProxy do
|
|||
_ -> delete_resp_header(conn, "content-length")
|
||||
end
|
||||
end
|
||||
|
||||
# Only when Tesla adapter is Hackney or Finch does the URL
|
||||
# need encoding before Reverse Proxying as both end up
|
||||
# using the raw Hackney client and cannot leverage our
|
||||
# EncodeUrl Tesla middleware
|
||||
# Also do it for test environment
|
||||
defp maybe_encode_url(url) do
|
||||
case Application.get_env(:tesla, :adapter) do
|
||||
Tesla.Adapter.Hackney -> URIEncoding.encode_url(url)
|
||||
{Tesla.Adapter.Finch, _} -> URIEncoding.encode_url(url)
|
||||
Tesla.Mock -> URIEncoding.encode_url(url)
|
||||
_ -> url
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -7,6 +7,11 @@ defmodule Pleroma.ReverseProxy.Client.Hackney do
|
|||
|
||||
@impl true
|
||||
def request(method, url, headers, body, opts \\ []) do
|
||||
opts =
|
||||
Keyword.put_new(opts, :path_encode_fun, fn path ->
|
||||
path
|
||||
end)
|
||||
|
||||
:hackney.request(method, url, headers, body, opts)
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -56,10 +56,6 @@ defmodule Pleroma.SafeZip do
|
|||
{_, true} <- {:safe_path, safe_path?(path)} do
|
||||
{:cont, {:ok, maybe_add_file(type, path, fl)}}
|
||||
else
|
||||
{:get_type, e} ->
|
||||
{:halt,
|
||||
{:error, "Couldn't determine file type of ZIP entry at #{path} (#{inspect(e)})"}}
|
||||
|
||||
{:type, _} ->
|
||||
{:halt, {:error, "Potentially unsafe file type in ZIP at: #{path}"}}
|
||||
|
||||
|
|
|
|||
|
|
@ -102,7 +102,8 @@ defmodule Pleroma.Search.DatabaseSearch do
|
|||
^tsc,
|
||||
o.data,
|
||||
^search_query
|
||||
)
|
||||
),
|
||||
order_by: [desc: :inserted_at]
|
||||
)
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -157,26 +157,55 @@ defmodule Pleroma.Search.QdrantSearch do
|
|||
end
|
||||
|
||||
defmodule Pleroma.Search.QdrantSearch.OpenAIClient do
|
||||
use Tesla
|
||||
alias Pleroma.Config.Getting, as: Config
|
||||
|
||||
plug(Tesla.Middleware.BaseUrl, Config.get([Pleroma.Search.QdrantSearch, :openai_url]))
|
||||
plug(Tesla.Middleware.JSON)
|
||||
def post(path, body) do
|
||||
Tesla.post(client(), path, body)
|
||||
end
|
||||
|
||||
plug(Tesla.Middleware.Headers, [
|
||||
{"Authorization",
|
||||
"Bearer #{Pleroma.Config.get([Pleroma.Search.QdrantSearch, :openai_api_key])}"}
|
||||
])
|
||||
defp client do
|
||||
Tesla.client(middleware())
|
||||
end
|
||||
|
||||
defp middleware do
|
||||
[
|
||||
{Tesla.Middleware.BaseUrl, Config.get([Pleroma.Search.QdrantSearch, :openai_url])},
|
||||
Tesla.Middleware.JSON,
|
||||
{Tesla.Middleware.Headers,
|
||||
[
|
||||
{"Authorization", "Bearer #{Config.get([Pleroma.Search.QdrantSearch, :openai_api_key])}"}
|
||||
]}
|
||||
]
|
||||
end
|
||||
end
|
||||
|
||||
defmodule Pleroma.Search.QdrantSearch.QdrantClient do
|
||||
use Tesla
|
||||
alias Pleroma.Config.Getting, as: Config
|
||||
|
||||
plug(Tesla.Middleware.BaseUrl, Config.get([Pleroma.Search.QdrantSearch, :qdrant_url]))
|
||||
plug(Tesla.Middleware.JSON)
|
||||
def delete(path) do
|
||||
Tesla.delete(client(), path)
|
||||
end
|
||||
|
||||
plug(Tesla.Middleware.Headers, [
|
||||
{"api-key", Pleroma.Config.get([Pleroma.Search.QdrantSearch, :qdrant_api_key])}
|
||||
])
|
||||
def post(path, body) do
|
||||
Tesla.post(client(), path, body)
|
||||
end
|
||||
|
||||
def put(path, body) do
|
||||
Tesla.put(client(), path, body)
|
||||
end
|
||||
|
||||
defp client do
|
||||
Tesla.client(middleware())
|
||||
end
|
||||
|
||||
defp middleware do
|
||||
[
|
||||
{Tesla.Middleware.BaseUrl, Config.get([Pleroma.Search.QdrantSearch, :qdrant_url])},
|
||||
Tesla.Middleware.JSON,
|
||||
{Tesla.Middleware.Headers,
|
||||
[
|
||||
{"api-key", Pleroma.Config.get([Pleroma.Search.QdrantSearch, :qdrant_api_key])}
|
||||
]}
|
||||
]
|
||||
end
|
||||
end
|
||||
|
|
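The refactor above replaces compile-time plug/1 middleware with a Tesla client built per call, so the base URL, headers and API keys are read from config at request time (which also lets tests swap the Pleroma.Config.Getting implementation). A standalone sketch of that pattern; the module and config names are placeholders:

defmodule RuntimeTeslaClientSketch do
  # Each request builds a fresh client, picking up current config values.
  def get(path) do
    Tesla.get(client(), path)
  end

  defp client do
    Tesla.client([
      {Tesla.Middleware.BaseUrl, Application.get_env(:my_app, :api_url, "https://api.example.com")},
      Tesla.Middleware.JSON,
      {Tesla.Middleware.Headers,
       [{"authorization", "Bearer " <> Application.get_env(:my_app, :api_key, "")}]}
    ])
  end
end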
|
|||
|
|
@ -3,6 +3,7 @@
|
|||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.Signature do
|
||||
@behaviour Pleroma.Signature.API
|
||||
@behaviour HTTPSignatures.Adapter
|
||||
|
||||
alias Pleroma.EctoType.ActivityPub.ObjectValidators
|
||||
|
|
@ -53,7 +54,7 @@ defmodule Pleroma.Signature do
|
|||
|
||||
def fetch_public_key(conn) do
|
||||
with {:ok, actor_id} <- get_actor_id(conn),
|
||||
{:ok, public_key} <- User.get_public_key_for_ap_id(actor_id) do
|
||||
{:ok, public_key} <- User.get_or_fetch_public_key_for_ap_id(actor_id) do
|
||||
{:ok, public_key}
|
||||
else
|
||||
e ->
|
||||
|
|
|
|||
14 lib/pleroma/signature/api.ex (new file)
@@ -0,0 +1,14 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Signature.API do
  @moduledoc """
  Behaviour for signing requests and producing HTTP Date headers.

  This is used to allow tests to replace the signing implementation with Mox.
  """

  @callback sign(user :: Pleroma.User.t(), headers :: map()) :: String.t()
  @callback signed_date() :: String.t()
end
29 lib/pleroma/tesla/middleware/encode_url.ex (new file)
@@ -0,0 +1,29 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2025 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Tesla.Middleware.EncodeUrl do
  @moduledoc """
  Middleware to encode URLs properly.

  We must decode and then re-encode to ensure correct encoding.
  If we only encode, each % is re-encoded as %25, so a space
  already encoded as %20 becomes %2520.

  A similar problem exists for query parameters, which need spaces encoded as the + character.
  """

  @behaviour Tesla.Middleware

  @impl Tesla.Middleware
  def call(%Tesla.Env{url: url} = env, next, _) do
    url = Pleroma.Utils.URIEncoding.encode_url(url)

    env = %{env | url: url}

    case Tesla.run(env, next) do
      {:ok, env} -> {:ok, env}
      err -> err
    end
  end
end
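The double-encoding pitfall the moduledoc describes can be reproduced with Elixir's standard URI module alone; a small illustration (not Pleroma code):

already_encoded = "some%20file.png"

# Encoding the raw string again escapes the existing "%" as "%25":
URI.encode(already_encoded, &URI.char_unreserved?/1)
#=> "some%2520file.png"

# Decoding first restores the literal space, so re-encoding yields "%20" only once:
already_encoded |> URI.decode() |> URI.encode(&URI.char_unreserved?/1)
#=> "some%20file.png"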
|
@ -35,6 +35,7 @@ defmodule Pleroma.Upload do
|
|||
"""
|
||||
alias Ecto.UUID
|
||||
alias Pleroma.Maps
|
||||
alias Pleroma.Utils.URIEncoding
|
||||
alias Pleroma.Web.ActivityPub.Utils
|
||||
require Logger
|
||||
|
||||
|
|
@ -230,11 +231,18 @@ defmodule Pleroma.Upload do
|
|||
tmp_path
|
||||
end
|
||||
|
||||
# Encoding the whole path here is fine since the path is in a
|
||||
# UUID/<file name> form.
|
||||
# The file at this point isn't %-encoded, so the path shouldn't
|
||||
# be decoded first like Pleroma.Utils.URIEncoding.encode_url/1 does.
|
||||
defp url_from_spec(%__MODULE__{name: name}, base_url, {:file, path}) do
|
||||
encode_opts = [bypass_decode: true, bypass_parse: true]
|
||||
|
||||
path =
|
||||
URI.encode(path, &char_unescaped?/1) <>
|
||||
URIEncoding.encode_url(path, encode_opts) <>
|
||||
if Pleroma.Config.get([__MODULE__, :link_name], false) do
|
||||
"?name=#{URI.encode(name, &char_unescaped?/1)}"
|
||||
enum = %{name: name}
|
||||
"?#{URI.encode_query(enum)}"
|
||||
else
|
||||
""
|
||||
end
|
||||
|
|
|
|||
|
|
@ -19,7 +19,7 @@ defmodule Pleroma.Uploaders.Local do
|
|||
|
||||
[file | folders] ->
|
||||
path = Path.join([upload_path()] ++ Enum.reverse(folders))
|
||||
File.mkdir_p!(path)
|
||||
Pleroma.Backports.mkdir_p!(path)
|
||||
{path, file}
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -150,7 +150,7 @@ defmodule Pleroma.User do
|
|||
field(:allow_following_move, :boolean, default: true)
|
||||
field(:skip_thread_containment, :boolean, default: false)
|
||||
field(:actor_type, :string, default: "Person")
|
||||
field(:also_known_as, {:array, ObjectValidators.ObjectID}, default: [])
|
||||
field(:also_known_as, {:array, ObjectValidators.BareUri}, default: [])
|
||||
field(:inbox, :string)
|
||||
field(:shared_inbox, :string)
|
||||
field(:accepts_chat_messages, :boolean, default: nil)
|
||||
|
|
@ -233,8 +233,8 @@ defmodule Pleroma.User do
|
|||
for {_relationship_type, [{_outgoing_relation, outgoing_relation_target}, _]} <-
|
||||
@user_relationships_config do
|
||||
# `def blocked_users_relation/2`, `def muted_users_relation/2`,
|
||||
# `def reblog_muted_users_relation/2`, `def notification_muted_users/2`,
|
||||
# `def subscriber_users/2`, `def endorsed_users_relation/2`
|
||||
# `def reblog_muted_users_relation/2`, `def notification_muted_users_relation/2`,
|
||||
# `def subscriber_users_relation/2`, `def endorsed_users_relation/2`
|
||||
def unquote(:"#{outgoing_relation_target}_relation")(user, restrict_deactivated? \\ false) do
|
||||
target_users_query = assoc(user, unquote(outgoing_relation_target))
|
||||
|
||||
|
|
@ -288,6 +288,7 @@ defmodule Pleroma.User do
|
|||
defdelegate following?(follower, followed), to: FollowingRelationship
|
||||
defdelegate following_ap_ids(user), to: FollowingRelationship
|
||||
defdelegate get_follow_requests(user), to: FollowingRelationship
|
||||
defdelegate get_outgoing_follow_requests(user), to: FollowingRelationship
|
||||
defdelegate search(query, opts \\ []), to: User.Search
|
||||
|
||||
@doc """
|
||||
|
|
@ -308,7 +309,7 @@ defmodule Pleroma.User do
|
|||
|
||||
def binary_id(%User{} = user), do: binary_id(user.id)
|
||||
|
||||
@doc "Returns status account"
|
||||
@doc "Returns account status"
|
||||
@spec account_status(User.t()) :: account_status()
|
||||
def account_status(%User{is_active: false}), do: :deactivated
|
||||
def account_status(%User{password_reset_pending: true}), do: :password_reset_pending
|
||||
|
|
@ -801,13 +802,6 @@ defmodule Pleroma.User do
|
|||
when is_nil(password) do
|
||||
params = Map.put_new(params, :accepts_chat_messages, true)
|
||||
|
||||
params =
|
||||
if Map.has_key?(params, :email) do
|
||||
Map.put_new(params, :email, params[:email])
|
||||
else
|
||||
params
|
||||
end
|
||||
|
||||
struct
|
||||
|> cast(params, [
|
||||
:name,
|
||||
|
|
@ -895,7 +889,7 @@ defmodule Pleroma.User do
|
|||
end)
|
||||
end
|
||||
|
||||
def validate_email_not_in_blacklisted_domain(changeset, field) do
|
||||
defp validate_email_not_in_blacklisted_domain(changeset, field) do
|
||||
validate_change(changeset, field, fn _, value ->
|
||||
valid? =
|
||||
Config.get([User, :email_blacklist])
|
||||
|
|
@ -912,9 +906,9 @@ defmodule Pleroma.User do
|
|||
end)
|
||||
end
|
||||
|
||||
def maybe_validate_required_email(changeset, true), do: changeset
|
||||
defp maybe_validate_required_email(changeset, true), do: changeset
|
||||
|
||||
def maybe_validate_required_email(changeset, _) do
|
||||
defp maybe_validate_required_email(changeset, _) do
|
||||
if Config.get([:instance, :account_activation_required]) do
|
||||
validate_required(changeset, [:email])
|
||||
else
|
||||
|
|
@ -1109,15 +1103,15 @@ defmodule Pleroma.User do
|
|||
|
||||
defp maybe_send_registration_email(_), do: {:ok, :noop}
|
||||
|
||||
def needs_update?(%User{local: true}), do: false
|
||||
defp needs_update?(%User{local: true}), do: false
|
||||
|
||||
def needs_update?(%User{local: false, last_refreshed_at: nil}), do: true
|
||||
defp needs_update?(%User{local: false, last_refreshed_at: nil}), do: true
|
||||
|
||||
def needs_update?(%User{local: false} = user) do
|
||||
defp needs_update?(%User{local: false} = user) do
|
||||
NaiveDateTime.diff(NaiveDateTime.utc_now(), user.last_refreshed_at) >= 86_400
|
||||
end
|
||||
|
||||
def needs_update?(_), do: true
|
||||
defp needs_update?(_), do: true
|
||||
|
||||
@spec maybe_direct_follow(User.t(), User.t()) ::
|
||||
{:ok, User.t(), User.t()} | {:error, String.t()}
|
||||
|
|
@ -1364,7 +1358,7 @@ defmodule Pleroma.User do
|
|||
@spec get_by_nickname(String.t()) :: User.t() | nil
|
||||
def get_by_nickname(nickname) do
|
||||
Repo.get_by(User, nickname: nickname) ||
|
||||
if Regex.match?(~r(@#{Pleroma.Web.Endpoint.host()})i, nickname) do
|
||||
if Regex.match?(~r(@#{Pleroma.Web.Endpoint.host()}$)i, nickname) do
|
||||
Repo.get_by(User, nickname: local_nickname(nickname))
|
||||
end
|
||||
end
|
||||
|
|
@ -1708,7 +1702,9 @@ defmodule Pleroma.User do
|
|||
end
|
||||
end
|
||||
|
||||
def block(%User{} = blocker, %User{} = blocked) do
|
||||
def block(blocker, blocked, params \\ %{})
|
||||
|
||||
def block(%User{} = blocker, %User{} = blocked, params) do
|
||||
# sever any follow relationships to prevent leaks per activitypub (Pleroma issue #213)
|
||||
blocker =
|
||||
if following?(blocker, blocked) do
|
||||
|
|
@ -1738,12 +1734,33 @@ defmodule Pleroma.User do
|
|||
|
||||
{:ok, blocker} = update_follower_count(blocker)
|
||||
{:ok, blocker, _} = Participation.mark_all_as_read(blocker, blocked)
|
||||
add_to_block(blocker, blocked)
|
||||
|
||||
duration = Map.get(params, :duration, 0)
|
||||
|
||||
expires_at =
|
||||
if duration > 0 do
|
||||
DateTime.utc_now()
|
||||
|> DateTime.add(duration)
|
||||
else
|
||||
nil
|
||||
end
|
||||
|
||||
user_block = add_to_block(blocker, blocked, expires_at)
|
||||
|
||||
if duration > 0 do
|
||||
Pleroma.Workers.MuteExpireWorker.new(
|
||||
%{"op" => "unblock_user", "blocker_id" => blocker.id, "blocked_id" => blocked.id},
|
||||
scheduled_at: expires_at
|
||||
)
|
||||
|> Oban.insert()
|
||||
end
|
||||
|
||||
user_block
|
||||
end
|
||||
|
||||
# helper to handle the block given only an actor's AP id
|
||||
def block(%User{} = blocker, %{ap_id: ap_id}) do
|
||||
block(blocker, get_cached_by_ap_id(ap_id))
|
||||
def block(%User{} = blocker, %{ap_id: ap_id}, params) do
|
||||
block(blocker, get_cached_by_ap_id(ap_id), params)
|
||||
end
|
||||
|
||||
def unblock(%User{} = blocker, %User{} = blocked) do
|
||||
|
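A usage sketch for the extended block/3 above, assuming blocker and blocked are loaded %User{} structs; the duration is in seconds, matching DateTime.add/2's default unit:

# Permanent block (same behaviour as the old two-argument block):
Pleroma.User.block(blocker, blocked)

# Time-limited block: expires after 24 hours via the "unblock_user"
# MuteExpireWorker job scheduled at expires_at.
Pleroma.User.block(blocker, blocked, %{duration: 86_400})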
|
@ -1984,7 +2001,7 @@ defmodule Pleroma.User do
|
|||
end
|
||||
|
||||
@spec purge_user_changeset(User.t()) :: Ecto.Changeset.t()
|
||||
def purge_user_changeset(user) do
|
||||
defp purge_user_changeset(user) do
|
||||
# "Right to be forgotten"
|
||||
# https://gdpr.eu/right-to-be-forgotten/
|
||||
change(user, %{
|
||||
|
|
@ -2156,7 +2173,7 @@ defmodule Pleroma.User do
|
|||
Repo.all(query)
|
||||
end
|
||||
|
||||
def delete_notifications_from_user_activities(%User{ap_id: ap_id}) do
|
||||
defp delete_notifications_from_user_activities(%User{ap_id: ap_id}) do
|
||||
Notification
|
||||
|> join(:inner, [n], activity in assoc(n, :activity))
|
||||
|> where([n, a], fragment("? = ?", a.actor, ^ap_id))
|
||||
|
|
@ -2291,6 +2308,15 @@ defmodule Pleroma.User do
|
|||
|
||||
def public_key(_), do: {:error, "key not found"}
|
||||
|
||||
def get_or_fetch_public_key_for_ap_id(ap_id) do
|
||||
with {:ok, %User{} = user} <- get_or_fetch_by_ap_id(ap_id),
|
||||
{:ok, public_key} <- public_key(user) do
|
||||
{:ok, public_key}
|
||||
else
|
||||
_ -> :error
|
||||
end
|
||||
end
|
||||
|
||||
def get_public_key_for_ap_id(ap_id) do
|
||||
with %User{} = user <- get_cached_by_ap_id(ap_id),
|
||||
{:ok, public_key} <- public_key(user) do
|
||||
|
|
@ -2615,7 +2641,7 @@ defmodule Pleroma.User do
|
|||
end
|
||||
end
|
||||
|
||||
# Internal function; public one is `deactivate/2`
|
||||
# Internal function; public one is `set_activation/2`
|
||||
defp set_activation_status(user, status) do
|
||||
user
|
||||
|> cast(%{is_active: status}, [:is_active])
|
||||
|
|
@ -2634,7 +2660,7 @@ defmodule Pleroma.User do
|
|||
|> update_and_set_cache()
|
||||
end
|
||||
|
||||
def validate_fields(changeset, remote? \\ false) do
|
||||
defp validate_fields(changeset, remote?) do
|
||||
limit_name = if remote?, do: :max_remote_account_fields, else: :max_account_fields
|
||||
limit = Config.get([:instance, limit_name], 0)
|
||||
|
||||
|
|
@ -2779,10 +2805,10 @@ defmodule Pleroma.User do
|
|||
set_domain_blocks(user, List.delete(user.domain_blocks, domain_blocked))
|
||||
end
|
||||
|
||||
@spec add_to_block(User.t(), User.t()) ::
|
||||
@spec add_to_block(User.t(), User.t(), integer() | nil) ::
|
||||
{:ok, UserRelationship.t()} | {:error, Ecto.Changeset.t()}
|
||||
defp add_to_block(%User{} = user, %User{} = blocked) do
|
||||
with {:ok, relationship} <- UserRelationship.create_block(user, blocked) do
|
||||
defp add_to_block(%User{} = user, %User{} = blocked, expires_at) do
|
||||
with {:ok, relationship} <- UserRelationship.create_block(user, blocked, expires_at) do
|
||||
@cachex.del(:user_cache, "blocked_users_ap_ids:#{user.ap_id}")
|
||||
{:ok, relationship}
|
||||
end
|
||||
|
|
|
|||
|
|
@ -193,7 +193,7 @@ defmodule Pleroma.User.Backup do
|
|||
backup = Repo.preload(backup, :user)
|
||||
tempfile = Path.join([backup.tempdir, backup.file_name])
|
||||
|
||||
with {_, :ok} <- {:mkdir, File.mkdir_p(backup.tempdir)},
|
||||
with {_, :ok} <- {:mkdir, Pleroma.Backports.mkdir_p(backup.tempdir)},
|
||||
{_, :ok} <- {:actor, actor(backup.tempdir, backup.user)},
|
||||
{_, :ok} <- {:statuses, statuses(backup.tempdir, backup.user)},
|
||||
{_, :ok} <- {:likes, likes(backup.tempdir, backup.user)},
|
||||
|
|
|
|||
|
|
@ -16,6 +16,7 @@ defmodule Pleroma.User.Search do
|
|||
following = Keyword.get(opts, :following, false)
|
||||
result_limit = Keyword.get(opts, :limit, @limit)
|
||||
offset = Keyword.get(opts, :offset, 0)
|
||||
capabilities = Keyword.get(opts, :capabilities, [])
|
||||
|
||||
for_user = Keyword.get(opts, :for_user)
|
||||
|
||||
|
|
@ -32,7 +33,7 @@ defmodule Pleroma.User.Search do
|
|||
|
||||
results =
|
||||
query_string
|
||||
|> search_query(for_user, following, top_user_ids)
|
||||
|> search_query(for_user, following, top_user_ids, capabilities)
|
||||
|> Pagination.fetch_paginated(%{"offset" => offset, "limit" => result_limit}, :offset)
|
||||
|
||||
results
|
||||
|
|
@ -80,7 +81,7 @@ defmodule Pleroma.User.Search do
|
|||
end
|
||||
end
|
||||
|
||||
defp search_query(query_string, for_user, following, top_user_ids) do
|
||||
defp search_query(query_string, for_user, following, top_user_ids, capabilities) do
|
||||
for_user
|
||||
|> base_query(following)
|
||||
|> filter_blocked_user(for_user)
|
||||
|
|
@ -94,6 +95,7 @@ defmodule Pleroma.User.Search do
|
|||
|> subquery()
|
||||
|> order_by(desc: :search_rank)
|
||||
|> maybe_restrict_local(for_user)
|
||||
|> maybe_restrict_accepting_chat_messages(capabilities)
|
||||
|> filter_deactivated_users()
|
||||
end
|
||||
|
||||
|
|
@ -214,6 +216,14 @@ defmodule Pleroma.User.Search do
|
|||
end
|
||||
end
|
||||
|
||||
defp maybe_restrict_accepting_chat_messages(query, capabilities) do
|
||||
if "accepts_chat_messages" in capabilities do
|
||||
from(q in query, where: q.accepts_chat_messages == true)
|
||||
else
|
||||
query
|
||||
end
|
||||
end
|
||||
|
||||
defp limit, do: Pleroma.Config.get([:instance, :limit_to_local_content], :unauthenticated)
|
||||
|
||||
defp restrict_local(q), do: where(q, [u], u.local == true)
|
||||
|
|
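With the new :capabilities option threaded through search_query/5 above, callers can restrict results to accounts that accept chat messages. A hedged usage sketch via the User.search/2 delegate shown earlier in this diff; viewer is an assumed %Pleroma.User{}:

Pleroma.User.search("alice",
  for_user: viewer,
  capabilities: ["accepts_chat_messages"]
)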
|
|||
|
|
@ -193,7 +193,8 @@ defmodule Pleroma.UserRelationship do
|
|||
{[:mute], []}
|
||||
|
||||
nil ->
|
||||
{[:block, :mute, :notification_mute, :reblog_mute], [:block, :inverse_subscription]}
|
||||
{[:block, :mute, :notification_mute, :reblog_mute, :endorsement],
|
||||
[:block, :inverse_subscription]}
|
||||
|
||||
unknown ->
|
||||
raise "Unsupported :subset option value: #{inspect(unknown)}"
|
||||
|
|
|
|||
142 lib/pleroma/utils/uri_encoding.ex (new file)
@@ -0,0 +1,142 @@
|
|||
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2025 Pleroma Authors <https://pleroma.social/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.Utils.URIEncoding do
|
||||
@moduledoc """
|
||||
Utility functions for dealing with URI encoding of paths and queries
|
||||
with support for query-encoding quirks.
|
||||
"""
|
||||
require Pleroma.Constants
|
||||
|
||||
# We don't always want to decode the path first, like is the case in
|
||||
# Pleroma.Upload.url_from_spec/3.
|
||||
@doc """
|
||||
Wraps URI encoding/decoding functions from Elixir's standard library to fix usually unintended side-effects.
|
||||
|
||||
Supports two URL processing options in the optional 2nd argument with the default being `false`:
|
||||
|
||||
* `bypass_parse` - Bypasses `URI.parse` stage, useful when it's not desirable to parse to URL first
|
||||
before encoding it. Supports only encoding as the Path segment of a URI.
|
||||
* `bypass_decode` - Bypasses `URI.decode` stage for the Path segment of a URI. Used when a URL
|
||||
has to be double %-encoded for internal reasons.
|
||||
|
||||
Options must be specified as a Keyword with tuples with booleans, otherwise
|
||||
`{:error, :invalid_opts}` is returned. Example:
|
||||
`encode_url(url, [bypass_parse: true, bypass_decode: true])`
|
||||
"""
|
||||
@spec encode_url(String.t(), Keyword.t()) :: String.t() | {:error, :invalid_opts}
|
||||
def encode_url(url, opts \\ []) when is_binary(url) and is_list(opts) do
|
||||
bypass_parse = Keyword.get(opts, :bypass_parse, false)
|
||||
bypass_decode = Keyword.get(opts, :bypass_decode, false)
|
||||
|
||||
with true <- is_boolean(bypass_parse),
|
||||
true <- is_boolean(bypass_decode) do
|
||||
cond do
|
||||
bypass_parse ->
|
||||
encode_path(url, bypass_decode)
|
||||
|
||||
true ->
|
||||
URI.parse(url)
|
||||
|> then(fn parsed ->
|
||||
path = encode_path(parsed.path, bypass_decode)
|
||||
|
||||
query = encode_query(parsed.query, parsed.host)
|
||||
|
||||
%{parsed | path: path, query: query}
|
||||
end)
|
||||
|> URI.to_string()
|
||||
end
|
||||
else
|
||||
_ -> {:error, :invalid_opts}
|
||||
end
|
||||
end
|
||||
|
||||
defp encode_path(nil, _bypass_decode), do: nil
|
||||
|
||||
# URI.encode/2 deliberately does not encode all chars that are forbidden
|
||||
# in the path component of a URI. It only encodes chars that are forbidden
|
||||
# in the whole URI. A predicate in the 2nd argument is used to fix that here.
|
||||
# URI.encode/2 uses the predicate function to determine whether each byte
|
||||
# (in an integer representation) should be encoded or not.
|
||||
defp encode_path(path, bypass_decode) when is_binary(path) do
|
||||
path =
|
||||
cond do
|
||||
bypass_decode ->
|
||||
path
|
||||
|
||||
true ->
|
||||
URI.decode(path)
|
||||
end
|
||||
|
||||
path
|
||||
|> URI.encode(fn byte ->
|
||||
URI.char_unreserved?(byte) ||
|
||||
Enum.any?(
|
||||
Pleroma.Constants.uri_path_allowed_reserved_chars(),
|
||||
fn char ->
|
||||
char == byte
|
||||
end
|
||||
)
|
||||
end)
|
||||
end
|
||||
|
||||
# Order of kv pairs in query is not preserved when using URI.decode_query.
|
||||
# URI.query_decoder/2 returns a stream which so far appears to not change order.
|
||||
# Immediately switch to a list to prevent breakage for sites that expect
|
||||
# the order of query keys to always be the same.
|
||||
defp encode_query(query, host) when is_binary(query) do
|
||||
query
|
||||
|> URI.query_decoder()
|
||||
|> Enum.to_list()
|
||||
|> do_encode_query(host)
|
||||
end
|
||||
|
||||
defp encode_query(nil, _), do: nil
|
||||
|
||||
# Always uses www_form encoding.
|
||||
# Taken from Elixir's URI module.
|
||||
defp do_encode_query(enumerable, host) do
|
||||
Enum.map_join(enumerable, "&", &maybe_apply_query_quirk(&1, host))
|
||||
end
|
||||
|
||||
# https://git.pleroma.social/pleroma/pleroma/-/issues/1055
|
||||
defp maybe_apply_query_quirk({key, value}, "i.guim.co.uk" = _host) do
|
||||
case key do
|
||||
"precrop" ->
|
||||
query_encode_kv_pair({key, value}, ~c":,")
|
||||
|
||||
key ->
|
||||
query_encode_kv_pair({key, value})
|
||||
end
|
||||
end
|
||||
|
||||
defp maybe_apply_query_quirk({key, value}, _), do: query_encode_kv_pair({key, value})
|
||||
|
||||
# Taken from Elixir's URI module and modified to support quirks.
|
||||
defp query_encode_kv_pair({key, value}, rules \\ []) when is_list(rules) do
|
||||
cond do
|
||||
length(rules) > 0 ->
|
||||
# URI.encode_query/2 does not appear to follow the spec and encodes all parts
|
||||
# of our URI path Constant. This appears to work outside of edge-cases
|
||||
# like The Guardian rich media cards, keeping the behavior the same as with
|
||||
# URI.encode_query/2 unless otherwise specified via rules.
|
||||
(URI.encode_www_form(Kernel.to_string(key)) <>
|
||||
"=" <>
|
||||
URI.encode(value, fn byte ->
|
||||
URI.char_unreserved?(byte) ||
|
||||
Enum.any?(
|
||||
rules,
|
||||
fn char ->
|
||||
char == byte
|
||||
end
|
||||
)
|
||||
end))
|
||||
|> String.replace("%20", "+")
|
||||
|
||||
true ->
|
||||
URI.encode_www_form(Kernel.to_string(key)) <>
|
||||
"=" <> URI.encode_www_form(Kernel.to_string(value))
|
||||
end
|
||||
end
|
||||
end
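For reference, a minimal usage sketch of encode_url/2 under the options documented above; the URLs and the expected outputs are illustrative assumptions, not verified results.

alias Pleroma.Utils.URIEncoding

# Default: parse the URL, decode the path once, then re-encode path and query.
URIEncoding.encode_url("https://example.com/a b/c?x=1 2")
# => "https://example.com/a%20b/c?x=1+2" (illustrative)

# Treat the input as a bare path segment: skip URI.parse and skip the initial
# decode, so existing %-escapes get double-encoded (the stated use case).
URIEncoding.encode_url("a%20b/c", bypass_parse: true, bypass_decode: true)

# Non-boolean option values are rejected.
URIEncoding.encode_url("https://example.com/", bypass_parse: "yes")
# => {:error, :invalid_opts}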
|
||||
|
|
@ -414,10 +414,8 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
|
|||
|
||||
with flag_data <- make_flag_data(params, additional),
|
||||
{:ok, activity} <- insert(flag_data, local),
|
||||
{:ok, stripped_activity} <- strip_report_status_data(activity),
|
||||
_ <- notify_and_stream(activity),
|
||||
:ok <-
|
||||
maybe_federate(stripped_activity) do
|
||||
:ok <- maybe_federate(activity) do
|
||||
User.all_users_with_privilege(:reports_manage_reports)
|
||||
|> Enum.filter(fn user -> user.ap_id != actor end)
|
||||
|> Enum.filter(fn user -> not is_nil(user.email) end)
|
||||
|
|
@ -501,6 +499,28 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
|
|||
|> Repo.all()
|
||||
end
|
||||
|
||||
def fetch_objects_for_replies_collection(parent_ap_id, opts \\ %{}) do
|
||||
opts =
|
||||
opts
|
||||
|> Map.put(:order_asc, true)
|
||||
|> Map.put(:id_type, :integer)
|
||||
|
||||
from(o in Object,
|
||||
where:
|
||||
fragment("?->>'inReplyTo' = ?", o.data, ^parent_ap_id) and
|
||||
fragment(
|
||||
"(?->'to' \\? ?::text OR ?->'cc' \\? ?::text)",
|
||||
o.data,
|
||||
^Pleroma.Constants.as_public(),
|
||||
o.data,
|
||||
^Pleroma.Constants.as_public()
|
||||
) and
|
||||
fragment("?->>'type' <> 'Answer'", o.data),
|
||||
select: %{id: o.id, ap_id: fragment("?->>'id'", o.data)}
|
||||
)
|
||||
|> Pagination.fetch_paginated(opts, :keyset)
|
||||
end
|
||||
|
||||
@spec fetch_latest_direct_activity_id_for_context(String.t(), keyword() | map()) ::
|
||||
Ecto.UUID.t() | nil
|
||||
def fetch_latest_direct_activity_id_for_context(context, opts \\ %{}) do
|
||||
|
|
@ -1065,6 +1085,10 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
|
|||
from(activity in query, where: fragment("?->>'type' != 'Announce'", activity.data))
|
||||
end
|
||||
|
||||
defp restrict_reblogs(query, %{only_reblogs: true}) do
|
||||
from(activity in query, where: fragment("?->>'type' = 'Announce'", activity.data))
|
||||
end
|
||||
|
||||
defp restrict_reblogs(query, _), do: query
|
||||
|
||||
defp restrict_muted(query, %{with_muted: true}), do: query
|
||||
|
|
@ -1567,7 +1591,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
|
|||
|
||||
defp get_actor_url(_url), do: nil
|
||||
|
||||
defp normalize_image(%{"url" => url} = data) do
|
||||
defp normalize_image(%{"url" => url} = data) when is_binary(url) do
|
||||
%{
|
||||
"type" => "Image",
|
||||
"url" => [%{"href" => url}]
|
||||
|
|
@ -1575,6 +1599,13 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
|
|||
|> maybe_put_description(data)
|
||||
end
|
||||
|
||||
defp normalize_image(%{"url" => urls}) when is_list(urls) do
|
||||
url = urls |> List.first()
|
||||
|
||||
%{"url" => url}
|
||||
|> normalize_image()
|
||||
end
|
||||
|
||||
defp normalize_image(urls) when is_list(urls), do: urls |> List.first() |> normalize_image()
|
||||
defp normalize_image(_), do: nil
|
||||
|
||||
|
|
|
|||
|
|
@ -31,6 +31,8 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
|
|||
|
||||
@federating_only_actions [:internal_fetch, :relay, :relay_following, :relay_followers]
|
||||
|
||||
@object_replies_known_param_keys ["page", "min_id", "max_id", "since_id", "limit"]
|
||||
|
||||
plug(FederatingPlug when action in @federating_only_actions)
|
||||
|
||||
plug(
|
||||
|
|
@ -53,7 +55,6 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
|
|||
)
|
||||
|
||||
plug(:log_inbox_metadata when action in [:inbox])
|
||||
plug(:set_requester_reachable when action in [:inbox])
|
||||
plug(:relay_active? when action in [:relay])
|
||||
|
||||
defp relay_active?(conn, _) do
|
||||
|
|
@ -96,6 +97,36 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
|
|||
end
|
||||
end
|
||||
|
||||
def object_replies(%{assigns: assigns, query_params: params} = conn, _all_params) do
|
||||
object_ap_id = conn.path_info |> Enum.reverse() |> tl() |> Enum.reverse()
|
||||
object_ap_id = Endpoint.url() <> "/" <> Enum.join(object_ap_id, "/")
|
||||
|
||||
# Most other API params are converted to atoms by OpenAPISpex 3.x
|
||||
# and therefore helper functions assume atoms. For consistency,
|
||||
# also convert our params to atoms here.
|
||||
params =
|
||||
params
|
||||
|> Map.take(@object_replies_known_param_keys)
|
||||
|> Enum.into(%{}, fn {k, v} -> {String.to_existing_atom(k), v} end)
|
||||
|> Map.put(:object_ap_id, object_ap_id)
|
||||
|> Map.put(:order_asc, true)
|
||||
|> Map.put(:conn, conn)
|
||||
|
||||
with %Object{} = object <- Object.get_cached_by_ap_id(object_ap_id),
|
||||
user <- Map.get(assigns, :user, nil),
|
||||
{_, true} <- {:visible?, Visibility.visible_for_user?(object, user)} do
|
||||
conn
|
||||
|> maybe_skip_cache(user)
|
||||
|> set_cache_ttl_for(object)
|
||||
|> put_resp_content_type("application/activity+json")
|
||||
|> put_view(ObjectView)
|
||||
|> render("object_replies.json", render_params: params)
|
||||
else
|
||||
{:visible?, false} -> {:error, :not_found}
|
||||
nil -> {:error, :not_found}
|
||||
end
|
||||
end
|
||||
|
||||
def track_object_fetch(conn, nil), do: conn
|
||||
|
||||
def track_object_fetch(conn, object_id) do
|
||||
|
|
@ -258,8 +289,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
|
|||
|> put_view(UserView)
|
||||
|> render("activity_collection_page.json", %{
|
||||
activities: activities,
|
||||
pagination: ControllerHelper.get_pagination_fields(conn, activities),
|
||||
iri: "#{user.ap_id}/outbox"
|
||||
pagination: ControllerHelper.get_pagination_fields(conn, activities)
|
||||
})
|
||||
end
|
||||
end
|
||||
|
|
@ -274,13 +304,37 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
|
|||
end
|
||||
|
||||
def inbox(%{assigns: %{valid_signature: true}} = conn, %{"nickname" => nickname} = params) do
|
||||
with %User{is_active: true} = recipient <- User.get_cached_by_nickname(nickname),
|
||||
{:ok, %User{is_active: true} = actor} <- User.get_or_fetch_by_ap_id(params["actor"]),
|
||||
with {:recipient_exists, %User{} = recipient} <-
|
||||
{:recipient_exists, User.get_cached_by_nickname(nickname)},
|
||||
{:sender_exists, {:ok, %User{} = actor}} <-
|
||||
{:sender_exists, User.get_or_fetch_by_ap_id(params["actor"])},
|
||||
{:recipient_active, true} <- {:recipient_active, recipient.is_active},
|
||||
{:sender_active, true} <- {:sender_active, actor.is_active},
|
||||
true <- Utils.recipient_in_message(recipient, actor, params),
|
||||
params <- Utils.maybe_splice_recipient(recipient.ap_id, params) do
|
||||
Federator.incoming_ap_doc(params)
|
||||
json(conn, "ok")
|
||||
else
|
||||
{:recipient_exists, _} ->
|
||||
conn
|
||||
|> put_status(:not_found)
|
||||
|> json("User does not exist")
|
||||
|
||||
{:sender_exists, _} ->
|
||||
conn
|
||||
|> put_status(:not_found)
|
||||
|> json("Sender does not exist")
|
||||
|
||||
{:recipient_active, _} ->
|
||||
conn
|
||||
|> put_status(:not_found)
|
||||
|> json("User deactivated")
|
||||
|
||||
{:sender_active, _} ->
|
||||
conn
|
||||
|> put_status(:not_found)
|
||||
|> json("Sender deactivated")
|
||||
|
||||
_ ->
|
||||
conn
|
||||
|> put_status(:bad_request)
|
||||
|
|
@ -381,8 +435,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
|
|||
|> put_view(UserView)
|
||||
|> render("activity_collection_page.json", %{
|
||||
activities: activities,
|
||||
pagination: ControllerHelper.get_pagination_fields(conn, activities),
|
||||
iri: "#{user.ap_id}/inbox"
|
||||
pagination: ControllerHelper.get_pagination_fields(conn, activities)
|
||||
})
|
||||
end
|
||||
|
||||
|
|
@ -459,6 +512,42 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
|
|||
{:ok, activity}
|
||||
end
|
||||
|
||||
# We currently lack a Flag ObjectValidator since both CommonAPI and Transmogrifier
|
||||
# send it straight to ActivityPub.flag, and C2S currently has to go through
|
||||
# the normal pipeline which requires an ObjectValidator.
|
||||
# TODO: Add a Flag Activity ObjectValidator
|
||||
defp check_allowed_action(_, %{"type" => "Flag"}) do
|
||||
{:error, "Flag activities aren't currently supported in C2S"}
|
||||
end
|
||||
|
||||
# It would respond with 201 and silently fail with:
|
||||
# Could not decode featured collection at fetch #{user.ap_id} \
|
||||
# {:error, "Trying to fetch local resource"}
|
||||
defp check_allowed_action(%{ap_id: ap_id}, %{"type" => "Update", "object" => %{"id" => ap_id}}),
|
||||
do: {:error, "Updating profile is not currently supported in C2S"}
|
||||
|
||||
defp check_allowed_action(_, activity), do: {:ok, activity}
|
||||
|
||||
defp validate_visibility(%User{} = user, %{"type" => type, "object" => object} = activity) do
|
||||
with {_, %Object{} = normalized_object} <-
|
||||
{:normalize, Object.normalize(object, fetch: false)},
|
||||
{_, true} <- {:visibility, Visibility.visible_for_user?(normalized_object, user)} do
|
||||
{:ok, activity}
|
||||
else
|
||||
{:normalize, _} ->
|
||||
if type in ["Create", "Listen"] do
|
||||
# Creating a new object via C2S; the user is local and authenticated
|
||||
# via the :authenticate Plug pipeline.
|
||||
{:ok, activity}
|
||||
else
|
||||
{:error, "No such object found"}
|
||||
end
|
||||
|
||||
{:visibility, _} ->
|
||||
{:forbidden, "You can't interact with this object"}
|
||||
end
|
||||
end
|
||||
|
||||
def update_outbox(
|
||||
%{assigns: %{user: %User{nickname: nickname, ap_id: actor} = user}} = conn,
|
||||
%{"nickname" => nickname} = params
|
||||
|
|
@ -470,6 +559,8 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
|
|||
|> Map.put("actor", actor)
|
||||
|
||||
with {:ok, params} <- fix_user_message(user, params),
|
||||
{:ok, params} <- check_allowed_action(user, params),
|
||||
{:ok, params} <- validate_visibility(user, params),
|
||||
{:ok, activity, _} <- Pipeline.common_pipeline(params, local: true),
|
||||
%Activity{data: activity_data} <- Activity.normalize(activity) do
|
||||
conn
|
||||
|
|
@ -520,15 +611,6 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
|
|||
|> json(dgettext("errors", "error"))
|
||||
end
|
||||
|
||||
defp set_requester_reachable(%Plug.Conn{} = conn, _) do
|
||||
with actor <- conn.params["actor"],
|
||||
true <- is_binary(actor) do
|
||||
Pleroma.Instances.set_reachable(actor)
|
||||
end
|
||||
|
||||
conn
|
||||
end
|
||||
|
||||
defp log_inbox_metadata(%{params: %{"actor" => actor, "type" => type}} = conn, _) do
|
||||
Logger.metadata(actor: actor, type: type)
|
||||
conn
|
||||
|
|
|
|||
|
|
@ -327,8 +327,8 @@ defmodule Pleroma.Web.ActivityPub.Builder do
|
|||
}, []}
|
||||
end
|
||||
|
||||
@spec block(User.t(), User.t()) :: {:ok, map(), keyword()}
|
||||
def block(blocker, blocked) do
|
||||
@spec block(User.t(), User.t(), map()) :: {:ok, map(), keyword()}
|
||||
def block(blocker, blocked, params \\ %{}) do
|
||||
{:ok,
|
||||
%{
|
||||
"id" => Utils.generate_activity_id(),
|
||||
|
|
@ -336,7 +336,7 @@ defmodule Pleroma.Web.ActivityPub.Builder do
|
|||
"actor" => blocker.ap_id,
|
||||
"object" => blocked.ap_id,
|
||||
"to" => [blocked.ap_id]
|
||||
}, []}
|
||||
}, Keyword.new(params)}
|
||||
end
|
||||
|
||||
@spec announce(User.t(), Object.t(), keyword()) :: {:ok, map(), keyword()}
|
||||
|
|
|
|||
|
|
@ -18,6 +18,8 @@ defmodule Pleroma.Web.ActivityPub.MRF.InlineQuotePolicy do
|
|||
content =~ quote_url -> true
|
||||
# Does the content already have a .quote-inline span?
|
||||
content =~ "<span class=\"quote-inline\">" -> true
|
||||
# Does the content already have a .quote-inline p? (Mastodon)
|
||||
content =~ "<p class=\"quote-inline\">" -> true
|
||||
# No inline quote found
|
||||
true -> false
|
||||
end
|
||||
|
|
|
|||
61
lib/pleroma/web/activity_pub/mrf/quiet_reply.ex
Normal file
|
|
@ -0,0 +1,61 @@
|
|||
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.Web.ActivityPub.MRF.QuietReply do
|
||||
@moduledoc """
|
||||
QuietReply restricts the scope of replies posted by local users to "Unlisted" / "Quiet Public". The activity is still delivered to all expected recipients and instances, but it is not published to the Federated / The Whole Known Network timelines. It still appears in the Home timelines of the user's followers and is visible to anyone who opens the thread.
|
||||
"""
|
||||
require Pleroma.Constants
|
||||
|
||||
alias Pleroma.User
|
||||
|
||||
@behaviour Pleroma.Web.ActivityPub.MRF.Policy
|
||||
|
||||
@impl true
|
||||
def history_awareness, do: :auto
|
||||
|
||||
@impl true
|
||||
def filter(
|
||||
%{
|
||||
"type" => "Create",
|
||||
"to" => to,
|
||||
"cc" => cc,
|
||||
"object" => %{
|
||||
"actor" => actor,
|
||||
"type" => "Note",
|
||||
"inReplyTo" => in_reply_to
|
||||
}
|
||||
} = activity
|
||||
) do
|
||||
with true <- is_binary(in_reply_to),
|
||||
true <- Pleroma.Constants.as_public() in to,
|
||||
%User{follower_address: followers_collection, local: true} <-
|
||||
User.get_by_ap_id(actor) do
|
||||
updated_to =
|
||||
[followers_collection | to]
|
||||
|> Kernel.--([Pleroma.Constants.as_public()])
|
||||
|
||||
updated_cc =
|
||||
[Pleroma.Constants.as_public() | cc]
|
||||
|> Kernel.--([followers_collection])
|
||||
|
||||
updated_activity =
|
||||
activity
|
||||
|> Map.put("to", updated_to)
|
||||
|> Map.put("cc", updated_cc)
|
||||
|> put_in(["object", "to"], updated_to)
|
||||
|> put_in(["object", "cc"], updated_cc)
|
||||
|
||||
{:ok, updated_activity}
|
||||
else
|
||||
_ -> {:ok, activity}
|
||||
end
|
||||
end
|
||||
|
||||
@impl true
|
||||
def filter(activity), do: {:ok, activity}
|
||||
|
||||
@impl true
|
||||
def describe, do: {:ok, %{}}
|
||||
end
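A hedged configuration sketch for enabling this policy; :mrf/policies is the usual Pleroma MRF mechanism, and the first entry below is just a placeholder for whatever policies are already configured.

# config/prod.secret.exs (sketch)
config :pleroma, :mrf,
  policies: [
    Pleroma.Web.ActivityPub.MRF.ObjectAgePolicy,  # placeholder for existing policies
    Pleroma.Web.ActivityPub.MRF.QuietReply
  ]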
|
||||
|
|
@ -15,7 +15,6 @@ defmodule Pleroma.Web.ActivityPub.MRF.RemoteReportPolicy do
|
|||
else
|
||||
{:local, true} -> {:ok, object}
|
||||
{:reject, message} -> {:reject, message}
|
||||
error -> {:reject, error}
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -87,7 +87,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.StealEmojiPolicy do
|
|||
Path.join(Config.get([:instance, :static_dir]), "emoji/stolen")
|
||||
)
|
||||
|
||||
File.mkdir_p(emoji_dir_path)
|
||||
Pleroma.Backports.mkdir_p(emoji_dir_path)
|
||||
|
||||
new_emojis =
|
||||
foreign_emojis
|
||||
|
|
|
|||
|
|
@ -200,14 +200,13 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidator do
|
|||
end
|
||||
|
||||
def validate(%{"type" => type} = object, meta)
|
||||
when type in ~w[Accept Reject Follow Update Like EmojiReact Announce
|
||||
when type in ~w[Accept Reject Follow Like EmojiReact Announce
|
||||
ChatMessage Answer] do
|
||||
validator =
|
||||
case type do
|
||||
"Accept" -> AcceptRejectValidator
|
||||
"Reject" -> AcceptRejectValidator
|
||||
"Follow" -> FollowValidator
|
||||
"Update" -> UpdateValidator
|
||||
"Like" -> LikeValidator
|
||||
"EmojiReact" -> EmojiReactValidator
|
||||
"Announce" -> AnnounceValidator
|
||||
|
|
@ -215,16 +214,19 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidator do
|
|||
"Answer" -> AnswerValidator
|
||||
end
|
||||
|
||||
cast_func =
|
||||
if type == "Update" do
|
||||
fn o -> validator.cast_and_validate(o, meta) end
|
||||
else
|
||||
fn o -> validator.cast_and_validate(o) end
|
||||
end
|
||||
|
||||
with {:ok, object} <-
|
||||
object
|
||||
|> cast_func.()
|
||||
|> validator.cast_and_validate()
|
||||
|> Ecto.Changeset.apply_action(:insert) do
|
||||
object = stringify_keys(object)
|
||||
{:ok, object, meta}
|
||||
end
|
||||
end
|
||||
|
||||
def validate(%{"type" => type} = object, meta) when type == "Update" do
|
||||
with {:ok, object} <-
|
||||
object
|
||||
|> UpdateValidator.cast_and_validate(meta)
|
||||
|> Ecto.Changeset.apply_action(:insert) do
|
||||
object = stringify_keys(object)
|
||||
{:ok, object, meta}
|
||||
|
|
|
|||
|
|
@ -56,20 +56,24 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.ArticleNotePageValidator do
|
|||
defp fix_tag(%{"tag" => tag} = data) when is_map(tag), do: Map.put(data, "tag", [tag])
|
||||
defp fix_tag(data), do: Map.drop(data, ["tag"])
|
||||
|
||||
# legacy internal *oma format
|
||||
defp fix_replies(%{"replies" => replies} = data) when is_list(replies), do: data
|
||||
|
||||
defp fix_replies(%{"replies" => %{"first" => %{"items" => replies}}} = data)
|
||||
when is_list(replies),
|
||||
do: Map.put(data, "replies", replies)
|
||||
|
||||
defp fix_replies(%{"replies" => %{"first" => %{"orderedItems" => replies}}} = data)
|
||||
when is_list(replies),
|
||||
do: Map.put(data, "replies", replies)
|
||||
|
||||
defp fix_replies(%{"replies" => %{"items" => replies}} = data) when is_list(replies),
|
||||
do: Map.put(data, "replies", replies)
|
||||
|
||||
# TODO: Pleroma does not have any support for Collections at the moment.
|
||||
# If the `replies` field is not something the ObjectID validator can handle,
|
||||
# the activity/object would be rejected, which is bad behavior.
|
||||
defp fix_replies(%{"replies" => replies} = data) when not is_list(replies),
|
||||
do: Map.drop(data, ["replies"])
|
||||
defp fix_replies(%{"replies" => %{"orderedItems" => replies}} = data) when is_list(replies),
|
||||
do: Map.put(data, "replies", replies)
|
||||
|
||||
defp fix_replies(data), do: data
|
||||
defp fix_replies(data), do: Map.delete(data, "replies")
|
||||
|
||||
def fix_attachments(%{"attachment" => attachment} = data) when is_map(attachment),
|
||||
do: Map.put(data, "attachment", [attachment])
|
||||
|
|
|
|||
|
|
@ -4,6 +4,7 @@
|
|||
|
||||
defmodule Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes do
|
||||
alias Pleroma.EctoType.ActivityPub.ObjectValidators
|
||||
alias Pleroma.Language.LanguageDetector
|
||||
alias Pleroma.Maps
|
||||
alias Pleroma.Object
|
||||
alias Pleroma.Object.Containment
|
||||
|
|
@ -19,7 +20,15 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes do
|
|||
require Pleroma.Constants
|
||||
|
||||
def cast_and_filter_recipients(message, field, follower_collection, field_fallback \\ []) do
|
||||
{:ok, data} = ObjectValidators.Recipients.cast(message[field] || field_fallback)
|
||||
# Fix as:Public/Public before ObjectID casting drops it, but keep `field_fallback`
|
||||
# semantics (only used when the field is missing).
|
||||
recipients =
|
||||
%{field => message[field] || field_fallback}
|
||||
|> Transmogrifier.fix_addressing_list(field)
|
||||
|> Transmogrifier.fix_addressing_public(field)
|
||||
|> Map.fetch!(field)
|
||||
|
||||
{:ok, data} = ObjectValidators.Recipients.cast(recipients)
|
||||
|
||||
data =
|
||||
Enum.reject(data, fn x ->
|
||||
|
|
@ -151,10 +160,19 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes do
|
|||
def maybe_add_language(object) do
|
||||
language =
|
||||
[
|
||||
get_language_from_context(object),
|
||||
get_language_from_content_map(object)
|
||||
&get_language_from_context/1,
|
||||
&get_language_from_content_map/1,
|
||||
&get_language_from_content/1
|
||||
]
|
||||
|> Enum.find(&good_locale_code?(&1))
|
||||
|> Enum.find_value(fn get_language ->
|
||||
language = get_language.(object)
|
||||
|
||||
if good_locale_code?(language) do
|
||||
language
|
||||
else
|
||||
nil
|
||||
end
|
||||
end)
|
||||
|
||||
if language do
|
||||
Map.put(object, "language", language)
|
||||
|
|
@ -187,6 +205,12 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes do
|
|||
|
||||
defp get_language_from_content_map(_), do: nil
|
||||
|
||||
defp get_language_from_content(%{"content" => content} = object) do
|
||||
LanguageDetector.detect("#{object["summary"] || ""} #{content}")
|
||||
end
|
||||
|
||||
defp get_language_from_content(_), do: nil
|
||||
|
||||
def maybe_add_content_map(%{"language" => language, "content" => content} = object)
|
||||
when not_empty_string(language) do
|
||||
Map.put(object, "contentMap", Map.put(%{}, language, content))
|
||||
|
|
|
|||
|
|
@ -50,13 +50,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.TagValidator do
|
|||
end
|
||||
|
||||
def changeset(struct, %{"type" => "Hashtag", "name" => name} = data) do
|
||||
name =
|
||||
cond do
|
||||
"#" <> name -> name
|
||||
name -> name
|
||||
end
|
||||
|> String.downcase()
|
||||
|
||||
name = String.downcase(name)
|
||||
data = Map.put(data, "name", name)
|
||||
|
||||
struct
|
||||
|
|
|
|||
|
|
@ -13,7 +13,6 @@ defmodule Pleroma.Web.ActivityPub.Publisher do
|
|||
alias Pleroma.User
|
||||
alias Pleroma.Web.ActivityPub.Publisher.Prepared
|
||||
alias Pleroma.Web.ActivityPub.Relay
|
||||
alias Pleroma.Web.ActivityPub.Transmogrifier
|
||||
alias Pleroma.Workers.PublisherWorker
|
||||
|
||||
require Pleroma.Constants
|
||||
|
|
@ -26,6 +25,18 @@ defmodule Pleroma.Web.ActivityPub.Publisher do
|
|||
ActivityPub outgoing federation module.
|
||||
"""
|
||||
|
||||
@signature_impl Application.compile_env(
|
||||
:pleroma,
|
||||
[__MODULE__, :signature_impl],
|
||||
Pleroma.Signature
|
||||
)
|
||||
|
||||
@transmogrifier_impl Application.compile_env(
|
||||
:pleroma,
|
||||
[__MODULE__, :transmogrifier_impl],
|
||||
Pleroma.Web.ActivityPub.Transmogrifier
|
||||
)
|
||||
|
||||
@doc """
|
||||
Enqueue publishing a single activity.
|
||||
"""
|
||||
|
|
@ -68,7 +79,7 @@ defmodule Pleroma.Web.ActivityPub.Publisher do
|
|||
Determine if an activity can be represented by running it through Transmogrifier.
|
||||
"""
|
||||
def representable?(%Activity{} = activity) do
|
||||
with {:ok, _data} <- Transmogrifier.prepare_outgoing(activity.data) do
|
||||
with {:ok, _data} <- @transmogrifier_impl.prepare_outgoing(activity.data) do
|
||||
true
|
||||
else
|
||||
_e ->
|
||||
|
|
@ -91,9 +102,32 @@ defmodule Pleroma.Web.ActivityPub.Publisher do
|
|||
Logger.debug("Federating #{ap_id} to #{inbox}")
|
||||
uri = %{path: path} = URI.parse(inbox)
|
||||
|
||||
{:ok, data} = Transmogrifier.prepare_outgoing(activity.data)
|
||||
{:ok, data} = @transmogrifier_impl.prepare_outgoing(activity.data)
|
||||
|
||||
cc = Map.get(params, :cc, [])
|
||||
{actor, data} =
|
||||
with {_, false} <- {:actor_changed?, data["actor"] != activity.data["actor"]} do
|
||||
{actor, data}
|
||||
else
|
||||
{:actor_changed?, true} ->
|
||||
# If prepare_outgoing changes the actor, re-get it from the db
|
||||
new_actor = User.get_cached_by_ap_id(data["actor"])
|
||||
{new_actor, data}
|
||||
end
|
||||
|
||||
param_cc = Map.get(params, :cc, [])
|
||||
|
||||
original_cc = Map.get(data, "cc", [])
|
||||
|
||||
public_address = Pleroma.Constants.as_public()
|
||||
|
||||
# Ensure unlisted posts don't lose the public address in the cc
|
||||
# if the param_cc was set
|
||||
cc =
|
||||
if public_address in original_cc and public_address not in param_cc do
|
||||
[public_address | param_cc]
|
||||
else
|
||||
param_cc
|
||||
end
|
||||
|
||||
json =
|
||||
data
|
||||
|
|
@ -102,10 +136,10 @@ defmodule Pleroma.Web.ActivityPub.Publisher do
|
|||
|
||||
digest = "SHA-256=" <> (:crypto.hash(:sha256, json) |> Base.encode64())
|
||||
|
||||
date = Pleroma.Signature.signed_date()
|
||||
date = @signature_impl.signed_date()
|
||||
|
||||
signature =
|
||||
Pleroma.Signature.sign(actor, %{
|
||||
@signature_impl.sign(actor, %{
|
||||
"(request-target)": "post #{path}",
|
||||
host: signature_host(uri),
|
||||
"content-length": byte_size(json),
|
||||
|
|
@ -148,17 +182,9 @@ defmodule Pleroma.Web.ActivityPub.Publisher do
|
|||
{"digest", p.digest}
|
||||
]
|
||||
) do
|
||||
if not is_nil(p.unreachable_since) do
|
||||
Instances.set_reachable(p.inbox)
|
||||
end
|
||||
|
||||
result
|
||||
else
|
||||
{_post_result, %{status: code} = response} = e ->
|
||||
if is_nil(p.unreachable_since) do
|
||||
Instances.set_unreachable(p.inbox)
|
||||
end
|
||||
|
||||
Logger.metadata(activity: p.activity_id, inbox: p.inbox, status: code)
|
||||
Logger.error("Publisher failed to inbox #{p.inbox} with status #{code}")
|
||||
|
||||
|
|
@ -179,10 +205,6 @@ defmodule Pleroma.Web.ActivityPub.Publisher do
|
|||
connection_pool_snooze()
|
||||
|
||||
e ->
|
||||
if is_nil(p.unreachable_since) do
|
||||
Instances.set_unreachable(p.inbox)
|
||||
end
|
||||
|
||||
Logger.metadata(activity: p.activity_id, inbox: p.inbox)
|
||||
Logger.error("Publisher failed to inbox #{p.inbox} #{inspect(e)}")
|
||||
{:error, e}
|
||||
|
|
@ -294,7 +316,7 @@ defmodule Pleroma.Web.ActivityPub.Publisher do
|
|||
|
||||
[priority_recipients, recipients] = recipients(actor, activity)
|
||||
|
||||
inboxes =
|
||||
[priority_inboxes, other_inboxes] =
|
||||
[priority_recipients, recipients]
|
||||
|> Enum.map(fn recipients ->
|
||||
recipients
|
||||
|
|
@ -307,20 +329,23 @@ defmodule Pleroma.Web.ActivityPub.Publisher do
|
|||
end)
|
||||
|
||||
Repo.checkout(fn ->
|
||||
Enum.each(inboxes, fn inboxes ->
|
||||
Enum.each(inboxes, fn {inbox, unreachable_since} ->
|
||||
%User{ap_id: ap_id} = Enum.find(recipients, fn actor -> actor.inbox == inbox end)
|
||||
Enum.each([priority_inboxes, other_inboxes], fn inboxes ->
|
||||
Enum.each(inboxes, fn inbox ->
|
||||
{%User{ap_id: ap_id}, priority} =
|
||||
get_user_with_priority(inbox, priority_recipients, recipients)
|
||||
|
||||
# Get all the recipients on the same host and add them to cc. Otherwise, a remote
|
||||
# instance would only accept the first message (for the first recipient) and ignore the rest.
|
||||
cc = get_cc_ap_ids(ap_id, recipients)
|
||||
|
||||
__MODULE__.enqueue_one(%{
|
||||
inbox: inbox,
|
||||
cc: cc,
|
||||
activity_id: activity.id,
|
||||
unreachable_since: unreachable_since
|
||||
})
|
||||
__MODULE__.enqueue_one(
|
||||
%{
|
||||
inbox: inbox,
|
||||
cc: cc,
|
||||
activity_id: activity.id
|
||||
},
|
||||
priority: priority
|
||||
)
|
||||
end)
|
||||
end)
|
||||
end)
|
||||
|
|
@ -352,12 +377,11 @@ defmodule Pleroma.Web.ActivityPub.Publisher do
|
|||
|> Enum.each(fn {inboxes, priority} ->
|
||||
inboxes
|
||||
|> Instances.filter_reachable()
|
||||
|> Enum.each(fn {inbox, unreachable_since} ->
|
||||
|> Enum.each(fn inbox ->
|
||||
__MODULE__.enqueue_one(
|
||||
%{
|
||||
inbox: inbox,
|
||||
activity_id: activity.id,
|
||||
unreachable_since: unreachable_since
|
||||
activity_id: activity.id
|
||||
},
|
||||
priority: priority
|
||||
)
|
||||
|
|
@ -383,4 +407,15 @@ defmodule Pleroma.Web.ActivityPub.Publisher do
|
|||
end
|
||||
|
||||
def gather_nodeinfo_protocol_names, do: ["activitypub"]
|
||||
|
||||
defp get_user_with_priority(inbox, priority_recipients, recipients) do
|
||||
[{priority_recipients, 0}, {recipients, 1}]
|
||||
|> Enum.find_value(fn {recipients, priority} ->
|
||||
with %User{} = user <- Enum.find(recipients, fn actor -> actor.inbox == inbox end) do
|
||||
{user, priority}
|
||||
else
|
||||
_ -> nil
|
||||
end
|
||||
end)
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -145,7 +145,7 @@ defmodule Pleroma.Web.ActivityPub.SideEffects do
|
|||
) do
|
||||
with %User{} = blocker <- User.get_cached_by_ap_id(blocking_user),
|
||||
%User{} = blocked <- User.get_cached_by_ap_id(blocked_user) do
|
||||
User.block(blocker, blocked)
|
||||
User.block(blocker, blocked, Enum.into(meta, %{}))
|
||||
end
|
||||
|
||||
{:ok, object, meta}
|
||||
|
|
|
|||
|
|
@ -6,6 +6,7 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
|||
@moduledoc """
|
||||
A module to handle coding from internal to wire ActivityPub and back.
|
||||
"""
|
||||
@behaviour Pleroma.Web.ActivityPub.Transmogrifier.API
|
||||
alias Pleroma.Activity
|
||||
alias Pleroma.EctoType.ActivityPub.ObjectValidators
|
||||
alias Pleroma.Maps
|
||||
|
|
@ -22,7 +23,6 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
|||
alias Pleroma.Web.ActivityPub.Visibility
|
||||
alias Pleroma.Web.Federator
|
||||
|
||||
import Ecto.Query
|
||||
import Pleroma.Web.Utils.Guards, only: [not_empty_string: 1]
|
||||
|
||||
require Pleroma.Constants
|
||||
|
|
@ -103,6 +103,24 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
|||
end
|
||||
end
|
||||
|
||||
@doc """
|
||||
Bovine compatibility
|
||||
https://codeberg.org/bovine/bovine/issues/53
|
||||
"""
|
||||
def fix_addressing_public(map, field) do
|
||||
addrs = Map.get(map, field, []) |> List.wrap()
|
||||
|
||||
Map.put(
|
||||
map,
|
||||
field,
|
||||
Enum.map(addrs, fn
|
||||
"Public" -> Pleroma.Constants.as_public()
|
||||
"as:Public" -> Pleroma.Constants.as_public()
|
||||
x -> x
|
||||
end)
|
||||
)
|
||||
end
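A quick illustration of the rewrite; the expanded IRI shown is the assumed value of Pleroma.Constants.as_public/0, and the remote actor URL is made up.

Transmogrifier.fix_addressing_public(
  %{"to" => ["as:Public", "https://remote.example/users/bob"]},
  "to"
)
# => %{"to" => ["https://www.w3.org/ns/activitystreams#Public",
#               "https://remote.example/users/bob"]}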
|
||||
|
||||
# if directMessage flag is set to true, leave the addressing alone
|
||||
def fix_explicit_addressing(%{"directMessage" => true} = object, _follower_collection),
|
||||
do: object
|
||||
|
|
@ -160,6 +178,10 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
|||
|> fix_addressing_list("cc")
|
||||
|> fix_addressing_list("bto")
|
||||
|> fix_addressing_list("bcc")
|
||||
|> fix_addressing_public("to")
|
||||
|> fix_addressing_public("cc")
|
||||
|> fix_addressing_public("bto")
|
||||
|> fix_addressing_public("bcc")
|
||||
|> fix_explicit_addressing(follower_collection)
|
||||
|> fix_implicit_addressing(follower_collection)
|
||||
end
|
||||
|
|
@ -492,6 +514,19 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
|||
}
|
||||
|
||||
# Rewrite Misskey likes into EmojiReacts
|
||||
defp handle_incoming_normalized(
|
||||
%{
|
||||
"type" => "Like",
|
||||
"content" => content
|
||||
} = data,
|
||||
options
|
||||
)
|
||||
when is_binary(content) do
|
||||
data
|
||||
|> Map.put("type", "EmojiReact")
|
||||
|> handle_incoming_normalized(options)
|
||||
end
|
||||
|
||||
defp handle_incoming_normalized(
|
||||
%{
|
||||
"type" => "Like",
|
||||
|
|
@ -500,7 +535,6 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
|||
options
|
||||
) do
|
||||
data
|
||||
|> Map.put("type", "EmojiReact")
|
||||
|> Map.put("content", @misskey_reactions[reaction] || reaction)
|
||||
|> handle_incoming_normalized(options)
|
||||
end
|
||||
|
|
@ -652,6 +686,24 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
|||
end
|
||||
end
|
||||
|
||||
# Rewrite dislikes into the thumbs down emoji
|
||||
defp handle_incoming_normalized(%{"type" => "Dislike"} = data, options) do
|
||||
data
|
||||
|> Map.put("type", "EmojiReact")
|
||||
|> Map.put("content", "👎")
|
||||
|> handle_incoming_normalized(options)
|
||||
end
|
||||
|
||||
defp handle_incoming_normalized(
|
||||
%{"type" => "Undo", "object" => %{"type" => "Dislike"}} = data,
|
||||
options
|
||||
) do
|
||||
data
|
||||
|> put_in(["object", "type"], "EmojiReact")
|
||||
|> put_in(["object", "content"], "👎")
|
||||
|> handle_incoming_normalized(options)
|
||||
end
|
||||
|
||||
defp handle_incoming_normalized(_, _), do: :error
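For clarity, a sketch of what the new Dislike handling above does to an incoming activity; the actor/object URLs are made up and only the relevant keys are shown.

incoming = %{
  "type" => "Dislike",
  "actor" => "https://remote.example/users/bob",
  "object" => "https://local.example/objects/1"
}

# Handled as if the remote had sent a thumbs-down EmojiReact:
incoming
|> Map.put("type", "EmojiReact")
|> Map.put("content", "👎")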
|
||||
|
||||
@spec get_obj_helper(String.t(), Keyword.t()) :: {:ok, Object.t()} | nil
|
||||
|
|
@ -709,48 +761,26 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
|||
def set_quote_url(obj), do: obj
|
||||
|
||||
@doc """
|
||||
Serialized Mastodon-compatible `replies` collection containing _self-replies_.
|
||||
Based on Mastodon's ActivityPub::NoteSerializer#replies.
|
||||
Inline first page of the `replies` collection,
|
||||
containing any replies in chronological order.
|
||||
"""
|
||||
def set_replies(obj_data) do
|
||||
replies_uris =
|
||||
with limit when limit > 0 <-
|
||||
Pleroma.Config.get([:activitypub, :note_replies_output_limit], 0),
|
||||
%Object{} = object <- Object.get_cached_by_ap_id(obj_data["id"]) do
|
||||
object
|
||||
|> Object.self_replies()
|
||||
|> select([o], fragment("?->>'id'", o.data))
|
||||
|> limit(^limit)
|
||||
|> Repo.all()
|
||||
else
|
||||
_ -> []
|
||||
end
|
||||
|
||||
set_replies(obj_data, replies_uris)
|
||||
def set_replies(%{"type" => type} = obj_data)
|
||||
when type in Pleroma.Constants.status_object_types() do
|
||||
with obj_ap_id when is_binary(obj_ap_id) <- obj_data["id"],
|
||||
limit when limit > 0 <-
|
||||
Pleroma.Config.get([:activitypub, :note_replies_output_limit], 0),
|
||||
collection <-
|
||||
Pleroma.Web.ActivityPub.ObjectView.render("object_replies.json", %{
|
||||
render_params: %{object_ap_id: obj_data["id"], limit: limit, skip_ap_ctx: true}
|
||||
}) do
|
||||
Map.put(obj_data, "replies", collection)
|
||||
else
|
||||
0 -> Map.put(obj_data, "replies", obj_data["id"] <> "/replies")
|
||||
_ -> obj_data
|
||||
end
|
||||
end
|
||||
|
||||
defp set_replies(obj, []) do
|
||||
obj
|
||||
end
|
||||
|
||||
defp set_replies(obj, replies_uris) do
|
||||
replies_collection = %{
|
||||
"type" => "Collection",
|
||||
"items" => replies_uris
|
||||
}
|
||||
|
||||
Map.merge(obj, %{"replies" => replies_collection})
|
||||
end
|
||||
|
||||
def replies(%{"replies" => %{"first" => %{"items" => items}}}) when not is_nil(items) do
|
||||
items
|
||||
end
|
||||
|
||||
def replies(%{"replies" => %{"items" => items}}) when not is_nil(items) do
|
||||
items
|
||||
end
|
||||
|
||||
def replies(_), do: []
|
||||
def set_replies(obj_data), do: obj_data
|
||||
|
||||
# Prepares the object of an outgoing create activity.
|
||||
def prepare_object(object) do
|
||||
|
|
@ -820,6 +850,27 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
|||
{:ok, data}
|
||||
end
|
||||
|
||||
def prepare_outgoing(%{"type" => "Update", "object" => %{"type" => objtype} = object} = data)
|
||||
when objtype in Pleroma.Constants.actor_types() do
|
||||
object =
|
||||
object
|
||||
|> maybe_fix_user_object()
|
||||
|> strip_internal_fields()
|
||||
|
||||
data =
|
||||
data
|
||||
|> Map.put("object", object)
|
||||
|> strip_internal_fields()
|
||||
|> Map.merge(Utils.make_json_ld_header(object))
|
||||
|> Map.delete("bcc")
|
||||
|
||||
{:ok, data}
|
||||
end
|
||||
|
||||
def prepare_outgoing(%{"type" => "Update", "object" => %{}} = data) do
|
||||
raise "Requested to serve an Update for non-updateable object type: #{inspect(data)}"
|
||||
end
|
||||
|
||||
def prepare_outgoing(%{"type" => "Announce", "actor" => ap_id, "object" => object_id} = data) do
|
||||
object =
|
||||
object_id
|
||||
|
|
@ -879,6 +930,14 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
|||
end
|
||||
end
|
||||
|
||||
def prepare_outgoing(%{"type" => "Flag"} = data) do
|
||||
with {:ok, stripped_activity} <- Utils.strip_report_status_data(data),
|
||||
stripped_activity <- Utils.maybe_anonymize_reporter(stripped_activity),
|
||||
stripped_activity <- Map.merge(stripped_activity, Utils.make_json_ld_header()) do
|
||||
{:ok, stripped_activity}
|
||||
end
|
||||
end
|
||||
|
||||
def prepare_outgoing(%{"type" => _type} = data) do
|
||||
data =
|
||||
data
|
||||
|
|
|
|||
11
lib/pleroma/web/activity_pub/transmogrifier/api.ex
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.Web.ActivityPub.Transmogrifier.API do
|
||||
@moduledoc """
|
||||
Behaviour for the subset of Transmogrifier used by Publisher.
|
||||
"""
|
||||
|
||||
@callback prepare_outgoing(map()) :: {:ok, map()} | {:error, term()}
|
||||
end
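The behaviour exists so the new @transmogrifier_impl compile-time key in Publisher can point at a test double; a sketch assuming Mox is used, with the mock module name being a placeholder.

# config/test.exs (sketch; compile_env is read at compile time, so this belongs
# in the regular config files, not runtime.exs)
config :pleroma, Pleroma.Web.ActivityPub.Publisher,
  transmogrifier_impl: Pleroma.Web.ActivityPub.TransmogrifierMock

# test/support (sketch)
Mox.defmock(Pleroma.Web.ActivityPub.TransmogrifierMock,
  for: Pleroma.Web.ActivityPub.Transmogrifier.API
)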
|
||||
|
|
@ -82,7 +82,11 @@ defmodule Pleroma.Web.ActivityPub.Utils do
|
|||
def unaddressed_message?(params),
|
||||
do:
|
||||
[params["to"], params["cc"], params["bto"], params["bcc"]]
|
||||
|> Enum.all?(&is_nil(&1))
|
||||
|> Enum.all?(fn
|
||||
nil -> true
|
||||
[] -> true
|
||||
_ -> false
|
||||
end)
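A tiny sketch of the behavioral change: empty recipient lists now count as unaddressed, just like missing keys (Utils here stands for Pleroma.Web.ActivityPub.Utils).

Utils.unaddressed_message?(%{"to" => [], "cc" => nil})
# => true (previously false, since [] is not nil)

Utils.unaddressed_message?(%{"to" => ["https://remote.example/users/bob"]})
# => false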
|
||||
|
||||
@spec recipient_in_message(User.t(), User.t(), map()) :: boolean()
|
||||
def recipient_in_message(%User{ap_id: ap_id} = recipient, %User{} = actor, params),
|
||||
|
|
@ -859,8 +863,14 @@ defmodule Pleroma.Web.ActivityPub.Utils do
|
|||
|
||||
def update_report_state(_, _), do: {:error, "Unsupported state"}
|
||||
|
||||
def strip_report_status_data(activity) do
|
||||
[actor | reported_activities] = activity.data["object"]
|
||||
def strip_report_status_data(%Activity{} = activity) do
|
||||
with {:ok, new_data} <- strip_report_status_data(activity.data) do
|
||||
{:ok, %{activity | data: new_data}}
|
||||
end
|
||||
end
|
||||
|
||||
def strip_report_status_data(data) do
|
||||
[actor | reported_activities] = data["object"]
|
||||
|
||||
stripped_activities =
|
||||
Enum.reduce(reported_activities, [], fn act, acc ->
|
||||
|
|
@ -870,9 +880,36 @@ defmodule Pleroma.Web.ActivityPub.Utils do
|
|||
end
|
||||
end)
|
||||
|
||||
new_data = put_in(activity.data, ["object"], [actor | stripped_activities])
|
||||
new_data = put_in(data, ["object"], [actor | stripped_activities])
|
||||
|
||||
{:ok, %{activity | data: new_data}}
|
||||
{:ok, new_data}
|
||||
end
|
||||
|
||||
def get_anonymized_reporter do
|
||||
with true <- Pleroma.Config.get([:activitypub, :anonymize_reporter]),
|
||||
nickname when is_binary(nickname) <-
|
||||
Pleroma.Config.get([:activitypub, :anonymize_reporter_local_nickname]),
|
||||
%User{ap_id: ap_id, local: true} <- User.get_cached_by_nickname(nickname) do
|
||||
ap_id
|
||||
else
|
||||
_ -> nil
|
||||
end
|
||||
end
|
||||
|
||||
def maybe_anonymize_reporter(%Activity{data: data} = activity) do
|
||||
new_data = maybe_anonymize_reporter(data)
|
||||
%Activity{activity | actor: new_data["actor"], data: new_data}
|
||||
end
|
||||
|
||||
def maybe_anonymize_reporter(activity) do
|
||||
ap_id = get_anonymized_reporter()
|
||||
|
||||
if is_binary(ap_id) do
|
||||
activity
|
||||
|> Map.put("actor", ap_id)
|
||||
else
|
||||
activity
|
||||
end
|
||||
end
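A hedged configuration sketch for the new reporter anonymization; the keys match the Config.get calls above, and the nickname is a placeholder that must belong to an existing local account.

# config (sketch)
config :pleroma, :activitypub,
  anonymize_reporter: true,
  anonymize_reporter_local_nickname: "reports_bot"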
|
||||
|
||||
def update_activity_visibility(activity, visibility) when visibility in @valid_visibilities do
|
||||
|
|
|
|||
59
lib/pleroma/web/activity_pub/views/collection_view_helper.ex
Normal file
|
|
@ -0,0 +1,59 @@
|
|||
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
|
||||
# Copyright © 2025 Akkoma Authors <https://akkoma.dev/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.Web.ActivityPub.CollectionViewHelper do
|
||||
alias Pleroma.Web.ActivityPub.Utils
|
||||
|
||||
def collection_page_offset(collection, iri, page, show_items \\ true, total \\ nil) do
|
||||
offset = (page - 1) * 10
|
||||
items = Enum.slice(collection, offset, 10)
|
||||
items = Enum.map(items, fn user -> user.ap_id end)
|
||||
total = total || length(collection)
|
||||
|
||||
map = %{
|
||||
"id" => "#{iri}?page=#{page}",
|
||||
"type" => "OrderedCollectionPage",
|
||||
"partOf" => iri,
|
||||
"totalItems" => total,
|
||||
"orderedItems" => if(show_items, do: items, else: [])
|
||||
}
|
||||
|
||||
if offset + 10 < total do
|
||||
Map.put(map, "next", "#{iri}?page=#{page + 1}")
|
||||
else
|
||||
map
|
||||
end
|
||||
end
|
||||
|
||||
defp maybe_omit_next(pagination, _items, nil), do: pagination
|
||||
|
||||
defp maybe_omit_next(pagination, items, limit) when is_binary(limit) do
|
||||
case Integer.parse(limit) do
|
||||
{limit, ""} -> maybe_omit_next(pagination, items, limit)
|
||||
_ -> maybe_omit_next(pagination, items, nil)
|
||||
end
|
||||
end
|
||||
|
||||
defp maybe_omit_next(pagination, items, limit) when is_number(limit) do
|
||||
if Enum.count(items) < limit, do: Map.delete(pagination, "next"), else: pagination
|
||||
end
|
||||
|
||||
def collection_page_keyset(
|
||||
display_items,
|
||||
pagination,
|
||||
limit \\ nil,
|
||||
skip_ap_context \\ false
|
||||
) do
|
||||
%{
|
||||
"type" => "OrderedCollectionPage",
|
||||
"orderedItems" => display_items
|
||||
}
|
||||
|> Map.merge(pagination)
|
||||
|> maybe_omit_next(display_items, limit)
|
||||
|> then(fn m ->
|
||||
if skip_ap_context, do: m, else: Map.merge(m, Utils.make_json_ld_header())
|
||||
end)
|
||||
end
|
||||
end
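A sketch of what collection_page_keyset/4 returns; the item IDs and pagination values are made up.

CollectionViewHelper.collection_page_keyset(
  ["https://local.example/objects/2", "https://local.example/objects/3"],
  %{
    "id" => "https://local.example/objects/1/replies?page=true",
    "next" => "https://local.example/objects/1/replies?page=true&min_id=3"
  },
  "40"
)
# => an "OrderedCollectionPage" with those two orderedItems and the pagination
#    fields merged in, except "next" is dropped (2 items < the limit of 40),
#    plus the JSON-LD @context since skip_ap_context defaults to false.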
|
||||
|
|
@ -6,7 +6,10 @@ defmodule Pleroma.Web.ActivityPub.ObjectView do
|
|||
use Pleroma.Web, :view
|
||||
alias Pleroma.Activity
|
||||
alias Pleroma.Object
|
||||
alias Pleroma.Web.ActivityPub.ActivityPub
|
||||
alias Pleroma.Web.ActivityPub.CollectionViewHelper
|
||||
alias Pleroma.Web.ActivityPub.Transmogrifier
|
||||
alias Pleroma.Web.ControllerHelper
|
||||
|
||||
def render("object.json", %{object: %Object{} = object}) do
|
||||
base = Pleroma.Web.ActivityPub.Utils.make_json_ld_header(object.data)
|
||||
|
|
@ -15,26 +18,94 @@ defmodule Pleroma.Web.ActivityPub.ObjectView do
|
|||
Map.merge(base, additional)
|
||||
end
|
||||
|
||||
def render("object.json", %{object: %Activity{data: %{"type" => activity_type}} = activity})
|
||||
when activity_type in ["Create", "Listen"] do
|
||||
base = Pleroma.Web.ActivityPub.Utils.make_json_ld_header(activity.data)
|
||||
object = Object.normalize(activity, fetch: false)
|
||||
|
||||
additional =
|
||||
Transmogrifier.prepare_object(activity.data)
|
||||
|> Map.put("object", Transmogrifier.prepare_object(object.data))
|
||||
|
||||
Map.merge(base, additional)
|
||||
def render("object.json", %{object: %Activity{} = activity}) do
|
||||
{:ok, ap_data} = Transmogrifier.prepare_outgoing(activity.data)
|
||||
ap_data
|
||||
end
|
||||
|
||||
def render("object.json", %{object: %Activity{} = activity}) do
|
||||
base = Pleroma.Web.ActivityPub.Utils.make_json_ld_header(activity.data)
|
||||
object_id = Object.normalize(activity, id_only: true)
|
||||
def render("object_replies.json", %{
|
||||
conn: conn,
|
||||
render_params: %{object_ap_id: object_ap_id, page: "true"} = params
|
||||
}) do
|
||||
params = Map.put_new(params, :limit, 40)
|
||||
|
||||
additional =
|
||||
Transmogrifier.prepare_object(activity.data)
|
||||
|> Map.put("object", object_id)
|
||||
items = ActivityPub.fetch_objects_for_replies_collection(object_ap_id, params)
|
||||
display_items = map_reply_collection_items(items)
|
||||
|
||||
Map.merge(base, additional)
|
||||
pagination = ControllerHelper.get_pagination_fields(conn, items, %{}, :asc)
|
||||
|
||||
CollectionViewHelper.collection_page_keyset(display_items, pagination, params[:limit])
|
||||
end
|
||||
|
||||
def render(
|
||||
"object_replies.json",
|
||||
%{
|
||||
render_params: %{object_ap_id: object_ap_id} = params
|
||||
} = opts
|
||||
) do
|
||||
params =
|
||||
params
|
||||
|> Map.drop([:max_id, :min_id, :since_id, :object_ap_id])
|
||||
|> Map.put_new(:limit, 40)
|
||||
|> Map.put(:total, true)
|
||||
|
||||
%{total: total, items: items} =
|
||||
ActivityPub.fetch_objects_for_replies_collection(object_ap_id, params)
|
||||
|
||||
display_items = map_reply_collection_items(items)
|
||||
|
||||
first_pagination = reply_collection_first_pagination(items, opts)
|
||||
|
||||
col_ap =
|
||||
%{
|
||||
"id" => object_ap_id <> "/replies",
|
||||
"type" => "OrderedCollection",
|
||||
"totalItems" => total
|
||||
}
|
||||
|
||||
col_ap =
|
||||
if total > 0 do
|
||||
first_page =
|
||||
CollectionViewHelper.collection_page_keyset(
|
||||
display_items,
|
||||
first_pagination,
|
||||
params[:limit],
|
||||
true
|
||||
)
|
||||
|
||||
Map.put(col_ap, "first", first_page)
|
||||
else
|
||||
col_ap
|
||||
end
|
||||
|
||||
if params[:skip_ap_ctx] do
|
||||
col_ap
|
||||
else
|
||||
Map.merge(col_ap, Pleroma.Web.ActivityPub.Utils.make_json_ld_header())
|
||||
end
|
||||
end
|
||||
|
||||
defp map_reply_collection_items(items), do: Enum.map(items, fn %{ap_id: ap_id} -> ap_id end)
|
||||
|
||||
defp reply_collection_first_pagination(items, %{conn: %Plug.Conn{} = conn}) do
|
||||
pagination = ControllerHelper.get_pagination_fields(conn, items, %{"page" => true}, :asc)
|
||||
Map.put(pagination, "id", Phoenix.Controller.current_url(conn, %{"page" => true}))
|
||||
end
|
||||
|
||||
defp reply_collection_first_pagination(items, %{render_params: %{object_ap_id: object_ap_id}}) do
|
||||
%{
|
||||
"id" => object_ap_id <> "/replies?page=true",
|
||||
"partOf" => object_ap_id <> "/replies"
|
||||
}
|
||||
|> then(fn m ->
|
||||
case items do
|
||||
[] ->
|
||||
m
|
||||
|
||||
i ->
|
||||
next_id = object_ap_id <> "/replies?page=true&min_id=#{List.last(i)[:id]}"
|
||||
Map.put(m, "next", next_id)
|
||||
end
|
||||
end)
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -9,6 +9,7 @@ defmodule Pleroma.Web.ActivityPub.UserView do
|
|||
alias Pleroma.Object
|
||||
alias Pleroma.Repo
|
||||
alias Pleroma.User
|
||||
alias Pleroma.Web.ActivityPub.CollectionViewHelper
|
||||
alias Pleroma.Web.ActivityPub.ObjectView
|
||||
alias Pleroma.Web.ActivityPub.Transmogrifier
|
||||
alias Pleroma.Web.ActivityPub.Utils
|
||||
|
|
@ -164,7 +165,13 @@ defmodule Pleroma.Web.ActivityPub.UserView do
|
|||
0
|
||||
end
|
||||
|
||||
collection(following, "#{user.ap_id}/following", page, showing_items, total)
|
||||
CollectionViewHelper.collection_page_offset(
|
||||
following,
|
||||
"#{user.ap_id}/following",
|
||||
page,
|
||||
showing_items,
|
||||
total
|
||||
)
|
||||
|> Map.merge(Utils.make_json_ld_header())
|
||||
end
|
||||
|
||||
|
|
@ -189,7 +196,12 @@ defmodule Pleroma.Web.ActivityPub.UserView do
|
|||
"totalItems" => total,
|
||||
"first" =>
|
||||
if showing_items do
|
||||
collection(following, "#{user.ap_id}/following", 1, !user.hide_follows)
|
||||
CollectionViewHelper.collection_page_offset(
|
||||
following,
|
||||
"#{user.ap_id}/following",
|
||||
1,
|
||||
!user.hide_follows
|
||||
)
|
||||
else
|
||||
"#{user.ap_id}/following?page=1"
|
||||
end
|
||||
|
|
@ -212,7 +224,13 @@ defmodule Pleroma.Web.ActivityPub.UserView do
|
|||
0
|
||||
end
|
||||
|
||||
collection(followers, "#{user.ap_id}/followers", page, showing_items, total)
|
||||
CollectionViewHelper.collection_page_offset(
|
||||
followers,
|
||||
"#{user.ap_id}/followers",
|
||||
page,
|
||||
showing_items,
|
||||
total
|
||||
)
|
||||
|> Map.merge(Utils.make_json_ld_header())
|
||||
end
|
||||
|
||||
|
|
@ -236,7 +254,12 @@ defmodule Pleroma.Web.ActivityPub.UserView do
|
|||
"type" => "OrderedCollection",
|
||||
"first" =>
|
||||
if showing_items do
|
||||
collection(followers, "#{user.ap_id}/followers", 1, showing_items, total)
|
||||
CollectionViewHelper.collection_page_offset(
|
||||
followers,
|
||||
"#{user.ap_id}/followers",
|
||||
1,
|
||||
showing_items
|
||||
)
|
||||
else
|
||||
"#{user.ap_id}/followers?page=1"
|
||||
end
|
||||
|
|
@ -256,7 +279,6 @@ defmodule Pleroma.Web.ActivityPub.UserView do
|
|||
|
||||
def render("activity_collection_page.json", %{
|
||||
activities: activities,
|
||||
iri: iri,
|
||||
pagination: pagination
|
||||
}) do
|
||||
collection =
|
||||
|
|
@ -265,13 +287,7 @@ defmodule Pleroma.Web.ActivityPub.UserView do
|
|||
data
|
||||
end)
|
||||
|
||||
%{
|
||||
"type" => "OrderedCollectionPage",
|
||||
"partOf" => iri,
|
||||
"orderedItems" => collection
|
||||
}
|
||||
|> Map.merge(Utils.make_json_ld_header())
|
||||
|> Map.merge(pagination)
|
||||
CollectionViewHelper.collection_page_keyset(collection, pagination)
|
||||
end
|
||||
|
||||
def render("featured.json", %{
|
||||
|
|
@ -299,27 +315,6 @@ defmodule Pleroma.Web.ActivityPub.UserView do
|
|||
Map.put(map, "totalItems", total)
|
||||
end
|
||||
|
||||
def collection(collection, iri, page, show_items \\ true, total \\ nil) do
|
||||
offset = (page - 1) * 10
|
||||
items = Enum.slice(collection, offset, 10)
|
||||
items = Enum.map(items, fn user -> user.ap_id end)
|
||||
total = total || length(collection)
|
||||
|
||||
map = %{
|
||||
"id" => "#{iri}?page=#{page}",
|
||||
"type" => "OrderedCollectionPage",
|
||||
"partOf" => iri,
|
||||
"totalItems" => total,
|
||||
"orderedItems" => if(show_items, do: items, else: [])
|
||||
}
|
||||
|
||||
if offset < total do
|
||||
Map.put(map, "next", "#{iri}?page=#{page + 1}")
|
||||
else
|
||||
map
|
||||
end
|
||||
end
|
||||
|
||||
defp maybe_make_image(func, description, key, user) do
|
||||
if image = func.(user, no_default: true) do
|
||||
%{
|
||||
|
|
|
|||
|
|
@ -73,6 +73,22 @@ defmodule Pleroma.Web.ActivityPub.Visibility do
|
|||
|> Pleroma.List.member?(user)
|
||||
end
|
||||
|
||||
def visible_for_user?(%Activity{object: %Object{} = object} = activity, nil) do
|
||||
activity_visibility? = restrict_unauthenticated_access?(activity)
|
||||
activity_public? = public?(activity) and not local_public?(activity)
|
||||
object_visibility? = restrict_unauthenticated_access?(object)
|
||||
object_public? = public?(object) and not local_public?(object)
|
||||
|
||||
# Activity could be local, but object might not (Announce/Like)
|
||||
cond do
|
||||
activity_visibility? or object_visibility? ->
|
||||
false
|
||||
|
||||
true ->
|
||||
activity_public? and object_public?
|
||||
end
|
||||
end
|
||||
|
||||
def visible_for_user?(%{__struct__: module} = message, nil)
|
||||
when module in [Activity, Object] do
|
||||
if restrict_unauthenticated_access?(message),
|
||||
|
|
|
|||
|
|
@ -240,6 +240,10 @@ defmodule Pleroma.Web.AdminAPI.AdminAPIController do
|
|||
render_error(conn, :not_found, "No such permission_group")
|
||||
end
|
||||
|
||||
def right_delete(%{assigns: %{user: %{nickname: nickname}}} = conn, %{"nickname" => nickname}) do
|
||||
render_error(conn, :forbidden, "You can't revoke your own admin status.")
|
||||
end
|
||||
|
||||
def right_delete(
|
||||
%{assigns: %{user: admin}} = conn,
|
||||
%{
|
||||
|
|
@ -265,10 +269,6 @@ defmodule Pleroma.Web.AdminAPI.AdminAPIController do
|
|||
json(conn, fields)
|
||||
end
|
||||
|
||||
def right_delete(%{assigns: %{user: %{nickname: nickname}}} = conn, %{"nickname" => nickname}) do
|
||||
render_error(conn, :forbidden, "You can't revoke your own admin status.")
|
||||
end
|
||||
|
||||
@doc "Get a password reset token (base64 string) for given nickname"
|
||||
def get_password_reset(conn, %{"nickname" => nickname}) do
|
||||
(%User{local: true} = user) = User.get_cached_by_nickname(nickname)
|
||||
|
|
@ -335,13 +335,13 @@ defmodule Pleroma.Web.AdminAPI.AdminAPIController do
|
|||
|
||||
if params["password"] do
|
||||
User.force_password_reset_async(user)
|
||||
end
|
||||
|
||||
ModerationLog.insert_log(%{
|
||||
actor: admin,
|
||||
subject: [user],
|
||||
action: "force_password_reset"
|
||||
})
|
||||
ModerationLog.insert_log(%{
|
||||
actor: admin,
|
||||
subject: [user],
|
||||
action: "force_password_reset"
|
||||
})
|
||||
end
|
||||
|
||||
json(conn, %{status: "success"})
|
||||
else
|
||||
|
|
|
|||
Some files were not shown because too many files have changed in this diff.