Merge remote-tracking branch 'origin/develop' into gitlab-mr-iid-4161

This commit is contained in:
Mark Felder 2026-03-25 13:31:07 -07:00
commit 7cc9ba6f06
1296 changed files with 27633 additions and 7387 deletions

BIN
test/fixtures/DSCN0010.png vendored Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 744 KiB

View file

@ -0,0 +1,34 @@
{
"@context": [
"https://www.w3.org/ns/activitystreams",
"https://w3id.org/security/v1",
"https://www.w3.org/ns/did/v1",
"https://w3id.org/security/multikey/v1",
{
"Hashtag": "as:Hashtag"
}
],
"attributedTo": "https://mymath.rocks/endpoints/SYn3cl_N4HAPfPHgo2x37XunLEmhV9LnxCggcYwyec0",
"cc": [
"https://mymath.rocks/endpoints/30zoCe7haKBEFolH4rbAmKj-t9_bG0c2X2kMQkJk5qY",
"https://mastodon.social/users/nikclayton"
],
"content": "<blockquote class=\"h-quote\">\n<p>I note that mymath.rocks does not provide this information.</p>\n</blockquote>\n<p>I&#39;m a big believer in &quot;Do as I say, not as I <strong>did</strong>&quot;.</p>\n<p>I could give a long list of technical reasons, which boil down to: nodeinfo is pretty nonsensical with the way I write stuff.</p>\n<p>I think the above statement also addresses a main failure of the Fediverse. People, e.g. me, would love to fix stuff. Unfortunately, we lack the focus to address a lot of issues, e.g. nodeinfo sucks. So stuff gets done in a broken way.</p>\n<p>I think the main challenge the Fediverse has faced, and failed at, is avoiding the above situation. To continue the example, there is no way for somebody to say: Let&#39;s fix nodeinfo and people will follow their ideas.</p>\n",
"id": "https://mymath.rocks/objects/2b89e564-30cf-4eeb-97ca-7e638a154026",
"inReplyTo": "https://mastodon.social/users/nikclayton/statuses/115496665258618127",
"likes": "https://mymath.rocks/objects/2b89e564-30cf-4eeb-97ca-7e638a154026/likes",
"published": "2025-11-06T08:21:17.790Z",
"replies": "https://mymath.rocks/objects/2b89e564-30cf-4eeb-97ca-7e638a154026/replies",
"shares": "https://mymath.rocks/objects/2b89e564-30cf-4eeb-97ca-7e638a154026/shares",
"source": {
"content": "> I note that mymath.rocks does not provide this information.\n\nI'm a big believer in \"Do as I say, not as I __did__\".\n\nI could give a long list of technical reasons, which boil down to: nodeinfo is pretty nonsensical with the way I write stuff.\n\nI think the above statement also addresses a main failure of the Fediverse. People, e.g. me, would love to fix stuff. Unfortunately, we lack the focus to address a lot of issues, e.g. nodeinfo sucks. So stuff gets done in a broken way.\n\nI think the main challenge the Fediverse has faced, and failed at, is avoiding the above situation. To continue the example, there is no way for somebody to say: Let's fix nodeinfo and people will follow their ideas.",
"mediaType": "text/markdown"
},
"tag": {
"href": "https://mastodon.social/users/nikclayton",
"name": "https://mastodon.social/users/nikclayton",
"type": "Mention"
},
"to": "as:Public",
"type": "Note"
}

BIN
test/fixtures/break_analyze.png vendored Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 360 KiB

112
test/fixtures/denniskoch.json vendored Normal file
View file

@ -0,0 +1,112 @@
{
"@context": [
"https://www.w3.org/ns/activitystreams",
"https://w3id.org/security/v1",
{
"Curve25519Key": "toot:Curve25519Key",
"Device": "toot:Device",
"Ed25519Key": "toot:Ed25519Key",
"Ed25519Signature": "toot:Ed25519Signature",
"EncryptedMessage": "toot:EncryptedMessage",
"PropertyValue": "schema:PropertyValue",
"alsoKnownAs": {
"@id": "as:alsoKnownAs",
"@type": "@id"
},
"cipherText": "toot:cipherText",
"claim": {
"@id": "toot:claim",
"@type": "@id"
},
"deviceId": "toot:deviceId",
"devices": {
"@id": "toot:devices",
"@type": "@id"
},
"discoverable": "toot:discoverable",
"featured": {
"@id": "toot:featured",
"@type": "@id"
},
"featuredTags": {
"@id": "toot:featuredTags",
"@type": "@id"
},
"fingerprintKey": {
"@id": "toot:fingerprintKey",
"@type": "@id"
},
"focalPoint": {
"@container": "@list",
"@id": "toot:focalPoint"
},
"identityKey": {
"@id": "toot:identityKey",
"@type": "@id"
},
"indexable": "toot:indexable",
"manuallyApprovesFollowers": "as:manuallyApprovesFollowers",
"memorial": "toot:memorial",
"messageFranking": "toot:messageFranking",
"messageType": "toot:messageType",
"movedTo": {
"@id": "as:movedTo",
"@type": "@id"
},
"publicKeyBase64": "toot:publicKeyBase64",
"schema": "http://schema.org#",
"suspended": "toot:suspended",
"toot": "http://joinmastodon.org/ns#",
"value": "schema:value"
}
],
"attachment": [
{
"name": "GitHub",
"type": "PropertyValue",
"value": "<a href=\"https://github.com/pxlrbt/\" target=\"_blank\" rel=\"nofollow noopener noreferrer me\" translate=\"no\"><span class=\"invisible\">https://</span><span class=\"\">github.com/pxlrbt/</span><span class=\"invisible\"></span></a>"
},
{
"name": "Discord",
"type": "PropertyValue",
"value": "pxlrbt#6029"
}
],
"devices": "https://phpc.social/users/denniskoch/collections/devices",
"discoverable": true,
"endpoints": {
"sharedInbox": "https://phpc.social/inbox"
},
"featured": "https://phpc.social/users/denniskoch/collections/featured",
"featuredTags": "https://phpc.social/users/denniskoch/collections/tags",
"followers": "https://phpc.social/users/denniskoch/followers",
"following": "https://phpc.social/users/denniskoch/following",
"icon": {
"mediaType": "image/jpeg",
"type": "Image",
"url": "https://media.phpc.social/accounts/avatars/109/364/097/179/042/485/original/6e770c7b3f5ef72d.jpg"
},
"id": "https://phpc.social/users/denniskoch",
"image": {
"mediaType": "image/jpeg",
"type": "Image",
"url": "https://media.phpc.social/accounts/headers/109/364/097/179/042/485/original/709da24705260c04.jpg"
},
"inbox": "https://phpc.social/users/denniskoch/inbox",
"indexable": true,
"manuallyApprovesFollowers": false,
"memorial": false,
"name": "Dennis Koch",
"outbox": "https://phpc.social/users/denniskoch/outbox",
"preferredUsername": "denniskoch",
"publicKey": {
"id": "https://phpc.social/users/denniskoch#main-key",
"owner": "https://phpc.social/users/denniskoch",
"publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4dmcSlqLj18gPvuslkmt\nQTniZ8ybO4pgvMvPLYtBuTBUjo49vJ/8Sw6jB5zcKb1haqIdny7Rv/vY3kCdCXcP\nloh1I+jthEgqLT8JpZWGwLGwg9piFhrMGADmt3N8du7HfglzuZ8LlVpnZ8feCw7I\nS2ua/ZCxE47mI45Z3ed2kkFYKWopWWqFn2lan/1OyHrcFKtCvaVjRdvo0UUt2tgl\nvyJI4+zN8FnrCbsMtcbI5nSzfJIrOc4LeaGmLJh+0o2rwoOQZc2487XWbeyfhjsq\nPRBpYN7pfHWQDvzQIN075LHTf9zDFsm6+HqY7Zs5rYxr72rvcX7d9JcP6CasIosY\nqwIDAQAB\n-----END PUBLIC KEY-----\n"
},
"published": "2022-11-18T00:00:00Z",
"summary": "<p>🧑‍💻 Full Stack Developer<br />🚀 Laravel, Filament, Livewire, Vue, Inertia<br />🌍 Germany</p>",
"tag": [],
"type": "Person",
"url": "https://phpc.social/@denniskoch"
}

View file

@ -0,0 +1,76 @@
{
"@context": [
"https://www.w3.org/ns/activitystreams",
"https://w3id.org/security/v1",
{
"Hashtag": "as:Hashtag",
"PropertyValue": "schema:PropertyValue",
"conversation": "ostatus:conversation",
"dfrn": "http://purl.org/macgirvin/dfrn/1.0/",
"diaspora": "https://diasporafoundation.org/ns/",
"directMessage": "litepub:directMessage",
"discoverable": "toot:discoverable",
"featured": {
"@id": "toot:featured",
"@type": "@id"
},
"litepub": "http://litepub.social/ns#",
"manuallyApprovesFollowers": "as:manuallyApprovesFollowers",
"ostatus": "http://ostatus.org#",
"quoteUrl": "as:quoteUrl",
"schema": "http://schema.org#",
"sensitive": "as:sensitive",
"toot": "http://joinmastodon.org/ns#",
"value": "schema:value",
"vcard": "http://www.w3.org/2006/vcard/ns#"
}
],
"actor": "https://my-place.social/profile/vaartis",
"cc": [
"https://my-place.social/followers/vaartis"
],
"id": "https://my-place.social/objects/e599373b-1368-4b20-cd24-837166957182/Undo",
"instrument": {
"id": "https://my-place.social/friendica",
"name": "Friendica 'Interrupted Fern' 2024.12-1576",
"type": "Application",
"url": "https://my-place.social"
},
"object": {
"actor": "https://my-place.social/profile/vaartis",
"cc": [
"https://my-place.social/followers/vaartis"
],
"diaspora:guid": "e599373b-1968-4b20-cd24-80d340160302",
"diaspora:like": "{\"author\":\"vaartis@my-place.social\",\"guid\":\"e599373b-1968-4b20-cd24-80d340160302\",\"parent_guid\":\"cd36feba-c31f3ed3fd5c064a-17c31593\",\"parent_type\":\"Post\",\"positive\":\"false\",\"author_signature\":\"xR2zLJNfc9Nhx1n8LLMWM1kde12my4cqamIsrH\\/UntKzuDwO4DuHBL0fkFhgC\\/dylxm4HqsHD45MQbtwQCVGq6WhC96TrbMuYEK61HIO23dTr3m+qJVtfdH4wyhUNHgiiYPhZpkLDfnR1JiRWmFTlmZC8q8JEkOB5IQsrWia2eOR6IsqDcdKO\\/Urgns9\\/BdQi8KnchBKSEFc1iUtcOEruvhunKGyW5zI\\/Rltfdz3xGH8tlw+YlMXeWXPnqgOJ9GzNA0lwG4U421L6yylYagW7oxIznnBLB4bO46vYZbgXZV1hiI9ZyveHOinLMY1QkmTj5CNvnx3\\/VJwLovd0v+0Nr2vu\\/3ftbpBXc6L1bsNjlRqtsfwJlcgl+tH1DC4W8tKf+Y3tdtzVw0CHXCuacxHLyq5wZd\\/5YfYR9SJQ\\/jInU4PHA5+hIE3PGqNUp5QfFE0umq56H7MQKsIPgM5mMV4fPAA8OpltuMVDvQYUxalrnvoTf00k90x1wCTK71\\/jQGh7r7PmGvSdfPr+65eVTjITD8\\/lTGIb8850v1fl3\\/i2R8Dv17jQIRyX5o9MXPSO6jHo4Swma5RzPA\\/0bRj6qRTyPkM1L9qEIr+2H2I7KKhT2ZE5GhAU7yI9A3VLBWzpTrUPMGbfpd1OjVTEqXAdMjpLDYI3Mh5zQ58p8xCzt+W+t0=\"}",
"id": "https://my-place.social/objects/e599373b-1368-4b20-cd24-837166957182",
"instrument": {
"id": "https://my-place.social/friendica",
"name": "Friendica 'Interrupted Fern' 2024.12-1576",
"type": "Application",
"url": "https://my-place.social"
},
"object": "https://pl.kotobank.ch/objects/301bce65-8a1b-4c49-a65c-fe2ce861a213",
"published": "2025-06-12T18:47:41Z",
"to": [
"https://pl.kotobank.ch/users/vaartis",
"https://mitra.social/users/silverpill",
"https://www.w3.org/ns/activitystreams#Public"
],
"type": "Dislike"
},
"published": "2025-06-12T18:41:25Z",
"signature": {
"created": "2025-06-12T18:44:16Z",
"creator": "https://my-place.social/profile/vaartis#main-key",
"nonce": "2d67847d4bd4b1b83a30d61eac6cdc7ad6b980df06a8b9b97217e1d8f7b6cf20",
"signatureValue": "LnoRMZuQGDvTICkShGBq28ynaj2lF1bViJFGS6n4gKn3IbxPWATHxao43gxWRc+HCTrHNg7quzgaW4+PYM7UVUz3jO+bjNKsN845nijOVdyFrPOXbuaij3KQh2OoHhFJWoV/ZQQTFF0kRK1qT4BwG+P8NqOOKAMv+Cw7ruQH+f2w7uDgcNIbCD1gLcwb6cw7WVe5qu8yMkKqp2kBdqW3RCsI85RmmFgwehDgH5nrX7ER1qbeLWrqy7echwD9/fO3rqAu13xDNyiGZHDT7JB3RUt0AyMm0XCfjbwSQ0n+MkYXgE4asvFz81+iiPCLt+6gePWAFc5odF1FxdySBpSuUOs4p92NzP9OhQ0c0qrqrzYI7aYklY7oMfxjkva+X+0bm3up+2IRJdnZa/pXlmwdcqTpyMr1sgzaexMUNBp3dq7zA51eEaakLDX3i2onXJowfmze3+6XgPAFHYamR+pRNtuEoY4uyYEK3fj5GgwJ4RtFJMYVoEs/Q8h3OgYRcK1FE9UlDjSqbQ7QIRn2Ib4wjgmkeM0vrHIwh/1CtqA/M/6WuYFzCaJBc8O9ykpK9ZMbw64ToQXKf2SqhZsDoyTWRWTO1PXOk1XCAAElUh8/WCyeghvgqLXn0LHov4lmBsHA5iMUcLqBKD3GJIHd+ExrOFxMZs4mBLLGyz0p5joJ3NY=",
"type": "RsaSignature2017"
},
"to": [
"https://pl.kotobank.ch/users/vaartis",
"https://mitra.social/users/silverpill",
"https://www.w3.org/ns/activitystreams#Public"
],
"type": "Undo"
}

56
test/fixtures/friendica-dislike.json vendored Normal file
View file

@ -0,0 +1,56 @@
{
"@context": [
"https://www.w3.org/ns/activitystreams",
"https://w3id.org/security/v1",
{
"Hashtag": "as:Hashtag",
"PropertyValue": "schema:PropertyValue",
"conversation": "ostatus:conversation",
"dfrn": "http://purl.org/macgirvin/dfrn/1.0/",
"diaspora": "https://diasporafoundation.org/ns/",
"directMessage": "litepub:directMessage",
"discoverable": "toot:discoverable",
"featured": {
"@id": "toot:featured",
"@type": "@id"
},
"litepub": "http://litepub.social/ns#",
"manuallyApprovesFollowers": "as:manuallyApprovesFollowers",
"ostatus": "http://ostatus.org#",
"quoteUrl": "as:quoteUrl",
"schema": "http://schema.org#",
"sensitive": "as:sensitive",
"toot": "http://joinmastodon.org/ns#",
"value": "schema:value",
"vcard": "http://www.w3.org/2006/vcard/ns#"
}
],
"actor": "https://my-place.social/profile/vaartis",
"cc": [
"https://my-place.social/followers/vaartis"
],
"diaspora:guid": "e599373b-1968-4b20-cd24-80d340160302",
"diaspora:like": "{\"author\":\"vaartis@my-place.social\",\"guid\":\"e599373b-1968-4b20-cd24-80d340160302\",\"parent_guid\":\"cd36feba-c31f3ed3fd5c064a-17c31593\",\"parent_type\":\"Post\",\"positive\":\"false\",\"author_signature\":\"xR2zLJNfc9Nhx1n8LLMWM1kde12my4cqamIsrH\\/UntKzuDwO4DuHBL0fkFhgC\\/dylxm4HqsHD45MQbtwQCVGq6WhC96TrbMuYEK61HIO23dTr3m+qJVtfdH4wyhUNHgiiYPhZpkLDfnR1JiRWmFTlmZC8q8JEkOB5IQsrWia2eOR6IsqDcdKO\\/Urgns9\\/BdQi8KnchBKSEFc1iUtcOEruvhunKGyW5zI\\/Rltfdz3xGH8tlw+YlMXeWXPnqgOJ9GzNA0lwG4U421L6yylYagW7oxIznnBLB4bO46vYZbgXZV1hiI9ZyveHOinLMY1QkmTj5CNvnx3\\/VJwLovd0v+0Nr2vu\\/3ftbpBXc6L1bsNjlRqtsfwJlcgl+tH1DC4W8tKf+Y3tdtzVw0CHXCuacxHLyq5wZd\\/5YfYR9SJQ\\/jInU4PHA5+hIE3PGqNUp5QfFE0umq56H7MQKsIPgM5mMV4fPAA8OpltuMVDvQYUxalrnvoTf00k90x1wCTK71\\/jQGh7r7PmGvSdfPr+65eVTjITD8\\/lTGIb8850v1fl3\\/i2R8Dv17jQIRyX5o9MXPSO6jHo4Swma5RzPA\\/0bRj6qRTyPkM1L9qEIr+2H2I7KKhT2ZE5GhAU7yI9A3VLBWzpTrUPMGbfpd1OjVTEqXAdMjpLDYI3Mh5zQ58p8xCzt+W+t0=\"}",
"id": "https://my-place.social/objects/e599373b-1368-4b20-cd24-837166957182",
"instrument": {
"id": "https://my-place.social/friendica",
"name": "Friendica 'Interrupted Fern' 2024.12-1576",
"type": "Application",
"url": "https://my-place.social"
},
"object": "https://pl.kotobank.ch/objects/301bce65-8a1b-4c49-a65c-fe2ce861a213",
"published": "2025-06-12T18:47:41Z",
"signature": {
"created": "2025-06-12T18:47:42Z",
"creator": "https://my-place.social/profile/vaartis#main-key",
"nonce": "84e496f80b09d7a299c5cc89e8cadd13abf621b3a0a321684fa74278b68a6dd8",
"signatureValue": "qe2WxY+j7daIYLRadCctgal6A1s9XgoiMfM/8KjJm15w0sSizYYqruyQ5gS44e+cj5GHc9v5gP2ieod5v7eHAPzlcDI4bfkcyHVapAXTqU67ZebW+v6Q+21IMDgqrkYCv5TbV7LTxltW59dlqovpHE4TEe/M7xLKWJ3vVchRUcWqH9kDmak0nacoqYVAb5E9jYnQhUWPTCfPV82qQpeWQPOZ4iIvPw6rDkSSY5jL6bCogBZblHGpUjXfe/FPlacaCWiTQdoga3yOBXB1RYPw9nh5FI5Xkv/oi+52WmJrECinlD6AL8/BpiYvKz236zy7p/TR4BXlCx9RR/msjOnSabkQ4kmYFrRr80UDCGF+CdkdzLl8K9rSE3PbF1+nEqD7X0GOWn/DdtixqXJw6IR4bh32YW2SlcrSRBvI1p82Mv68BeqRaYqL6FAhKFwLhX5JpXngZ3k0g7rWWxc498voPWnFZDyCTRNxO9VIIUavDDEQ0BdFk6WDb8zx9tsAg8JoK57eVDcFly7tfVQffYiHpve06d8ag1DtzipqguRsURmuqpGNMq28XBTxwtrP2LnXXHKxoYN/YQ9cDnCKclbx7/uKmOVMLkLZlM0wAVoZpm5z2fG4voKqFiGZ1PoiFY2sN4URMArJtygV3PlTX4ASAQrak0ksvEo9msrBUD0Su9c=",
"type": "RsaSignature2017"
},
"to": [
"https://pl.kotobank.ch/users/vaartis",
"https://mitra.social/users/silverpill",
"https://www.w3.org/ns/activitystreams#Public"
],
"type": "Dislike"
}

151
test/fixtures/fulmo.html vendored Normal file
View file

@ -0,0 +1,151 @@
<!DOCTYPE html>
<html lang='eo'>
<head>
<meta charset='utf-8'/>
<meta name='author' content='Tirifto'/>
<meta name='generator' content='Pageling'/>
<meta name='viewport' content='width=device-width,
height=device-height,
initial-scale=1.0'/>
<link rel='stylesheet' type='text/css' href='/r/stiloj/tiriftejo.css'/>
<link rel='alternate' type='application/atom+xml' href='/eo/novajhoj.atom'/>
<link rel='icon' size='16x16' type='image/vnd.microsoft.icon' href='/favicon.ico'/>
<link rel='icon' size='128x128' type='image/png' href='/icon.png'/>
<link rel='alternate' hreflang='eo' href='https://tirifto.xwx.moe/eo/rakontoj/fulmo.html'/>
<title>Fulmo</title>
<meta property='og:title' content='Fulmo'/>
<meta property='og:type' content='website'/>
<meta property='og:url' content='https://tirifto.xwx.moe/eo/rakontoj/fulmo.html'/>
<meta property='og:site_name' content='Tiriftejo'/>
<meta property='og:locale' content='eo'/>
<meta property='og:description' content='Pri feoj, kiuj devis ordigi falintan arbon.'/>
<meta property='og:image' content='https://tirifto.xwx.moe/r/ilustrajhoj/pinglordigado.png'/>
<meta property='og:image:alt' content='Meze de arbaro kuŝas falinta trunko, sen pingloj kaj kun branĉoj derompitaj. Post ĝi videblas du feoj: florofeo maldekstre kaj nubofeo dekstre. La florofeo iom kaŝas sin post la trunko. La nubofeo staras kaj tenas amason da pigloj. Ili iom rigardas al si.'/>
<meta property='og:image:height' content='630'/>
<meta property='og:image:width' content='1200'/>
<meta property='og:image' content='https://tirifto.xwx.moe/r/opengraph/eo.png'/>
<meta property='og:image:alt' content='La tirifta okulo ĉirkaŭita de ornamaj steloj kaj la teksto: »Tiriftejo. Esperanto.«'/>
<meta property='og:image:height' content='630'/>
<meta property='og:image:width' content='1200'/>
</head>
<body>
<header id='website-header'>
<nav id='website-navigation'>
<input type='checkbox' id='website-navigation-toggle'
aria-description='Montri ligilojn al ĉefaj paĝoj de la retejo.'/>
<label for='website-navigation-toggle'>Paĝoj</label>
<a href='/eo/verkoj.html'>Verkoj</a>
<a href='/eo/novajhoj.html'>Novaĵoj</a>
<a href='/eo/donacoj.html'>Donacoj</a>
<a href='/eo/prio.html'>Prio</a>
<a href='/eo/amikoj.html'>Amikoj</a>
<a href='/eo/kontakto.html'>Kontakto</a>
</nav>
<span id='eye' role='img' aria-label=''></span>
<nav id='language-switcher'
aria-roledescription='lingvo-ŝanĝilo'>
<input type='checkbox' id='language-switcher-toggle'
aria-description='Montri ligilojn al tradukoj de tiu ĉi paĝo.'/>
<label for='language-switcher-toggle'>Lingvoj</label>
<a href='fulmo.html' lang='eo' hreflang='eo'><img aria-hidden='true' alt='' src='/r/flagoj/eo.png'/>Esperanto</a>
</nav>
</header>
<div class='bodier'>
<nav id='work-links'>
<a href='.'>Ceteraj rakontoj</a>
<a href='../bildosignoj'>Bildosignoj</a>
<a href='../eseoj'>Eseoj</a>
<a href='../ludoj'>Ludoj</a>
<a href='../poemoj'>Poemoj</a>
<a href='../vortaroj'>Vortaroj</a>
</nav>
<main>
<article>
<header>
<h1>Fulmo</h1>
<p>Skribis Tirifto</p>
<time datetime='2025-01-31'>2025-01-31</time>
</header>
<p>»Kial ĉiam mi? Tio ne justas! Oni kulpigas min, sed ja ne mi kulpas!« La nubofeo lamentis, dum ĝi ordigis restaĵojn de falinta arbo. Plejparto el la pingloj estis brulintaj, kaj el la trunko ankoraŭ leviĝis fumo.</p>
<p>Subite aŭdeblis ekstraj kraketoj deapude. Ĝi rigardis flanken, kaj vidis iun kaŭri apud la arbo, derompi branĉetojn, kaj orde ilin amasigi. Ŝajnis, ke ekde sia rimarkiĝo, la nekonatulo laŭeble kuntiriĝis, kaj strebis labori kiel eble plej silente.</p>
<p>»Saluton…?« La nubofeo stariĝis, alporolante la eston. Tiu kvazaŭ frostiĝis, sed timeme ankaŭ stariĝis.</p>
<p>»S- Saluton…« Ĝi respondis sen kuraĝo rigardi ĝiadirekten. Nun stare, videblis ke ĝi estas verdanta florofeo.</p>
<p>»… kion vi faras tie ĉi?« La nubofeo demandis.</p>
<p>»Nu… tiel kaj tiel… mi ordigas.«</p>
<p>»Ho. Mi ricevis taskon ordigi ĉi tie… se vi povas atendi, vi ne bezonas peni!«</p>
<p>»N- Nu… mi tamen volus…« Parolis la florofeo, plu deturnante la kapon.</p>
<p>»Nu… bone, se vi tion deziras… dankon!« La nubofeo dankis, kaj returniĝis al sia laboro.</p>
<p>Fojfoje ĝi scivole rigardis al sia nova kunlaboranto, kaj fojfoje renkontis similan rigardon de ĝia flanko, kiuokaze ambaŭ rigardoj rapide revenis al la ordigataj pingloj kaj branĉetoj. »(Kial tiom volonte helpi min?)« Pensis al si la nubofeo. »(Ĉu ĝi simple tiom bonkoras? Ĝi ja tre bele floras; eble ankaŭ ĝia koro tiel same belas…)« Kaj vere, ĝiaj surfloroj grandanime malfermis siajn belkolorajn folietojn, kaj bonodoris al mondo.</p>
<figure>
<picture>
<source srcset='/r/ilustrajhoj/pinglordigado.jxl' type='image/jxl'/>
<img src='/r/ilustrajhoj/pinglordigado.png' alt='Meze de arbaro kuŝas falinta trunko, sen pingloj kaj kun branĉoj derompitaj. Post ĝi videblas du feoj: florofeo maldekstre kaj nubofeo dekstre. La florofeo iom kaŝas sin post la trunko. La nubofeo staras kaj tenas amason da pigloj. Ili iom rigardas al si.'/>
</picture>
<figcaption>
Pinglordigado
<details>
<summary>© <time datetime='2025'>2025</time> Tirifto</summary>
<a href='https://artlibre.org/'><img src='/r/permesiloj/lal.svg' class='stamp licence' alt='Emblemo: Permesilo de arto libera'/></a>
</details>
</figcaption>
</figure>
<p>Post iom da tempo, ĉiu feo tralaboris ĝis la trunkomezo, kaj proksimiĝis al la alia feo. Kaj tiam ekpezis sur ili devosento rompi la silenton.</p>
<p>»… kia bela vetero, ĉu ne?« Diris la nubofeo, tuj rimarkonte, ke mallumiĝas, kaj la ĉielo restas kovrita de nuboj.</p>
<p>»Jes ja! Tre nube. Mi ŝatas nubojn!« Respondis la alia entuziasme, sed tuj haltetis kaj deturnis la kapon. Ambaŭ feoj daŭrigis laboron silente, kaj plu proksimiĝis, ĝis tiu preskaŭ estis finita.</p>
<p>»H… H… Ho ne…!« Subite ekdiris la nubofeo urĝe.</p>
<p>»Kio okazas?!«</p>
<p>»T… Tern…!«</p>
<p>»Jen! Tenu!« La florofeo etendis manon kun granda folio. La nubofeo ĝin prenis, kaj tien ternis. Aperis ekfulmo, kaj la cindriĝinta folio disfalis.</p>
<p>»Pardonu… mi ne volis…« Bedaŭris la nubofeo. »Mi ne scias, kial tio ĉiam okazas! Tiom plaĉas al mi promeni tere, sed ĉiuj diras, ke mi maldevus, ĉar ial ĝi ĉiam finiĝas tiel ĉi.« Ĝi montris al la arbo. »Eble ili pravas…«</p>
<p>»Nu…« diris la florofeo bedaŭre, kaj etendis la manon.</p>
<p>»H… H… Ne ree…!«</p>
<p>Ekfulmis. Alia ĵus metita folio cindriĝis en la manoj de la florofeo, time ferminta la okulojn.</p>
<p>»Dankegon… mi tre ŝatas vian helpon! Kaj mi ne… ne…«</p>
<p>Metiĝis. Ekfulmis. Cindriĝis.</p>
<p>»Io tre iritas mian nazon!« Plendis la nubofeo. Poste ĝi rimarkis la florpolvon, kiu disŝutiĝis el la florofeo en la tutan ĉirkaŭaĵon, kaj eĉ tuj antaŭ la nubofeon.</p>
<p>»N- Nu…« Diris la florofeo, honte rigardanta la teron. »… pardonu.«</p>
<footer>
<noscript><p>Ĉu vi ŝatas la verkon? <a href='/eo/donacoj.html'>Subtenu min</a> aŭ kopiu adreson de la verko por diskonigi ĝin!</p></noscript>
<script id='underbuttons'>
/* @license magnet:?xt=urn:btih:90dc5c0be029de84e523b9b3922520e79e0e6f08&dn=cc0.txt CC0-1.0 */
document.getElementById('underbuttons').outerHTML = "<p><a href='/eo/donacoj.html' class='button' target='_blank'>Subtenu min</a> <button onclick='navigator.clipboard.writeText(window.location.href.split(\"\#\")[0]).then(() => window.alert(\"Ligilo al ĉi tiu verko estas kopiita. Sendu ĝin al iu por diskonigi la verkon! 🐱\"))'>Diskonigu la verkon</button></p>"
/* @license-end */
</script>
<details class='history'>
<summary>Historio</summary>
<dl>
<dt><time datetime='2025-01-31'>2025-01-31</time></dt>
<dd>Unua publikigo.</dd>
</dl>
</details>
<details class='licence' open='details'>
<summary>Permesilo</summary>
<p>Ĉi tiun verkon vi rajtas libere kopii, disdoni, kaj ŝanĝi, laŭ kondiĉoj de la <a href='https://artlibre.org/'>Permesilo de arto libera</a>. (Resume: Vi devas mencii la aŭtoron kaj doni ligilon al la verko. Se vi ŝanĝas la verkon, vi devas laŭeble noti la faritajn ŝanĝojn, ilian daton, kaj eldoni ilin sub la sama permesilo.)</p>
<a href='https://artlibre.org/'><img src='/r/permesiloj/lal.svg' class='stamp licence' alt='Emblemo: Permesilo de arto libera'/></a>
</details>
</footer>
</article>
</main>
</div>
<footer id='website-footer'>
<div class='stamps'>
<a href='https://gnu.org/'>
<img class='stamp' src='/r/retetikedoj/gnu.png' lang='en' alt='GNU'/></a>
<img class='stamp' src='/r/retetikedoj/ihhtus.png' lang='el' alt='ΙΧΘΥΣ'/>
<img class='stamp' src='/r/retetikedoj/be-kind.apng' lang='en' alt='Be kind.'/>
<img class='stamp' src='/r/retetikedoj/kulturo-libera.png' lang='eo' alt='Kulturo libera.'/>
<img class='stamp' src='/r/retetikedoj/discord.png' lang='en' alt='Say no to Discord.'/>
<a href='https://xwx.moe/'>
<img class='stamp' src='/r/retetikedoj/xwx-moe.png' alt='xwx.moe'/></a>
<a href='https://mojeek.co.uk' hreflang='en'>
<img class='stamp' src='/r/retetikedoj/mojeek.png' lang='en' alt='Mojeek'/></a>
<a href='https://raku.org/' hreflang='en'>
<img class='stamp' src='/r/retetikedoj/raku.png' alt='Raku'/></a>
<picture>
<source srcset='/r/retetikedoj/jxl.jxl' type='image/jxl'/>
<img class='stamp' src='/r/retetikedoj/jxl.png' alt='JPEG XL'/></picture>
</div>
</footer>
</body>
</html>

View file

@ -0,0 +1,90 @@
{
"@context": [
"https://www.w3.org/ns/activitystreams",
{
"atomUri": "ostatus:atomUri",
"conversation": "ostatus:conversation",
"inReplyToAtomUri": "ostatus:inReplyToAtomUri",
"ostatus": "http://ostatus.org#",
"sensitive": "as:sensitive",
"toot": "http://joinmastodon.org/ns#",
"votersCount": "toot:votersCount"
},
"https://w3id.org/security/v1"
],
"actor": "https://pol.social/users/mkljczk",
"cc": ["https://www.w3.org/ns/activitystreams#Public",
"https://pol.social/users/aemstuz", "https://gts.mkljczk.pl/users/mkljczk",
"https://pl.fediverse.pl/users/mkljczk",
"https://fedi.kutno.pl/users/mkljczk"],
"id": "https://pol.social/users/mkljczk/statuses/113907871635572263#updates/1738096776",
"object": {
"atomUri": "https://pol.social/users/mkljczk/statuses/113907871635572263",
"attachment": [],
"attributedTo": "https://pol.social/users/mkljczk",
"cc": ["https://www.w3.org/ns/activitystreams#Public",
"https://pol.social/users/aemstuz", "https://gts.mkljczk.pl/users/mkljczk",
"https://pl.fediverse.pl/users/mkljczk",
"https://fedi.kutno.pl/users/mkljczk"],
"content": "<p>test</p>",
"contentMap": {
"pl": "<p>test</p>"
},
"conversation": "https://fedi.kutno.pl/contexts/43c14c70-d3fb-42b4-a36d-4eacfab9695a",
"id": "https://pol.social/users/mkljczk/statuses/113907871635572263",
"inReplyTo": "https://pol.social/users/aemstuz/statuses/113907854282654767",
"inReplyToAtomUri": "https://pol.social/users/aemstuz/statuses/113907854282654767",
"likes": {
"id": "https://pol.social/users/mkljczk/statuses/113907871635572263/likes",
"totalItems": 1,
"type": "Collection"
},
"published": "2025-01-28T20:29:45Z",
"replies": {
"first": {
"items": [],
"next": "https://pol.social/users/mkljczk/statuses/113907871635572263/replies?only_other_accounts=true&page=true",
"partOf": "https://pol.social/users/mkljczk/statuses/113907871635572263/replies",
"type": "CollectionPage"
},
"id": "https://pol.social/users/mkljczk/statuses/113907871635572263/replies",
"type": "Collection"
},
"sensitive": false,
"shares": {
"id": "https://pol.social/users/mkljczk/statuses/113907871635572263/shares",
"totalItems": 0,
"type": "Collection"
},
"summary": null,
"tag": [
{
"href": "https://pol.social/users/aemstuz",
"name": "@aemstuz",
"type": "Mention"
},
{
"href": "https://gts.mkljczk.pl/users/mkljczk",
"name": "@mkljczk@gts.mkljczk.pl",
"type": "Mention"
},
{
"href": "https://pl.fediverse.pl/users/mkljczk",
"name": "@mkljczk@fediverse.pl",
"type": "Mention"
},
{
"href": "https://fedi.kutno.pl/users/mkljczk",
"name": "@mkljczk@fedi.kutno.pl",
"type": "Mention"
}
],
"to": ["https://pol.social/users/mkljczk/followers"],
"type": "Note",
"updated": "2025-01-28T20:39:36Z",
"url": "https://pol.social/@mkljczk/113907871635572263"
},
"published": "2025-01-28T20:39:36Z",
"to": ["https://pol.social/users/mkljczk/followers"],
"type": "Update"
}

View file

@ -0,0 +1,54 @@
{
"@context": [
"https://www.w3.org/ns/activitystreams",
"https://w3id.org/security/v1",
{
"Emoji": "toot:Emoji",
"Hashtag": "as:Hashtag",
"PropertyValue": "schema:PropertyValue",
"_misskey_content": "misskey:_misskey_content",
"_misskey_quote": "misskey:_misskey_quote",
"_misskey_reaction": "misskey:_misskey_reaction",
"_misskey_summary": "misskey:_misskey_summary",
"_misskey_votes": "misskey:_misskey_votes",
"backgroundUrl": "sharkey:backgroundUrl",
"discoverable": "toot:discoverable",
"featured": "toot:featured",
"fedibird": "http://fedibird.com/ns#",
"firefish": "https://joinfirefish.org/ns#",
"isCat": "misskey:isCat",
"listenbrainz": "sharkey:listenbrainz",
"manuallyApprovesFollowers": "as:manuallyApprovesFollowers",
"misskey": "https://misskey-hub.net/ns#",
"quoteUri": "fedibird:quoteUri",
"quoteUrl": "as:quoteUrl",
"schema": "http://schema.org#",
"sensitive": "as:sensitive",
"sharkey": "https://joinsharkey.org/ns#",
"speakAsCat": "firefish:speakAsCat",
"toot": "http://joinmastodon.org/ns#",
"value": "schema:value",
"vcard": "http://www.w3.org/2006/vcard/ns#"
}
],
"_misskey_reaction": ":blobwtfnotlikethis:",
"actor": "https://mai.waifuism.life/users/9otxaeemjqy70001",
"content": ":blobwtfnotlikethis:",
"id": "https://mai.waifuism.life/likes/9q2xifhrdnb0001b",
"object": "https://bungle.online/notes/9q2xi2sy4k",
"tag": [
{
"icon": {
"mediaType": "image/png",
"type": "Image",
"url": "https://mai.waifuism.life/files/1b0510f2-1fb4-43f5-a399-10053bbd8f0f"
},
"id": "https://mai.waifuism.life/emojis/blobwtfnotlikethis",
"name": ":blobwtfnotlikethis:",
"type": "Emoji",
"updated": "2024-02-07T02:21:46.497Z"
}
],
"type": "Like"
}

View file

@ -0,0 +1,46 @@
{
"@context": [
"https://www.w3.org/ns/activitystreams",
"https://w3id.org/security/v1",
"https://w3id.org/security/data-integrity/v1",
{
"Emoji": "toot:Emoji",
"Hashtag": "as:Hashtag",
"sensitive": "as:sensitive",
"toot": "http://joinmastodon.org/ns#"
}
],
"actor": "https://mitra.social/users/silverpill",
"cc": [],
"content": ":ablobcatheartsqueeze:",
"id": "https://mitra.social/activities/like/0195a89a-a3a0-ead4-3a1c-aa6311397cfd",
"object": "https://framapiaf.org/users/peertube/statuses/114182703352270287",
"proof": {
"created": "2025-03-18T09:34:21.610678375Z",
"cryptosuite": "eddsa-jcs-2022",
"proofPurpose": "assertionMethod",
"proofValue": "z5AvpwkXQGFpTneRVDNeF48Jo9qYG6PgrE5HaPPpQNdNyc31ULMN4Vxd4aFXELo4Rk5Y9hd9nDy254xP8v5uGGWp1",
"type": "DataIntegrityProof",
"verificationMethod": "https://mitra.social/users/silverpill#ed25519-key"
},
"tag": [
{
"attributedTo": "https://mitra.social/actor",
"icon": {
"mediaType": "image/png",
"type": "Image",
"url": "https://mitra.social/media/a08e153441b25e512ab1b2e8922f5d8cd928322c8b79958cd48297ac722a4117.png"
},
"id": "https://mitra.social/objects/emojis/ablobcatheartsqueeze",
"name": ":ablobcatheartsqueeze:",
"type": "Emoji",
"updated": "1970-01-01T00:00:00Z"
}
],
"to": [
"https://framapiaf.org/users/peertube",
"https://www.w3.org/ns/activitystreams#Public"
],
"type": "Like"
}

View file

@ -0,0 +1,93 @@
{
"@context": [
"https://www.w3.org/ns/activitystreams",
{
"ostatus": "http://ostatus.org#",
"atomUri": "ostatus:atomUri",
"inReplyToAtomUri": "ostatus:inReplyToAtomUri",
"conversation": "ostatus:conversation",
"sensitive": "as:sensitive",
"toot": "http://joinmastodon.org/ns#",
"votersCount": "toot:votersCount",
"quote": "https://w3id.org/fep/044f#quote",
"quoteUri": "http://fedibird.com/ns#quoteUri",
"_misskey_quote": "https://misskey-hub.net/ns#_misskey_quote",
"quoteAuthorization": {
"@id": "https://w3id.org/fep/044f#quoteAuthorization",
"@type": "@id"
},
"gts": "https://gotosocial.org/ns#",
"interactionPolicy": {
"@id": "gts:interactionPolicy",
"@type": "@id"
},
"canQuote": {
"@id": "gts:canQuote",
"@type": "@id"
},
"automaticApproval": {
"@id": "gts:automaticApproval",
"@type": "@id"
},
"manualApproval": {
"@id": "gts:manualApproval",
"@type": "@id"
}
}
],
"id": "https://mastodon.social/users/gwynnion/statuses/115345489087257171",
"type": "Note",
"summary": null,
"inReplyTo": null,
"published": "2025-10-09T17:54:47Z",
"url": "https://mastodon.social/@gwynnion/115345489087257171",
"attributedTo": "https://mastodon.social/users/gwynnion",
"to": [
"https://www.w3.org/ns/activitystreams#Public"
],
"cc": [
"https://mastodon.social/users/gwynnion/followers"
],
"sensitive": false,
"atomUri": "https://mastodon.social/users/gwynnion/statuses/115345489087257171",
"inReplyToAtomUri": null,
"conversation": "https://mastodon.social/contexts/109836797527169643-115345489087257171",
"context": "https://mastodon.social/contexts/109836797527169643-115345489087257171",
"content": "<p class=\"quote-inline\">RE: <a href=\"https://mastodon.social/@404mediaco/115344945575874225\" target=\"_blank\" rel=\"nofollow noopener\" translate=\"no\"><span class=\"invisible\">https://</span><span class=\"ellipsis\">mastodon.social/@404mediaco/11</span><span class=\"invisible\">5344945575874225</span></a></p><p>Every age verification system is just a scheme for companies and hackers to steal your identity.</p>",
"contentMap": {
"en": "<p class=\"quote-inline\">RE: <a href=\"https://mastodon.social/@404mediaco/115344945575874225\" target=\"_blank\" rel=\"nofollow noopener\" translate=\"no\"><span class=\"invisible\">https://</span><span class=\"ellipsis\">mastodon.social/@404mediaco/11</span><span class=\"invisible\">5344945575874225</span></a></p><p>Every age verification system is just a scheme for companies and hackers to steal your identity.</p>"
},
"quote": "https://mastodon.social/users/404mediaco/statuses/115344945575874225",
"_misskey_quote": "https://mastodon.social/users/404mediaco/statuses/115344945575874225",
"quoteUri": "https://mastodon.social/users/404mediaco/statuses/115344945575874225",
"quoteAuthorization": "https://mastodon.social/users/404mediaco/quote_authorizations/115345489087269783",
"interactionPolicy": {
"canQuote": {
"automaticApproval": [
"https://www.w3.org/ns/activitystreams#Public"
]
}
},
"attachment": [],
"tag": [],
"replies": {
"id": "https://mastodon.social/users/gwynnion/statuses/115345489087257171/replies",
"type": "Collection",
"first": {
"type": "CollectionPage",
"next": "https://mastodon.social/users/gwynnion/statuses/115345489087257171/replies?only_other_accounts=true&page=true",
"partOf": "https://mastodon.social/users/gwynnion/statuses/115345489087257171/replies",
"items": []
}
},
"likes": {
"id": "https://mastodon.social/users/gwynnion/statuses/115345489087257171/likes",
"type": "Collection",
"totalItems": 26
},
"shares": {
"id": "https://mastodon.social/users/gwynnion/statuses/115345489087257171/shares",
"type": "Collection",
"totalItems": 28
}
}

View file

@ -0,0 +1,109 @@
{
"@context": [
"https://www.w3.org/ns/activitystreams",
"https://w3id.org/security/v1",
{
"claim": {
"@id": "toot:claim",
"@type": "@id"
},
"memorial": "toot:memorial",
"atomUri": "ostatus:atomUri",
"manuallyApprovesFollowers": "as:manuallyApprovesFollowers",
"blurhash": "toot:blurhash",
"ostatus": "http://ostatus.org#",
"discoverable": "toot:discoverable",
"focalPoint": {
"@container": "@list",
"@id": "toot:focalPoint"
},
"votersCount": "toot:votersCount",
"Hashtag": "as:Hashtag",
"Emoji": "toot:Emoji",
"alsoKnownAs": {
"@id": "as:alsoKnownAs",
"@type": "@id"
},
"sensitive": "as:sensitive",
"movedTo": {
"@id": "as:movedTo",
"@type": "@id"
},
"inReplyToAtomUri": "ostatus:inReplyToAtomUri",
"conversation": "ostatus:conversation",
"Device": "toot:Device",
"schema": "http://schema.org#",
"toot": "http://joinmastodon.org/ns#",
"cipherText": "toot:cipherText",
"suspended": "toot:suspended",
"messageType": "toot:messageType",
"featuredTags": {
"@id": "toot:featuredTags",
"@type": "@id"
},
"Curve25519Key": "toot:Curve25519Key",
"deviceId": "toot:deviceId",
"Ed25519Signature": "toot:Ed25519Signature",
"featured": {
"@id": "toot:featured",
"@type": "@id"
},
"devices": {
"@id": "toot:devices",
"@type": "@id"
},
"value": "schema:value",
"PropertyValue": "schema:PropertyValue",
"messageFranking": "toot:messageFranking",
"publicKeyBase64": "toot:publicKeyBase64",
"identityKey": {
"@id": "toot:identityKey",
"@type": "@id"
},
"Ed25519Key": "toot:Ed25519Key",
"indexable": "toot:indexable",
"EncryptedMessage": "toot:EncryptedMessage",
"fingerprintKey": {
"@id": "toot:fingerprintKey",
"@type": "@id"
}
}
],
"actor": "https://phpc.social/users/denniskoch",
"cc": [],
"id": "https://phpc.social/users/denniskoch/statuses/112847382711461301/activity",
"inReplyTo": null,
"inReplyToAtomUri": null,
"object": {
"atomUri": "https://phpc.social/users/denniskoch/statuses/112847382711461301",
"attachment": [],
"attributedTo": "https://phpc.social/users/denniskoch",
"cc": [],
"content": "<p><span class=\"h-card\" translate=\"no\"><a href=\"https://mastodon.social/@bastianallgeier\" class=\"u-url mention\">@<span>bastianallgeier</span></a></span> <span class=\"h-card\" translate=\"no\"><a href=\"https://chaos.social/@distantnative\" class=\"u-url mention\">@<span>distantnative</span></a></span> <span class=\"h-card\" translate=\"no\"><a href=\"https://fosstodon.org/@kev\" class=\"u-url mention\">@<span>kev</span></a></span> Another main argument: Discord is popular. Many people have an account, so you can just join an server quickly. Also you know the app and how to get around.</p>",
"contentMap": {
"en": "<p><span class=\"h-card\" translate=\"no\"><a href=\"https://mastodon.social/@bastianallgeier\" class=\"u-url mention\">@<span>bastianallgeier</span></a></span> <span class=\"h-card\" translate=\"no\"><a href=\"https://chaos.social/@distantnative\" class=\"u-url mention\">@<span>distantnative</span></a></span> <span class=\"h-card\" translate=\"no\"><a href=\"https://fosstodon.org/@kev\" class=\"u-url mention\">@<span>kev</span></a></span> Another main argument: Discord is popular. Many people have an account, so you can just join an server quickly. Also you know the app and how to get around.</p>"
},
"conversation": "tag:mastodon.social,2024-07-25:objectId=760068442:objectType=Conversation",
"id": "https://phpc.social/users/denniskoch/statuses/112847382711461301",
"published": "2024-07-25T13:33:29Z",
"replies": null,
"sensitive": false,
"tag": [],
"to": [
"https://www.w3.org/ns/activitystreams#Public"
],
"type": "Note",
"url": "https://phpc.social/@denniskoch/112847382711461301"
},
"published": "2024-07-25T13:33:29Z",
"signature": {
"created": "2024-07-25T13:33:29Z",
"creator": "https://phpc.social/users/denniskoch#main-key",
"signatureValue": "slz9BKJzd2n1S44wdXGOU+bV/wsskdgAaUpwxj8R16mYOL8+DTpE6VnfSKoZGsBBJT8uG5gnVfVEz1YsTUYtymeUgLMh7cvd8VnJnZPS+oixbmBRVky/Myf91TEgQQE7G4vDmTdB4ii54hZrHcOOYYf5FKPNRSkMXboKA6LMqNtekhbI+JTUJYIB02WBBK6PUyo15f6B1RJ6HGWVgud9NE0y1EZXfrkqUt682p8/9D49ORf7AwjXUJibKic2RbPvhEBj70qUGfBm4vvgdWhSUn1IG46xh+U0+NrTSUED82j1ZVOeua/2k/igkGs8cSBkY35quXTkPz6gbqCCH66CuA==",
"type": "RsaSignature2017"
},
"to": [
"https://www.w3.org/ns/activitystreams#Public"
],
"type": "Create"
}

File diff suppressed because one or more lines are too long

18
test/fixtures/server.pem vendored Normal file
View file

@ -0,0 +1,18 @@
-----BEGIN CERTIFICATE-----
MIICpDCCAYwCCQC0vCQAnSoGdzANBgkqhkiG9w0BAQsFADAUMRIwEAYDVQQDDAls
b2NhbGhvc3QwHhcNMjYwMTE2MTY1ODE5WhcNMzYwMTE0MTY1ODE5WjAUMRIwEAYD
VQQDDAlsb2NhbGhvc3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCq
dZ4O2upZqwIo1eK5KrW1IIsjkfsFK8hE7Llh+4axcesiUKot0ib1CUhRSYiL1DLO
CIYQOw8IKQDVSC4JWAX9SsnX4W8dwexMQuSQG7/IKX2auC1bNNySFvoqM6Gq3GL9
MqBFonZGXDPZu8fmxsI/2p9+2GK13F+HXgoLlXSCoO3XELJaBmjv29tgxxWRxCiH
m4u0briSxgUEx+CctpKPvGDmLaoIOIhjtuoG6OjkeWUOp6jDcteazO23VxPyF5cS
NbRJgm8AckrTQ6wbWSnhyqF8rPEsIc0ZAlUdDEs5fL3sjugc566FvE+GOkZIEyDD
tgWbc4Ne+Kp/nnt6oVxpAgMBAAEwDQYJKoZIhvcNAQELBQADggEBADv+J1DTok8V
MKVKo0hsRnHTeJQ2+EIgOspuYlEzez3PysOZH6diAQxO2lzuo9LKxP3hnmw17XO/
P2oCzYyb9/P58VY/gr4UDIfuhgcE0cVfdsRhVId/I2FW6VP2f5q1TGbDUxSsVIlG
6hufn1aLBu90LtEbDkHqbnD05yYPwdqzWg4TrOXbX+jBhQrXJJdB3W7KTgozjRQw
F7+/2IyXoxXuxcwQBQlYhUbvGlsFqFpP/6cz2al5i5pNUkiNaSYwlRmuwa7zoTft
tHf57dhfXIpXET2BaJM6DSjDOOG/QleRXkvkTI5J21q+Bo+XnOzo19p4cZKJpTFC
SNgrftyNh3k=
-----END CERTIFICATE-----

View file

@ -0,0 +1 @@
[{"language":"BG","name":"Bulgarian","supports_formality":false},{"language":"CS","name":"Czech","supports_formality":false},{"language":"DA","name":"Danish","supports_formality":false},{"language":"DE","name":"German","supports_formality":true},{"language":"EL","name":"Greek","supports_formality":false},{"language":"EN-GB","name":"English (British)","supports_formality":false},{"language":"EN-US","name":"English (American)","supports_formality":false},{"language":"ES","name":"Spanish","supports_formality":true},{"language":"ET","name":"Estonian","supports_formality":false},{"language":"FI","name":"Finnish","supports_formality":false},{"language":"FR","name":"French","supports_formality":true},{"language":"HU","name":"Hungarian","supports_formality":false},{"language":"ID","name":"Indonesian","supports_formality":false},{"language":"IT","name":"Italian","supports_formality":true},{"language":"JA","name":"Japanese","supports_formality":false},{"language":"LT","name":"Lithuanian","supports_formality":false},{"language":"LV","name":"Latvian","supports_formality":false},{"language":"NL","name":"Dutch","supports_formality":true},{"language":"PL","name":"Polish","supports_formality":true},{"language":"PT-BR","name":"Portuguese (Brazilian)","supports_formality":true},{"language":"PT-PT","name":"Portuguese (European)","supports_formality":true},{"language":"RO","name":"Romanian","supports_formality":false},{"language":"RU","name":"Russian","supports_formality":true},{"language":"SK","name":"Slovak","supports_formality":false},{"language":"SL","name":"Slovenian","supports_formality":false},{"language":"SV","name":"Swedish","supports_formality":false},{"language":"TR","name":"Turkish","supports_formality":false},{"language":"UK","name":"Ukrainian","supports_formality":false},{"language":"ZH","name":"Chinese (simplified)","supports_formality":false}]

View file

@ -0,0 +1 @@
{"translations":[{"detected_source_language":"PL","text":"REMOVE THE FOLLOWER!Paste this on your follower. If we get 70% of nk users...they will remove the follower!!!"}]}

View file

@ -13,7 +13,7 @@
"directMessage": "litepub:directMessage"
}
],
"id": "http://localhost:8080/followers/fuser3",
"id": "https://remote.org/followers/fuser3",
"type": "OrderedCollection",
"totalItems": 296
}

View file

@ -13,7 +13,7 @@
"directMessage": "litepub:directMessage"
}
],
"id": "http://localhost:8080/following/fuser3",
"id": "https://remote.org/following/fuser3",
"type": "OrderedCollection",
"totalItems": 32
}

View file

@ -0,0 +1,41 @@
{
"alsoKnownAs": [],
"attachment": [],
"capabilities": {},
"discoverable": true,
"endpoints": {},
"featured": "https://queef.in/cute_cat/collections/featured",
"followers": "https://queef.in/cute_cat/followers",
"following": "https://queef.in/cute_cat/following",
"icon": {
"type": "Image",
"url": [
"https://queef.in/storage/profile.webp",
"https://example.com/image"
]
},
"id": "https://queef.in/cute_cat",
"image": {
"type": "Image",
"url": [
"https://queef.in/storage/banner.gif",
"https://example.com/image"
]
},
"inbox": "https://queef.in/cute_cat/inbox",
"manuallyApprovesFollowers": false,
"name": "cute_cat",
"outbox": "https://queef.in/cute_cat/outbox",
"preferredUsername": "cute_cat",
"publicKey": {
"id": "https://queef.in/cute_cat#main-key",
"owner": "https://queef.in/cute_cat"
},
"published": "2025-08-18T01:16:10.000Z",
"summary": "A cute cat",
"tag": [],
"type": "Person",
"url": "https://queef.in/cute_cat",
"vcard:bday": null,
"webfinger": "acct:cute_cat@queef.in"
}

File diff suppressed because one or more lines are too long

View file

@ -1,7 +1,7 @@
{
"@context": "https://www.w3.org/ns/activitystreams",
"id": "http://localhost:4001/users/masto_closed/followers",
"id": "https://remote.org/users/masto_closed/followers",
"type": "OrderedCollection",
"totalItems": 437,
"first": "http://localhost:4001/users/masto_closed/followers?page=1"
"first": "https://remote.org/users/masto_closed/followers?page=1"
}

View file

@ -1 +1 @@
{"@context":"https://www.w3.org/ns/activitystreams","id":"http://localhost:4001/users/masto_closed/followers?page=1","type":"OrderedCollectionPage","totalItems":437,"next":"http://localhost:4001/users/masto_closed/followers?page=2","partOf":"http://localhost:4001/users/masto_closed/followers","orderedItems":["https://testing.uguu.ltd/users/rin","https://patch.cx/users/rin","https://letsalllovela.in/users/xoxo","https://pleroma.site/users/crushv","https://aria.company/users/boris","https://kawen.space/users/crushv","https://freespeech.host/users/cvcvcv","https://pleroma.site/users/picpub","https://pixelfed.social/users/nosleep","https://boopsnoot.gq/users/5c1896d162f7d337f90492a3","https://pikachu.rocks/users/waifu","https://royal.crablettesare.life/users/crablettes"]}
{"@context":"https://www.w3.org/ns/activitystreams","id":"https://remote.org/users/masto_closed/followers?page=1","type":"OrderedCollectionPage","totalItems":437,"next":"https://remote.org/users/masto_closed/followers?page=2","partOf":"https://remote.org/users/masto_closed/followers","orderedItems":["https://testing.uguu.ltd/users/rin","https://patch.cx/users/rin","https://letsalllovela.in/users/xoxo","https://pleroma.site/users/crushv","https://aria.company/users/boris","https://kawen.space/users/crushv","https://freespeech.host/users/cvcvcv","https://pleroma.site/users/picpub","https://pixelfed.social/users/nosleep","https://boopsnoot.gq/users/5c1896d162f7d337f90492a3","https://pikachu.rocks/users/waifu","https://royal.crablettesare.life/users/crablettes"]}

View file

@ -1,7 +1,7 @@
{
"@context": "https://www.w3.org/ns/activitystreams",
"id": "http://localhost:4001/users/masto_closed/following",
"id": "https://remote.org/users/masto_closed/following",
"type": "OrderedCollection",
"totalItems": 152,
"first": "http://localhost:4001/users/masto_closed/following?page=1"
"first": "https://remote.org/users/masto_closed/following?page=1"
}

View file

@ -1 +1 @@
{"@context":"https://www.w3.org/ns/activitystreams","id":"http://localhost:4001/users/masto_closed/following?page=1","type":"OrderedCollectionPage","totalItems":152,"next":"http://localhost:4001/users/masto_closed/following?page=2","partOf":"http://localhost:4001/users/masto_closed/following","orderedItems":["https://testing.uguu.ltd/users/rin","https://patch.cx/users/rin","https://letsalllovela.in/users/xoxo","https://pleroma.site/users/crushv","https://aria.company/users/boris","https://kawen.space/users/crushv","https://freespeech.host/users/cvcvcv","https://pleroma.site/users/picpub","https://pixelfed.social/users/nosleep","https://boopsnoot.gq/users/5c1896d162f7d337f90492a3","https://pikachu.rocks/users/waifu","https://royal.crablettesare.life/users/crablettes"]}
{"@context":"https://www.w3.org/ns/activitystreams","id":"https://remote.org/users/masto_closed/following?page=1","type":"OrderedCollectionPage","totalItems":152,"next":"https://remote.org/users/masto_closed/following?page=2","partOf":"https://remote.org/users/masto_closed/following","orderedItems":["https://testing.uguu.ltd/users/rin","https://patch.cx/users/rin","https://letsalllovela.in/users/xoxo","https://pleroma.site/users/crushv","https://aria.company/users/boris","https://kawen.space/users/crushv","https://freespeech.host/users/cvcvcv","https://pleroma.site/users/picpub","https://pixelfed.social/users/nosleep","https://boopsnoot.gq/users/5c1896d162f7d337f90492a3","https://pikachu.rocks/users/waifu","https://royal.crablettesare.life/users/crablettes"]}

View file

@ -1,18 +1,18 @@
{
"type": "OrderedCollection",
"totalItems": 527,
"id": "http://localhost:4001/users/fuser2/followers",
"id": "https://remote.org/users/fuser2/followers",
"first": {
"type": "OrderedCollectionPage",
"totalItems": 527,
"partOf": "http://localhost:4001/users/fuser2/followers",
"partOf": "https://remote.org/users/fuser2/followers",
"orderedItems": [],
"next": "http://localhost:4001/users/fuser2/followers?page=2",
"id": "http://localhost:4001/users/fuser2/followers?page=1"
"next": "https://remote.org/users/fuser2/followers?page=2",
"id": "https://remote.org/users/fuser2/followers?page=1"
},
"@context": [
"https://www.w3.org/ns/activitystreams",
"http://localhost:4001/schemas/litepub-0.1.jsonld",
"https://remote.org/schemas/litepub-0.1.jsonld",
{
"@language": "und"
}

View file

@ -1,18 +1,18 @@
{
"type": "OrderedCollection",
"totalItems": 267,
"id": "http://localhost:4001/users/fuser2/following",
"id": "https://remote.org/users/fuser2/following",
"first": {
"type": "OrderedCollectionPage",
"totalItems": 267,
"partOf": "http://localhost:4001/users/fuser2/following",
"partOf": "https://remote.org/users/fuser2/following",
"orderedItems": [],
"next": "http://localhost:4001/users/fuser2/following?page=2",
"id": "http://localhost:4001/users/fuser2/following?page=1"
"next": "https://remote.org/users/fuser2/following?page=2",
"id": "https://remote.org/users/fuser2/following?page=1"
},
"@context": [
"https://www.w3.org/ns/activitystreams",
"http://localhost:4001/schemas/litepub-0.1.jsonld",
"https://remote.org/schemas/litepub-0.1.jsonld",
{
"@language": "und"
}

View file

@ -1 +0,0 @@
21.1

View file

@ -1 +0,0 @@
22.1

View file

@ -1 +0,0 @@
22.4

View file

@ -1 +0,0 @@
23.0

View file

@ -3,7 +3,7 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Mix.PleromaTest do
use ExUnit.Case, async: true
use ExUnit.Case, async: false
import Mix.Pleroma
setup_all do

View file

@ -3,7 +3,7 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Mix.Tasks.Pleroma.AppTest do
use Pleroma.DataCase, async: true
use Pleroma.DataCase, async: false
setup_all do
Mix.shell(Mix.Shell.Process)
@ -42,9 +42,10 @@ defmodule Mix.Tasks.Pleroma.AppTest do
test "with errors" do
Mix.Tasks.Pleroma.App.run(["create"])
{:mix_shell, :error, ["Creating failed:"]}
{:mix_shell, :error, ["name: can't be blank"]}
{:mix_shell, :error, ["redirect_uris: can't be blank"]}
assert_receive {:mix_shell, :error, ["Creating failed:"]}
assert_receive {:mix_shell, :error, ["name: can't be blank"]}
assert_receive {:mix_shell, :error, ["redirect_uris: can't be blank"]}
end
defp assert_app(name, redirect, scopes) do

View file

@ -329,5 +329,39 @@ defmodule Mix.Tasks.Pleroma.ConfigTest do
assert config_records() == []
end
test "filters non-whitelisted settings" do
clear_config(:database_config_whitelist, [
{:pleroma},
{:web_push_encryption, :vapid_details}
])
insert_config_record(:web_push_encryption, :non_whitelisted_key, a: 1)
insert_config_record(:web_push_encryption, :vapid_details, b: 1)
MixTask.run(["filter_whitelisted", "--force"])
assert [
%ConfigDB{group: :pleroma, key: :instance},
%ConfigDB{group: :pleroma, key: Pleroma.Captcha},
%ConfigDB{group: :web_push_encryption, key: :vapid_details}
] = config_records()
end
test "filter_whitelisted doesn't crash when whitelist is unset" do
clear_config(:database_config_whitelist, nil)
existing = config_records()
MixTask.run(["filter_whitelisted", "--force"])
assert config_records() == existing
end
test "filter_whitelisted doesn't crash when whitelist is disabled" do
clear_config(:database_config_whitelist, false)
existing = config_records()
MixTask.run(["filter_whitelisted", "--force"])
assert config_records() == existing
end
end
end

View file

@ -3,11 +3,12 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Mix.Tasks.Pleroma.DatabaseTest do
use Pleroma.DataCase, async: true
use Pleroma.DataCase, async: false
use Oban.Testing, repo: Pleroma.Repo
alias Pleroma.Activity
alias Pleroma.Bookmark
alias Pleroma.Hashtag
alias Pleroma.Object
alias Pleroma.Repo
alias Pleroma.User
@ -251,7 +252,7 @@ defmodule Mix.Tasks.Pleroma.DatabaseTest do
|> Repo.update!()
{:ok, old_favourite_activity} =
CommonAPI.favorite(remote_user2, old_remote_post_activity.id)
CommonAPI.favorite(old_remote_post_activity.id, remote_user2)
old_favourite_activity
|> Ecto.Changeset.change(%{local: false, updated_at: old_insert_date})
@ -302,7 +303,7 @@ defmodule Mix.Tasks.Pleroma.DatabaseTest do
|> Ecto.Changeset.change(%{local: false, updated_at: old_insert_date})
|> Repo.update!()
{:ok, old_favourite_activity} = CommonAPI.favorite(local_user, old_remote_post3_activity.id)
{:ok, old_favourite_activity} = CommonAPI.favorite(old_remote_post3_activity.id, local_user)
old_favourite_activity
|> Ecto.Changeset.change(%{local: true, updated_at: old_insert_date})
@ -411,7 +412,7 @@ defmodule Mix.Tasks.Pleroma.DatabaseTest do
["scheduled_activities"],
["schema_migrations"],
["thread_mutes"],
# ["user_follows_hashtag"], # not in pleroma
["user_follows_hashtag"],
# ["user_frontend_setting_profiles"], # not in pleroma
["user_invite_tokens"],
["user_notes"],
@ -550,6 +551,39 @@ defmodule Mix.Tasks.Pleroma.DatabaseTest do
assert length(activities) == 3
end
test "it prunes hashtags with no objects associated", %{old_insert_date: old_insert_date} do
user = insert(:user)
{:ok, hashtag_post_activity} =
CommonAPI.post(user, %{status: "morning #cofe", local: true})
hashtag_post_object = Object.normalize(hashtag_post_activity)
{:ok, hashtag_post2_activity} =
CommonAPI.post(user, %{status: "morning #cawfee", local: true})
hashtag_post2_object = Object.normalize(hashtag_post2_activity)
hashtag_post_object
|> Ecto.Changeset.change(%{updated_at: old_insert_date})
|> Repo.update!()
hashtag_post2_object
|> Ecto.Changeset.change(%{updated_at: old_insert_date})
|> Repo.update!()
# Test whether hashtags with follow relationships are kept
User.follow_hashtag(user, Hashtag.get_by_name("cofe"))
assert length(Repo.all(Hashtag)) == 2
assert length(Repo.all(Object)) == 2
Mix.Tasks.Pleroma.Database.run(["prune_objects"])
assert length(Repo.all(Hashtag)) == 1
assert length(Repo.all(Object)) == 0
assert Repo.one(Hashtag) |> Map.fetch!(:name) == "cofe"
end
end
describe "running update_users_following_followers_counts" do
@ -586,7 +620,7 @@ defmodule Mix.Tasks.Pleroma.DatabaseTest do
{:ok, %{id: id, object: object}} = CommonAPI.post(user, %{status: "test"})
{:ok, %{object: object2}} = CommonAPI.post(user, %{status: "test test"})
CommonAPI.favorite(user2, id)
CommonAPI.favorite(id, user2)
likes = %{
"first" =>
@ -623,10 +657,12 @@ defmodule Mix.Tasks.Pleroma.DatabaseTest do
expires_at = DateTime.add(DateTime.utc_now(), 60 * 61)
Pleroma.Workers.PurgeExpiredActivity.enqueue(%{
activity_id: activity_id3,
expires_at: expires_at
})
Pleroma.Workers.PurgeExpiredActivity.enqueue(
%{
activity_id: activity_id3
},
scheduled_at: expires_at
)
Mix.Tasks.Pleroma.Database.run(["ensure_expiration"])

View file

@ -24,7 +24,7 @@ defmodule Mix.Tasks.Pleroma.DigestTest do
setup do: clear_config([Pleroma.Emails.Mailer, :enabled], true)
setup do
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Config)
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Test.StaticConfig)
:ok
end

View file

@ -11,7 +11,7 @@ defmodule Mix.Tasks.Pleroma.FrontendTest do
@dir "test/frontend_static_test"
setup do
File.mkdir_p!(@dir)
Pleroma.Backports.mkdir_p!(@dir)
clear_config([:instance, :static_dir], @dir)
on_exit(fn ->
@ -50,7 +50,7 @@ defmodule Mix.Tasks.Pleroma.FrontendTest do
folder = Path.join([@dir, "frontends", "pleroma", "fantasy"])
previously_existing = Path.join([folder, "temp"])
File.mkdir_p!(folder)
Pleroma.Backports.mkdir_p!(folder)
File.write!(previously_existing, "yey")
assert File.exists?(previously_existing)

View file

@ -7,7 +7,7 @@ defmodule Mix.Tasks.Pleroma.InstanceTest do
use Pleroma.DataCase
setup do
File.mkdir_p!(tmp_path())
Pleroma.Backports.mkdir_p!(tmp_path())
on_exit(fn ->
File.rm_rf(tmp_path())

View file

@ -3,12 +3,14 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Mix.Tasks.Pleroma.UploadsTest do
alias Pleroma.Config
alias Pleroma.Upload
use Pleroma.DataCase
use Pleroma.DataCase, async: false
import Mock
setup_all do
prep_uploads()
Mix.shell(Mix.Shell.Process)
on_exit(fn ->
@ -18,6 +20,8 @@ defmodule Mix.Tasks.Pleroma.UploadsTest do
:ok
end
setup do: clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)
describe "running migrate_local" do
test "uploads migrated" do
with_mock Upload,
@ -53,4 +57,15 @@ defmodule Mix.Tasks.Pleroma.UploadsTest do
end
end
end
defp prep_uploads do
upload_dir = Config.get([Pleroma.Uploaders.Local, :uploads])
if not File.exists?(upload_dir) || File.ls!(upload_dir) == [] do
Pleroma.Backports.mkdir_p(upload_dir)
Path.join([upload_dir, "file.txt"])
|> File.touch()
end
end
end

View file

@ -21,7 +21,7 @@ defmodule Mix.Tasks.Pleroma.UserTest do
import Pleroma.Factory
setup do
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Config)
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Test.StaticConfig)
:ok
end

View file

@ -249,7 +249,7 @@ defmodule Pleroma.ActivityTest do
{:ok, %{id: id, object: %{data: %{"id" => obj_id}}}} =
Pleroma.Web.CommonAPI.post(user, %{status: "cofe"})
Pleroma.Web.CommonAPI.favorite(another, id)
Pleroma.Web.CommonAPI.favorite(id, another)
assert obj_id
|> Pleroma.Activity.Queries.by_object_id()
@ -261,23 +261,27 @@ defmodule Pleroma.ActivityTest do
test "add_by_params_query/3" do
user = insert(:user)
note = insert(:note_activity, user: user)
note_activity = insert(:note_activity, user: user)
insert(:add_activity, user: user, note: note)
insert(:add_activity, user: user, note: note)
insert(:add_activity, user: user, note_activity: note_activity)
insert(:add_activity, user: user, note_activity: note_activity)
insert(:add_activity, user: user)
assert Repo.aggregate(Activity, :count, :id) == 4
assert Repo.aggregate(Activity, :count, :id) == 5
add_query =
Activity.add_by_params_query(note.data["object"], user.ap_id, user.featured_address)
Activity.add_by_params_query(
note_activity.data["object"],
user.ap_id,
user.featured_address
)
assert Repo.aggregate(add_query, :count, :id) == 2
Repo.delete_all(add_query)
assert Repo.aggregate(add_query, :count, :id) == 0
assert Repo.aggregate(Activity, :count, :id) == 2
assert Repo.aggregate(Activity, :count, :id) == 3
end
describe "associated_object_id() sql function" do

View file

@ -273,24 +273,28 @@ defmodule Pleroma.ConfigDBTest do
end
test "sigil" do
assert ConfigDB.to_elixir_types("~r[comp[lL][aA][iI][nN]er]") == ~r/comp[lL][aA][iI][nN]er/
assert ConfigDB.to_elixir_types("~r[comp[lL][aA][iI][nN]er]").source ==
~r/comp[lL][aA][iI][nN]er/.source
end
test "link sigil" do
assert ConfigDB.to_elixir_types("~r/https:\/\/example.com/") == ~r/https:\/\/example.com/
assert ConfigDB.to_elixir_types("~r/https:\/\/example.com/").source ==
~r/https:\/\/example.com/.source
end
test "link sigil with um modifiers" do
assert ConfigDB.to_elixir_types("~r/https:\/\/example.com/um") ==
~r/https:\/\/example.com/um
assert ConfigDB.to_elixir_types("~r/https:\/\/example.com/um").source ==
~r/https:\/\/example.com/um.source
end
test "link sigil with i modifier" do
assert ConfigDB.to_elixir_types("~r/https:\/\/example.com/i") == ~r/https:\/\/example.com/i
assert ConfigDB.to_elixir_types("~r/https:\/\/example.com/i").source ==
~r/https:\/\/example.com/i.source
end
test "link sigil with s modifier" do
assert ConfigDB.to_elixir_types("~r/https:\/\/example.com/s") == ~r/https:\/\/example.com/s
assert ConfigDB.to_elixir_types("~r/https:\/\/example.com/s").source ==
~r/https:\/\/example.com/s.source
end
test "raise if valid delimiter not found" do
@ -460,11 +464,11 @@ defmodule Pleroma.ConfigDBTest do
test "complex keyword with sigil" do
assert ConfigDB.to_elixir_types([
%{"tuple" => [":federated_timeline_removal", []]},
%{"tuple" => [":reject", ["~r/comp[lL][aA][iI][nN]er/"]]},
%{"tuple" => [":reject", [~r/comp[lL][aA][iI][nN]er/.source]]},
%{"tuple" => [":replace", []]}
]) == [
federated_timeline_removal: [],
reject: [~r/comp[lL][aA][iI][nN]er/],
reject: [~r/comp[lL][aA][iI][nN]er/.source],
replace: []
]
end

View file

@ -332,7 +332,7 @@ defmodule Pleroma.Conversation.ParticipationTest do
# When it's a reply from the blocked user
{:ok, _direct2} =
CommonAPI.post(blocked, %{
status: "reply",
status: "@#{third_user.nickname}, #{blocker.nickname} reply",
visibility: "direct",
in_reply_to_conversation_id: blocked_participation.id
})

View file

@ -14,7 +14,7 @@ defmodule Pleroma.ConversationTest do
setup_all do: clear_config([:instance, :federating], true)
setup do
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Config)
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Test.StaticConfig)
:ok
end
@ -66,8 +66,10 @@ defmodule Pleroma.ConversationTest do
jafnhar = insert(:user, local: false)
tridi = insert(:user)
to = [har.nickname, jafnhar.nickname, tridi.nickname]
{:ok, activity} =
CommonAPI.post(har, %{status: "Hey @#{jafnhar.nickname}", visibility: "direct"})
CommonAPI.post(har, %{status: "Hey @#{jafnhar.nickname}", visibility: "direct", to: to})
object = Pleroma.Object.normalize(activity, fetch: false)
context = object.data["context"]
@ -88,7 +90,8 @@ defmodule Pleroma.ConversationTest do
CommonAPI.post(jafnhar, %{
status: "Hey @#{har.nickname}",
visibility: "direct",
in_reply_to_status_id: activity.id
in_reply_to_status_id: activity.id,
to: to
})
object = Pleroma.Object.normalize(activity, fetch: false)
@ -112,7 +115,8 @@ defmodule Pleroma.ConversationTest do
CommonAPI.post(tridi, %{
status: "Hey @#{har.nickname}",
visibility: "direct",
in_reply_to_status_id: activity.id
in_reply_to_status_id: activity.id,
to: to
})
object = Pleroma.Object.normalize(activity, fetch: false)

View file

@ -0,0 +1,56 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.ContentLanguageMapTest do
use Pleroma.DataCase, async: true
alias Pleroma.EctoType.ActivityPub.ObjectValidators.ContentLanguageMap
test "it validates" do
data = %{
"en-US" => "mew mew",
"en-GB" => "meow meow"
}
assert {:ok, ^data} = ContentLanguageMap.cast(data)
end
test "it validates empty strings" do
data = %{
"en-US" => "mew mew",
"en-GB" => ""
}
assert {:ok, ^data} = ContentLanguageMap.cast(data)
end
test "it ignores non-strings within the map" do
data = %{
"en-US" => "mew mew",
"en-GB" => 123
}
assert {:ok, validated_data} = ContentLanguageMap.cast(data)
assert validated_data == %{"en-US" => "mew mew"}
end
test "it ignores bad locale codes" do
data = %{
"en-US" => "mew mew",
"en_GB" => "meow meow",
"en<<#@!$#!@%!GB" => "meow meow"
}
assert {:ok, validated_data} = ContentLanguageMap.cast(data)
assert validated_data == %{"en-US" => "mew mew"}
end
test "it complains with non-map data" do
assert :error = ContentLanguageMap.cast("mew")
assert :error = ContentLanguageMap.cast(["mew"])
assert :error = ContentLanguageMap.cast([%{"en-US" => "mew"}])
end
end

View file

@ -0,0 +1,29 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.LanguageCodeTest do
use Pleroma.DataCase, async: true
alias Pleroma.EctoType.ActivityPub.ObjectValidators.LanguageCode
test "it accepts language code" do
text = "pl"
assert {:ok, ^text} = LanguageCode.cast(text)
end
test "it accepts language code with region" do
text = "pl-PL"
assert {:ok, ^text} = LanguageCode.cast(text)
end
test "errors for invalid language code" do
assert {:error, :invalid_language} = LanguageCode.cast("ru_RU")
assert {:error, :invalid_language} = LanguageCode.cast(" ")
assert {:error, :invalid_language} = LanguageCode.cast("en-US\n")
end
test "errors for non-text" do
assert :error == LanguageCode.cast(42)
end
end

View file

@ -4,6 +4,7 @@
defmodule Pleroma.Emoji.PackTest do
use Pleroma.DataCase
alias Pleroma.Emoji
alias Pleroma.Emoji.Pack
@emoji_path Path.join(
@ -12,6 +13,9 @@ defmodule Pleroma.Emoji.PackTest do
)
setup do
# Reload emoji to ensure a clean state
Emoji.reload()
pack_path = Path.join(@emoji_path, "dump_pack")
File.mkdir(pack_path)
@ -53,6 +57,63 @@ defmodule Pleroma.Emoji.PackTest do
assert updated_pack.files_count == 5
end
test "skips existing emojis when adding from zip file", %{pack: pack} do
# First, let's create a test pack with a "bear" emoji
test_pack_path = Path.join(@emoji_path, "test_bear_pack")
Pleroma.Backports.mkdir_p(test_pack_path)
# Create a pack.json file
File.write!(Path.join(test_pack_path, "pack.json"), """
{
"files": { "bear": "bear.png" },
"pack": {
"description": "Bear Pack", "homepage": "https://pleroma.social",
"license": "Test license", "share-files": true
}}
""")
# Copy a test image to use as the bear emoji
File.cp!(
Path.absname("test/instance_static/emoji/test_pack/blank.png"),
Path.join(test_pack_path, "bear.png")
)
# Load the pack to register the "bear" emoji in the global registry
{:ok, _bear_pack} = Pleroma.Emoji.Pack.load_pack("test_bear_pack")
# Reload emoji to make sure the bear emoji is in the global registry
Emoji.reload()
# Verify that the bear emoji exists in the global registry
assert Emoji.exist?("bear")
# Now try to add a zip file that contains an emoji with the same shortcode
file = %Plug.Upload{
content_type: "application/zip",
filename: "emojis.zip",
path: Path.absname("test/fixtures/emojis.zip")
}
{:ok, updated_pack} = Pack.add_file(pack, nil, nil, file)
# Verify that the "bear" emoji was skipped
refute Map.has_key?(updated_pack.files, "bear")
# Other emojis should be added
assert Map.has_key?(updated_pack.files, "a_trusted_friend-128")
assert Map.has_key?(updated_pack.files, "auroraborealis")
assert Map.has_key?(updated_pack.files, "baby_in_a_box")
assert Map.has_key?(updated_pack.files, "bear-128")
# Total count should be 4 (all emojis except "bear")
assert updated_pack.files_count == 4
# Clean up the test pack
on_exit(fn ->
File.rm_rf!(test_pack_path)
end)
end
end
test "returns error when zip file is bad", %{pack: pack} do
@ -62,7 +123,7 @@ defmodule Pleroma.Emoji.PackTest do
path: Path.absname("test/instance_static/emoji/test_pack/blank.png")
}
assert Pack.add_file(pack, nil, nil, file) == {:error, :einval}
assert {:error, _} = Pack.add_file(pack, nil, nil, file)
end
test "returns pack when zip file is empty", %{pack: pack} do

View file

@ -9,7 +9,7 @@ defmodule Pleroma.FrontendTest do
@dir "test/frontend_static_test"
setup do
File.mkdir_p!(@dir)
Pleroma.Backports.mkdir_p!(@dir)
clear_config([:instance, :static_dir], @dir)
on_exit(fn ->
@ -46,7 +46,7 @@ defmodule Pleroma.FrontendTest do
folder = Path.join([@dir, "frontends", "pleroma", "fantasy"])
previously_existing = Path.join([folder, "temp"])
File.mkdir_p!(folder)
Pleroma.Backports.mkdir_p!(folder)
File.write!(previously_existing, "yey")
assert File.exists?(previously_existing)

View file

@ -14,4 +14,133 @@ defmodule Pleroma.HashtagTest do
assert {:name, {"can't be blank", [validation: :required]}} in changeset.errors
end
end
describe "search_hashtags" do
test "searches hashtags by partial match" do
{:ok, _} = Hashtag.get_or_create_by_name("car")
{:ok, _} = Hashtag.get_or_create_by_name("racecar")
{:ok, _} = Hashtag.get_or_create_by_name("nascar")
{:ok, _} = Hashtag.get_or_create_by_name("bicycle")
results = Hashtag.search("car")
assert "car" in results
assert "racecar" in results
assert "nascar" in results
refute "bicycle" in results
results = Hashtag.search("race")
assert "racecar" in results
refute "car" in results
refute "nascar" in results
refute "bicycle" in results
results = Hashtag.search("nonexistent")
assert results == []
end
test "searches hashtags by multiple words in query" do
{:ok, _} = Hashtag.get_or_create_by_name("computer")
{:ok, _} = Hashtag.get_or_create_by_name("laptop")
{:ok, _} = Hashtag.get_or_create_by_name("desktop")
{:ok, _} = Hashtag.get_or_create_by_name("phone")
# Search for "new computer" - should return "computer"
results = Hashtag.search("new computer")
assert "computer" in results
refute "laptop" in results
refute "desktop" in results
refute "phone" in results
# Search for "computer laptop" - should return both
results = Hashtag.search("computer laptop")
assert "computer" in results
assert "laptop" in results
refute "desktop" in results
refute "phone" in results
# Search for "new phone" - should return "phone"
results = Hashtag.search("new phone")
assert "phone" in results
refute "computer" in results
refute "laptop" in results
refute "desktop" in results
end
test "supports pagination" do
{:ok, _} = Hashtag.get_or_create_by_name("alpha")
{:ok, _} = Hashtag.get_or_create_by_name("beta")
{:ok, _} = Hashtag.get_or_create_by_name("gamma")
{:ok, _} = Hashtag.get_or_create_by_name("delta")
results = Hashtag.search("a", limit: 2)
assert length(results) == 2
results = Hashtag.search("a", limit: 2, offset: 1)
assert length(results) == 2
end
test "handles matching many search terms" do
{:ok, _} = Hashtag.get_or_create_by_name("computer")
{:ok, _} = Hashtag.get_or_create_by_name("laptop")
{:ok, _} = Hashtag.get_or_create_by_name("phone")
{:ok, _} = Hashtag.get_or_create_by_name("tablet")
results = Hashtag.search("new fast computer laptop phone tablet device")
assert "computer" in results
assert "laptop" in results
assert "phone" in results
assert "tablet" in results
end
test "ranks results by match quality" do
{:ok, _} = Hashtag.get_or_create_by_name("my_computer")
{:ok, _} = Hashtag.get_or_create_by_name("computer_science")
{:ok, _} = Hashtag.get_or_create_by_name("computer")
results = Hashtag.search("computer")
# Exact match first
assert Enum.at(results, 0) == "computer"
# Prefix match would be next
assert Enum.at(results, 1) == "computer_science"
# worst match is last
assert Enum.at(results, 2) == "my_computer"
end
test "prioritizes shorter names when ranking is equal" do
# Create hashtags with same ranking but different lengths
{:ok, _} = Hashtag.get_or_create_by_name("car")
{:ok, _} = Hashtag.get_or_create_by_name("racecar")
{:ok, _} = Hashtag.get_or_create_by_name("nascar")
# Search for "car" - shorter names should come first
results = Hashtag.search("car")
# Shortest exact match first
assert Enum.at(results, 0) == "car"
assert "racecar" in results
assert "nascar" in results
end
test "handles hashtag symbols in search query" do
{:ok, _} = Hashtag.get_or_create_by_name("computer")
{:ok, _} = Hashtag.get_or_create_by_name("laptop")
{:ok, _} = Hashtag.get_or_create_by_name("phone")
results_with_hash = Hashtag.search("#computer #laptop")
results_without_hash = Hashtag.search("computer laptop")
assert results_with_hash == results_without_hash
results_mixed = Hashtag.search("#computer laptop #phone")
assert "computer" in results_mixed
assert "laptop" in results_mixed
assert "phone" in results_mixed
results_only_hash = Hashtag.search("#computer")
results_no_hash = Hashtag.search("computer")
assert results_only_hash == results_no_hash
end
end
end

View file

@ -41,6 +41,10 @@ defmodule Pleroma.HTMLTest do
<span class="h-card"><a class="u-url mention animate-spin">@<span>foo</span></a></span>
"""
@mention_hashtags_sample """
<a href="https://mastodon.example/tags/linux" class="mention hashtag" rel="tag">#<span>linux</span></a>
"""
describe "StripTags scrubber" do
test "works as expected" do
expected = """
@ -126,6 +130,15 @@ defmodule Pleroma.HTMLTest do
Pleroma.HTML.Scrubber.TwitterText
)
end
test "does allow mention hashtags" do
expected = """
<a href="https://mastodon.example/tags/linux" class="mention hashtag" rel="tag">#<span>linux</span></a>
"""
assert expected ==
HTML.filter_tags(@mention_hashtags_sample, Pleroma.HTML.Scrubber.Default)
end
end
describe "default scrubber" do
@ -189,6 +202,15 @@ defmodule Pleroma.HTMLTest do
Pleroma.HTML.Scrubber.Default
)
end
test "does allow mention hashtags" do
expected = """
<a href="https://mastodon.example/tags/linux" class="mention hashtag" rel="tag">#<span>linux</span></a>
"""
assert expected ==
HTML.filter_tags(@mention_hashtags_sample, Pleroma.HTML.Scrubber.Default)
end
end
describe "extract_first_external_url_from_object" do

View file

@ -16,6 +16,14 @@ defmodule Pleroma.HTTP.AdapterHelper.HackneyTest do
describe "options/2" do
setup do: clear_config([:http, :adapter], a: 1, b: 2)
test "uses redirect-safe defaults", %{uri: uri} do
opts = Hackney.options([], uri)
assert opts[:follow_redirect] == false
assert opts[:force_redirect] == false
assert opts[:with_body] == true
end
test "add proxy and opts from config", %{uri: uri} do
opts = Hackney.options([proxy: "localhost:8123"], uri)

View file

@ -0,0 +1,355 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.HTTP.HackneyFollowRedirectRegressionTest do
use ExUnit.Case, async: false
setup do
{:ok, _} = Application.ensure_all_started(:hackney)
{:ok, tls_server} = start_tls_redirect_server()
{:ok, proxy} = start_connect_proxy()
on_exit(fn ->
stop_connect_proxy(proxy)
stop_tls_redirect_server(tls_server)
end)
{:ok, tls_server: tls_server, proxy: proxy}
end
test "hackney follow_redirect crashes behind CONNECT proxy on relative redirects", %{
tls_server: tls_server,
proxy: proxy
} do
url = "#{tls_server.base_url}/redirect"
opts = [
pool: :media,
proxy: proxy.proxy_url,
insecure: true,
connect_timeout: 1_000,
recv_timeout: 1_000,
follow_redirect: true,
force_redirect: true
]
{pid, ref} = spawn_monitor(fn -> :hackney.request(:get, url, [], <<>>, opts) end)
assert_receive {:DOWN, ^ref, :process, ^pid, reason}, 5_000
assert match?({%FunctionClauseError{}, _}, reason) or match?(%FunctionClauseError{}, reason) or
match?({:function_clause, _}, reason)
end
test "redirects work via proxy when hackney follow_redirect is disabled", %{
tls_server: tls_server,
proxy: proxy
} do
url = "#{tls_server.base_url}/redirect"
adapter_opts = [
pool: :media,
proxy: proxy.proxy_url,
insecure: true,
connect_timeout: 1_000,
recv_timeout: 1_000,
follow_redirect: false,
force_redirect: false,
with_body: true
]
client = Tesla.client([Tesla.Middleware.FollowRedirects], Tesla.Adapter.Hackney)
assert {:ok, %Tesla.Env{status: 200, body: "ok"}} =
Tesla.request(client, method: :get, url: url, opts: [adapter: adapter_opts])
end
test "reverse proxy hackney client follows redirects via proxy without crashing", %{
tls_server: tls_server,
proxy: proxy
} do
url = "#{tls_server.base_url}/redirect"
opts = [
pool: :media,
proxy: proxy.proxy_url,
insecure: true,
connect_timeout: 1_000,
recv_timeout: 1_000,
follow_redirect: true
]
assert {:ok, 200, _headers, ref} =
Pleroma.ReverseProxy.Client.Hackney.request(:get, url, [], "", opts)
assert collect_body(ref) == "ok"
Pleroma.ReverseProxy.Client.Hackney.close(ref)
end
defp collect_body(ref, acc \\ "") do
case Pleroma.ReverseProxy.Client.Hackney.stream_body(ref) do
:done -> acc
{:ok, data, _ref} -> collect_body(ref, acc <> data)
{:error, error} -> flunk("stream_body failed: #{inspect(error)}")
end
end
defp start_tls_redirect_server do
certfile = Path.expand("../../fixtures/server.pem", __DIR__)
keyfile = Path.expand("../../fixtures/private_key.pem", __DIR__)
{:ok, listener} =
:ssl.listen(0, [
:binary,
certfile: certfile,
keyfile: keyfile,
reuseaddr: true,
active: false,
packet: :raw,
ip: {127, 0, 0, 1}
])
{:ok, {{127, 0, 0, 1}, port}} = :ssl.sockname(listener)
{:ok, acceptor} =
Task.start_link(fn ->
accept_tls_loop(listener)
end)
{:ok, %{listener: listener, acceptor: acceptor, base_url: "https://127.0.0.1:#{port}"}}
end
defp stop_tls_redirect_server(%{listener: listener, acceptor: acceptor}) do
:ok = :ssl.close(listener)
if Process.alive?(acceptor) do
Process.exit(acceptor, :normal)
end
end
defp accept_tls_loop(listener) do
case :ssl.transport_accept(listener) do
{:ok, socket} ->
_ = Task.start(fn -> serve_tls(socket) end)
accept_tls_loop(listener)
{:error, :closed} ->
:ok
{:error, _reason} ->
:ok
end
end
defp serve_tls(tcp_socket) do
with {:ok, ssl_socket} <- :ssl.handshake(tcp_socket, 2_000),
{:ok, data} <- recv_ssl_headers(ssl_socket),
{:ok, path} <- parse_path(data) do
case path do
"/redirect" ->
send_ssl_response(ssl_socket, 302, "Found", [{"Location", "/final"}], "")
"/final" ->
send_ssl_response(ssl_socket, 200, "OK", [], "ok")
_ ->
send_ssl_response(ssl_socket, 404, "Not Found", [], "not found")
end
:ssl.close(ssl_socket)
else
_ ->
_ = :gen_tcp.close(tcp_socket)
:ok
end
end
defp recv_ssl_headers(socket, acc \\ <<>>) do
case :ssl.recv(socket, 0, 1_000) do
{:ok, data} ->
acc = acc <> data
if :binary.match(acc, "\r\n\r\n") != :nomatch do
{:ok, acc}
else
if byte_size(acc) > 8_192 do
{:error, :too_large}
else
recv_ssl_headers(socket, acc)
end
end
{:error, _} = error ->
error
end
end
defp send_ssl_response(socket, status, reason, headers, body) do
base_headers =
[
{"Content-Length", Integer.to_string(byte_size(body))},
{"Connection", "close"}
] ++ headers
iodata =
[
"HTTP/1.1 ",
Integer.to_string(status),
" ",
reason,
"\r\n",
Enum.map(base_headers, fn {k, v} -> [k, ": ", v, "\r\n"] end),
"\r\n",
body
]
:ssl.send(socket, iodata)
end
defp start_connect_proxy do
{:ok, listener} =
:gen_tcp.listen(0, [
:binary,
active: false,
packet: :raw,
reuseaddr: true,
ip: {127, 0, 0, 1}
])
{:ok, {{127, 0, 0, 1}, port}} = :inet.sockname(listener)
{:ok, acceptor} =
Task.start_link(fn ->
accept_proxy_loop(listener)
end)
{:ok, %{listener: listener, acceptor: acceptor, proxy_url: "127.0.0.1:#{port}"}}
end
defp stop_connect_proxy(%{listener: listener, acceptor: acceptor}) do
:ok = :gen_tcp.close(listener)
if Process.alive?(acceptor) do
Process.exit(acceptor, :normal)
end
end
defp accept_proxy_loop(listener) do
case :gen_tcp.accept(listener) do
{:ok, socket} ->
_ = Task.start(fn -> serve_proxy(socket) end)
accept_proxy_loop(listener)
{:error, :closed} ->
:ok
{:error, _reason} ->
:ok
end
end
defp serve_proxy(client_socket) do
with {:ok, {headers, rest}} <- recv_tcp_headers(client_socket),
{:ok, {host, port}} <- parse_connect(headers),
{:ok, upstream_socket} <- connect_upstream(host, port) do
:gen_tcp.send(client_socket, "HTTP/1.1 200 Connection established\r\n\r\n")
if rest != <<>> do
:gen_tcp.send(upstream_socket, rest)
end
tunnel(client_socket, upstream_socket)
else
_ ->
:gen_tcp.close(client_socket)
:ok
end
end
defp tunnel(client_socket, upstream_socket) do
parent = self()
_ = spawn_link(fn -> forward(client_socket, upstream_socket, parent) end)
_ = spawn_link(fn -> forward(upstream_socket, client_socket, parent) end)
receive do
:tunnel_closed -> :ok
after
10_000 -> :ok
end
:gen_tcp.close(client_socket)
:gen_tcp.close(upstream_socket)
end
defp forward(from_socket, to_socket, parent) do
case :gen_tcp.recv(from_socket, 0, 10_000) do
{:ok, data} ->
_ = :gen_tcp.send(to_socket, data)
forward(from_socket, to_socket, parent)
{:error, _reason} ->
send(parent, :tunnel_closed)
:ok
end
end
defp recv_tcp_headers(socket, acc \\ <<>>) do
case :gen_tcp.recv(socket, 0, 1_000) do
{:ok, data} ->
acc = acc <> data
case :binary.match(acc, "\r\n\r\n") do
:nomatch ->
if byte_size(acc) > 8_192 do
{:error, :too_large}
else
recv_tcp_headers(socket, acc)
end
{idx, _len} ->
split_at = idx + 4
<<headers::binary-size(split_at), rest::binary>> = acc
{:ok, {headers, rest}}
end
{:error, _} = error ->
error
end
end
defp parse_connect(data) do
with [request_line | _] <- String.split(data, "\r\n", trim: true),
["CONNECT", hostport | _] <- String.split(request_line, " ", parts: 3),
[host, port_str] <- String.split(hostport, ":", parts: 2),
{port, ""} <- Integer.parse(port_str) do
{:ok, {host, port}}
else
_ -> {:error, :invalid_connect}
end
end
defp connect_upstream(host, port) do
address =
case :inet.parse_address(String.to_charlist(host)) do
{:ok, ip} -> ip
{:error, _} -> String.to_charlist(host)
end
:gen_tcp.connect(address, port, [:binary, active: false, packet: :raw], 1_000)
end
defp parse_path(data) do
case String.split(data, "\r\n", parts: 2) do
[request_line | _] ->
case String.split(request_line, " ") do
[_method, path, _protocol] -> {:ok, path}
_ -> {:error, :invalid_request}
end
_ ->
{:error, :invalid_request}
end
end
end

View file

@ -0,0 +1,151 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.HTTP.HackneyRedirectRegressionTest do
use ExUnit.Case, async: false
alias Pleroma.HTTP.AdapterHelper.Hackney, as: HackneyAdapterHelper
setup do
{:ok, _} = Application.ensure_all_started(:hackney)
{:ok, server} = start_server()
on_exit(fn -> stop_server(server) end)
{:ok, server: server}
end
test "pooled redirects work with follow_redirect disabled", %{server: server} do
url = "#{server.base_url}/redirect"
uri = URI.parse(url)
adapter_opts =
HackneyAdapterHelper.options(
[pool: :media, follow_redirect: false, no_proxy_env: true],
uri
)
client = Tesla.client([Tesla.Middleware.FollowRedirects], Tesla.Adapter.Hackney)
assert {:ok, %Tesla.Env{status: 200, body: "ok"}} =
Tesla.request(client, method: :get, url: url, opts: [adapter: adapter_opts])
end
defp start_server do
{:ok, listener} =
:gen_tcp.listen(0, [
:binary,
active: false,
packet: :raw,
reuseaddr: true,
ip: {127, 0, 0, 1}
])
{:ok, {{127, 0, 0, 1}, port}} = :inet.sockname(listener)
{:ok, acceptor} =
Task.start_link(fn ->
accept_loop(listener)
end)
{:ok, %{listener: listener, acceptor: acceptor, base_url: "http://127.0.0.1:#{port}"}}
end
defp stop_server(%{listener: listener, acceptor: acceptor}) do
:ok = :gen_tcp.close(listener)
if Process.alive?(acceptor) do
Process.exit(acceptor, :normal)
end
end
defp accept_loop(listener) do
case :gen_tcp.accept(listener) do
{:ok, socket} ->
serve(socket)
accept_loop(listener)
{:error, :closed} ->
:ok
{:error, _reason} ->
:ok
end
end
defp serve(socket) do
with {:ok, data} <- recv_headers(socket),
{:ok, path} <- parse_path(data) do
case path do
"/redirect" ->
send_response(socket, 302, "Found", [{"Location", "/final"}], "")
"/final" ->
send_response(socket, 200, "OK", [], "ok")
_ ->
send_response(socket, 404, "Not Found", [], "not found")
end
else
_ -> :ok
end
:gen_tcp.close(socket)
end
defp recv_headers(socket, acc \\ <<>>) do
case :gen_tcp.recv(socket, 0, 1_000) do
{:ok, data} ->
acc = acc <> data
if :binary.match(acc, "\r\n\r\n") != :nomatch do
{:ok, acc}
else
if byte_size(acc) > 8_192 do
{:error, :too_large}
else
recv_headers(socket, acc)
end
end
{:error, _} = error ->
error
end
end
defp parse_path(data) do
case String.split(data, "\r\n", parts: 2) do
[request_line | _] ->
case String.split(request_line, " ") do
[_method, path, _protocol] -> {:ok, path}
_ -> {:error, :invalid_request}
end
_ ->
{:error, :invalid_request}
end
end
defp send_response(socket, status, reason, headers, body) do
base_headers =
[
{"Content-Length", Integer.to_string(byte_size(body))},
{"Connection", "close"}
] ++ headers
iodata =
[
"HTTP/1.1 ",
Integer.to_string(status),
" ",
reason,
"\r\n",
Enum.map(base_headers, fn {k, v} -> [k, ": ", v, "\r\n"] end),
"\r\n",
body
]
:gen_tcp.send(socket, iodata)
end
end

View file

@ -3,10 +3,13 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.HTTPTest do
use ExUnit.Case, async: true
use ExUnit.Case, async: false
use Pleroma.Tests.Helpers
import Tesla.Mock
alias Pleroma.HTTP
alias Pleroma.Utils.URIEncoding
setup do
mock(fn
@ -25,6 +28,39 @@ defmodule Pleroma.HTTPTest do
%{method: :post, url: "http://example.com/world"} ->
%Tesla.Env{status: 200, body: "world"}
%{method: :get, url: "https://example.com/emoji/Pack%201/koronebless.png?foo=bar+baz"} ->
%Tesla.Env{status: 200, body: "emoji data"}
%{
method: :get,
url: "https://example.com/media/foo/bar%20!$&'()*+,;=/:%20@a%20%5Bbaz%5D.mp4"
} ->
%Tesla.Env{status: 200, body: "video data"}
%{method: :get, url: "https://example.com/media/unicode%20%F0%9F%99%82%20.gif"} ->
%Tesla.Env{status: 200, body: "unicode data"}
%{
method: :get,
url:
"https://i.guim.co.uk/img/media/1069ef13c447908272c4de94174cec2b6352cb2f/0_91_2000_1201/master/2000.jpg?width=1200&height=630&quality=85&auto=format&fit=crop&precrop=40:21,offset-x50,offset-y0&overlay-align=bottom%2Cleft&overlay-width=100p&overlay-base64=L2ltZy9zdGF0aWMvb3ZlcmxheXMvdGctb3BpbmlvbnMtYWdlLTIwMTkucG5n&enable=upscale&s=cba21427a73512fdc9863c486c03fdd8"
} ->
%Tesla.Env{status: 200, body: "Guardian image quirk"}
%{
method: :get,
url:
"https://i.guim.co.uk/emoji/Pack%201/koronebless.png?precrop=40:21,overlay-x0,overlay-y0&foo=bar+baz"
} ->
%Tesla.Env{status: 200, body: "Space in query with Guardian quirk"}
%{
method: :get,
url:
"https://examplebucket.s3.amazonaws.com/test.txt?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=accessKEY%2F20130721%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20130721T201207Z&X-Amz-Expires=86400&X-Amz-Signature=SIGNATURE&X-Amz-SignedHeaders=host"
} ->
%Tesla.Env{status: 200, body: "AWS S3 data"}
end)
:ok
@ -67,4 +103,115 @@ defmodule Pleroma.HTTPTest do
}
end
end
test "URL encoding properly encodes URLs with spaces" do
clear_config(:test_url_encoding, true)
url_with_space = "https://example.com/emoji/Pack 1/koronebless.png?foo=bar baz"
{:ok, result} = HTTP.get(url_with_space)
assert result.status == 200
properly_encoded_url = "https://example.com/emoji/Pack%201/koronebless.png?foo=bar+baz"
{:ok, result} = HTTP.get(properly_encoded_url)
assert result.status == 200
url_with_reserved_chars = "https://example.com/media/foo/bar !$&'()*+,;=/: @a [baz].mp4"
{:ok, result} = HTTP.get(url_with_reserved_chars)
assert result.status == 200
url_with_unicode = "https://example.com/media/unicode 🙂 .gif"
{:ok, result} = HTTP.get(url_with_unicode)
assert result.status == 200
end
test "decodes URL first by default" do
clear_config(:test_url_encoding, true)
normal_url = "https://example.com/media/file%20with%20space.jpg?name=a+space.jpg"
result = URIEncoding.encode_url(normal_url)
assert result == "https://example.com/media/file%20with%20space.jpg?name=a+space.jpg"
end
test "doesn't decode URL first when specified" do
clear_config(:test_url_encoding, true)
normal_url = "https://example.com/media/file%20with%20space.jpg"
result = URIEncoding.encode_url(normal_url, bypass_decode: true)
assert result == "https://example.com/media/file%2520with%2520space.jpg"
end
test "properly applies Guardian image query quirk" do
clear_config(:test_url_encoding, true)
url =
"https://i.guim.co.uk/img/media/1069ef13c447908272c4de94174cec2b6352cb2f/0_91_2000_1201/master/2000.jpg?width=1200&height=630&quality=85&auto=format&fit=crop&precrop=40:21,offset-x50,offset-y0&overlay-align=bottom%2Cleft&overlay-width=100p&overlay-base64=L2ltZy9zdGF0aWMvb3ZlcmxheXMvdGctb3BpbmlvbnMtYWdlLTIwMTkucG5n&enable=upscale&s=cba21427a73512fdc9863c486c03fdd8"
result = URIEncoding.encode_url(url)
assert result == url
{:ok, result_get} = HTTP.get(result)
assert result_get.status == 200
end
test "properly encodes spaces as \"pluses\" in query when using quirks" do
clear_config(:test_url_encoding, true)
url =
"https://i.guim.co.uk/emoji/Pack 1/koronebless.png?precrop=40:21,overlay-x0,overlay-y0&foo=bar baz"
properly_encoded_url =
"https://i.guim.co.uk/emoji/Pack%201/koronebless.png?precrop=40:21,overlay-x0,overlay-y0&foo=bar+baz"
result = URIEncoding.encode_url(url)
assert result == properly_encoded_url
{:ok, result_get} = HTTP.get(result)
assert result_get.status == 200
end
test "properly encode AWS S3 queries" do
clear_config(:test_url_encoding, true)
url =
"https://examplebucket.s3.amazonaws.com/test.txt?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=accessKEY%2F20130721%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20130721T201207Z&X-Amz-Expires=86400&X-Amz-Signature=SIGNATURE&X-Amz-SignedHeaders=host"
unencoded_url =
"https://examplebucket.s3.amazonaws.com/test.txt?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=accessKEY/20130721/us-east-1/s3/aws4_request&X-Amz-Date=20130721T201207Z&X-Amz-Expires=86400&X-Amz-Signature=SIGNATURE&X-Amz-SignedHeaders=host"
result = URIEncoding.encode_url(url)
result_unencoded = URIEncoding.encode_url(unencoded_url)
assert result == url
assert result == result_unencoded
{:ok, result_get} = HTTP.get(result)
assert result_get.status == 200
end
test "preserves query key order" do
clear_config(:test_url_encoding, true)
url = "https://example.com/foo?hjkl=qwertz&xyz=abc&bar=baz"
result = URIEncoding.encode_url(url)
assert result == url
end
end

View file

@ -3,19 +3,15 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Instances.InstanceTest do
alias Pleroma.Instances
alias Pleroma.Instances.Instance
alias Pleroma.Repo
alias Pleroma.Tests.ObanHelpers
alias Pleroma.Web.CommonAPI
use Oban.Testing, repo: Pleroma.Repo
use Pleroma.DataCase
import ExUnit.CaptureLog
import Pleroma.Factory
setup_all do: clear_config([:instance, :federation_reachability_timeout_days], 1)
describe "set_reachable/1" do
test "clears `unreachable_since` of existing matching Instance record having non-nil `unreachable_since`" do
unreachable_since = NaiveDateTime.to_iso8601(NaiveDateTime.utc_now())
@ -31,6 +27,32 @@ defmodule Pleroma.Instances.InstanceTest do
assert {:ok, instance} = Instance.set_reachable(instance.host)
refute instance.unreachable_since
end
test "cancels all ReachabilityWorker jobs for the domain" do
domain = "cancelme.example.org"
insert(:instance, host: domain, unreachable_since: NaiveDateTime.utc_now())
# Insert a ReachabilityWorker job for this domain, scheduled 5 minutes in the future
scheduled_at = DateTime.add(DateTime.utc_now(), 300, :second)
{:ok, job} =
Pleroma.Workers.ReachabilityWorker.new(
%{"domain" => domain, "phase" => "phase_1min", "attempt" => 1},
scheduled_at: scheduled_at
)
|> Oban.insert()
# Ensure the job is present
job = Pleroma.Repo.get(Oban.Job, job.id)
assert job
# Call set_reachable, which should delete the job
assert {:ok, _} = Instance.set_reachable(domain)
# Reload the job and assert it is deleted
job = Pleroma.Repo.get(Oban.Job, job.id)
refute job
end
end
describe "set_unreachable/1" do
@ -145,7 +167,11 @@ defmodule Pleroma.Instances.InstanceTest do
end
test "Doesn't scrapes unreachable instances" do
instance = insert(:instance, unreachable_since: Instances.reachability_datetime_threshold())
instance =
insert(:instance,
unreachable_since: NaiveDateTime.utc_now() |> NaiveDateTime.add(-:timer.hours(24))
)
url = "https://" <> instance.host
assert capture_log(fn -> assert nil == Instance.get_or_update_favicon(URI.parse(url)) end) =~
@ -213,32 +239,44 @@ defmodule Pleroma.Instances.InstanceTest do
end
end
test "delete_users_and_activities/1 deletes remote instance users and activities" do
[mario, luigi, _peach, wario] =
users = [
insert(:user, nickname: "mario@mushroom.kingdom", name: "Mario"),
insert(:user, nickname: "luigi@mushroom.kingdom", name: "Luigi"),
insert(:user, nickname: "peach@mushroom.kingdom", name: "Peach"),
insert(:user, nickname: "wario@greedville.biz", name: "Wario")
]
test "delete/1 schedules a job to delete the instance and users" do
insert(:user, nickname: "mario@mushroom.kingdom", name: "Mario")
{:ok, post1} = CommonAPI.post(mario, %{status: "letsa go!"})
{:ok, post2} = CommonAPI.post(luigi, %{status: "itsa me... luigi"})
{:ok, post3} = CommonAPI.post(wario, %{status: "WHA-HA-HA!"})
{:ok, _job} = Instance.delete("mushroom.kingdom")
{:ok, job} = Instance.delete_users_and_activities("mushroom.kingdom")
:ok = ObanHelpers.perform(job)
assert_enqueued(
worker: Pleroma.Workers.DeleteWorker,
args: %{"op" => "delete_instance", "host" => "mushroom.kingdom"}
)
end
[mario, luigi, peach, wario] = Repo.reload(users)
describe "check_unreachable/1" do
test "schedules a ReachabilityWorker job for the given domain" do
domain = "test.example.com"
refute mario.is_active
refute luigi.is_active
refute peach.is_active
refute peach.name == "Peach"
# Call check_unreachable
assert {:ok, _job} = Instance.check_unreachable(domain)
assert wario.is_active
assert wario.name == "Wario"
# Verify that a ReachabilityWorker job was scheduled
jobs = all_enqueued(worker: Pleroma.Workers.ReachabilityWorker)
assert length(jobs) == 1
[job] = jobs
assert job.args["domain"] == domain
end
assert [nil, nil, %{}] = Repo.reload([post1, post2, post3])
test "handles multiple calls for the same domain (uniqueness enforced)" do
domain = "duplicate.example.com"
assert {:ok, _job1} = Instance.check_unreachable(domain)
# Second call for the same domain
assert {:ok, %Oban.Job{conflict?: true}} = Instance.check_unreachable(domain)
# Should only have one job due to uniqueness
jobs = all_enqueued(worker: Pleroma.Workers.ReachabilityWorker)
assert length(jobs) == 1
[job] = jobs
assert job.args["domain"] == domain
end
end
end

View file

@ -6,74 +6,42 @@ defmodule Pleroma.InstancesTest do
alias Pleroma.Instances
use Pleroma.DataCase
setup_all do: clear_config([:instance, :federation_reachability_timeout_days], 1)
use Oban.Testing, repo: Pleroma.Repo
describe "reachable?/1" do
test "returns `true` for host / url with unknown reachability status" do
assert Instances.reachable?("unknown.site")
assert Instances.reachable?("http://unknown.site")
end
test "returns `false` for host / url marked unreachable for at least `reachability_datetime_threshold()`" do
host = "consistently-unreachable.name"
Instances.set_consistently_unreachable(host)
refute Instances.reachable?(host)
refute Instances.reachable?("http://#{host}/path")
end
test "returns `true` for host / url marked unreachable for less than `reachability_datetime_threshold()`" do
url = "http://eventually-unreachable.name/path"
Instances.set_unreachable(url)
assert Instances.reachable?(url)
assert Instances.reachable?(URI.parse(url).host)
end
test "raises FunctionClauseError exception on non-binary input" do
assert_raise FunctionClauseError, fn -> Instances.reachable?(nil) end
assert_raise FunctionClauseError, fn -> Instances.reachable?(1) end
end
end
describe "filter_reachable/1" do
setup do
host = "consistently-unreachable.name"
url1 = "http://eventually-unreachable.com/path"
url2 = "http://domain.com/path"
unreachable_host = "consistently-unreachable.name"
reachable_host = "http://domain.com/path"
Instances.set_consistently_unreachable(host)
Instances.set_unreachable(url1)
Instances.set_unreachable(unreachable_host)
result = Instances.filter_reachable([host, url1, url2, nil])
%{result: result, url1: url1, url2: url2}
result = Instances.filter_reachable([unreachable_host, reachable_host, nil])
%{result: result, reachable_host: reachable_host, unreachable_host: unreachable_host}
end
test "returns a map with keys containing 'not marked consistently unreachable' elements of supplied list",
%{result: result, url1: url1, url2: url2} do
assert is_map(result)
assert Enum.sort([url1, url2]) == result |> Map.keys() |> Enum.sort()
test "returns a list of only reachable elements",
%{result: result, reachable_host: reachable_host} do
assert is_list(result)
assert [reachable_host] == result
end
test "returns a map with `unreachable_since` values for keys",
%{result: result, url1: url1, url2: url2} do
assert is_map(result)
assert %NaiveDateTime{} = result[url1]
assert is_nil(result[url2])
end
test "returns an empty map for empty list or list containing no hosts / url" do
assert %{} == Instances.filter_reachable([])
assert %{} == Instances.filter_reachable([nil])
test "returns an empty list when provided no data" do
assert [] == Instances.filter_reachable([])
assert [] == Instances.filter_reachable([nil])
end
end
describe "set_reachable/1" do
test "sets unreachable url or host reachable" do
host = "domain.com"
Instances.set_consistently_unreachable(host)
Instances.set_unreachable(host)
refute Instances.reachable?(host)
Instances.set_reachable(host)
@ -103,22 +71,68 @@ defmodule Pleroma.InstancesTest do
end
end
describe "set_consistently_unreachable/1" do
test "sets reachable url or host unreachable" do
url = "http://domain.com?q="
assert Instances.reachable?(url)
describe "check_all_unreachable/0" do
test "schedules ReachabilityWorker jobs for all unreachable instances" do
domain1 = "unreachable1.example.com"
domain2 = "unreachable2.example.com"
domain3 = "unreachable3.example.com"
Instances.set_consistently_unreachable(url)
refute Instances.reachable?(url)
Instances.set_unreachable(domain1)
Instances.set_unreachable(domain2)
Instances.set_unreachable(domain3)
Instances.check_all_unreachable()
# Verify that ReachabilityWorker jobs were scheduled for all unreachable domains
jobs = all_enqueued(worker: Pleroma.Workers.ReachabilityWorker)
assert length(jobs) == 3
domains = Enum.map(jobs, & &1.args["domain"])
assert domain1 in domains
assert domain2 in domains
assert domain3 in domains
end
test "keeps unreachable url or host unreachable" do
host = "site.name"
Instances.set_consistently_unreachable(host)
refute Instances.reachable?(host)
test "does not schedule jobs for reachable instances" do
unreachable_domain = "unreachable.example.com"
reachable_domain = "reachable.example.com"
Instances.set_consistently_unreachable(host)
refute Instances.reachable?(host)
Instances.set_unreachable(unreachable_domain)
Instances.set_reachable(reachable_domain)
Instances.check_all_unreachable()
# Verify that only one job was scheduled (for the unreachable domain)
jobs = all_enqueued(worker: Pleroma.Workers.ReachabilityWorker)
assert length(jobs) == 1
[job] = jobs
assert job.args["domain"] == unreachable_domain
end
end
test "delete_all_unreachable/0 schedules DeleteWorker jobs for all unreachable instances" do
domain1 = "unreachable1.example.com"
domain2 = "unreachable2.example.com"
domain3 = "unreachable3.example.com"
Instances.set_unreachable(domain1)
Instances.set_unreachable(domain2)
Instances.set_unreachable(domain3)
Instances.delete_all_unreachable()
# Verify that DeleteWorker jobs were scheduled for all unreachable domains
jobs = all_enqueued(worker: Pleroma.Workers.DeleteWorker)
assert length(jobs) == 3
domains = Enum.map(jobs, & &1.args["host"])
assert domain1 in domains
assert domain2 in domains
assert domain3 in domains
# Verify all jobs are delete_instance operations
Enum.each(jobs, fn job ->
assert job.args["op"] == "delete_instance"
end)
end
end

View file

@ -268,6 +268,17 @@ defmodule Pleroma.Integration.MastodonWebsocketTest do
end)
end
test "accepts valid token on Sec-WebSocket-Protocol header", %{token: token} do
assert {:ok, _} = start_socket("?stream=user", [{"Sec-WebSocket-Protocol", token.token}])
capture_log(fn ->
assert {:error, %WebSockex.RequestError{code: 401}} =
start_socket("?stream=user", [{"Sec-WebSocket-Protocol", "I am a friend"}])
Process.sleep(30)
end)
end
test "accepts valid token on client-sent event", %{token: token} do
assert {:ok, pid} = start_socket()
@ -352,7 +363,7 @@ defmodule Pleroma.Integration.MastodonWebsocketTest do
test "accepts the 'list' stream", %{token: token, user: user} do
posting_user = insert(:user)
{:ok, list} = Pleroma.List.create("test", user)
{:ok, list} = Pleroma.List.create(%{title: "test"}, user)
Pleroma.List.follow(list, posting_user)
assert {:ok, _} = start_socket("?stream=list&access_token=#{token.token}&list=#{list.id}")
@ -404,7 +415,7 @@ defmodule Pleroma.Integration.MastodonWebsocketTest do
test "receives private statuses", %{user: reading_user, token: token} do
user = insert(:user)
CommonAPI.follow(reading_user, user)
CommonAPI.follow(user, reading_user)
{:ok, _} = start_socket("?stream=user&access_token=#{token.token}")
@ -431,7 +442,7 @@ defmodule Pleroma.Integration.MastodonWebsocketTest do
test "receives edits", %{user: reading_user, token: token} do
user = insert(:user)
CommonAPI.follow(reading_user, user)
CommonAPI.follow(user, reading_user)
{:ok, _} = start_socket("?stream=user&access_token=#{token.token}")
@ -440,7 +451,7 @@ defmodule Pleroma.Integration.MastodonWebsocketTest do
assert_receive {:text, _raw_json}, 1_000
{:ok, _} = CommonAPI.update(user, activity, %{status: "mew mew", visibility: "private"})
{:ok, _} = CommonAPI.update(activity, user, %{status: "mew mew", visibility: "private"})
assert_receive {:text, raw_json}, 1_000
@ -459,7 +470,7 @@ defmodule Pleroma.Integration.MastodonWebsocketTest do
test "receives notifications", %{user: reading_user, token: token} do
user = insert(:user)
CommonAPI.follow(reading_user, user)
CommonAPI.follow(user, reading_user)
{:ok, _} = start_socket("?stream=user:notification&access_token=#{token.token}")

View file

@ -0,0 +1,56 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Language.LanguageDetectorTest do
use Pleroma.DataCase, async: true
alias Pleroma.Language.LanguageDetector
alias Pleroma.Language.LanguageDetectorMock
alias Pleroma.StaticStubbedConfigMock
import Mox
setup do
# Stub the StaticStubbedConfigMock to return our mock for the provider
StaticStubbedConfigMock
|> stub(:get, fn
[Pleroma.Language.LanguageDetector, :provider] -> LanguageDetectorMock
_other -> nil
end)
# Stub the LanguageDetectorMock with default implementations
LanguageDetectorMock
|> stub(:missing_dependencies, fn -> [] end)
|> stub(:configured?, fn -> true end)
:ok
end
test "it detects text language" do
LanguageDetectorMock
|> expect(:detect, fn _text -> "fr" end)
detected_language = LanguageDetector.detect("Je viens d'atterrir en Tchéquie.")
assert detected_language == "fr"
end
test "it returns nil if text is not long enough" do
# No need to set expectations as the word count check happens before the provider is called
detected_language = LanguageDetector.detect("it returns nil")
assert detected_language == nil
end
test "it returns nil if no provider specified" do
# Override the stub to return nil for the provider
StaticStubbedConfigMock
|> expect(:get, fn [Pleroma.Language.LanguageDetector, :provider] -> nil end)
detected_language = LanguageDetector.detect("this should also return nil")
assert detected_language == nil
end
end

View file

@ -0,0 +1,37 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Language.Translation.DeeplTest do
use Pleroma.Web.ConnCase
alias Pleroma.Language.Translation.Deepl
test "it translates text" do
Tesla.Mock.mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)
clear_config([Pleroma.Language.Translation.Deepl, :base_url], "https://api-free.deepl.com")
clear_config([Pleroma.Language.Translation.Deepl, :api_key], "API_KEY")
{:ok, res} =
Deepl.translate(
"USUNĄĆ ŚLEDZIKA!Wklej to na swojego śledzika. Jeżeli uzbieramy 70% użytkowników nk...to usuną śledzika!!!",
"pl",
"en"
)
assert %{
detected_source_language: "PL",
provider: "DeepL"
} = res
end
test "it returns languages list" do
Tesla.Mock.mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)
clear_config([Pleroma.Language.Translation.Deepl, :base_url], "https://api-free.deepl.com")
clear_config([Pleroma.Language.Translation.Deepl, :api_key], "API_KEY")
assert {:ok, [language | _languages]} = Deepl.supported_languages(:target)
assert is_binary(language)
end
end

View file

@ -0,0 +1,59 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2024 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Language.Translation.TranslateLocallyTest do
use Pleroma.DataCase
alias Pleroma.Language.Translation.TranslateLocally
@example_models %{
"de" => %{
"en" => "de-en-base"
},
"en" => %{
"de" => "en-de-base",
"pl" => "en-pl-tiny"
},
"cs" => %{
"en" => "cs-en-base"
},
"pl" => %{
"en" => "pl-en-tiny"
}
}
test "it returns languages list" do
clear_config([Pleroma.Language.Translation.TranslateLocally, :models], @example_models)
assert {:ok, languages} = TranslateLocally.supported_languages(:source)
assert ["cs", "de", "en", "pl"] = languages |> Enum.sort()
end
describe "it returns languages matrix" do
test "without intermediary language" do
clear_config([Pleroma.Language.Translation.TranslateLocally, :models], @example_models)
assert {:ok,
%{
"cs" => ["en"],
"de" => ["en"],
"en" => ["de", "pl"],
"pl" => ["en"]
}} = TranslateLocally.languages_matrix()
end
test "with intermediary language" do
clear_config([Pleroma.Language.Translation.TranslateLocally, :models], @example_models)
clear_config([Pleroma.Language.Translation.TranslateLocally, :intermediary_language], "en")
assert {:ok,
%{
"cs" => ["de", "en", "pl"],
"de" => ["en", "pl"],
"en" => ["de", "pl"],
"pl" => ["de", "en"]
}} = TranslateLocally.languages_matrix()
end
end
end

View file

@ -0,0 +1,28 @@
defmodule Pleroma.Language.TranslationTest do
use Pleroma.Web.ConnCase
alias Pleroma.Language.Translation
setup do: clear_config([Pleroma.Language.Translation, :provider], TranslationMock)
test "it translates text" do
assert {:ok,
%{
content: "txet emos",
detected_source_language: _,
provider: _
}} = Translation.translate("some text", "en", "uk")
end
test "it stores translation result in cache" do
Translation.translate("some text", "en", "uk")
assert {:ok, result} =
Cachex.get(
:translations_cache,
"en/uk/#{:crypto.hash(:sha256, "some text") |> Base.encode64()}"
)
assert result.content == "txet emos"
end
end

View file

@ -10,22 +10,23 @@ defmodule Pleroma.ListTest do
test "creating a list" do
user = insert(:user)
{:ok, %Pleroma.List{} = list} = Pleroma.List.create("title", user)
%Pleroma.List{title: title} = Pleroma.List.get(list.id, user)
{:ok, %Pleroma.List{} = list} = Pleroma.List.create(%{title: "title"}, user)
%Pleroma.List{title: title, exclusive: exclusive} = Pleroma.List.get(list.id, user)
assert title == "title"
assert exclusive == false
end
test "validates title" do
user = insert(:user)
assert {:error, changeset} = Pleroma.List.create("", user)
assert {:error, changeset} = Pleroma.List.create(%{title: ""}, user)
assert changeset.errors == [title: {"can't be blank", [validation: :required]}]
end
test "getting a list not belonging to the user" do
user = insert(:user)
other_user = insert(:user)
{:ok, %Pleroma.List{} = list} = Pleroma.List.create("title", user)
{:ok, %Pleroma.List{} = list} = Pleroma.List.create(%{title: "title"}, user)
ret = Pleroma.List.get(list.id, other_user)
assert is_nil(ret)
end
@ -33,7 +34,7 @@ defmodule Pleroma.ListTest do
test "adding an user to a list" do
user = insert(:user)
other_user = insert(:user)
{:ok, list} = Pleroma.List.create("title", user)
{:ok, list} = Pleroma.List.create(%{title: "title"}, user)
{:ok, %{following: following}} = Pleroma.List.follow(list, other_user)
assert [other_user.follower_address] == following
end
@ -41,7 +42,7 @@ defmodule Pleroma.ListTest do
test "removing an user from a list" do
user = insert(:user)
other_user = insert(:user)
{:ok, list} = Pleroma.List.create("title", user)
{:ok, list} = Pleroma.List.create(%{title: "title"}, user)
{:ok, %{following: _following}} = Pleroma.List.follow(list, other_user)
{:ok, %{following: following}} = Pleroma.List.unfollow(list, other_user)
assert [] == following
@ -49,14 +50,27 @@ defmodule Pleroma.ListTest do
test "renaming a list" do
user = insert(:user)
{:ok, list} = Pleroma.List.create("title", user)
{:ok, %{title: title}} = Pleroma.List.rename(list, "new")
{:ok, list} = Pleroma.List.create(%{title: "title"}, user)
{:ok, %{title: title}} = Pleroma.List.update(list, %{title: "new"})
assert "new" == title
end
test "updating a list exclusivity" do
user = insert(:user)
{:ok, %{exclusive: exclusive} = list} =
Pleroma.List.create(%{title: "title", exclusive: true}, user)
assert exclusive == true
{:ok, %{exclusive: exclusive} = list} = Pleroma.List.update(list, %{exclusive: false})
assert exclusive == false
{:ok, %{exclusive: exclusive}} = Pleroma.List.update(list, %{exclusive: true})
assert exclusive == true
end
test "deleting a list" do
user = insert(:user)
{:ok, list} = Pleroma.List.create("title", user)
{:ok, list} = Pleroma.List.create(%{title: "title"}, user)
{:ok, list} = Pleroma.List.delete(list)
assert is_nil(Repo.get(Pleroma.List, list.id))
end
@ -65,7 +79,7 @@ defmodule Pleroma.ListTest do
user = insert(:user)
other_user = insert(:user)
third_user = insert(:user)
{:ok, list} = Pleroma.List.create("title", user)
{:ok, list} = Pleroma.List.create(%{title: "title"}, user)
{:ok, list} = Pleroma.List.follow(list, other_user)
{:ok, list} = Pleroma.List.follow(list, third_user)
{:ok, following} = Pleroma.List.get_following(list)
@ -76,9 +90,9 @@ defmodule Pleroma.ListTest do
test "getting all lists by an user" do
user = insert(:user)
other_user = insert(:user)
{:ok, list_one} = Pleroma.List.create("title", user)
{:ok, list_two} = Pleroma.List.create("other title", user)
{:ok, list_three} = Pleroma.List.create("third title", other_user)
{:ok, list_one} = Pleroma.List.create(%{title: "title"}, user)
{:ok, list_two} = Pleroma.List.create(%{title: "other title"}, user)
{:ok, list_three} = Pleroma.List.create(%{title: "third title"}, other_user)
lists = Pleroma.List.for_user(user, %{})
assert list_one in lists
assert list_two in lists
@ -88,9 +102,9 @@ defmodule Pleroma.ListTest do
test "getting all lists the user is a member of" do
user = insert(:user)
other_user = insert(:user)
{:ok, list_one} = Pleroma.List.create("title", user)
{:ok, list_two} = Pleroma.List.create("other title", user)
{:ok, list_three} = Pleroma.List.create("third title", other_user)
{:ok, list_one} = Pleroma.List.create(%{title: "title"}, user)
{:ok, list_two} = Pleroma.List.create(%{title: "other title"}, user)
{:ok, list_three} = Pleroma.List.create(%{title: "third title"}, other_user)
{:ok, list_one} = Pleroma.List.follow(list_one, other_user)
{:ok, list_two} = Pleroma.List.follow(list_two, other_user)
{:ok, list_three} = Pleroma.List.follow(list_three, user)
@ -106,8 +120,8 @@ defmodule Pleroma.ListTest do
not_owner = insert(:user)
member_1 = insert(:user)
member_2 = insert(:user)
{:ok, owned_list} = Pleroma.List.create("owned", owner)
{:ok, not_owned_list} = Pleroma.List.create("not owned", not_owner)
{:ok, owned_list} = Pleroma.List.create(%{title: "owned"}, owner)
{:ok, not_owned_list} = Pleroma.List.create(%{title: "not owned"}, not_owner)
{:ok, owned_list} = Pleroma.List.follow(owned_list, member_1)
{:ok, owned_list} = Pleroma.List.follow(owned_list, member_2)
{:ok, not_owned_list} = Pleroma.List.follow(not_owned_list, member_1)
@ -123,14 +137,14 @@ defmodule Pleroma.ListTest do
test "get by ap_id" do
user = insert(:user)
{:ok, list} = Pleroma.List.create("foo", user)
{:ok, list} = Pleroma.List.create(%{title: "foo"}, user)
assert Pleroma.List.get_by_ap_id(list.ap_id) == list
end
test "memberships" do
user = insert(:user)
member = insert(:user)
{:ok, list} = Pleroma.List.create("foo", user)
{:ok, list} = Pleroma.List.create(%{title: "foo"}, user)
{:ok, list} = Pleroma.List.follow(list, member)
assert Pleroma.List.memberships(member) == [list.ap_id]
@ -140,7 +154,7 @@ defmodule Pleroma.ListTest do
user = insert(:user)
member = insert(:user)
{:ok, list} = Pleroma.List.create("foo", user)
{:ok, list} = Pleroma.List.create(%{title: "foo"}, user)
{:ok, list} = Pleroma.List.follow(list, member)
assert Pleroma.List.member?(list, member)

View file

@ -36,11 +36,12 @@ defmodule Pleroma.MarkerTest do
insert(:notification, user: user, activity: insert(:note_activity))
insert(:notification, user: user, activity: insert(:note_activity))
insert(:marker, timeline: "home", user: user)
%Marker{} = refreshed_marker = refresh_record(marker)
assert Marker.get_markers(
user,
["notifications"]
) == [%Marker{refresh_record(marker) | unread_count: 2}]
) == [%{refreshed_marker | unread_count: 2}]
end
end

View file

@ -21,7 +21,7 @@ defmodule Pleroma.MigrationHelper.NotificationBackfillTest do
{:ok, post} = CommonAPI.post(user, %{status: "yeah, @#{other_user.nickname}"})
{:ok, chat} = CommonAPI.post_chat_message(user, other_user, "yo")
{:ok, react} = CommonAPI.react_with_emoji(post.id, other_user, "")
{:ok, like} = CommonAPI.favorite(other_user, post.id)
{:ok, like} = CommonAPI.favorite(post.id, other_user)
{:ok, react_2} = CommonAPI.react_with_emoji(post.id, other_user, "")
data =

View file

@ -308,4 +308,37 @@ defmodule Pleroma.ModerationLogTest do
assert log.data["message"] == "@#{moderator.nickname} deleted status ##{note.id}"
end
end
describe "get_log_entry_message/1" do
setup do
moderator = insert(:user, is_moderator: true)
[moderator: moderator]
end
test "handles unknown action types gracefully", %{moderator: moderator} do
log_entry = %ModerationLog{
data: %{
"actor" => %{"nickname" => moderator.nickname},
"action" => "unknown_action",
"some_data" => "test_value"
}
}
assert ModerationLog.get_log_entry_message(log_entry) =~ moderator.nickname
assert ModerationLog.get_log_entry_message(log_entry) =~ "unknown_action"
end
test "handles malformed log entries gracefully" do
log_entry = %ModerationLog{
data: %{
"action" => "force_password_reset"
# Missing "actor" and "subject" fields
}
}
message = ModerationLog.get_log_entry_message(log_entry)
assert is_binary(message)
assert message =~ "force_password_reset"
end
end
end

View file

@ -17,9 +17,10 @@ defmodule Pleroma.NotificationTest do
alias Pleroma.Web.ActivityPub.Transmogrifier
alias Pleroma.Web.CommonAPI
alias Pleroma.Web.MastodonAPI.NotificationView
alias Pleroma.Web.Streamer
setup do
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Config)
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Test.StaticConfig)
:ok
end
@ -165,7 +166,7 @@ defmodule Pleroma.NotificationTest do
{:ok, _activity_two} = CommonAPI.repeat(activity_one.id, repeated_user)
{:ok, _edit_activity} =
CommonAPI.update(user, activity_one, %{
CommonAPI.update(activity_one, user, %{
status: "hey @#{other_user.nickname}! mew mew"
})
@ -180,8 +181,8 @@ defmodule Pleroma.NotificationTest do
question = insert(:question, user: user1)
activity = insert(:question_activity, question: question)
{:ok, _, _} = CommonAPI.vote(user2, question, [0])
{:ok, _, _} = CommonAPI.vote(user3, question, [1])
{:ok, _, _} = CommonAPI.vote(question, user2, [0])
{:ok, _, _} = CommonAPI.vote(question, user3, [1])
{:ok, notifications} = Notification.create_poll_notifications(activity)
@ -209,7 +210,7 @@ defmodule Pleroma.NotificationTest do
notification_settings: %Pleroma.User.NotificationSetting{block_from_strangers: true}
)
CommonAPI.follow(follower, followed)
CommonAPI.follow(followed, follower)
{:ok, activity} = CommonAPI.post(follower, %{status: "hey @#{followed.nickname}"})
refute Notification.create_notification(activity, followed)
end
@ -222,7 +223,7 @@ defmodule Pleroma.NotificationTest do
notification_settings: %Pleroma.User.NotificationSetting{block_from_strangers: true}
)
CommonAPI.follow(receiver, poster)
CommonAPI.follow(poster, receiver)
{:ok, activity} = CommonAPI.post(poster, %{status: "hey @#{receiver.nickname}"})
assert Notification.create_notification(activity, receiver)
end
@ -238,7 +239,7 @@ defmodule Pleroma.NotificationTest do
user = insert(:user)
subscriber = insert(:user)
{:ok, _, _, _} = CommonAPI.follow(subscriber, user)
{:ok, _, _, _} = CommonAPI.follow(user, subscriber)
User.subscribe(subscriber, user)
{:ok, status} = CommonAPI.post(user, %{status: "Akariiiin"})
{:ok, [_notif]} = Notification.create_notifications(status)
@ -295,7 +296,7 @@ defmodule Pleroma.NotificationTest do
insert(:filter, user: user, phrase: "tesla", hide: true)
{:ok, activity_one} = CommonAPI.post(user, %{status: "wow tesla"})
{:ok, activity_two} = CommonAPI.favorite(other_user, activity_one.id)
{:ok, activity_two} = CommonAPI.favorite(activity_one.id, other_user)
{:ok, [notification]} = Notification.create_notifications(activity_two)
@ -309,7 +310,7 @@ defmodule Pleroma.NotificationTest do
user = insert(:user)
followed_user = insert(:user, is_locked: false)
{:ok, _, _, _activity} = CommonAPI.follow(user, followed_user)
{:ok, _, _, _activity} = CommonAPI.follow(followed_user, user)
assert FollowingRelationship.following?(user, followed_user)
assert [notification] = Notification.for_user(followed_user)
@ -324,7 +325,7 @@ defmodule Pleroma.NotificationTest do
user = insert(:user)
followed_user = insert(:user, is_locked: true)
{:ok, _, _, _activity} = CommonAPI.follow(user, followed_user)
{:ok, _, _, _activity} = CommonAPI.follow(followed_user, user)
refute FollowingRelationship.following?(user, followed_user)
assert [notification] = Notification.for_user(followed_user)
@ -349,12 +350,12 @@ defmodule Pleroma.NotificationTest do
user = insert(:user)
followed_user = insert(:user, is_locked: false)
{:ok, _, _, _activity} = CommonAPI.follow(user, followed_user)
{:ok, _, _, _activity} = CommonAPI.follow(followed_user, user)
assert FollowingRelationship.following?(user, followed_user)
assert [notification] = Notification.for_user(followed_user)
CommonAPI.unfollow(user, followed_user)
{:ok, _, _, _activity_dupe} = CommonAPI.follow(user, followed_user)
CommonAPI.unfollow(followed_user, user)
{:ok, _, _, _activity_dupe} = CommonAPI.follow(followed_user, user)
notification_id = notification.id
assert [%{id: ^notification_id}] = Notification.for_user(followed_user)
@ -363,7 +364,7 @@ defmodule Pleroma.NotificationTest do
test "dismisses the notification on follow request rejection" do
user = insert(:user, is_locked: true)
follower = insert(:user)
{:ok, _, _, _follow_activity} = CommonAPI.follow(follower, user)
{:ok, _, _, _follow_activity} = CommonAPI.follow(user, follower)
assert [_notification] = Notification.for_user(user)
{:ok, _follower} = CommonAPI.reject_follow_request(follower, user)
assert [] = Notification.for_user(user)
@ -446,8 +447,7 @@ defmodule Pleroma.NotificationTest do
describe "set_read_up_to()" do
test "it sets all notifications as read up to a specified notification ID" do
user = insert(:user)
other_user = insert(:user)
[user, other_user] = insert_pair(:user)
{:ok, _activity} =
CommonAPI.post(user, %{
@ -486,6 +486,37 @@ defmodule Pleroma.NotificationTest do
assert m.last_read_id == to_string(n2.id)
end
@tag needs_streamer: true
test "it sends updated marker to the 'user' and the 'user:notification' stream" do
%{user: user, token: oauth_token} = oauth_access(["read"])
other_user = insert(:user)
{:ok, _activity} =
CommonAPI.post(other_user, %{
status: "hi @#{user.nickname}!"
})
[%{id: notification_id}] = Notification.for_user(user)
notification_id = to_string(notification_id)
task =
Task.async(fn ->
{:ok, _topic} =
Streamer.get_topic_and_add_socket("user:notification", user, oauth_token)
assert_receive {:text, event}, 4_000
assert %{"event" => "marker", "payload" => payload} = Jason.decode!(event)
assert %{"notifications" => %{"last_read_id" => ^notification_id}} =
Jason.decode!(payload)
end)
Notification.set_read_up_to(user, notification_id)
Task.await(task)
end
end
describe "for_user_since/2" do
@ -617,7 +648,7 @@ defmodule Pleroma.NotificationTest do
status: "hey @#{other_user.nickname}!"
})
{:ok, activity_two} = CommonAPI.favorite(third_user, activity_one.id)
{:ok, activity_two} = CommonAPI.favorite(activity_one.id, third_user)
enabled_receivers = Notification.get_notified_from_activity(activity_two)
@ -693,7 +724,7 @@ defmodule Pleroma.NotificationTest do
{:ok, activity} = CommonAPI.post(user, %{status: "hey @#{other_user.nickname}!"})
{:ok, _} = CommonAPI.add_mute(other_user, activity)
{:ok, _} = CommonAPI.add_mute(activity, other_user)
{:ok, same_context_activity} =
CommonAPI.post(user, %{
@ -748,7 +779,7 @@ defmodule Pleroma.NotificationTest do
{:ok, _activity_two} = CommonAPI.repeat(activity_one.id, repeated_user)
{:ok, edit_activity} =
CommonAPI.update(user, activity_one, %{
CommonAPI.update(activity_one, user, %{
status: "hey @#{other_user.nickname}! mew mew"
})
@ -768,7 +799,7 @@ defmodule Pleroma.NotificationTest do
assert Enum.empty?(Notification.for_user(user))
{:ok, _} = CommonAPI.favorite(other_user, activity.id)
{:ok, _} = CommonAPI.favorite(activity.id, other_user)
assert length(Notification.for_user(user)) == 1
@ -785,7 +816,7 @@ defmodule Pleroma.NotificationTest do
assert Enum.empty?(Notification.for_user(user))
{:ok, _} = CommonAPI.favorite(other_user, activity.id)
{:ok, _} = CommonAPI.favorite(activity.id, other_user)
assert length(Notification.for_user(user)) == 1
@ -840,7 +871,7 @@ defmodule Pleroma.NotificationTest do
assert Enum.empty?(Notification.for_user(user))
{:error, :not_found} = CommonAPI.favorite(other_user, activity.id)
{:error, :not_found} = CommonAPI.favorite(activity.id, other_user)
assert Enum.empty?(Notification.for_user(user))
end
@ -1090,7 +1121,7 @@ defmodule Pleroma.NotificationTest do
another_user = insert(:user)
{:ok, activity} = CommonAPI.post(user, %{status: "Give me my cofe!"})
{:ok, _} = CommonAPI.favorite(another_user, activity.id)
{:ok, _} = CommonAPI.favorite(activity.id, another_user)
assert length(Notification.for_user(user)) == 1
end
@ -1101,7 +1132,7 @@ defmodule Pleroma.NotificationTest do
insert(:filter, user: followed_user, phrase: "test", hide: true)
{:ok, _, _, _activity} = CommonAPI.follow(user, followed_user)
{:ok, _, _, _activity} = CommonAPI.follow(followed_user, user)
refute FollowingRelationship.following?(user, followed_user)
assert [notification] = Notification.for_user(followed_user)

View file

@ -6,7 +6,6 @@ defmodule Pleroma.Object.FetcherTest do
use Pleroma.DataCase
alias Pleroma.Activity
alias Pleroma.Instances
alias Pleroma.Object
alias Pleroma.Object.Fetcher
alias Pleroma.Web.ActivityPub.ObjectValidator
@ -100,7 +99,7 @@ defmodule Pleroma.Object.FetcherTest do
test "it returns thread depth exceeded error if thread depth is exceeded" do
clear_config([:instance, :federation_incoming_replies_max_depth], 0)
assert {:error, :allowed_depth} = Fetcher.fetch_object_from_id(@ap_id, depth: 1)
assert {:allowed_depth, false} = Fetcher.fetch_object_from_id(@ap_id, depth: 1)
end
test "it fetches object if max thread depth is restricted to 0 and depth is not specified" do
@ -118,15 +117,18 @@ defmodule Pleroma.Object.FetcherTest do
describe "actor origin containment" do
test "it rejects objects with a bogus origin" do
{:error, _} = Fetcher.fetch_object_from_id("https://info.pleroma.site/activity.json")
{:containment, :error} =
Fetcher.fetch_object_from_id("https://info.pleroma.site/activity.json")
end
test "it rejects objects when attributedTo is wrong (variant 1)" do
{:error, _} = Fetcher.fetch_object_from_id("https://info.pleroma.site/activity2.json")
{:containment, :error} =
Fetcher.fetch_object_from_id("https://info.pleroma.site/activity2.json")
end
test "it rejects objects when attributedTo is wrong (variant 2)" do
{:error, _} = Fetcher.fetch_object_from_id("https://info.pleroma.site/activity3.json")
{:containment, :error} =
Fetcher.fetch_object_from_id("https://info.pleroma.site/activity3.json")
end
end
@ -150,28 +152,102 @@ defmodule Pleroma.Object.FetcherTest do
clear_config([:mrf_keyword, :reject], ["yeah"])
clear_config([:mrf, :policies], [Pleroma.Web.ActivityPub.MRF.KeywordPolicy])
assert {:reject, "[KeywordPolicy] Matches with rejected keyword"} ==
assert {:transmogrifier, {:reject, "[KeywordPolicy] Matches with rejected keyword"}} ==
Fetcher.fetch_object_from_id(
"http://mastodon.example.org/@admin/99541947525187367"
)
end
test "it does not fetch a spoofed object uploaded on an instance as an attachment" do
assert {:error, _} =
assert {:fetch, {:error, {:content_type, "application/json"}}} =
Fetcher.fetch_object_from_id(
"https://patch.cx/media/03ca3c8b4ac3ddd08bf0f84be7885f2f88de0f709112131a22d83650819e36c2.json"
)
end
test "it resets instance reachability on successful fetch" do
id = "http://mastodon.example.org/@admin/99541947525187367"
Instances.set_consistently_unreachable(id)
refute Instances.reachable?(id)
test "it does not fetch from local instance" do
local_url = Pleroma.Web.Endpoint.url() <> "/objects/local_resource"
{:ok, _object} =
Fetcher.fetch_object_from_id("http://mastodon.example.org/@admin/99541947525187367")
assert {:fetch, {:error, "Trying to fetch local resource"}} =
Fetcher.fetch_object_from_id(local_url)
end
assert Instances.reachable?(id)
test "it validates content-type headers according to ActivityPub spec" do
# Setup a mock for an object with invalid content-type
mock(fn
%{method: :get, url: "https://example.com/objects/invalid-content-type"} ->
%Tesla.Env{
status: 200,
# Not a valid AP content-type
headers: [{"content-type", "application/json"}],
body:
Jason.encode!(%{
"id" => "https://example.com/objects/invalid-content-type",
"type" => "Note",
"content" => "This has an invalid content type",
"actor" => "https://example.com/users/actor",
"attributedTo" => "https://example.com/users/actor"
})
}
end)
assert {:fetch, {:error, {:content_type, "application/json"}}} =
Fetcher.fetch_object_from_id("https://example.com/objects/invalid-content-type")
end
test "it accepts objects with application/ld+json and ActivityStreams profile" do
# Setup a mock for an object with ld+json content-type and AS profile
mock(fn
%{method: :get, url: "https://example.com/objects/valid-ld-json"} ->
%Tesla.Env{
status: 200,
headers: [
{"content-type",
"application/ld+json; profile=\"https://www.w3.org/ns/activitystreams\""}
],
body:
Jason.encode!(%{
"id" => "https://example.com/objects/valid-ld-json",
"type" => "Note",
"content" => "This has a valid ld+json content type",
"actor" => "https://example.com/users/actor",
"attributedTo" => "https://example.com/users/actor"
})
}
end)
# This should pass if content-type validation works correctly
assert {:ok, object} =
Fetcher.fetch_and_contain_remote_object_from_id(
"https://example.com/objects/valid-ld-json"
)
assert object["content"] == "This has a valid ld+json content type"
end
test "it rejects objects with no content-type header" do
# Setup a mock for an object with no content-type header
mock(fn
%{method: :get, url: "https://example.com/objects/no-content-type"} ->
%Tesla.Env{
status: 200,
# No content-type header
headers: [],
body:
Jason.encode!(%{
"id" => "https://example.com/objects/no-content-type",
"type" => "Note",
"content" => "This has no content type header",
"actor" => "https://example.com/users/actor",
"attributedTo" => "https://example.com/users/actor"
})
}
end)
# We want to test that the request fails with a missing content-type error
# but the actual error is {:fetch, {:error, nil}} - we'll check for this format
result = Fetcher.fetch_object_from_id("https://example.com/objects/no-content-type")
assert {:fetch, {:error, nil}} = result
end
end
@ -531,6 +607,110 @@ defmodule Pleroma.Object.FetcherTest do
end
end
describe "cross-domain redirect handling" do
setup do
mock(fn
# Cross-domain redirect with original domain in id
%{method: :get, url: "https://original.test/objects/123"} ->
%Tesla.Env{
status: 200,
url: "https://media.test/objects/123",
headers: [{"content-type", "application/activity+json"}],
body:
Jason.encode!(%{
"id" => "https://original.test/objects/123",
"type" => "Note",
"content" => "This is redirected content",
"actor" => "https://original.test/users/actor",
"attributedTo" => "https://original.test/users/actor"
})
}
# Cross-domain redirect with final domain in id
%{method: :get, url: "https://original.test/objects/final-domain-id"} ->
%Tesla.Env{
status: 200,
url: "https://media.test/objects/final-domain-id",
headers: [{"content-type", "application/activity+json"}],
body:
Jason.encode!(%{
"id" => "https://media.test/objects/final-domain-id",
"type" => "Note",
"content" => "This has final domain in id",
"actor" => "https://original.test/users/actor",
"attributedTo" => "https://original.test/users/actor"
})
}
# No redirect - same domain
%{method: :get, url: "https://original.test/objects/same-domain-redirect"} ->
%Tesla.Env{
status: 200,
url: "https://original.test/objects/different-path",
headers: [{"content-type", "application/activity+json"}],
body:
Jason.encode!(%{
"id" => "https://original.test/objects/same-domain-redirect",
"type" => "Note",
"content" => "This has a same-domain redirect",
"actor" => "https://original.test/users/actor",
"attributedTo" => "https://original.test/users/actor"
})
}
# Test case with missing url field in response (common in tests)
%{method: :get, url: "https://original.test/objects/missing-url"} ->
%Tesla.Env{
status: 200,
# No url field
headers: [{"content-type", "application/activity+json"}],
body:
Jason.encode!(%{
"id" => "https://original.test/objects/missing-url",
"type" => "Note",
"content" => "This has no URL field in response",
"actor" => "https://original.test/users/actor",
"attributedTo" => "https://original.test/users/actor"
})
}
end)
:ok
end
test "it rejects objects from cross-domain redirects with original domain in id" do
assert {:error, {:cross_domain_redirect, true}} =
Fetcher.fetch_and_contain_remote_object_from_id(
"https://original.test/objects/123"
)
end
test "it rejects objects from cross-domain redirects with final domain in id" do
assert {:error, {:cross_domain_redirect, true}} =
Fetcher.fetch_and_contain_remote_object_from_id(
"https://original.test/objects/final-domain-id"
)
end
test "it accepts objects with same-domain redirects" do
assert {:ok, data} =
Fetcher.fetch_and_contain_remote_object_from_id(
"https://original.test/objects/same-domain-redirect"
)
assert data["content"] == "This has a same-domain redirect"
end
test "it handles responses without URL field (common in tests)" do
assert {:ok, data} =
Fetcher.fetch_and_contain_remote_object_from_id(
"https://original.test/objects/missing-url"
)
assert data["content"] == "This has no URL field in response"
end
end
describe "fetch with history" do
setup do
object2 = %{

View file

@ -6,12 +6,10 @@ defmodule Pleroma.ObjectTest do
use Pleroma.DataCase
use Oban.Testing, repo: Pleroma.Repo
import ExUnit.CaptureLog
import Mox
import Pleroma.Factory
import Tesla.Mock
alias Pleroma.Activity
alias Pleroma.Hashtag
alias Pleroma.Object
alias Pleroma.Repo
@ -158,7 +156,7 @@ defmodule Pleroma.ObjectTest do
uploads_dir = Pleroma.Config.get!([Pleroma.Uploaders.Local, :uploads])
File.mkdir_p!(uploads_dir)
Pleroma.Backports.mkdir_p!(uploads_dir)
file = %Plug.Upload{
content_type: "image/jpeg",
@ -176,8 +174,9 @@ defmodule Pleroma.ObjectTest do
filename = Path.basename(href)
assert {:ok, files} = File.ls(uploads_dir)
assert filename in files
expected_path = Path.join([uploads_dir, Pleroma.Upload.Filter.Dedupe.shard_path(filename)])
assert File.exists?(expected_path)
Object.delete(note)
@ -185,8 +184,7 @@ defmodule Pleroma.ObjectTest do
assert Object.get_by_id(note.id).data["deleted"]
assert Object.get_by_id(attachment.id) == nil
assert {:ok, files} = File.ls(uploads_dir)
refute filename in files
refute File.exists?(expected_path)
end
test "with objects that have legacy data.url attribute" do
@ -282,148 +280,6 @@ defmodule Pleroma.ObjectTest do
end
end
describe "get_by_id_and_maybe_refetch" do
setup do
mock(fn
%{method: :get, url: "https://patch.cx/objects/9a172665-2bc5-452d-8428-2361d4c33b1d"} ->
%Tesla.Env{
status: 200,
body: File.read!("test/fixtures/tesla_mock/poll_original.json"),
headers: HttpRequestMock.activitypub_object_headers()
}
env ->
apply(HttpRequestMock, :request, [env])
end)
mock_modified = fn resp ->
mock(fn
%{method: :get, url: "https://patch.cx/objects/9a172665-2bc5-452d-8428-2361d4c33b1d"} ->
resp
env ->
apply(HttpRequestMock, :request, [env])
end)
end
on_exit(fn -> mock(fn env -> apply(HttpRequestMock, :request, [env]) end) end)
[mock_modified: mock_modified]
end
test "refetches if the time since the last refetch is greater than the interval", %{
mock_modified: mock_modified
} do
%Object{} =
object =
Object.normalize("https://patch.cx/objects/9a172665-2bc5-452d-8428-2361d4c33b1d",
fetch: true
)
Object.set_cache(object)
assert Enum.at(object.data["oneOf"], 0)["replies"]["totalItems"] == 4
assert Enum.at(object.data["oneOf"], 1)["replies"]["totalItems"] == 0
mock_modified.(%Tesla.Env{
status: 200,
body: File.read!("test/fixtures/tesla_mock/poll_modified.json"),
headers: HttpRequestMock.activitypub_object_headers()
})
updated_object = Object.get_by_id_and_maybe_refetch(object.id, interval: -1)
object_in_cache = Object.get_cached_by_ap_id(object.data["id"])
assert updated_object == object_in_cache
assert Enum.at(updated_object.data["oneOf"], 0)["replies"]["totalItems"] == 8
assert Enum.at(updated_object.data["oneOf"], 1)["replies"]["totalItems"] == 3
end
test "returns the old object if refetch fails", %{mock_modified: mock_modified} do
%Object{} =
object =
Object.normalize("https://patch.cx/objects/9a172665-2bc5-452d-8428-2361d4c33b1d",
fetch: true
)
Object.set_cache(object)
assert Enum.at(object.data["oneOf"], 0)["replies"]["totalItems"] == 4
assert Enum.at(object.data["oneOf"], 1)["replies"]["totalItems"] == 0
assert capture_log(fn ->
mock_modified.(%Tesla.Env{status: 404, body: ""})
updated_object = Object.get_by_id_and_maybe_refetch(object.id, interval: -1)
object_in_cache = Object.get_cached_by_ap_id(object.data["id"])
assert updated_object == object_in_cache
assert Enum.at(updated_object.data["oneOf"], 0)["replies"]["totalItems"] == 4
assert Enum.at(updated_object.data["oneOf"], 1)["replies"]["totalItems"] == 0
end) =~
"[error] Couldn't refresh https://patch.cx/objects/9a172665-2bc5-452d-8428-2361d4c33b1d"
end
test "does not refetch if the time since the last refetch is greater than the interval", %{
mock_modified: mock_modified
} do
%Object{} =
object =
Object.normalize("https://patch.cx/objects/9a172665-2bc5-452d-8428-2361d4c33b1d",
fetch: true
)
Object.set_cache(object)
assert Enum.at(object.data["oneOf"], 0)["replies"]["totalItems"] == 4
assert Enum.at(object.data["oneOf"], 1)["replies"]["totalItems"] == 0
mock_modified.(%Tesla.Env{
status: 200,
body: File.read!("test/fixtures/tesla_mock/poll_modified.json"),
headers: HttpRequestMock.activitypub_object_headers()
})
updated_object = Object.get_by_id_and_maybe_refetch(object.id, interval: 100)
object_in_cache = Object.get_cached_by_ap_id(object.data["id"])
assert updated_object == object_in_cache
assert Enum.at(updated_object.data["oneOf"], 0)["replies"]["totalItems"] == 4
assert Enum.at(updated_object.data["oneOf"], 1)["replies"]["totalItems"] == 0
end
test "preserves internal fields on refetch", %{mock_modified: mock_modified} do
%Object{} =
object =
Object.normalize("https://patch.cx/objects/9a172665-2bc5-452d-8428-2361d4c33b1d",
fetch: true
)
Object.set_cache(object)
assert Enum.at(object.data["oneOf"], 0)["replies"]["totalItems"] == 4
assert Enum.at(object.data["oneOf"], 1)["replies"]["totalItems"] == 0
user = insert(:user)
activity = Activity.get_create_by_object_ap_id(object.data["id"])
{:ok, activity} = CommonAPI.favorite(user, activity.id)
object = Object.get_by_ap_id(activity.data["object"])
assert object.data["like_count"] == 1
mock_modified.(%Tesla.Env{
status: 200,
body: File.read!("test/fixtures/tesla_mock/poll_modified.json"),
headers: HttpRequestMock.activitypub_object_headers()
})
updated_object = Object.get_by_id_and_maybe_refetch(object.id, interval: -1)
object_in_cache = Object.get_cached_by_ap_id(object.data["id"])
assert updated_object == object_in_cache
assert Enum.at(updated_object.data["oneOf"], 0)["replies"]["totalItems"] == 8
assert Enum.at(updated_object.data["oneOf"], 1)["replies"]["totalItems"] == 3
assert updated_object.data["like_count"] == 1
end
end
describe ":hashtags association" do
test "Hashtag records are created with Object record and updated on its change" do
user = insert(:user)

View file

@ -1,42 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.OTPVersionTest do
use ExUnit.Case, async: true
alias Pleroma.OTPVersion
describe "check/1" do
test "22.4" do
assert OTPVersion.get_version_from_files(["test/fixtures/warnings/otp_version/22.4"]) ==
"22.4"
end
test "22.1" do
assert OTPVersion.get_version_from_files(["test/fixtures/warnings/otp_version/22.1"]) ==
"22.1"
end
test "21.1" do
assert OTPVersion.get_version_from_files(["test/fixtures/warnings/otp_version/21.1"]) ==
"21.1"
end
test "23.0" do
assert OTPVersion.get_version_from_files(["test/fixtures/warnings/otp_version/23.0"]) ==
"23.0"
end
test "with nonexistent file" do
assert OTPVersion.get_version_from_files([
"test/fixtures/warnings/otp_version/non-exising",
"test/fixtures/warnings/otp_version/22.4"
]) == "22.4"
end
test "empty paths" do
assert OTPVersion.get_version_from_files([]) == nil
end
end
end

View file

@ -0,0 +1,19 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.ReleaseTaskTest do
use Pleroma.DataCase, async: true
alias Pleroma.ReleaseTasks
test "finding the module" do
task = "search.meilisearch"
assert Mix.Tasks.Pleroma.Search.Meilisearch == ReleaseTasks.find_module(task)
task = "user"
assert Mix.Tasks.Pleroma.User == ReleaseTasks.find_module(task)
refute ReleaseTasks.find_module("doesnt.exist")
end
end

View file

@ -3,12 +3,11 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Repo.Migrations.AutolinkerToLinkifyTest do
use Pleroma.DataCase
use Pleroma.DataCase, async: false
import Pleroma.Factory
import Pleroma.Tests.Helpers
alias Pleroma.ConfigDB
setup do: clear_config(Pleroma.Formatter)
setup_all do: require_migration("20200716195806_autolinker_to_linkify")
test "change/0 converts auto_linker opts for Pleroma.Formatter", %{migration: migration} do

View file

@ -0,0 +1,43 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Repo.Migrations.PublisherMigrationChangeTest do
use Oban.Testing, repo: Pleroma.Repo
use Pleroma.DataCase
import Pleroma.Factory
import Pleroma.Tests.Helpers
alias Pleroma.Activity
alias Pleroma.Workers.PublisherWorker
setup_all do: require_migration("20240729163838_publisher_job_change")
describe "up/0" do
test "migrates publisher jobs to new format", %{migration: migration} do
user = insert(:user)
%Activity{id: activity_id, data: %{"id" => ap_id}} =
insert(:note_activity, user: user)
{:ok, %{id: job_id}} =
PublisherWorker.new(%{
"actor_id" => user.id,
"json" => "{}",
"id" => ap_id,
"inbox" => "https://example.com/inbox",
"unreachable_since" => nil
})
|> Oban.insert()
assert [%{id: ^job_id, args: %{"id" => ^ap_id}}] = all_enqueued(worker: PublisherWorker)
assert migration.up() == :ok
assert_enqueued(
worker: PublisherWorker,
args: %{"id" => ap_id, "activity_id" => activity_id}
)
end
end
end

View file

@ -24,7 +24,8 @@ defmodule Pleroma.RepoTest do
describe "get_assoc/2" do
test "get assoc from preloaded data" do
user = %User{name: "Agent Smith"}
token = %Pleroma.Web.OAuth.Token{insert(:oauth_token) | user: user}
%Pleroma.Web.OAuth.Token{} = token = insert(:oauth_token)
token = %{token | user: user}
assert Repo.get_assoc(token, :user) == {:ok, user}
end

View file

@ -18,7 +18,7 @@ defmodule Pleroma.ResilienceTest do
other_user = insert(:user)
{:ok, post_one} = CommonAPI.post(user, %{status: "Here is a post"})
{:ok, like} = CommonAPI.favorite(other_user, post_one.id)
{:ok, like} = CommonAPI.favorite(post_one.id, other_user)
%{
user: user,
@ -90,7 +90,7 @@ defmodule Pleroma.ResilienceTest do
|> json_response(200)
# Favoriting again doesn't hurt
{:ok, _like_two} = CommonAPI.favorite(other_user, post.id)
{:ok, _like_two} = CommonAPI.favorite(post.id, other_user)
post = Repo.get(Activity, post.id)

View file

@ -63,7 +63,11 @@ defmodule Pleroma.ReverseProxyTest do
|> Plug.Conn.put_req_header("user-agent", "fake/1.0")
|> ReverseProxy.call("/user-agent")
assert json_response(conn, 200) == %{"user-agent" => Pleroma.Application.user_agent()}
# Convert the response to a map without relying on json_response
body = conn.resp_body
assert conn.status == 200
response = Jason.decode!(body)
assert response == %{"user-agent" => Pleroma.Application.user_agent()}
end
test "closed connection", %{conn: conn} do
@ -138,11 +142,14 @@ defmodule Pleroma.ReverseProxyTest do
test "common", %{conn: conn} do
ClientMock
|> expect(:request, fn :head, "/head", _, _, _ ->
{:ok, 200, [{"content-type", "text/html; charset=utf-8"}]}
{:ok, 200, [{"content-type", "image/png"}]}
end)
conn = ReverseProxy.call(Map.put(conn, :method, "HEAD"), "/head")
assert html_response(conn, 200) == ""
assert conn.status == 200
assert Conn.get_resp_header(conn, "content-type") == ["image/png"]
assert conn.resp_body == ""
end
end
@ -249,7 +256,10 @@ defmodule Pleroma.ReverseProxyTest do
)
|> ReverseProxy.call("/headers")
%{"headers" => headers} = json_response(conn, 200)
body = conn.resp_body
assert conn.status == 200
response = Jason.decode!(body)
headers = response["headers"]
assert headers["Accept"] == "text/html"
end
@ -262,7 +272,10 @@ defmodule Pleroma.ReverseProxyTest do
)
|> ReverseProxy.call("/headers")
%{"headers" => headers} = json_response(conn, 200)
body = conn.resp_body
assert conn.status == 200
response = Jason.decode!(body)
headers = response["headers"]
refute headers["Accept-Language"]
end
end
@ -328,4 +341,121 @@ defmodule Pleroma.ReverseProxyTest do
assert {"content-disposition", "attachment; filename=\"filename.jpg\""} in conn.resp_headers
end
end
describe "content-type sanitisation" do
test "preserves allowed image type", %{conn: conn} do
ClientMock
|> expect(:request, fn :get, "/content", _, _, _ ->
{:ok, 200, [{"content-type", "image/png"}], %{url: "/content"}}
end)
|> expect(:stream_body, fn _ -> :done end)
conn = ReverseProxy.call(conn, "/content")
assert conn.status == 200
assert Conn.get_resp_header(conn, "content-type") == ["image/png"]
end
test "preserves allowed video type", %{conn: conn} do
ClientMock
|> expect(:request, fn :get, "/content", _, _, _ ->
{:ok, 200, [{"content-type", "video/mp4"}], %{url: "/content"}}
end)
|> expect(:stream_body, fn _ -> :done end)
conn = ReverseProxy.call(conn, "/content")
assert conn.status == 200
assert Conn.get_resp_header(conn, "content-type") == ["video/mp4"]
end
test "sanitizes ActivityPub content type", %{conn: conn} do
ClientMock
|> expect(:request, fn :get, "/content", _, _, _ ->
{:ok, 200, [{"content-type", "application/activity+json"}], %{url: "/content"}}
end)
|> expect(:stream_body, fn _ -> :done end)
conn = ReverseProxy.call(conn, "/content")
assert conn.status == 200
assert Conn.get_resp_header(conn, "content-type") == ["application/octet-stream"]
end
test "sanitizes LD-JSON content type", %{conn: conn} do
ClientMock
|> expect(:request, fn :get, "/content", _, _, _ ->
{:ok, 200, [{"content-type", "application/ld+json"}], %{url: "/content"}}
end)
|> expect(:stream_body, fn _ -> :done end)
conn = ReverseProxy.call(conn, "/content")
assert conn.status == 200
assert Conn.get_resp_header(conn, "content-type") == ["application/octet-stream"]
end
end
# Hackney is used for Reverse Proxy when Hackney or Finch is the Tesla Adapter
# Gun is able to proxy through Tesla, so it does not need testing as the
# test cases in the Pleroma.HTTPTest module are sufficient
describe "Hackney URL encoding:" do
setup do
ClientMock
|> expect(:request, fn
:get,
"https://example.com/emoji/Pack%201/koronebless.png?foo=bar+baz",
_headers,
_body,
_opts ->
{:ok, 200, [{"content-type", "image/png"}], "It works!"}
:get,
"https://example.com/media/foo/bar%20!$&'()*+,;=/:%20@a%20%5Bbaz%5D.mp4",
_headers,
_body,
_opts ->
{:ok, 200, [{"content-type", "video/mp4"}], "Allowed reserved chars."}
:get, "https://example.com/media/unicode%20%F0%9F%99%82%20.gif", _headers, _body, _opts ->
{:ok, 200, [{"content-type", "image/gif"}], "Unicode emoji in path"}
end)
|> stub(:stream_body, fn _ -> :done end)
|> stub(:close, fn _ -> :ok end)
:ok
end
test "properly encodes URLs with spaces", %{conn: conn} do
url_with_space = "https://example.com/emoji/Pack 1/koronebless.png?foo=bar baz"
result = ReverseProxy.call(conn, url_with_space)
assert result.status == 200
end
test "properly encoded URL should not be altered", %{conn: conn} do
properly_encoded_url = "https://example.com/emoji/Pack%201/koronebless.png?foo=bar+baz"
result = ReverseProxy.call(conn, properly_encoded_url)
assert result.status == 200
end
test "properly encodes URLs with allowed reserved characters", %{conn: conn} do
url_with_reserved_chars = "https://example.com/media/foo/bar !$&'()*+,;=/: @a [baz].mp4"
result = ReverseProxy.call(conn, url_with_reserved_chars)
assert result.status == 200
end
test "properly encodes URLs with unicode in path", %{conn: conn} do
url_with_unicode = "https://example.com/media/unicode 🙂 .gif"
result = ReverseProxy.call(conn, url_with_unicode)
assert result.status == 200
end
end
end

View file

@ -0,0 +1,496 @@
defmodule Pleroma.SafeZipTest do
# Not making this async because it creates and deletes files
use ExUnit.Case
alias Pleroma.SafeZip
@fixtures_dir "test/fixtures"
@tmp_dir "test/zip_tmp"
setup do
# Ensure tmp directory exists
Pleroma.Backports.mkdir_p!(@tmp_dir)
on_exit(fn ->
# Clean up any files created during tests
File.rm_rf!(@tmp_dir)
Pleroma.Backports.mkdir_p!(@tmp_dir)
end)
:ok
end
describe "list_dir_file/1" do
test "lists files in a valid zip" do
{:ok, files} = SafeZip.list_dir_file(Path.join(@fixtures_dir, "emojis.zip"))
assert is_list(files)
assert length(files) > 0
end
test "returns an empty list for empty zip" do
{:ok, files} = SafeZip.list_dir_file(Path.join(@fixtures_dir, "empty.zip"))
assert files == []
end
test "returns error for non-existent file" do
assert {:error, _} = SafeZip.list_dir_file(Path.join(@fixtures_dir, "nonexistent.zip"))
end
test "only lists regular files, not directories" do
# Create a zip with both files and directories
zip_path = create_zip_with_directory()
# List files with SafeZip
{:ok, files} = SafeZip.list_dir_file(zip_path)
# Verify only regular files are listed, not directories
assert "file_in_dir/test_file.txt" in files
assert "root_file.txt" in files
# Directory entries should not be included in the list
refute "file_in_dir/" in files
end
end
describe "contains_all_data?/2" do
test "returns true when all files are in the archive" do
# For this test, we'll create our own zip file with known content
# to ensure we can test the contains_all_data? function properly
zip_path = create_zip_with_directory()
archive_data = File.read!(zip_path)
# Check if the archive contains the root file
# Note: The function expects charlists (Erlang strings) in the MapSet
assert SafeZip.contains_all_data?(archive_data, MapSet.new([~c"root_file.txt"]))
end
test "returns false when files are missing" do
archive_path = Path.join(@fixtures_dir, "emojis.zip")
archive_data = File.read!(archive_path)
# Create a MapSet with non-existent files
fset = MapSet.new([~c"nonexistent.txt"])
refute SafeZip.contains_all_data?(archive_data, fset)
end
test "returns false for invalid archive data" do
refute SafeZip.contains_all_data?("invalid data", MapSet.new([~c"file.txt"]))
end
test "only checks for regular files, not directories" do
# Create a zip with both files and directories
zip_path = create_zip_with_directory()
archive_data = File.read!(zip_path)
# Check if the archive contains a directory (should return false)
refute SafeZip.contains_all_data?(archive_data, MapSet.new([~c"file_in_dir/"]))
# For this test, we'll manually check if the file exists in the archive
# by extracting it and verifying it exists
extract_dir = Path.join(@tmp_dir, "extract_check")
Pleroma.Backports.mkdir_p!(extract_dir)
{:ok, files} = SafeZip.unzip_file(zip_path, extract_dir)
# Verify the root file was extracted
assert Enum.any?(files, fn file ->
Path.basename(file) == "root_file.txt"
end)
# Verify the file exists on disk
assert File.exists?(Path.join(extract_dir, "root_file.txt"))
end
end
describe "zip/4" do
test "creates a zip file on disk" do
# Create a test file
test_file_path = Path.join(@tmp_dir, "test_file.txt")
File.write!(test_file_path, "test content")
# Create a zip file
zip_path = Path.join(@tmp_dir, "test.zip")
assert {:ok, ^zip_path} = SafeZip.zip(zip_path, ["test_file.txt"], @tmp_dir, false)
# Verify the zip file exists
assert File.exists?(zip_path)
end
test "creates a zip file in memory" do
# Create a test file
test_file_path = Path.join(@tmp_dir, "test_file.txt")
File.write!(test_file_path, "test content")
# Create a zip file in memory
zip_name = Path.join(@tmp_dir, "test.zip")
assert {:ok, {^zip_name, zip_data}} =
SafeZip.zip(zip_name, ["test_file.txt"], @tmp_dir, true)
# Verify the zip data is binary
assert is_binary(zip_data)
end
test "returns error for unsafe paths" do
# Try to zip a file with path traversal
assert {:error, _} =
SafeZip.zip(
Path.join(@tmp_dir, "test.zip"),
["../fixtures/test.txt"],
@tmp_dir,
false
)
end
test "can create zip with directories" do
# Create a directory structure
dir_path = Path.join(@tmp_dir, "test_dir")
Pleroma.Backports.mkdir_p!(dir_path)
file_in_dir_path = Path.join(dir_path, "file_in_dir.txt")
File.write!(file_in_dir_path, "file in directory")
# Create a zip file
zip_path = Path.join(@tmp_dir, "dir_test.zip")
assert {:ok, ^zip_path} =
SafeZip.zip(
zip_path,
["test_dir/file_in_dir.txt"],
@tmp_dir,
false
)
# Verify the zip file exists
assert File.exists?(zip_path)
# Extract and verify the directory structure is preserved
extract_dir = Path.join(@tmp_dir, "extract")
{:ok, files} = SafeZip.unzip_file(zip_path, extract_dir)
# Check if the file path is in the list, accounting for possible full paths
assert Enum.any?(files, fn file ->
String.ends_with?(file, "file_in_dir.txt")
end)
# Verify the file exists in the expected location
assert File.exists?(Path.join([extract_dir, "test_dir", "file_in_dir.txt"]))
end
end
describe "unzip_file/3" do
test "extracts files from a zip archive" do
archive_path = Path.join(@fixtures_dir, "emojis.zip")
# Extract the archive
assert {:ok, files} = SafeZip.unzip_file(archive_path, @tmp_dir)
# Verify files were extracted
assert is_list(files)
assert length(files) > 0
# Verify at least one file exists
first_file = List.first(files)
# Simply check that the file exists in the tmp directory
assert File.exists?(first_file)
end
test "extracts specific files from a zip archive" do
archive_path = Path.join(@fixtures_dir, "emojis.zip")
# Get list of files in the archive
{:ok, all_files} = SafeZip.list_dir_file(archive_path)
file_to_extract = List.first(all_files)
# Extract only one file
assert {:ok, [extracted_file]} =
SafeZip.unzip_file(archive_path, @tmp_dir, [file_to_extract])
# Verify only the specified file was extracted
assert Path.basename(extracted_file) == Path.basename(file_to_extract)
# Check that the file exists in the tmp directory
assert File.exists?(Path.join(@tmp_dir, Path.basename(file_to_extract)))
end
test "returns error for invalid zip file" do
invalid_path = Path.join(@tmp_dir, "invalid.zip")
File.write!(invalid_path, "not a zip file")
assert {:error, _} = SafeZip.unzip_file(invalid_path, @tmp_dir)
end
test "creates directories when extracting files in subdirectories" do
# Create a zip with files in subdirectories
zip_path = create_zip_with_directory()
# Extract the archive
assert {:ok, files} = SafeZip.unzip_file(zip_path, @tmp_dir)
# Verify files were extracted - handle both relative and absolute paths
assert Enum.any?(files, fn file ->
Path.basename(file) == "test_file.txt" &&
String.contains?(file, "file_in_dir")
end)
assert Enum.any?(files, fn file ->
Path.basename(file) == "root_file.txt"
end)
# Verify directory was created
dir_path = Path.join(@tmp_dir, "file_in_dir")
assert File.exists?(dir_path)
assert File.dir?(dir_path)
# Verify file in directory was extracted
file_path = Path.join(dir_path, "test_file.txt")
assert File.exists?(file_path)
end
end
describe "unzip_data/3" do
test "extracts files from zip data" do
archive_path = Path.join(@fixtures_dir, "emojis.zip")
archive_data = File.read!(archive_path)
# Extract the archive from data
assert {:ok, files} = SafeZip.unzip_data(archive_data, @tmp_dir)
# Verify files were extracted
assert is_list(files)
assert length(files) > 0
# Verify at least one file exists
first_file = List.first(files)
# Simply check that the file exists in the tmp directory
assert File.exists?(first_file)
end
test "extracts specific files from zip data" do
archive_path = Path.join(@fixtures_dir, "emojis.zip")
archive_data = File.read!(archive_path)
# Get list of files in the archive
{:ok, all_files} = SafeZip.list_dir_file(archive_path)
file_to_extract = List.first(all_files)
# Extract only one file
assert {:ok, extracted_files} =
SafeZip.unzip_data(archive_data, @tmp_dir, [file_to_extract])
# Verify only the specified file was extracted
assert Enum.any?(extracted_files, fn path ->
Path.basename(path) == Path.basename(file_to_extract)
end)
# Simply check that the file exists in the tmp directory
assert File.exists?(Path.join(@tmp_dir, Path.basename(file_to_extract)))
end
test "returns error for invalid zip data" do
assert {:error, _} = SafeZip.unzip_data("not a zip file", @tmp_dir)
end
test "creates directories when extracting files in subdirectories from data" do
# Create a zip with files in subdirectories
zip_path = create_zip_with_directory()
archive_data = File.read!(zip_path)
# Extract the archive from data
assert {:ok, files} = SafeZip.unzip_data(archive_data, @tmp_dir)
# Verify files were extracted - handle both relative and absolute paths
assert Enum.any?(files, fn file ->
Path.basename(file) == "test_file.txt" &&
String.contains?(file, "file_in_dir")
end)
assert Enum.any?(files, fn file ->
Path.basename(file) == "root_file.txt"
end)
# Verify directory was created
dir_path = Path.join(@tmp_dir, "file_in_dir")
assert File.exists?(dir_path)
assert File.dir?(dir_path)
# Verify file in directory was extracted
file_path = Path.join(dir_path, "test_file.txt")
assert File.exists?(file_path)
end
end
# Security tests
describe "security checks" do
test "prevents path traversal in zip extraction" do
# Create a malicious zip file with path traversal
malicious_zip_path = create_malicious_zip_with_path_traversal()
# Try to extract it with SafeZip
assert {:error, _} = SafeZip.unzip_file(malicious_zip_path, @tmp_dir)
# Verify the file was not extracted outside the target directory
refute File.exists?(Path.join(Path.dirname(@tmp_dir), "traversal_attempt.txt"))
end
test "prevents directory traversal in zip listing" do
# Create a malicious zip file with path traversal
malicious_zip_path = create_malicious_zip_with_path_traversal()
# Try to list files with SafeZip
assert {:error, _} = SafeZip.list_dir_file(malicious_zip_path)
end
test "prevents path traversal in zip data extraction" do
# Create a malicious zip file with path traversal
malicious_zip_path = create_malicious_zip_with_path_traversal()
malicious_data = File.read!(malicious_zip_path)
# Try to extract it with SafeZip
assert {:error, _} = SafeZip.unzip_data(malicious_data, @tmp_dir)
# Verify the file was not extracted outside the target directory
refute File.exists?(Path.join(Path.dirname(@tmp_dir), "traversal_attempt.txt"))
end
test "handles zip bomb attempts" do
# Create a zip bomb (a zip with many files or large files)
zip_bomb_path = create_zip_bomb()
# The SafeZip module should handle this gracefully
# Either by successfully extracting it (if it's not too large)
# or by returning an error (if it detects a potential zip bomb)
result = SafeZip.unzip_file(zip_bomb_path, @tmp_dir)
case result do
{:ok, _} ->
# If it successfully extracts, make sure it didn't fill up the disk
# This is a simple check to ensure the extraction was controlled
assert File.exists?(@tmp_dir)
{:error, _} ->
# If it returns an error, that's also acceptable
# The important thing is that it doesn't crash or hang
assert true
end
end
test "handles deeply nested directory structures" do
# Create a zip with deeply nested directories
deep_nest_path = create_deeply_nested_zip()
# The SafeZip module should handle this gracefully
result = SafeZip.unzip_file(deep_nest_path, @tmp_dir)
case result do
{:ok, files} ->
# If it successfully extracts, verify the files were extracted
assert is_list(files)
assert length(files) > 0
{:error, _} ->
# If it returns an error, that's also acceptable
# The important thing is that it doesn't crash or hang
assert true
end
end
end
# Helper functions to create test fixtures
# Creates a zip file with a path traversal attempt
defp create_malicious_zip_with_path_traversal do
malicious_zip_path = Path.join(@tmp_dir, "path_traversal.zip")
# Create a file to include in the zip
test_file_path = Path.join(@tmp_dir, "test_file.txt")
File.write!(test_file_path, "malicious content")
# Use Erlang's zip module directly to create a zip with path traversal
{:ok, charlist_path} =
:zip.create(
String.to_charlist(malicious_zip_path),
[{String.to_charlist("../traversal_attempt.txt"), File.read!(test_file_path)}]
)
to_string(charlist_path)
end
# Creates a zip file with directory entries
defp create_zip_with_directory do
zip_path = Path.join(@tmp_dir, "with_directory.zip")
# Create files to include in the zip
root_file_path = Path.join(@tmp_dir, "root_file.txt")
File.write!(root_file_path, "root file content")
# Create a directory and a file in it
dir_path = Path.join(@tmp_dir, "file_in_dir")
Pleroma.Backports.mkdir_p!(dir_path)
file_in_dir_path = Path.join(dir_path, "test_file.txt")
File.write!(file_in_dir_path, "file in directory content")
# Use Erlang's zip module to create a zip with directory structure
{:ok, charlist_path} =
:zip.create(
String.to_charlist(zip_path),
[
{String.to_charlist("root_file.txt"), File.read!(root_file_path)},
{String.to_charlist("file_in_dir/test_file.txt"), File.read!(file_in_dir_path)}
]
)
to_string(charlist_path)
end
# Creates a zip bomb (a zip with many small files)
defp create_zip_bomb do
zip_path = Path.join(@tmp_dir, "zip_bomb.zip")
# Create a small file to duplicate many times
small_file_path = Path.join(@tmp_dir, "small_file.txt")
File.write!(small_file_path, String.duplicate("A", 100))
# Create a list of many files to include in the zip
file_entries =
for i <- 1..100 do
{String.to_charlist("file_#{i}.txt"), File.read!(small_file_path)}
end
# Use Erlang's zip module to create a zip with many files
{:ok, charlist_path} =
:zip.create(
String.to_charlist(zip_path),
file_entries
)
to_string(charlist_path)
end
# Creates a zip with deeply nested directories
defp create_deeply_nested_zip do
zip_path = Path.join(@tmp_dir, "deep_nest.zip")
# Create a file to include in the zip
file_content = "test content"
# Create a list of deeply nested files
file_entries =
for i <- 1..10 do
nested_path = Enum.reduce(1..i, "nested", fn j, acc -> "#{acc}/level_#{j}" end)
{String.to_charlist("#{nested_path}/file.txt"), file_content}
end
# Use Erlang's zip module to create a zip with deeply nested directories
{:ok, charlist_path} =
:zip.create(
String.to_charlist(zip_path),
file_entries
)
to_string(charlist_path)
end
end

View file

@ -51,7 +51,7 @@ defmodule Pleroma.Search.QdrantSearchTest do
})
Config
|> expect(:get, 3, fn
|> expect(:get, 4, fn
[Pleroma.Search, :module], nil ->
QdrantSearch
@ -93,7 +93,7 @@ defmodule Pleroma.Search.QdrantSearchTest do
})
Config
|> expect(:get, 3, fn
|> expect(:get, 4, fn
[Pleroma.Search, :module], nil ->
QdrantSearch
@ -158,7 +158,7 @@ defmodule Pleroma.Search.QdrantSearchTest do
end)
Config
|> expect(:get, 6, fn
|> expect(:get, 7, fn
[Pleroma.Search, :module], nil ->
QdrantSearch

View file

@ -73,8 +73,8 @@ defmodule Pleroma.StatsTest do
user = insert(:user)
other_user = insert(:user)
{:ok, activity} = CommonAPI.post(user, %{visibility: "public", status: "hey"})
_ = CommonAPI.follow(user, other_user)
CommonAPI.favorite(other_user, activity.id)
_ = CommonAPI.follow(other_user, user)
CommonAPI.favorite(activity.id, other_user)
CommonAPI.repeat(activity.id, other_user)
assert %{"direct" => 0, "private" => 0, "public" => 1, "unlisted" => 0} =

View file

@ -34,6 +34,20 @@ defmodule Pleroma.Upload.Filter.AnalyzeMetadataTest do
assert meta.blurhash == "eXJi-E:SwCEm5rCmn$+YWYn+15K#5A$xxCi{SiV]s*W:Efa#s.jE-T"
end
test "it gets dimensions for grayscale images" do
upload = %Pleroma.Upload{
name: "break_analyze.png",
content_type: "image/png",
path: Path.absname("test/fixtures/break_analyze.png"),
tempfile: Path.absname("test/fixtures/break_analyze.png")
}
{:ok, :filtered, meta} = AnalyzeMetadata.filter(upload)
assert %{width: 1410, height: 2048} = meta
assert is_nil(meta.blurhash)
end
test "adds the dimensions for videos" do
upload = %Pleroma.Upload{
name: "coolvideo.mp4",

View file

@ -3,8 +3,10 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Upload.Filter.AnonymizeFilenameTest do
use Pleroma.DataCase
use Pleroma.DataCase, async: true
import Mox
alias Pleroma.StaticStubbedConfigMock, as: ConfigMock
alias Pleroma.Upload
setup do
@ -19,21 +21,26 @@ defmodule Pleroma.Upload.Filter.AnonymizeFilenameTest do
%{upload_file: upload_file}
end
setup do: clear_config([Pleroma.Upload.Filter.AnonymizeFilename, :text])
test "it replaces filename on pre-defined text", %{upload_file: upload_file} do
clear_config([Upload.Filter.AnonymizeFilename, :text], "custom-file.png")
ConfigMock
|> stub(:get, fn [Upload.Filter.AnonymizeFilename, :text] -> "custom-file.png" end)
{:ok, :filtered, %Upload{name: name}} = Upload.Filter.AnonymizeFilename.filter(upload_file)
assert name == "custom-file.png"
end
test "it replaces filename on pre-defined text expression", %{upload_file: upload_file} do
clear_config([Upload.Filter.AnonymizeFilename, :text], "custom-file.{extension}")
ConfigMock
|> stub(:get, fn [Upload.Filter.AnonymizeFilename, :text] -> "custom-file.{extension}" end)
{:ok, :filtered, %Upload{name: name}} = Upload.Filter.AnonymizeFilename.filter(upload_file)
assert name == "custom-file.jpg"
end
test "it replaces filename on random text", %{upload_file: upload_file} do
ConfigMock
|> stub(:get, fn [Upload.Filter.AnonymizeFilename, :text] -> nil end)
{:ok, :filtered, %Upload{name: name}} = Upload.Filter.AnonymizeFilename.filter(upload_file)
assert <<_::bytes-size(14)>> <> ".jpg" = name
refute name == "an… image.jpg"

View file

@ -10,6 +10,10 @@ defmodule Pleroma.Upload.Filter.DedupeTest do
@shasum "e30397b58d226d6583ab5b8b3c5defb0c682bda5c31ef07a9f57c1c4986e3781"
test "generates a shard path for a shasum" do
assert "e3/03/97/" <> _path = Dedupe.shard_path(@shasum)
end
test "adds shasum" do
File.cp!(
"test/fixtures/image.jpg",
@ -23,10 +27,12 @@ defmodule Pleroma.Upload.Filter.DedupeTest do
tempfile: Path.absname("test/fixtures/image_tmp.jpg")
}
expected_path = Dedupe.shard_path(@shasum <> ".jpg")
assert {
:ok,
:filtered,
%Pleroma.Upload{id: @shasum, path: @shasum <> ".jpg"}
%Pleroma.Upload{id: @shasum, path: ^expected_path}
} = Dedupe.filter(upload)
end
end

View file

@ -9,29 +9,31 @@ defmodule Pleroma.Upload.Filter.Exiftool.StripLocationTest do
test "apply exiftool filter" do
assert Pleroma.Utils.command_available?("exiftool")
File.cp!(
"test/fixtures/DSCN0010.jpg",
"test/fixtures/DSCN0010_tmp.jpg"
)
~w{jpg png}
|> Enum.map(fn type ->
File.cp!(
"test/fixtures/DSCN0010.#{type}",
"test/fixtures/DSCN0010_tmp.#{type}"
)
upload = %Pleroma.Upload{
name: "image_with_GPS_data.jpg",
content_type: "image/jpeg",
path: Path.absname("test/fixtures/DSCN0010.jpg"),
tempfile: Path.absname("test/fixtures/DSCN0010_tmp.jpg")
}
upload = %Pleroma.Upload{
name: "image_with_GPS_data.#{type}",
content_type: "image/jpeg",
path: Path.absname("test/fixtures/DSCN0010.#{type}"),
tempfile: Path.absname("test/fixtures/DSCN0010_tmp.#{type}")
}
assert Filter.Exiftool.StripLocation.filter(upload) == {:ok, :filtered}
assert Filter.Exiftool.StripLocation.filter(upload) == {:ok, :filtered}
{exif_original, 0} = System.cmd("exiftool", ["test/fixtures/DSCN0010.jpg"])
{exif_filtered, 0} = System.cmd("exiftool", ["test/fixtures/DSCN0010_tmp.jpg"])
{exif_original, 0} = System.cmd("exiftool", ["-m", "test/fixtures/DSCN0010.#{type}"])
{exif_filtered, 0} = System.cmd("exiftool", ["-m", "test/fixtures/DSCN0010_tmp.#{type}"])
refute exif_original == exif_filtered
assert String.match?(exif_original, ~r/GPS/)
refute String.match?(exif_filtered, ~r/GPS/)
assert String.match?(exif_original, ~r/GPS/)
refute String.match?(exif_filtered, ~r/GPS/)
end)
end
test "verify webp, heic, svg files are skipped" do
test "verify webp, heic, svg files are skipped" do
uploads =
~w{webp heic svg svg+xml}
|> Enum.map(fn type ->

View file

@ -3,9 +3,10 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Upload.Filter.MogrifunTest do
use Pleroma.DataCase
import Mock
use Pleroma.DataCase, async: true
import Mox
alias Pleroma.MogrifyMock
alias Pleroma.Upload
alias Pleroma.Upload.Filter
@ -22,23 +23,12 @@ defmodule Pleroma.Upload.Filter.MogrifunTest do
tempfile: Path.absname("test/fixtures/image_tmp.jpg")
}
task =
Task.async(fn ->
assert_receive {:apply_filter, {}}, 4_000
end)
MogrifyMock
|> stub(:open, fn _file -> %{} end)
|> stub(:custom, fn _image, _action -> %{} end)
|> stub(:custom, fn _image, _action, _options -> %{} end)
|> stub(:save, fn _image, [in_place: true] -> :ok end)
with_mocks([
{Mogrify, [],
[
open: fn _f -> %Mogrify.Image{} end,
custom: fn _m, _a -> send(task.pid, {:apply_filter, {}}) end,
custom: fn _m, _a, _o -> send(task.pid, {:apply_filter, {}}) end,
save: fn _f, _o -> :ok end
]}
]) do
assert Filter.Mogrifun.filter(upload) == {:ok, :filtered}
end
Task.await(task)
assert Filter.Mogrifun.filter(upload) == {:ok, :filtered}
end
end

View file

@ -3,13 +3,18 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Upload.Filter.MogrifyTest do
use Pleroma.DataCase
import Mock
use Pleroma.DataCase, async: true
import Mox
alias Pleroma.MogrifyMock
alias Pleroma.StaticStubbedConfigMock, as: ConfigMock
alias Pleroma.Upload.Filter
setup :verify_on_exit!
test "apply mogrify filter" do
clear_config(Filter.Mogrify, args: [{"tint", "40"}])
ConfigMock
|> stub(:get!, fn [Filter.Mogrify, :args] -> [{"tint", "40"}] end)
File.cp!(
"test/fixtures/image.jpg",
@ -23,19 +28,11 @@ defmodule Pleroma.Upload.Filter.MogrifyTest do
tempfile: Path.absname("test/fixtures/image_tmp.jpg")
}
task =
Task.async(fn ->
assert_receive {:apply_filter, {_, "tint", "40"}}, 4_000
end)
MogrifyMock
|> expect(:open, fn _file -> %{} end)
|> expect(:custom, fn _image, "tint", "40" -> %{} end)
|> expect(:save, fn _image, [in_place: true] -> :ok end)
with_mock Mogrify,
open: fn _f -> %Mogrify.Image{} end,
custom: fn _m, _a -> :ok end,
custom: fn m, a, o -> send(task.pid, {:apply_filter, {m, a, o}}) end,
save: fn _f, _o -> :ok end do
assert Filter.Mogrify.filter(upload) == {:ok, :filtered}
end
Task.await(task)
assert Filter.Mogrify.filter(upload) == {:ok, :filtered}
end
end

View file

@ -5,12 +5,13 @@
defmodule Pleroma.Upload.FilterTest do
use Pleroma.DataCase
import Mox
alias Pleroma.StaticStubbedConfigMock, as: ConfigMock
alias Pleroma.Upload.Filter
setup do: clear_config([Pleroma.Upload.Filter.AnonymizeFilename, :text])
test "applies filters" do
clear_config([Pleroma.Upload.Filter.AnonymizeFilename, :text], "custom-file.png")
ConfigMock
|> stub(:get, fn [Pleroma.Upload.Filter.AnonymizeFilename, :text] -> "custom-file.png" end)
File.cp!(
"test/fixtures/image.jpg",

View file

@ -149,6 +149,9 @@ defmodule Pleroma.UploadTest do
test "copies the file to the configured folder with deduping" do
File.cp!("test/fixtures/image.jpg", "test/fixtures/image_tmp.jpg")
expected_filename = "e30397b58d226d6583ab5b8b3c5defb0c682bda5c31ef07a9f57c1c4986e3781.jpg"
expected_path = Pleroma.Upload.Filter.Dedupe.shard_path(expected_filename)
file = %Plug.Upload{
content_type: "image/jpeg",
@ -159,8 +162,7 @@ defmodule Pleroma.UploadTest do
{:ok, data} = Upload.store(file, filters: [Pleroma.Upload.Filter.Dedupe])
assert List.first(data["url"])["href"] ==
Pleroma.Upload.base_url() <>
"e30397b58d226d6583ab5b8b3c5defb0c682bda5c31ef07a9f57c1c4986e3781.jpg"
Path.join([Pleroma.Upload.base_url(), expected_path])
end
test "copies the file to the configured folder without deduping" do
@ -225,20 +227,35 @@ defmodule Pleroma.UploadTest do
assert Path.basename(attachment_url["href"]) == "an%E2%80%A6%20image.jpg"
end
test "escapes reserved uri characters" do
test "escapes disallowed reserved characters in uri path" do
File.cp!("test/fixtures/image.jpg", "test/fixtures/image_tmp.jpg")
file = %Plug.Upload{
content_type: "image/jpeg",
path: Path.absname("test/fixtures/image_tmp.jpg"),
filename: ":?#[]@!$&\\'()*+,;=.jpg"
filename: ":?#[]@!$&'()*+,;=.jpg"
}
{:ok, data} = Upload.store(file)
[attachment_url | _] = data["url"]
assert Path.basename(attachment_url["href"]) ==
"%3A%3F%23%5B%5D%40%21%24%26%5C%27%28%29%2A%2B%2C%3B%3D.jpg"
":%3F%23%5B%5D@!$&'()*+,;=.jpg"
end
test "double %-encodes filename" do
File.cp!("test/fixtures/image.jpg", "test/fixtures/image_tmp.jpg")
file = %Plug.Upload{
content_type: "image/jpeg",
path: Path.absname("test/fixtures/image_tmp.jpg"),
filename: "file with %20.jpg"
}
{:ok, data} = Upload.store(file)
[attachment_url | _] = data["url"]
assert Path.basename(attachment_url["href"]) == "file%20with%20%2520.jpg"
end
end
@ -265,4 +282,23 @@ defmodule Pleroma.UploadTest do
refute String.starts_with?(url, base_url <> "/media/")
end
end
describe "Setting a link_name for uploaded media" do
setup do: clear_config([Pleroma.Upload, :link_name], true)
test "encodes name parameter in query" do
File.cp!("test/fixtures/image.jpg", "test/fixtures/image_tmp.jpg")
file = %Plug.Upload{
content_type: "image/jpeg",
path: Path.absname("test/fixtures/image_tmp.jpg"),
filename: "test file.jpg"
}
{:ok, data} = Upload.store(file)
[attachment_url | _] = data["url"]
assert Path.basename(attachment_url["href"]) == "test%20file.jpg?name=test+file.jpg"
end
end
end

View file

@ -1,49 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.User.BackupAsyncTest do
use Pleroma.DataCase, async: true
import Pleroma.Factory
import Mox
alias Pleroma.UnstubbedConfigMock, as: ConfigMock
alias Pleroma.User.Backup
alias Pleroma.User.Backup.ProcessorMock
setup do
user = insert(:user, %{nickname: "cofe", name: "Cofe", ap_id: "http://cofe.io/users/cofe"})
{:ok, backup} = user |> Backup.new() |> Repo.insert()
%{backup: backup}
end
test "it handles unrecoverable exceptions", %{backup: backup} do
ProcessorMock
|> expect(:do_process, fn _, _ ->
raise "mock exception"
end)
ConfigMock
|> stub_with(Pleroma.Config)
{:error, %{backup: backup, reason: :exit}} = Backup.process(backup, ProcessorMock)
assert backup.state == :failed
end
test "it handles timeouts", %{backup: backup} do
ProcessorMock
|> expect(:do_process, fn _, _ ->
Process.sleep(:timer.seconds(4))
end)
ConfigMock
|> expect(:get, fn [Pleroma.User.Backup, :process_wait_time] -> :timer.seconds(2) end)
{:error, %{backup: backup, reason: :timeout}} = Backup.process(backup, ProcessorMock)
assert backup.state == :failed
end
end

View file

@ -6,7 +6,6 @@ defmodule Pleroma.User.BackupTest do
use Oban.Testing, repo: Pleroma.Repo
use Pleroma.DataCase
import Mock
import Pleroma.Factory
import Swoosh.TestAssertions
import Mox
@ -16,7 +15,6 @@ defmodule Pleroma.User.BackupTest do
alias Pleroma.UnstubbedConfigMock, as: ConfigMock
alias Pleroma.Uploaders.S3.ExAwsMock
alias Pleroma.User.Backup
alias Pleroma.User.Backup.ProcessorMock
alias Pleroma.Web.CommonAPI
alias Pleroma.Workers.BackupWorker
@ -28,79 +26,56 @@ defmodule Pleroma.User.BackupTest do
ConfigMock
|> stub_with(Pleroma.Config)
ProcessorMock
|> stub_with(Pleroma.User.Backup.Processor)
:ok
end
test "it does not requrie enabled email" do
clear_config([Pleroma.Emails.Mailer, :enabled], false)
user = insert(:user)
assert {:ok, _} = Backup.create(user)
assert {:ok, _} = Backup.user(user)
end
test "it does not require user's email" do
user = insert(:user, %{email: nil})
assert {:ok, _} = Backup.create(user)
assert {:ok, _} = Backup.user(user)
end
test "it creates a backup record and an Oban job" do
%{id: user_id} = user = insert(:user)
assert {:ok, %Oban.Job{args: args}} = Backup.create(user)
user = insert(:user)
assert {:ok, %Backup{} = backup} = Backup.user(user)
assert {:ok, %Oban.Job{args: args}} = Backup.schedule_backup(backup)
assert_enqueued(worker: BackupWorker, args: args)
backup = Backup.get(args["backup_id"])
assert %Backup{user_id: ^user_id, processed: false, file_size: 0, state: :pending} = backup
backup = Backup.get_by_id(args["backup_id"])
assert %Backup{processed: false, file_size: 0} = backup
end
test "it return an error if the export limit is over" do
%{id: user_id} = user = insert(:user)
user = insert(:user)
limit_days = Pleroma.Config.get([Backup, :limit_days])
assert {:ok, %Oban.Job{args: args}} = Backup.create(user)
backup = Backup.get(args["backup_id"])
assert %Backup{user_id: ^user_id, processed: false, file_size: 0} = backup
{:ok, first_backup} = Backup.user(user)
{:ok, _run_backup} = Backup.run(first_backup)
assert Backup.create(user) == {:error, "Last export was less than #{limit_days} days ago"}
assert Backup.user(user) == {:error, "Last export was less than #{limit_days} days ago"}
end
test "it process a backup record" do
clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)
%{id: user_id} = user = insert(:user)
assert {:ok, %Oban.Job{args: %{"backup_id" => backup_id} = args}} = Backup.create(user)
assert {:ok, backup} = perform_job(BackupWorker, args)
assert {:ok, %Backup{id: backup_id}} = Backup.user(user)
oban_args = %{"op" => "process", "backup_id" => backup_id}
assert {:ok, backup} = perform_job(BackupWorker, oban_args)
assert backup.file_size > 0
assert %Backup{id: ^backup_id, processed: true, user_id: ^user_id, state: :complete} = backup
assert match?(%Backup{id: ^backup_id, processed: true, user_id: ^user_id}, backup)
delete_job_args = %{"op" => "delete", "backup_id" => backup_id}
assert_enqueued(worker: BackupWorker, args: delete_job_args)
assert {:ok, backup} = perform_job(BackupWorker, delete_job_args)
refute Backup.get(backup_id)
email = Pleroma.Emails.UserEmail.backup_is_ready_email(backup)
assert_email_sent(
to: {user.name, user.email},
html_body: email.html_body
)
end
test "it updates states of the backup" do
clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)
%{id: user_id} = user = insert(:user)
assert {:ok, %Oban.Job{args: %{"backup_id" => backup_id} = args}} = Backup.create(user)
assert {:ok, backup} = perform_job(BackupWorker, args)
assert backup.file_size > 0
assert %Backup{id: ^backup_id, processed: true, user_id: ^user_id, state: :complete} = backup
delete_job_args = %{"op" => "delete", "backup_id" => backup_id}
assert_enqueued(worker: BackupWorker, args: delete_job_args)
assert {:ok, backup} = perform_job(BackupWorker, delete_job_args)
refute Backup.get(backup_id)
refute Backup.get_by_id(backup_id)
email = Pleroma.Emails.UserEmail.backup_is_ready_email(backup)
@ -114,10 +89,15 @@ defmodule Pleroma.User.BackupTest do
clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)
%{id: user_id} = user = insert(:user, %{email: nil})
assert {:ok, %Oban.Job{args: %{"backup_id" => backup_id} = args}} = Backup.create(user)
assert {:ok, backup} = perform_job(BackupWorker, args)
assert backup.file_size > 0
assert %Backup{id: ^backup_id, processed: true, user_id: ^user_id} = backup
assert {:ok, %Backup{} = backup} = Backup.user(user)
expected_args = %{"op" => "process", "backup_id" => backup.id}
assert_enqueued(worker: BackupWorker, args: %{"backup_id" => backup.id})
assert {:ok, completed_backup} = perform_job(BackupWorker, expected_args)
assert completed_backup.file_size > 0
assert completed_backup.processed
assert completed_backup.user_id == user_id
assert_no_email_sent()
end
@ -127,10 +107,13 @@ defmodule Pleroma.User.BackupTest do
clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)
%{id: user_id} = user = insert(:user)
assert {:ok, %Oban.Job{args: %{"backup_id" => backup_id} = args}} = Backup.create(user)
assert {:ok, backup} = perform_job(BackupWorker, args)
assert {:ok, %Backup{id: backup_id}} = Backup.user(user)
oban_args = %{"op" => "process", "backup_id" => backup_id}
assert {:ok, backup} = perform_job(BackupWorker, oban_args)
assert backup.file_size > 0
assert %Backup{id: ^backup_id, processed: true, user_id: ^user_id} = backup
assert match?(%Backup{id: ^backup_id, processed: true, user_id: ^user_id}, backup)
assert_no_email_sent()
end
@ -139,10 +122,15 @@ defmodule Pleroma.User.BackupTest do
clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)
%{id: user_id} = user = insert(:user, %{email: ""})
assert {:ok, %Oban.Job{args: %{"backup_id" => backup_id} = args}} = Backup.create(user)
assert {:ok, backup} = perform_job(BackupWorker, args)
assert {:ok, %Backup{id: backup_id} = backup} = Backup.user(user)
expected_args = %{"op" => "process", "backup_id" => backup.id}
assert_enqueued(worker: BackupWorker, args: expected_args)
assert {:ok, backup} = perform_job(BackupWorker, expected_args)
assert backup.file_size > 0
assert %Backup{id: ^backup_id, processed: true, user_id: ^user_id} = backup
assert match?(%Backup{id: ^backup_id, processed: true, user_id: ^user_id}, backup)
assert_no_email_sent()
end
@ -152,16 +140,13 @@ defmodule Pleroma.User.BackupTest do
clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)
user = insert(:user)
assert {:ok, job1} = Backup.create(user)
assert {:ok, %Backup{}} = ObanHelpers.perform(job1)
assert {:ok, job2} = Backup.create(user)
assert Pleroma.Repo.aggregate(Backup, :count) == 2
assert {:ok, backup2} = ObanHelpers.perform(job2)
assert {:ok, %{id: backup_one_id}} = Backup.user(user)
assert {:ok, %{id: _backup_two_id}} = Backup.user(user)
# Run the backups
ObanHelpers.perform_all()
assert [^backup2] = Pleroma.Repo.all(Backup)
assert_enqueued(worker: BackupWorker, args: %{"op" => "delete", "backup_id" => backup_one_id})
end
test "it creates a zip archive with user data" do
@ -177,17 +162,20 @@ defmodule Pleroma.User.BackupTest do
{:ok, %{object: %{data: %{"id" => id3}}} = status3} =
CommonAPI.post(user, %{status: "status3"})
CommonAPI.favorite(user, status1.id)
CommonAPI.favorite(user, status2.id)
CommonAPI.favorite(status1.id, user)
CommonAPI.favorite(status2.id, user)
Bookmark.create(user.id, status2.id)
Bookmark.create(user.id, status3.id)
CommonAPI.follow(user, other_user)
CommonAPI.follow(other_user, user)
assert {:ok, backup} = user |> Backup.new() |> Repo.insert()
assert {:ok, path} = Backup.export(backup, self())
assert {:ok, zipfile} = :zip.zip_open(String.to_charlist(path), [:memory])
assert {:ok, backup} = Backup.user(user)
assert {:ok, run_backup} = Backup.run(backup)
tempfile = Path.join([run_backup.tempdir, run_backup.file_name])
assert {:ok, zipfile} = :zip.zip_open(String.to_charlist(tempfile), [:memory])
assert {:ok, {~c"actor.json", json}} = :zip.zip_get(~c"actor.json", zipfile)
assert %{
@ -197,13 +185,13 @@ defmodule Pleroma.User.BackupTest do
%{"@language" => "und"}
],
"bookmarks" => "bookmarks.json",
"followers" => "http://cofe.io/users/cofe/followers",
"following" => "http://cofe.io/users/cofe/following",
"followers" => "followers.json",
"following" => "following.json",
"id" => "http://cofe.io/users/cofe",
"inbox" => "http://cofe.io/users/cofe/inbox",
"likes" => "likes.json",
"name" => "Cofe",
"outbox" => "http://cofe.io/users/cofe/outbox",
"outbox" => "outbox.json",
"preferredUsername" => "cofe",
"publicKey" => %{
"id" => "http://cofe.io/users/cofe#main-key",
@ -275,56 +263,34 @@ defmodule Pleroma.User.BackupTest do
} = Jason.decode!(json)
:zip.zip_close(zipfile)
File.rm!(path)
File.rm_rf!(run_backup.tempdir)
end
test "it counts the correct number processed" do
test "correct number processed" do
user = insert(:user, %{nickname: "cofe", name: "Cofe", ap_id: "http://cofe.io/users/cofe"})
Enum.map(1..120, fn i ->
{:ok, status} = CommonAPI.post(user, %{status: "status #{i}"})
CommonAPI.favorite(user, status.id)
CommonAPI.favorite(status.id, user)
Bookmark.create(user.id, status.id)
end)
assert {:ok, backup} = user |> Backup.new() |> Repo.insert()
{:ok, backup} = Backup.process(backup)
{:ok, backup} = Backup.run(backup)
assert backup.processed_number == 1 + 120 + 120 + 120
zip_path = Path.join([backup.tempdir, backup.file_name])
Backup.delete(backup)
end
assert {:ok, zipfile} = :zip.zip_open(String.to_charlist(zip_path), [:memory])
test "it handles errors" do
user = insert(:user, %{nickname: "cofe", name: "Cofe", ap_id: "http://cofe.io/users/cofe"})
backup_parts = [~c"likes.json", ~c"bookmarks.json", ~c"outbox.json"]
Enum.map(1..120, fn i ->
{:ok, _status} = CommonAPI.post(user, %{status: "status #{i}"})
Enum.each(backup_parts, fn part ->
assert {:ok, {_part, part_json}} = :zip.zip_get(part, zipfile)
{:ok, decoded_part} = Jason.decode(part_json)
assert decoded_part["totalItems"] == 120
end)
assert {:ok, backup} = user |> Backup.new() |> Repo.insert()
with_mock Pleroma.Web.ActivityPub.Transmogrifier,
[:passthrough],
prepare_outgoing: fn data ->
object =
data["object"]
|> Pleroma.Object.normalize(fetch: false)
|> Map.get(:data)
data = data |> Map.put("object", object)
if String.contains?(data["object"]["content"], "119"),
do: raise(%Postgrex.Error{}),
else: {:ok, data}
end do
{:ok, backup} = Backup.process(backup)
assert backup.processed
assert backup.state == :complete
assert backup.processed_number == 1 + 119
Backup.delete(backup)
end
Backup.delete_archive(backup)
end
describe "it uploads and deletes a backup archive" do
@ -337,18 +303,17 @@ defmodule Pleroma.User.BackupTest do
{:ok, status1} = CommonAPI.post(user, %{status: "status1"})
{:ok, status2} = CommonAPI.post(user, %{status: "status2"})
{:ok, status3} = CommonAPI.post(user, %{status: "status3"})
CommonAPI.favorite(user, status1.id)
CommonAPI.favorite(user, status2.id)
CommonAPI.favorite(status1.id, user)
CommonAPI.favorite(status2.id, user)
Bookmark.create(user.id, status2.id)
Bookmark.create(user.id, status3.id)
assert {:ok, backup} = user |> Backup.new() |> Repo.insert()
assert {:ok, path} = Backup.export(backup, self())
[path: path, backup: backup]
[backup: backup]
end
test "S3", %{path: path, backup: backup} do
test "S3", %{backup: backup} do
clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.S3)
clear_config([Pleroma.Uploaders.S3, :streaming_enabled], false)
@ -358,15 +323,17 @@ defmodule Pleroma.User.BackupTest do
%{http_method: :delete} -> {:ok, %{status_code: 204}}
end)
assert {:ok, %Pleroma.Upload{}} = Backup.upload(backup, path)
assert {:ok, _backup} = Backup.delete(backup)
assert {:ok, backup} = Backup.run(backup)
assert {:ok, %Backup{processed: true}} = Backup.upload(backup)
assert {:ok, _backup} = Backup.delete_archive(backup)
end
test "Local", %{path: path, backup: backup} do
test "Local", %{backup: backup} do
clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)
assert {:ok, %Pleroma.Upload{}} = Backup.upload(backup, path)
assert {:ok, _backup} = Backup.delete(backup)
assert {:ok, backup} = Backup.run(backup)
assert {:ok, %Backup{processed: true}} = Backup.upload(backup)
assert {:ok, _backup} = Backup.delete_archive(backup)
end
end
end

View file

@ -25,11 +25,12 @@ defmodule Pleroma.User.ImportTest do
user3.nickname
]
{:ok, job} = User.Import.follow_import(user1, identifiers)
{:ok, jobs} = User.Import.follows_import(user1, identifiers)
for job <- jobs do
assert {:ok, %User{}} = ObanHelpers.perform(job)
end
assert {:ok, result} = ObanHelpers.perform(job)
assert is_list(result)
assert result == [refresh_record(user2), refresh_record(user3)]
assert User.following?(user1, user2)
assert User.following?(user1, user3)
end
@ -44,11 +45,12 @@ defmodule Pleroma.User.ImportTest do
user3.nickname
]
{:ok, job} = User.Import.blocks_import(user1, identifiers)
{:ok, jobs} = User.Import.blocks_import(user1, identifiers)
for job <- jobs do
assert {:ok, %User{}} = ObanHelpers.perform(job)
end
assert {:ok, result} = ObanHelpers.perform(job)
assert is_list(result)
assert result == [user2, user3]
assert User.blocks?(user1, user2)
assert User.blocks?(user1, user3)
end
@ -63,11 +65,12 @@ defmodule Pleroma.User.ImportTest do
user3.nickname
]
{:ok, job} = User.Import.mutes_import(user1, identifiers)
{:ok, jobs} = User.Import.mutes_import(user1, identifiers)
for job <- jobs do
assert {:ok, %User{}} = ObanHelpers.perform(job)
end
assert {:ok, result} = ObanHelpers.perform(job)
assert is_list(result)
assert result == [user2, user3]
assert User.mutes?(user1, user2)
assert User.mutes?(user1, user3)
end

View file

@ -3,11 +3,12 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.UserRelationshipTest do
alias Pleroma.DateTimeMock
alias Pleroma.UserRelationship
use Pleroma.DataCase, async: false
use Pleroma.DataCase, async: true
import Mock
import Mox
import Pleroma.Factory
describe "*_exists?/2" do
@ -52,6 +53,9 @@ defmodule Pleroma.UserRelationshipTest do
end
test "creates user relationship record if it doesn't exist", %{users: [user1, user2]} do
DateTimeMock
|> stub_with(Pleroma.DateTime.Impl)
for relationship_type <- [
:block,
:mute,
@ -80,13 +84,15 @@ defmodule Pleroma.UserRelationshipTest do
end
test "if record already exists, returns it", %{users: [user1, user2]} do
user_block =
with_mock NaiveDateTime, [:passthrough], utc_now: fn -> ~N[2017-03-17 17:09:58] end do
{:ok, %{inserted_at: ~N[2017-03-17 17:09:58]}} =
UserRelationship.create_block(user1, user2)
end
fixed_datetime = ~N[2017-03-17 17:09:58]
assert user_block == UserRelationship.create_block(user1, user2)
Pleroma.DateTimeMock
|> expect(:utc_now, 2, fn -> fixed_datetime end)
{:ok, %{inserted_at: ^fixed_datetime}} = UserRelationship.create_block(user1, user2)
# Test the idempotency without caring about the exact time
assert {:ok, _} = UserRelationship.create_block(user1, user2)
end
end

View file

@ -366,5 +366,13 @@ defmodule Pleroma.UserSearchTest do
assert user == result |> Map.put(:search_rank, nil) |> Map.put(:search_type, nil)
end
test "find users accepting chat messages only" do
user1 = insert(:user, nickname: "user1", accepts_chat_messages: true)
insert(:user, nickname: "user2", accepts_chat_messages: false)
[found_user1] = User.search("user", capabilities: ["accepts_chat_messages"])
assert found_user1.id == user1.id
end
end
end

View file

@ -20,7 +20,7 @@ defmodule Pleroma.UserTest do
import Swoosh.TestAssertions
setup do
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Config)
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Test.StaticConfig)
:ok
end
@ -182,8 +182,8 @@ defmodule Pleroma.UserTest do
locked = insert(:user, is_locked: true)
follower = insert(:user)
CommonAPI.follow(follower, unlocked)
CommonAPI.follow(follower, locked)
CommonAPI.follow(unlocked, follower)
CommonAPI.follow(locked, follower)
assert [] = User.get_follow_requests(unlocked)
assert [activity] = User.get_follow_requests(locked)
@ -196,9 +196,9 @@ defmodule Pleroma.UserTest do
pending_follower = insert(:user)
accepted_follower = insert(:user)
CommonAPI.follow(pending_follower, locked)
CommonAPI.follow(pending_follower, locked)
CommonAPI.follow(accepted_follower, locked)
CommonAPI.follow(locked, pending_follower)
CommonAPI.follow(locked, pending_follower)
CommonAPI.follow(locked, accepted_follower)
Pleroma.FollowingRelationship.update(accepted_follower, locked, :follow_accept)
@ -209,7 +209,7 @@ defmodule Pleroma.UserTest do
locked = insert(:user, is_locked: true)
pending_follower = insert(:user, %{is_active: false})
CommonAPI.follow(pending_follower, locked)
CommonAPI.follow(locked, pending_follower)
refute pending_follower.is_active
assert [] = User.get_follow_requests(locked)
@ -219,7 +219,7 @@ defmodule Pleroma.UserTest do
followed = insert(:user, is_locked: true)
follower = insert(:user)
CommonAPI.follow(follower, followed)
CommonAPI.follow(followed, follower)
assert [_activity] = User.get_follow_requests(followed)
{:ok, _user_relationship} = User.block(followed, follower)
@ -1075,6 +1075,21 @@ defmodule Pleroma.UserTest do
refute cs.valid?
end
test "it truncates fields" do
clear_config([:instance, :max_remote_account_fields], 2)
fields = [
%{"name" => "One", "value" => "Uno"},
%{"name" => "Two", "value" => "Dos"},
%{"name" => "Three", "value" => "Tres"}
]
cs = User.remote_user_changeset(@valid_remote |> Map.put(:fields, fields))
assert [%{"name" => "One", "value" => "Uno"}, %{"name" => "Two", "value" => "Dos"}] ==
Ecto.Changeset.get_field(cs, :fields)
end
end
describe "followers and friends" do
@ -1526,7 +1541,7 @@ defmodule Pleroma.UserTest do
assert [activity] == ActivityPub.fetch_public_activities(%{}) |> Repo.preload(:bookmark)
assert [%{activity | thread_muted?: CommonAPI.thread_muted?(user2, activity)}] ==
assert [%{activity | thread_muted?: CommonAPI.thread_muted?(activity, user2)}] ==
ActivityPub.fetch_activities([user2.ap_id | User.following(user2)], %{
user: user2
})
@ -1691,8 +1706,8 @@ defmodule Pleroma.UserTest do
object_two = insert(:note, user: follower)
activity_two = insert(:note_activity, user: follower, note: object_two)
{:ok, like} = CommonAPI.favorite(user, activity_two.id)
{:ok, like_two} = CommonAPI.favorite(follower, activity.id)
{:ok, like} = CommonAPI.favorite(activity_two.id, user)
{:ok, like_two} = CommonAPI.favorite(activity.id, follower)
{:ok, repeat} = CommonAPI.repeat(activity_two.id, user)
{:ok, job} = User.delete(user)
@ -1866,6 +1881,11 @@ defmodule Pleroma.UserTest do
end
end
test "get_or_fetch_public_key_for_ap_id fetches a user that's not in the db" do
assert {:ok, _key} =
User.get_or_fetch_public_key_for_ap_id("http://mastodon.example.org/users/admin")
end
test "get_public_key_for_ap_id returns correctly for user that's not in the db" do
assert :error = User.get_public_key_for_ap_id("http://mastodon.example.org/users/admin")
end
@ -2390,8 +2410,8 @@ defmodule Pleroma.UserTest do
other_user =
insert(:user,
local: false,
follower_address: "http://localhost:4001/users/masto_closed/followers",
following_address: "http://localhost:4001/users/masto_closed/following"
follower_address: "https://remote.org/users/masto_closed/followers",
following_address: "https://remote.org/users/masto_closed/following"
)
assert other_user.following_count == 0
@ -2411,8 +2431,8 @@ defmodule Pleroma.UserTest do
other_user =
insert(:user,
local: false,
follower_address: "http://localhost:4001/users/masto_closed/followers",
following_address: "http://localhost:4001/users/masto_closed/following"
follower_address: "https://remote.org/users/masto_closed/followers",
following_address: "https://remote.org/users/masto_closed/following"
)
assert other_user.following_count == 0
@ -2432,8 +2452,8 @@ defmodule Pleroma.UserTest do
other_user =
insert(:user,
local: false,
follower_address: "http://localhost:4001/users/masto_closed/followers",
following_address: "http://localhost:4001/users/masto_closed/following"
follower_address: "https://remote.org/users/masto_closed/followers",
following_address: "https://remote.org/users/masto_closed/following"
)
assert other_user.following_count == 0
@ -2654,8 +2674,12 @@ defmodule Pleroma.UserTest do
assert {:ok, user} = User.update_last_active_at(user)
assert user.last_active_at >= test_started_at
assert user.last_active_at <= NaiveDateTime.truncate(NaiveDateTime.utc_now(), :second)
assert NaiveDateTime.compare(user.last_active_at, test_started_at) in [:gt, :eq]
assert NaiveDateTime.compare(
user.last_active_at,
NaiveDateTime.truncate(NaiveDateTime.utc_now(), :second)
) in [:lt, :eq]
last_active_at =
NaiveDateTime.utc_now()
@ -2667,10 +2691,15 @@ defmodule Pleroma.UserTest do
|> cast(%{last_active_at: last_active_at}, [:last_active_at])
|> User.update_and_set_cache()
assert user.last_active_at == last_active_at
assert NaiveDateTime.compare(user.last_active_at, last_active_at) == :eq
assert {:ok, user} = User.update_last_active_at(user)
assert user.last_active_at >= test_started_at
assert user.last_active_at <= NaiveDateTime.truncate(NaiveDateTime.utc_now(), :second)
assert NaiveDateTime.compare(user.last_active_at, test_started_at) in [:gt, :eq]
assert NaiveDateTime.compare(
user.last_active_at,
NaiveDateTime.truncate(NaiveDateTime.utc_now(), :second)
) in [:lt, :eq]
end
test "active_user_count/1" do
@ -2768,6 +2797,15 @@ defmodule Pleroma.UserTest do
assert user_updated.also_known_as |> length() == 1
assert user2.ap_id in user_updated.also_known_as
end
test "should tolerate non-http(s) aliases" do
user =
insert(:user, %{
also_known_as: ["at://did:plc:xgvzy7ni6ig6ievcbls5jaxe"]
})
assert "at://did:plc:xgvzy7ni6ig6ievcbls5jaxe" in user.also_known_as
end
end
describe "alias_users/1" do
@ -2904,4 +2942,74 @@ defmodule Pleroma.UserTest do
assert [%{"verified_at" => ^verified_at}] = user.fields
end
describe "follow_hashtag/2" do
test "should follow a hashtag" do
user = insert(:user)
hashtag = insert(:hashtag)
assert {:ok, _} = user |> User.follow_hashtag(hashtag)
user = User.get_cached_by_ap_id(user.ap_id)
assert user.followed_hashtags |> Enum.count() == 1
assert hashtag.name in Enum.map(user.followed_hashtags, fn %{name: name} -> name end)
end
test "should not follow a hashtag twice" do
user = insert(:user)
hashtag = insert(:hashtag)
assert {:ok, _} = user |> User.follow_hashtag(hashtag)
assert {:ok, _} = user |> User.follow_hashtag(hashtag)
user = User.get_cached_by_ap_id(user.ap_id)
assert user.followed_hashtags |> Enum.count() == 1
assert hashtag.name in Enum.map(user.followed_hashtags, fn %{name: name} -> name end)
end
test "can follow multiple hashtags" do
user = insert(:user)
hashtag = insert(:hashtag)
other_hashtag = insert(:hashtag)
assert {:ok, _} = user |> User.follow_hashtag(hashtag)
assert {:ok, _} = user |> User.follow_hashtag(other_hashtag)
user = User.get_cached_by_ap_id(user.ap_id)
assert user.followed_hashtags |> Enum.count() == 2
assert hashtag.name in Enum.map(user.followed_hashtags, fn %{name: name} -> name end)
assert other_hashtag.name in Enum.map(user.followed_hashtags, fn %{name: name} -> name end)
end
end
describe "unfollow_hashtag/2" do
test "should unfollow a hashtag" do
user = insert(:user)
hashtag = insert(:hashtag)
assert {:ok, _} = user |> User.follow_hashtag(hashtag)
assert {:ok, _} = user |> User.unfollow_hashtag(hashtag)
user = User.get_cached_by_ap_id(user.ap_id)
assert user.followed_hashtags |> Enum.count() == 0
end
test "should not error when trying to unfollow a hashtag twice" do
user = insert(:user)
hashtag = insert(:hashtag)
assert {:ok, _} = user |> User.follow_hashtag(hashtag)
assert {:ok, _} = user |> User.unfollow_hashtag(hashtag)
assert {:ok, _} = user |> User.unfollow_hashtag(hashtag)
user = User.get_cached_by_ap_id(user.ap_id)
assert user.followed_hashtags |> Enum.count() == 0
end
end
end

View file

@ -8,7 +8,6 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
alias Pleroma.Activity
alias Pleroma.Delivery
alias Pleroma.Instances
alias Pleroma.Object
alias Pleroma.Tests.ObanHelpers
alias Pleroma.User
@ -26,7 +25,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
require Pleroma.Constants
setup do
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Config)
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Test.StaticConfig)
:ok
end
@ -431,7 +430,133 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
end
end
describe "/objects/:uuid/replies" do
test "it renders the top-level collection", %{
conn: conn
} do
user = insert(:user)
note = insert(:note_activity)
note = Pleroma.Activity.get_by_id_with_object(note.id)
uuid = String.split(note.object.data["id"], "/") |> List.last()
{:ok, _} =
CommonAPI.post(user, %{status: "reply1", in_reply_to_status_id: note.id})
conn =
conn
|> put_req_header("accept", "application/activity+json")
|> get("/objects/#{uuid}/replies")
assert match?(
%{
"id" => _,
"type" => "OrderedCollection",
"totalItems" => 1,
"first" => %{
"id" => _,
"type" => "OrderedCollectionPage",
"orderedItems" => [_]
}
},
json_response(conn, 200)
)
end
test "first page id includes `?page=true`", %{conn: conn} do
user = insert(:user)
note = insert(:note_activity)
note = Pleroma.Activity.get_by_id_with_object(note.id)
uuid = String.split(note.object.data["id"], "/") |> List.last()
{:ok, _} =
CommonAPI.post(user, %{status: "reply1", in_reply_to_status_id: note.id})
conn =
conn
|> put_req_header("accept", "application/activity+json")
|> get("/objects/#{uuid}/replies")
%{"id" => collection_id, "first" => %{"id" => page_id, "partOf" => part_of}} =
json_response(conn, 200)
assert part_of == collection_id
assert String.contains?(page_id, "page=true")
end
test "unknown query params do not crash the endpoint", %{conn: conn} do
user = insert(:user)
note = insert(:note_activity)
note = Pleroma.Activity.get_by_id_with_object(note.id)
uuid = String.split(note.object.data["id"], "/") |> List.last()
{:ok, _} =
CommonAPI.post(user, %{status: "reply1", in_reply_to_status_id: note.id})
conn =
conn
|> put_req_header("accept", "application/activity+json")
|> get("/objects/#{uuid}/replies?unknown_param=1")
assert %{"type" => "OrderedCollection"} = json_response(conn, 200)
end
test "it renders a collection page", %{
conn: conn
} do
user = insert(:user)
note = insert(:note_activity)
note = Pleroma.Activity.get_by_id_with_object(note.id)
uuid = String.split(note.object.data["id"], "/") |> List.last()
{:ok, r1} =
CommonAPI.post(user, %{status: "reply1", in_reply_to_status_id: note.id})
{:ok, r2} =
CommonAPI.post(user, %{status: "reply2", in_reply_to_status_id: note.id})
{:ok, _} =
CommonAPI.post(user, %{status: "reply3", in_reply_to_status_id: note.id})
conn =
conn
|> put_req_header("accept", "application/activity+json")
|> get("/objects/#{uuid}/replies?page=true&min_id=#{r1.object.id}&limit=1")
expected_uris = [r2.object.data["id"]]
assert match?(
%{
"id" => _,
"type" => "OrderedCollectionPage",
"prev" => _,
"next" => _,
"orderedItems" => ^expected_uris
},
json_response(conn, 200)
)
end
end
describe "/activities/:uuid" do
test "it does not include a top-level replies collection on activities", %{conn: conn} do
clear_config([:activitypub, :note_replies_output_limit], 1)
activity = insert(:note_activity)
activity = Activity.get_by_id_with_object(activity.id)
uuid = String.split(activity.data["id"], "/") |> List.last()
conn =
conn
|> put_req_header("accept", "application/activity+json")
|> get("/activities/#{uuid}")
res = json_response(conn, 200)
refute Map.has_key?(res, "replies")
assert get_in(res, ["object", "replies", "id"]) == activity.object.data["id"] <> "/replies"
end
test "it doesn't return a local-only activity", %{conn: conn} do
user = insert(:user)
{:ok, post} = CommonAPI.post(user, %{status: "test", visibility: "local"})
@ -601,23 +726,6 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
assert Activity.get_by_ap_id(data["id"])
end
test "it clears `unreachable` federation status of the sender", %{conn: conn} do
data = File.read!("test/fixtures/mastodon-post-activity.json") |> Jason.decode!()
sender_url = data["actor"]
Instances.set_consistently_unreachable(sender_url)
refute Instances.reachable?(sender_url)
conn =
conn
|> assign(:valid_signature, true)
|> put_req_header("content-type", "application/activity+json")
|> post("/inbox", data)
assert "ok" == json_response(conn, 200)
assert Instances.reachable?(sender_url)
end
test "accept follow activity", %{conn: conn} do
clear_config([:instance, :federating], true)
relay = Relay.get_actor()
@ -657,7 +765,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
end
test "without valid signature, " <>
"it only accepts Create activities and requires enabled federation",
"it accepts Create activities and requires enabled federation",
%{conn: conn} do
data = File.read!("test/fixtures/mastodon-post-activity.json") |> Jason.decode!()
non_create_data = File.read!("test/fixtures/mastodon-announce.json") |> Jason.decode!()
@ -684,6 +792,54 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
|> json_response(400)
end
# When activity is delivered to the inbox and we cannot immediately verify signature
# we capture all the params and process it later in the Oban job.
# Once we begin processing it through Oban we risk fetching the actor to validate the
# activity which just leads to inserting a new user to process a Delete not relevant to us.
test "Activities of certain types from an unknown actor are discarded", %{conn: conn} do
example_bad_types =
Pleroma.Constants.activity_types() --
Pleroma.Constants.allowed_activity_types_from_strangers()
Enum.each(example_bad_types, fn bad_type ->
params =
%{
"type" => bad_type,
"actor" => "https://unknown.mastodon.instance/users/somebody"
}
|> Jason.encode!()
conn
|> assign(:valid_signature, false)
|> put_req_header("content-type", "application/activity+json")
|> post("/inbox", params)
|> json_response(400)
assert all_enqueued() == []
end)
end
test "Unknown activity types are discarded", %{conn: conn} do
unknown_types = ["Poke", "Read", "Dazzle"]
Enum.each(unknown_types, fn bad_type ->
params =
%{
"type" => bad_type,
"actor" => "https://unknown.mastodon.instance/users/somebody"
}
|> Jason.encode!()
conn
|> assign(:valid_signature, true)
|> put_req_header("content-type", "application/activity+json")
|> post("/inbox", params)
|> json_response(400)
assert all_enqueued() == []
end)
end
test "accepts Add/Remove activities", %{conn: conn} do
object_id = "c61d6733-e256-4fe1-ab13-1e369789423f"
@ -893,23 +1049,6 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
assert Activity.get_by_ap_id(data["id"])
end
test "it rejects an invalid incoming activity", %{conn: conn, data: data} do
user = insert(:user, is_active: false)
data =
data
|> Map.put("bcc", [user.ap_id])
|> Kernel.put_in(["object", "bcc"], [user.ap_id])
conn =
conn
|> assign(:valid_signature, true)
|> put_req_header("content-type", "application/activity+json")
|> post("/users/#{user.nickname}/inbox", data)
assert "Invalid request." == json_response(conn, 400)
end
test "it accepts messages with to as string instead of array", %{conn: conn, data: data} do
user = insert(:user)
@ -1060,24 +1199,6 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
assert response(conn, 200) =~ note_object.data["content"]
end
test "it clears `unreachable` federation status of the sender", %{conn: conn, data: data} do
user = insert(:user)
data = Map.put(data, "bcc", [user.ap_id])
sender_host = URI.parse(data["actor"]).host
Instances.set_consistently_unreachable(sender_host)
refute Instances.reachable?(sender_host)
conn =
conn
|> assign(:valid_signature, true)
|> put_req_header("content-type", "application/activity+json")
|> post("/users/#{user.nickname}/inbox", data)
assert "ok" == json_response(conn, 200)
assert Instances.reachable?(sender_host)
end
test "it removes all follower collections but actor's", %{conn: conn} do
[actor, recipient] = insert_pair(:user)
@ -1157,9 +1278,8 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
}
],
"actor" => actor.ap_id,
"cc" => [
reported_user.ap_id
],
# CC and TO might either not exist at all, or be empty. We should be able to handle either.
# "cc" => [],
"content" => "test",
"context" => "context",
"id" => "http://#{remote_domain}/activities/02be56cf-35e3-46b4-b2c6-47ae08dfee9e",
@ -1224,7 +1344,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
note = insert(:note_activity, user: reported_user)
Pleroma.Web.CommonAPI.favorite(another, note.id)
Pleroma.Web.CommonAPI.favorite(note.id, another)
mock_json_body =
"test/fixtures/mastodon/application_actor.json"
@ -1272,9 +1392,79 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
html_body: ~r/#{note.data["object"]}/i
)
end
test "it accepts an incoming Block", %{conn: conn, data: data} do
user = insert(:user)
data =
data
|> Map.put("type", "Block")
|> Map.put("to", [user.ap_id])
|> Map.put("cc", [])
|> Map.put("object", user.ap_id)
conn =
conn
|> assign(:valid_signature, true)
|> put_req_header("content-type", "application/activity+json")
|> post("/users/#{user.nickname}/inbox", data)
assert "ok" == json_response(conn, 200)
ObanHelpers.perform(all_enqueued(worker: ReceiverWorker))
assert Activity.get_by_ap_id(data["id"])
end
test "it returns an error when receiving an activity sent to a deactivated user", %{
conn: conn,
data: data
} do
user = insert(:user)
{:ok, _} = User.set_activation(user, false)
data =
data
|> Map.put("bcc", [user.ap_id])
|> Kernel.put_in(["object", "bcc"], [user.ap_id])
conn =
conn
|> assign(:valid_signature, true)
|> put_req_header("content-type", "application/activity+json")
|> post("/users/#{user.nickname}/inbox", data)
assert "User deactivated" == json_response(conn, 404)
end
test "it returns an error when receiving an activity sent from a deactivated user", %{
conn: conn,
data: data
} do
sender = insert(:user)
user = insert(:user)
{:ok, _} = User.set_activation(sender, false)
data =
data
|> Map.put("bcc", [user.ap_id])
|> Map.put("actor", sender.ap_id)
|> Kernel.put_in(["object", "bcc"], [user.ap_id])
conn =
conn
|> assign(:valid_signature, true)
|> put_req_header("content-type", "application/activity+json")
|> post("/users/#{user.nickname}/inbox", data)
assert "Sender deactivated" == json_response(conn, 404)
end
end
describe "GET /users/:nickname/outbox" do
setup do
Mox.stub_with(Pleroma.StaticStubbedConfigMock, Pleroma.Config)
:ok
end
test "it paginates correctly", %{conn: conn} do
user = insert(:user)
conn = assign(conn, :user, user)
@ -1363,6 +1553,22 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
assert %{"orderedItems" => []} = resp
end
test "it does not return a local note activity when C2S API is disabled", %{conn: conn} do
clear_config([:activitypub, :client_api_enabled], false)
user = insert(:user)
reader = insert(:user)
{:ok, _note_activity} = CommonAPI.post(user, %{status: "mew mew", visibility: "local"})
resp =
conn
|> assign(:user, reader)
|> put_req_header("accept", "application/activity+json")
|> get("/users/#{user.nickname}/outbox?page=true")
|> json_response(200)
assert %{"orderedItems" => []} = resp
end
test "it returns a note activity in a collection", %{conn: conn} do
note_activity = insert(:note_activity)
note_object = Object.normalize(note_activity, fetch: false)
@ -1402,7 +1608,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
assert question = Object.normalize(activity, fetch: false)
{:ok, [activity], _object} = CommonAPI.vote(voter, question, [1])
{:ok, [activity], _object} = CommonAPI.vote(question, voter, [1])
assert outbox_get =
conn
@ -1414,6 +1620,35 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
assert [answer_outbox] = outbox_get["orderedItems"]
assert answer_outbox["id"] == activity.data["id"]
end
test "it works with authorized fetch forced when authenticated" do
clear_config([:activitypub, :authorized_fetch_mode], true)
user = insert(:user)
outbox_endpoint = user.ap_id <> "/outbox"
conn =
build_conn()
|> assign(:user, user)
|> put_req_header("accept", "application/activity+json")
|> get(outbox_endpoint)
assert json_response(conn, 200)
end
test "it fails with authorized fetch forced when unauthenticated", %{conn: conn} do
clear_config([:activitypub, :authorized_fetch_mode], true)
user = insert(:user)
outbox_endpoint = user.ap_id <> "/outbox"
conn =
conn
|> put_req_header("accept", "application/activity+json")
|> get(outbox_endpoint)
assert response(conn, 401)
end
end
describe "POST /users/:nickname/outbox (C2S)" do
@ -1471,6 +1706,41 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
assert object["content"] == activity["object"]["content"]
end
test "it inserts an incoming reply create activity into the database", %{conn: conn} do
user = insert(:user)
replying_user = insert(:user)
{:ok, activity} = CommonAPI.post(user, %{status: "cofe"})
data = %{
type: "Create",
object: %{
to: [Pleroma.Constants.as_public(), user.ap_id],
cc: [replying_user.follower_address],
inReplyTo: activity.object.data["id"],
content: "green tea",
type: "Note"
}
}
result =
conn
|> assign(:user, replying_user)
|> put_req_header("content-type", "application/activity+json")
|> post("/users/#{replying_user.nickname}/outbox", data)
|> json_response(201)
updated_object = Object.normalize(activity.object.data["id"], fetch: false)
assert Activity.get_by_ap_id(result["id"])
assert result["object"]
assert %Object{data: object} = Object.normalize(result["object"], fetch: false)
assert object["content"] == data.object.content
assert Pleroma.Web.ActivityPub.Visibility.public?(object)
assert object["inReplyTo"] == activity.object.data["id"]
assert updated_object.data["repliesCount"] == 1
end
test "it rejects anything beyond 'Note' creations", %{conn: conn, activity: activity} do
user = insert(:user)
@ -1575,6 +1845,311 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
assert json_response(conn, 403)
end
test "it rejects update activity of object from other actor", %{conn: conn} do
note_activity = insert(:note_activity)
note_object = Object.normalize(note_activity, fetch: false)
user = insert(:user)
data = %{
type: "Update",
object: %{
id: note_object.data["id"]
}
}
conn =
conn
|> assign(:user, user)
|> put_req_header("content-type", "application/activity+json")
|> post("/users/#{user.nickname}/outbox", data)
assert json_response(conn, 400)
assert note_object == Object.normalize(note_activity, fetch: false)
end
test "it rejects Add to other user's collection", %{conn: conn} do
user = insert(:user)
target_user = insert(:user)
{:ok, activity} = CommonAPI.post(user, %{status: "Post"})
object = Object.normalize(activity, fetch: false)
object_id = object.data["id"]
data = %{
type: "Add",
target:
"#{Pleroma.Web.Endpoint.url()}/users/#{target_user.nickname}/collections/featured",
object: object_id
}
conn =
conn
|> assign(:user, user)
|> put_req_header("content-type", "application/activity+json")
|> post("/users/#{user.nickname}/outbox", data)
assert json_response(conn, 400)
end
test "it rejects Remove to other user's collection", %{conn: conn} do
user = insert(:user)
target_user = insert(:user)
{:ok, activity} = CommonAPI.post(user, %{status: "Post"})
object = Object.normalize(activity, fetch: false)
object_id = object.data["id"]
data = %{
type: "Remove",
target:
"#{Pleroma.Web.Endpoint.url()}/users/#{target_user.nickname}/collections/featured",
object: object_id
}
conn =
conn
|> assign(:user, user)
|> put_req_header("content-type", "application/activity+json")
|> post("/users/#{user.nickname}/outbox", data)
assert json_response(conn, 400)
end
test "it rejects updating Actor's profile", %{conn: conn} do
user = insert(:user, local: true)
user_object = Pleroma.Web.ActivityPub.UserView.render("user.json", %{user: user})
user_object_new = Map.put(user_object, "name", "lain")
data = %{
type: "Update",
object: user_object_new
}
conn =
conn
|> assign(:user, user)
|> put_req_header("content-type", "application/json")
|> post("/users/#{user.nickname}/outbox", data)
updated_user_object = Pleroma.Web.ActivityPub.UserView.render("user.json", %{user: user})
assert updated_user_object == user_object
assert json_response(conn, 400)
end
# Actor publicKey tests are redundant with above test,
# left here for the case that Updating Actors is ever supported
test "it rejects updating Actor's publicKey", %{conn: conn} do
user = insert(:user, local: true)
{:ok, pem} = Pleroma.Keys.generate_rsa_pem()
{:ok, _, public_key} = Pleroma.Keys.keys_from_pem(pem)
# Taken from UserView
public_key = :public_key.pem_entry_encode(:SubjectPublicKeyInfo, public_key)
public_key = :public_key.pem_encode([public_key])
user_object = Pleroma.Web.ActivityPub.UserView.render("user.json", %{user: user})
user_object_public_key = Map.fetch!(user_object, "publicKey")
user_object_public_key = Map.put(user_object_public_key, "publicKeyPem", public_key)
user_object_new = Map.put(user_object, "publicKey", user_object_public_key)
refute user_object == user_object_new
data = %{
type: "Update",
object: user_object_new
}
conn =
conn
|> assign(:user, user)
|> put_req_header("content-type", "application/json")
|> post("/users/#{user.nickname}/outbox", data)
new_user_object = Pleroma.Web.ActivityPub.UserView.render("user.json", %{user: user})
assert user_object == new_user_object
assert json_response(conn, 400)
end
test "it rejects updating Actor's publicKey of another user", %{conn: conn} do
user = insert(:user)
target_user = insert(:user, local: true)
{:ok, pem} = Pleroma.Keys.generate_rsa_pem()
{:ok, _, public_key} = Pleroma.Keys.keys_from_pem(pem)
# Taken from UserView
public_key = :public_key.pem_entry_encode(:SubjectPublicKeyInfo, public_key)
public_key = :public_key.pem_encode([public_key])
target_user_object =
Pleroma.Web.ActivityPub.UserView.render("user.json", %{user: target_user})
target_user_object_public_key = Map.fetch!(target_user_object, "publicKey")
target_user_object_public_key =
Map.put(target_user_object_public_key, "publicKeyPem", public_key)
target_user_object_new =
Map.put(target_user_object, "publicKey", target_user_object_public_key)
refute target_user_object == target_user_object_new
data = %{
type: "Update",
object: target_user_object_new
}
conn =
conn
|> assign(:user, user)
|> put_req_header("content-type", "application/json")
|> post("/users/#{target_user.nickname}/outbox", data)
new_target_user_object =
Pleroma.Web.ActivityPub.UserView.render("user.json", %{user: target_user})
assert target_user_object == new_target_user_object
assert json_response(conn, 403)
end
test "it rejects creating Actors of type Application", %{conn: conn} do
user = insert(:user, local: true)
data = %{
type: "Create",
object: %{
type: "Application"
}
}
conn =
conn
|> assign(:user, user)
|> put_req_header("content-type", "application/json")
|> post("/users/#{user.nickname}/outbox", data)
assert json_response(conn, 400)
end
test "it rejects creating Actors of type Person", %{conn: conn} do
user = insert(:user, local: true)
data = %{
type: "Create",
object: %{
type: "Person"
}
}
conn =
conn
|> assign(:user, user)
|> put_req_header("content-type", "application/json")
|> post("/users/#{user.nickname}/outbox", data)
assert json_response(conn, 400)
end
test "it rejects creating Actors of type Service", %{conn: conn} do
user = insert(:user, local: true)
data = %{
type: "Create",
object: %{
type: "Service"
}
}
conn =
conn
|> assign(:user, user)
|> put_req_header("content-type", "application/json")
|> post("/users/#{user.nickname}/outbox", data)
assert json_response(conn, 400)
end
test "it rejects like activity to object invisible to actor", %{conn: conn} do
user = insert(:user)
stranger = insert(:user, local: true)
{:ok, post} = CommonAPI.post(user, %{status: "cofe", visibility: "private"})
assert Pleroma.Web.ActivityPub.Visibility.private?(post)
refute Pleroma.Web.ActivityPub.Visibility.visible_for_user?(post, stranger)
post_object = Object.normalize(post, fetch: false)
data = %{
type: "Like",
object: %{
id: post_object.data["id"]
}
}
conn =
conn
|> assign(:user, stranger)
|> put_req_header("content-type", "application/activity+json")
|> post("/users/#{stranger.nickname}/outbox", data)
assert json_response(conn, 403)
end
test "it rejects announce activity to object invisible to actor", %{conn: conn} do
user = insert(:user)
stranger = insert(:user, local: true)
{:ok, post} = CommonAPI.post(user, %{status: "cofe", visibility: "private"})
assert Pleroma.Web.ActivityPub.Visibility.private?(post)
refute Pleroma.Web.ActivityPub.Visibility.visible_for_user?(post, stranger)
post_object = Object.normalize(post, fetch: false)
data = %{
type: "Announce",
object: %{
id: post_object.data["id"]
}
}
conn =
conn
|> assign(:user, stranger)
|> put_req_header("content-type", "application/activity+json")
|> post("/users/#{stranger.nickname}/outbox", data)
assert json_response(conn, 403)
end
test "it rejects emojireact activity to object invisible to actor", %{conn: conn} do
user = insert(:user)
stranger = insert(:user, local: true)
{:ok, post} = CommonAPI.post(user, %{status: "cofe", visibility: "private"})
assert Pleroma.Web.ActivityPub.Visibility.private?(post)
refute Pleroma.Web.ActivityPub.Visibility.visible_for_user?(post, stranger)
post_object = Object.normalize(post, fetch: false)
data = %{
type: "EmojiReact",
object: %{
id: post_object.data["id"]
},
content: "😀"
}
conn =
conn
|> assign(:user, stranger)
|> put_req_header("content-type", "application/activity+json")
|> post("/users/#{stranger.nickname}/outbox", data)
assert json_response(conn, 403)
end
test "it increases like count when receiving a like action", %{conn: conn} do
note_activity = insert(:note_activity)
note_object = Object.normalize(note_activity, fetch: false)
@ -1747,7 +2322,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
%{conn: conn} do
user = insert(:user, hide_followers: true)
other_user = insert(:user)
{:ok, _other_user, user, _activity} = CommonAPI.follow(other_user, user)
{:ok, user, _other_user, _activity} = CommonAPI.follow(user, other_user)
result =
conn
@ -1843,7 +2418,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
%{conn: conn} do
user = insert(:user, hide_follows: true)
other_user = insert(:user)
{:ok, user, _other_user, _activity} = CommonAPI.follow(user, other_user)
{:ok, _other_user, user, _activity} = CommonAPI.follow(other_user, user)
result =
conn
@ -2062,6 +2637,30 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
|> post("/api/ap/upload_media", %{"file" => image, "description" => desc})
|> json_response(403)
end
test "they don't work when C2S API is disabled", %{conn: conn} do
clear_config([:activitypub, :client_api_enabled], false)
user = insert(:user)
assert conn
|> assign(:user, user)
|> get("/api/ap/whoami")
|> response(403)
desc = "Description of the image"
image = %Plug.Upload{
content_type: "image/jpeg",
path: Path.absname("test/fixtures/image.jpg"),
filename: "an_image.jpg"
}
assert conn
|> assign(:user, user)
|> post("/api/ap/upload_media", %{"file" => image, "description" => desc})
|> response(403)
end
end
test "pinned collection", %{conn: conn} do

View file

@ -232,12 +232,14 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
assert user.avatar == %{
"type" => "Image",
"url" => [%{"href" => "https://jk.nipponalba.scot/images/profile.jpg"}]
"url" => [%{"href" => "https://jk.nipponalba.scot/images/profile.jpg"}],
"name" => "profile picture"
}
assert user.banner == %{
"type" => "Image",
"url" => [%{"href" => "https://jk.nipponalba.scot/images/profile.jpg"}]
"url" => [%{"href" => "https://jk.nipponalba.scot/images/profile.jpg"}],
"name" => "profile picture"
}
end
@ -432,6 +434,98 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
assert user.birthday == ~D[2001-02-12]
end
test "fetches avatar description" do
user_id = "https://example.com/users/marcin"
user_data =
"test/fixtures/users_mock/user.json"
|> File.read!()
|> String.replace("{{nickname}}", "marcin")
|> Jason.decode!()
|> Map.delete("featured")
|> Map.update("icon", %{}, fn image -> Map.put(image, "name", "image description") end)
|> Jason.encode!()
Tesla.Mock.mock(fn
%{
method: :get,
url: ^user_id
} ->
%Tesla.Env{
status: 200,
body: user_data,
headers: [{"content-type", "application/activity+json"}]
}
end)
{:ok, user} = ActivityPub.make_user_from_ap_id(user_id)
assert user.avatar["name"] == "image description"
end
end
test "works with avatar/banner href as list" do
user_id = "https://queef.in/cute_cat"
user_data =
"test/fixtures/users_mock/href_as_array.json"
|> File.read!()
|> Jason.decode!()
|> Map.delete("featured")
|> Jason.encode!()
Tesla.Mock.mock(fn
%{
method: :get,
url: ^user_id
} ->
%Tesla.Env{
status: 200,
body: user_data,
headers: [{"content-type", "application/activity+json"}]
}
end)
{:ok, user} = ActivityPub.make_user_from_ap_id(user_id)
assert length(user.avatar["url"]) == 1
assert length(user.banner["url"]) == 1
assert user.avatar["url"] |> List.first() |> Map.fetch!("href") ==
"https://queef.in/storage/profile.webp"
assert user.banner["url"] |> List.first() |> Map.fetch!("href") ==
"https://queef.in/storage/banner.gif"
end
test "works with alsoKnownAs as string" do
user_id = "https://hub.netzgemeinde.eu/channel/jupiter_rowland"
user_data =
"test/fixtures/users_mock/hubzilla-actor-alsoknownas-string.json"
|> File.read!()
user_data_decoded =
user_data
|> Jason.decode!()
Tesla.Mock.mock(fn
%{
method: :get,
url: ^user_id
} ->
%Tesla.Env{
status: 200,
body: user_data,
headers: [{"content-type", "application/activity+json"}]
}
end)
{:ok, user} = ActivityPub.make_user_from_ap_id(user_id)
assert is_list(user.also_known_as)
assert user.also_known_as == [user_data_decoded["alsoKnownAs"]]
end
test "it fetches the appropriate tag-restricted posts" do
@ -795,12 +889,12 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
assert object.data["repliesCount"] == 2
end
test "increates quotes count", %{user: user} do
test "increases quotes count", %{user: user} do
user2 = insert(:user)
{:ok, activity} = CommonAPI.post(user, %{status: "1", visibility: "public"})
ap_id = activity.data["id"]
quote_data = %{status: "1", quote_id: activity.id}
quote_data = %{status: "1", quoted_status_id: activity.id}
# public
{:ok, _} = CommonAPI.post(user2, Map.put(quote_data, :visibility, "public"))
@ -836,6 +930,33 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
end
end
describe "fetch activities for followed hashtags" do
test "it should return public activities that reference a given hashtag" do
hashtag = insert(:hashtag, name: "tenshi")
user = insert(:user)
other_user = insert(:user)
{:ok, normally_visible} =
CommonAPI.post(other_user, %{status: "hello :)", visibility: "public"})
{:ok, public} = CommonAPI.post(user, %{status: "maji #tenshi", visibility: "public"})
{:ok, _unrelated} = CommonAPI.post(user, %{status: "dai #tensh", visibility: "public"})
{:ok, unlisted} = CommonAPI.post(user, %{status: "maji #tenshi", visibility: "unlisted"})
{:ok, _private} = CommonAPI.post(user, %{status: "maji #tenshi", visibility: "private"})
activities =
ActivityPub.fetch_activities([other_user.follower_address], %{
followed_hashtags: [hashtag.id]
})
assert length(activities) == 3
normal_id = normally_visible.id
public_id = public.id
unlisted_id = unlisted.id
assert [%{id: ^normal_id}, %{id: ^public_id}, %{id: ^unlisted_id}] = activities
end
end
describe "fetch activities in context" do
test "retrieves activities that have a given context" do
{:ok, activity} = ActivityBuilder.insert(%{"type" => "Create", "context" => "2hu"})
@ -1038,7 +1159,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
refute activity in activities
followed_user = insert(:user)
CommonAPI.follow(user, followed_user)
CommonAPI.follow(followed_user, user)
{:ok, repeat_activity} = CommonAPI.repeat(activity.id, followed_user)
activities = ActivityPub.fetch_activities([], %{blocking_user: user, skip_preload: true})
@ -1171,7 +1292,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
note_two = insert(:note, data: %{"context" => "suya.."})
activity_two = insert(:note_activity, note: note_two)
{:ok, _activity_two} = CommonAPI.add_mute(user, activity_two)
{:ok, _activity_two} = CommonAPI.add_mute(activity_two, user)
assert [_activity_one] = ActivityPub.fetch_activities([], %{muting_user: user})
end
@ -1182,7 +1303,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
note_two = insert(:note, data: %{"context" => "suya.."})
activity_two = insert(:note_activity, note: note_two)
{:ok, _activity_two} = CommonAPI.add_mute(user, activity_two)
{:ok, _activity_two} = CommonAPI.add_mute(activity_two, user)
assert [_activity_two, _activity_one] =
ActivityPub.fetch_activities([], %{muting_user: user, with_muted: true})
@ -1212,6 +1333,16 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
assert activity == expected_activity
end
test "includes only reblogs on request" do
user = insert(:user)
{:ok, _} = ActivityBuilder.insert(%{"type" => "Create"}, %{:user => user})
{:ok, expected_activity} = ActivityBuilder.insert(%{"type" => "Announce"}, %{:user => user})
[activity] = ActivityPub.fetch_user_activities(user, nil, %{only_reblogs: true})
assert activity == expected_activity
end
describe "irreversible filters" do
setup do
user = insert(:user)
@ -1358,7 +1489,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
activity = insert(:note_activity)
user = insert(:user)
booster = insert(:user)
{:ok, _reblog_mute} = CommonAPI.hide_reblogs(user, booster)
{:ok, _reblog_mute} = CommonAPI.hide_reblogs(booster, user)
{:ok, activity} = CommonAPI.repeat(activity.id, booster)
@ -1371,8 +1502,8 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
activity = insert(:note_activity)
user = insert(:user)
booster = insert(:user)
{:ok, _reblog_mute} = CommonAPI.hide_reblogs(user, booster)
{:ok, _reblog_mute} = CommonAPI.show_reblogs(user, booster)
{:ok, _reblog_mute} = CommonAPI.hide_reblogs(booster, user)
{:ok, _reblog_mute} = CommonAPI.show_reblogs(booster, user)
{:ok, activity} = CommonAPI.repeat(activity.id, booster)
@ -1393,8 +1524,8 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
%{test_file: test_file}
end
test "strips / from filename", %{test_file: file} do
file = %Plug.Upload{file | filename: "../../../../../nested/bad.jpg"}
test "strips / from filename", %{test_file: %Plug.Upload{} = file} do
file = %{file | filename: "../../../../../nested/bad.jpg"}
{:ok, %Object{} = object} = ActivityPub.upload(file)
[%{"href" => href}] = object.data["url"]
assert Regex.match?(~r"/bad.jpg$", href)
@ -1452,7 +1583,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
follower = insert(:user)
followed = insert(:user)
{:ok, _, _, follow_activity} = CommonAPI.follow(follower, followed)
{:ok, _, _, follow_activity} = CommonAPI.follow(followed, follower)
with_mock(Utils, [:passthrough], maybe_federate: fn _ -> {:error, :reverted} end) do
assert {:error, :reverted} = ActivityPub.unfollow(follower, followed)
@ -1469,7 +1600,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
follower = insert(:user)
followed = insert(:user)
{:ok, _, _, follow_activity} = CommonAPI.follow(follower, followed)
{:ok, _, _, follow_activity} = CommonAPI.follow(followed, follower)
{:ok, activity} = ActivityPub.unfollow(follower, followed)
assert activity.data["type"] == "Undo"
@ -1486,7 +1617,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
follower = insert(:user)
followed = insert(:user, %{is_locked: true})
{:ok, _, _, follow_activity} = CommonAPI.follow(follower, followed)
{:ok, _, _, follow_activity} = CommonAPI.follow(followed, follower)
{:ok, activity} = ActivityPub.unfollow(follower, followed)
assert activity.data["type"] == "Undo"
@ -1623,32 +1754,6 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
} = activity
end
test_with_mock "strips status data from Flag, before federating it",
%{
reporter: reporter,
context: context,
target_account: target_account,
reported_activity: reported_activity,
object_ap_id: object_ap_id,
content: content
},
Utils,
[:passthrough],
[] do
{:ok, activity} =
ActivityPub.flag(%{
actor: reporter,
context: context,
account: target_account,
statuses: [reported_activity],
content: content
})
new_data = put_in(activity.data, ["object"], [target_account.ap_id, object_ap_id])
assert_called(Utils.maybe_federate(%{activity | data: new_data}))
end
test_with_mock "reverts on error",
%{
reporter: reporter,
@ -1678,13 +1783,14 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
test "fetch_activities/2 returns activities addressed to a list " do
user = insert(:user)
member = insert(:user)
{:ok, list} = Pleroma.List.create("foo", user)
{:ok, list} = Pleroma.List.create(%{title: "foo"}, user)
{:ok, list} = Pleroma.List.follow(list, member)
{:ok, activity} = CommonAPI.post(user, %{status: "foobar", visibility: "list:#{list.id}"})
{:ok, %Activity{} = activity} =
CommonAPI.post(user, %{status: "foobar", visibility: "list:#{list.id}"})
activity = Repo.preload(activity, :bookmark)
activity = %Activity{activity | thread_muted?: !!activity.thread_muted?}
activity = %{activity | thread_muted?: !!activity.thread_muted?}
assert ActivityPub.fetch_activities([], %{user: user}) == [activity]
end
@ -1727,8 +1833,8 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
user =
insert(:user,
local: false,
follower_address: "http://localhost:4001/users/fuser2/followers",
following_address: "http://localhost:4001/users/fuser2/following"
follower_address: "https://remote.org/users/fuser2/followers",
following_address: "https://remote.org/users/fuser2/following"
)
{:ok, info} = ActivityPub.fetch_follow_information_for_user(user)
@ -1739,7 +1845,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
test "detects hidden followers" do
mock(fn env ->
case env.url do
"http://localhost:4001/users/masto_closed/followers?page=1" ->
"https://remote.org/users/masto_closed/followers?page=1" ->
%Tesla.Env{status: 403, body: ""}
_ ->
@ -1750,8 +1856,8 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
user =
insert(:user,
local: false,
follower_address: "http://localhost:4001/users/masto_closed/followers",
following_address: "http://localhost:4001/users/masto_closed/following"
follower_address: "https://remote.org/users/masto_closed/followers",
following_address: "https://remote.org/users/masto_closed/following"
)
{:ok, follow_info} = ActivityPub.fetch_follow_information_for_user(user)
@ -1762,7 +1868,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
test "detects hidden follows" do
mock(fn env ->
case env.url do
"http://localhost:4001/users/masto_closed/following?page=1" ->
"https://remote.org/users/masto_closed/following?page=1" ->
%Tesla.Env{status: 403, body: ""}
_ ->
@ -1773,8 +1879,8 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
user =
insert(:user,
local: false,
follower_address: "http://localhost:4001/users/masto_closed/followers",
following_address: "http://localhost:4001/users/masto_closed/following"
follower_address: "https://remote.org/users/masto_closed/followers",
following_address: "https://remote.org/users/masto_closed/following"
)
{:ok, follow_info} = ActivityPub.fetch_follow_information_for_user(user)
@ -1786,8 +1892,8 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
user =
insert(:user,
local: false,
follower_address: "http://localhost:8080/followers/fuser3",
following_address: "http://localhost:8080/following/fuser3"
follower_address: "https://remote.org/followers/fuser3",
following_address: "https://remote.org/following/fuser3"
)
{:ok, follow_info} = ActivityPub.fetch_follow_information_for_user(user)
@ -1800,28 +1906,28 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
test "doesn't crash when follower and following counters are hidden" do
mock(fn env ->
case env.url do
"http://localhost:4001/users/masto_hidden_counters/following" ->
"https://remote.org/users/masto_hidden_counters/following" ->
json(
%{
"@context" => "https://www.w3.org/ns/activitystreams",
"id" => "http://localhost:4001/users/masto_hidden_counters/followers"
"id" => "https://remote.org/users/masto_hidden_counters/followers"
},
headers: HttpRequestMock.activitypub_object_headers()
)
"http://localhost:4001/users/masto_hidden_counters/following?page=1" ->
"https://remote.org/users/masto_hidden_counters/following?page=1" ->
%Tesla.Env{status: 403, body: ""}
"http://localhost:4001/users/masto_hidden_counters/followers" ->
"https://remote.org/users/masto_hidden_counters/followers" ->
json(
%{
"@context" => "https://www.w3.org/ns/activitystreams",
"id" => "http://localhost:4001/users/masto_hidden_counters/following"
"id" => "https://remote.org/users/masto_hidden_counters/following"
},
headers: HttpRequestMock.activitypub_object_headers()
)
"http://localhost:4001/users/masto_hidden_counters/followers?page=1" ->
"https://remote.org/users/masto_hidden_counters/followers?page=1" ->
%Tesla.Env{status: 403, body: ""}
end
end)
@ -1829,8 +1935,8 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
user =
insert(:user,
local: false,
follower_address: "http://localhost:4001/users/masto_hidden_counters/followers",
following_address: "http://localhost:4001/users/masto_hidden_counters/following"
follower_address: "https://remote.org/users/masto_hidden_counters/followers",
following_address: "https://remote.org/users/masto_hidden_counters/following"
)
{:ok, follow_info} = ActivityPub.fetch_follow_information_for_user(user)
@ -1854,14 +1960,14 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
{:ok, a4} = CommonAPI.post(user2, %{status: "Agent Smith "})
{:ok, a5} = CommonAPI.post(user1, %{status: "Red or Blue "})
{:ok, _} = CommonAPI.favorite(user, a4.id)
{:ok, _} = CommonAPI.favorite(other_user, a3.id)
{:ok, _} = CommonAPI.favorite(user, a3.id)
{:ok, _} = CommonAPI.favorite(other_user, a5.id)
{:ok, _} = CommonAPI.favorite(user, a5.id)
{:ok, _} = CommonAPI.favorite(other_user, a4.id)
{:ok, _} = CommonAPI.favorite(user, a1.id)
{:ok, _} = CommonAPI.favorite(other_user, a1.id)
{:ok, _} = CommonAPI.favorite(a4.id, user)
{:ok, _} = CommonAPI.favorite(a3.id, other_user)
{:ok, _} = CommonAPI.favorite(a3.id, user)
{:ok, _} = CommonAPI.favorite(a5.id, other_user)
{:ok, _} = CommonAPI.favorite(a5.id, user)
{:ok, _} = CommonAPI.favorite(a4.id, other_user)
{:ok, _} = CommonAPI.favorite(a1.id, user)
{:ok, _} = CommonAPI.favorite(a1.id, other_user)
result = ActivityPub.fetch_favourites(user)
assert Enum.map(result, & &1.id) == [a1.id, a5.id, a3.id, a4.id]
@ -1884,7 +1990,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
assert User.following?(follower, old_user)
assert User.following?(follower_move_opted_out, old_user)
assert {:ok, activity} = ActivityPub.move(old_user, new_user)
assert {:ok, %Activity{} = activity} = ActivityPub.move(old_user, new_user)
assert %Activity{
actor: ^old_ap_id,
@ -1916,7 +2022,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
assert User.following?(follower_move_opted_out, old_user)
refute User.following?(follower_move_opted_out, new_user)
activity = %Activity{activity | object: nil}
activity = %{activity | object: nil}
assert [%Notification{activity: ^activity}] = Notification.for_user(follower)

View file

@ -15,7 +15,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.HashtagPolicyTest do
user = insert(:user)
{:ok, activity} = CommonAPI.post(user, %{status: "#nsfw hey"})
{:ok, modified} = Transmogrifier.prepare_outgoing(activity.data)
{:ok, modified} = Transmogrifier.prepare_activity(activity.data)
assert modified["object"]["sensitive"]
end
@ -94,7 +94,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.HashtagPolicyTest do
user = insert(:user)
{:ok, activity} = CommonAPI.post(user, %{status: "#cofe hey"})
{:ok, modified} = Transmogrifier.prepare_outgoing(activity.data)
{:ok, modified} = Transmogrifier.prepare_activity(activity.data)
refute modified["object"]["sensitive"]
end

View file

@ -109,4 +109,22 @@ defmodule Pleroma.Web.ActivityPub.MRF.InlineQuotePolicyTest do
{:ok, filtered} = InlineQuotePolicy.filter(activity)
assert filtered == activity
end
# Mastodon uses p tags instead of span in their quote posts
# URLs in quoteUri and post content are already mismatched
test "skips objects which already have an .inline-quote p" do
object = File.read!("test/fixtures/quote_post/mastodon_quote_post.json") |> Jason.decode!()
# Normally the ObjectValidator will fix this before it reaches MRF
object = Map.put(object, "quoteUrl", object["quoteUri"])
activity = %{
"type" => "Create",
"actor" => "https://mastodon.social/users/gwynnion",
"object" => object
}
{:ok, filtered} = InlineQuotePolicy.filter(activity)
assert filtered == activity
end
end

View file

@ -54,14 +54,17 @@ defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicyTest do
setup do: clear_config([:media_proxy, :enabled], true)
test "it prefetches media proxy URIs" do
Tesla.Mock.mock(fn %{method: :get, url: "http://example.com/image.jpg"} ->
{:ok, %Tesla.Env{status: 200, body: ""}}
end)
with_mock HTTP, get: fn _, _, _ -> {:ok, []} end do
with_mock HTTP,
get: fn _, _, opts ->
send(self(), {:prefetch_opts, opts})
{:ok, []}
end do
MediaProxyWarmingPolicy.filter(@message)
assert called(HTTP.get(:_, :_, :_))
assert_receive {:prefetch_opts, opts}
refute Keyword.has_key?(opts, :follow_redirect)
refute Keyword.has_key?(opts, :force_redirect)
end
end
@ -81,10 +84,6 @@ defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicyTest do
end
test "history-aware" do
Tesla.Mock.mock(fn %{method: :get, url: "http://example.com/image.jpg"} ->
{:ok, %Tesla.Env{status: 200, body: ""}}
end)
with_mock HTTP, get: fn _, _, _ -> {:ok, []} end do
MRF.filter_one(MediaProxyWarmingPolicy, @message_with_history)
@ -93,10 +92,6 @@ defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicyTest do
end
test "works with Updates" do
Tesla.Mock.mock(fn %{method: :get, url: "http://example.com/image.jpg"} ->
{:ok, %Tesla.Env{status: 200, body: ""}}
end)
with_mock HTTP, get: fn _, _, _ -> {:ok, []} end do
MRF.filter_one(MediaProxyWarmingPolicy, @message_with_history |> Map.put("type", "Update"))

View file

@ -0,0 +1,139 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.QuietReplyTest do
use Pleroma.DataCase
import Pleroma.Factory
require Pleroma.Constants
alias Pleroma.Object
alias Pleroma.Web.ActivityPub.MRF.QuietReply
alias Pleroma.Web.CommonAPI
test "replying to public post is forced to be quiet" do
batman = insert(:user, nickname: "batman")
robin = insert(:user, nickname: "robin")
{:ok, post} = CommonAPI.post(batman, %{status: "To the Batmobile!"})
reply = %{
"type" => "Create",
"actor" => robin.ap_id,
"to" => [
batman.ap_id,
Pleroma.Constants.as_public()
],
"cc" => [robin.follower_address],
"object" => %{
"type" => "Note",
"actor" => robin.ap_id,
"content" => "@batman Wait up, I forgot my spandex!",
"to" => [
batman.ap_id,
Pleroma.Constants.as_public()
],
"cc" => [robin.follower_address],
"inReplyTo" => Object.normalize(post).data["id"]
}
}
assert {:ok, filtered} = QuietReply.filter(reply)
assert batman.ap_id in filtered["to"]
assert batman.ap_id in filtered["object"]["to"]
assert robin.follower_address in filtered["to"]
assert robin.follower_address in filtered["object"]["to"]
assert Pleroma.Constants.as_public() in filtered["cc"]
assert Pleroma.Constants.as_public() in filtered["object"]["cc"]
end
test "replying to unlisted post is unmodified" do
batman = insert(:user, nickname: "batman")
robin = insert(:user, nickname: "robin")
{:ok, post} = CommonAPI.post(batman, %{status: "To the Batmobile!", visibility: "private"})
reply = %{
"type" => "Create",
"actor" => robin.ap_id,
"to" => [batman.ap_id],
"cc" => [],
"object" => %{
"type" => "Note",
"actor" => robin.ap_id,
"content" => "@batman Wait up, I forgot my spandex!",
"to" => [batman.ap_id],
"cc" => [],
"inReplyTo" => Object.normalize(post).data["id"]
}
}
assert {:ok, filtered} = QuietReply.filter(reply)
assert match?(^filtered, reply)
end
test "replying direct is unmodified" do
batman = insert(:user, nickname: "batman")
robin = insert(:user, nickname: "robin")
{:ok, post} = CommonAPI.post(batman, %{status: "To the Batmobile!"})
reply = %{
"type" => "Create",
"actor" => robin.ap_id,
"to" => [batman.ap_id],
"cc" => [],
"object" => %{
"type" => "Note",
"actor" => robin.ap_id,
"content" => "@batman Wait up, I forgot my spandex!",
"to" => [batman.ap_id],
"cc" => [],
"inReplyTo" => Object.normalize(post).data["id"]
}
}
assert {:ok, filtered} = QuietReply.filter(reply)
assert match?(^filtered, reply)
end
test "replying followers-only is unmodified" do
batman = insert(:user, nickname: "batman")
robin = insert(:user, nickname: "robin")
{:ok, post} = CommonAPI.post(batman, %{status: "To the Batmobile!"})
reply = %{
"type" => "Create",
"actor" => robin.ap_id,
"to" => [batman.ap_id, robin.follower_address],
"cc" => [],
"object" => %{
"type" => "Note",
"actor" => robin.ap_id,
"content" => "@batman Wait up, I forgot my spandex!",
"to" => [batman.ap_id, robin.follower_address],
"cc" => [],
"inReplyTo" => Object.normalize(post).data["id"]
}
}
assert {:ok, filtered} = QuietReply.filter(reply)
assert match?(^filtered, reply)
end
test "non-reply posts are unmodified" do
batman = insert(:user, nickname: "batman")
{:ok, post} = CommonAPI.post(batman, %{status: "To the Batmobile!"})
assert {:ok, filtered} = QuietReply.filter(post)
assert match?(^filtered, post)
end
end

View file

@ -0,0 +1,155 @@
defmodule Pleroma.Web.ActivityPub.MRF.RemoteReportPolicyTest do
use Pleroma.DataCase, async: false
alias Pleroma.Web.ActivityPub.MRF.RemoteReportPolicy
setup do
clear_config([:mrf_remote_report, :reject_all], false)
end
test "doesn't impact local report" do
clear_config([:mrf_remote_report, :reject_anonymous], true)
clear_config([:mrf_remote_report, :reject_empty_message], true)
activity = %{
"type" => "Flag",
"actor" => "http://localhost:4001/actor",
"object" => ["https://mastodon.online/users/Gargron"]
}
assert {:ok, _} = RemoteReportPolicy.filter(activity)
end
test "rejects anonymous report if `reject_anonymous: true`" do
clear_config([:mrf_remote_report, :reject_anonymous], true)
clear_config([:mrf_remote_report, :reject_empty_message], true)
activity = %{
"type" => "Flag",
"actor" => "https://mastodon.social/actor",
"object" => ["https://mastodon.online/users/Gargron"]
}
assert {:reject, _} = RemoteReportPolicy.filter(activity)
end
test "preserves anonymous report if `reject_anonymous: false`" do
clear_config([:mrf_remote_report, :reject_anonymous], false)
clear_config([:mrf_remote_report, :reject_empty_message], false)
activity = %{
"type" => "Flag",
"actor" => "https://mastodon.social/actor",
"object" => ["https://mastodon.online/users/Gargron"]
}
assert {:ok, _} = RemoteReportPolicy.filter(activity)
end
test "rejects report on third party if `reject_third_party: true`" do
clear_config([:mrf_remote_report, :reject_third_party], true)
clear_config([:mrf_remote_report, :reject_empty_message], false)
activity = %{
"type" => "Flag",
"actor" => "https://mastodon.social/users/Gargron",
"object" => ["https://mastodon.online/users/Gargron"]
}
assert {:reject, _} = RemoteReportPolicy.filter(activity)
end
test "preserves report on first party if `reject_third_party: true`" do
clear_config([:mrf_remote_report, :reject_third_party], true)
clear_config([:mrf_remote_report, :reject_empty_message], false)
activity = %{
"type" => "Flag",
"actor" => "https://mastodon.social/users/Gargron",
"object" => ["http://localhost:4001/actor"]
}
assert {:ok, _} = RemoteReportPolicy.filter(activity)
end
test "preserves report on third party if `reject_third_party: false`" do
clear_config([:mrf_remote_report, :reject_third_party], false)
clear_config([:mrf_remote_report, :reject_empty_message], false)
activity = %{
"type" => "Flag",
"actor" => "https://mastodon.social/users/Gargron",
"object" => ["https://mastodon.online/users/Gargron"]
}
assert {:ok, _} = RemoteReportPolicy.filter(activity)
end
test "rejects empty message report if `reject_empty_message: true`" do
clear_config([:mrf_remote_report, :reject_anonymous], false)
clear_config([:mrf_remote_report, :reject_empty_message], true)
activity = %{
"type" => "Flag",
"actor" => "https://mastodon.social/users/Gargron",
"object" => ["https://mastodon.online/users/Gargron"]
}
assert {:reject, _} = RemoteReportPolicy.filter(activity)
end
test "rejects empty message report (\"\") if `reject_empty_message: true`" do
clear_config([:mrf_remote_report, :reject_anonymous], false)
clear_config([:mrf_remote_report, :reject_empty_message], true)
activity = %{
"type" => "Flag",
"actor" => "https://mastodon.social/users/Gargron",
"object" => ["https://mastodon.online/users/Gargron"],
"content" => ""
}
assert {:reject, _} = RemoteReportPolicy.filter(activity)
end
test "preserves empty message report if `reject_empty_message: false`" do
clear_config([:mrf_remote_report, :reject_anonymous], false)
clear_config([:mrf_remote_report, :reject_empty_message], false)
activity = %{
"type" => "Flag",
"actor" => "https://mastodon.social/users/Gargron",
"object" => ["https://mastodon.online/users/Gargron"]
}
assert {:ok, _} = RemoteReportPolicy.filter(activity)
end
test "preserves anonymous, empty message report with all settings disabled" do
clear_config([:mrf_remote_report, :reject_anonymous], false)
clear_config([:mrf_remote_report, :reject_empty_message], false)
activity = %{
"type" => "Flag",
"actor" => "https://mastodon.social/actor",
"object" => ["https://mastodon.online/users/Gargron"]
}
assert {:ok, _} = RemoteReportPolicy.filter(activity)
end
test "reject remote report if `reject_all: true`" do
clear_config([:mrf_remote_report, :reject_all], true)
clear_config([:mrf_remote_report, :reject_anonymous], false)
clear_config([:mrf_remote_report, :reject_empty_message], false)
activity = %{
"type" => "Flag",
"actor" => "https://mastodon.social/users/Gargron",
"content" => "Transphobia",
"object" => ["https://mastodon.online/users/Gargron"]
}
assert {:reject, _} = RemoteReportPolicy.filter(activity)
end
end

View file

@ -252,6 +252,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicyTest do
remote_message = build_remote_message()
assert SimplePolicy.filter(remote_message) == {:ok, remote_message}
assert SimplePolicy.id_filter(remote_message["actor"])
end
test "activity has a matching host" do
@ -260,6 +261,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicyTest do
remote_message = build_remote_message()
assert {:reject, _} = SimplePolicy.filter(remote_message)
refute SimplePolicy.id_filter(remote_message["actor"])
end
test "activity matches with wildcard domain" do
@ -268,6 +270,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicyTest do
remote_message = build_remote_message()
assert {:reject, _} = SimplePolicy.filter(remote_message)
refute SimplePolicy.id_filter(remote_message["actor"])
end
test "actor has a matching host" do
@ -276,6 +279,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicyTest do
remote_user = build_remote_user()
assert {:reject, _} = SimplePolicy.filter(remote_user)
refute SimplePolicy.id_filter(remote_user["id"])
end
test "reject Announce when object would be rejected" do
@ -288,6 +292,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicyTest do
}
assert {:reject, _} = SimplePolicy.filter(announce)
# Note: Non-Applicable for id_filter/1
end
test "reject by URI object" do
@ -300,6 +305,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicyTest do
}
assert {:reject, _} = SimplePolicy.filter(announce)
# Note: Non-Applicable for id_filter/1
end
end
@ -318,7 +324,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicyTest do
following_user = insert(:user)
non_following_user = insert(:user)
{:ok, _, _, _} = CommonAPI.follow(following_user, actor)
{:ok, _, _, _} = CommonAPI.follow(actor, following_user)
activity = %{
"actor" => actor.ap_id,
@ -370,6 +376,8 @@ defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicyTest do
assert SimplePolicy.filter(local_message) == {:ok, local_message}
assert SimplePolicy.filter(remote_message) == {:ok, remote_message}
assert SimplePolicy.id_filter(local_message["actor"])
assert SimplePolicy.id_filter(remote_message["actor"])
end
test "is not empty but activity doesn't have a matching host" do
@ -380,6 +388,8 @@ defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicyTest do
assert SimplePolicy.filter(local_message) == {:ok, local_message}
assert {:reject, _} = SimplePolicy.filter(remote_message)
assert SimplePolicy.id_filter(local_message["actor"])
refute SimplePolicy.id_filter(remote_message["actor"])
end
test "activity has a matching host" do
@ -390,6 +400,8 @@ defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicyTest do
assert SimplePolicy.filter(local_message) == {:ok, local_message}
assert SimplePolicy.filter(remote_message) == {:ok, remote_message}
assert SimplePolicy.id_filter(local_message["actor"])
assert SimplePolicy.id_filter(remote_message["actor"])
end
test "activity matches with wildcard domain" do
@ -400,6 +412,8 @@ defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicyTest do
assert SimplePolicy.filter(local_message) == {:ok, local_message}
assert SimplePolicy.filter(remote_message) == {:ok, remote_message}
assert SimplePolicy.id_filter(local_message["actor"])
assert SimplePolicy.id_filter(remote_message["actor"])
end
test "actor has a matching host" do
@ -408,6 +422,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicyTest do
remote_user = build_remote_user()
assert SimplePolicy.filter(remote_user) == {:ok, remote_user}
assert SimplePolicy.id_filter(remote_user["id"])
end
end

View file

@ -87,7 +87,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.StealEmojiPolicyTest do
assert File.exists?(fullpath)
end
test "rejects invalid shortcodes", %{path: path} do
test "rejects invalid shortcodes with slashes", %{path: path} do
message = %{
"type" => "Create",
"object" => %{
@ -113,6 +113,58 @@ defmodule Pleroma.Web.ActivityPub.MRF.StealEmojiPolicyTest do
refute File.exists?(fullpath)
end
test "rejects invalid shortcodes with dots", %{path: path} do
message = %{
"type" => "Create",
"object" => %{
"emoji" => [{"fired.fox", "https://example.org/emoji/firedfox"}],
"actor" => "https://example.org/users/admin"
}
}
fullpath = Path.join(path, "fired.fox.png")
Tesla.Mock.mock(fn %{method: :get, url: "https://example.org/emoji/firedfox"} ->
%Tesla.Env{status: 200, body: File.read!("test/fixtures/image.jpg")}
end)
clear_config(:mrf_steal_emoji, hosts: ["example.org"], size_limit: 284_468)
refute "fired.fox" in installed()
refute File.exists?(path)
assert {:ok, _message} = StealEmojiPolicy.filter(message)
refute "fired.fox" in installed()
refute File.exists?(fullpath)
end
test "rejects invalid shortcodes with special characters", %{path: path} do
message = %{
"type" => "Create",
"object" => %{
"emoji" => [{"fired:fox", "https://example.org/emoji/firedfox"}],
"actor" => "https://example.org/users/admin"
}
}
fullpath = Path.join(path, "fired:fox.png")
Tesla.Mock.mock(fn %{method: :get, url: "https://example.org/emoji/firedfox"} ->
%Tesla.Env{status: 200, body: File.read!("test/fixtures/image.jpg")}
end)
clear_config(:mrf_steal_emoji, hosts: ["example.org"], size_limit: 284_468)
refute "fired:fox" in installed()
refute File.exists?(path)
assert {:ok, _message} = StealEmojiPolicy.filter(message)
refute "fired:fox" in installed()
refute File.exists?(fullpath)
end
test "reject regex shortcode", %{message: message} do
refute "firedfox" in installed()
@ -171,5 +223,74 @@ defmodule Pleroma.Web.ActivityPub.MRF.StealEmojiPolicyTest do
refute "firedfox" in installed()
end
test "accepts valid alphanum shortcodes", %{path: path} do
message = %{
"type" => "Create",
"object" => %{
"emoji" => [{"fire1fox", "https://example.org/emoji/fire1fox.png"}],
"actor" => "https://example.org/users/admin"
}
}
Tesla.Mock.mock(fn %{method: :get, url: "https://example.org/emoji/fire1fox.png"} ->
%Tesla.Env{status: 200, body: File.read!("test/fixtures/image.jpg")}
end)
clear_config(:mrf_steal_emoji, hosts: ["example.org"], size_limit: 284_468)
refute "fire1fox" in installed()
refute File.exists?(path)
assert {:ok, _message} = StealEmojiPolicy.filter(message)
assert "fire1fox" in installed()
end
test "accepts valid shortcodes with underscores", %{path: path} do
message = %{
"type" => "Create",
"object" => %{
"emoji" => [{"fire_fox", "https://example.org/emoji/fire_fox.png"}],
"actor" => "https://example.org/users/admin"
}
}
Tesla.Mock.mock(fn %{method: :get, url: "https://example.org/emoji/fire_fox.png"} ->
%Tesla.Env{status: 200, body: File.read!("test/fixtures/image.jpg")}
end)
clear_config(:mrf_steal_emoji, hosts: ["example.org"], size_limit: 284_468)
refute "fire_fox" in installed()
refute File.exists?(path)
assert {:ok, _message} = StealEmojiPolicy.filter(message)
assert "fire_fox" in installed()
end
test "accepts valid shortcodes with hyphens", %{path: path} do
message = %{
"type" => "Create",
"object" => %{
"emoji" => [{"fire-fox", "https://example.org/emoji/fire-fox.png"}],
"actor" => "https://example.org/users/admin"
}
}
Tesla.Mock.mock(fn %{method: :get, url: "https://example.org/emoji/fire-fox.png"} ->
%Tesla.Env{status: 200, body: File.read!("test/fixtures/image.jpg")}
end)
clear_config(:mrf_steal_emoji, hosts: ["example.org"], size_limit: 284_468)
refute "fire-fox" in installed()
refute File.exists?(path)
assert {:ok, _message} = StealEmojiPolicy.filter(message)
assert "fire-fox" in installed()
end
defp installed, do: Emoji.get_all() |> Enum.map(fn {k, _} -> k end)
end

Some files were not shown because too many files have changed in this diff Show more