Merge remote-tracking branch 'origin/develop' into translate-posts
Signed-off-by: mkljczk <git@mkljczk.pl>
This commit is contained in:
commit
08de5f94e3
118 changed files with 3560 additions and 929 deletions
|
|
@ -14,7 +14,7 @@ defmodule Pleroma.ConversationTest do
|
|||
setup_all do: clear_config([:instance, :federating], true)
|
||||
|
||||
setup do
|
||||
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Config)
|
||||
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Test.StaticConfig)
|
||||
:ok
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -4,6 +4,7 @@
|
|||
|
||||
defmodule Pleroma.Emoji.PackTest do
|
||||
use Pleroma.DataCase
|
||||
alias Pleroma.Emoji
|
||||
alias Pleroma.Emoji.Pack
|
||||
|
||||
@emoji_path Path.join(
|
||||
|
|
@ -53,6 +54,63 @@ defmodule Pleroma.Emoji.PackTest do
|
|||
|
||||
assert updated_pack.files_count == 5
|
||||
end
|
||||
|
||||
test "skips existing emojis when adding from zip file", %{pack: pack} do
|
||||
# First, let's create a test pack with a "bear" emoji
|
||||
test_pack_path = Path.join(@emoji_path, "test_bear_pack")
|
||||
File.mkdir_p(test_pack_path)
|
||||
|
||||
# Create a pack.json file
|
||||
File.write!(Path.join(test_pack_path, "pack.json"), """
|
||||
{
|
||||
"files": { "bear": "bear.png" },
|
||||
"pack": {
|
||||
"description": "Bear Pack", "homepage": "https://pleroma.social",
|
||||
"license": "Test license", "share-files": true
|
||||
}}
|
||||
""")
|
||||
|
||||
# Copy a test image to use as the bear emoji
|
||||
File.cp!(
|
||||
Path.absname("test/instance_static/emoji/test_pack/blank.png"),
|
||||
Path.join(test_pack_path, "bear.png")
|
||||
)
|
||||
|
||||
# Load the pack to register the "bear" emoji in the global registry
|
||||
{:ok, _bear_pack} = Pleroma.Emoji.Pack.load_pack("test_bear_pack")
|
||||
|
||||
# Reload emoji to make sure the bear emoji is in the global registry
|
||||
Emoji.reload()
|
||||
|
||||
# Verify that the bear emoji exists in the global registry
|
||||
assert Emoji.exist?("bear")
|
||||
|
||||
# Now try to add a zip file that contains an emoji with the same shortcode
|
||||
file = %Plug.Upload{
|
||||
content_type: "application/zip",
|
||||
filename: "emojis.zip",
|
||||
path: Path.absname("test/fixtures/emojis.zip")
|
||||
}
|
||||
|
||||
{:ok, updated_pack} = Pack.add_file(pack, nil, nil, file)
|
||||
|
||||
# Verify that the "bear" emoji was skipped
|
||||
refute Map.has_key?(updated_pack.files, "bear")
|
||||
|
||||
# Other emojis should be added
|
||||
assert Map.has_key?(updated_pack.files, "a_trusted_friend-128")
|
||||
assert Map.has_key?(updated_pack.files, "auroraborealis")
|
||||
assert Map.has_key?(updated_pack.files, "baby_in_a_box")
|
||||
assert Map.has_key?(updated_pack.files, "bear-128")
|
||||
|
||||
# Total count should be 4 (all emojis except "bear")
|
||||
assert updated_pack.files_count == 4
|
||||
|
||||
# Clean up the test pack
|
||||
on_exit(fn ->
|
||||
File.rm_rf!(test_pack_path)
|
||||
end)
|
||||
end
|
||||
end
|
||||
|
||||
test "returns error when zip file is bad", %{pack: pack} do
|
||||
|
|
@ -62,7 +120,7 @@ defmodule Pleroma.Emoji.PackTest do
|
|||
path: Path.absname("test/instance_static/emoji/test_pack/blank.png")
|
||||
}
|
||||
|
||||
assert Pack.add_file(pack, nil, nil, file) == {:error, :einval}
|
||||
assert {:error, _} = Pack.add_file(pack, nil, nil, file)
|
||||
end
|
||||
|
||||
test "returns pack when zip file is empty", %{pack: pack} do
|
||||
|
|
|
|||
56
test/pleroma/language/language_detector_test.exs
Normal file
56
test/pleroma/language/language_detector_test.exs
Normal file
|
|
@ -0,0 +1,56 @@
|
|||
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.Language.LanguageDetectorTest do
|
||||
use Pleroma.DataCase, async: true
|
||||
|
||||
alias Pleroma.Language.LanguageDetector
|
||||
alias Pleroma.Language.LanguageDetectorMock
|
||||
alias Pleroma.StaticStubbedConfigMock
|
||||
|
||||
import Mox
|
||||
|
||||
setup do
|
||||
# Stub the StaticStubbedConfigMock to return our mock for the provider
|
||||
StaticStubbedConfigMock
|
||||
|> stub(:get, fn
|
||||
[Pleroma.Language.LanguageDetector, :provider] -> LanguageDetectorMock
|
||||
_other -> nil
|
||||
end)
|
||||
|
||||
# Stub the LanguageDetectorMock with default implementations
|
||||
LanguageDetectorMock
|
||||
|> stub(:missing_dependencies, fn -> [] end)
|
||||
|> stub(:configured?, fn -> true end)
|
||||
|
||||
:ok
|
||||
end
|
||||
|
||||
test "it detects text language" do
|
||||
LanguageDetectorMock
|
||||
|> expect(:detect, fn _text -> "fr" end)
|
||||
|
||||
detected_language = LanguageDetector.detect("Je viens d'atterrir en Tchéquie.")
|
||||
|
||||
assert detected_language == "fr"
|
||||
end
|
||||
|
||||
test "it returns nil if text is not long enough" do
|
||||
# No need to set expectations as the word count check happens before the provider is called
|
||||
|
||||
detected_language = LanguageDetector.detect("it returns nil")
|
||||
|
||||
assert detected_language == nil
|
||||
end
|
||||
|
||||
test "it returns nil if no provider specified" do
|
||||
# Override the stub to return nil for the provider
|
||||
StaticStubbedConfigMock
|
||||
|> expect(:get, fn [Pleroma.Language.LanguageDetector, :provider] -> nil end)
|
||||
|
||||
detected_language = LanguageDetector.detect("this should also return nil")
|
||||
|
||||
assert detected_language == nil
|
||||
end
|
||||
end
|
||||
|
|
@ -19,7 +19,7 @@ defmodule Pleroma.NotificationTest do
|
|||
alias Pleroma.Web.MastodonAPI.NotificationView
|
||||
|
||||
setup do
|
||||
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Config)
|
||||
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Test.StaticConfig)
|
||||
:ok
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -166,6 +166,91 @@ defmodule Pleroma.Object.FetcherTest do
|
|||
)
|
||||
end
|
||||
|
||||
test "it does not fetch from local instance" do
|
||||
local_url = Pleroma.Web.Endpoint.url() <> "/objects/local_resource"
|
||||
|
||||
assert {:fetch, {:error, "Trying to fetch local resource"}} =
|
||||
Fetcher.fetch_object_from_id(local_url)
|
||||
end
|
||||
|
||||
test "it validates content-type headers according to ActivityPub spec" do
|
||||
# Setup a mock for an object with invalid content-type
|
||||
mock(fn
|
||||
%{method: :get, url: "https://example.com/objects/invalid-content-type"} ->
|
||||
%Tesla.Env{
|
||||
status: 200,
|
||||
# Not a valid AP content-type
|
||||
headers: [{"content-type", "application/json"}],
|
||||
body:
|
||||
Jason.encode!(%{
|
||||
"id" => "https://example.com/objects/invalid-content-type",
|
||||
"type" => "Note",
|
||||
"content" => "This has an invalid content type",
|
||||
"actor" => "https://example.com/users/actor",
|
||||
"attributedTo" => "https://example.com/users/actor"
|
||||
})
|
||||
}
|
||||
end)
|
||||
|
||||
assert {:fetch, {:error, {:content_type, "application/json"}}} =
|
||||
Fetcher.fetch_object_from_id("https://example.com/objects/invalid-content-type")
|
||||
end
|
||||
|
||||
test "it accepts objects with application/ld+json and ActivityStreams profile" do
|
||||
# Setup a mock for an object with ld+json content-type and AS profile
|
||||
mock(fn
|
||||
%{method: :get, url: "https://example.com/objects/valid-ld-json"} ->
|
||||
%Tesla.Env{
|
||||
status: 200,
|
||||
headers: [
|
||||
{"content-type",
|
||||
"application/ld+json; profile=\"https://www.w3.org/ns/activitystreams\""}
|
||||
],
|
||||
body:
|
||||
Jason.encode!(%{
|
||||
"id" => "https://example.com/objects/valid-ld-json",
|
||||
"type" => "Note",
|
||||
"content" => "This has a valid ld+json content type",
|
||||
"actor" => "https://example.com/users/actor",
|
||||
"attributedTo" => "https://example.com/users/actor"
|
||||
})
|
||||
}
|
||||
end)
|
||||
|
||||
# This should pass if content-type validation works correctly
|
||||
assert {:ok, object} =
|
||||
Fetcher.fetch_and_contain_remote_object_from_id(
|
||||
"https://example.com/objects/valid-ld-json"
|
||||
)
|
||||
|
||||
assert object["content"] == "This has a valid ld+json content type"
|
||||
end
|
||||
|
||||
test "it rejects objects with no content-type header" do
|
||||
# Setup a mock for an object with no content-type header
|
||||
mock(fn
|
||||
%{method: :get, url: "https://example.com/objects/no-content-type"} ->
|
||||
%Tesla.Env{
|
||||
status: 200,
|
||||
# No content-type header
|
||||
headers: [],
|
||||
body:
|
||||
Jason.encode!(%{
|
||||
"id" => "https://example.com/objects/no-content-type",
|
||||
"type" => "Note",
|
||||
"content" => "This has no content type header",
|
||||
"actor" => "https://example.com/users/actor",
|
||||
"attributedTo" => "https://example.com/users/actor"
|
||||
})
|
||||
}
|
||||
end)
|
||||
|
||||
# We want to test that the request fails with a missing content-type error
|
||||
# but the actual error is {:fetch, {:error, nil}} - we'll check for this format
|
||||
result = Fetcher.fetch_object_from_id("https://example.com/objects/no-content-type")
|
||||
assert {:fetch, {:error, nil}} = result
|
||||
end
|
||||
|
||||
test "it resets instance reachability on successful fetch" do
|
||||
id = "http://mastodon.example.org/@admin/99541947525187367"
|
||||
Instances.set_consistently_unreachable(id)
|
||||
|
|
@ -534,6 +619,110 @@ defmodule Pleroma.Object.FetcherTest do
|
|||
end
|
||||
end
|
||||
|
||||
describe "cross-domain redirect handling" do
|
||||
setup do
|
||||
mock(fn
|
||||
# Cross-domain redirect with original domain in id
|
||||
%{method: :get, url: "https://original.test/objects/123"} ->
|
||||
%Tesla.Env{
|
||||
status: 200,
|
||||
url: "https://media.test/objects/123",
|
||||
headers: [{"content-type", "application/activity+json"}],
|
||||
body:
|
||||
Jason.encode!(%{
|
||||
"id" => "https://original.test/objects/123",
|
||||
"type" => "Note",
|
||||
"content" => "This is redirected content",
|
||||
"actor" => "https://original.test/users/actor",
|
||||
"attributedTo" => "https://original.test/users/actor"
|
||||
})
|
||||
}
|
||||
|
||||
# Cross-domain redirect with final domain in id
|
||||
%{method: :get, url: "https://original.test/objects/final-domain-id"} ->
|
||||
%Tesla.Env{
|
||||
status: 200,
|
||||
url: "https://media.test/objects/final-domain-id",
|
||||
headers: [{"content-type", "application/activity+json"}],
|
||||
body:
|
||||
Jason.encode!(%{
|
||||
"id" => "https://media.test/objects/final-domain-id",
|
||||
"type" => "Note",
|
||||
"content" => "This has final domain in id",
|
||||
"actor" => "https://original.test/users/actor",
|
||||
"attributedTo" => "https://original.test/users/actor"
|
||||
})
|
||||
}
|
||||
|
||||
# No redirect - same domain
|
||||
%{method: :get, url: "https://original.test/objects/same-domain-redirect"} ->
|
||||
%Tesla.Env{
|
||||
status: 200,
|
||||
url: "https://original.test/objects/different-path",
|
||||
headers: [{"content-type", "application/activity+json"}],
|
||||
body:
|
||||
Jason.encode!(%{
|
||||
"id" => "https://original.test/objects/same-domain-redirect",
|
||||
"type" => "Note",
|
||||
"content" => "This has a same-domain redirect",
|
||||
"actor" => "https://original.test/users/actor",
|
||||
"attributedTo" => "https://original.test/users/actor"
|
||||
})
|
||||
}
|
||||
|
||||
# Test case with missing url field in response (common in tests)
|
||||
%{method: :get, url: "https://original.test/objects/missing-url"} ->
|
||||
%Tesla.Env{
|
||||
status: 200,
|
||||
# No url field
|
||||
headers: [{"content-type", "application/activity+json"}],
|
||||
body:
|
||||
Jason.encode!(%{
|
||||
"id" => "https://original.test/objects/missing-url",
|
||||
"type" => "Note",
|
||||
"content" => "This has no URL field in response",
|
||||
"actor" => "https://original.test/users/actor",
|
||||
"attributedTo" => "https://original.test/users/actor"
|
||||
})
|
||||
}
|
||||
end)
|
||||
|
||||
:ok
|
||||
end
|
||||
|
||||
test "it rejects objects from cross-domain redirects with original domain in id" do
|
||||
assert {:error, {:cross_domain_redirect, true}} =
|
||||
Fetcher.fetch_and_contain_remote_object_from_id(
|
||||
"https://original.test/objects/123"
|
||||
)
|
||||
end
|
||||
|
||||
test "it rejects objects from cross-domain redirects with final domain in id" do
|
||||
assert {:error, {:cross_domain_redirect, true}} =
|
||||
Fetcher.fetch_and_contain_remote_object_from_id(
|
||||
"https://original.test/objects/final-domain-id"
|
||||
)
|
||||
end
|
||||
|
||||
test "it accepts objects with same-domain redirects" do
|
||||
assert {:ok, data} =
|
||||
Fetcher.fetch_and_contain_remote_object_from_id(
|
||||
"https://original.test/objects/same-domain-redirect"
|
||||
)
|
||||
|
||||
assert data["content"] == "This has a same-domain redirect"
|
||||
end
|
||||
|
||||
test "it handles responses without URL field (common in tests)" do
|
||||
assert {:ok, data} =
|
||||
Fetcher.fetch_and_contain_remote_object_from_id(
|
||||
"https://original.test/objects/missing-url"
|
||||
)
|
||||
|
||||
assert data["content"] == "This has no URL field in response"
|
||||
end
|
||||
end
|
||||
|
||||
describe "fetch with history" do
|
||||
setup do
|
||||
object2 = %{
|
||||
|
|
|
|||
|
|
@ -3,12 +3,11 @@
|
|||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.Repo.Migrations.AutolinkerToLinkifyTest do
|
||||
use Pleroma.DataCase
|
||||
use Pleroma.DataCase, async: true
|
||||
import Pleroma.Factory
|
||||
import Pleroma.Tests.Helpers
|
||||
alias Pleroma.ConfigDB
|
||||
|
||||
setup do: clear_config(Pleroma.Formatter)
|
||||
setup_all do: require_migration("20200716195806_autolinker_to_linkify")
|
||||
|
||||
test "change/0 converts auto_linker opts for Pleroma.Formatter", %{migration: migration} do
|
||||
|
|
|
|||
|
|
@ -63,7 +63,11 @@ defmodule Pleroma.ReverseProxyTest do
|
|||
|> Plug.Conn.put_req_header("user-agent", "fake/1.0")
|
||||
|> ReverseProxy.call("/user-agent")
|
||||
|
||||
assert json_response(conn, 200) == %{"user-agent" => Pleroma.Application.user_agent()}
|
||||
# Convert the response to a map without relying on json_response
|
||||
body = conn.resp_body
|
||||
assert conn.status == 200
|
||||
response = Jason.decode!(body)
|
||||
assert response == %{"user-agent" => Pleroma.Application.user_agent()}
|
||||
end
|
||||
|
||||
test "closed connection", %{conn: conn} do
|
||||
|
|
@ -138,11 +142,14 @@ defmodule Pleroma.ReverseProxyTest do
|
|||
test "common", %{conn: conn} do
|
||||
ClientMock
|
||||
|> expect(:request, fn :head, "/head", _, _, _ ->
|
||||
{:ok, 200, [{"content-type", "text/html; charset=utf-8"}]}
|
||||
{:ok, 200, [{"content-type", "image/png"}]}
|
||||
end)
|
||||
|
||||
conn = ReverseProxy.call(Map.put(conn, :method, "HEAD"), "/head")
|
||||
assert html_response(conn, 200) == ""
|
||||
|
||||
assert conn.status == 200
|
||||
assert Conn.get_resp_header(conn, "content-type") == ["image/png"]
|
||||
assert conn.resp_body == ""
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -249,7 +256,10 @@ defmodule Pleroma.ReverseProxyTest do
|
|||
)
|
||||
|> ReverseProxy.call("/headers")
|
||||
|
||||
%{"headers" => headers} = json_response(conn, 200)
|
||||
body = conn.resp_body
|
||||
assert conn.status == 200
|
||||
response = Jason.decode!(body)
|
||||
headers = response["headers"]
|
||||
assert headers["Accept"] == "text/html"
|
||||
end
|
||||
|
||||
|
|
@ -262,7 +272,10 @@ defmodule Pleroma.ReverseProxyTest do
|
|||
)
|
||||
|> ReverseProxy.call("/headers")
|
||||
|
||||
%{"headers" => headers} = json_response(conn, 200)
|
||||
body = conn.resp_body
|
||||
assert conn.status == 200
|
||||
response = Jason.decode!(body)
|
||||
headers = response["headers"]
|
||||
refute headers["Accept-Language"]
|
||||
end
|
||||
end
|
||||
|
|
@ -328,4 +341,58 @@ defmodule Pleroma.ReverseProxyTest do
|
|||
assert {"content-disposition", "attachment; filename=\"filename.jpg\""} in conn.resp_headers
|
||||
end
|
||||
end
|
||||
|
||||
describe "content-type sanitisation" do
|
||||
test "preserves allowed image type", %{conn: conn} do
|
||||
ClientMock
|
||||
|> expect(:request, fn :get, "/content", _, _, _ ->
|
||||
{:ok, 200, [{"content-type", "image/png"}], %{url: "/content"}}
|
||||
end)
|
||||
|> expect(:stream_body, fn _ -> :done end)
|
||||
|
||||
conn = ReverseProxy.call(conn, "/content")
|
||||
|
||||
assert conn.status == 200
|
||||
assert Conn.get_resp_header(conn, "content-type") == ["image/png"]
|
||||
end
|
||||
|
||||
test "preserves allowed video type", %{conn: conn} do
|
||||
ClientMock
|
||||
|> expect(:request, fn :get, "/content", _, _, _ ->
|
||||
{:ok, 200, [{"content-type", "video/mp4"}], %{url: "/content"}}
|
||||
end)
|
||||
|> expect(:stream_body, fn _ -> :done end)
|
||||
|
||||
conn = ReverseProxy.call(conn, "/content")
|
||||
|
||||
assert conn.status == 200
|
||||
assert Conn.get_resp_header(conn, "content-type") == ["video/mp4"]
|
||||
end
|
||||
|
||||
test "sanitizes ActivityPub content type", %{conn: conn} do
|
||||
ClientMock
|
||||
|> expect(:request, fn :get, "/content", _, _, _ ->
|
||||
{:ok, 200, [{"content-type", "application/activity+json"}], %{url: "/content"}}
|
||||
end)
|
||||
|> expect(:stream_body, fn _ -> :done end)
|
||||
|
||||
conn = ReverseProxy.call(conn, "/content")
|
||||
|
||||
assert conn.status == 200
|
||||
assert Conn.get_resp_header(conn, "content-type") == ["application/octet-stream"]
|
||||
end
|
||||
|
||||
test "sanitizes LD-JSON content type", %{conn: conn} do
|
||||
ClientMock
|
||||
|> expect(:request, fn :get, "/content", _, _, _ ->
|
||||
{:ok, 200, [{"content-type", "application/ld+json"}], %{url: "/content"}}
|
||||
end)
|
||||
|> expect(:stream_body, fn _ -> :done end)
|
||||
|
||||
conn = ReverseProxy.call(conn, "/content")
|
||||
|
||||
assert conn.status == 200
|
||||
assert Conn.get_resp_header(conn, "content-type") == ["application/octet-stream"]
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
496
test/pleroma/safe_zip_test.exs
Normal file
496
test/pleroma/safe_zip_test.exs
Normal file
|
|
@ -0,0 +1,496 @@
|
|||
defmodule Pleroma.SafeZipTest do
|
||||
# Not making this async because it creates and deletes files
|
||||
use ExUnit.Case
|
||||
|
||||
alias Pleroma.SafeZip
|
||||
|
||||
@fixtures_dir "test/fixtures"
|
||||
@tmp_dir "test/zip_tmp"
|
||||
|
||||
setup do
|
||||
# Ensure tmp directory exists
|
||||
File.mkdir_p!(@tmp_dir)
|
||||
|
||||
on_exit(fn ->
|
||||
# Clean up any files created during tests
|
||||
File.rm_rf!(@tmp_dir)
|
||||
File.mkdir_p!(@tmp_dir)
|
||||
end)
|
||||
|
||||
:ok
|
||||
end
|
||||
|
||||
describe "list_dir_file/1" do
|
||||
test "lists files in a valid zip" do
|
||||
{:ok, files} = SafeZip.list_dir_file(Path.join(@fixtures_dir, "emojis.zip"))
|
||||
assert is_list(files)
|
||||
assert length(files) > 0
|
||||
end
|
||||
|
||||
test "returns an empty list for empty zip" do
|
||||
{:ok, files} = SafeZip.list_dir_file(Path.join(@fixtures_dir, "empty.zip"))
|
||||
assert files == []
|
||||
end
|
||||
|
||||
test "returns error for non-existent file" do
|
||||
assert {:error, _} = SafeZip.list_dir_file(Path.join(@fixtures_dir, "nonexistent.zip"))
|
||||
end
|
||||
|
||||
test "only lists regular files, not directories" do
|
||||
# Create a zip with both files and directories
|
||||
zip_path = create_zip_with_directory()
|
||||
|
||||
# List files with SafeZip
|
||||
{:ok, files} = SafeZip.list_dir_file(zip_path)
|
||||
|
||||
# Verify only regular files are listed, not directories
|
||||
assert "file_in_dir/test_file.txt" in files
|
||||
assert "root_file.txt" in files
|
||||
|
||||
# Directory entries should not be included in the list
|
||||
refute "file_in_dir/" in files
|
||||
end
|
||||
end
|
||||
|
||||
describe "contains_all_data?/2" do
|
||||
test "returns true when all files are in the archive" do
|
||||
# For this test, we'll create our own zip file with known content
|
||||
# to ensure we can test the contains_all_data? function properly
|
||||
zip_path = create_zip_with_directory()
|
||||
archive_data = File.read!(zip_path)
|
||||
|
||||
# Check if the archive contains the root file
|
||||
# Note: The function expects charlists (Erlang strings) in the MapSet
|
||||
assert SafeZip.contains_all_data?(archive_data, MapSet.new([~c"root_file.txt"]))
|
||||
end
|
||||
|
||||
test "returns false when files are missing" do
|
||||
archive_path = Path.join(@fixtures_dir, "emojis.zip")
|
||||
archive_data = File.read!(archive_path)
|
||||
|
||||
# Create a MapSet with non-existent files
|
||||
fset = MapSet.new([~c"nonexistent.txt"])
|
||||
|
||||
refute SafeZip.contains_all_data?(archive_data, fset)
|
||||
end
|
||||
|
||||
test "returns false for invalid archive data" do
|
||||
refute SafeZip.contains_all_data?("invalid data", MapSet.new([~c"file.txt"]))
|
||||
end
|
||||
|
||||
test "only checks for regular files, not directories" do
|
||||
# Create a zip with both files and directories
|
||||
zip_path = create_zip_with_directory()
|
||||
archive_data = File.read!(zip_path)
|
||||
|
||||
# Check if the archive contains a directory (should return false)
|
||||
refute SafeZip.contains_all_data?(archive_data, MapSet.new([~c"file_in_dir/"]))
|
||||
|
||||
# For this test, we'll manually check if the file exists in the archive
|
||||
# by extracting it and verifying it exists
|
||||
extract_dir = Path.join(@tmp_dir, "extract_check")
|
||||
File.mkdir_p!(extract_dir)
|
||||
{:ok, files} = SafeZip.unzip_file(zip_path, extract_dir)
|
||||
|
||||
# Verify the root file was extracted
|
||||
assert Enum.any?(files, fn file ->
|
||||
Path.basename(file) == "root_file.txt"
|
||||
end)
|
||||
|
||||
# Verify the file exists on disk
|
||||
assert File.exists?(Path.join(extract_dir, "root_file.txt"))
|
||||
end
|
||||
end
|
||||
|
||||
describe "zip/4" do
|
||||
test "creates a zip file on disk" do
|
||||
# Create a test file
|
||||
test_file_path = Path.join(@tmp_dir, "test_file.txt")
|
||||
File.write!(test_file_path, "test content")
|
||||
|
||||
# Create a zip file
|
||||
zip_path = Path.join(@tmp_dir, "test.zip")
|
||||
assert {:ok, ^zip_path} = SafeZip.zip(zip_path, ["test_file.txt"], @tmp_dir, false)
|
||||
|
||||
# Verify the zip file exists
|
||||
assert File.exists?(zip_path)
|
||||
end
|
||||
|
||||
test "creates a zip file in memory" do
|
||||
# Create a test file
|
||||
test_file_path = Path.join(@tmp_dir, "test_file.txt")
|
||||
File.write!(test_file_path, "test content")
|
||||
|
||||
# Create a zip file in memory
|
||||
zip_name = Path.join(@tmp_dir, "test.zip")
|
||||
|
||||
assert {:ok, {^zip_name, zip_data}} =
|
||||
SafeZip.zip(zip_name, ["test_file.txt"], @tmp_dir, true)
|
||||
|
||||
# Verify the zip data is binary
|
||||
assert is_binary(zip_data)
|
||||
end
|
||||
|
||||
test "returns error for unsafe paths" do
|
||||
# Try to zip a file with path traversal
|
||||
assert {:error, _} =
|
||||
SafeZip.zip(
|
||||
Path.join(@tmp_dir, "test.zip"),
|
||||
["../fixtures/test.txt"],
|
||||
@tmp_dir,
|
||||
false
|
||||
)
|
||||
end
|
||||
|
||||
test "can create zip with directories" do
|
||||
# Create a directory structure
|
||||
dir_path = Path.join(@tmp_dir, "test_dir")
|
||||
File.mkdir_p!(dir_path)
|
||||
|
||||
file_in_dir_path = Path.join(dir_path, "file_in_dir.txt")
|
||||
File.write!(file_in_dir_path, "file in directory")
|
||||
|
||||
# Create a zip file
|
||||
zip_path = Path.join(@tmp_dir, "dir_test.zip")
|
||||
|
||||
assert {:ok, ^zip_path} =
|
||||
SafeZip.zip(
|
||||
zip_path,
|
||||
["test_dir/file_in_dir.txt"],
|
||||
@tmp_dir,
|
||||
false
|
||||
)
|
||||
|
||||
# Verify the zip file exists
|
||||
assert File.exists?(zip_path)
|
||||
|
||||
# Extract and verify the directory structure is preserved
|
||||
extract_dir = Path.join(@tmp_dir, "extract")
|
||||
{:ok, files} = SafeZip.unzip_file(zip_path, extract_dir)
|
||||
|
||||
# Check if the file path is in the list, accounting for possible full paths
|
||||
assert Enum.any?(files, fn file ->
|
||||
String.ends_with?(file, "file_in_dir.txt")
|
||||
end)
|
||||
|
||||
# Verify the file exists in the expected location
|
||||
assert File.exists?(Path.join([extract_dir, "test_dir", "file_in_dir.txt"]))
|
||||
end
|
||||
end
|
||||
|
||||
describe "unzip_file/3" do
|
||||
test "extracts files from a zip archive" do
|
||||
archive_path = Path.join(@fixtures_dir, "emojis.zip")
|
||||
|
||||
# Extract the archive
|
||||
assert {:ok, files} = SafeZip.unzip_file(archive_path, @tmp_dir)
|
||||
|
||||
# Verify files were extracted
|
||||
assert is_list(files)
|
||||
assert length(files) > 0
|
||||
|
||||
# Verify at least one file exists
|
||||
first_file = List.first(files)
|
||||
|
||||
# Simply check that the file exists in the tmp directory
|
||||
assert File.exists?(first_file)
|
||||
end
|
||||
|
||||
test "extracts specific files from a zip archive" do
|
||||
archive_path = Path.join(@fixtures_dir, "emojis.zip")
|
||||
|
||||
# Get list of files in the archive
|
||||
{:ok, all_files} = SafeZip.list_dir_file(archive_path)
|
||||
file_to_extract = List.first(all_files)
|
||||
|
||||
# Extract only one file
|
||||
assert {:ok, [extracted_file]} =
|
||||
SafeZip.unzip_file(archive_path, @tmp_dir, [file_to_extract])
|
||||
|
||||
# Verify only the specified file was extracted
|
||||
assert Path.basename(extracted_file) == Path.basename(file_to_extract)
|
||||
|
||||
# Check that the file exists in the tmp directory
|
||||
assert File.exists?(Path.join(@tmp_dir, Path.basename(file_to_extract)))
|
||||
end
|
||||
|
||||
test "returns error for invalid zip file" do
|
||||
invalid_path = Path.join(@tmp_dir, "invalid.zip")
|
||||
File.write!(invalid_path, "not a zip file")
|
||||
|
||||
assert {:error, _} = SafeZip.unzip_file(invalid_path, @tmp_dir)
|
||||
end
|
||||
|
||||
test "creates directories when extracting files in subdirectories" do
|
||||
# Create a zip with files in subdirectories
|
||||
zip_path = create_zip_with_directory()
|
||||
|
||||
# Extract the archive
|
||||
assert {:ok, files} = SafeZip.unzip_file(zip_path, @tmp_dir)
|
||||
|
||||
# Verify files were extracted - handle both relative and absolute paths
|
||||
assert Enum.any?(files, fn file ->
|
||||
Path.basename(file) == "test_file.txt" &&
|
||||
String.contains?(file, "file_in_dir")
|
||||
end)
|
||||
|
||||
assert Enum.any?(files, fn file ->
|
||||
Path.basename(file) == "root_file.txt"
|
||||
end)
|
||||
|
||||
# Verify directory was created
|
||||
dir_path = Path.join(@tmp_dir, "file_in_dir")
|
||||
assert File.exists?(dir_path)
|
||||
assert File.dir?(dir_path)
|
||||
|
||||
# Verify file in directory was extracted
|
||||
file_path = Path.join(dir_path, "test_file.txt")
|
||||
assert File.exists?(file_path)
|
||||
end
|
||||
end
|
||||
|
||||
describe "unzip_data/3" do
|
||||
test "extracts files from zip data" do
|
||||
archive_path = Path.join(@fixtures_dir, "emojis.zip")
|
||||
archive_data = File.read!(archive_path)
|
||||
|
||||
# Extract the archive from data
|
||||
assert {:ok, files} = SafeZip.unzip_data(archive_data, @tmp_dir)
|
||||
|
||||
# Verify files were extracted
|
||||
assert is_list(files)
|
||||
assert length(files) > 0
|
||||
|
||||
# Verify at least one file exists
|
||||
first_file = List.first(files)
|
||||
|
||||
# Simply check that the file exists in the tmp directory
|
||||
assert File.exists?(first_file)
|
||||
end
|
||||
|
||||
test "extracts specific files from zip data" do
|
||||
archive_path = Path.join(@fixtures_dir, "emojis.zip")
|
||||
archive_data = File.read!(archive_path)
|
||||
|
||||
# Get list of files in the archive
|
||||
{:ok, all_files} = SafeZip.list_dir_file(archive_path)
|
||||
file_to_extract = List.first(all_files)
|
||||
|
||||
# Extract only one file
|
||||
assert {:ok, extracted_files} =
|
||||
SafeZip.unzip_data(archive_data, @tmp_dir, [file_to_extract])
|
||||
|
||||
# Verify only the specified file was extracted
|
||||
assert Enum.any?(extracted_files, fn path ->
|
||||
Path.basename(path) == Path.basename(file_to_extract)
|
||||
end)
|
||||
|
||||
# Simply check that the file exists in the tmp directory
|
||||
assert File.exists?(Path.join(@tmp_dir, Path.basename(file_to_extract)))
|
||||
end
|
||||
|
||||
test "returns error for invalid zip data" do
|
||||
assert {:error, _} = SafeZip.unzip_data("not a zip file", @tmp_dir)
|
||||
end
|
||||
|
||||
test "creates directories when extracting files in subdirectories from data" do
|
||||
# Create a zip with files in subdirectories
|
||||
zip_path = create_zip_with_directory()
|
||||
archive_data = File.read!(zip_path)
|
||||
|
||||
# Extract the archive from data
|
||||
assert {:ok, files} = SafeZip.unzip_data(archive_data, @tmp_dir)
|
||||
|
||||
# Verify files were extracted - handle both relative and absolute paths
|
||||
assert Enum.any?(files, fn file ->
|
||||
Path.basename(file) == "test_file.txt" &&
|
||||
String.contains?(file, "file_in_dir")
|
||||
end)
|
||||
|
||||
assert Enum.any?(files, fn file ->
|
||||
Path.basename(file) == "root_file.txt"
|
||||
end)
|
||||
|
||||
# Verify directory was created
|
||||
dir_path = Path.join(@tmp_dir, "file_in_dir")
|
||||
assert File.exists?(dir_path)
|
||||
assert File.dir?(dir_path)
|
||||
|
||||
# Verify file in directory was extracted
|
||||
file_path = Path.join(dir_path, "test_file.txt")
|
||||
assert File.exists?(file_path)
|
||||
end
|
||||
end
|
||||
|
||||
# Security tests
|
||||
describe "security checks" do
|
||||
test "prevents path traversal in zip extraction" do
|
||||
# Create a malicious zip file with path traversal
|
||||
malicious_zip_path = create_malicious_zip_with_path_traversal()
|
||||
|
||||
# Try to extract it with SafeZip
|
||||
assert {:error, _} = SafeZip.unzip_file(malicious_zip_path, @tmp_dir)
|
||||
|
||||
# Verify the file was not extracted outside the target directory
|
||||
refute File.exists?(Path.join(Path.dirname(@tmp_dir), "traversal_attempt.txt"))
|
||||
end
|
||||
|
||||
test "prevents directory traversal in zip listing" do
|
||||
# Create a malicious zip file with path traversal
|
||||
malicious_zip_path = create_malicious_zip_with_path_traversal()
|
||||
|
||||
# Try to list files with SafeZip
|
||||
assert {:error, _} = SafeZip.list_dir_file(malicious_zip_path)
|
||||
end
|
||||
|
||||
test "prevents path traversal in zip data extraction" do
|
||||
# Create a malicious zip file with path traversal
|
||||
malicious_zip_path = create_malicious_zip_with_path_traversal()
|
||||
malicious_data = File.read!(malicious_zip_path)
|
||||
|
||||
# Try to extract it with SafeZip
|
||||
assert {:error, _} = SafeZip.unzip_data(malicious_data, @tmp_dir)
|
||||
|
||||
# Verify the file was not extracted outside the target directory
|
||||
refute File.exists?(Path.join(Path.dirname(@tmp_dir), "traversal_attempt.txt"))
|
||||
end
|
||||
|
||||
test "handles zip bomb attempts" do
|
||||
# Create a zip bomb (a zip with many files or large files)
|
||||
zip_bomb_path = create_zip_bomb()
|
||||
|
||||
# The SafeZip module should handle this gracefully
|
||||
# Either by successfully extracting it (if it's not too large)
|
||||
# or by returning an error (if it detects a potential zip bomb)
|
||||
result = SafeZip.unzip_file(zip_bomb_path, @tmp_dir)
|
||||
|
||||
case result do
|
||||
{:ok, _} ->
|
||||
# If it successfully extracts, make sure it didn't fill up the disk
|
||||
# This is a simple check to ensure the extraction was controlled
|
||||
assert File.exists?(@tmp_dir)
|
||||
|
||||
{:error, _} ->
|
||||
# If it returns an error, that's also acceptable
|
||||
# The important thing is that it doesn't crash or hang
|
||||
assert true
|
||||
end
|
||||
end
|
||||
|
||||
test "handles deeply nested directory structures" do
|
||||
# Create a zip with deeply nested directories
|
||||
deep_nest_path = create_deeply_nested_zip()
|
||||
|
||||
# The SafeZip module should handle this gracefully
|
||||
result = SafeZip.unzip_file(deep_nest_path, @tmp_dir)
|
||||
|
||||
case result do
|
||||
{:ok, files} ->
|
||||
# If it successfully extracts, verify the files were extracted
|
||||
assert is_list(files)
|
||||
assert length(files) > 0
|
||||
|
||||
{:error, _} ->
|
||||
# If it returns an error, that's also acceptable
|
||||
# The important thing is that it doesn't crash or hang
|
||||
assert true
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# Helper functions to create test fixtures
|
||||
|
||||
# Creates a zip file with a path traversal attempt
|
||||
defp create_malicious_zip_with_path_traversal do
|
||||
malicious_zip_path = Path.join(@tmp_dir, "path_traversal.zip")
|
||||
|
||||
# Create a file to include in the zip
|
||||
test_file_path = Path.join(@tmp_dir, "test_file.txt")
|
||||
File.write!(test_file_path, "malicious content")
|
||||
|
||||
# Use Erlang's zip module directly to create a zip with path traversal
|
||||
{:ok, charlist_path} =
|
||||
:zip.create(
|
||||
String.to_charlist(malicious_zip_path),
|
||||
[{String.to_charlist("../traversal_attempt.txt"), File.read!(test_file_path)}]
|
||||
)
|
||||
|
||||
to_string(charlist_path)
|
||||
end
|
||||
|
||||
# Creates a zip file with directory entries
|
||||
defp create_zip_with_directory do
|
||||
zip_path = Path.join(@tmp_dir, "with_directory.zip")
|
||||
|
||||
# Create files to include in the zip
|
||||
root_file_path = Path.join(@tmp_dir, "root_file.txt")
|
||||
File.write!(root_file_path, "root file content")
|
||||
|
||||
# Create a directory and a file in it
|
||||
dir_path = Path.join(@tmp_dir, "file_in_dir")
|
||||
File.mkdir_p!(dir_path)
|
||||
|
||||
file_in_dir_path = Path.join(dir_path, "test_file.txt")
|
||||
File.write!(file_in_dir_path, "file in directory content")
|
||||
|
||||
# Use Erlang's zip module to create a zip with directory structure
|
||||
{:ok, charlist_path} =
|
||||
:zip.create(
|
||||
String.to_charlist(zip_path),
|
||||
[
|
||||
{String.to_charlist("root_file.txt"), File.read!(root_file_path)},
|
||||
{String.to_charlist("file_in_dir/test_file.txt"), File.read!(file_in_dir_path)}
|
||||
]
|
||||
)
|
||||
|
||||
to_string(charlist_path)
|
||||
end
|
||||
|
||||
# Creates a zip bomb (a zip with many small files)
|
||||
defp create_zip_bomb do
|
||||
zip_path = Path.join(@tmp_dir, "zip_bomb.zip")
|
||||
|
||||
# Create a small file to duplicate many times
|
||||
small_file_path = Path.join(@tmp_dir, "small_file.txt")
|
||||
File.write!(small_file_path, String.duplicate("A", 100))
|
||||
|
||||
# Create a list of many files to include in the zip
|
||||
file_entries =
|
||||
for i <- 1..100 do
|
||||
{String.to_charlist("file_#{i}.txt"), File.read!(small_file_path)}
|
||||
end
|
||||
|
||||
# Use Erlang's zip module to create a zip with many files
|
||||
{:ok, charlist_path} =
|
||||
:zip.create(
|
||||
String.to_charlist(zip_path),
|
||||
file_entries
|
||||
)
|
||||
|
||||
to_string(charlist_path)
|
||||
end
|
||||
|
||||
# Creates a zip with deeply nested directories
|
||||
defp create_deeply_nested_zip do
|
||||
zip_path = Path.join(@tmp_dir, "deep_nest.zip")
|
||||
|
||||
# Create a file to include in the zip
|
||||
file_content = "test content"
|
||||
|
||||
# Create a list of deeply nested files
|
||||
file_entries =
|
||||
for i <- 1..10 do
|
||||
nested_path = Enum.reduce(1..i, "nested", fn j, acc -> "#{acc}/level_#{j}" end)
|
||||
{String.to_charlist("#{nested_path}/file.txt"), file_content}
|
||||
end
|
||||
|
||||
# Use Erlang's zip module to create a zip with deeply nested directories
|
||||
{:ok, charlist_path} =
|
||||
:zip.create(
|
||||
String.to_charlist(zip_path),
|
||||
file_entries
|
||||
)
|
||||
|
||||
to_string(charlist_path)
|
||||
end
|
||||
end
|
||||
|
|
@ -3,8 +3,10 @@
|
|||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.Upload.Filter.AnonymizeFilenameTest do
|
||||
use Pleroma.DataCase
|
||||
use Pleroma.DataCase, async: true
|
||||
|
||||
import Mox
|
||||
alias Pleroma.StaticStubbedConfigMock, as: ConfigMock
|
||||
alias Pleroma.Upload
|
||||
|
||||
setup do
|
||||
|
|
@ -19,21 +21,26 @@ defmodule Pleroma.Upload.Filter.AnonymizeFilenameTest do
|
|||
%{upload_file: upload_file}
|
||||
end
|
||||
|
||||
setup do: clear_config([Pleroma.Upload.Filter.AnonymizeFilename, :text])
|
||||
|
||||
test "it replaces filename on pre-defined text", %{upload_file: upload_file} do
|
||||
clear_config([Upload.Filter.AnonymizeFilename, :text], "custom-file.png")
|
||||
ConfigMock
|
||||
|> stub(:get, fn [Upload.Filter.AnonymizeFilename, :text] -> "custom-file.png" end)
|
||||
|
||||
{:ok, :filtered, %Upload{name: name}} = Upload.Filter.AnonymizeFilename.filter(upload_file)
|
||||
assert name == "custom-file.png"
|
||||
end
|
||||
|
||||
test "it replaces filename on pre-defined text expression", %{upload_file: upload_file} do
|
||||
clear_config([Upload.Filter.AnonymizeFilename, :text], "custom-file.{extension}")
|
||||
ConfigMock
|
||||
|> stub(:get, fn [Upload.Filter.AnonymizeFilename, :text] -> "custom-file.{extension}" end)
|
||||
|
||||
{:ok, :filtered, %Upload{name: name}} = Upload.Filter.AnonymizeFilename.filter(upload_file)
|
||||
assert name == "custom-file.jpg"
|
||||
end
|
||||
|
||||
test "it replaces filename on random text", %{upload_file: upload_file} do
|
||||
ConfigMock
|
||||
|> stub(:get, fn [Upload.Filter.AnonymizeFilename, :text] -> nil end)
|
||||
|
||||
{:ok, :filtered, %Upload{name: name}} = Upload.Filter.AnonymizeFilename.filter(upload_file)
|
||||
assert <<_::bytes-size(14)>> <> ".jpg" = name
|
||||
refute name == "an… image.jpg"
|
||||
|
|
|
|||
|
|
@ -3,9 +3,10 @@
|
|||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.Upload.Filter.MogrifunTest do
|
||||
use Pleroma.DataCase
|
||||
import Mock
|
||||
use Pleroma.DataCase, async: true
|
||||
import Mox
|
||||
|
||||
alias Pleroma.MogrifyMock
|
||||
alias Pleroma.Upload
|
||||
alias Pleroma.Upload.Filter
|
||||
|
||||
|
|
@ -22,23 +23,12 @@ defmodule Pleroma.Upload.Filter.MogrifunTest do
|
|||
tempfile: Path.absname("test/fixtures/image_tmp.jpg")
|
||||
}
|
||||
|
||||
task =
|
||||
Task.async(fn ->
|
||||
assert_receive {:apply_filter, {}}, 4_000
|
||||
end)
|
||||
MogrifyMock
|
||||
|> stub(:open, fn _file -> %{} end)
|
||||
|> stub(:custom, fn _image, _action -> %{} end)
|
||||
|> stub(:custom, fn _image, _action, _options -> %{} end)
|
||||
|> stub(:save, fn _image, [in_place: true] -> :ok end)
|
||||
|
||||
with_mocks([
|
||||
{Mogrify, [],
|
||||
[
|
||||
open: fn _f -> %Mogrify.Image{} end,
|
||||
custom: fn _m, _a -> send(task.pid, {:apply_filter, {}}) end,
|
||||
custom: fn _m, _a, _o -> send(task.pid, {:apply_filter, {}}) end,
|
||||
save: fn _f, _o -> :ok end
|
||||
]}
|
||||
]) do
|
||||
assert Filter.Mogrifun.filter(upload) == {:ok, :filtered}
|
||||
end
|
||||
|
||||
Task.await(task)
|
||||
assert Filter.Mogrifun.filter(upload) == {:ok, :filtered}
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -3,13 +3,18 @@
|
|||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.Upload.Filter.MogrifyTest do
|
||||
use Pleroma.DataCase
|
||||
import Mock
|
||||
use Pleroma.DataCase, async: true
|
||||
import Mox
|
||||
|
||||
alias Pleroma.MogrifyMock
|
||||
alias Pleroma.StaticStubbedConfigMock, as: ConfigMock
|
||||
alias Pleroma.Upload.Filter
|
||||
|
||||
setup :verify_on_exit!
|
||||
|
||||
test "apply mogrify filter" do
|
||||
clear_config(Filter.Mogrify, args: [{"tint", "40"}])
|
||||
ConfigMock
|
||||
|> stub(:get!, fn [Filter.Mogrify, :args] -> [{"tint", "40"}] end)
|
||||
|
||||
File.cp!(
|
||||
"test/fixtures/image.jpg",
|
||||
|
|
@ -23,19 +28,11 @@ defmodule Pleroma.Upload.Filter.MogrifyTest do
|
|||
tempfile: Path.absname("test/fixtures/image_tmp.jpg")
|
||||
}
|
||||
|
||||
task =
|
||||
Task.async(fn ->
|
||||
assert_receive {:apply_filter, {_, "tint", "40"}}, 4_000
|
||||
end)
|
||||
MogrifyMock
|
||||
|> expect(:open, fn _file -> %{} end)
|
||||
|> expect(:custom, fn _image, "tint", "40" -> %{} end)
|
||||
|> expect(:save, fn _image, [in_place: true] -> :ok end)
|
||||
|
||||
with_mock Mogrify,
|
||||
open: fn _f -> %Mogrify.Image{} end,
|
||||
custom: fn _m, _a -> :ok end,
|
||||
custom: fn m, a, o -> send(task.pid, {:apply_filter, {m, a, o}}) end,
|
||||
save: fn _f, _o -> :ok end do
|
||||
assert Filter.Mogrify.filter(upload) == {:ok, :filtered}
|
||||
end
|
||||
|
||||
Task.await(task)
|
||||
assert Filter.Mogrify.filter(upload) == {:ok, :filtered}
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -5,12 +5,13 @@
|
|||
defmodule Pleroma.Upload.FilterTest do
|
||||
use Pleroma.DataCase
|
||||
|
||||
import Mox
|
||||
alias Pleroma.StaticStubbedConfigMock, as: ConfigMock
|
||||
alias Pleroma.Upload.Filter
|
||||
|
||||
setup do: clear_config([Pleroma.Upload.Filter.AnonymizeFilename, :text])
|
||||
|
||||
test "applies filters" do
|
||||
clear_config([Pleroma.Upload.Filter.AnonymizeFilename, :text], "custom-file.png")
|
||||
ConfigMock
|
||||
|> stub(:get, fn [Pleroma.Upload.Filter.AnonymizeFilename, :text] -> "custom-file.png" end)
|
||||
|
||||
File.cp!(
|
||||
"test/fixtures/image.jpg",
|
||||
|
|
|
|||
|
|
@ -3,11 +3,12 @@
|
|||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.UserRelationshipTest do
|
||||
alias Pleroma.DateTimeMock
|
||||
alias Pleroma.UserRelationship
|
||||
|
||||
use Pleroma.DataCase, async: false
|
||||
use Pleroma.DataCase, async: true
|
||||
|
||||
import Mock
|
||||
import Mox
|
||||
import Pleroma.Factory
|
||||
|
||||
describe "*_exists?/2" do
|
||||
|
|
@ -52,6 +53,9 @@ defmodule Pleroma.UserRelationshipTest do
|
|||
end
|
||||
|
||||
test "creates user relationship record if it doesn't exist", %{users: [user1, user2]} do
|
||||
DateTimeMock
|
||||
|> stub_with(Pleroma.DateTime.Impl)
|
||||
|
||||
for relationship_type <- [
|
||||
:block,
|
||||
:mute,
|
||||
|
|
@ -80,13 +84,15 @@ defmodule Pleroma.UserRelationshipTest do
|
|||
end
|
||||
|
||||
test "if record already exists, returns it", %{users: [user1, user2]} do
|
||||
user_block =
|
||||
with_mock NaiveDateTime, [:passthrough], utc_now: fn -> ~N[2017-03-17 17:09:58] end do
|
||||
{:ok, %{inserted_at: ~N[2017-03-17 17:09:58]}} =
|
||||
UserRelationship.create_block(user1, user2)
|
||||
end
|
||||
fixed_datetime = ~N[2017-03-17 17:09:58]
|
||||
|
||||
assert user_block == UserRelationship.create_block(user1, user2)
|
||||
Pleroma.DateTimeMock
|
||||
|> expect(:utc_now, 2, fn -> fixed_datetime end)
|
||||
|
||||
{:ok, %{inserted_at: ^fixed_datetime}} = UserRelationship.create_block(user1, user2)
|
||||
|
||||
# Test the idempotency without caring about the exact time
|
||||
assert {:ok, _} = UserRelationship.create_block(user1, user2)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -20,7 +20,7 @@ defmodule Pleroma.UserTest do
|
|||
import Swoosh.TestAssertions
|
||||
|
||||
setup do
|
||||
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Config)
|
||||
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Test.StaticConfig)
|
||||
:ok
|
||||
end
|
||||
|
||||
|
|
@ -2405,8 +2405,8 @@ defmodule Pleroma.UserTest do
|
|||
other_user =
|
||||
insert(:user,
|
||||
local: false,
|
||||
follower_address: "http://localhost:4001/users/masto_closed/followers",
|
||||
following_address: "http://localhost:4001/users/masto_closed/following"
|
||||
follower_address: "https://remote.org/users/masto_closed/followers",
|
||||
following_address: "https://remote.org/users/masto_closed/following"
|
||||
)
|
||||
|
||||
assert other_user.following_count == 0
|
||||
|
|
@ -2426,8 +2426,8 @@ defmodule Pleroma.UserTest do
|
|||
other_user =
|
||||
insert(:user,
|
||||
local: false,
|
||||
follower_address: "http://localhost:4001/users/masto_closed/followers",
|
||||
following_address: "http://localhost:4001/users/masto_closed/following"
|
||||
follower_address: "https://remote.org/users/masto_closed/followers",
|
||||
following_address: "https://remote.org/users/masto_closed/following"
|
||||
)
|
||||
|
||||
assert other_user.following_count == 0
|
||||
|
|
@ -2447,8 +2447,8 @@ defmodule Pleroma.UserTest do
|
|||
other_user =
|
||||
insert(:user,
|
||||
local: false,
|
||||
follower_address: "http://localhost:4001/users/masto_closed/followers",
|
||||
following_address: "http://localhost:4001/users/masto_closed/following"
|
||||
follower_address: "https://remote.org/users/masto_closed/followers",
|
||||
following_address: "https://remote.org/users/masto_closed/following"
|
||||
)
|
||||
|
||||
assert other_user.following_count == 0
|
||||
|
|
|
|||
|
|
@ -26,7 +26,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
|
|||
require Pleroma.Constants
|
||||
|
||||
setup do
|
||||
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Config)
|
||||
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Test.StaticConfig)
|
||||
:ok
|
||||
end
|
||||
|
||||
|
|
@ -1344,6 +1344,11 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
|
|||
end
|
||||
|
||||
describe "GET /users/:nickname/outbox" do
|
||||
setup do
|
||||
Mox.stub_with(Pleroma.StaticStubbedConfigMock, Pleroma.Config)
|
||||
:ok
|
||||
end
|
||||
|
||||
test "it paginates correctly", %{conn: conn} do
|
||||
user = insert(:user)
|
||||
conn = assign(conn, :user, user)
|
||||
|
|
@ -1432,6 +1437,22 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
|
|||
assert %{"orderedItems" => []} = resp
|
||||
end
|
||||
|
||||
test "it does not return a local note activity when C2S API is disabled", %{conn: conn} do
|
||||
clear_config([:activitypub, :client_api_enabled], false)
|
||||
user = insert(:user)
|
||||
reader = insert(:user)
|
||||
{:ok, _note_activity} = CommonAPI.post(user, %{status: "mew mew", visibility: "local"})
|
||||
|
||||
resp =
|
||||
conn
|
||||
|> assign(:user, reader)
|
||||
|> put_req_header("accept", "application/activity+json")
|
||||
|> get("/users/#{user.nickname}/outbox?page=true")
|
||||
|> json_response(200)
|
||||
|
||||
assert %{"orderedItems" => []} = resp
|
||||
end
|
||||
|
||||
test "it returns a note activity in a collection", %{conn: conn} do
|
||||
note_activity = insert(:note_activity)
|
||||
note_object = Object.normalize(note_activity, fetch: false)
|
||||
|
|
@ -1483,6 +1504,35 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
|
|||
assert [answer_outbox] = outbox_get["orderedItems"]
|
||||
assert answer_outbox["id"] == activity.data["id"]
|
||||
end
|
||||
|
||||
test "it works with authorized fetch forced when authenticated" do
|
||||
clear_config([:activitypub, :authorized_fetch_mode], true)
|
||||
|
||||
user = insert(:user)
|
||||
outbox_endpoint = user.ap_id <> "/outbox"
|
||||
|
||||
conn =
|
||||
build_conn()
|
||||
|> assign(:user, user)
|
||||
|> put_req_header("accept", "application/activity+json")
|
||||
|> get(outbox_endpoint)
|
||||
|
||||
assert json_response(conn, 200)
|
||||
end
|
||||
|
||||
test "it fails with authorized fetch forced when unauthenticated", %{conn: conn} do
|
||||
clear_config([:activitypub, :authorized_fetch_mode], true)
|
||||
|
||||
user = insert(:user)
|
||||
outbox_endpoint = user.ap_id <> "/outbox"
|
||||
|
||||
conn =
|
||||
conn
|
||||
|> put_req_header("accept", "application/activity+json")
|
||||
|> get(outbox_endpoint)
|
||||
|
||||
assert response(conn, 401)
|
||||
end
|
||||
end
|
||||
|
||||
describe "POST /users/:nickname/outbox (C2S)" do
|
||||
|
|
@ -2153,6 +2203,30 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
|
|||
|> post("/api/ap/upload_media", %{"file" => image, "description" => desc})
|
||||
|> json_response(403)
|
||||
end
|
||||
|
||||
test "they don't work when C2S API is disabled", %{conn: conn} do
|
||||
clear_config([:activitypub, :client_api_enabled], false)
|
||||
|
||||
user = insert(:user)
|
||||
|
||||
assert conn
|
||||
|> assign(:user, user)
|
||||
|> get("/api/ap/whoami")
|
||||
|> response(403)
|
||||
|
||||
desc = "Description of the image"
|
||||
|
||||
image = %Plug.Upload{
|
||||
content_type: "image/jpeg",
|
||||
path: Path.absname("test/fixtures/image.jpg"),
|
||||
filename: "an_image.jpg"
|
||||
}
|
||||
|
||||
assert conn
|
||||
|> assign(:user, user)
|
||||
|> post("/api/ap/upload_media", %{"file" => image, "description" => desc})
|
||||
|> response(403)
|
||||
end
|
||||
end
|
||||
|
||||
test "pinned collection", %{conn: conn} do
|
||||
|
|
|
|||
|
|
@ -1785,8 +1785,8 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
|
|||
user =
|
||||
insert(:user,
|
||||
local: false,
|
||||
follower_address: "http://localhost:4001/users/fuser2/followers",
|
||||
following_address: "http://localhost:4001/users/fuser2/following"
|
||||
follower_address: "https://remote.org/users/fuser2/followers",
|
||||
following_address: "https://remote.org/users/fuser2/following"
|
||||
)
|
||||
|
||||
{:ok, info} = ActivityPub.fetch_follow_information_for_user(user)
|
||||
|
|
@ -1797,7 +1797,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
|
|||
test "detects hidden followers" do
|
||||
mock(fn env ->
|
||||
case env.url do
|
||||
"http://localhost:4001/users/masto_closed/followers?page=1" ->
|
||||
"https://remote.org/users/masto_closed/followers?page=1" ->
|
||||
%Tesla.Env{status: 403, body: ""}
|
||||
|
||||
_ ->
|
||||
|
|
@ -1808,8 +1808,8 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
|
|||
user =
|
||||
insert(:user,
|
||||
local: false,
|
||||
follower_address: "http://localhost:4001/users/masto_closed/followers",
|
||||
following_address: "http://localhost:4001/users/masto_closed/following"
|
||||
follower_address: "https://remote.org/users/masto_closed/followers",
|
||||
following_address: "https://remote.org/users/masto_closed/following"
|
||||
)
|
||||
|
||||
{:ok, follow_info} = ActivityPub.fetch_follow_information_for_user(user)
|
||||
|
|
@ -1820,7 +1820,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
|
|||
test "detects hidden follows" do
|
||||
mock(fn env ->
|
||||
case env.url do
|
||||
"http://localhost:4001/users/masto_closed/following?page=1" ->
|
||||
"https://remote.org/users/masto_closed/following?page=1" ->
|
||||
%Tesla.Env{status: 403, body: ""}
|
||||
|
||||
_ ->
|
||||
|
|
@ -1831,8 +1831,8 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
|
|||
user =
|
||||
insert(:user,
|
||||
local: false,
|
||||
follower_address: "http://localhost:4001/users/masto_closed/followers",
|
||||
following_address: "http://localhost:4001/users/masto_closed/following"
|
||||
follower_address: "https://remote.org/users/masto_closed/followers",
|
||||
following_address: "https://remote.org/users/masto_closed/following"
|
||||
)
|
||||
|
||||
{:ok, follow_info} = ActivityPub.fetch_follow_information_for_user(user)
|
||||
|
|
@ -1844,8 +1844,8 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
|
|||
user =
|
||||
insert(:user,
|
||||
local: false,
|
||||
follower_address: "http://localhost:8080/followers/fuser3",
|
||||
following_address: "http://localhost:8080/following/fuser3"
|
||||
follower_address: "https://remote.org/followers/fuser3",
|
||||
following_address: "https://remote.org/following/fuser3"
|
||||
)
|
||||
|
||||
{:ok, follow_info} = ActivityPub.fetch_follow_information_for_user(user)
|
||||
|
|
@ -1858,28 +1858,28 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
|
|||
test "doesn't crash when follower and following counters are hidden" do
|
||||
mock(fn env ->
|
||||
case env.url do
|
||||
"http://localhost:4001/users/masto_hidden_counters/following" ->
|
||||
"https://remote.org/users/masto_hidden_counters/following" ->
|
||||
json(
|
||||
%{
|
||||
"@context" => "https://www.w3.org/ns/activitystreams",
|
||||
"id" => "http://localhost:4001/users/masto_hidden_counters/followers"
|
||||
"id" => "https://remote.org/users/masto_hidden_counters/followers"
|
||||
},
|
||||
headers: HttpRequestMock.activitypub_object_headers()
|
||||
)
|
||||
|
||||
"http://localhost:4001/users/masto_hidden_counters/following?page=1" ->
|
||||
"https://remote.org/users/masto_hidden_counters/following?page=1" ->
|
||||
%Tesla.Env{status: 403, body: ""}
|
||||
|
||||
"http://localhost:4001/users/masto_hidden_counters/followers" ->
|
||||
"https://remote.org/users/masto_hidden_counters/followers" ->
|
||||
json(
|
||||
%{
|
||||
"@context" => "https://www.w3.org/ns/activitystreams",
|
||||
"id" => "http://localhost:4001/users/masto_hidden_counters/following"
|
||||
"id" => "https://remote.org/users/masto_hidden_counters/following"
|
||||
},
|
||||
headers: HttpRequestMock.activitypub_object_headers()
|
||||
)
|
||||
|
||||
"http://localhost:4001/users/masto_hidden_counters/followers?page=1" ->
|
||||
"https://remote.org/users/masto_hidden_counters/followers?page=1" ->
|
||||
%Tesla.Env{status: 403, body: ""}
|
||||
end
|
||||
end)
|
||||
|
|
@ -1887,8 +1887,8 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
|
|||
user =
|
||||
insert(:user,
|
||||
local: false,
|
||||
follower_address: "http://localhost:4001/users/masto_hidden_counters/followers",
|
||||
following_address: "http://localhost:4001/users/masto_hidden_counters/following"
|
||||
follower_address: "https://remote.org/users/masto_hidden_counters/followers",
|
||||
following_address: "https://remote.org/users/masto_hidden_counters/following"
|
||||
)
|
||||
|
||||
{:ok, follow_info} = ActivityPub.fetch_follow_information_for_user(user)
|
||||
|
|
|
|||
|
|
@ -1,117 +0,0 @@
|
|||
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.Web.ActivityPub.MRF.FODirectReplyTest do
|
||||
use Pleroma.DataCase
|
||||
import Pleroma.Factory
|
||||
|
||||
require Pleroma.Constants
|
||||
|
||||
alias Pleroma.Object
|
||||
alias Pleroma.Web.ActivityPub.MRF.FODirectReply
|
||||
alias Pleroma.Web.CommonAPI
|
||||
|
||||
test "replying to followers-only/private is changed to direct" do
|
||||
batman = insert(:user, nickname: "batman")
|
||||
robin = insert(:user, nickname: "robin")
|
||||
|
||||
{:ok, post} =
|
||||
CommonAPI.post(batman, %{
|
||||
status: "Has anyone seen Selina Kyle's latest selfies?",
|
||||
visibility: "private"
|
||||
})
|
||||
|
||||
reply = %{
|
||||
"type" => "Create",
|
||||
"actor" => robin.ap_id,
|
||||
"to" => [batman.ap_id, robin.follower_address],
|
||||
"cc" => [],
|
||||
"object" => %{
|
||||
"type" => "Note",
|
||||
"actor" => robin.ap_id,
|
||||
"content" => "@batman 🤤 ❤️ 🐈⬛",
|
||||
"to" => [batman.ap_id, robin.follower_address],
|
||||
"cc" => [],
|
||||
"inReplyTo" => Object.normalize(post).data["id"]
|
||||
}
|
||||
}
|
||||
|
||||
expected_to = [batman.ap_id]
|
||||
expected_cc = []
|
||||
|
||||
assert {:ok, filtered} = FODirectReply.filter(reply)
|
||||
|
||||
assert expected_to == filtered["to"]
|
||||
assert expected_cc == filtered["cc"]
|
||||
assert expected_to == filtered["object"]["to"]
|
||||
assert expected_cc == filtered["object"]["cc"]
|
||||
end
|
||||
|
||||
test "replies to unlisted posts are unmodified" do
|
||||
batman = insert(:user, nickname: "batman")
|
||||
robin = insert(:user, nickname: "robin")
|
||||
|
||||
{:ok, post} =
|
||||
CommonAPI.post(batman, %{
|
||||
status: "Has anyone seen Selina Kyle's latest selfies?",
|
||||
visibility: "unlisted"
|
||||
})
|
||||
|
||||
reply = %{
|
||||
"type" => "Create",
|
||||
"actor" => robin.ap_id,
|
||||
"to" => [batman.ap_id, robin.follower_address],
|
||||
"cc" => [],
|
||||
"object" => %{
|
||||
"type" => "Note",
|
||||
"actor" => robin.ap_id,
|
||||
"content" => "@batman 🤤 ❤️ 🐈<200d>⬛",
|
||||
"to" => [batman.ap_id, robin.follower_address],
|
||||
"cc" => [],
|
||||
"inReplyTo" => Object.normalize(post).data["id"]
|
||||
}
|
||||
}
|
||||
|
||||
assert {:ok, filtered} = FODirectReply.filter(reply)
|
||||
|
||||
assert match?(^filtered, reply)
|
||||
end
|
||||
|
||||
test "replies to public posts are unmodified" do
|
||||
batman = insert(:user, nickname: "batman")
|
||||
robin = insert(:user, nickname: "robin")
|
||||
|
||||
{:ok, post} =
|
||||
CommonAPI.post(batman, %{status: "Has anyone seen Selina Kyle's latest selfies?"})
|
||||
|
||||
reply = %{
|
||||
"type" => "Create",
|
||||
"actor" => robin.ap_id,
|
||||
"to" => [batman.ap_id, robin.follower_address],
|
||||
"cc" => [],
|
||||
"object" => %{
|
||||
"type" => "Note",
|
||||
"actor" => robin.ap_id,
|
||||
"content" => "@batman 🤤 ❤️ 🐈<200d>⬛",
|
||||
"to" => [batman.ap_id, robin.follower_address],
|
||||
"cc" => [],
|
||||
"inReplyTo" => Object.normalize(post).data["id"]
|
||||
}
|
||||
}
|
||||
|
||||
assert {:ok, filtered} = FODirectReply.filter(reply)
|
||||
|
||||
assert match?(^filtered, reply)
|
||||
end
|
||||
|
||||
test "non-reply posts are unmodified" do
|
||||
batman = insert(:user, nickname: "batman")
|
||||
|
||||
{:ok, post} = CommonAPI.post(batman, %{status: "To the Batmobile!"})
|
||||
|
||||
assert {:ok, filtered} = FODirectReply.filter(post)
|
||||
|
||||
assert match?(^filtered, post)
|
||||
end
|
||||
end
|
||||
|
|
@ -1,140 +0,0 @@
|
|||
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.Web.ActivityPub.MRF.QuietReplyTest do
|
||||
use Pleroma.DataCase
|
||||
import Pleroma.Factory
|
||||
|
||||
require Pleroma.Constants
|
||||
|
||||
alias Pleroma.Object
|
||||
alias Pleroma.Web.ActivityPub.MRF.QuietReply
|
||||
alias Pleroma.Web.CommonAPI
|
||||
|
||||
test "replying to public post is forced to be quiet" do
|
||||
batman = insert(:user, nickname: "batman")
|
||||
robin = insert(:user, nickname: "robin")
|
||||
|
||||
{:ok, post} = CommonAPI.post(batman, %{status: "To the Batmobile!"})
|
||||
|
||||
reply = %{
|
||||
"type" => "Create",
|
||||
"actor" => robin.ap_id,
|
||||
"to" => [
|
||||
batman.ap_id,
|
||||
Pleroma.Constants.as_public()
|
||||
],
|
||||
"cc" => [robin.follower_address],
|
||||
"object" => %{
|
||||
"type" => "Note",
|
||||
"actor" => robin.ap_id,
|
||||
"content" => "@batman Wait up, I forgot my spandex!",
|
||||
"to" => [
|
||||
batman.ap_id,
|
||||
Pleroma.Constants.as_public()
|
||||
],
|
||||
"cc" => [robin.follower_address],
|
||||
"inReplyTo" => Object.normalize(post).data["id"]
|
||||
}
|
||||
}
|
||||
|
||||
expected_to = [batman.ap_id, robin.follower_address]
|
||||
expected_cc = [Pleroma.Constants.as_public()]
|
||||
|
||||
assert {:ok, filtered} = QuietReply.filter(reply)
|
||||
|
||||
assert expected_to == filtered["to"]
|
||||
assert expected_cc == filtered["cc"]
|
||||
assert expected_to == filtered["object"]["to"]
|
||||
assert expected_cc == filtered["object"]["cc"]
|
||||
end
|
||||
|
||||
test "replying to unlisted post is unmodified" do
|
||||
batman = insert(:user, nickname: "batman")
|
||||
robin = insert(:user, nickname: "robin")
|
||||
|
||||
{:ok, post} = CommonAPI.post(batman, %{status: "To the Batmobile!", visibility: "private"})
|
||||
|
||||
reply = %{
|
||||
"type" => "Create",
|
||||
"actor" => robin.ap_id,
|
||||
"to" => [batman.ap_id],
|
||||
"cc" => [],
|
||||
"object" => %{
|
||||
"type" => "Note",
|
||||
"actor" => robin.ap_id,
|
||||
"content" => "@batman Wait up, I forgot my spandex!",
|
||||
"to" => [batman.ap_id],
|
||||
"cc" => [],
|
||||
"inReplyTo" => Object.normalize(post).data["id"]
|
||||
}
|
||||
}
|
||||
|
||||
assert {:ok, filtered} = QuietReply.filter(reply)
|
||||
|
||||
assert match?(^filtered, reply)
|
||||
end
|
||||
|
||||
test "replying direct is unmodified" do
|
||||
batman = insert(:user, nickname: "batman")
|
||||
robin = insert(:user, nickname: "robin")
|
||||
|
||||
{:ok, post} = CommonAPI.post(batman, %{status: "To the Batmobile!"})
|
||||
|
||||
reply = %{
|
||||
"type" => "Create",
|
||||
"actor" => robin.ap_id,
|
||||
"to" => [batman.ap_id],
|
||||
"cc" => [],
|
||||
"object" => %{
|
||||
"type" => "Note",
|
||||
"actor" => robin.ap_id,
|
||||
"content" => "@batman Wait up, I forgot my spandex!",
|
||||
"to" => [batman.ap_id],
|
||||
"cc" => [],
|
||||
"inReplyTo" => Object.normalize(post).data["id"]
|
||||
}
|
||||
}
|
||||
|
||||
assert {:ok, filtered} = QuietReply.filter(reply)
|
||||
|
||||
assert match?(^filtered, reply)
|
||||
end
|
||||
|
||||
test "replying followers-only is unmodified" do
|
||||
batman = insert(:user, nickname: "batman")
|
||||
robin = insert(:user, nickname: "robin")
|
||||
|
||||
{:ok, post} = CommonAPI.post(batman, %{status: "To the Batmobile!"})
|
||||
|
||||
reply = %{
|
||||
"type" => "Create",
|
||||
"actor" => robin.ap_id,
|
||||
"to" => [batman.ap_id, robin.follower_address],
|
||||
"cc" => [],
|
||||
"object" => %{
|
||||
"type" => "Note",
|
||||
"actor" => robin.ap_id,
|
||||
"content" => "@batman Wait up, I forgot my spandex!",
|
||||
"to" => [batman.ap_id, robin.follower_address],
|
||||
"cc" => [],
|
||||
"inReplyTo" => Object.normalize(post).data["id"]
|
||||
}
|
||||
}
|
||||
|
||||
assert {:ok, filtered} = QuietReply.filter(reply)
|
||||
|
||||
assert match?(^filtered, reply)
|
||||
end
|
||||
|
||||
test "non-reply posts are unmodified" do
|
||||
batman = insert(:user, nickname: "batman")
|
||||
|
||||
{:ok, post} = CommonAPI.post(batman, %{status: "To the Batmobile!"})
|
||||
|
||||
assert {:ok, filtered} = QuietReply.filter(post)
|
||||
|
||||
assert match?(^filtered, post)
|
||||
end
|
||||
end
|
||||
|
|
@ -87,7 +87,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.StealEmojiPolicyTest do
|
|||
assert File.exists?(fullpath)
|
||||
end
|
||||
|
||||
test "rejects invalid shortcodes", %{path: path} do
|
||||
test "rejects invalid shortcodes with slashes", %{path: path} do
|
||||
message = %{
|
||||
"type" => "Create",
|
||||
"object" => %{
|
||||
|
|
@ -113,6 +113,58 @@ defmodule Pleroma.Web.ActivityPub.MRF.StealEmojiPolicyTest do
|
|||
refute File.exists?(fullpath)
|
||||
end
|
||||
|
||||
test "rejects invalid shortcodes with dots", %{path: path} do
|
||||
message = %{
|
||||
"type" => "Create",
|
||||
"object" => %{
|
||||
"emoji" => [{"fired.fox", "https://example.org/emoji/firedfox"}],
|
||||
"actor" => "https://example.org/users/admin"
|
||||
}
|
||||
}
|
||||
|
||||
fullpath = Path.join(path, "fired.fox.png")
|
||||
|
||||
Tesla.Mock.mock(fn %{method: :get, url: "https://example.org/emoji/firedfox"} ->
|
||||
%Tesla.Env{status: 200, body: File.read!("test/fixtures/image.jpg")}
|
||||
end)
|
||||
|
||||
clear_config(:mrf_steal_emoji, hosts: ["example.org"], size_limit: 284_468)
|
||||
|
||||
refute "fired.fox" in installed()
|
||||
refute File.exists?(path)
|
||||
|
||||
assert {:ok, _message} = StealEmojiPolicy.filter(message)
|
||||
|
||||
refute "fired.fox" in installed()
|
||||
refute File.exists?(fullpath)
|
||||
end
|
||||
|
||||
test "rejects invalid shortcodes with special characters", %{path: path} do
|
||||
message = %{
|
||||
"type" => "Create",
|
||||
"object" => %{
|
||||
"emoji" => [{"fired:fox", "https://example.org/emoji/firedfox"}],
|
||||
"actor" => "https://example.org/users/admin"
|
||||
}
|
||||
}
|
||||
|
||||
fullpath = Path.join(path, "fired:fox.png")
|
||||
|
||||
Tesla.Mock.mock(fn %{method: :get, url: "https://example.org/emoji/firedfox"} ->
|
||||
%Tesla.Env{status: 200, body: File.read!("test/fixtures/image.jpg")}
|
||||
end)
|
||||
|
||||
clear_config(:mrf_steal_emoji, hosts: ["example.org"], size_limit: 284_468)
|
||||
|
||||
refute "fired:fox" in installed()
|
||||
refute File.exists?(path)
|
||||
|
||||
assert {:ok, _message} = StealEmojiPolicy.filter(message)
|
||||
|
||||
refute "fired:fox" in installed()
|
||||
refute File.exists?(fullpath)
|
||||
end
|
||||
|
||||
test "reject regex shortcode", %{message: message} do
|
||||
refute "firedfox" in installed()
|
||||
|
||||
|
|
@ -171,5 +223,74 @@ defmodule Pleroma.Web.ActivityPub.MRF.StealEmojiPolicyTest do
|
|||
refute "firedfox" in installed()
|
||||
end
|
||||
|
||||
test "accepts valid alphanum shortcodes", %{path: path} do
|
||||
message = %{
|
||||
"type" => "Create",
|
||||
"object" => %{
|
||||
"emoji" => [{"fire1fox", "https://example.org/emoji/fire1fox.png"}],
|
||||
"actor" => "https://example.org/users/admin"
|
||||
}
|
||||
}
|
||||
|
||||
Tesla.Mock.mock(fn %{method: :get, url: "https://example.org/emoji/fire1fox.png"} ->
|
||||
%Tesla.Env{status: 200, body: File.read!("test/fixtures/image.jpg")}
|
||||
end)
|
||||
|
||||
clear_config(:mrf_steal_emoji, hosts: ["example.org"], size_limit: 284_468)
|
||||
|
||||
refute "fire1fox" in installed()
|
||||
refute File.exists?(path)
|
||||
|
||||
assert {:ok, _message} = StealEmojiPolicy.filter(message)
|
||||
|
||||
assert "fire1fox" in installed()
|
||||
end
|
||||
|
||||
test "accepts valid shortcodes with underscores", %{path: path} do
|
||||
message = %{
|
||||
"type" => "Create",
|
||||
"object" => %{
|
||||
"emoji" => [{"fire_fox", "https://example.org/emoji/fire_fox.png"}],
|
||||
"actor" => "https://example.org/users/admin"
|
||||
}
|
||||
}
|
||||
|
||||
Tesla.Mock.mock(fn %{method: :get, url: "https://example.org/emoji/fire_fox.png"} ->
|
||||
%Tesla.Env{status: 200, body: File.read!("test/fixtures/image.jpg")}
|
||||
end)
|
||||
|
||||
clear_config(:mrf_steal_emoji, hosts: ["example.org"], size_limit: 284_468)
|
||||
|
||||
refute "fire_fox" in installed()
|
||||
refute File.exists?(path)
|
||||
|
||||
assert {:ok, _message} = StealEmojiPolicy.filter(message)
|
||||
|
||||
assert "fire_fox" in installed()
|
||||
end
|
||||
|
||||
test "accepts valid shortcodes with hyphens", %{path: path} do
|
||||
message = %{
|
||||
"type" => "Create",
|
||||
"object" => %{
|
||||
"emoji" => [{"fire-fox", "https://example.org/emoji/fire-fox.png"}],
|
||||
"actor" => "https://example.org/users/admin"
|
||||
}
|
||||
}
|
||||
|
||||
Tesla.Mock.mock(fn %{method: :get, url: "https://example.org/emoji/fire-fox.png"} ->
|
||||
%Tesla.Env{status: 200, body: File.read!("test/fixtures/image.jpg")}
|
||||
end)
|
||||
|
||||
clear_config(:mrf_steal_emoji, hosts: ["example.org"], size_limit: 284_468)
|
||||
|
||||
refute "fire-fox" in installed()
|
||||
refute File.exists?(path)
|
||||
|
||||
assert {:ok, _message} = StealEmojiPolicy.filter(message)
|
||||
|
||||
assert "fire-fox" in installed()
|
||||
end
|
||||
|
||||
defp installed, do: Emoji.get_all() |> Enum.map(fn {k, _} -> k end)
|
||||
end
|
||||
|
|
|
|||
|
|
@ -5,12 +5,33 @@
|
|||
defmodule Pleroma.Web.ActivityPub.ObjectValidators.ArticleNotePageValidatorTest do
|
||||
use Pleroma.DataCase, async: true
|
||||
|
||||
alias Pleroma.Language.LanguageDetectorMock
|
||||
alias Pleroma.StaticStubbedConfigMock
|
||||
alias Pleroma.Web.ActivityPub.ObjectValidator
|
||||
alias Pleroma.Web.ActivityPub.ObjectValidators.ArticleNotePageValidator
|
||||
alias Pleroma.Web.ActivityPub.Utils
|
||||
|
||||
import Mox
|
||||
import Pleroma.Factory
|
||||
|
||||
# Setup for all tests
|
||||
setup do
|
||||
# Stub the StaticStubbedConfigMock to return our mock for the provider
|
||||
StaticStubbedConfigMock
|
||||
|> stub(:get, fn
|
||||
[Pleroma.Language.LanguageDetector, :provider] -> LanguageDetectorMock
|
||||
_other -> nil
|
||||
end)
|
||||
|
||||
# Stub the LanguageDetectorMock with default implementations
|
||||
LanguageDetectorMock
|
||||
|> stub(:missing_dependencies, fn -> [] end)
|
||||
|> stub(:configured?, fn -> true end)
|
||||
|> stub(:detect, fn _text -> nil end)
|
||||
|
||||
:ok
|
||||
end
|
||||
|
||||
describe "Notes" do
|
||||
setup do
|
||||
user = insert(:user)
|
||||
|
|
@ -234,6 +255,37 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.ArticleNotePageValidatorTest
|
|||
assert object.language == "pl"
|
||||
end
|
||||
|
||||
test "it doesn't call LanguageDetector when language is specified" do
|
||||
# Set up expectation that detect should not be called
|
||||
LanguageDetectorMock
|
||||
|> expect(:detect, 0, fn _ -> flunk("LanguageDetector.detect should not be called") end)
|
||||
|> stub(:missing_dependencies, fn -> [] end)
|
||||
|> stub(:configured?, fn -> true end)
|
||||
|
||||
# Stub the StaticStubbedConfigMock to return our mock for the provider
|
||||
StaticStubbedConfigMock
|
||||
|> stub(:get, fn
|
||||
[Pleroma.Language.LanguageDetector, :provider] -> LanguageDetectorMock
|
||||
_other -> nil
|
||||
end)
|
||||
|
||||
user = insert(:user)
|
||||
|
||||
note = %{
|
||||
"to" => ["https://www.w3.org/ns/activitystreams#Public"],
|
||||
"cc" => [],
|
||||
"id" => Utils.generate_object_id(),
|
||||
"type" => "Note",
|
||||
"content" => "a post in English",
|
||||
"contentMap" => %{
|
||||
"en" => "a post in English"
|
||||
},
|
||||
"attributedTo" => user.ap_id
|
||||
}
|
||||
|
||||
ArticleNotePageValidator.cast_and_apply(note)
|
||||
end
|
||||
|
||||
test "it adds contentMap if language is specified" do
|
||||
user = insert(:user)
|
||||
|
||||
|
|
|
|||
|
|
@ -13,6 +13,23 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidatorTest do
|
|||
import Pleroma.Factory
|
||||
|
||||
describe "attachments" do
|
||||
test "works with apng" do
|
||||
attachment =
|
||||
%{
|
||||
"mediaType" => "image/apng",
|
||||
"name" => "",
|
||||
"type" => "Document",
|
||||
"url" =>
|
||||
"https://media.misskeyusercontent.com/io/2859c26e-cd43-4550-848b-b6243bc3fe28.apng"
|
||||
}
|
||||
|
||||
assert {:ok, attachment} =
|
||||
AttachmentValidator.cast_and_validate(attachment)
|
||||
|> Ecto.Changeset.apply_action(:insert)
|
||||
|
||||
assert attachment.mediaType == "image/apng"
|
||||
end
|
||||
|
||||
test "fails without url" do
|
||||
attachment = %{
|
||||
"mediaType" => "",
|
||||
|
|
|
|||
|
|
@ -156,6 +156,246 @@ defmodule Pleroma.Web.ActivityPub.TransmogrifierTest do
|
|||
# It fetched the quoted post
|
||||
assert Object.normalize("https://misskey.io/notes/8vs6wxufd0")
|
||||
end
|
||||
|
||||
test "doesn't allow remote edits to fake local likes" do
|
||||
# as a spot check for no internal fields getting injected
|
||||
now = DateTime.utc_now()
|
||||
pub_date = DateTime.to_iso8601(Timex.subtract(now, Timex.Duration.from_minutes(3)))
|
||||
edit_date = DateTime.to_iso8601(now)
|
||||
|
||||
local_user = insert(:user)
|
||||
|
||||
create_data = %{
|
||||
"type" => "Create",
|
||||
"id" => "http://mastodon.example.org/users/admin/statuses/2619539638/activity",
|
||||
"actor" => "http://mastodon.example.org/users/admin",
|
||||
"to" => ["https://www.w3.org/ns/activitystreams#Public"],
|
||||
"cc" => [],
|
||||
"object" => %{
|
||||
"type" => "Note",
|
||||
"id" => "http://mastodon.example.org/users/admin/statuses/2619539638",
|
||||
"attributedTo" => "http://mastodon.example.org/users/admin",
|
||||
"to" => ["https://www.w3.org/ns/activitystreams#Public"],
|
||||
"cc" => [],
|
||||
"published" => pub_date,
|
||||
"content" => "miaow",
|
||||
"likes" => [local_user.ap_id]
|
||||
}
|
||||
}
|
||||
|
||||
update_data =
|
||||
create_data
|
||||
|> Map.put("type", "Update")
|
||||
|> Map.put("id", create_data["object"]["id"] <> "/update/1")
|
||||
|> put_in(["object", "content"], "miaow :3")
|
||||
|> put_in(["object", "updated"], edit_date)
|
||||
|> put_in(["object", "formerRepresentations"], %{
|
||||
"type" => "OrderedCollection",
|
||||
"totalItems" => 1,
|
||||
"orderedItems" => [create_data["object"]]
|
||||
})
|
||||
|
||||
{:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(create_data)
|
||||
%Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"])
|
||||
assert object.data["content"] == "miaow"
|
||||
assert object.data["likes"] == []
|
||||
assert object.data["like_count"] == 0
|
||||
|
||||
{:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(update_data)
|
||||
%Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"]["id"])
|
||||
assert object.data["content"] == "miaow :3"
|
||||
assert object.data["likes"] == []
|
||||
assert object.data["like_count"] == 0
|
||||
end
|
||||
|
||||
test "strips internal fields from history items in edited notes" do
|
||||
now = DateTime.utc_now()
|
||||
pub_date = DateTime.to_iso8601(Timex.subtract(now, Timex.Duration.from_minutes(3)))
|
||||
edit_date = DateTime.to_iso8601(now)
|
||||
|
||||
local_user = insert(:user)
|
||||
|
||||
create_data = %{
|
||||
"type" => "Create",
|
||||
"id" => "http://mastodon.example.org/users/admin/statuses/2619539638/activity",
|
||||
"actor" => "http://mastodon.example.org/users/admin",
|
||||
"to" => ["https://www.w3.org/ns/activitystreams#Public"],
|
||||
"cc" => [],
|
||||
"object" => %{
|
||||
"type" => "Note",
|
||||
"id" => "http://mastodon.example.org/users/admin/statuses/2619539638",
|
||||
"attributedTo" => "http://mastodon.example.org/users/admin",
|
||||
"to" => ["https://www.w3.org/ns/activitystreams#Public"],
|
||||
"cc" => [],
|
||||
"published" => pub_date,
|
||||
"content" => "miaow",
|
||||
"likes" => [],
|
||||
"like_count" => 0
|
||||
}
|
||||
}
|
||||
|
||||
update_data =
|
||||
create_data
|
||||
|> Map.put("type", "Update")
|
||||
|> Map.put("id", create_data["object"]["id"] <> "/update/1")
|
||||
|> put_in(["object", "content"], "miaow :3")
|
||||
|> put_in(["object", "updated"], edit_date)
|
||||
|> put_in(["object", "formerRepresentations"], %{
|
||||
"type" => "OrderedCollection",
|
||||
"totalItems" => 1,
|
||||
"orderedItems" => [
|
||||
Map.merge(create_data["object"], %{
|
||||
"likes" => [local_user.ap_id],
|
||||
"like_count" => 1,
|
||||
"pleroma" => %{"internal_field" => "should_be_stripped"}
|
||||
})
|
||||
]
|
||||
})
|
||||
|
||||
{:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(create_data)
|
||||
%Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"])
|
||||
assert object.data["content"] == "miaow"
|
||||
assert object.data["likes"] == []
|
||||
assert object.data["like_count"] == 0
|
||||
|
||||
{:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(update_data)
|
||||
%Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"]["id"])
|
||||
assert object.data["content"] == "miaow :3"
|
||||
assert object.data["likes"] == []
|
||||
assert object.data["like_count"] == 0
|
||||
|
||||
# Check that internal fields are stripped from history items
|
||||
history_item = List.first(object.data["formerRepresentations"]["orderedItems"])
|
||||
assert history_item["likes"] == []
|
||||
assert history_item["like_count"] == 0
|
||||
refute Map.has_key?(history_item, "pleroma")
|
||||
end
|
||||
|
||||
test "doesn't trip over remote likes in notes" do
|
||||
now = DateTime.utc_now()
|
||||
pub_date = DateTime.to_iso8601(Timex.subtract(now, Timex.Duration.from_minutes(3)))
|
||||
edit_date = DateTime.to_iso8601(now)
|
||||
|
||||
create_data = %{
|
||||
"type" => "Create",
|
||||
"id" => "http://mastodon.example.org/users/admin/statuses/3409297097/activity",
|
||||
"actor" => "http://mastodon.example.org/users/admin",
|
||||
"to" => ["https://www.w3.org/ns/activitystreams#Public"],
|
||||
"cc" => [],
|
||||
"object" => %{
|
||||
"type" => "Note",
|
||||
"id" => "http://mastodon.example.org/users/admin/statuses/3409297097",
|
||||
"attributedTo" => "http://mastodon.example.org/users/admin",
|
||||
"to" => ["https://www.w3.org/ns/activitystreams#Public"],
|
||||
"cc" => [],
|
||||
"published" => pub_date,
|
||||
"content" => "miaow",
|
||||
"likes" => %{
|
||||
"id" => "http://mastodon.example.org/users/admin/statuses/3409297097/likes",
|
||||
"totalItems" => 0,
|
||||
"type" => "Collection"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
update_data =
|
||||
create_data
|
||||
|> Map.put("type", "Update")
|
||||
|> Map.put("id", create_data["object"]["id"] <> "/update/1")
|
||||
|> put_in(["object", "content"], "miaow :3")
|
||||
|> put_in(["object", "updated"], edit_date)
|
||||
|> put_in(["object", "likes", "totalItems"], 666)
|
||||
|> put_in(["object", "formerRepresentations"], %{
|
||||
"type" => "OrderedCollection",
|
||||
"totalItems" => 1,
|
||||
"orderedItems" => [create_data["object"]]
|
||||
})
|
||||
|
||||
{:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(create_data)
|
||||
%Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"])
|
||||
assert object.data["content"] == "miaow"
|
||||
assert object.data["likes"] == []
|
||||
assert object.data["like_count"] == 0
|
||||
|
||||
{:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(update_data)
|
||||
%Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"]["id"])
|
||||
assert object.data["content"] == "miaow :3"
|
||||
assert object.data["likes"] == []
|
||||
# in the future this should retain remote likes, but for now:
|
||||
assert object.data["like_count"] == 0
|
||||
end
|
||||
|
||||
test "doesn't trip over remote likes in polls" do
|
||||
now = DateTime.utc_now()
|
||||
pub_date = DateTime.to_iso8601(Timex.subtract(now, Timex.Duration.from_minutes(3)))
|
||||
edit_date = DateTime.to_iso8601(now)
|
||||
|
||||
create_data = %{
|
||||
"type" => "Create",
|
||||
"id" => "http://mastodon.example.org/users/admin/statuses/2471790073/activity",
|
||||
"actor" => "http://mastodon.example.org/users/admin",
|
||||
"to" => ["https://www.w3.org/ns/activitystreams#Public"],
|
||||
"cc" => [],
|
||||
"object" => %{
|
||||
"type" => "Question",
|
||||
"id" => "http://mastodon.example.org/users/admin/statuses/2471790073",
|
||||
"attributedTo" => "http://mastodon.example.org/users/admin",
|
||||
"to" => ["https://www.w3.org/ns/activitystreams#Public"],
|
||||
"cc" => [],
|
||||
"published" => pub_date,
|
||||
"content" => "vote!",
|
||||
"anyOf" => [
|
||||
%{
|
||||
"type" => "Note",
|
||||
"name" => "a",
|
||||
"replies" => %{
|
||||
"type" => "Collection",
|
||||
"totalItems" => 3
|
||||
}
|
||||
},
|
||||
%{
|
||||
"type" => "Note",
|
||||
"name" => "b",
|
||||
"replies" => %{
|
||||
"type" => "Collection",
|
||||
"totalItems" => 1
|
||||
}
|
||||
}
|
||||
],
|
||||
"likes" => %{
|
||||
"id" => "http://mastodon.example.org/users/admin/statuses/2471790073/likes",
|
||||
"totalItems" => 0,
|
||||
"type" => "Collection"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
update_data =
|
||||
create_data
|
||||
|> Map.put("type", "Update")
|
||||
|> Map.put("id", create_data["object"]["id"] <> "/update/1")
|
||||
|> put_in(["object", "content"], "vote now!")
|
||||
|> put_in(["object", "updated"], edit_date)
|
||||
|> put_in(["object", "likes", "totalItems"], 666)
|
||||
|> put_in(["object", "formerRepresentations"], %{
|
||||
"type" => "OrderedCollection",
|
||||
"totalItems" => 1,
|
||||
"orderedItems" => [create_data["object"]]
|
||||
})
|
||||
|
||||
{:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(create_data)
|
||||
%Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"])
|
||||
assert object.data["content"] == "vote!"
|
||||
assert object.data["likes"] == []
|
||||
assert object.data["like_count"] == 0
|
||||
|
||||
{:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(update_data)
|
||||
%Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"]["id"])
|
||||
assert object.data["content"] == "vote now!"
|
||||
assert object.data["likes"] == []
|
||||
# in the future this should retain remote likes, but for now:
|
||||
assert object.data["like_count"] == 0
|
||||
end
|
||||
end
|
||||
|
||||
describe "prepare outgoing" do
|
||||
|
|
|
|||
|
|
@ -1211,8 +1211,6 @@ defmodule Pleroma.Web.AdminAPI.ConfigControllerTest do
|
|||
end
|
||||
|
||||
test "args for Pleroma.Upload.Filter.Mogrify with custom tuples", %{conn: conn} do
|
||||
clear_config(Pleroma.Upload.Filter.Mogrify)
|
||||
|
||||
assert conn
|
||||
|> put_req_header("content-type", "application/json")
|
||||
|> post("/api/pleroma/admin/config", %{
|
||||
|
|
@ -1240,7 +1238,8 @@ defmodule Pleroma.Web.AdminAPI.ConfigControllerTest do
|
|||
"need_reboot" => false
|
||||
}
|
||||
|
||||
assert Config.get(Pleroma.Upload.Filter.Mogrify) == [args: ["auto-orient", "strip"]]
|
||||
config = Config.get(Pleroma.Upload.Filter.Mogrify)
|
||||
assert {:args, ["auto-orient", "strip"]} in config
|
||||
|
||||
assert conn
|
||||
|> put_req_header("content-type", "application/json")
|
||||
|
|
@ -1289,9 +1288,9 @@ defmodule Pleroma.Web.AdminAPI.ConfigControllerTest do
|
|||
"need_reboot" => false
|
||||
}
|
||||
|
||||
assert Config.get(Pleroma.Upload.Filter.Mogrify) == [
|
||||
args: ["auto-orient", "strip", {"implode", "1"}, {"resize", "3840x1080>"}]
|
||||
]
|
||||
config = Config.get(Pleroma.Upload.Filter.Mogrify)
|
||||
|
||||
assert {:args, ["auto-orient", "strip", {"implode", "1"}, {"resize", "3840x1080>"}]} in config
|
||||
end
|
||||
|
||||
test "enables the welcome messages", %{conn: conn} do
|
||||
|
|
|
|||
|
|
@ -20,7 +20,7 @@ defmodule Pleroma.Web.AdminAPI.UserControllerTest do
|
|||
alias Pleroma.Web.MediaProxy
|
||||
|
||||
setup do
|
||||
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Config)
|
||||
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Test.StaticConfig)
|
||||
:ok
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -19,7 +19,7 @@ defmodule Pleroma.Web.MastodonAPI.AccountControllerTest do
|
|||
import Pleroma.Factory
|
||||
|
||||
setup do
|
||||
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Config)
|
||||
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Test.StaticConfig)
|
||||
:ok
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -227,4 +227,93 @@ defmodule Pleroma.Web.MastodonAPI.MediaControllerTest do
|
|||
|> json_response_and_validate_schema(403)
|
||||
end
|
||||
end
|
||||
|
||||
describe "Content-Type sanitization" do
|
||||
setup do: oauth_access(["write:media", "read:media"])
|
||||
|
||||
setup do
|
||||
ConfigMock
|
||||
|> stub_with(Pleroma.Test.StaticConfig)
|
||||
|
||||
config =
|
||||
Pleroma.Config.get([Pleroma.Upload])
|
||||
|> Keyword.put(:uploader, Pleroma.Uploaders.Local)
|
||||
|
||||
clear_config([Pleroma.Upload], config)
|
||||
clear_config([Pleroma.Upload, :allowed_mime_types], ["image", "audio", "video"])
|
||||
|
||||
# Create a file with a malicious content type and dangerous extension
|
||||
malicious_file = %Plug.Upload{
|
||||
content_type: "application/activity+json",
|
||||
path: Path.absname("test/fixtures/image.jpg"),
|
||||
# JSON extension to make MIME.from_path detect application/json
|
||||
filename: "malicious.json"
|
||||
}
|
||||
|
||||
[malicious_file: malicious_file]
|
||||
end
|
||||
|
||||
test "sanitizes malicious content types when serving media", %{
|
||||
conn: conn,
|
||||
malicious_file: malicious_file
|
||||
} do
|
||||
# First upload the file with the malicious content type
|
||||
media =
|
||||
conn
|
||||
|> put_req_header("content-type", "multipart/form-data")
|
||||
|> post("/api/v1/media", %{"file" => malicious_file})
|
||||
|> json_response_and_validate_schema(:ok)
|
||||
|
||||
# Get the file URL from the response
|
||||
url = media["url"]
|
||||
|
||||
# Now make a direct request to the media URL and check the content-type header
|
||||
response =
|
||||
build_conn()
|
||||
|> get(URI.parse(url).path)
|
||||
|
||||
# Find the content-type header
|
||||
content_type_header =
|
||||
Enum.find(response.resp_headers, fn {name, _} -> name == "content-type" end)
|
||||
|
||||
# The server should detect the application/json MIME type from the .json extension
|
||||
# and replace it with application/octet-stream since it's not in allowed_mime_types
|
||||
assert content_type_header == {"content-type", "application/octet-stream"}
|
||||
|
||||
# Verify that the file was still served correctly
|
||||
assert response.status == 200
|
||||
end
|
||||
|
||||
test "allows safe content types", %{conn: conn} do
|
||||
safe_image = %Plug.Upload{
|
||||
content_type: "image/jpeg",
|
||||
path: Path.absname("test/fixtures/image.jpg"),
|
||||
filename: "safe_image.jpg"
|
||||
}
|
||||
|
||||
# Upload a file with a safe content type
|
||||
media =
|
||||
conn
|
||||
|> put_req_header("content-type", "multipart/form-data")
|
||||
|> post("/api/v1/media", %{"file" => safe_image})
|
||||
|> json_response_and_validate_schema(:ok)
|
||||
|
||||
# Get the file URL from the response
|
||||
url = media["url"]
|
||||
|
||||
# Make a direct request to the media URL and check the content-type header
|
||||
response =
|
||||
build_conn()
|
||||
|> get(URI.parse(url).path)
|
||||
|
||||
# The server should preserve the image/jpeg MIME type since it's allowed
|
||||
content_type_header =
|
||||
Enum.find(response.resp_headers, fn {name, _} -> name == "content-type" end)
|
||||
|
||||
assert content_type_header == {"content-type", "image/jpeg"}
|
||||
|
||||
# Verify that the file was served correctly
|
||||
assert response.status == 200
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -13,7 +13,7 @@ defmodule Pleroma.Web.MastodonAPI.NotificationControllerTest do
|
|||
import Pleroma.Factory
|
||||
|
||||
setup do
|
||||
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Config)
|
||||
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Test.StaticConfig)
|
||||
:ok
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -14,7 +14,7 @@ defmodule Pleroma.Web.MastodonAPI.SearchControllerTest do
|
|||
import Mock
|
||||
|
||||
setup do
|
||||
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Config)
|
||||
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Test.StaticConfig)
|
||||
:ok
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -23,7 +23,7 @@ defmodule Pleroma.Web.MastodonAPI.NotificationViewTest do
|
|||
import Pleroma.Factory
|
||||
|
||||
setup do
|
||||
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Config)
|
||||
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Test.StaticConfig)
|
||||
:ok
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -9,6 +9,7 @@ defmodule Pleroma.Web.Metadata.Providers.OpenGraphTest do
|
|||
|
||||
alias Pleroma.UnstubbedConfigMock, as: ConfigMock
|
||||
alias Pleroma.Web.Metadata.Providers.OpenGraph
|
||||
alias Pleroma.Web.Metadata.Utils
|
||||
|
||||
setup do
|
||||
ConfigMock
|
||||
|
|
@ -197,4 +198,58 @@ defmodule Pleroma.Web.Metadata.Providers.OpenGraphTest do
|
|||
"http://localhost:4001/proxy/preview/LzAnlke-l5oZbNzWsrHfprX1rGw/aHR0cHM6Ly9wbGVyb21hLmdvdi9hYm91dC9qdWNoZS53ZWJt/juche.webm"
|
||||
], []} in result
|
||||
end
|
||||
|
||||
test "meta tag ordering matches attachment order" do
|
||||
user = insert(:user, name: "Jimmy Hendriks", bio: "born 19 March 1994")
|
||||
|
||||
note =
|
||||
insert(:note, %{
|
||||
data: %{
|
||||
"actor" => user.ap_id,
|
||||
"tag" => [],
|
||||
"id" => "https://pleroma.gov/objects/whatever",
|
||||
"summary" => "",
|
||||
"content" => "pleroma in a nutshell",
|
||||
"attachment" => [
|
||||
%{
|
||||
"url" => [
|
||||
%{
|
||||
"mediaType" => "image/png",
|
||||
"href" => "https://example.com/first.png",
|
||||
"height" => 1024,
|
||||
"width" => 1280
|
||||
}
|
||||
]
|
||||
},
|
||||
%{
|
||||
"url" => [
|
||||
%{
|
||||
"mediaType" => "image/png",
|
||||
"href" => "https://example.com/second.png",
|
||||
"height" => 1024,
|
||||
"width" => 1280
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
result = OpenGraph.build_tags(%{object: note, url: note.data["id"], user: user})
|
||||
|
||||
assert [
|
||||
{:meta, [property: "og:title", content: Utils.user_name_string(user)], []},
|
||||
{:meta, [property: "og:url", content: "https://pleroma.gov/objects/whatever"], []},
|
||||
{:meta, [property: "og:description", content: "pleroma in a nutshell"], []},
|
||||
{:meta, [property: "og:type", content: "article"], []},
|
||||
{:meta, [property: "og:image", content: "https://example.com/first.png"], []},
|
||||
{:meta, [property: "og:image:alt", content: nil], []},
|
||||
{:meta, [property: "og:image:width", content: "1280"], []},
|
||||
{:meta, [property: "og:image:height", content: "1024"], []},
|
||||
{:meta, [property: "og:image", content: "https://example.com/second.png"], []},
|
||||
{:meta, [property: "og:image:alt", content: nil], []},
|
||||
{:meta, [property: "og:image:width", content: "1280"], []},
|
||||
{:meta, [property: "og:image:height", content: "1024"], []}
|
||||
] == result
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -202,4 +202,58 @@ defmodule Pleroma.Web.Metadata.Providers.TwitterCardTest do
|
|||
{:meta, [name: "twitter:player:stream:content_type", content: "video/webm"], []}
|
||||
] == result
|
||||
end
|
||||
|
||||
test "meta tag ordering matches attachment order" do
|
||||
user = insert(:user, name: "Jimmy Hendriks", bio: "born 19 March 1994")
|
||||
|
||||
note =
|
||||
insert(:note, %{
|
||||
data: %{
|
||||
"actor" => user.ap_id,
|
||||
"tag" => [],
|
||||
"id" => "https://pleroma.gov/objects/whatever",
|
||||
"summary" => "",
|
||||
"content" => "pleroma in a nutshell",
|
||||
"attachment" => [
|
||||
%{
|
||||
"url" => [
|
||||
%{
|
||||
"mediaType" => "image/png",
|
||||
"href" => "https://example.com/first.png",
|
||||
"height" => 1024,
|
||||
"width" => 1280
|
||||
}
|
||||
]
|
||||
},
|
||||
%{
|
||||
"url" => [
|
||||
%{
|
||||
"mediaType" => "image/png",
|
||||
"href" => "https://example.com/second.png",
|
||||
"height" => 1024,
|
||||
"width" => 1280
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
})
|
||||
|
||||
result = TwitterCard.build_tags(%{object: note, activity_id: note.data["id"], user: user})
|
||||
|
||||
assert [
|
||||
{:meta, [name: "twitter:title", content: Utils.user_name_string(user)], []},
|
||||
{:meta, [name: "twitter:description", content: "pleroma in a nutshell"], []},
|
||||
{:meta, [name: "twitter:card", content: "summary_large_image"], []},
|
||||
{:meta, [name: "twitter:image", content: "https://example.com/first.png"], []},
|
||||
{:meta, [name: "twitter:image:alt", content: ""], []},
|
||||
{:meta, [name: "twitter:player:width", content: "1280"], []},
|
||||
{:meta, [name: "twitter:player:height", content: "1024"], []},
|
||||
{:meta, [name: "twitter:card", content: "summary_large_image"], []},
|
||||
{:meta, [name: "twitter:image", content: "https://example.com/second.png"], []},
|
||||
{:meta, [name: "twitter:image:alt", content: ""], []},
|
||||
{:meta, [name: "twitter:player:width", content: "1280"], []},
|
||||
{:meta, [name: "twitter:player:height", content: "1024"], []}
|
||||
] == result
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -58,16 +58,28 @@ defmodule Pleroma.Web.OAuth.AppTest do
|
|||
attrs = %{client_name: "Mastodon-Local", redirect_uris: "."}
|
||||
{:ok, %App{} = old_app} = App.get_or_make(attrs, ["write"])
|
||||
|
||||
# backdate the old app so it's within the threshold for being cleaned up
|
||||
one_hour_ago = DateTime.add(DateTime.utc_now(), -3600)
|
||||
|
||||
{:ok, _} =
|
||||
"UPDATE apps SET inserted_at = $1, updated_at = $1 WHERE id = $2"
|
||||
|> Pleroma.Repo.query([one_hour_ago, old_app.id])
|
||||
|
||||
# Create the new app after backdating the old one
|
||||
attrs = %{client_name: "PleromaFE", redirect_uris: "."}
|
||||
{:ok, %App{} = app} = App.get_or_make(attrs, ["write"])
|
||||
|
||||
# backdate the old app so it's within the threshold for being cleaned up
|
||||
# Ensure the new app has a recent timestamp
|
||||
now = DateTime.utc_now()
|
||||
|
||||
{:ok, _} =
|
||||
"UPDATE apps SET inserted_at = now() - interval '1 hour' WHERE id = #{old_app.id}"
|
||||
|> Pleroma.Repo.query()
|
||||
"UPDATE apps SET inserted_at = $1, updated_at = $1 WHERE id = $2"
|
||||
|> Pleroma.Repo.query([now, app.id])
|
||||
|
||||
App.remove_orphans()
|
||||
|
||||
assert [app] == Pleroma.Repo.all(App)
|
||||
assert [returned_app] = Pleroma.Repo.all(App)
|
||||
assert returned_app.client_name == "PleromaFE"
|
||||
assert returned_app.id == app.id
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -14,7 +14,7 @@ defmodule Pleroma.Web.PleromaAPI.EmojiReactionControllerTest do
|
|||
import Pleroma.Factory
|
||||
|
||||
setup do
|
||||
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Config)
|
||||
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Test.StaticConfig)
|
||||
:ok
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -62,4 +62,79 @@ defmodule Pleroma.Web.Plugs.InstanceStaticTest do
|
|||
index = get(build_conn(), "/static/kaniini.html")
|
||||
assert html_response(index, 200) == "<h1>rabbit hugs as a service</h1>"
|
||||
end
|
||||
|
||||
test "does not sanitize dangerous files in general, as there can be html and javascript files legitimately in this folder" do
|
||||
# Create a file with a potentially dangerous extension (.json)
|
||||
# This mimics an attacker trying to serve ActivityPub JSON with a static file
|
||||
File.mkdir!(@dir <> "/static")
|
||||
File.write!(@dir <> "/static/malicious.json", "{\"type\": \"ActivityPub\"}")
|
||||
|
||||
conn = get(build_conn(), "/static/malicious.json")
|
||||
|
||||
assert conn.status == 200
|
||||
|
||||
content_type =
|
||||
Enum.find_value(conn.resp_headers, fn
|
||||
{"content-type", value} -> value
|
||||
_ -> nil
|
||||
end)
|
||||
|
||||
assert content_type == "application/json"
|
||||
|
||||
File.write!(@dir <> "/static/safe.jpg", "fake image data")
|
||||
|
||||
conn = get(build_conn(), "/static/safe.jpg")
|
||||
|
||||
assert conn.status == 200
|
||||
|
||||
# Get the content-type
|
||||
content_type =
|
||||
Enum.find_value(conn.resp_headers, fn
|
||||
{"content-type", value} -> value
|
||||
_ -> nil
|
||||
end)
|
||||
|
||||
assert content_type == "image/jpeg"
|
||||
end
|
||||
|
||||
test "always sanitizes emojis to images" do
|
||||
File.mkdir!(@dir <> "/emoji")
|
||||
File.write!(@dir <> "/emoji/malicious.html", "<script>HACKED</script>")
|
||||
|
||||
# Request the malicious file
|
||||
conn = get(build_conn(), "/emoji/malicious.html")
|
||||
|
||||
# Verify the file was served (status 200)
|
||||
assert conn.status == 200
|
||||
|
||||
# The content should be served, but with a sanitized content-type
|
||||
content_type =
|
||||
Enum.find_value(conn.resp_headers, fn
|
||||
{"content-type", value} -> value
|
||||
_ -> nil
|
||||
end)
|
||||
|
||||
# It should have been sanitized to application/octet-stream because "application"
|
||||
# is not in the allowed_mime_types list
|
||||
assert content_type == "application/octet-stream"
|
||||
|
||||
# Create a file with an allowed extension (.jpg)
|
||||
File.write!(@dir <> "/emoji/safe.jpg", "fake image data")
|
||||
|
||||
# Request the safe file
|
||||
conn = get(build_conn(), "/emoji/safe.jpg")
|
||||
|
||||
# Verify the file was served (status 200)
|
||||
assert conn.status == 200
|
||||
|
||||
# Get the content-type
|
||||
content_type =
|
||||
Enum.find_value(conn.resp_headers, fn
|
||||
{"content-type", value} -> value
|
||||
_ -> nil
|
||||
end)
|
||||
|
||||
# It should be preserved because "image" is in the allowed_mime_types list
|
||||
assert content_type == "image/jpeg"
|
||||
end
|
||||
end
|
||||
|
|
|
|||
53
test/pleroma/web/plugs/uploaded_media_test.exs
Normal file
53
test/pleroma/web/plugs/uploaded_media_test.exs
Normal file
|
|
@ -0,0 +1,53 @@
|
|||
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.Web.Plugs.UploadedMediaTest do
|
||||
use ExUnit.Case, async: true
|
||||
|
||||
alias Pleroma.Web.Plugs.Utils
|
||||
|
||||
describe "content-type sanitization with Utils.get_safe_mime_type/2" do
|
||||
test "it allows safe MIME types" do
|
||||
opts = %{allowed_mime_types: ["image", "audio", "video"]}
|
||||
|
||||
assert Utils.get_safe_mime_type(opts, "image/jpeg") == "image/jpeg"
|
||||
assert Utils.get_safe_mime_type(opts, "audio/mpeg") == "audio/mpeg"
|
||||
assert Utils.get_safe_mime_type(opts, "video/mp4") == "video/mp4"
|
||||
end
|
||||
|
||||
test "it sanitizes potentially dangerous content-types" do
|
||||
opts = %{allowed_mime_types: ["image", "audio", "video"]}
|
||||
|
||||
assert Utils.get_safe_mime_type(opts, "application/activity+json") ==
|
||||
"application/octet-stream"
|
||||
|
||||
assert Utils.get_safe_mime_type(opts, "text/html") == "application/octet-stream"
|
||||
|
||||
assert Utils.get_safe_mime_type(opts, "application/javascript") ==
|
||||
"application/octet-stream"
|
||||
end
|
||||
|
||||
test "it sanitizes ActivityPub content types" do
|
||||
opts = %{allowed_mime_types: ["image", "audio", "video"]}
|
||||
|
||||
assert Utils.get_safe_mime_type(opts, "application/activity+json") ==
|
||||
"application/octet-stream"
|
||||
|
||||
assert Utils.get_safe_mime_type(opts, "application/ld+json") == "application/octet-stream"
|
||||
assert Utils.get_safe_mime_type(opts, "application/jrd+json") == "application/octet-stream"
|
||||
end
|
||||
|
||||
test "it sanitizes other potentially dangerous types" do
|
||||
opts = %{allowed_mime_types: ["image", "audio", "video"]}
|
||||
|
||||
assert Utils.get_safe_mime_type(opts, "text/html") == "application/octet-stream"
|
||||
|
||||
assert Utils.get_safe_mime_type(opts, "application/javascript") ==
|
||||
"application/octet-stream"
|
||||
|
||||
assert Utils.get_safe_mime_type(opts, "text/javascript") == "application/octet-stream"
|
||||
assert Utils.get_safe_mime_type(opts, "application/xhtml+xml") == "application/octet-stream"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -54,7 +54,6 @@ defmodule Pleroma.Web.RichMedia.ParserTest do
|
|||
{:ok,
|
||||
%{
|
||||
"card" => "summary",
|
||||
"site" => "@flickr",
|
||||
"image" => "https://farm6.staticflickr.com/5510/14338202952_93595258ff_z.jpg",
|
||||
"title" => "Small Island Developing States Photo Submission",
|
||||
"description" => "View the album on Flickr.",
|
||||
|
|
|
|||
|
|
@ -17,10 +17,6 @@ defmodule Pleroma.Web.RichMedia.Parsers.TwitterCardTest do
|
|||
|
||||
assert TwitterCard.parse(html, %{}) ==
|
||||
%{
|
||||
"app:id:googleplay" => "com.nytimes.android",
|
||||
"app:name:googleplay" => "NYTimes",
|
||||
"app:url:googleplay" => "nytimes://reader/id/100000006583622",
|
||||
"site" => nil,
|
||||
"description" =>
|
||||
"With little oversight, the N.Y.P.D. has been using powerful surveillance technology on photos of children and teenagers.",
|
||||
"image" =>
|
||||
|
|
@ -44,7 +40,7 @@ defmodule Pleroma.Web.RichMedia.Parsers.TwitterCardTest do
|
|||
"description" =>
|
||||
"With little oversight, the N.Y.P.D. has been using powerful surveillance technology on photos of children and teenagers.",
|
||||
"image" =>
|
||||
"https://static01.nyt.com/images/2019/08/01/nyregion/01nypd-juveniles-promo/01nypd-juveniles-promo-videoSixteenByNineJumbo1600.jpg",
|
||||
"https://static01.nyt.com/images/2019/08/01/nyregion/01nypd-juveniles-promo/01nypd-juveniles-promo-facebookJumbo.jpg",
|
||||
"image:alt" => "",
|
||||
"title" =>
|
||||
"She Was Arrested at 14. Then Her Photo Went to a Facial Recognition Database.",
|
||||
|
|
@ -61,16 +57,12 @@ defmodule Pleroma.Web.RichMedia.Parsers.TwitterCardTest do
|
|||
|
||||
assert TwitterCard.parse(html, %{}) ==
|
||||
%{
|
||||
"app:id:googleplay" => "com.nytimes.android",
|
||||
"app:name:googleplay" => "NYTimes",
|
||||
"app:url:googleplay" => "nytimes://reader/id/100000006583622",
|
||||
"card" => "summary_large_image",
|
||||
"description" =>
|
||||
"With little oversight, the N.Y.P.D. has been using powerful surveillance technology on photos of children and teenagers.",
|
||||
"image" =>
|
||||
"https://static01.nyt.com/images/2019/08/01/nyregion/01nypd-juveniles-promo/01nypd-juveniles-promo-videoSixteenByNineJumbo1600.jpg",
|
||||
"https://static01.nyt.com/images/2019/08/01/nyregion/01nypd-juveniles-promo/01nypd-juveniles-promo-facebookJumbo.jpg",
|
||||
"image:alt" => "",
|
||||
"site" => nil,
|
||||
"title" =>
|
||||
"She Was Arrested at 14. Then Her Photo Went to a Facial Recognition Database.",
|
||||
"url" =>
|
||||
|
|
@ -90,13 +82,11 @@ defmodule Pleroma.Web.RichMedia.Parsers.TwitterCardTest do
|
|||
|
||||
assert TwitterCard.parse(html, %{}) ==
|
||||
%{
|
||||
"site" => "@atlasobscura",
|
||||
"title" => "The Missing Grave of Margaret Corbin, Revolutionary War Veteran",
|
||||
"card" => "summary_large_image",
|
||||
"image" => image_path,
|
||||
"description" =>
|
||||
"She's the only woman veteran honored with a monument at West Point. But where was she buried?",
|
||||
"site_name" => "Atlas Obscura",
|
||||
"type" => "article",
|
||||
"url" => "http://www.atlasobscura.com/articles/margaret-corbin-grave-west-point"
|
||||
}
|
||||
|
|
@ -109,12 +99,8 @@ defmodule Pleroma.Web.RichMedia.Parsers.TwitterCardTest do
|
|||
|
||||
assert TwitterCard.parse(html, %{}) ==
|
||||
%{
|
||||
"site" => nil,
|
||||
"title" =>
|
||||
"She Was Arrested at 14. Then Her Photo Went to a Facial Recognition Database.",
|
||||
"app:id:googleplay" => "com.nytimes.android",
|
||||
"app:name:googleplay" => "NYTimes",
|
||||
"app:url:googleplay" => "nytimes://reader/id/100000006583622",
|
||||
"description" =>
|
||||
"With little oversight, the N.Y.P.D. has been using powerful surveillance technology on photos of children and teenagers.",
|
||||
"image" =>
|
||||
|
|
@ -124,4 +110,23 @@ defmodule Pleroma.Web.RichMedia.Parsers.TwitterCardTest do
|
|||
"https://www.nytimes.com/2019/08/01/nyregion/nypd-facial-recognition-children-teenagers.html"
|
||||
}
|
||||
end
|
||||
|
||||
test "takes first image if multiple are specified" do
|
||||
html =
|
||||
File.read!("test/fixtures/fulmo.html")
|
||||
|> Floki.parse_document!()
|
||||
|
||||
assert TwitterCard.parse(html, %{}) ==
|
||||
%{
|
||||
"description" => "Pri feoj, kiuj devis ordigi falintan arbon.",
|
||||
"image" => "https://tirifto.xwx.moe/r/ilustrajhoj/pinglordigado.png",
|
||||
"title" => "Fulmo",
|
||||
"type" => "website",
|
||||
"url" => "https://tirifto.xwx.moe/eo/rakontoj/fulmo.html",
|
||||
"image:alt" =>
|
||||
"Meze de arbaro kuŝas falinta trunko, sen pingloj kaj kun branĉoj derompitaj. Post ĝi videblas du feoj: florofeo maldekstre kaj nubofeo dekstre. La florofeo iom kaŝas sin post la trunko. La nubofeo staras kaj tenas amason da pigloj. Ili iom rigardas al si.",
|
||||
"image:height" => "630",
|
||||
"image:width" => "1200"
|
||||
}
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -14,7 +14,7 @@ defmodule Pleroma.Workers.Cron.DigestEmailsWorkerTest do
|
|||
setup do: clear_config([:email_notifications, :digest])
|
||||
|
||||
setup do
|
||||
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Config)
|
||||
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Test.StaticConfig)
|
||||
:ok
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -11,7 +11,7 @@ defmodule Pleroma.Workers.Cron.NewUsersDigestWorkerTest do
|
|||
alias Pleroma.Workers.Cron.NewUsersDigestWorker
|
||||
|
||||
setup do
|
||||
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Config)
|
||||
Mox.stub_with(Pleroma.UnstubbedConfigMock, Pleroma.Test.StaticConfig)
|
||||
:ok
|
||||
end
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue