Merge branch 'url-encode-pt2' into 'develop'

Fix URL encoding of HTTP requests Pt2

See merge request pleroma/pleroma!4364
lain 2025-12-10 16:34:31 +00:00
commit ed931a668f
14 changed files with 408 additions and 59 deletions

View file

@@ -0,0 +1 @@
Fix sometimes incorrect URI percent encoding

View file

@@ -132,6 +132,13 @@ defmodule Pleroma.Constants do
do: ~r/^[^[:cntrl:] ()<>@,;:\\"\/\[\]?=]+\/[^[:cntrl:] ()<>@,;:\\"\/\[\]?=]+(; .*)?$/
)
# List of allowed chars in the path segment of a URI:
# unreserved chars, sub-delims, ":", "@", and "/" (as the separator in a path)
# https://datatracker.ietf.org/doc/html/rfc3986
const(uri_path_allowed_reserved_chars,
do: ~c"!$&'()*+,;=/:@"
)
const(upload_object_types, do: ["Document", "Image"])
const(activity_json_canonical_mime_type,

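Not part of the diff: a minimal sketch of how an allow-list like this can drive URI.encode/2 as a byte predicate (the anonymous encode_path function below is hypothetical; the real implementation lives in Pleroma.Utils.URIEncoding further down):

# Hypothetical sketch: keep unreserved chars plus the reserved chars above,
# escape everything else (spaces, "[", "]", raw unicode bytes, ...).
allowed = ~c"!$&'()*+,;=/:@"

encode_path = fn path ->
  URI.encode(path, fn byte ->
    URI.char_unreserved?(byte) or byte in allowed
  end)
end

encode_path.("/media/foo/bar [baz].mp4")
# => "/media/foo/bar%20%5Bbaz%5D.mp4"
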
View file

@@ -131,31 +131,4 @@ defmodule Pleroma.HTTP do
defp default_middleware,
do: [Tesla.Middleware.FollowRedirects, Pleroma.Tesla.Middleware.EncodeUrl]
def encode_url(url) when is_binary(url) do
URI.parse(url)
|> then(fn parsed ->
path = encode_path(parsed.path)
query = encode_query(parsed.query)
%{parsed | path: path, query: query}
end)
|> URI.to_string()
end
defp encode_path(nil), do: nil
defp encode_path(path) when is_binary(path) do
path
|> URI.decode()
|> URI.encode()
end
defp encode_query(nil), do: nil
defp encode_query(query) when is_binary(query) do
query
|> URI.decode_query()
|> URI.encode_query()
end
end

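For context on why this code moved: the removed decode-then-encode round trip leaned on URI.encode/1, whose default predicate leaves all RFC 3986 reserved characters alone, including "[" and "]" which are not valid in a path. A rough sketch of the difference (old behavior assumed, not re-tested here):

old_encode = fn path -> path |> URI.decode() |> URI.encode() end

old_encode.("/media/foo/bar [baz].mp4")
# => "/media/foo/bar%20[baz].mp4" (brackets survive unescaped)

# The replacement in Pleroma.Utils.URIEncoding (added below) escapes them:
# => "/media/foo/bar%20%5Bbaz%5D.mp4"
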
View file

@@ -16,7 +16,12 @@ defmodule Pleroma.HTTP.AdapterHelper.Hackney do
config_opts = Pleroma.Config.get([:http, :adapter], [])
url_encoding =
Keyword.new()
|> Keyword.put(:path_encode_fun, fn path -> path end)
@defaults
|> Keyword.merge(url_encoding)
|> Keyword.merge(config_opts)
|> Keyword.merge(connection_opts)
|> add_scheme_opts(uri)

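Note the merge order: with Keyword.merge/2 the right-hand list wins, so values from the [:http, :adapter] config or the per-connection options can still override the identity :path_encode_fun set here. A minimal sketch of that precedence (function values hypothetical):

identity = fn path -> path end
custom = fn path -> URI.encode(path) end

merged =
  [path_encode_fun: identity]
  |> Keyword.merge(path_encode_fun: custom)

merged[:path_encode_fun] == custom
# => true, later merges take precedence, mirroring config_opts and connection_opts above
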
View file

@@ -3,6 +3,8 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.ReverseProxy do
alias Pleroma.Utils.URIEncoding
@range_headers ~w(range if-range)
@keep_req_headers ~w(accept accept-encoding cache-control if-modified-since) ++
~w(if-unmodified-since if-none-match) ++ @range_headers
@@ -155,11 +157,12 @@ defmodule Pleroma.ReverseProxy do
end
defp request(method, url, headers, opts) do
Logger.debug("#{__MODULE__} #{method} #{url} #{inspect(headers)}")
method = method |> String.downcase() |> String.to_existing_atom()
url = maybe_encode_url(url)
Logger.debug("#{__MODULE__} #{method} #{url} #{inspect(headers)}")
case client().request(method, url, headers, "", opts) do
{:ok, code, headers, client} when code in @valid_resp_codes ->
{:ok, code, downcase_headers(headers), client}
@@ -459,9 +462,9 @@ defmodule Pleroma.ReverseProxy do
# Also do it for test environment
defp maybe_encode_url(url) do
case Application.get_env(:tesla, :adapter) do
Tesla.Adapter.Hackney -> Pleroma.HTTP.encode_url(url)
{Tesla.Adapter.Finch, _} -> Pleroma.HTTP.encode_url(url)
Tesla.Mock -> Pleroma.HTTP.encode_url(url)
Tesla.Adapter.Hackney -> URIEncoding.encode_url(url)
{Tesla.Adapter.Finch, _} -> URIEncoding.encode_url(url)
Tesla.Mock -> URIEncoding.encode_url(url)
_ -> url
end
end

View file

@@ -7,6 +7,11 @@ defmodule Pleroma.ReverseProxy.Client.Hackney do
@impl true
def request(method, url, headers, body, opts \\ []) do
opts =
Keyword.put_new(opts, :path_encode_fun, fn path ->
path
end)
:hackney.request(method, url, headers, body, opts)
end

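Here the identity :path_encode_fun is injected per request; Keyword.put_new/3 only adds it when the caller has not already set one, so hackney forwards the path bytes exactly as they were %-encoded upstream. A hedged usage sketch, reusing a URL from the test mocks further down:

# Hypothetical call, assuming the URL was already encoded by Pleroma.Utils.URIEncoding:
url = "https://example.com/media/unicode%20%F0%9F%99%82%20.gif"
opts = Keyword.put_new([], :path_encode_fun, fn path -> path end)
:hackney.request(:get, url, [], "", opts)
# hackney should send the path unchanged instead of applying its own encoding
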
View file

@@ -17,7 +17,7 @@ defmodule Pleroma.Tesla.Middleware.EncodeUrl do
@impl Tesla.Middleware
def call(%Tesla.Env{url: url} = env, next, _) do
url = Pleroma.HTTP.encode_url(url)
url = Pleroma.Utils.URIEncoding.encode_url(url)
env = %{env | url: url}

View file

@@ -35,6 +35,7 @@ defmodule Pleroma.Upload do
"""
alias Ecto.UUID
alias Pleroma.Maps
alias Pleroma.Utils.URIEncoding
alias Pleroma.Web.ActivityPub.Utils
require Logger
@@ -230,11 +231,18 @@ defmodule Pleroma.Upload do
tmp_path
end
# Encoding the whole path here is fine since the path is in a
# UUID/<file name> form.
# The file at this point isn't %-encoded, so the path shouldn't
# be decoded first like Pleroma.Utils.URIEncoding.encode_url/1 does.
defp url_from_spec(%__MODULE__{name: name}, base_url, {:file, path}) do
encode_opts = [bypass_decode: true, bypass_parse: true]
path =
URI.encode(path, &char_unescaped?/1) <>
URIEncoding.encode_url(path, encode_opts) <>
if Pleroma.Config.get([__MODULE__, :link_name], false) do
"?name=#{URI.encode(name, &char_unescaped?/1)}"
enum = %{name: name}
"?#{URI.encode_query(enum)}"
else
""
end

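The net effect on a link_name upload, with expected values taken from the new Pleroma.UploadTest cases below (the bare filename is used here for brevity; in practice it is prefixed with a UUID):

Pleroma.Utils.URIEncoding.encode_url("test file.jpg", bypass_decode: true, bypass_parse: true)
# => "test%20file.jpg"

URI.encode_query(%{name: "test file.jpg"})
# => "name=test+file.jpg"

# Combined href basename, as asserted in the link_name test:
# "test%20file.jpg?name=test+file.jpg"
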
View file

@@ -0,0 +1,142 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2025 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Utils.URIEncoding do
@moduledoc """
Utility functions for dealing with URI encoding of paths and queries
with support for query-encoding quirks.
"""
require Pleroma.Constants
# We don't always want to decode the path first, as is the case in
# Pleroma.Upload.url_from_spec/3.
@doc """
Wraps URI encoding/decoding functions from Elixir's standard library to avoid their usually unintended side effects.
Supports two URL-processing options in the optional second argument, each defaulting to `false`:
* `bypass_parse` - Bypasses the `URI.parse` stage; useful when it's not desirable to parse the URL first
before encoding it. The input is then encoded only as the path segment of a URI.
* `bypass_decode` - Bypasses the `URI.decode` stage for the path segment of a URI. Used when a URL
has to be double %-encoded for internal reasons.
Options must be specified as a keyword list with boolean values, otherwise
`{:error, :invalid_opts}` is returned. Example:
`encode_url(url, [bypass_parse: true, bypass_decode: true])`
"""
@spec encode_url(String.t(), Keyword.t()) :: String.t() | {:error, :invalid_opts}
def encode_url(url, opts \\ []) when is_binary(url) and is_list(opts) do
bypass_parse = Keyword.get(opts, :bypass_parse, false)
bypass_decode = Keyword.get(opts, :bypass_decode, false)
with true <- is_boolean(bypass_parse),
true <- is_boolean(bypass_decode) do
cond do
bypass_parse ->
encode_path(url, bypass_decode)
true ->
URI.parse(url)
|> then(fn parsed ->
path = encode_path(parsed.path, bypass_decode)
query = encode_query(parsed.query, parsed.host)
%{parsed | path: path, query: query}
end)
|> URI.to_string()
end
else
_ -> {:error, :invalid_opts}
end
end
defp encode_path(nil, _bypass_decode), do: nil
# URI.encode/2 deliberately does not encode all chars that are forbidden
# in the path component of a URI. It only encodes chars that are forbidden
# in the whole URI. A predicate in the 2nd argument is used to fix that here.
# URI.encode/2 uses the predicate function to determine whether each byte
# (in an integer representation) should be encoded or not.
defp encode_path(path, bypass_decode) when is_binary(path) do
path =
cond do
bypass_decode ->
path
true ->
URI.decode(path)
end
path
|> URI.encode(fn byte ->
URI.char_unreserved?(byte) ||
Enum.any?(
Pleroma.Constants.uri_path_allowed_reserved_chars(),
fn char ->
char == byte
end
)
end)
end
# Order of kv pairs in query is not preserved when using URI.decode_query.
# URI.query_decoder/2 returns a stream which so far appears to not change order.
# Immediately switch to a list to prevent breakage for sites that expect
# the order of query keys to be always the same.
defp encode_query(query, host) when is_binary(query) do
query
|> URI.query_decoder()
|> Enum.to_list()
|> do_encode_query(host)
end
defp encode_query(nil, _), do: nil
# Always uses www_form encoding.
# Taken from Elixir's URI module.
defp do_encode_query(enumerable, host) do
Enum.map_join(enumerable, "&", &maybe_apply_query_quirk(&1, host))
end
# https://git.pleroma.social/pleroma/pleroma/-/issues/1055
defp maybe_apply_query_quirk({key, value}, "i.guim.co.uk" = _host) do
case key do
"precrop" ->
query_encode_kv_pair({key, value}, ~c":,")
key ->
query_encode_kv_pair({key, value})
end
end
defp maybe_apply_query_quirk({key, value}, _), do: query_encode_kv_pair({key, value})
# Taken from Elixir's URI module and modified to support quirks.
defp query_encode_kv_pair({key, value}, rules \\ []) when is_list(rules) do
cond do
length(rules) > 0 ->
# URI.encode_query/2 does not appear to follow the spec and encodes every char
# in our URI path constant. This appears to work outside of edge cases
# like The Guardian Rich Media Cards, so behavior is kept the same as with
# URI.encode_query/2 unless otherwise specified via rules.
(URI.encode_www_form(Kernel.to_string(key)) <>
"=" <>
URI.encode(value, fn byte ->
URI.char_unreserved?(byte) ||
Enum.any?(
rules,
fn char ->
char == byte
end
)
end))
|> String.replace("%20", "+")
true ->
URI.encode_www_form(Kernel.to_string(key)) <>
"=" <> URI.encode_www_form(Kernel.to_string(value))
end
end
end

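A short usage sketch of the new module; the expected values mirror the tests added to Pleroma.HTTPTest further down:

alias Pleroma.Utils.URIEncoding

# Default: decode first, then re-encode path and query (idempotent for already valid URLs)
URIEncoding.encode_url("https://example.com/media/file%20with%20space.jpg?name=a+space.jpg")
# => "https://example.com/media/file%20with%20space.jpg?name=a+space.jpg"

# bypass_decode: deliberately double-encodes an already %-encoded path
URIEncoding.encode_url("https://example.com/media/file%20with%20space.jpg", bypass_decode: true)
# => "https://example.com/media/file%2520with%2520space.jpg"

# Non-boolean options are rejected
URIEncoding.encode_url("https://example.com/", bypass_parse: "yes")
# => {:error, :invalid_opts}
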
View file

@@ -6,6 +6,7 @@ defmodule Pleroma.Web.MediaProxy do
alias Pleroma.Config
alias Pleroma.Helpers.UriHelper
alias Pleroma.Upload
alias Pleroma.Utils.URIEncoding
alias Pleroma.Web.Endpoint
alias Pleroma.Web.MediaProxy.Invalidation
@@ -99,13 +100,21 @@ defmodule Pleroma.Web.MediaProxy do
{base64, sig64}
end
# The URL coming into MediaProxy from the outside might have wrong %-encoding
# (e.g. from older Pleroma versions).
# This would cause an inconsistency between the URL encoded here and the requested
# URL fixed by Pleroma.Tesla.Middleware.EncodeUrl.
# The end result is a failing HEAD request in
# Pleroma.Web.MediaProxy.MediaProxyController.handle_preview/2
def encode_url(url) do
url = URIEncoding.encode_url(url)
{base64, sig64} = base64_sig64(url)
build_url(sig64, base64, filename(url))
end
def encode_preview_url(url, preview_params \\ []) do
url = URIEncoding.encode_url(url)
{base64, sig64} = base64_sig64(url)
build_preview_url(sig64, base64, filename(url), preview_params)

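Grounded in the updated Pleroma.Web.MediaProxyTest below: because the target is now normalized before signing, a wrongly %-encoded input and its corrected form yield the same proxied target (assuming the media proxy is enabled in config):

alias Pleroma.Web.MediaProxy

incorrect = "https://example.com/media/cofe%20%28with%20milk%29.png"
encoded = MediaProxy.url(incorrect)
# The target embedded in `encoded` decodes back to the corrected form:
# "https://example.com/media/cofe%20(with%20milk).png"
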
View file

@@ -5,8 +5,11 @@
defmodule Pleroma.HTTPTest do
use ExUnit.Case, async: true
use Pleroma.Tests.Helpers
import Tesla.Mock
alias Pleroma.HTTP
alias Pleroma.Utils.URIEncoding
setup do
mock(fn
@@ -28,6 +31,36 @@ defmodule Pleroma.HTTPTest do
%{method: :get, url: "https://example.com/emoji/Pack%201/koronebless.png?foo=bar+baz"} ->
%Tesla.Env{status: 200, body: "emoji data"}
%{
method: :get,
url: "https://example.com/media/foo/bar%20!$&'()*+,;=/:%20@a%20%5Bbaz%5D.mp4"
} ->
%Tesla.Env{status: 200, body: "video data"}
%{method: :get, url: "https://example.com/media/unicode%20%F0%9F%99%82%20.gif"} ->
%Tesla.Env{status: 200, body: "unicode data"}
%{
method: :get,
url:
"https://i.guim.co.uk/img/media/1069ef13c447908272c4de94174cec2b6352cb2f/0_91_2000_1201/master/2000.jpg?width=1200&height=630&quality=85&auto=format&fit=crop&precrop=40:21,offset-x50,offset-y0&overlay-align=bottom%2Cleft&overlay-width=100p&overlay-base64=L2ltZy9zdGF0aWMvb3ZlcmxheXMvdGctb3BpbmlvbnMtYWdlLTIwMTkucG5n&enable=upscale&s=cba21427a73512fdc9863c486c03fdd8"
} ->
%Tesla.Env{status: 200, body: "Guardian image quirk"}
%{
method: :get,
url:
"https://i.guim.co.uk/emoji/Pack%201/koronebless.png?precrop=40:21,overlay-x0,overlay-y0&foo=bar+baz"
} ->
%Tesla.Env{status: 200, body: "Space in query with Guardian quirk"}
%{
method: :get,
url:
"https://examplebucket.s3.amazonaws.com/test.txt?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=accessKEY%2F20130721%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20130721T201207Z&X-Amz-Expires=86400&X-Amz-Signature=SIGNATURE&X-Amz-SignedHeaders=host"
} ->
%Tesla.Env{status: 200, body: "AWS S3 data"}
end)
:ok
@@ -85,5 +118,100 @@ defmodule Pleroma.HTTPTest do
{:ok, result} = HTTP.get(properly_encoded_url)
assert result.status == 200
url_with_reserved_chars = "https://example.com/media/foo/bar !$&'()*+,;=/: @a [baz].mp4"
{:ok, result} = HTTP.get(url_with_reserved_chars)
assert result.status == 200
url_with_unicode = "https://example.com/media/unicode 🙂 .gif"
{:ok, result} = HTTP.get(url_with_unicode)
assert result.status == 200
end
test "decodes URL first by default" do
clear_config(:test_url_encoding, true)
normal_url = "https://example.com/media/file%20with%20space.jpg?name=a+space.jpg"
result = URIEncoding.encode_url(normal_url)
assert result == "https://example.com/media/file%20with%20space.jpg?name=a+space.jpg"
end
test "doesn't decode URL first when specified" do
clear_config(:test_url_encoding, true)
normal_url = "https://example.com/media/file%20with%20space.jpg"
result = URIEncoding.encode_url(normal_url, bypass_decode: true)
assert result == "https://example.com/media/file%2520with%2520space.jpg"
end
test "properly applies Guardian image query quirk" do
clear_config(:test_url_encoding, true)
url =
"https://i.guim.co.uk/img/media/1069ef13c447908272c4de94174cec2b6352cb2f/0_91_2000_1201/master/2000.jpg?width=1200&height=630&quality=85&auto=format&fit=crop&precrop=40:21,offset-x50,offset-y0&overlay-align=bottom%2Cleft&overlay-width=100p&overlay-base64=L2ltZy9zdGF0aWMvb3ZlcmxheXMvdGctb3BpbmlvbnMtYWdlLTIwMTkucG5n&enable=upscale&s=cba21427a73512fdc9863c486c03fdd8"
result = URIEncoding.encode_url(url)
assert result == url
{:ok, result_get} = HTTP.get(result)
assert result_get.status == 200
end
test "properly encodes spaces as \"pluses\" in query when using quirks" do
clear_config(:test_url_encoding, true)
url =
"https://i.guim.co.uk/emoji/Pack 1/koronebless.png?precrop=40:21,overlay-x0,overlay-y0&foo=bar baz"
properly_encoded_url =
"https://i.guim.co.uk/emoji/Pack%201/koronebless.png?precrop=40:21,overlay-x0,overlay-y0&foo=bar+baz"
result = URIEncoding.encode_url(url)
assert result == properly_encoded_url
{:ok, result_get} = HTTP.get(result)
assert result_get.status == 200
end
test "properly encodes AWS S3 queries" do
clear_config(:test_url_encoding, true)
url =
"https://examplebucket.s3.amazonaws.com/test.txt?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=accessKEY%2F20130721%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20130721T201207Z&X-Amz-Expires=86400&X-Amz-Signature=SIGNATURE&X-Amz-SignedHeaders=host"
unencoded_url =
"https://examplebucket.s3.amazonaws.com/test.txt?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=accessKEY/20130721/us-east-1/s3/aws4_request&X-Amz-Date=20130721T201207Z&X-Amz-Expires=86400&X-Amz-Signature=SIGNATURE&X-Amz-SignedHeaders=host"
result = URIEncoding.encode_url(url)
result_unencoded = URIEncoding.encode_url(unencoded_url)
assert result == url
assert result == result_unencoded
{:ok, result_get} = HTTP.get(result)
assert result_get.status == 200
end
test "preserves query key order" do
clear_config(:test_url_encoding, true)
url = "https://example.com/foo?hjkl=qwertz&xyz=abc&bar=baz"
result = URIEncoding.encode_url(url)
assert result == url
end
end

View file

@@ -396,18 +396,29 @@ defmodule Pleroma.ReverseProxyTest do
end
end
# Hackey is used for Reverse Proxy when Hackney or Finch is the Tesla Adapter
# Hackney is used for Reverse Proxy when Hackney or Finch is the Tesla Adapter
# Gun is able to proxy through Tesla, so it does not need testing as the
# test cases in the Pleroma.HTTPTest module are sufficient
describe "Hackney URL encoding:" do
setup do
ClientMock
|> expect(:request, fn :get,
"https://example.com/emoji/Pack%201/koronebless.png?foo=bar+baz",
_headers,
_body,
_opts ->
{:ok, 200, [{"content-type", "image/png"}], "It works!"}
|> expect(:request, fn
:get,
"https://example.com/emoji/Pack%201/koronebless.png?foo=bar+baz",
_headers,
_body,
_opts ->
{:ok, 200, [{"content-type", "image/png"}], "It works!"}
:get,
"https://example.com/media/foo/bar%20!$&'()*+,;=/:%20@a%20%5Bbaz%5D.mp4",
_headers,
_body,
_opts ->
{:ok, 200, [{"content-type", "video/mp4"}], "Allowed reserved chars."}
:get, "https://example.com/media/unicode%20%F0%9F%99%82%20.gif", _headers, _body, _opts ->
{:ok, 200, [{"content-type", "image/gif"}], "Unicode emoji in path"}
end)
|> stub(:stream_body, fn _ -> :done end)
|> stub(:close, fn _ -> :ok end)
@@ -430,5 +441,21 @@ defmodule Pleroma.ReverseProxyTest do
assert result.status == 200
end
test "properly encodes URLs with allowed reserved characters", %{conn: conn} do
url_with_reserved_chars = "https://example.com/media/foo/bar !$&'()*+,;=/: @a [baz].mp4"
result = ReverseProxy.call(conn, url_with_reserved_chars)
assert result.status == 200
end
test "properly encodes URLs with unicode in path", %{conn: conn} do
url_with_unicode = "https://example.com/media/unicode 🙂 .gif"
result = ReverseProxy.call(conn, url_with_unicode)
assert result.status == 200
end
end
end

View file

@@ -227,20 +227,35 @@ defmodule Pleroma.UploadTest do
assert Path.basename(attachment_url["href"]) == "an%E2%80%A6%20image.jpg"
end
test "escapes reserved uri characters" do
test "escapes disallowed reserved characters in uri path" do
File.cp!("test/fixtures/image.jpg", "test/fixtures/image_tmp.jpg")
file = %Plug.Upload{
content_type: "image/jpeg",
path: Path.absname("test/fixtures/image_tmp.jpg"),
filename: ":?#[]@!$&\\'()*+,;=.jpg"
filename: ":?#[]@!$&'()*+,;=.jpg"
}
{:ok, data} = Upload.store(file)
[attachment_url | _] = data["url"]
assert Path.basename(attachment_url["href"]) ==
"%3A%3F%23%5B%5D%40%21%24%26%5C%27%28%29%2A%2B%2C%3B%3D.jpg"
":%3F%23%5B%5D@!$&'()*+,;=.jpg"
end
test "double %-encodes filename" do
File.cp!("test/fixtures/image.jpg", "test/fixtures/image_tmp.jpg")
file = %Plug.Upload{
content_type: "image/jpeg",
path: Path.absname("test/fixtures/image_tmp.jpg"),
filename: "file with %20.jpg"
}
{:ok, data} = Upload.store(file)
[attachment_url | _] = data["url"]
assert Path.basename(attachment_url["href"]) == "file%20with%20%2520.jpg"
end
end
@@ -267,4 +282,23 @@ defmodule Pleroma.UploadTest do
refute String.starts_with?(url, base_url <> "/media/")
end
end
describe "Setting a link_name for uploaded media" do
setup do: clear_config([Pleroma.Upload, :link_name], true)
test "encodes name parameter in query" do
File.cp!("test/fixtures/image.jpg", "test/fixtures/image_tmp.jpg")
file = %Plug.Upload{
content_type: "image/jpeg",
path: Path.absname("test/fixtures/image_tmp.jpg"),
filename: "test file.jpg"
}
{:ok, data} = Upload.store(file)
[attachment_url | _] = data["url"]
assert Path.basename(attachment_url["href"]) == "test%20file.jpg?name=test+file.jpg"
end
end
end

View file

@@ -73,7 +73,7 @@ defmodule Pleroma.Web.MediaProxyTest do
end
test "encodes and decodes URL and ignores query params for the path" do
url = "https://pleroma.soykaf.com/static/logo.png?93939393939&bunny=true"
url = "https://pleroma.soykaf.com/static/logo.png?93939393939=&bunny=true"
encoded = MediaProxy.url(url)
assert String.ends_with?(encoded, "/logo.png")
assert decode_result(encoded) == url
@@ -159,18 +159,6 @@ defmodule Pleroma.Web.MediaProxyTest do
assert String.starts_with?(encoded, base_url)
end
# Some sites expect ASCII encoded characters in the URL to be preserved even if
# unnecessary.
# Issues: https://git.pleroma.social/pleroma/pleroma/issues/580
# https://git.pleroma.social/pleroma/pleroma/issues/1055
test "preserve ASCII encoding" do
url =
"https://pleroma.com/%20/%21/%22/%23/%24/%25/%26/%27/%28/%29/%2A/%2B/%2C/%2D/%2E/%2F/%30/%31/%32/%33/%34/%35/%36/%37/%38/%39/%3A/%3B/%3C/%3D/%3E/%3F/%40/%41/%42/%43/%44/%45/%46/%47/%48/%49/%4A/%4B/%4C/%4D/%4E/%4F/%50/%51/%52/%53/%54/%55/%56/%57/%58/%59/%5A/%5B/%5C/%5D/%5E/%5F/%60/%61/%62/%63/%64/%65/%66/%67/%68/%69/%6A/%6B/%6C/%6D/%6E/%6F/%70/%71/%72/%73/%74/%75/%76/%77/%78/%79/%7A/%7B/%7C/%7D/%7E/%7F/%80/%81/%82/%83/%84/%85/%86/%87/%88/%89/%8A/%8B/%8C/%8D/%8E/%8F/%90/%91/%92/%93/%94/%95/%96/%97/%98/%99/%9A/%9B/%9C/%9D/%9E/%9F/%C2%A0/%A1/%A2/%A3/%A4/%A5/%A6/%A7/%A8/%A9/%AA/%AB/%AC/%C2%AD/%AE/%AF/%B0/%B1/%B2/%B3/%B4/%B5/%B6/%B7/%B8/%B9/%BA/%BB/%BC/%BD/%BE/%BF/%C0/%C1/%C2/%C3/%C4/%C5/%C6/%C7/%C8/%C9/%CA/%CB/%CC/%CD/%CE/%CF/%D0/%D1/%D2/%D3/%D4/%D5/%D6/%D7/%D8/%D9/%DA/%DB/%DC/%DD/%DE/%DF/%E0/%E1/%E2/%E3/%E4/%E5/%E6/%E7/%E8/%E9/%EA/%EB/%EC/%ED/%EE/%EF/%F0/%F1/%F2/%F3/%F4/%F5/%F6/%F7/%F8/%F9/%FA/%FB/%FC/%FD/%FE/%FF"
encoded = MediaProxy.url(url)
assert decode_result(encoded) == url
end
# This includes unsafe/reserved characters which are not interpreted as part of the URL
# and would otherwise have to be ASCII encoded. It is our role to ensure the proxied URL
# is unmodified, so we are testing these characters anyway.
@@ -182,11 +170,30 @@ defmodule Pleroma.Web.MediaProxyTest do
assert decode_result(encoded) == url
end
test "preserve unicode characters" do
# Improperly encoded URLs should not be produced here even when the input was wrong.
test "does not preserve unicode characters" do
url = "https://ko.wikipedia.org/wiki/위키백과:대문"
encoded_url =
"https://ko.wikipedia.org/wiki/%EC%9C%84%ED%82%A4%EB%B0%B1%EA%B3%BC:%EB%8C%80%EB%AC%B8"
encoded = MediaProxy.url(url)
assert decode_result(encoded) == url
assert decode_result(encoded) == encoded_url
end
# If we preserved wrongly encoded URLs in MediaProxy, they would get fixed
# when we GET them, resulting in a 424 when MediaProxy previews are enabled.
test "does not preserve incorrect URLs when making MediaProxy link" do
incorrect_original_url = "https://example.com/media/cofe%20%28with%20milk%29.png"
corrected_original_url = "https://example.com/media/cofe%20(with%20milk).png"
unpreserved_encoded_original_url =
"http://localhost:4001/proxy/Sv6tt6xjA72_i4d8gXbuMAOXQSs/aHR0cHM6Ly9leGFtcGxlLmNvbS9tZWRpYS9jb2ZlJTIwKHdpdGglMjBtaWxrKS5wbmc/cofe%20(with%20milk).png"
encoded = MediaProxy.url(incorrect_original_url)
assert encoded == unpreserved_encoded_original_url
assert decode_result(encoded) == corrected_original_url
end
end