2018-12-23 20:04:54 +00:00
|
|
|
# Pleroma: A lightweight social networking server
|
2021-01-13 06:49:20 +00:00
|
|
|
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
|
2018-12-23 20:04:54 +00:00
|
|
|
# SPDX-License-Identifier: AGPL-3.0-only
|
|
|
|
|
2017-11-22 18:06:07 +00:00
|
|
|
defmodule Pleroma.Web.MediaProxy do
|
2019-07-14 21:01:32 +00:00
|
|
|
alias Pleroma.Config
|
2020-09-03 17:13:29 +00:00
|
|
|
alias Pleroma.Helpers.UriHelper
|
2019-08-02 17:07:09 +00:00
|
|
|
alias Pleroma.Upload
|
2021-05-31 20:09:11 +00:00
|
|
|
alias Pleroma.Web.Endpoint
|
2020-06-14 18:02:57 +00:00
|
|
|
alias Pleroma.Web.MediaProxy.Invalidation
|
2017-11-22 18:06:07 +00:00
|
|
|
|
2019-07-14 21:01:32 +00:00
|
|
|
@base64_opts [padding: false]
|
2020-08-11 07:28:35 +00:00
|
|
|
@cache_table :banned_urls_cache
|
|
|
|
|
2020-12-18 16:44:46 +00:00
|
|
|
@cachex Pleroma.Config.get([:cachex, :provider], Cachex)
|
|
|
|
|
2020-08-11 07:28:35 +00:00
|
|
|
# Exposes the Cachex table name that holds banned media URLs.
def cache_table do
  @cache_table
end
|
2018-11-20 16:46:54 +00:00
|
|
|
|
2020-06-17 18:13:55 +00:00
|
|
|
@spec in_banned_urls(String.t()) :: boolean()
# True when the proxied form of `url` is present in the ban cache.
def in_banned_urls(url) do
  result = @cachex.exists?(@cache_table, url(url))
  elem(result, 1)
end
|
2020-06-14 18:02:57 +00:00
|
|
|
|
2020-06-17 18:13:55 +00:00
|
|
|
# Removes a batch of URLs from the ban cache inside one cache transaction.
def remove_from_banned_urls(urls) when is_list(urls) do
  @cachex.execute!(@cache_table, fn cache ->
    urls
    |> Invalidation.prepare_urls()
    |> Enum.each(fn key -> @cachex.del(cache, key) end)
  end)
end
|
|
|
|
|
2020-06-17 18:13:55 +00:00
|
|
|
# Removes a single URL (keyed by its proxied form) from the ban cache.
def remove_from_banned_urls(url) when is_binary(url) do
  key = url(url)
  @cachex.del(@cache_table, key)
end
|
|
|
|
|
2020-06-17 18:13:55 +00:00
|
|
|
# Bans a batch of URLs inside one cache transaction.
def put_in_banned_urls(urls) when is_list(urls) do
  @cachex.execute!(@cache_table, fn cache ->
    urls
    |> Invalidation.prepare_urls()
    |> Enum.each(fn key -> @cachex.put(cache, key, true) end)
  end)
end
|
|
|
|
|
2020-06-17 18:13:55 +00:00
|
|
|
# Bans a single URL, keyed by its proxied form.
def put_in_banned_urls(url) when is_binary(url) do
  key = url(url)
  @cachex.put(@cache_table, key, true)
end
|
|
|
|
|
2019-07-14 21:01:32 +00:00
|
|
|
@doc """
Rewrites a remote media URL to point at this instance's media proxy.

Returns `nil` for `nil`/empty input, passes relative (local) paths through
unchanged, and only encodes URLs that are proxiable (remote, not
whitelisted, not blocked — see `url_proxiable?/1`) while the proxy is
enabled; otherwise the original URL is returned as-is.
"""
def url(url) when is_nil(url) or url == "", do: nil
def url("/" <> _ = url), do: url

def url(url) do
  # Fix: removed a leftover debug `|> IO.inspect()` that dumped every
  # proxied URL to stdout on each call.
  if enabled?() and url_proxiable?(url) do
    encode_url(url)
  else
    url
  end
end
|
2018-03-30 13:01:53 +00:00
|
|
|
|
2020-06-17 18:02:01 +00:00
|
|
|
@spec url_proxiable?(String.t()) :: boolean()
# A URL goes through the proxy only when it is remote, not whitelisted,
# and not blocked (De Morgan form of the original conjunction).
def url_proxiable?(url) do
  not (local?(url) or whitelisted?(url) or blocked?(url))
end
|
|
|
|
|
2020-09-03 17:13:29 +00:00
|
|
|
# Returns a media-preview-proxy URL when the preview proxy is enabled,
# otherwise falls back to the plain media-proxy URL.
def preview_url(url, preview_params \\ []) do
  if preview_enabled?(),
    do: encode_preview_url(url, preview_params),
    else: url(url)
end
|
|
|
|
|
2020-05-11 20:21:53 +00:00
|
|
|
# Media proxying is opt-in; defaults to disabled when unconfigured.
def enabled? do
  Config.get([:media_proxy, :enabled], false)
end
|
2018-12-07 20:44:04 +00:00
|
|
|
|
2020-05-11 20:21:53 +00:00
|
|
|
# The preview proxy piggybacks on the media proxy: the media proxy must be
# enabled so that all non-local, non-whitelisted URLs are loaded through it
# and the body-size constraint is preserved.
def preview_enabled? do
  enabled?() and !!Config.get([:media_preview_proxy, :enabled])
end
|
2018-11-13 14:58:02 +00:00
|
|
|
|
2021-05-31 20:09:11 +00:00
|
|
|
# A URL is local when it points at this instance's own endpoint.
def local?(url) do
  String.starts_with?(url, Endpoint.url())
end
|
2020-05-11 20:21:53 +00:00
|
|
|
|
|
|
|
# True when the URL's host is in the configured whitelist. The upload
# base URL is implicitly whitelisted; entries may be bare domains or full
# URLs, so each is normalised to a host first.
def whitelisted?(url) do
  %{host: domain} = URI.parse(url)

  whitelist =
    Config.get([:media_proxy, :whitelist], [])
    |> Kernel.++(["#{Upload.base_url()}"])
    |> Enum.map(&maybe_get_domain_from_url/1)

  Enum.member?(whitelist, domain)
end
|
2019-08-02 17:07:09 +00:00
|
|
|
|
2023-06-26 13:18:31 +00:00
|
|
|
# True when the URL is covered by the media-proxy blocklist. Entries may
# be either a bare host ("example.com") or a scheme-qualified one
# ("https://example.com"); both forms are checked.
def blocked?(url) do
  %{scheme: scheme, host: domain} = URI.parse(url)
  blocklist = Config.get([:media_proxy, :blocklist])

  domain in blocklist or "#{scheme}://#{domain}" in blocklist
end
|
|
|
|
|
2020-07-11 07:36:36 +00:00
|
|
|
# Whitelist entries may be full http(s) URLs or bare domains;
# normalise either form to just the host.
defp maybe_get_domain_from_url("http" <> _ = url) do
  %URI{host: host} = URI.parse(url)
  host
end

defp maybe_get_domain_from_url(domain), do: domain
|
|
|
|
|
2020-05-08 20:06:47 +00:00
|
|
|
# Produces the URL-safe base64 payload for `url` together with the
# base64-encoded HMAC signature computed over that payload.
defp base64_sig64(url) do
  base64 = Base.url_encode64(url, @base64_opts)
  sig64 = Base.url_encode64(signed_url(base64), @base64_opts)
  {base64, sig64}
end
|
|
|
|
|
|
|
|
@doc "Encodes `url` into a signed media-proxy URL."
def encode_url(url) do
  {payload, signature} = base64_sig64(url)
  build_url(signature, payload, filename(url))
end
|
|
|
|
|
2020-09-03 17:13:29 +00:00
|
|
|
@doc "Encodes `url` into a signed media-preview-proxy URL."
def encode_preview_url(url, preview_params \\ []) do
  {payload, signature} = base64_sig64(url)
  build_preview_url(signature, payload, filename(url), preview_params)
end
|
|
|
|
|
2017-11-22 18:06:07 +00:00
|
|
|
# Verifies `sig` (base64url-encoded HMAC) against `url` (the base64url
# payload) and returns `{:ok, decoded_url}` on success, or
# `{:error, :invalid_signature}` on any failure (bad base64 or a
# signature mismatch both fall through the `else`).
#
# NOTE(review): the signature comparison is done via the clause guard
# `signature == sig`, which is not a constant-time comparison; consider
# `Plug.Crypto.secure_compare/2` if timing side channels are a concern.
def decode_url(sig, url) do
  with {:ok, sig} <- Base.url_decode64(sig, @base64_opts),
       signature when signature == sig <- signed_url(url) do
    {:ok, Base.url_decode64!(url, @base64_opts)}
  else
    _ -> {:error, :invalid_signature}
  end
end
|
2018-11-23 16:40:45 +00:00
|
|
|
|
2021-03-05 19:18:37 +00:00
|
|
|
@doc "Decodes a full media-proxy URL back into the original remote URL."
def decode_url(encoded) do
  # Expected path shape: /proxy/:sig/:base64[/:filename]
  [_, "proxy", sig, base64 | _] =
    encoded
    |> URI.parse()
    |> Map.fetch!(:path)
    |> String.split("/")

  decode_url(sig, base64)
end
|
|
|
|
|
2019-07-14 21:01:32 +00:00
|
|
|
# HMAC-SHA1 of `url`, keyed with the endpoint's secret_key_base.
defp signed_url(url) do
  secret = Config.get([Endpoint, :secret_key_base])
  :crypto.mac(:hmac, :sha, secret, url)
end
|
|
|
|
|
2018-11-23 16:40:45 +00:00
|
|
|
@doc """
Returns the basename of the URL's path, or `nil` when it has no path.
"""
def filename(url_or_path) do
  case URI.parse(url_or_path) do
    %URI{path: nil} -> nil
    %URI{path: path} -> Path.basename(path)
  end
end
|
|
|
|
|
2024-04-06 22:06:29 +00:00
|
|
|
# Raises when [:media_proxy, :base_url] is unset — proxy URLs cannot be
# built without a configured base.
def base_url, do: Config.get!([:media_proxy, :base_url])
|
|
|
|
|
2020-05-08 20:06:47 +00:00
|
|
|
# Joins the proxy URL segments; a `nil` filename is dropped before joining.
defp proxy_url(path, sig_base64, url_base64, filename) do
  segments = [base_url(), path, sig_base64, url_base64, filename]

  segments
  |> Enum.filter(& &1)
  |> Path.join()
end
|
2020-05-08 20:06:47 +00:00
|
|
|
|
|
|
|
@doc "Builds a `/proxy/...` URL from the signature and payload segments."
def build_url(sig_base64, url_base64, filename \\ nil) do
  proxy_url("proxy", sig_base64, url_base64, filename)
end
|
|
|
|
|
2020-09-03 17:13:29 +00:00
|
|
|
@doc "Builds a `/proxy/preview/...` URL, appending any preview params."
def build_preview_url(sig_base64, url_base64, filename \\ nil, preview_params \\ []) do
  "proxy/preview"
  |> proxy_url(sig_base64, url_base64, filename)
  |> UriHelper.modify_uri_params(preview_params)
end
|
|
|
|
|
2020-05-11 20:21:53 +00:00
|
|
|
@doc """
Checks that the filename embedded in the request path matches the
basename of the decoded URL.

Returns `:ok` when they match (or no check applies), otherwise
`{:wrong_filename, expected}`.
"""
def verify_request_path_and_url(
      %Plug.Conn{params: %{"filename" => _}, request_path: request_path},
      url
    ) do
  verify_request_path_and_url(request_path, url)
end

def verify_request_path_and_url(request_path, url) when is_binary(request_path) do
  expected = filename(url)

  cond do
    # No filename on the URL: nothing to verify.
    is_nil(expected) -> :ok
    basename_matches?(request_path, expected) -> :ok
    true -> {:wrong_filename, expected}
  end
end

def verify_request_path_and_url(_, _), do: :ok
|
2020-05-08 20:06:47 +00:00
|
|
|
|
|
|
|
# The request-path basename may be raw, percent-encoded, or in need of
# encoding relative to the expected filename; accept any of the three.
defp basename_matches?(path, filename) do
  base = Path.basename(path)
  Enum.any?([base, URI.decode(base), URI.encode(base)], &(&1 == filename))
end
|
2017-11-22 18:06:07 +00:00
|
|
|
end
|