Pleroma/lib/pleroma/web/media_proxy/media_proxy.ex

# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.MediaProxy do
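  @moduledoc """
  Builds and decodes signed media proxy URLs (`/proxy/:sig/:url/:filename` and the
  `/proxy/preview/...` variant) and maintains the cache of banned (invalidated)
  proxy URLs.
  """
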
  alias Pleroma.Config
  alias Pleroma.Helpers.UriHelper
  alias Pleroma.Upload
  alias Pleroma.Web
  alias Pleroma.Web.MediaProxy.Invalidation

  @base64_opts [padding: false]
  @cache_table :banned_urls_cache

  def cache_table, do: @cache_table
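
  # Banned (invalidated) media URLs are tracked in the `:banned_urls_cache`
  # Cachex table; list arguments are normalized with
  # `Pleroma.Web.MediaProxy.Invalidation.prepare_urls/1` before being added or removed.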
  @spec in_banned_urls(String.t()) :: boolean()
  def in_banned_urls(url), do: elem(Cachex.exists?(@cache_table, url(url)), 1)

  def remove_from_banned_urls(urls) when is_list(urls) do
    Cachex.execute!(@cache_table, fn cache ->
      Enum.each(Invalidation.prepare_urls(urls), &Cachex.del(cache, &1))
    end)
  end

  def remove_from_banned_urls(url) when is_binary(url) do
    Cachex.del(@cache_table, url(url))
  end

  def put_in_banned_urls(urls) when is_list(urls) do
    Cachex.execute!(@cache_table, fn cache ->
      Enum.each(Invalidation.prepare_urls(urls), &Cachex.put(cache, &1, true))
    end)
  end

  def put_in_banned_urls(url) when is_binary(url) do
    Cachex.put(@cache_table, url(url), true)
  end
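
  # Rewrites a remote URL to go through the media proxy when the proxy is enabled
  # and the URL is neither local nor whitelisted; otherwise the URL is returned
  # as-is. Illustrative shape of a proxied URL (actual values depend on the
  # configured base URL and signing key):
  #
  #   MediaProxy.url("https://remote.example/media/cat.png")
  #   #=> "<base_url>/proxy/<sig64>/<url64>/cat.png"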
  def url(url) when is_nil(url) or url == "", do: nil
  def url("/" <> _ = url), do: url

  def url(url) do
    if enabled?() and url_proxiable?(url) do
      encode_url(url)
    else
      url
    end
  end
  @spec url_proxiable?(String.t()) :: boolean()
  def url_proxiable?(url) do
    not local?(url) and not whitelisted?(url)
  end

  def preview_url(url, preview_params \\ []) do
    if preview_enabled?() do
      encode_preview_url(url, preview_params)
    else
      url(url)
    end
  end
  def enabled?, do: Config.get([:media_proxy, :enabled], false)

  # Note: the media proxy must be enabled for the media preview proxy, so that all
  # non-local, non-whitelisted URLs are loaded through it and the body size
  # constraint is preserved.
  def preview_enabled?, do: enabled?() and !!Config.get([:media_preview_proxy, :enabled])
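
  # Both settings are read from the application environment, e.g. (illustrative):
  #
  #   config :pleroma, :media_proxy, enabled: true
  #   config :pleroma, :media_preview_proxy, enabled: true
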
  def local?(url), do: String.starts_with?(url, Pleroma.Web.base_url())

  def whitelisted?(url) do
    %{host: domain} = URI.parse(url)

    mediaproxy_whitelist_domains =
      [:media_proxy, :whitelist]
      |> Config.get()
      |> Enum.map(&maybe_get_domain_from_url/1)

    whitelist_domains =
      if base_url = Config.get([Upload, :base_url]) do
        %{host: base_domain} = URI.parse(base_url)
        [base_domain | mediaproxy_whitelist_domains]
      else
        mediaproxy_whitelist_domains
      end

    domain in whitelist_domains
  end
  defp maybe_get_domain_from_url("http" <> _ = url) do
    URI.parse(url).host
  end

  defp maybe_get_domain_from_url(domain), do: domain
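
  # The proxy URL carries the target URL base64url-encoded (unpadded) together with
  # a base64url-encoded HMAC signature of that encoding, so that only URLs generated
  # by this instance are accepted back by `decode_url/2`.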
  defp base64_sig64(url) do
    base64 = Base.url_encode64(url, @base64_opts)

    sig64 =
      base64
      |> signed_url()
      |> Base.url_encode64(@base64_opts)

    {base64, sig64}
  end
  def encode_url(url) do
    {base64, sig64} = base64_sig64(url)

    build_url(sig64, base64, filename(url))
  end

  def encode_preview_url(url, preview_params \\ []) do
    {base64, sig64} = base64_sig64(url)

    build_preview_url(sig64, base64, filename(url), preview_params)
  end
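
  # Returns `{:ok, original_url}` when `sig` is a valid signature (the HMAC-SHA1 of
  # the base64url-encoded URL, keyed with the endpoint's `secret_key_base`) for the
  # given encoded `url`, otherwise `{:error, :invalid_signature}`.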
  def decode_url(sig, url) do
    with {:ok, sig} <- Base.url_decode64(sig, @base64_opts),
         signature when signature == sig <- signed_url(url) do
      {:ok, Base.url_decode64!(url, @base64_opts)}
    else
      _ -> {:error, :invalid_signature}
    end
  end
  defp signed_url(url) do
    :crypto.hmac(:sha, Config.get([Web.Endpoint, :secret_key_base]), url)
  end
  def filename(url_or_path) do
    if path = URI.parse(url_or_path).path, do: Path.basename(path)
  end

  def base_url do
    Config.get([:media_proxy, :base_url], Web.base_url())
  end
  defp proxy_url(path, sig_base64, url_base64, filename) do
    [
      base_url(),
      path,
      sig_base64,
      url_base64,
      filename
    ]
    |> Enum.filter(& &1)
    |> Path.join()
  end

  def build_url(sig_base64, url_base64, filename \\ nil) do
    proxy_url("proxy", sig_base64, url_base64, filename)
  end

  def build_preview_url(sig_base64, url_base64, filename \\ nil, preview_params \\ []) do
    uri = proxy_url("proxy/preview", sig_base64, url_base64, filename)

    UriHelper.modify_uri_params(uri, preview_params)
  end
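
  # Ensures that the filename segment of the request path matches the basename of
  # the proxied URL; returns :ok, or {:wrong_filename, filename} on a mismatch.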
  def verify_request_path_and_url(
        %Plug.Conn{params: %{"filename" => _}, request_path: request_path},
        url
      ) do
    verify_request_path_and_url(request_path, url)
  end

  def verify_request_path_and_url(request_path, url) when is_binary(request_path) do
    filename = filename(url)

    if filename && not basename_matches?(request_path, filename) do
      {:wrong_filename, filename}
    else
      :ok
    end
  end

  def verify_request_path_and_url(_, _), do: :ok

  defp basename_matches?(path, filename) do
    basename = Path.basename(path)
    basename == filename or URI.decode(basename) == filename or URI.encode(basename) == filename
  end
end