# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.MediaProxy do
|
2019-07-14 23:01:32 +02:00
|
|
|
alias Pleroma.Config
|
2019-08-02 19:07:09 +02:00
|
|
|
alias Pleroma.Upload
|
2019-07-14 23:01:32 +02:00
|
|
|
alias Pleroma.Web
|
2020-06-14 20:02:57 +02:00
|
|
|
alias Pleroma.Web.MediaProxy.Invalidation
|
2017-11-22 19:06:07 +01:00
|
|
|
|
2019-07-14 23:01:32 +02:00
|
|
|
@base64_opts [padding: false]
|
2020-08-11 09:28:35 +02:00
|
|
|
@cache_table :banned_urls_cache
|
|
|
|
|
|
|
|
def cache_table, do: @cache_table
|
2018-11-20 17:46:54 +01:00
|
|
|
|
2020-06-17 20:13:55 +02:00
|
|
|
@spec in_banned_urls(String.t()) :: boolean()
|
2020-08-11 09:28:35 +02:00
|
|
|
def in_banned_urls(url), do: elem(Cachex.exists?(@cache_table, url(url)), 1)
|
2020-06-14 20:02:57 +02:00
|
|
|
|
2020-06-17 20:13:55 +02:00
|
|
|
def remove_from_banned_urls(urls) when is_list(urls) do
|
2020-08-11 09:28:35 +02:00
|
|
|
Cachex.execute!(@cache_table, fn cache ->
|
2020-06-14 20:02:57 +02:00
|
|
|
Enum.each(Invalidation.prepare_urls(urls), &Cachex.del(cache, &1))
|
|
|
|
end)
|
|
|
|
end
|
|
|
|
|
2020-06-17 20:13:55 +02:00
|
|
|
def remove_from_banned_urls(url) when is_binary(url) do
|
2020-08-11 09:28:35 +02:00
|
|
|
Cachex.del(@cache_table, url(url))
|
2020-06-14 20:02:57 +02:00
|
|
|
end
|
|
|
|
|
2020-06-17 20:13:55 +02:00
|
|
|
def put_in_banned_urls(urls) when is_list(urls) do
|
2020-08-11 09:28:35 +02:00
|
|
|
Cachex.execute!(@cache_table, fn cache ->
|
2020-06-14 20:02:57 +02:00
|
|
|
Enum.each(Invalidation.prepare_urls(urls), &Cachex.put(cache, &1, true))
|
|
|
|
end)
|
|
|
|
end
|
|
|
|
|
2020-06-17 20:13:55 +02:00
|
|
|
def put_in_banned_urls(url) when is_binary(url) do
|
2020-08-11 09:28:35 +02:00
|
|
|
Cachex.put(@cache_table, url(url), true)
|
2020-06-14 20:02:57 +02:00
|
|
|
end
|
|
|
|
|
2019-07-14 23:01:32 +02:00
|
|
|
def url(url) when is_nil(url) or url == "", do: nil
|
2019-02-03 18:44:18 +01:00
|
|
|
def url("/" <> _ = url), do: url
|
2017-12-12 12:30:24 +01:00
|
|
|
|
2017-11-22 19:06:07 +01:00
|
|
|
def url(url) do
|
2020-06-17 20:02:01 +02:00
|
|
|
if disabled?() or not url_proxiable?(url) do
|
2019-05-30 10:33:58 +02:00
|
|
|
url
|
|
|
|
else
|
|
|
|
encode_url(url)
|
|
|
|
end
|
|
|
|
end
|
2018-03-30 15:01:53 +02:00
|
|
|
|
2020-06-17 20:02:01 +02:00
|
|
|
@spec url_proxiable?(String.t()) :: boolean()
|
|
|
|
def url_proxiable?(url) do
|
2020-06-14 20:02:57 +02:00
|
|
|
if local?(url) or whitelisted?(url) do
|
|
|
|
false
|
|
|
|
else
|
|
|
|
true
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-07-14 23:01:32 +02:00
|
|
|
defp disabled?, do: !Config.get([:media_proxy, :enabled], false)
|
2018-12-07 21:44:04 +01:00
|
|
|
|
2019-05-30 10:33:58 +02:00
|
|
|
defp local?(url), do: String.starts_with?(url, Pleroma.Web.base_url())
|
2018-11-13 15:58:02 +01:00
|
|
|
|
2019-05-30 10:33:58 +02:00
|
|
|
defp whitelisted?(url) do
|
|
|
|
%{host: domain} = URI.parse(url)
|
|
|
|
|
2020-07-11 09:36:36 +02:00
|
|
|
mediaproxy_whitelist_domains =
|
|
|
|
[:media_proxy, :whitelist]
|
|
|
|
|> Config.get()
|
|
|
|
|> Enum.map(&maybe_get_domain_from_url/1)
|
|
|
|
|
|
|
|
whitelist_domains =
|
|
|
|
if base_url = Config.get([Upload, :base_url]) do
|
|
|
|
%{host: base_domain} = URI.parse(base_url)
|
|
|
|
[base_domain | mediaproxy_whitelist_domains]
|
2019-08-02 19:07:09 +02:00
|
|
|
else
|
2020-07-11 09:36:36 +02:00
|
|
|
mediaproxy_whitelist_domains
|
2019-08-02 19:07:09 +02:00
|
|
|
end
|
|
|
|
|
2020-07-11 09:36:36 +02:00
|
|
|
domain in whitelist_domains
|
|
|
|
end
|
2019-08-02 19:07:09 +02:00
|
|
|
|
2020-07-11 09:36:36 +02:00
|
|
|
defp maybe_get_domain_from_url("http" <> _ = url) do
|
|
|
|
URI.parse(url).host
|
2017-11-22 19:06:07 +01:00
|
|
|
end
|
|
|
|
|
2020-07-11 09:36:36 +02:00
|
|
|
defp maybe_get_domain_from_url(domain), do: domain
|
|
|
|
|
2019-04-26 01:11:47 +02:00
|
|
|
def encode_url(url) do
|
2019-07-07 10:28:40 +02:00
|
|
|
base64 = Base.url_encode64(url, @base64_opts)
|
2019-07-14 23:01:32 +02:00
|
|
|
|
|
|
|
sig64 =
|
|
|
|
base64
|
|
|
|
|> signed_url
|
|
|
|
|> Base.url_encode64(@base64_opts)
|
2019-04-26 01:11:47 +02:00
|
|
|
|
|
|
|
build_url(sig64, base64, filename(url))
|
|
|
|
end
|
|
|
|
|
2017-11-22 19:06:07 +01:00
|
|
|
def decode_url(sig, url) do
|
2019-07-14 23:01:32 +02:00
|
|
|
with {:ok, sig} <- Base.url_decode64(sig, @base64_opts),
|
|
|
|
signature when signature == sig <- signed_url(url) do
|
2017-11-22 19:06:07 +01:00
|
|
|
{:ok, Base.url_decode64!(url, @base64_opts)}
|
|
|
|
else
|
2019-07-14 23:01:32 +02:00
|
|
|
_ -> {:error, :invalid_signature}
|
2017-11-22 19:06:07 +01:00
|
|
|
end
|
|
|
|
end
|
2018-11-23 17:40:45 +01:00
|
|
|
|
2019-07-14 23:01:32 +02:00
|
|
|
defp signed_url(url) do
|
|
|
|
:crypto.hmac(:sha, Config.get([Web.Endpoint, :secret_key_base]), url)
|
|
|
|
end
|
|
|
|
|
2018-11-23 17:40:45 +01:00
|
|
|
def filename(url_or_path) do
|
|
|
|
if path = URI.parse(url_or_path).path, do: Path.basename(path)
|
|
|
|
end
|
|
|
|
|
|
|
|
def build_url(sig_base64, url_base64, filename \\ nil) do
|
|
|
|
[
|
2020-07-09 17:53:51 +02:00
|
|
|
Config.get([:media_proxy, :base_url], Web.base_url()),
|
2018-11-23 17:40:45 +01:00
|
|
|
"proxy",
|
|
|
|
sig_base64,
|
|
|
|
url_base64,
|
|
|
|
filename
|
|
|
|
]
|
2019-07-14 23:01:32 +02:00
|
|
|
|> Enum.filter(& &1)
|
2018-11-23 17:40:45 +01:00
|
|
|
|> Path.join()
|
|
|
|
end
|
2017-11-22 19:06:07 +01:00
|
|
|
end
|