# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.RichMedia.Parser do
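  # Default options for rich media fetches: use the :media connection pool
  # and cap the response body at 2 MB.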
  @options [
    pool: :media,
    max_body: 2_000_000
  ]

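  # The parser modules are read from config at runtime, so the list can be
  # changed without recompiling.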
  defp parsers do
    Pleroma.Config.get([:rich_media, :parsers])
  end

  def parse(nil), do: {:error, "No URL provided"}

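  # In the test environment the cache is bypassed and every call goes straight
  # to parse_url/1; otherwise results are cached per URL in :rich_media_cache.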
  if Pleroma.Config.get(:env) == :test do
    def parse(url), do: parse_url(url)
  else
    def parse(url) do
      try do
        Cachex.fetch!(:rich_media_cache, url, fn _ ->
          {:commit, parse_url(url)}
        end)
        |> set_ttl_based_on_image(url)
      rescue
        e ->
          {:error, "Cachex error: #{inspect(e)}"}
      end
    end
  end

  @doc """
  Sets the rich media cache expiration based on the expiration time of the image.

  To provide a TTL, adopt the `Pleroma.Web.RichMedia.Parser.TTL` behaviour:

  ## Example

      defmodule MyModule do
        @behaviour Pleroma.Web.RichMedia.Parser.TTL

        def ttl(data, url) do
          image_url = Map.get(data, :image)
          # parse the image URL and derive its expiration time,
          # returning the TTL as unix time
          parse_ttl_from_url(image_url)
        end
      end

  Then register the module in the config:

      config :pleroma, :rich_media,
        ttl_setters: [MyModule]
  """
  def set_ttl_based_on_image({:ok, data}, url) do
    with {:ok, nil} <- Cachex.ttl(:rich_media_cache, url),
         ttl when is_number(ttl) <- get_ttl_from_image(data, url) do
      Cachex.expire_at(:rich_media_cache, url, ttl * 1000)
      {:ok, data}
    else
      _ ->
        {:ok, data}
    end
  end

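  # Asks the configured :ttl_setters modules for a TTL. Each setter replaces
  # the {:ok, nil} accumulator; once the accumulator no longer matches
  # {:ok, _}, it is passed through the remaining setters unchanged.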
  defp get_ttl_from_image(data, url) do
    Pleroma.Config.get([:rich_media, :ttl_setters])
    |> Enum.reduce({:ok, nil}, fn
      module, {:ok, _ttl} ->
        module.ttl(data, url)

      _, error ->
        error
    end)
  end

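  # Fetches the page and runs it through the parser pipeline. When Hackney is
  # the Tesla adapter, Hackney-specific options (recv_timeout and with_body)
  # are merged into @options.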
  defp parse_url(url) do
    opts =
      if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Hackney do
        Keyword.merge(@options,
          recv_timeout: 2_000,
          with_body: true
        )
      else
        @options
      end

    try do
      {:ok, %Tesla.Env{body: html}} = Pleroma.HTTP.get(url, [], adapter: opts)

      html
      |> parse_html()
      |> maybe_parse()
      |> Map.put(:url, url)
      |> clean_parsed_data()
      |> check_parsed_data()
    rescue
      e ->
        {:error, "Parsing error: #{inspect(e)} #{inspect(__STACKTRACE__)}"}
    end
  end

  defp parse_html(html), do: Floki.parse_document!(html)

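  # Tries each configured parser in turn and stops at the first one that
  # returns a non-empty map of metadata.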
  defp maybe_parse(html) do
    Enum.reduce_while(parsers(), %{}, fn parser, acc ->
      case parser.parse(html, acc) do
        data when data != %{} -> {:halt, data}
        _ -> {:cont, acc}
      end
    end)
  end

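  # Parsed data is only considered valid when it includes a non-empty title.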
  defp check_parsed_data(%{title: title} = data)
       when is_binary(title) and byte_size(title) > 0 do
    {:ok, data}
  end

  defp check_parsed_data(data) do
    {:error, "Found metadata was invalid or incomplete: #{inspect(data)}"}
  end

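  # Drops any key/value pair that Jason cannot encode to JSON, keeping the
  # parsed metadata serializable.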
  defp clean_parsed_data(data) do
    data
    |> Enum.reject(fn {key, val} ->
      with {:ok, _} <- Jason.encode(%{key => val}) do
        false
      else
        _ -> true
      end
    end)
    |> Map.new()
  end
end