# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.RichMedia.Parser do
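  @moduledoc """
  Fetches a remote page and extracts rich media metadata from its HTML by
  trying each configured parser (OpenGraph, Twitter Card, OEmbed) in turn.
  Outside of the test environment, results are cached per URL via Cachex.

  Illustrative example only; the URL is a placeholder and the returned keys
  depend on the page and on which parser matched:

      Pleroma.Web.RichMedia.Parser.parse("https://example.com/article")
      #=> {:ok, %{title: "Example article"}}
  """
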
  @parsers [
    Pleroma.Web.RichMedia.Parsers.OGP,
    Pleroma.Web.RichMedia.Parsers.TwitterCard,
    Pleroma.Web.RichMedia.Parsers.OEmbed
  ]

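  # Options for the hackney adapter: use the :media connection pool, give up
  # receiving after 2 seconds, and cap response bodies at 2,000,000 bytes.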
  @hackney_options [
    pool: :media,
    recv_timeout: 2_000,
    max_body: 2_000_000,
    with_body: true
  ]

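  @doc """
  Parses rich media metadata for the given URL.

  Returns `{:ok, data}` when usable metadata is found, or `{:error, reason}`
  when no URL is given, the fetch fails, or the metadata is incomplete.
  """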
  def parse(nil), do: {:error, "No URL provided"}

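  # In the test environment parse synchronously; otherwise cache the result
  # per URL in the :rich_media_cache Cachex cache.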
  if Pleroma.Config.get(:env) == :test do
    def parse(url), do: parse_url(url)
  else
    def parse(url) do
      try do
        Cachex.fetch!(:rich_media_cache, url, fn _ ->
          {:commit, parse_url(url)}
        end)
      rescue
        e ->
          {:error, "Cachex error: #{inspect(e)}"}
      end
    end
  end

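  # Fetches the page over HTTP and runs the HTML through parsing, cleanup and
  # validation. Any raised exception is converted into an error tuple.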
  defp parse_url(url) do
    try do
      {:ok, %Tesla.Env{body: html}} = Pleroma.HTTP.get(url, [], adapter: @hackney_options)

      html
      |> maybe_parse()
      |> clean_parsed_data()
      |> check_parsed_data()
    rescue
      e ->
        {:error, "Parsing error: #{inspect(e)}"}
    end
  end

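  # Tries each parser from @parsers in order and halts at the first success;
  # returns an empty map if none of them produce metadata.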
  defp maybe_parse(html) do
    Enum.reduce_while(@parsers, %{}, fn parser, acc ->
      case parser.parse(html, acc) do
        {:ok, data} -> {:halt, data}
        {:error, _msg} -> {:cont, acc}
      end
    end)
  end

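  # Parsed data is only considered valid if it contains a non-empty title.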
  defp check_parsed_data(%{title: title} = data) when is_binary(title) and byte_size(title) > 0 do
    {:ok, data}
  end

  defp check_parsed_data(data) do
    {:error, "Found metadata was invalid or incomplete: #{inspect(data)}"}
  end

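  # Drops any key/value pair that cannot be encoded as JSON, so the remaining
  # map is guaranteed to be JSON-serializable.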
  defp clean_parsed_data(data) do
    data
    |> Enum.reject(fn {key, val} ->
      with {:ok, _} <- Jason.encode(%{key => val}) do
        false
      else
        _ -> true
      end
    end)
    |> Map.new()
  end
end