Pleroma/lib/pleroma/formatter.ex

# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Formatter do
  alias Pleroma.Emoji
  alias Pleroma.HTML
  alias Pleroma.User
  alias Pleroma.Web.MediaProxy

  @safe_mention_regex ~r/^(\s*(?<mentions>@.+?\s+)+)(?<rest>.*)/
  @markdown_characters_regex ~r/(`|\*|_|{|}|[|]|\(|\)|#|\+|-|\.|!)/
  @link_regex ~r{((?:http(s)?:\/\/)?[\w.-]+(?:\.[\w\.-]+)+[\w\-\._~%:/?#[\]@!\$&'\(\)\*\+,;=.]+)|[0-9a-z+\-\.]+:[0-9a-z$-_.+!*'(),]+}ui
  # credo:disable-for-previous-line Credo.Check.Readability.MaxLineLength

  @auto_linker_config hashtag: true,
                      hashtag_handler: &Pleroma.Formatter.hashtag_handler/4,
                      mention: true,
                      mention_handler: &Pleroma.Formatter.mention_handler/4
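
  # mention_handler/4 and hashtag_handler/4 are the callbacks registered in
  # @auto_linker_config above: AutoLinker invokes them for each mention/hashtag
  # candidate with (matched text, buffer, options, accumulator) and uses the
  # returned {replacement_html, acc}.
  #
  # Illustrative example (the nickname "lain" is hypothetical): for a known
  # user, "@lain" is rewritten to
  #   <span class='h-card'><a data-user='<id>' class='u-url mention' href='<ap_id>'>@<span>lain</span></a></span>
  # and {"@lain", %User{}} is added to acc.mentions; unknown nicknames are
  # passed through unchanged.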

  def mention_handler("@" <> nickname, buffer, opts, acc) do
    case User.get_cached_by_nickname(nickname) do
      %User{id: id} = user ->
        ap_id = get_ap_id(user)
        nickname_text = get_nickname_text(nickname, opts) |> maybe_escape(opts)

        link =
          "<span class='h-card'><a data-user='#{id}' class='u-url mention' href='#{ap_id}'>@<span>#{
            nickname_text
          }</span></a></span>"

        {link, %{acc | mentions: MapSet.put(acc.mentions, {"@" <> nickname, user})}}

      _ ->
        {buffer, acc}
    end
  end
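
  # Illustrative example for hashtag_handler/4 (tag name hypothetical):
  # "#Pleroma" becomes
  #   <a class='hashtag' data-tag='pleroma' href='<base_url>/tag/pleroma' rel='tag'>#Pleroma</a>
  # and {"#Pleroma", "pleroma"} is added to acc.tags.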

  def hashtag_handler("#" <> tag = tag_text, _buffer, _opts, acc) do
    tag = String.downcase(tag)
    url = "#{Pleroma.Web.base_url()}/tag/#{tag}"
    link = "<a class='hashtag' data-tag='#{tag}' href='#{url}' rel='tag'>#{tag_text}</a>"

    {link, %{acc | tags: MapSet.put(acc.tags, {tag_text, tag})}}
  end

  @doc """
  Parses a text and replaces plain-text links with HTML. Returns a tuple with the result text, mentions, and hashtags.

  If the 'safe_mention' option is given, only consecutive mentions at the start of the post are actually mentioned.
  """
  @spec linkify(String.t(), keyword()) ::
          {String.t(), [{String.t(), User.t()}], [{String.t(), String.t()}]}
  def linkify(text, options \\ []) do
    options = options ++ @auto_linker_config

    if options[:safe_mention] && Regex.named_captures(@safe_mention_regex, text) do
      %{"mentions" => mentions, "rest" => rest} = Regex.named_captures(@safe_mention_regex, text)
      acc = %{mentions: MapSet.new(), tags: MapSet.new()}

      {text_mentions, %{mentions: mentions}} = AutoLinker.link_map(mentions, acc, options)
      {text_rest, %{tags: tags}} = AutoLinker.link_map(rest, acc, options)

      {text_mentions <> text_rest, MapSet.to_list(mentions), MapSet.to_list(tags)}
    else
      acc = %{mentions: MapSet.new(), tags: MapSet.new()}
      {text, %{mentions: mentions, tags: tags}} = AutoLinker.link_map(text, acc, options)

      {text, MapSet.to_list(mentions), MapSet.to_list(tags)}
    end
  end
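
  # Illustrative usage of linkify/2 (nicknames and output are schematic, not verbatim):
  #
  #   {html, mentions, tags} = Pleroma.Formatter.linkify("@lain hello #pleroma")
  #   # html contains the mention and hashtag links produced by the handlers above,
  #   # mentions is [{"@lain", %User{}}] and tags is [{"#pleroma", "pleroma"}].
  #
  # With safe_mention: true, only the consecutive mentions at the start of the
  # post end up in the returned mentions list, as described in the @doc.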

  def emojify(text) do
    emojify(text, Emoji.get_all())
  end

  def emojify(text, nil), do: text

  def emojify(text, emoji, strip \\ false) do
    Enum.reduce(emoji, text, fn emoji_data, text ->
      emoji = HTML.strip_tags(elem(emoji_data, 0))
      file = HTML.strip_tags(elem(emoji_data, 1))

      html =
        if not strip do
          "<img height='32px' width='32px' alt='#{emoji}' title='#{emoji}' src='#{
            MediaProxy.url(file)
          }' />"
        else
          ""
        end

      String.replace(text, ":#{emoji}:", html) |> HTML.filter_tags()
    end)
  end
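
  # Illustrative behaviour of emojify/3 (the emoji name and file are hypothetical):
  # given an emoji entry {"firefox", "/emoji/firefox.png", ...}, every ":firefox:"
  # in the text is replaced with an <img> tag pointing at
  # MediaProxy.url("/emoji/firefox.png"); with strip set to true (as demojify/1
  # does below) the shortcode is removed instead.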

  def demojify(text) do
    emojify(text, Emoji.get_all(), true)
  end

  def demojify(text, nil), do: text

  def get_emoji(text) when is_binary(text) do
    Enum.filter(Emoji.get_all(), fn {emoji, _, _} -> String.contains?(text, ":#{emoji}:") end)
  end

  def get_emoji(_), do: []
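
  # get_emoji/1 returns the full emoji tuples whose :shortcode: appears in the
  # text, e.g. (hypothetical pack) get_emoji("hello :firefox:") would return
  # [{"firefox", "/emoji/firefox.png", ...}].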

  def html_escape({text, mentions, hashtags}, type) do
    {html_escape(text, type), mentions, hashtags}
  end

  def html_escape(text, "text/html") do
    HTML.filter_tags(text)
  end
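
  # For "text/plain" the text is split on @link_regex with include_captures: true,
  # so the resulting list alternates non-link and link chunks. Enum.map_every/3
  # with a step of 2 HTML-escapes the chunks at even positions (the plain text)
  # and leaves the captured link chunks untouched, presumably so URLs are not
  # mangled by escaping.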

  def html_escape(text, "text/plain") do
    Regex.split(@link_regex, text, include_captures: true)
    |> Enum.map_every(2, fn chunk ->
      {:safe, part} = Phoenix.HTML.html_escape(chunk)
      part
    end)
    |> Enum.join("")
  end

  def truncate(text, max_length \\ 200, omission \\ "...") do
    # Remove trailing whitespace
    text = Regex.replace(~r/([^ \t\r\n])([ \t]+$)/u, text, "\\g{1}")

    if String.length(text) < max_length do
      text
    else
      length_with_omission = max_length - String.length(omission)
      String.slice(text, 0, length_with_omission) <> omission
    end
  end
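
  # Illustrative example: truncate("hello world", 8) returns "hello..." (the
  # omission counts towards max_length, so 8 - 3 = 5 characters of text are kept).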

  defp get_ap_id(%User{info: %{source_data: %{"url" => url}}}) when is_binary(url), do: url
  defp get_ap_id(%User{ap_id: ap_id}), do: ap_id

  defp get_nickname_text(nickname, %{mentions_format: :full}), do: User.full_nickname(nickname)
  defp get_nickname_text(nickname, _), do: User.local_nickname(nickname)

  defp maybe_escape(str, %{mentions_escape: true}) do
    String.replace(str, @markdown_characters_regex, "\\\\\\1")
  end

  defp maybe_escape(str, _), do: str
end