# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.CommonAPI.Utils do
  import Pleroma.Web.Gettext
  import Pleroma.Web.ControllerHelper, only: [truthy_param?: 1]

  alias Calendar.Strftime
  alias Pleroma.Activity
  alias Pleroma.Config
  alias Pleroma.Conversation.Participation
  alias Pleroma.Formatter
  alias Pleroma.Object
  alias Pleroma.Repo
  alias Pleroma.User
  alias Pleroma.Web.ActivityPub.Utils
  alias Pleroma.Web.ActivityPub.Visibility
  alias Pleroma.Web.MediaProxy
  alias Pleroma.Web.Plugs.AuthenticationPlug

  require Logger
  require Pleroma.Constants

  def attachments_from_ids(%{media_ids: ids, descriptions: desc}) do
    attachments_from_ids_descs(ids, desc)
  end

  def attachments_from_ids(%{media_ids: ids}) do
    attachments_from_ids_no_descs(ids)
  end

  def attachments_from_ids(_), do: []

  def attachments_from_ids_no_descs([]), do: []

  def attachments_from_ids_no_descs(ids) do
    Enum.map(ids, fn media_id ->
      case Repo.get(Object, media_id) do
        %Object{data: data} -> data
        _ -> nil
      end
    end)
    |> Enum.reject(&is_nil/1)
  end

  def attachments_from_ids_descs([], _), do: []

  def attachments_from_ids_descs(ids, descs_str) do
    {_, descs} = Jason.decode(descs_str)

    Enum.map(ids, fn media_id ->
      case Repo.get(Object, media_id) do
        %Object{data: data} ->
          Map.put(data, "name", descs[media_id])

        _ ->
          nil
      end
    end)
    |> Enum.reject(&is_nil/1)
  end
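
  # Illustrative sketch (not in the upstream module): `descriptions` is expected to be a
  # JSON object mapping media ids to captions, so a call such as
  #
  #   attachments_from_ids(%{media_ids: ["1", "2"], descriptions: ~s({"1": "a cat"})})
  #
  # would look each id up in the object table and, for id "1", put "a cat" under the
  # "name" key of the returned attachment data. Ids with no matching Object are dropped.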

  @spec get_to_and_cc(
          User.t(),
          list(String.t()),
          Activity.t() | nil,
          String.t(),
          Participation.t() | nil
        ) :: {list(String.t()), list(String.t())}

  def get_to_and_cc(_, _, _, _, %Participation{} = participation) do
    participation = Repo.preload(participation, :recipients)
    {Enum.map(participation.recipients, & &1.ap_id), []}
  end

  def get_to_and_cc(user, mentioned_users, inReplyTo, "public", _) do
    to = [Pleroma.Constants.as_public() | mentioned_users]
    cc = [user.follower_address]

    if inReplyTo do
      {Enum.uniq([inReplyTo.data["actor"] | to]), cc}
    else
      {to, cc}
    end
  end

  def get_to_and_cc(user, mentioned_users, inReplyTo, "unlisted", _) do
    to = [user.follower_address | mentioned_users]
    cc = [Pleroma.Constants.as_public()]

    if inReplyTo do
      {Enum.uniq([inReplyTo.data["actor"] | to]), cc}
    else
      {to, cc}
    end
  end

  def get_to_and_cc(user, mentioned_users, inReplyTo, "private", _) do
    {to, cc} = get_to_and_cc(user, mentioned_users, inReplyTo, "direct", nil)
    {[user.follower_address | to], cc}
  end

  def get_to_and_cc(_user, mentioned_users, inReplyTo, "direct", _) do
    # If the OP is a DM already, add the implicit actor.
    if inReplyTo && Visibility.is_direct?(inReplyTo) do
      {Enum.uniq([inReplyTo.data["actor"] | mentioned_users]), []}
    else
      {mentioned_users, []}
    end
  end

  def get_to_and_cc(_user, mentions, _inReplyTo, {:list, _}, _), do: {mentions, []}
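
  # Illustrative sketch (not in the upstream module), assuming a `user` whose
  # follower_address is "https://example.com/users/alice/followers":
  #
  #   get_to_and_cc(user, [bob_ap_id], nil, "public", nil)
  #   # => {[as_public, bob_ap_id], ["https://example.com/users/alice/followers"]}
  #
  # "unlisted" swaps the two lists, "private" drops as_public entirely, and "direct"
  # addresses only the mentioned users.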

  def get_addressed_users(_, to) when is_list(to) do
    User.get_ap_ids_by_nicknames(to)
  end

  def get_addressed_users(mentioned_users, _), do: mentioned_users

  def maybe_add_list_data(activity_params, user, {:list, list_id}) do
    case Pleroma.List.get(list_id, user) do
      %Pleroma.List{} = list ->
        activity_params
        |> put_in([:additional, "bcc"], [list.ap_id])
        |> put_in([:additional, "listMessage"], list.ap_id)
        |> put_in([:object, "listMessage"], list.ap_id)

      _ ->
        activity_params
    end
  end

  def maybe_add_list_data(activity_params, _, _), do: activity_params

  def make_poll_data(%{"poll" => %{"expires_in" => expires_in}} = data)
      when is_binary(expires_in) do
    # In some cases mastofe sends out strings instead of integers
    data
    |> put_in(["poll", "expires_in"], String.to_integer(expires_in))
    |> make_poll_data()
  end

  def make_poll_data(%{poll: %{options: options, expires_in: expires_in}} = data)
      when is_list(options) do
    limits = Config.get([:instance, :poll_limits])

    with :ok <- validate_poll_expiration(expires_in, limits),
         :ok <- validate_poll_options_amount(options, limits),
         :ok <- validate_poll_options_length(options, limits) do
      {option_notes, emoji} =
        Enum.map_reduce(options, %{}, fn option, emoji ->
          note = %{
            "name" => option,
            "type" => "Note",
            "replies" => %{"type" => "Collection", "totalItems" => 0}
          }

          {note, Map.merge(emoji, Pleroma.Emoji.Formatter.get_emoji_map(option))}
        end)

      end_time =
        DateTime.utc_now()
        |> DateTime.add(expires_in)
        |> DateTime.to_iso8601()

      key = if truthy_param?(data.poll[:multiple]), do: "anyOf", else: "oneOf"
      poll = %{"type" => "Question", key => option_notes, "closed" => end_time}

      {:ok, {poll, emoji}}
    end
  end

  def make_poll_data(%{"poll" => poll}) when is_map(poll) do
    {:error, "Invalid poll"}
  end

  def make_poll_data(_data) do
    {:ok, {%{}, %{}}}
  end

  defp validate_poll_options_amount(options, %{max_options: max_options}) do
    if Enum.count(options) > max_options do
      {:error, "Poll can't contain more than #{max_options} options"}
    else
      :ok
    end
  end

  defp validate_poll_options_length(options, %{max_option_chars: max_option_chars}) do
    if Enum.any?(options, &(String.length(&1) > max_option_chars)) do
      {:error, "Poll options cannot be longer than #{max_option_chars} characters each"}
    else
      :ok
    end
  end

  defp validate_poll_expiration(expires_in, %{min_expiration: min, max_expiration: max}) do
    cond do
      expires_in > max -> {:error, "Expiration date is too far in the future"}
      expires_in < min -> {:error, "Expiration date is too soon"}
      true -> :ok
    end
  end
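
  # Illustrative sketch (not in the upstream module): poll params with atom keys, e.g.
  #
  #   make_poll_data(%{poll: %{options: ["yes", "no"], expires_in: 3600, multiple: false}})
  #
  # would, within the configured :poll_limits, return {:ok, {poll, emoji}} where `poll`
  # is a "Question" map keyed by "oneOf" (or "anyOf" when `multiple` is truthy) and
  # "closed" holds the ISO 8601 end time.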

  def make_content_html(
        status,
        attachments,
        data,
        visibility
      ) do
    attachment_links =
      data
      |> Map.get("attachment_links", Config.get([:instance, :attachment_links]))
      |> truthy_param?()

    content_type = get_content_type(data[:content_type])

    options =
      if visibility == "direct" && Config.get([:instance, :safe_dm_mentions]) do
        [safe_mention: true]
      else
        []
      end

    status
    |> format_input(content_type, options)
    |> maybe_add_attachments(attachments, attachment_links)
    |> maybe_add_nsfw_tag(data)
  end

  defp get_content_type(content_type) do
    if Enum.member?(Config.get([:instance, :allowed_post_formats]), content_type) do
      content_type
    else
      "text/plain"
    end
  end

  defp maybe_add_nsfw_tag({text, mentions, tags}, %{"sensitive" => sensitive})
       when sensitive in [true, "True", "true", "1"] do
    {text, mentions, [{"#nsfw", "nsfw"} | tags]}
  end

  defp maybe_add_nsfw_tag(data, _), do: data
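
  # Illustrative sketch (not in the upstream module): like format_input/3, the pipeline
  # above yields a {html, mentions, tags} tuple, so something along the lines of
  #
  #   make_content_html("hello @bob #cats", [], %{content_type: "text/plain"}, "public")
  #
  # would return escaped, linkified HTML plus the parsed mentions and hashtags, with an
  # extra {"#nsfw", "nsfw"} tag appended when the params mark the post as sensitive.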

  def make_context(_, %Participation{} = participation) do
    Repo.preload(participation, :conversation).conversation.ap_id
  end

  def make_context(%Activity{data: %{"context" => context}}, _), do: context
  def make_context(_, _), do: Utils.generate_context_id()

  def maybe_add_attachments(parsed, _attachments, false = _no_links), do: parsed

  def maybe_add_attachments({text, mentions, tags}, attachments, _no_links) do
    text = add_attachments(text, attachments)
    {text, mentions, tags}
  end

  def add_attachments(text, attachments) do
    attachment_text = Enum.map(attachments, &build_attachment_link/1)
    Enum.join([text | attachment_text], "<br>")
  end

  defp build_attachment_link(%{"url" => [%{"href" => href} | _]} = attachment) do
    name = attachment["name"] || URI.decode(Path.basename(href))
    href = MediaProxy.url(href)
    "<a href=\"#{href}\" class='attachment'>#{shortname(name)}</a>"
  end

  defp build_attachment_link(_), do: ""
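
  # Illustrative sketch (not in the upstream module): given attachment data such as
  #
  #   %{"name" => "cat.png", "url" => [%{"href" => "https://example.com/media/cat.png"}]}
  #
  # build_attachment_link/1 would emit an <a> tag pointing at the (media-proxied) href
  # with the shortened name as link text, and add_attachments/2 joins these onto the
  # status text with "<br>".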

  def format_input(text, format, options \\ [])

  @doc """
  Formats input text (plain text, BBCode, HTML, or Markdown) into HTML,
  returning the rendered text together with the parsed mentions and tags.
  """
  def format_input(text, "text/plain", options) do
    text
    |> Formatter.html_escape("text/plain")
    |> Formatter.linkify(options)
    |> (fn {text, mentions, tags} ->
          {String.replace(text, ~r/\r?\n/, "<br>"), mentions, tags}
        end).()
  end

  def format_input(text, "text/bbcode", options) do
    text
    |> String.replace(~r/\r/, "")
    |> Formatter.html_escape("text/plain")
    |> BBCode.to_html()
    |> (fn {:ok, html} -> html end).()
    |> Formatter.linkify(options)
  end

  def format_input(text, "text/html", options) do
    text
    |> Formatter.html_escape("text/html")
    |> Formatter.linkify(options)
  end

  def format_input(text, "text/markdown", options) do
    text
    |> Formatter.mentions_escape(options)
    |> Earmark.as_html!(%Earmark.Options{renderer: Pleroma.EarmarkRenderer})
    |> Formatter.linkify(options)
    |> Formatter.html_escape("text/html")
  end
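
  # Illustrative sketch (not in the upstream module):
  #
  #   format_input("*hi* @bob", "text/markdown")
  #   # => {rendered_html, mentions, tags}
  #
  # where `mentions` and `tags` are the lists produced by Formatter.linkify/2 and
  # `rendered_html` is the escaped, linkified markup.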

  def make_note_data(
        actor,
        to,
        context,
        content_html,
        attachments,
        in_reply_to,
        tags,
        summary \\ nil,
        cc \\ [],
        sensitive \\ false,
        extra_params \\ %{}
      ) do
    %{
      "type" => "Note",
      "to" => to,
      "cc" => cc,
      "content" => content_html,
      "summary" => summary,
      "sensitive" => truthy_param?(sensitive),
      "context" => context,
      "attachment" => attachments,
      "actor" => actor,
      "tag" => Keyword.values(tags) |> Enum.uniq()
    }
    |> add_in_reply_to(in_reply_to)
    |> Map.merge(extra_params)
  end

  defp add_in_reply_to(object, nil), do: object

  defp add_in_reply_to(object, in_reply_to) do
    with %Object{} = in_reply_to_object <- Object.normalize(in_reply_to) do
      Map.put(object, "inReplyTo", in_reply_to_object.data["id"])
    else
      _ -> object
    end
  end
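
  # Illustrative sketch (not in the upstream module): the result is a plain map, e.g.
  #
  #   make_note_data(actor_ap_id, [to], context, "<p>hi</p>", [], nil, [])
  #   # => %{"type" => "Note", "actor" => actor_ap_id, "to" => [to], ...}
  #
  # with "inReplyTo" added only when the reply target resolves to a known Object, and
  # any `extra_params` merged over the defaults.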

  def format_naive_asctime(date) do
    date |> DateTime.from_naive!("Etc/UTC") |> format_asctime
  end

  def format_asctime(date) do
    Strftime.strftime!(date, "%a %b %d %H:%M:%S %z %Y")
  end

  def date_to_asctime(date) when is_binary(date) do
    with {:ok, date, _offset} <- DateTime.from_iso8601(date) do
      format_asctime(date)
    else
      _e ->
        Logger.warn("Date #{date} in wrong format, must be ISO 8601")
        ""
    end
  end

  def date_to_asctime(date) do
    Logger.warn("Date #{date} in wrong format, must be ISO 8601")
    ""
  end

  def to_masto_date(%NaiveDateTime{} = date) do
    date
    |> NaiveDateTime.to_iso8601()
    |> String.replace(~r/(\.\d+)?$/, ".000Z", global: false)
  end

  def to_masto_date(date) when is_binary(date) do
    with {:ok, date} <- NaiveDateTime.from_iso8601(date) do
      to_masto_date(date)
    else
      _ -> ""
    end
  end

  def to_masto_date(_), do: ""
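
  # Illustrative sketch (not in the upstream module):
  #
  #   to_masto_date(~N[2020-01-01 12:00:00])
  #   # => "2020-01-01T12:00:00.000Z"
  #
  # Unparseable input falls through to "".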

  defp shortname(name) do
    with max_length when max_length > 0 <-
           Config.get([Pleroma.Upload, :filename_display_max_length], 30),
         true <- String.length(name) > max_length do
      String.slice(name, 0..max_length) <> "…"
    else
      _ -> name
    end
  end
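
  # Illustrative sketch (not in the upstream module): with the default display limit of
  # 30 characters, a 50-character filename is cut down and suffixed with "…", while
  # shorter names (or a non-positive limit, which disables truncation) pass through
  # unchanged.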

  @spec confirm_current_password(User.t(), String.t()) :: {:ok, User.t()} | {:error, String.t()}
  def confirm_current_password(user, password) do
    with %User{local: true} = db_user <- User.get_cached_by_id(user.id),
         true <- AuthenticationPlug.checkpw(password, db_user.password_hash) do
      {:ok, db_user}
    else
      _ -> {:error, dgettext("errors", "Invalid password.")}
    end
  end
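
  # Illustrative sketch (not in the upstream module): callers pattern match on the result,
  # e.g.
  #
  #   case confirm_current_password(user, submitted_password) do
  #     {:ok, user} -> ...        # password matches, proceed with the sensitive change
  #     {:error, message} -> ...  # localized "Invalid password." message
  #   end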

  def maybe_notify_to_recipients(
        recipients,
        %Activity{data: %{"to" => to, "type" => _type}} = _activity
      ) do
    recipients ++ to
  end

  def maybe_notify_to_recipients(recipients, _), do: recipients

  def maybe_notify_mentioned_recipients(
        recipients,
        %Activity{data: %{"to" => _to, "type" => type} = data} = activity
      )
      when type == "Create" do
    object = Object.normalize(activity, false)

    object_data =
      cond do
        not is_nil(object) ->
          object.data

        is_map(data["object"]) ->
          data["object"]

        true ->
          %{}
      end

    tagged_mentions = maybe_extract_mentions(object_data)

    recipients ++ tagged_mentions
  end

  def maybe_notify_mentioned_recipients(recipients, _), do: recipients

  # Do not notify subscribers if the author is making a reply
  def maybe_notify_subscribers(recipients, %Activity{
        object: %Object{data: %{"inReplyTo" => _ap_id}}
      }) do
    recipients
  end

  def maybe_notify_subscribers(
        recipients,
        %Activity{data: %{"actor" => actor, "type" => type}} = activity
      )
      when type == "Create" do
    with %User{} = user <- User.get_cached_by_ap_id(actor) do
      subscriber_ids =
        user
        |> User.subscriber_users()
        |> Enum.filter(&Visibility.visible_for_user?(activity, &1))
        |> Enum.map(& &1.ap_id)

      recipients ++ subscriber_ids
    else
      _e -> recipients
    end
  end

  def maybe_notify_subscribers(recipients, _), do: recipients

  def maybe_notify_followers(recipients, %Activity{data: %{"type" => "Move"}} = activity) do
    with %User{} = user <- User.get_cached_by_ap_id(activity.actor) do
      user
      |> User.get_followers()
      |> Enum.map(& &1.ap_id)
      |> Enum.concat(recipients)
    else
      _e -> recipients
    end
  end

  def maybe_notify_followers(recipients, _), do: recipients

  def maybe_extract_mentions(%{"tag" => tag}) do
    tag
    |> Enum.filter(fn x -> is_map(x) && x["type"] == "Mention" end)
    |> Enum.map(fn x -> x["href"] end)
    |> Enum.uniq()
  end

  def maybe_extract_mentions(_), do: []
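
  # Illustrative sketch (not in the upstream module): given object data such as
  #
  #   %{"tag" => [%{"type" => "Mention", "href" => "https://example.com/users/bob"},
  #               %{"type" => "Hashtag", "name" => "#cats"}]}
  #
  # maybe_extract_mentions/1 returns ["https://example.com/users/bob"]; anything without
  # a "tag" list yields [].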

  def make_report_content_html(nil), do: {:ok, {nil, [], []}}

  def make_report_content_html(comment) do
    max_size = Config.get([:instance, :max_report_comment_size], 1000)

    if String.length(comment) <= max_size do
      {:ok, format_input(comment, "text/plain")}
    else
      {:error,
       dgettext("errors", "Comment must be up to %{max_size} characters", max_size: max_size)}
    end
  end

  def get_report_statuses(%User{ap_id: actor}, %{status_ids: status_ids})
      when is_list(status_ids) do
    {:ok, Activity.all_by_actor_and_id(actor, status_ids)}
  end

  def get_report_statuses(_, _), do: {:ok, nil}

  # Mostly DEPRECATED: context objects are now created at insertion time.
  def context_to_conversation_id(context) do
    with %Object{id: id} <- Object.get_cached_by_ap_id(context) do
      id
    else
      _e ->
        changeset = Object.context_mapping(context)

        case Repo.insert(changeset) do
          {:ok, %{id: id}} ->
            id

          # This should be solved by an upsert, but it seems ecto
          # has problems accessing the constraint inside the jsonb.
          {:error, _} ->
            Object.get_cached_by_ap_id(context).id
        end
    end
  end

  def conversation_id_to_context(id) do
    with %Object{data: %{"id" => context}} <- Repo.get(Object, id) do
      context
    else
      _e ->
        {:error, dgettext("errors", "No such conversation")}
    end
  end

  def validate_character_limit("" = _full_payload, [] = _attachments) do
    {:error, dgettext("errors", "Cannot post an empty status without attachments")}
  end

  def validate_character_limit(full_payload, _attachments) do
    limit = Config.get([:instance, :limit])
    length = String.length(full_payload)

    if length <= limit do
      :ok
    else
      {:error, dgettext("errors", "The status is over the character limit")}
    end
  end
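
  # Illustrative sketch (not in the upstream module), assuming an :instance, :limit of 5000:
  #
  #   validate_character_limit("hello world", [])               # => :ok
  #   validate_character_limit("", [])                          # => {:error, "Cannot post an empty status..."}
  #   validate_character_limit(String.duplicate("a", 5001), []) # => {:error, "The status is over the character limit"}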
end