2018-12-23 21:04:54 +01:00
|
|
|
# Pleroma: A lightweight social networking server
|
2020-03-02 06:08:45 +01:00
|
|
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
2018-12-23 21:04:54 +01:00
|
|
|
# SPDX-License-Identifier: AGPL-3.0-only
|
|
|
|
|
2018-02-15 20:00:06 +01:00
|
|
|
defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
|
|
|
@moduledoc """
|
|
|
|
A module to handle coding from internal to wire ActivityPub and back.
|
|
|
|
"""
|
2019-02-09 16:16:26 +01:00
|
|
|
alias Pleroma.Activity
|
2019-10-10 21:35:32 +02:00
|
|
|
alias Pleroma.FollowingRelationship
|
2019-02-09 16:16:26 +01:00
|
|
|
alias Pleroma.Object
|
2019-04-17 13:52:01 +02:00
|
|
|
alias Pleroma.Object.Containment
|
2019-02-09 16:16:26 +01:00
|
|
|
alias Pleroma.Repo
|
2019-03-05 03:52:23 +01:00
|
|
|
alias Pleroma.User
|
2019-02-09 16:16:26 +01:00
|
|
|
alias Pleroma.Web.ActivityPub.ActivityPub
|
|
|
|
alias Pleroma.Web.ActivityPub.Utils
|
2019-02-22 13:29:52 +01:00
|
|
|
alias Pleroma.Web.ActivityPub.Visibility
|
2019-06-29 19:04:50 +02:00
|
|
|
alias Pleroma.Web.Federator
|
2019-08-31 18:08:56 +02:00
|
|
|
alias Pleroma.Workers.TransmogrifierWorker
|
2018-02-15 20:00:06 +01:00
|
|
|
|
2018-02-21 22:21:40 +01:00
|
|
|
import Ecto.Query
|
|
|
|
|
2018-02-23 15:00:41 +01:00
|
|
|
require Logger
|
2019-07-29 04:43:19 +02:00
|
|
|
require Pleroma.Constants
|
2018-02-23 15:00:41 +01:00
|
|
|
|
2018-02-15 20:00:06 +01:00
|
|
|
@doc """
|
|
|
|
Modifies an incoming AP object (mastodon format) to our internal format.
|
|
|
|
"""
|
2019-06-29 19:04:50 +02:00
|
|
|
# Normalizes an incoming remote AP object (Mastodon wire format) into our
# internal representation by threading it through each fix_* step.
# `options` is forwarded to the steps that may fetch remote data
# (currently carries :depth for reply-thread distance limiting).
def fix_object(object, options \\ []) do
  object
  |> strip_internal_fields
  |> fix_actor
  |> fix_url
  |> fix_attachments
  |> fix_context
  |> fix_in_reply_to(options)
  |> fix_emoji
  |> fix_tag
  |> fix_content_map
  |> fix_addressing
  |> fix_summary
  |> fix_type(options)
end
|
|
|
|
|
|
|
|
# Guarantees "summary" is always a string: nil or absent becomes "",
# any other value is left untouched.
def fix_summary(%{"summary" => nil} = object), do: Map.put(object, "summary", "")

# A non-nil summary is already usable — nothing to do.
def fix_summary(%{"summary" => _} = object), do: object

# No "summary" key at all: default it to the empty string.
def fix_summary(object), do: Map.put(object, "summary", "")
|
2018-08-14 19:05:11 +02:00
|
|
|
|
|
|
|
# Coerces the addressing field `field` ("to"/"cc"/"bto"/"bcc") into a
# list: a lone string is wrapped, a missing/nil value becomes [], and
# anything else (normally an existing list) passes through unchanged.
def fix_addressing_list(map, field) do
  case map[field] do
    value when is_binary(value) -> Map.put(map, field, [value])
    nil -> Map.put(map, field, [])
    _ -> map
  end
end
|
|
|
|
|
2019-06-01 05:26:45 +02:00
|
|
|
# Keeps only explicitly mentioned actors in "to" and demotes everyone
# else to "cc". Foreign "/followers" collections (other than the
# author's own) are dropped from the final cc list.
def fix_explicit_addressing(
      %{"to" => to, "cc" => cc} = object,
      explicit_mentions,
      follower_collection
    ) do
  {explicit_to, implicit_to} = Enum.split_with(to, &(&1 in explicit_mentions))

  final_cc =
    Enum.uniq(
      Enum.reject(cc ++ implicit_to, fn addr ->
        String.ends_with?(addr, "/followers") and addr != follower_collection
      end)
    )

  %{object | "to" => explicit_to, "cc" => final_cc}
end

# Objects without both "to" and "cc" are left as-is.
def fix_explicit_addressing(object, _explicit_mentions, _followers_collection), do: object
|
2018-11-17 17:05:41 +01:00
|
|
|
|
2018-12-23 16:35:49 +01:00
|
|
|
# if directMessage flag is set to true, leave the addressing alone
def fix_explicit_addressing(%{"directMessage" => true} = object), do: object

# Splits explicit mentions out of "to" and demotes the remainder to "cc".
# as:Public and the author's follower collection are treated as
# "explicit" so they are never demoted.
def fix_explicit_addressing(object) do
  explicit_mentions = Utils.determine_explicit_mentions(object)

  # NOTE(review): raises a MatchError if the actor can't be resolved from
  # the cache — presumably the actor is always known by the time this
  # runs; confirm against callers.
  %User{follower_address: follower_collection} =
    object
    |> Containment.get_actor()
    |> User.get_cached_by_ap_id()

  explicit_mentions =
    explicit_mentions ++
      [
        Pleroma.Constants.as_public(),
        follower_collection
      ]

  fix_explicit_addressing(object, explicit_mentions, follower_collection)
end
|
|
|
|
|
2019-03-19 18:27:42 +01:00
|
|
|
# if as:Public is addressed, then make sure the followers collection is also addressed
# so that the activities will be delivered to local users.
def fix_implicit_addressing(%{"to" => to, "cc" => cc} = object, followers_collection) do
  recipients = to ++ cc

  cond do
    # Followers are already addressed — nothing to add.
    followers_collection in recipients ->
      object

    # Public in cc: append the followers collection to "to".
    Pleroma.Constants.as_public() in cc ->
      Map.put(object, "to", to ++ [followers_collection])

    # Public in to: append the followers collection to "cc".
    Pleroma.Constants.as_public() in to ->
      Map.put(object, "cc", cc ++ [followers_collection])

    # Not a public activity — leave the addressing alone.
    true ->
      object
  end
end

def fix_implicit_addressing(object, _), do: object
|
|
|
|
|
2018-12-23 16:35:49 +01:00
|
|
|
# Normalizes all addressing on the object: coerces "to"/"cc"/"bto"/"bcc"
# into lists, separates explicit mentions from implicit recipients, and
# ensures the author's followers collection is addressed whenever
# as:Public is.
def fix_addressing(object) do
  # NOTE(review): crashes if the actor can't be fetched — assumed to have
  # been validated upstream; confirm against callers.
  {:ok, %User{} = user} = User.get_or_fetch_by_ap_id(object["actor"])
  followers_collection = User.ap_followers(user)

  object
  |> fix_addressing_list("to")
  |> fix_addressing_list("cc")
  |> fix_addressing_list("bto")
  |> fix_addressing_list("bcc")
  |> fix_explicit_addressing()
  |> fix_implicit_addressing(followers_collection)
end
|
|
|
|
|
2018-07-12 18:37:42 +02:00
|
|
|
# Derives "actor" from "attributedTo" (which may arrive as a string, a
# map, or a list on the wire) so downstream code can rely on "actor".
def fix_actor(%{"attributedTo" => actor} = object) do
  resolved_actor = Containment.get_actor(%{"actor" => actor})
  Map.put(object, "actor", resolved_actor)
end
|
|
|
|
|
2019-06-29 19:04:50 +02:00
|
|
|
def fix_in_reply_to(object, options \\ [])

# Resolves the replied-to object: keeps the raw reference in
# "inReplyToAtomUri" and, when the referenced object can be fetched
# within the configured federation thread distance, rewrites
# "inReplyTo", "conversation" and "context" to match it.
def fix_in_reply_to(%{"inReplyTo" => in_reply_to} = object, options)
    when not is_nil(in_reply_to) do
  in_reply_to_id = prepare_in_reply_to(in_reply_to)
  object = Map.put(object, "inReplyToAtomUri", in_reply_to_id)
  # Each hop down a reply thread increases the depth by one.
  depth = (options[:depth] || 0) + 1

  if Federator.allowed_thread_distance?(depth) do
    with {:ok, replied_object} <- get_obj_helper(in_reply_to_id, options),
         %Activity{} <- Activity.get_create_by_object_ap_id(replied_object.data["id"]) do
      object
      |> Map.put("inReplyTo", replied_object.data["id"])
      |> Map.put("inReplyToAtomUri", object["inReplyToAtomUri"] || in_reply_to_id)
      |> Map.put("conversation", replied_object.data["context"] || object["conversation"])
      |> Map.put("context", replied_object.data["context"] || object["conversation"])
    else
      e ->
        # Fetch failures are non-fatal: keep the object, just without the
        # resolved thread information.
        Logger.error("Couldn't fetch #{inspect(in_reply_to_id)}, error: #{inspect(e)}")
        object
    end
  else
    # Beyond the allowed thread distance: don't fetch, keep the object.
    object
  end
end

def fix_in_reply_to(object, _options), do: object
|
2018-02-25 10:56:01 +01:00
|
|
|
|
2019-09-10 15:43:10 +02:00
|
|
|
# Extracts a single AP id string from the various wire shapes of
# "inReplyTo": a bare id string, an object carrying an "id", or a list
# whose first element is an id. Anything unusable collapses to "".
defp prepare_in_reply_to(in_reply_to) when is_bitstring(in_reply_to), do: in_reply_to

defp prepare_in_reply_to(%{"id" => id}) when is_bitstring(id), do: id

defp prepare_in_reply_to([first | _]) when is_bitstring(first), do: first

defp prepare_in_reply_to(_), do: ""
|
|
|
|
|
2018-02-19 10:39:03 +01:00
|
|
|
# Guarantees the object carries both "context" and "conversation", set to
# the same value; a fresh context id is generated when neither exists.
def fix_context(object) do
  context = object["context"] || object["conversation"] || Utils.generate_context_id()
  Map.merge(object, %{"context" => context, "conversation" => context})
end
|
|
|
|
|
2018-09-21 12:57:31 +02:00
|
|
|
# Normalizes attachments: each entry gets a "mediaType" (falling back to
# the legacy "mimeType") and a "url" in Link-object form (falling back
# to the legacy "href").
def fix_attachments(%{"attachment" => attachment} = object) when is_list(attachment) do
  attachments =
    for data <- attachment do
      media_type = data["mediaType"] || data["mimeType"]
      href = data["url"] || data["href"]

      Map.merge(data, %{
        "mediaType" => media_type,
        "url" => [%{"type" => "Link", "mediaType" => media_type, "href" => href}]
      })
    end

  Map.put(object, "attachment", attachments)
end

# A single attachment map: wrap it in a list and re-run.
def fix_attachments(%{"attachment" => attachment} = object) when is_map(attachment) do
  fix_attachments(Map.put(object, "attachment", [attachment]))
end

def fix_attachments(object), do: object
|
2018-09-21 12:57:31 +02:00
|
|
|
|
2018-11-01 09:56:37 +01:00
|
|
|
# Normalizes "url" into a plain href string, handling the map, list and
# media-object (Video/Audio) wire forms.
def fix_url(%{"url" => url} = object) when is_map(url) do
  Map.put(object, "url", url["href"])
end

# For media objects the first url entry is the media file itself (kept as
# an attachment) and the text/html entry is the canonical page url.
def fix_url(%{"type" => object_type, "url" => url} = object)
    when object_type in ["Video", "Audio"] and is_list(url) do
  html_entry = Enum.find(url, fn entry -> is_map(entry) and entry["mimeType"] == "text/html" end)

  object
  |> Map.put("attachment", [List.first(url)])
  |> Map.put("url", html_entry["href"])
end

def fix_url(%{"type" => object_type, "url" => url} = object)
    when object_type != "Video" and is_list(url) do
  url_string =
    case List.first(url) do
      first when is_bitstring(first) -> first
      first when is_map(first) -> first["href"] || ""
      _ -> ""
    end

  Map.put(object, "url", url_string)
end

def fix_url(object), do: object
|
|
|
|
|
2018-09-21 14:36:29 +02:00
|
|
|
# Collects custom emoji out of the "tag" list into the "emoji" map
# (shortcode => icon url). Existing emoji are merged in so both Mastodon
# and Pleroma wire formats are supported.
def fix_emoji(%{"tag" => tags} = object) when is_list(tags) do
  emoji =
    tags
    |> Enum.filter(fn data -> data["type"] == "Emoji" and data["icon"] end)
    |> Map.new(fn data -> {String.trim(data["name"], ":"), data["icon"]["url"]} end)

  Map.put(object, "emoji", Map.merge(object["emoji"] || %{}, emoji))
end

# Single Emoji tag given as a bare map instead of a list.
def fix_emoji(%{"tag" => %{"type" => "Emoji"} = tag} = object) do
  shortcode = String.trim(tag["name"], ":")
  Map.put(object, "emoji", %{shortcode => tag["icon"]["url"]})
end

def fix_emoji(object), do: object
|
2018-09-21 14:36:29 +02:00
|
|
|
|
2018-09-21 14:46:49 +02:00
|
|
|
# Appends the bare hashtag names (without the leading "#") to the "tag"
# list so they sit alongside the original Hashtag objects.
def fix_tag(%{"tag" => tag} = object) when is_list(tag) do
  hashtag_names =
    for data <- tag, data["type"] == "Hashtag", data["name"] do
      String.slice(data["name"], 1..-1)
    end

  Map.put(object, "tag", tag ++ hashtag_names)
end

# A single Hashtag given as a bare map: pair it with its bare name.
def fix_tag(%{"tag" => %{"type" => "Hashtag", "name" => hashtag} = tag} = object) do
  Map.put(object, "tag", [tag, String.slice(hashtag, 1..-1)])
end

# Any other single tag map is just wrapped in a list.
def fix_tag(%{"tag" => %{} = tag} = object), do: Map.put(object, "tag", [tag])

def fix_tag(object), do: object
|
2018-09-21 14:46:49 +02:00
|
|
|
|
2018-06-18 23:51:22 +02:00
|
|
|
# content map usually only has one language so this will do for now.
# Copies the first entry of "contentMap" into "content".
#
# Fix: the previous version crashed with a MatchError on an empty
# contentMap (`Enum.at/2` returned nil, which failed the `{_, content}`
# match). An empty or non-map "contentMap" now falls through unchanged.
def fix_content_map(%{"contentMap" => content_map} = object)
    when is_map(content_map) and map_size(content_map) > 0 do
  # Maps enumerate as {key, value} tuples; take the first language entry.
  {_lang, content} = Enum.at(content_map, 0)
  Map.put(object, "content", content)
end

def fix_content_map(object), do: object
|
|
|
|
|
2019-06-29 19:04:50 +02:00
|
|
|
def fix_type(object, options \\ [])

# A reply carrying a "name" may be an answer to a poll: when the
# replied-to object (fetched within the allowed thread distance) is a
# Question, retype this object as an "Answer".
def fix_type(%{"inReplyTo" => reply_id, "name" => _} = object, options)
    when is_binary(reply_id) do
  with true <- Federator.allowed_thread_distance?(options[:depth]),
       {:ok, %{data: %{"type" => "Question"} = _} = _} <- get_obj_helper(reply_id, options) do
    Map.put(object, "type", "Answer")
  else
    # Not a poll answer (or not fetchable) — keep the original type.
    _ -> object
  end
end

def fix_type(object, _), do: object
|
2019-05-22 20:17:57 +02:00
|
|
|
|
2018-12-09 10:12:48 +01:00
|
|
|
# Mastodon (<= 2.3) sends Accept/Reject objects whose id contains
# "follows" but does not reference an activity we can look up. Fall back
# to the latest Follow between the (local) follower and `followed`.
defp mastodon_follow_hack(%{"id" => id, "actor" => follower_id}, followed) do
  with true <- id =~ "follows",
       %User{local: true} = follower <- User.get_cached_by_ap_id(follower_id),
       %Activity{} = activity <- Utils.fetch_latest_follow(follower, followed) do
    {:ok, activity}
  else
    _ -> {:error, nil}
  end
end

defp mastodon_follow_hack(_, _), do: {:error, nil}
|
|
|
|
|
|
|
|
# Resolves the Follow activity referenced by an incoming Accept/Reject.
# Falls back to mastodon_follow_hack/2 when the referenced activity is
# unknown locally.
defp get_follow_activity(follow_object, followed) do
  with object_id when not is_nil(object_id) <- Utils.get_ap_id(follow_object),
       {_, %Activity{} = activity} <- {:activity, Activity.get_by_ap_id(object_id)} do
    {:ok, activity}
  else
    # Can't find the activity. This might a Mastodon 2.3 "Accept"
    {:activity, nil} ->
      mastodon_follow_hack(follow_object, followed)

    _ ->
      {:error, nil}
  end
end
|
|
|
|
|
2019-09-10 15:43:10 +02:00
|
|
|
# Reduce the object list to find the reported user: returns the first
# entry that resolves to a cached user, or nil when none does.
defp get_reported(objects) do
  Enum.find_value(objects, fn ap_id ->
    case User.get_cached_by_ap_id(ap_id) do
      %User{} = user -> user
      _ -> nil
    end
  end)
end
|
|
|
|
|
2019-06-29 19:04:50 +02:00
|
|
|
def handle_incoming(data, options \\ [])

# Flag objects are placed ahead of the ID check because Mastodon 2.8 and earlier send them
# with nil ID.
# Builds a local report (flag) from the remote Flag activity: the first
# resolvable user in "object" is the reported account, the remaining
# entries are the reported statuses.
def handle_incoming(%{"type" => "Flag", "object" => objects, "actor" => actor} = data, _options) do
  with context <- data["context"] || Utils.generate_context_id(),
       content <- data["content"] || "",
       %User{} = actor <- User.get_cached_by_ap_id(actor),
       # Reduce the object list to find the reported user.
       %User{} = account <- get_reported(objects),
       # Remove the reported user from the object list.
       statuses <- Enum.filter(objects, fn ap_id -> ap_id != account.ap_id end) do
    %{
      actor: actor,
      context: context,
      account: account,
      statuses: statuses,
      content: content,
      additional: %{"cc" => [account.ap_id]}
    }
    |> ActivityPub.flag()
  end
end
|
|
|
|
|
2018-08-23 03:23:02 +02:00
|
|
|
# disallow objects with bogus IDs
def handle_incoming(%{"id" => nil}, _options), do: :error
def handle_incoming(%{"id" => ""}, _options), do: :error
# length of https:// = 8, should validate better, but good enough for now.
def handle_incoming(%{"id" => id}, _options) when is_binary(id) and byte_size(id) < 8,
  do: :error
|
2018-08-23 03:23:02 +02:00
|
|
|
|
2018-02-15 20:00:06 +01:00
|
|
|
# TODO: validate those with a Ecto scheme
# - tags
# - emoji
# Creates a status (or other supported object type) from a remote Create
# activity: normalizes the embedded object, resolves the actor, persists
# the activity, and — within the allowed thread distance — enqueues
# fetches for the object's replies.
def handle_incoming(
      %{"type" => "Create", "object" => %{"type" => objtype} = object} = data,
      options
    )
    when objtype in ["Article", "Event", "Note", "Video", "Page", "Question", "Answer", "Audio"] do
  actor = Containment.get_actor(data)

  data =
    Map.put(data, "actor", actor)
    |> fix_addressing

  # `nil` means we have not seen this object before (dedup by object id).
  with nil <- Activity.get_create_by_object_ap_id(object["id"]),
       {:ok, %User{} = user} <- User.get_or_fetch_by_ap_id(data["actor"]) do
    object = fix_object(object, options)

    params = %{
      to: data["to"],
      object: object,
      actor: user,
      context: object["conversation"],
      local: false,
      published: data["published"],
      additional:
        Map.take(data, [
          "cc",
          "directMessage",
          "id"
        ])
    }

    with {:ok, created_activity} <- ActivityPub.create(params) do
      reply_depth = (options[:depth] || 0) + 1

      # Schedule background fetches of this object's replies, but only
      # within the configured federation thread distance.
      if Federator.allowed_thread_distance?(reply_depth) do
        for reply_id <- replies(object) do
          Pleroma.Workers.RemoteFetcherWorker.enqueue("fetch_remote", %{
            "id" => reply_id,
            "depth" => reply_depth
          })
        end
      end

      {:ok, created_activity}
    end
  else
    # Already seen this object — return the existing activity.
    %Activity{} = activity -> {:ok, activity}
    _e -> :error
  end
end
|
|
|
|
|
2019-09-27 14:40:31 +02:00
|
|
|
# Handles a remote Listen activity (a scrobble) for an Audio object.
def handle_incoming(
      %{"type" => "Listen", "object" => %{"type" => "Audio"} = object} = data,
      options
    ) do
  actor = Containment.get_actor(data)

  data =
    Map.put(data, "actor", actor)
    |> fix_addressing

  with {:ok, %User{} = user} <- User.get_or_fetch_by_ap_id(data["actor"]) do
    # Bump the thread depth before normalizing the embedded object.
    reply_depth = (options[:depth] || 0) + 1
    options = Keyword.put(options, :depth, reply_depth)
    object = fix_object(object, options)

    params = %{
      to: data["to"],
      object: object,
      actor: user,
      context: nil,
      local: false,
      published: data["published"],
      additional: Map.take(data, ["cc", "id"])
    }

    ActivityPub.listen(params)
  else
    _e -> :error
  end
end
|
|
|
|
|
2018-03-30 15:01:53 +02:00
|
|
|
# Handles an incoming Follow aimed at a local user. The follow activity
# is recorded and then auto-accepted unless the target has blocked the
# follower (with deny_follow_blocked enabled), the target account is
# locked, or the follow itself fails.
def handle_incoming(
      %{"type" => "Follow", "object" => followed, "actor" => follower, "id" => id} = data,
      _options
    ) do
  with %User{local: true} = followed <-
         User.get_cached_by_ap_id(Containment.get_actor(%{"actor" => followed})),
       {:ok, %User{} = follower} <-
         User.get_or_fetch_by_ap_id(Containment.get_actor(%{"actor" => follower})),
       {:ok, activity} <- ActivityPub.follow(follower, followed, id, false) do
    with deny_follow_blocked <- Pleroma.Config.get([:user, :deny_follow_blocked]),
         {_, false} <- {:user_blocked, User.blocks?(followed, follower) && deny_follow_blocked},
         {_, false} <- {:user_locked, User.locked?(followed)},
         {_, {:ok, follower}} <- {:follow, User.follow(follower, followed)},
         {_, {:ok, _}} <-
           {:follow_state_update, Utils.update_follow_state_for_all(activity, "accept")},
         {:ok, _relationship} <-
           FollowingRelationship.update(follower, followed, :follow_accept) do
      # Happy path: auto-accept the follow on behalf of the local user.
      ActivityPub.accept(%{
        to: [follower.ap_id],
        actor: followed,
        object: data,
        local: true
      })
    else
      {:user_blocked, true} ->
        # Blocked follower: reject the follow outright.
        {:ok, _} = Utils.update_follow_state_for_all(activity, "reject")
        {:ok, _relationship} = FollowingRelationship.update(follower, followed, :follow_reject)

        ActivityPub.reject(%{
          to: [follower.ap_id],
          actor: followed,
          object: data,
          local: true
        })

      {:follow, {:error, _}} ->
        # Recording the follow failed locally: mirror that as a rejection.
        {:ok, _} = Utils.update_follow_state_for_all(activity, "reject")
        {:ok, _relationship} = FollowingRelationship.update(follower, followed, :follow_reject)

        ActivityPub.reject(%{
          to: [follower.ap_id],
          actor: followed,
          object: data,
          local: true
        })

      {:user_locked, true} ->
        # Locked account: leave the follow pending for manual approval.
        {:ok, _relationship} = FollowingRelationship.update(follower, followed, :follow_pending)
        :noop
    end

    # Regardless of accept/reject/pending, the Follow activity itself was
    # processed successfully.
    {:ok, activity}
  else
    _e ->
      :error
  end
end
|
|
|
|
|
2018-05-25 08:09:01 +02:00
|
|
|
# Handles a remote Accept of a Follow we sent: marks the follow accepted
# and records the relationship for the local follower.
def handle_incoming(
      %{"type" => "Accept", "object" => follow_object, "actor" => _actor, "id" => id} = data,
      _options
    ) do
  with actor <- Containment.get_actor(data),
       {:ok, %User{} = followed} <- User.get_or_fetch_by_ap_id(actor),
       {:ok, follow_activity} <- get_follow_activity(follow_object, followed),
       {:ok, follow_activity} <- Utils.update_follow_state_for_all(follow_activity, "accept"),
       %User{local: true} = follower <- User.get_cached_by_ap_id(follow_activity.data["actor"]),
       {:ok, _relationship} <- FollowingRelationship.update(follower, followed, :follow_accept) do
    ActivityPub.accept(%{
      to: follow_activity.data["to"],
      type: "Accept",
      actor: followed,
      object: follow_activity.data["id"],
      local: false,
      activity_id: id
    })
  else
    _e -> :error
  end
end
|
|
|
|
|
|
|
|
# Handles a remote Reject of a Follow we sent: marks the follow rejected
# and updates the relationship for the local follower.
def handle_incoming(
      %{"type" => "Reject", "object" => follow_object, "actor" => _actor, "id" => id} = data,
      _options
    ) do
  with actor <- Containment.get_actor(data),
       {:ok, %User{} = followed} <- User.get_or_fetch_by_ap_id(actor),
       {:ok, follow_activity} <- get_follow_activity(follow_object, followed),
       {:ok, follow_activity} <- Utils.update_follow_state_for_all(follow_activity, "reject"),
       %User{local: true} = follower <- User.get_cached_by_ap_id(follow_activity.data["actor"]),
       {:ok, _relationship} <- FollowingRelationship.update(follower, followed, :follow_reject),
       {:ok, activity} <-
         ActivityPub.reject(%{
           to: follow_activity.data["to"],
           type: "Reject",
           actor: followed,
           object: follow_activity.data["id"],
           local: false,
           activity_id: id
         }) do
    {:ok, activity}
  else
    _e -> :error
  end
end
|
|
|
|
|
2019-10-04 17:01:04 +02:00
|
|
|
# Mapping of Misskey's named reactions ("_misskey_reaction") to the
# unicode emoji they represent; used below to rewrite Misskey likes into
# EmojiReact activities.
@misskey_reactions %{
  "like" => "👍",
  "love" => "❤️",
  "laugh" => "😆",
  "hmm" => "🤔",
  "surprise" => "😮",
  "congrats" => "🎉",
  "angry" => "💢",
  "confused" => "😥",
  "rip" => "😇",
  "pudding" => "🍮",
  "star" => "⭐"
}
|
|
|
|
|
2020-02-06 18:09:57 +01:00
|
|
|
@doc "Rewrite misskey likes into EmojiReacts"
def handle_incoming(
      %{
        "type" => "Like",
        "_misskey_reaction" => reaction
      } = data,
      options
    ) do
  data
  |> Map.put("type", "EmojiReact")
  # Unknown reaction names fall back to the raw reaction value.
  |> Map.put("content", @misskey_reactions[reaction] || reaction)
  |> handle_incoming(options)
end
|
|
|
|
|
2018-03-30 15:01:53 +02:00
|
|
|
# Handles a remote Like of a known object.
def handle_incoming(
      %{"type" => "Like", "object" => object_id, "actor" => _actor, "id" => id} = data,
      _options
    ) do
  with actor <- Containment.get_actor(data),
       {:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
       {:ok, object} <- get_obj_helper(object_id),
       {:ok, activity, _object} <- ActivityPub.like(actor, object, id, false) do
    {:ok, activity}
  else
    _e -> :error
  end
end
|
|
|
|
|
2019-08-26 23:47:31 +02:00
|
|
|
# Handles a remote EmojiReact on a known object.
def handle_incoming(
      %{
        "type" => "EmojiReact",
        "object" => object_id,
        "actor" => _actor,
        "id" => id,
        "content" => emoji
      } = data,
      _options
    ) do
  with actor <- Containment.get_actor(data),
       {:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
       {:ok, object} <- get_obj_helper(object_id),
       {:ok, activity, _object} <-
         ActivityPub.react_with_emoji(actor, object, emoji, activity_id: id, local: false) do
    {:ok, activity}
  else
    _e -> :error
  end
end
|
|
|
|
|
2018-03-30 15:01:53 +02:00
|
|
|
# Handles a remote Announce (boost) of an object, carrying over the
# activity's public/private visibility.
def handle_incoming(
      %{"type" => "Announce", "object" => object_id, "actor" => _actor, "id" => id} = data,
      _options
    ) do
  with actor <- Containment.get_actor(data),
       {:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
       {:ok, object} <- get_embedded_obj_helper(object_id, actor),
       public <- Visibility.is_public?(data),
       {:ok, activity, _object} <- ActivityPub.announce(actor, object, id, false, public) do
    {:ok, activity}
  else
    _e -> :error
  end
end
|
|
|
|
|
2018-03-30 15:01:53 +02:00
|
|
|
# Handles a remote actor-profile Update (Person/Application/Service/
# Organization): refreshes the cached user and records the Update.
def handle_incoming(
      %{"type" => "Update", "object" => %{"type" => object_type} = object, "actor" => actor_id} =
        data,
      _options
    )
    when object_type in [
           "Person",
           "Application",
           "Service",
           "Organization"
         ] do
  # The pin `^actor_id` ensures actors can only update their own profile.
  with %User{ap_id: ^actor_id} = actor <- User.get_cached_by_ap_id(object["id"]) do
    {:ok, new_user_data} = ActivityPub.user_data_from_user_object(object)

    actor
    |> User.upgrade_changeset(new_user_data, true)
    |> User.update_and_set_cache()

    ActivityPub.update(%{
      local: false,
      to: data["to"] || [],
      cc: data["cc"] || [],
      object: object,
      actor: actor_id,
      activity_id: data["id"]
    })
  else
    e ->
      # NOTE(review): `e` may not be chardata here (e.g. nil when the
      # user is unknown), which could make Logger.error/1 raise — confirm.
      Logger.error(e)
      :error
  end
end
|
|
|
|
|
2018-11-17 22:22:30 +01:00
|
|
|
# TODO: We presently assume that any actor on the same origin domain as the object being
# deleted has the rights to delete that object. A better way to validate whether or not
# the object should be deleted is to refetch the object URI, which should return either
# an error or a tombstone. This would allow us to verify that a deletion actually took
# place.
def handle_incoming(
      %{"type" => "Delete", "object" => object_id, "actor" => actor, "id" => id} = data,
      _options
    ) do
  object_id = Utils.get_ap_id(object_id)

  with actor <- Containment.get_actor(data),
       {:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
       {:ok, object} <- get_obj_helper(object_id),
       # Same-origin check between the deleting actor and the object.
       :ok <- Containment.contain_origin(actor.ap_id, object.data),
       {:ok, activity} <-
         ActivityPub.delete(object, local: false, activity_id: id, actor: actor.ap_id) do
    {:ok, activity}
  else
    nil ->
      # No such object locally — this may be an account deletion, but
      # only when the deleted id is the actor's own id.
      case User.get_cached_by_ap_id(object_id) do
        %User{ap_id: ^actor} = user ->
          User.delete(user)

        nil ->
          :error
      end

    _e ->
      :error
  end
end
|
|
|
|
|
# Incoming Undo of an Announce: resolves the actor and the announced object,
# then removes the announce via ActivityPub.unannounce/4 (local: false, so
# the undo is treated as federated-in, not re-originated here).
def handle_incoming(
      %{
        "type" => "Undo",
        "object" => %{"type" => "Announce", "object" => object_id},
        "actor" => _actor,
        "id" => id
      } = data,
      _options
    ) do
  # Actor is derived from the activity envelope via Containment.get_actor/1,
  # not from the "actor" field matched (and ignored) above.
  with actor <- Containment.get_actor(data),
       {:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
       {:ok, object} <- get_obj_helper(object_id),
       {:ok, activity, _} <- ActivityPub.unannounce(actor, object, id, false) do
    {:ok, activity}
  else
    _e -> :error
  end
end
# Incoming Undo of a Follow aimed at a local user: stores the unfollow
# activity and removes the follower relationship.
def handle_incoming(
      %{
        "type" => "Undo",
        "object" => %{"type" => "Follow", "object" => followed},
        "actor" => follower,
        "id" => id
      } = _data,
      _options
    ) do
  # Only handled when the followed account lives on this instance.
  with %User{local: true} = followed <- User.get_cached_by_ap_id(followed),
       {:ok, %User{} = follower} <- User.get_or_fetch_by_ap_id(follower),
       {:ok, activity} <- ActivityPub.unfollow(follower, followed, id, false) do
    User.unfollow(follower, followed)
    {:ok, activity}
  else
    _e -> :error
  end
end
# Incoming Undo of an EmojiReact: removes a previously federated emoji
# reaction, identified by the embedded reaction activity's id.
def handle_incoming(
      %{
        "type" => "Undo",
        "object" => %{"type" => "EmojiReact", "id" => reaction_activity_id},
        "actor" => _actor,
        "id" => id
      } = data,
      _options
    ) do
  # Actor comes from the activity envelope, not the matched "actor" field.
  with actor <- Containment.get_actor(data),
       {:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
       {:ok, activity, _} <-
         ActivityPub.unreact_with_emoji(actor, reaction_activity_id,
           activity_id: id,
           local: false
         ) do
    {:ok, activity}
  else
    _e -> :error
  end
end
# Incoming Undo of a Block aimed at a local user: stores the unblock
# activity and clears the block relationship.
def handle_incoming(
      %{
        "type" => "Undo",
        "object" => %{"type" => "Block", "object" => blocked},
        "actor" => blocker,
        "id" => id
      } = _data,
      _options
    ) do
  # Only handled when the blocked account lives on this instance.
  with %User{local: true} = blocked <- User.get_cached_by_ap_id(blocked),
       {:ok, %User{} = blocker} <- User.get_or_fetch_by_ap_id(blocker),
       {:ok, activity} <- ActivityPub.unblock(blocker, blocked, id, false) do
    User.unblock(blocker, blocked)
    {:ok, activity}
  else
    _e -> :error
  end
end
# Incoming Block aimed at a local user: stores the block activity, then
# mirrors it locally by removing the follow and recording the block.
def handle_incoming(
      %{"type" => "Block", "object" => blocked, "actor" => blocker, "id" => id} = _data,
      _options
    ) do
  # FIX: the first two steps used plain `=` matches inside `with`, so a
  # missing local user or a failed remote fetch raised MatchError instead of
  # falling through to `else` and returning :error. `<-` restores the
  # intended fall-through behavior.
  with %User{local: true} = blocked <- User.get_cached_by_ap_id(blocked),
       {:ok, %User{} = blocker} <- User.get_or_fetch_by_ap_id(blocker),
       {:ok, activity} <- ActivityPub.block(blocker, blocked, id, false) do
    User.unfollow(blocker, blocked)
    User.block(blocker, blocked)
    {:ok, activity}
  else
    _e -> :error
  end
end
# Incoming Undo of a Like: removes the actor's like from the target object.
def handle_incoming(
      %{
        "type" => "Undo",
        "object" => %{"type" => "Like", "object" => object_id},
        "actor" => _actor,
        "id" => id
      } = data,
      _options
    ) do
  # Actor is resolved from the activity envelope, not the matched field.
  with actor <- Containment.get_actor(data),
       {:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
       {:ok, object} <- get_obj_helper(object_id),
       {:ok, activity, _, _} <- ActivityPub.unlike(actor, object, id, false) do
    {:ok, activity}
  else
    _e -> :error
  end
end
# For Undos that don't have the complete object attached, try to find it in
# our database; when found, replace the bare id with the stored activity's
# data and re-dispatch through handle_incoming/2.
def handle_incoming(%{"type" => "Undo", "object" => object} = activity, options)
    when is_binary(object) do
  case Activity.get_by_ap_id(object) do
    %Activity{data: embedded} ->
      handle_incoming(Map.put(activity, "object", embedded), options)

    _ ->
      :error
  end
end
# Incoming account Move. The head requires "object" to equal "actor"
# (same binding `origin_actor` twice), and the move is only honored when
# the target account lists the origin in its also_known_as aliases.
def handle_incoming(
      %{
        "type" => "Move",
        "actor" => origin_actor,
        "object" => origin_actor,
        "target" => target_actor
      },
      _options
    ) do
  with %User{} = origin_user <- User.get_cached_by_ap_id(origin_actor),
       {:ok, %User{} = target_user} <- User.get_or_fetch_by_ap_id(target_actor),
       true <- origin_actor in target_user.also_known_as do
    ActivityPub.move(origin_user, target_user, false)
  else
    _e -> :error
  end
end
# Catch-all: any incoming activity shape not matched above is rejected.
def handle_incoming(_, _), do: :error
@spec get_obj_helper(String.t(), Keyword.t()) :: {:ok, Object.t()} | nil
# Normalizes `id` into a stored Object (the `true` flag passed to
# Object.normalize/3 allows fetching); returns nil when nothing resolves.
def get_obj_helper(id, options \\ []) do
  case Object.normalize(id, true, options) do
    %Object{} = object -> {:ok, object}
    _ -> nil
  end
end
@spec get_embedded_obj_helper(String.t() | Object.t(), User.t()) :: {:ok, Object.t()} | nil
# Accepts an object embedded inside another activity when it is attributed
# to the given user (guard: "attributedTo" == the user's ap_id); otherwise
# falls back to a plain lookup/fetch by id.
def get_embedded_obj_helper(%{"attributedTo" => attributed_to, "id" => object_id} = data, %User{
      ap_id: ap_id
    })
    when attributed_to == ap_id do
  # Re-run the embedded object through the normal Create pipeline so it is
  # processed and stored like any other incoming object.
  with {:ok, activity} <-
         handle_incoming(%{
           "type" => "Create",
           "to" => data["to"],
           "cc" => data["cc"],
           "actor" => attributed_to,
           "object" => data
         }) do
    {:ok, Object.normalize(activity)}
  else
    _ -> get_obj_helper(object_id)
  end
end

# Not attributable to the user (or not an embedded map): plain lookup.
def get_embedded_obj_helper(object_id, _) do
  get_obj_helper(object_id)
end
# For non-http(s) inReplyTo references, resolve the replied-to object locally
# and substitute its external_url (falling back to the original reference).
def set_reply_to_uri(%{"inReplyTo" => in_reply_to} = object) when is_binary(in_reply_to) do
  if String.starts_with?(in_reply_to, "http") do
    object
  else
    case get_obj_helper(in_reply_to) do
      {:ok, %{data: replied_to}} ->
        Map.put(object, "inReplyTo", replied_to["external_url"] || in_reply_to)

      _ ->
        object
    end
  end
end

# No (binary) inReplyTo: nothing to rewrite.
def set_reply_to_uri(obj), do: obj
@doc """
Serialized Mastodon-compatible `replies` collection containing _self-replies_.
Based on Mastodon's ActivityPub::NoteSerializer#replies.
"""
def set_replies(obj_data) do
  replies_uris =
    # Only collected when the configured output limit is positive AND the
    # object is already cached locally; otherwise no collection is attached.
    with limit when limit > 0 <-
           Pleroma.Config.get([:activitypub, :note_replies_output_limit], 0),
         %Object{} = object <- Object.get_cached_by_ap_id(obj_data["id"]) do
      object
      |> Object.self_replies()
      # Select just the AP ids of the replies from their JSON data.
      |> select([o], fragment("?->>'id'", o.data))
      |> limit(^limit)
      |> Repo.all()
    else
      _ -> []
    end

  set_replies(obj_data, replies_uris)
end

# No self-replies: leave the object untouched.
defp set_replies(obj, []) do
  obj
end

# Attach the reply ids as an unpaged AS2 Collection under "replies".
defp set_replies(obj, replies_uris) do
  replies_collection = %{
    "type" => "Collection",
    "items" => replies_uris
  }

  Map.merge(obj, %{"replies" => replies_collection})
end
# Extracts reply URIs from a `replies` collection, accepting either a paged
# collection (`first.items`) or a flat one (`items`); anything else yields [].
def replies(object) do
  case object do
    %{"replies" => %{"first" => %{"items" => items}}} when not is_nil(items) -> items
    %{"replies" => %{"items" => items}} when not is_nil(items) -> items
    _ -> []
  end
end
# Prepares the object of an outgoing create activity:
# translates the internal representation into Mastodon-compatible AS2
# (sensitivity flag, hashtag/mention/emoji tags, attribution, attachments,
# conversation, reply URI, replies collection) and strips Pleroma-internal
# fields and tags before federation.
def prepare_object(object) do
  object
  |> set_sensitive
  |> add_hashtags
  |> add_mention_tags
  |> add_emoji_tags
  |> add_attributed_to
  |> prepare_attachments
  |> set_conversation
  |> set_reply_to_uri
  |> set_replies
  |> strip_internal_fields
  |> strip_internal_tags
  |> set_type
end
# @doc
# """
# internal -> Mastodon
# """

# Outgoing Create/Listen: embeds the fully prepared object into the
# activity, adds the JSON-LD header, and drops "bcc" before sending.
def prepare_outgoing(%{"type" => activity_type, "object" => object_id} = data)
    when activity_type in ["Create", "Listen"] do
  object =
    object_id
    |> Object.normalize()
    |> Map.get(:data)
    |> prepare_object

  data =
    data
    |> Map.put("object", object)
    |> Map.merge(Utils.make_json_ld_header())
    |> Map.delete("bcc")

  {:ok, data}
end
# Outgoing Announce: when announcing one's own private object, the full
# prepared object is embedded (so recipients can see it); otherwise only
# the object reference is kept, fixed up to an external URL if needed.
def prepare_outgoing(%{"type" => "Announce", "actor" => ap_id, "object" => object_id} = data) do
  object =
    object_id
    |> Object.normalize()

  data =
    if Visibility.is_private?(object) && object.data["actor"] == ap_id do
      data |> Map.put("object", object |> Map.get(:data) |> prepare_object)
    else
      data |> maybe_fix_object_url
    end

  data =
    data
    |> strip_internal_fields
    |> Map.merge(Utils.make_json_ld_header())
    |> Map.delete("bcc")

  {:ok, data}
end
# Mastodon Accept/Reject requires a non-normalized object containing the actor URIs,
# because of course it does.
def prepare_outgoing(%{"type" => "Accept"} = data) do
  with follow_activity <- Activity.normalize(data["object"]) do
    # Rebuild the original Follow inline rather than referencing it by id.
    object = %{
      "actor" => follow_activity.actor,
      "object" => follow_activity.data["object"],
      "id" => follow_activity.data["id"],
      "type" => "Follow"
    }

    data =
      data
      |> Map.put("object", object)
      |> Map.merge(Utils.make_json_ld_header())

    {:ok, data}
  end
end
# Outgoing Reject: like Accept, the rejected Follow is embedded inline as a
# non-normalized object carrying the actor URIs.
def prepare_outgoing(%{"type" => "Reject"} = data) do
  with follow_activity <- Activity.normalize(data["object"]) do
    object = %{
      "actor" => follow_activity.actor,
      "object" => follow_activity.data["object"],
      "id" => follow_activity.data["id"],
      "type" => "Follow"
    }

    data =
      data
      |> Map.put("object", object)
      |> Map.merge(Utils.make_json_ld_header())

    {:ok, data}
  end
end
# Fallback for all other outgoing activity types: strip internal fields,
# fix a possibly non-http object reference, and add the JSON-LD header.
def prepare_outgoing(%{"type" => _type} = data) do
  data =
    data
    |> strip_internal_fields
    |> maybe_fix_object_url
    |> Map.merge(Utils.make_json_ld_header())

  {:ok, data}
end
# When "object" is a binary reference that is not an http(s) URL, try to
# resolve it locally and substitute the stored object's external_url.
# On lookup failure the original data is kept (and the failure logged).
def maybe_fix_object_url(%{"object" => object} = data) when is_binary(object) do
  with false <- String.starts_with?(object, "http"),
       {:fetch, {:ok, relative_object}} <- {:fetch, get_obj_helper(object)},
       %{data: %{"external_url" => external_url}} when not is_nil(external_url) <-
         relative_object do
    Map.put(data, "object", external_url)
  else
    # The :fetch tag distinguishes a failed lookup from the other clauses.
    {:fetch, e} ->
      Logger.error("Couldn't fetch #{object} #{inspect(e)}")
      data

    _ ->
      data
  end
end

# "object" absent or not a binary: nothing to fix.
def maybe_fix_object_url(data), do: data
# Expands internal string tags into AS2 "Hashtag" objects; tags that are
# already maps (AS2 tag objects) are passed through unchanged.
def add_hashtags(object) do
  expanded = for tag <- object["tag"] || [], do: expand_hashtag(tag)

  Map.put(object, "tag", expanded)
end

# Internal string tag -> AS2 Hashtag object pointing at this instance.
defp expand_hashtag(tag) when is_binary(tag) do
  %{
    "type" => "Hashtag",
    "name" => "##{tag}",
    "href" => Pleroma.Web.Endpoint.url() <> "/tags/#{tag}"
  }
end

# Already an AS2 tag object: keep as-is.
defp expand_hashtag(tag) when is_map(tag), do: tag
# Appends an AS2 "Mention" tag for every user the object would notify
# (both the notification-enabled and -disabled receivers).
def add_mention_tags(object) do
  {enabled_receivers, disabled_receivers} = Utils.get_notified_from_object(object)
  potential_receivers = enabled_receivers ++ disabled_receivers
  mentions = Enum.map(potential_receivers, &build_mention_tag/1)

  tags = object["tag"] || []
  Map.put(object, "tag", tags ++ mentions)
end
# Builds a single AS2 "Mention" tag from a user's ap_id and nickname.
defp build_mention_tag(%{ap_id: href, nickname: nickname}) do
  %{"href" => href, "name" => "@" <> nickname, "type" => "Mention"}
end
# Builds AS2 "Emoji" tags from a user's custom emoji. The flat_map over
# Map.to_list/1 shows `emoji` is enumerated as maps whose entries become
# {name, url} pairs for build_emoji_tag/1.
def take_emoji_tags(%User{emoji: emoji}) do
  emoji
  |> Enum.flat_map(&Map.to_list/1)
  |> Enum.map(&build_emoji_tag/1)
end
# TODO: we should probably send mtime instead of unix epoch time for updated
# Appends an AS2 "Emoji" tag for each custom emoji recorded on the object.
def add_emoji_tags(%{"emoji" => emoji} = object) do
  tags = object["tag"] || []

  out = Enum.map(emoji, &build_emoji_tag/1)

  Map.put(object, "tag", tags ++ out)
end

# No "emoji" key: nothing to add.
def add_emoji_tags(object), do: object
# Builds a single AS2 "Emoji" tag from a {name, url} pair; the URL doubles
# as the tag id, and "updated" is a fixed epoch timestamp (mtime untracked).
defp build_emoji_tag({name, url}) do
  %{
    "id" => url,
    "type" => "Emoji",
    "name" => ":" <> name <> ":",
    "icon" => %{"url" => url, "type" => "Image"},
    "updated" => "1970-01-01T00:00:00Z"
  }
end
# Mirrors the AS2 "context" into the legacy "conversation" field.
def set_conversation(object) do
  context = object["context"]
  Map.put(object, "conversation", context)
end
# Marks the object sensitive when the internal "nsfw" tag is present.
def set_sensitive(object) do
  sensitive? = Enum.member?(object["tag"] || [], "nsfw")
  Map.put(object, "sensitive", sensitive?)
end
# Poll answers are serialized as plain Notes for Mastodon compatibility;
# every other type is kept as-is.
def set_type(%{"type" => "Answer"} = object), do: %{object | "type" => "Note"}

def set_type(object), do: object
# Ensures "attributedTo" is set, falling back to the "actor" field.
def add_attributed_to(object) do
  Map.put(object, "attributedTo", object["attributedTo"] || object["actor"])
end
# Flattens each attachment's first "url" entry (a Link-like map) into an
# AS2 "Document" with top-level url/mediaType/name.
def prepare_attachments(object) do
  prepared =
    for attachment <- object["attachment"] || [] do
      [%{"mediaType" => media_type, "href" => href} | _] = attachment["url"]

      %{
        "url" => href,
        "mediaType" => media_type,
        "name" => attachment["name"],
        "type" => "Document"
      }
    end

  Map.put(object, "attachment", prepared)
end
2018-02-21 22:21:40 +01:00
|
|
|
|
2019-09-12 18:59:13 +02:00
|
|
|
def strip_internal_fields(object) do
|
2018-11-10 13:08:53 +01:00
|
|
|
object
|
2019-09-18 18:24:20 +02:00
|
|
|
|> Map.drop(Pleroma.Constants.object_internal_fields())
|
2018-11-10 13:08:53 +01:00
|
|
|
end
|
|
|
|
|
|
|
|
# Internal tags are plain strings; only AS2 tag objects (maps) are federated.
defp strip_internal_tags(%{"tag" => tags} = object) do
  Map.put(object, "tag", Enum.filter(tags, &is_map/1))
end

# No "tag" key: nothing to strip.
defp strip_internal_tags(object), do: object
# After a user upgrade, rewrites recipients of all stored activities:
# replaces the user's previously computed follower address with the
# current follower_address, directly in the database via array_replace.
def perform(:user_upgrade, user) do
  # we pass a fake user so that the followers collection is stripped away
  old_follower_address = User.ap_followers(%User{nickname: user.nickname})

  from(
    a in Activity,
    where: ^old_follower_address in a.recipients,
    update: [
      set: [
        recipients:
          fragment(
            "array_replace(?,?,?)",
            a.recipients,
            ^old_follower_address,
            ^user.follower_address
          )
      ]
    ]
  )
  |> Repo.update_all([])
end
# Refetches a remote user's data over ActivityPub and upgrades the local
# record. If the user was not AP-enabled before, a background job is queued
# to rewrite their existing activities' recipients.
def upgrade_user_from_ap_id(ap_id) do
  with %User{local: false} = user <- User.get_cached_by_ap_id(ap_id),
       {:ok, data} <- ActivityPub.fetch_and_prepare_user_from_ap_id(ap_id),
       already_ap <- User.ap_enabled?(user),
       {:ok, user} <- upgrade_user(user, data) do
    if not already_ap do
      TransmogrifierWorker.enqueue("user_upgrade", %{"user_id" => user.id})
    end

    {:ok, user}
  else
    # A user that matched the cache but not `local: false` (i.e. a local
    # user) is returned unchanged; everything else propagates as-is.
    %User{} = user -> {:ok, user}
    e -> e
  end
end
# Applies freshly fetched AP data to the user and refreshes the cache.
defp upgrade_user(user, data) do
  changeset = User.upgrade_changeset(user, data, true)
  User.update_and_set_cache(changeset)
end
|
2019-09-10 15:43:10 +02:00
|
|
|
def maybe_fix_user_url(%{"url" => url} = data) when is_map(url) do
|
|
|
|
Map.put(data, "url", url["href"])
|
2018-05-19 09:30:02 +02:00
|
|
|
end
|
|
|
|
|
2019-09-10 15:43:10 +02:00
|
|
|
def maybe_fix_user_url(data), do: data
|
|
|
|
|
|
|
|
def maybe_fix_user_object(data), do: maybe_fix_user_url(data)
|
2018-02-15 20:00:06 +01:00
|
|
|
end
|