# Pleroma.Web.ActivityPub.Transmogrifier — lib/pleroma/web/activity_pub/transmogrifier.ex
defmodule Pleroma.Web.ActivityPub.Transmogrifier do
@moduledoc """
A module to handle coding from internal to wire ActivityPub and back.
"""
alias Pleroma.User
alias Pleroma.Object
alias Pleroma.Activity
alias Pleroma.Repo
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.ActivityPub.Utils
import Ecto.Query
require Logger
# Extracts the actor's AP id from an activity's "actor" field, which the
# wire format allows to be a bare id string, a list (of ids and/or embedded
# actor objects), or a single actor object.
def get_actor(%{"actor" => actor}) when is_binary(actor) do
  actor
end

# A list may mix id strings and embedded objects: prefer the first binary
# id, otherwise fall back to the id of the first "Person" object.
# (Previously this clause returned `Enum.at(actor, 0)` unconditionally, so
# a list of actor OBJECTS yielded a map instead of an id — and the separate
# object-list clause below it was unreachable because `is_list/1` matched
# every list first.)
def get_actor(%{"actor" => actor}) when is_list(actor) do
  case Enum.find(actor, &is_binary/1) do
    nil ->
      actor
      |> Enum.find(fn %{"type" => type} -> type == "Person" end)
      |> Map.get("id")

    id ->
      id
  end
end

# A single embedded actor object.
def get_actor(%{"actor" => %{"id" => id}}) when is_binary(id) do
  id
end
@doc """
Modifies an incoming AP object (mastodon format) to our internal format.
"""
def fix_object(object) do
  # Run every fixer in order; each takes and returns the object map.
  fixers = [
    &fix_actor/1,
    &fix_attachments/1,
    &fix_context/1,
    &fix_in_reply_to/1,
    &fix_emoji/1,
    &fix_tag/1,
    &fix_content_map/1
  ]

  Enum.reduce(fixers, object, fn fixer, acc -> fixer.(acc) end)
end
# Derives the internal "actor" field from the wire-format "attributedTo".
def fix_actor(%{"attributedTo" => attributed_to} = object) do
  Map.put(object, "actor", get_actor(%{"actor" => attributed_to}))
end
# Resolves the replied-to object (fetching it over AP if necessary) and
# rewrites the reply-related fields to our internal format, inheriting the
# replied-to object's context/conversation.
def fix_in_reply_to(%{"inReplyTo" => in_reply_to_id} = object)
    when not is_nil(in_reply_to_id) do
  case ActivityPub.fetch_object_from_id(in_reply_to_id) do
    {:ok, replied_object} ->
      with %Activity{} = activity <-
             Activity.get_create_activity_by_object_ap_id(replied_object.data["id"]) do
        object
        |> Map.put("inReplyTo", replied_object.data["id"])
        |> Map.put("inReplyToAtomUri", object["inReplyToAtomUri"] || in_reply_to_id)
        |> Map.put("inReplyToStatusId", activity.id)
        |> Map.put("conversation", replied_object.data["context"] || object["conversation"])
        |> Map.put("context", replied_object.data["context"] || object["conversation"])
      else
        e ->
          # NOTE(review): at this point the object WAS fetched — it's the
          # Create-activity lookup that failed; the message is misleading.
          Logger.error("Couldn't fetch #{object["inReplyTo"]} #{inspect(e)}")
          object
      end

    e ->
      Logger.error("Couldn't fetch #{object["inReplyTo"]} #{inspect(e)}")
      object
  end
end

# No (non-nil) inReplyTo: nothing to fix.
def fix_in_reply_to(object), do: object
# Our internal "context" mirrors the incoming "conversation" value.
def fix_context(object) do
  Map.put(object, "context", object["conversation"])
end
# Normalizes each attachment's "url" to our internal list-of-Link form.
def fix_attachments(object) do
  fixed =
    (object["attachment"] || [])
    |> Enum.map(fn attachment ->
      link = %{
        "type" => "Link",
        "mediaType" => attachment["mediaType"],
        "href" => attachment["url"]
      }

      Map.put(attachment, "url", [link])
    end)

  Map.put(object, "attachment", fixed)
end
# Converts "Emoji" tag entries into our internal emoji mapping
# (shortcode => icon url), merging with any "emoji" map already present.
def fix_emoji(object) do
  incoming =
    (object["tag"] || [])
    |> Enum.filter(fn entry -> entry["type"] == "Emoji" and entry["icon"] end)
    |> Enum.reduce(%{}, fn entry, acc ->
      shortcode = entry["name"]

      # Mastodon wraps shortcodes in colons (":blank:"); strip them.
      shortcode =
        if String.starts_with?(shortcode, ":") do
          String.slice(shortcode, 1..-2)
        else
          shortcode
        end

      Map.put(acc, shortcode, entry["icon"]["url"])
    end)

  # we merge mastodon and pleroma emoji into a single mapping, to allow for both wire formats
  Map.put(object, "emoji", Map.merge(object["emoji"] || %{}, incoming))
end
# Appends the bare hashtag names (minus the "#") to the tag list, keeping
# the original tag objects alongside them.
def fix_tag(object) do
  names =
    (object["tag"] || [])
    |> Enum.filter(fn entry -> entry["type"] == "Hashtag" and entry["name"] end)
    |> Enum.map(fn entry -> String.slice(entry["name"], 1..-1) end)

  Map.put(object, "tag", (object["tag"] || []) ++ names)
end
# content map usually only has one language so this will do for now.
def fix_content_map(%{"contentMap" => content_map} = object) do
  [{_lang, content} | _] = Map.to_list(content_map)
  Map.put(object, "content", content)
end

def fix_content_map(object), do: object
# TODO: validate those with a Ecto scheme
# - tags
# - emoji

# Incoming Create of a Note/Article: normalize the object to our internal
# format and insert it, deduplicated by the object's ap_id.
def handle_incoming(%{"type" => "Create", "object" => %{"type" => objtype} = object} = data)
    when objtype in ["Article", "Note"] do
  actor = get_actor(data)
  data = Map.put(data, "actor", actor)

  # `nil` means this object is new to us; an existing Create activity is
  # returned as-is so duplicate deliveries stay idempotent.
  with nil <- Activity.get_create_activity_by_object_ap_id(object["id"]),
       %User{} = user <- User.get_or_fetch_by_ap_id(data["actor"]) do
    object = fix_object(data["object"])

    params = %{
      to: data["to"],
      object: object,
      actor: user,
      context: object["conversation"],
      local: false,
      published: data["published"],
      additional:
        Map.take(data, [
          "cc",
          "id"
        ])
    }

    ActivityPub.create(params)
  else
    %Activity{} = activity -> {:ok, activity}
    _e -> :error
  end
end
# Incoming Follow of a local user: record the follow request and, unless
# the target account is locked, auto-accept it and establish the follow.
def handle_incoming(
      %{"type" => "Follow", "object" => followed, "actor" => follower, "id" => id} = data
    ) do
  with %User{local: true} = followed <- User.get_cached_by_ap_id(followed),
       %User{} = follower <- User.get_or_fetch_by_ap_id(follower),
       {:ok, activity} <- ActivityPub.follow(follower, followed, id, false) do
    if not User.locked?(followed) do
      ActivityPub.accept(%{
        to: [follower.ap_id],
        actor: followed.ap_id,
        object: data,
        local: true
      })

      User.follow(follower, followed)
    end

    {:ok, activity}
  else
    _e -> :error
  end
end
# Mastodon (pre-2.4) "Accept"s reference the follow via an URI containing
# "follows" rather than the follow activity's id, so look the follow
# activity up by its participants instead.
defp mastodon_follow_hack(%{"id" => id, "actor" => follower_id}, followed) do
  with true <- id =~ "follows",
       %User{local: true} = follower <- User.get_cached_by_ap_id(follower_id),
       %Activity{} = activity <- Utils.fetch_latest_follow(follower, followed) do
    {:ok, activity}
  else
    _ -> {:error, nil}
  end
end

# Fallback for any other object shape. This clause previously had arity 1
# (`mastodon_follow_hack(_)`) while all call sites pass two arguments, so
# it could never match and a non-map follow object crashed with
# FunctionClauseError instead of returning {:error, nil}.
defp mastodon_follow_hack(_, _), do: {:error, nil}
# Resolves the follow Activity a Mastodon Accept/Reject refers to: first by
# the follow activity's ap_id, then via the Mastodon URI hack above.
defp get_follow_activity(follow_object, followed) do
  with object_id when not is_nil(object_id) <- Utils.get_ap_id(follow_object),
       {_, %Activity{} = activity} <- {:activity, Activity.get_by_ap_id(object_id)} do
    {:ok, activity}
  else
    # Can't find the activity. This might a Mastodon 2.3 "Accept"
    {:activity, nil} ->
      mastodon_follow_hack(follow_object, followed)

    _ ->
      {:error, nil}
  end
end
# Incoming Accept of one of our follow requests: record the acceptance and
# establish the follow relationship locally.
# (`id`/`data`/the rebound `follower` were unused and produced compiler
# warnings; they are now underscored.)
def handle_incoming(
      %{"type" => "Accept", "object" => follow_object, "actor" => actor, "id" => _id} = _data
    ) do
  with %User{} = followed <- User.get_or_fetch_by_ap_id(actor),
       {:ok, follow_activity} <- get_follow_activity(follow_object, followed),
       %User{local: true} = follower <- User.get_cached_by_ap_id(follow_activity.data["actor"]),
       {:ok, activity} <-
         ActivityPub.accept(%{
           to: follow_activity.data["to"],
           type: "Accept",
           actor: followed.ap_id,
           object: follow_activity.data["id"],
           local: false
         }) do
    if not User.following?(follower, followed) do
      # Assert the follow succeeded; crash (and retry upstream) otherwise.
      {:ok, _follower} = User.follow(follower, followed)
    end

    {:ok, activity}
  else
    _e -> :error
  end
end
# Incoming Reject of one of our follow requests: tear the follow down.
# NOTE(review): this builds the outgoing activity with `type: "Accept"` and
# calls `ActivityPub.accept/1` — it looks copy-pasted from the Accept
# handler and should presumably emit a Reject; confirm against
# ActivityPub.accept/1's handling of `:type` before changing. `id` and
# `data` are also unused (compiler warnings).
def handle_incoming(
      %{"type" => "Reject", "object" => follow_object, "actor" => actor, "id" => id} = data
    ) do
  with %User{} = followed <- User.get_or_fetch_by_ap_id(actor),
       {:ok, follow_activity} <- get_follow_activity(follow_object, followed),
       %User{local: true} = follower <- User.get_cached_by_ap_id(follow_activity.data["actor"]),
       {:ok, activity} <-
         ActivityPub.accept(%{
           to: follow_activity.data["to"],
           type: "Accept",
           actor: followed.ap_id,
           object: follow_activity.data["id"],
           local: false
         }) do
    User.unfollow(follower, followed)

    {:ok, activity}
  else
    _e -> :error
  end
end
# Incoming Like: resolve the liked object (locally, or by fetching it) and
# record the like.
def handle_incoming(
      %{"type" => "Like", "object" => object_id, "actor" => actor, "id" => id} = _data
    ) do
  with %User{} = actor <- User.get_or_fetch_by_ap_id(actor),
       {:ok, object} <-
         get_obj_helper(object_id) || ActivityPub.fetch_object_from_id(object_id),
       {:ok, activity, _object} <- ActivityPub.like(actor, object, id, false) do
    {:ok, activity}
  else
    _e -> :error
  end
end
# Incoming Announce (boost): resolve the object and record the announce.
def handle_incoming(
      %{"type" => "Announce", "object" => object_id, "actor" => actor, "id" => id} = _data
    ) do
  with %User{} = actor <- User.get_or_fetch_by_ap_id(actor),
       {:ok, object} <-
         get_obj_helper(object_id) || ActivityPub.fetch_object_from_id(object_id),
       {:ok, activity, _object} <- ActivityPub.announce(actor, object, id, false) do
    {:ok, activity}
  else
    _e -> :error
  end
end
# Incoming Update of a Person: refresh our copy of the remote user's
# profile (name/bio/avatar plus banner/locked in info) and emit a matching
# Update activity.
def handle_incoming(
      %{"type" => "Update", "object" => %{"type" => "Person"} = object, "actor" => actor_id} =
        data
    ) do
  # Only accept updates where the stored user's ap_id matches the actor.
  with %User{ap_id: ^actor_id} = actor <- User.get_by_ap_id(object["id"]) do
    {:ok, new_user_data} = ActivityPub.user_data_from_user_object(object)

    banner = new_user_data[:info]["banner"]
    locked = new_user_data[:info]["locked"] || false

    update_data =
      new_user_data
      |> Map.take([:name, :bio, :avatar])
      |> Map.put(:info, Map.merge(actor.info, %{"banner" => banner, "locked" => locked}))

    actor
    |> User.upgrade_changeset(update_data)
    |> User.update_and_set_cache()

    ActivityPub.update(%{
      local: false,
      to: data["to"] || [],
      cc: data["cc"] || [],
      object: object,
      actor: actor_id
    })
  else
    e ->
      # `e` here is nil (unknown user) or a non-matching %User{} — neither
      # is chardata, so the previous `Logger.error(e)` raised instead of
      # logging; inspect the term instead.
      Logger.error("Couldn't update user #{object["id"]}: #{inspect(e)}")
      :error
  end
end
# TODO: Make secure.
# Incoming Delete: resolve the object and delete our representation of it.
# Note: only checks that the actor exists, not that they are allowed to
# delete the object (see TODO above).
def handle_incoming(
      %{"type" => "Delete", "object" => object_id, "actor" => actor, "id" => _id} = _data
    ) do
  # The object may be embedded or a bare id; reduce it to an id first.
  object_id = Utils.get_ap_id(object_id)

  with %User{} = _actor <- User.get_or_fetch_by_ap_id(actor),
       {:ok, object} <-
         get_obj_helper(object_id) || ActivityPub.fetch_object_from_id(object_id),
       {:ok, activity} <- ActivityPub.delete(object, false) do
    {:ok, activity}
  else
    _e -> :error
  end
end
# Incoming Undo of an Announce: resolve the object and retract the boost.
def handle_incoming(
      %{
        "type" => "Undo",
        "object" => %{"type" => "Announce", "object" => object_id},
        "actor" => actor,
        "id" => id
      } = _data
    ) do
  with %User{} = actor <- User.get_or_fetch_by_ap_id(actor),
       {:ok, object} <-
         get_obj_helper(object_id) || ActivityPub.fetch_object_from_id(object_id),
       {:ok, activity, _} <- ActivityPub.unannounce(actor, object, id, false) do
    {:ok, activity}
  else
    _e -> :error
  end
end
# Incoming Undo of a Follow targeting a local user: record the unfollow and
# drop the relationship.
def handle_incoming(
      %{
        "type" => "Undo",
        "object" => %{"type" => "Follow", "object" => followed},
        "actor" => follower,
        "id" => id
      } = _data
    ) do
  with %User{local: true} = followed <- User.get_cached_by_ap_id(followed),
       %User{} = follower <- User.get_or_fetch_by_ap_id(follower),
       {:ok, activity} <- ActivityPub.unfollow(follower, followed, id, false) do
    User.unfollow(follower, followed)
    {:ok, activity}
  else
    # `_e` (was `e`): the binding was unused and triggered a compiler
    # warning; other clauses in this module already use `_e`.
    _e -> :error
  end
end
# Whether to honor remote Block/Undo-Block activities.
# NOTE(review): reading the application env into module attributes freezes
# the value at compile time — changing the config requires a recompile.
@ap_config Application.get_env(:pleroma, :activitypub)
@accept_blocks Keyword.get(@ap_config, :accept_blocks)
# Incoming Undo of a Block targeting a local user; ignored unless
# `:accept_blocks` is configured.
def handle_incoming(
      %{
        "type" => "Undo",
        "object" => %{"type" => "Block", "object" => blocked},
        "actor" => blocker,
        "id" => id
      } = _data
    ) do
  with true <- @accept_blocks,
       %User{local: true} = blocked <- User.get_cached_by_ap_id(blocked),
       %User{} = blocker <- User.get_or_fetch_by_ap_id(blocker),
       {:ok, activity} <- ActivityPub.unblock(blocker, blocked, id, false) do
    User.unblock(blocker, blocked)
    {:ok, activity}
  else
    # `_e` (was `e`): unused binding caused a compiler warning.
    _e -> :error
  end
end
# Incoming Block of a local user; ignored unless `:accept_blocks` is
# configured. Blocking also force-unfollows the blocked user.
def handle_incoming(
      %{"type" => "Block", "object" => blocked, "actor" => blocker, "id" => id} = _data
    ) do
  # The two user lookups previously used `=` instead of `<-`: inside `with`
  # a bare match RAISES MatchError on failure rather than falling through
  # to `else`, so an unknown/remote `blocked` user crashed the handler
  # instead of returning :error. (`data` was also unused.)
  with true <- @accept_blocks,
       %User{local: true} = blocked <- User.get_cached_by_ap_id(blocked),
       %User{} = blocker <- User.get_or_fetch_by_ap_id(blocker),
       {:ok, activity} <- ActivityPub.block(blocker, blocked, id, false) do
    User.unfollow(blocker, blocked)
    User.block(blocker, blocked)
    {:ok, activity}
  else
    _e -> :error
  end
end
# Incoming Undo of a Like: resolve the object and retract the like.
def handle_incoming(
      %{
        "type" => "Undo",
        "object" => %{"type" => "Like", "object" => object_id},
        "actor" => actor,
        "id" => id
      } = _data
    ) do
  with %User{} = actor <- User.get_or_fetch_by_ap_id(actor),
       {:ok, object} <-
         get_obj_helper(object_id) || ActivityPub.fetch_object_from_id(object_id),
       {:ok, activity, _, _} <- ActivityPub.unlike(actor, object, id, false) do
    {:ok, activity}
  else
    _e -> :error
  end
end
# Anything we don't explicitly handle above is rejected.
def handle_incoming(_), do: :error
# Returns {:ok, object} when the id resolves to a locally-known object,
# nil otherwise — so call sites can chain it with `|| fetch_...` fallbacks.
def get_obj_helper(id) do
  if object = Object.normalize(id), do: {:ok, object}, else: nil
end
# For outgoing objects: when "inReplyTo" is not already an http(s) URI,
# point it at the replied-to object's external (OStatus) URL if we have it.
def set_reply_to_uri(%{"inReplyTo" => in_reply_to} = object) do
  with false <- String.starts_with?(in_reply_to, "http"),
       {:ok, %{data: replied_to}} <- get_obj_helper(in_reply_to) do
    Map.put(object, "inReplyTo", replied_to["external_url"] || in_reply_to)
  else
    _ -> object
  end
end

def set_reply_to_uri(obj), do: obj
# Prepares the object of an outgoing create activity: renders internal
# fields (tags, emoji, attachments, context, reply URIs) into the
# Mastodon-compatible wire format.
def prepare_object(object) do
  object
  |> set_sensitive
  |> add_hashtags
  |> add_mention_tags
  |> add_emoji_tags
  |> add_attributed_to
  |> prepare_attachments
  |> set_conversation
  |> set_reply_to_uri
end
# internal -> Mastodon

# Outgoing Create of a Note: translate the embedded object to the wire
# format and attach the AS2 @context.
def prepare_outgoing(%{"type" => "Create", "object" => %{"type" => "Note"} = object} = data) do
  prepared =
    data
    |> Map.put("object", prepare_object(object))
    |> Map.put("@context", "https://www.w3.org/ns/activitystreams")

  {:ok, prepared}
end
# Mastodon Accept/Reject requires a non-normalized object containing the actor URIs,
# because of course it does.
def prepare_outgoing(%{"type" => "Accept"} = data) do
  # NOTE(review): a bare `<-` binding in `with` always matches, so a nil
  # from Activity.normalize/1 would crash below rather than fall through.
  with follow_activity <- Activity.normalize(data["object"]) do
    object = %{
      "actor" => follow_activity.actor,
      "object" => follow_activity.data["object"],
      "id" => follow_activity.data["id"],
      "type" => "Follow"
    }

    data =
      data
      |> Map.put("object", object)
      |> Map.put("@context", "https://www.w3.org/ns/activitystreams")

    {:ok, data}
  end
end
# Same as the Accept case above: Mastodon needs the denormalized Follow
# object embedded in the outgoing Reject.
def prepare_outgoing(%{"type" => "Reject"} = data) do
  with follow_activity <- Activity.normalize(data["object"]) do
    object = %{
      "actor" => follow_activity.actor,
      "object" => follow_activity.data["object"],
      "id" => follow_activity.data["id"],
      "type" => "Follow"
    }

    data =
      data
      |> Map.put("object", object)
      |> Map.put("@context", "https://www.w3.org/ns/activitystreams")

    {:ok, data}
  end
end
# Any other outgoing activity type: fix a relative object URL if needed and
# attach the AS2 @context.
def prepare_outgoing(%{"type" => _type} = data) do
  {:ok,
   data
   |> maybe_fix_object_url
   |> Map.put("@context", "https://www.w3.org/ns/activitystreams")}
end
# For outgoing activities whose "object" is a non-http id, swap in the
# object's external (OStatus) URL so remote instances can resolve it.
# On fetch failure the data is passed through unchanged.
def maybe_fix_object_url(data) do
  if is_binary(data["object"]) and not String.starts_with?(data["object"], "http") do
    case ActivityPub.fetch_object_from_id(data["object"]) do
      {:ok, relative_object} ->
        if relative_object.data["external_url"] do
          # `_data` is just a naming quirk — the pipe's result is still the
          # value of this branch and gets returned.
          _data =
            data
            |> Map.put("object", relative_object.data["external_url"])
        else
          data
        end

      e ->
        Logger.error("Couldn't fetch #{data["object"]} #{inspect(e)}")
        data
    end
  else
    data
  end
end
# Renders each (plain-string) tag as an AS2 Hashtag object for outgoing
# delivery.
def add_hashtags(object) do
  rendered =
    Enum.map(object["tag"] || [], fn tag ->
      %{
        "href" => Pleroma.Web.Endpoint.url() <> "/tags/#{tag}",
        "name" => "##{tag}",
        "type" => "Hashtag"
      }
    end)

  Map.put(object, "tag", rendered)
end
# Appends a Mention tag for every locally-known recipient (to + cc).
def add_mention_tags(object) do
  mentions =
    (object["to"] ++ (object["cc"] || []))
    |> Enum.map(&User.get_cached_by_ap_id/1)
    |> Enum.filter(& &1)
    |> Enum.map(fn user ->
      %{"type" => "Mention", "href" => user.ap_id, "name" => "@#{user.nickname}"}
    end)

  existing = object["tag"] || []

  Map.put(object, "tag", existing ++ mentions)
end
# TODO: we should probably send mtime instead of unix epoch time for updated
# Renders the internal emoji mapping (shortcode => url) as AS2 Emoji tags,
# appended to any existing tags.
def add_emoji_tags(object) do
  emoji_tags =
    Enum.map(object["emoji"] || [], fn {name, url} ->
      %{
        "icon" => %{"url" => url, "type" => "Image"},
        "name" => ":" <> name <> ":",
        "type" => "Emoji",
        "updated" => "1970-01-01T00:00:00Z",
        "id" => url
      }
    end)

  Map.put(object, "tag", (object["tag"] || []) ++ emoji_tags)
end
# Mirrors the internal "context" into Mastodon's "conversation" field.
def set_conversation(object) do
  object |> Map.put("conversation", object["context"])
end
# Marks the object sensitive when it carries the "nsfw" tag.
def set_sensitive(object) do
  sensitive? = "nsfw" in (object["tag"] || [])
  Map.put(object, "sensitive", sensitive?)
end
# Ensures "attributedTo" is set, falling back to the actor.
def add_attributed_to(object) do
  Map.put(object, "attributedTo", object["attributedTo"] || object["actor"])
end
# Flattens our internal attachment form ("url" as a list of Link objects)
# back into Mastodon-style Documents with a plain url string.
def prepare_attachments(object) do
  prepared =
    Enum.map(object["attachment"] || [], fn attachment ->
      [%{"mediaType" => media_type, "href" => href} | _] = attachment["url"]

      %{
        "url" => href,
        "mediaType" => media_type,
        "name" => attachment["name"],
        "type" => "Document"
      }
    end)

  Map.put(object, "attachment", prepared)
end
# Migrates a freshly AP-upgraded user: rewrites their old OStatus follower
# address to the new AP follower address in other users' following lists
# and in recent activity recipients, and retires any websub subscription.
defp user_upgrade_task(user) do
  old_follower_address = User.ap_followers(user)

  # Replace the old follower address in every follower's following array,
  # in-database via array_replace to avoid loading the rows.
  q =
    from(
      u in User,
      where: ^old_follower_address in u.following,
      update: [
        set: [
          following:
            fragment(
              "array_replace(?,?,?)",
              u.following,
              ^old_follower_address,
              ^user.follower_address
            )
        ]
      ]
    )

  Repo.update_all(q, [])

  maybe_retire_websub(user.ap_id)

  # Only do this for recent activities, don't go through the whole db.
  # Only look at the last 1000 activities.
  since = (Repo.aggregate(Activity, :max, :id) || 0) - 1_000

  q =
    from(
      a in Activity,
      where: ^old_follower_address in a.recipients,
      where: a.id > ^since,
      update: [
        set: [
          recipients:
            fragment(
              "array_replace(?,?,?)",
              a.recipients,
              ^old_follower_address,
              ^user.follower_address
            )
        ]
      ]
    )

  Repo.update_all(q, [])
end
# Upgrades a remote OStatus user to ActivityPub: refetches their AP data,
# merges it into the stored info, and — unless they were already
# AP-enabled — runs the follower-address migration, in the background when
# `async` is true. Returns {:ok, user} or passes through the failure.
def upgrade_user_from_ap_id(ap_id, async \\ true) do
  with %User{local: false} = user <- User.get_by_ap_id(ap_id),
       {:ok, data} <- ActivityPub.fetch_and_prepare_user_from_ap_id(ap_id) do
    data =
      data
      |> Map.put(:info, Map.merge(user.info, data[:info]))

    # Remember the pre-update state so we only migrate on first upgrade.
    already_ap = User.ap_enabled?(user)

    {:ok, user} =
      User.upgrade_changeset(user, data)
      |> Repo.update()

    if !already_ap do
      # This could potentially take a long time, do it in the background
      if async do
        Task.start(fn ->
          user_upgrade_task(user)
        end)
      else
        user_upgrade_task(user)
      end
    end

    {:ok, user}
  else
    e -> e
  end
end
# Deletes any websub client subscriptions whose topic belongs to the given
# ap_id — no longer needed once the user speaks ActivityPub.
def maybe_retire_websub(ap_id) do
  # some sanity checks
  if is_binary(ap_id) && String.length(ap_id) > 8 do
    q =
      from(
        ws in Pleroma.Web.Websub.WebsubClientSubscription,
        where: fragment("? like ?", ws.topic, ^"#{ap_id}%")
      )

    Repo.delete_all(q)
  end
end
# Mastodon may send a user's "url" as an object (%{"href" => ...});
# reduce it to the plain href string.
def maybe_fix_user_url(%{"url" => url} = data) when is_map(url) do
  Map.put(data, "url", url["href"])
end

def maybe_fix_user_url(data), do: data

# Normalizes a remote user object's known quirks before processing.
def maybe_fix_user_object(data) do
  maybe_fix_user_url(data)
end
end