2018-12-23 21:04:54 +01:00
|
|
|
# Pleroma: A lightweight social networking server
|
2018-12-31 16:41:47 +01:00
|
|
|
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
|
2018-12-23 21:04:54 +01:00
|
|
|
# SPDX-License-Identifier: AGPL-3.0-only
|
|
|
|
|
2017-05-16 15:31:11 +02:00
|
|
|
defmodule Pleroma.Web.ActivityPub.Utils do
|
2019-03-05 03:52:23 +01:00
|
|
|
alias Ecto.Changeset
|
|
|
|
alias Ecto.UUID
|
|
|
|
alias Pleroma.Activity
|
|
|
|
alias Pleroma.Notification
|
|
|
|
alias Pleroma.Object
|
2019-02-09 16:16:26 +01:00
|
|
|
alias Pleroma.Repo
|
2019-03-05 03:52:23 +01:00
|
|
|
alias Pleroma.User
|
2019-02-09 16:16:26 +01:00
|
|
|
alias Pleroma.Web
|
2019-03-04 16:09:58 +01:00
|
|
|
alias Pleroma.Web.ActivityPub.Visibility
|
2017-05-16 15:31:11 +02:00
|
|
|
alias Pleroma.Web.Endpoint
|
2019-03-05 03:52:23 +01:00
|
|
|
alias Pleroma.Web.Router.Helpers
|
2019-02-09 16:16:26 +01:00
|
|
|
|
2017-05-16 15:31:11 +02:00
|
|
|
import Ecto.Query
|
2019-02-09 16:16:26 +01:00
|
|
|
|
2018-05-26 20:03:23 +02:00
|
|
|
require Logger
|
2019-07-29 04:43:19 +02:00
|
|
|
require Pleroma.Constants
|
2017-05-16 15:31:11 +02:00
|
|
|
|
2019-09-27 14:22:35 +02:00
|
|
|
@supported_object_types ["Article", "Note", "Video", "Page", "Question", "Answer", "Audio"]
|
2019-05-16 21:09:18 +02:00
|
|
|
@supported_report_states ~w(open closed resolved)
|
|
|
|
@valid_visibilities ~w(public unlisted private direct)
|
2018-11-09 14:39:44 +01:00
|
|
|
|
2018-05-19 09:03:53 +02:00
|
|
|
# Some implementations send the actor URI as the actor field, others send the entire actor object,
|
|
|
|
# so figure out what the actor's URI is based on what we have.
|
2019-07-15 15:01:22 +02:00
|
|
|
def get_ap_id(%{"id" => id} = _), do: id
|
|
|
|
def get_ap_id(id), do: id
|
2018-05-19 09:03:53 +02:00
|
|
|
|
|
|
|
def normalize_params(params) do
|
2018-05-26 13:52:05 +02:00
|
|
|
Map.put(params, "actor", get_ap_id(params["actor"]))
|
2018-05-19 09:03:53 +02:00
|
|
|
end
|
|
|
|
|
2019-09-04 14:25:12 +02:00
|
|
|
@spec determine_explicit_mentions(map()) :: map()
|
|
|
|
def determine_explicit_mentions(%{"tag" => tag} = _) when is_list(tag) do
|
|
|
|
Enum.flat_map(tag, fn
|
|
|
|
%{"type" => "Mention", "href" => href} -> [href]
|
|
|
|
_ -> []
|
|
|
|
end)
|
2018-11-17 16:51:02 +01:00
|
|
|
end
|
|
|
|
|
|
|
|
def determine_explicit_mentions(%{"tag" => tag} = object) when is_map(tag) do
|
2019-09-04 14:25:12 +02:00
|
|
|
object
|
|
|
|
|> Map.put("tag", [tag])
|
2018-11-17 16:51:02 +01:00
|
|
|
|> determine_explicit_mentions()
|
|
|
|
end
|
|
|
|
|
|
|
|
def determine_explicit_mentions(_), do: []
|
|
|
|
|
2019-09-04 14:25:12 +02:00
|
|
|
@spec recipient_in_collection(any(), any()) :: boolean()
|
2018-10-25 07:02:21 +02:00
|
|
|
defp recipient_in_collection(ap_id, coll) when is_binary(coll), do: ap_id == coll
|
|
|
|
defp recipient_in_collection(ap_id, coll) when is_list(coll), do: ap_id in coll
|
|
|
|
defp recipient_in_collection(_, _), do: false
|
|
|
|
|
2019-09-04 14:25:12 +02:00
|
|
|
  @spec recipient_in_message(User.t(), User.t(), map()) :: boolean()
  # Decides whether `recipient` should be treated as an addressee of the raw
  # activity `params` sent by `actor`.
  def recipient_in_message(%User{ap_id: ap_id} = recipient, %User{} = actor, params) do
    # All four AP addressing fields; each may be nil, a binary, or a list.
    addresses = [params["to"], params["cc"], params["bto"], params["bcc"]]

    cond do
      Enum.any?(addresses, &recipient_in_collection(ap_id, &1)) -> true
      # if the message is unaddressed at all, then assume it is directly addressed
      # to the recipient
      Enum.all?(addresses, &is_nil(&1)) -> true
      # if the message is sent from somebody the user is following, then assume it
      # is addressed to the recipient
      User.following?(recipient, actor) -> true
      true -> false
    end
  end
|
|
|
|
|
|
|
|
defp extract_list(target) when is_binary(target), do: [target]
|
|
|
|
defp extract_list(lst) when is_list(lst), do: lst
|
|
|
|
defp extract_list(_), do: []
|
|
|
|
|
|
|
|
def maybe_splice_recipient(ap_id, params) do
|
2019-09-03 16:58:30 +02:00
|
|
|
need_splice? =
|
2018-10-25 07:02:21 +02:00
|
|
|
!recipient_in_collection(ap_id, params["to"]) &&
|
|
|
|
!recipient_in_collection(ap_id, params["cc"])
|
|
|
|
|
2019-09-03 16:58:30 +02:00
|
|
|
if need_splice? do
|
|
|
|
cc_list = extract_list(params["cc"])
|
|
|
|
Map.put(params, "cc", [ap_id | cc_list])
|
2018-10-25 07:02:21 +02:00
|
|
|
else
|
|
|
|
params
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2018-03-21 18:23:27 +01:00
|
|
|
  @doc """
  Builds the JSON-LD `"@context"` header attached to outgoing ActivityPub documents.
  """
  def make_json_ld_header do
    %{
      "@context" => [
        "https://www.w3.org/ns/activitystreams",
        # Instance-local litepub schema extensions.
        "#{Web.base_url()}/schemas/litepub-0.1.jsonld",
        %{
          # No particular language is claimed for the content.
          "@language" => "und"
        }
      ]
    }
  end
|
|
|
|
|
2017-05-16 15:31:11 +02:00
|
|
|
def make_date do
|
2018-03-30 15:01:53 +02:00
|
|
|
DateTime.utc_now() |> DateTime.to_iso8601()
|
2017-05-16 15:31:11 +02:00
|
|
|
end
|
|
|
|
|
|
|
|
def generate_activity_id do
|
|
|
|
generate_id("activities")
|
|
|
|
end
|
|
|
|
|
|
|
|
def generate_context_id do
|
|
|
|
generate_id("contexts")
|
|
|
|
end
|
|
|
|
|
|
|
|
  # Fresh object URL built through the OStatus route helpers.
  def generate_object_id do
    Helpers.o_status_url(Endpoint, :object, UUID.generate())
  end
|
|
|
|
|
|
|
|
  # Builds an instance-local URI "<base_url>/<type>/<uuid>" for the given
  # resource type (e.g. "activities", "contexts").
  def generate_id(type) do
    "#{Web.base_url()}/#{type}/#{UUID.generate()}"
  end
|
|
|
|
|
2018-11-09 14:39:44 +01:00
|
|
|
  # For bare objects of a supported type, wrap the object in a synthetic
  # Create activity so notification targets can be computed uniformly.
  def get_notified_from_object(%{"type" => type} = object) when type in @supported_object_types do
    fake_create_activity = %{
      "to" => object["to"],
      "cc" => object["cc"],
      "type" => "Create",
      "object" => object
    }

    get_notified_from_object(fake_create_activity)
  end

  # Activity-shaped data: delegate to Notification (second arg `false` is
  # passed through verbatim; see Notification.get_notified_from_activity/2).
  def get_notified_from_object(object) do
    Notification.get_notified_from_activity(%Activity{data: object}, false)
  end
|
|
|
|
|
2018-04-02 15:17:09 +02:00
|
|
|
  # Returns the context Object for `context`, creating it when missing.
  # A nil context gets a freshly generated "contexts" URI.
  def create_context(context) do
    context = context || generate_id("contexts")

    # Ecto has problems accessing the constraint inside the jsonb,
    # so we explicitly check for the existed object before insert
    object = Object.get_cached_by_ap_id(context)

    with true <- is_nil(object),
         changeset <- Object.context_mapping(context),
         {:ok, inserted_object} <- Repo.insert(changeset) do
      inserted_object
    else
      _ ->
        # The object already existed (or the insert failed); return what the
        # cache lookup produced — may be nil on a failed insert.
        object
    end
  end
|
|
|
|
|
2017-05-16 15:31:11 +02:00
|
|
|
@doc """
|
|
|
|
Enqueues an activity for federation if it's local
|
|
|
|
"""
|
2019-08-27 15:21:03 +02:00
|
|
|
@spec maybe_federate(any()) :: :ok
|
2017-05-16 15:31:11 +02:00
|
|
|
def maybe_federate(%Activity{local: true} = activity) do
|
2019-07-09 08:30:51 +02:00
|
|
|
if Pleroma.Config.get!([:instance, :federating]) do
|
2019-08-09 19:08:01 +02:00
|
|
|
Pleroma.Web.Federator.publish(activity)
|
2019-07-09 08:30:51 +02:00
|
|
|
end
|
|
|
|
|
2017-05-16 15:31:11 +02:00
|
|
|
:ok
|
|
|
|
end
|
2018-03-30 15:01:53 +02:00
|
|
|
|
2017-05-16 15:31:11 +02:00
|
|
|
def maybe_federate(_), do: :ok
|
|
|
|
|
|
|
|
@doc """
|
|
|
|
Adds an id and a published data if they aren't there,
|
|
|
|
also adds it to an included object
|
|
|
|
"""
|
2019-09-04 14:25:12 +02:00
|
|
|
@spec lazy_put_activity_defaults(map(), boolean) :: map()
|
2019-09-15 13:07:07 +02:00
|
|
|
def lazy_put_activity_defaults(map, fake? \\ false)
|
2017-05-16 15:31:11 +02:00
|
|
|
|
2019-09-04 14:25:12 +02:00
|
|
|
def lazy_put_activity_defaults(map, true) do
|
|
|
|
map
|
|
|
|
|> Map.put_new("id", "pleroma:fakeid")
|
|
|
|
|> Map.put_new_lazy("published", &make_date/0)
|
|
|
|
|> Map.put_new("context", "pleroma:fakecontext")
|
|
|
|
|> Map.put_new("context_id", -1)
|
|
|
|
|> lazy_put_object_defaults(true)
|
2017-05-16 15:31:11 +02:00
|
|
|
end
|
|
|
|
|
2019-09-15 13:07:07 +02:00
|
|
|
def lazy_put_activity_defaults(map, _fake?) do
|
2019-09-04 14:25:12 +02:00
|
|
|
%{data: %{"id" => context}, id: context_id} = create_context(map["context"])
|
2019-04-01 11:16:51 +02:00
|
|
|
|
|
|
|
map
|
2019-09-04 14:25:12 +02:00
|
|
|
|> Map.put_new_lazy("id", &generate_activity_id/0)
|
2019-04-01 11:16:51 +02:00
|
|
|
|> Map.put_new_lazy("published", &make_date/0)
|
2019-09-04 14:25:12 +02:00
|
|
|
|> Map.put_new("context", context)
|
|
|
|
|> Map.put_new("context_id", context_id)
|
|
|
|
|> lazy_put_object_defaults(false)
|
2019-04-01 11:16:51 +02:00
|
|
|
end
|
|
|
|
|
2019-09-04 14:25:12 +02:00
|
|
|
  # Adds an id and published date if they aren't there.
  #
  # Fake variant: placeholder id and a "fake" marker, nothing persisted.
  @spec lazy_put_object_defaults(map(), boolean()) :: map()
  defp lazy_put_object_defaults(%{"object" => map} = activity, true)
       when is_map(map) do
    object =
      map
      |> Map.put_new("id", "pleroma:fake_object_id")
      |> Map.put_new_lazy("published", &make_date/0)
      # The object inherits context/context_id from its wrapping activity.
      |> Map.put_new("context", activity["context"])
      |> Map.put_new("context_id", activity["context_id"])
      |> Map.put_new("fake", true)

    %{activity | "object" => object}
  end

  # Real variant: generate a proper object id; context comes from the activity.
  defp lazy_put_object_defaults(%{"object" => map} = activity, _)
       when is_map(map) do
    object =
      map
      |> Map.put_new_lazy("id", &generate_object_id/0)
      |> Map.put_new_lazy("published", &make_date/0)
      |> Map.put_new("context", activity["context"])
      |> Map.put_new("context_id", activity["context_id"])

    %{activity | "object" => object}
  end

  # No embedded object map — nothing to default; pass the activity through.
  defp lazy_put_object_defaults(activity, _), do: activity
|
|
|
|
|
2017-05-16 15:31:11 +02:00
|
|
|
@doc """
|
|
|
|
Inserts a full object if it is contained in an activity.
|
|
|
|
"""
|
2018-11-25 22:44:03 +01:00
|
|
|
def insert_full_object(%{"object" => %{"type" => type} = object_data} = map)
|
2018-11-09 14:39:44 +01:00
|
|
|
when is_map(object_data) and type in @supported_object_types do
|
2018-11-25 22:44:03 +01:00
|
|
|
with {:ok, object} <- Object.create(object_data) do
|
2019-09-03 16:58:30 +02:00
|
|
|
map = Map.put(map, "object", object.data["id"])
|
2018-11-25 22:44:03 +01:00
|
|
|
|
2019-04-17 11:22:32 +02:00
|
|
|
{:ok, map, object}
|
2017-05-16 15:31:11 +02:00
|
|
|
end
|
|
|
|
end
|
2018-03-30 15:01:53 +02:00
|
|
|
|
2019-04-17 11:22:32 +02:00
|
|
|
def insert_full_object(map), do: {:ok, map, nil}
|
2017-05-16 15:31:11 +02:00
|
|
|
|
|
|
|
#### Like-related helpers
|
|
|
|
|
|
|
|
@doc """
|
|
|
|
Returns an existing like if a user already liked an object
|
|
|
|
"""
|
2019-08-27 15:21:03 +02:00
|
|
|
@spec get_existing_like(String.t(), map()) :: Activity.t() | nil
|
2017-11-19 02:22:07 +01:00
|
|
|
def get_existing_like(actor, %{data: %{"id" => id}}) do
|
2019-08-27 15:21:03 +02:00
|
|
|
actor
|
|
|
|
|> Activity.Queries.by_actor()
|
|
|
|
|> Activity.Queries.by_object_id(id)
|
|
|
|
|> Activity.Queries.by_type("Like")
|
2019-09-03 16:58:30 +02:00
|
|
|
|> limit(1)
|
2019-08-27 15:21:03 +02:00
|
|
|
|> Repo.one()
|
2017-05-16 15:31:11 +02:00
|
|
|
end
|
|
|
|
|
2019-08-27 15:21:03 +02:00
|
|
|
  @spec make_like_data(User.t(), map(), String.t()) :: map()
  # Builds the data map for a Like activity by `actor` on `object`.
  def make_like_data(
        %User{ap_id: ap_id} = actor,
        %{data: %{"actor" => object_actor_id, "id" => id}} = object,
        activity_id
      ) do
    object_actor = User.get_cached_by_ap_id(object_actor_id)

    # Public likes are also addressed to the liker's followers;
    # non-public ones go only to the object's author.
    to =
      if Visibility.is_public?(object) do
        [actor.follower_address, object.data["actor"]]
      else
        [object.data["actor"]]
      end

    # CC everyone the object addressed, minus ourselves and the
    # author's follower collection.
    cc =
      (object.data["to"] ++ (object.data["cc"] || []))
      |> List.delete(actor.ap_id)
      |> List.delete(object_actor.follower_address)

    %{
      "type" => "Like",
      "actor" => ap_id,
      "object" => id,
      "to" => to,
      "cc" => cc,
      "context" => object.data["context"]
    }
    |> maybe_put("id", activity_id)
  end
|
|
|
|
|
2019-08-27 15:21:03 +02:00
|
|
|
@spec update_element_in_object(String.t(), list(any), Object.t()) ::
|
|
|
|
{:ok, Object.t()} | {:error, Ecto.Changeset.t()}
|
2017-05-16 15:31:11 +02:00
|
|
|
def update_element_in_object(property, element, object) do
|
2019-08-27 15:21:03 +02:00
|
|
|
data =
|
|
|
|
Map.merge(
|
|
|
|
object.data,
|
|
|
|
%{"#{property}_count" => length(element), "#{property}s" => element}
|
|
|
|
)
|
2017-05-16 15:31:11 +02:00
|
|
|
|
2019-08-27 15:21:03 +02:00
|
|
|
object
|
|
|
|
|> Changeset.change(data: data)
|
|
|
|
|> Object.update_and_set_cache()
|
2017-05-16 15:31:11 +02:00
|
|
|
end
|
|
|
|
|
2019-08-27 15:21:03 +02:00
|
|
|
@spec add_like_to_object(Activity.t(), Object.t()) ::
|
|
|
|
{:ok, Object.t()} | {:error, Ecto.Changeset.t()}
|
2017-05-16 15:31:11 +02:00
|
|
|
def add_like_to_object(%Activity{data: %{"actor" => actor}}, object) do
|
2019-08-27 15:21:03 +02:00
|
|
|
[actor | fetch_likes(object)]
|
|
|
|
|> Enum.uniq()
|
|
|
|
|> update_likes_in_object(object)
|
2017-05-16 15:31:11 +02:00
|
|
|
end
|
|
|
|
|
2019-08-27 15:21:03 +02:00
|
|
|
@spec remove_like_from_object(Activity.t(), Object.t()) ::
|
|
|
|
{:ok, Object.t()} | {:error, Ecto.Changeset.t()}
|
2017-05-16 15:31:11 +02:00
|
|
|
def remove_like_from_object(%Activity{data: %{"actor" => actor}}, object) do
|
2019-08-27 15:21:03 +02:00
|
|
|
object
|
|
|
|
|> fetch_likes()
|
|
|
|
|> List.delete(actor)
|
|
|
|
|> update_likes_in_object(object)
|
|
|
|
end
|
2018-07-12 19:06:28 +02:00
|
|
|
|
2019-08-27 15:21:03 +02:00
|
|
|
defp update_likes_in_object(likes, object) do
|
|
|
|
update_element_in_object("like", likes, object)
|
|
|
|
end
|
|
|
|
|
|
|
|
defp fetch_likes(object) do
|
|
|
|
if is_list(object.data["likes"]) do
|
|
|
|
object.data["likes"]
|
|
|
|
else
|
|
|
|
[]
|
2017-05-16 15:31:11 +02:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
#### Follow-related helpers
|
|
|
|
|
2018-05-26 20:03:23 +02:00
|
|
|
@doc """
|
|
|
|
Updates a follow activity's state (for locked accounts).
|
|
|
|
"""
|
2019-09-04 14:25:12 +02:00
|
|
|
@spec update_follow_state_for_all(Activity.t(), String.t()) :: {:ok, Activity} | {:error, any()}
|
2019-06-05 16:43:35 +02:00
|
|
|
def update_follow_state_for_all(
|
|
|
|
%Activity{data: %{"actor" => actor, "object" => object}} = activity,
|
2019-01-29 13:21:02 +01:00
|
|
|
state
|
|
|
|
) do
|
2019-09-04 20:40:53 +02:00
|
|
|
"Follow"
|
|
|
|
|> Activity.Queries.by_type()
|
|
|
|
|> Activity.Queries.by_actor(actor)
|
|
|
|
|> Activity.Queries.by_object_id(object)
|
|
|
|
|> where(fragment("data->>'state' = 'pending'"))
|
|
|
|
|> update(set: [data: fragment("jsonb_set(data, '{state}', ?)", ^state)])
|
|
|
|
|> Repo.update_all([])
|
2019-01-29 13:21:02 +01:00
|
|
|
|
2019-09-04 20:40:53 +02:00
|
|
|
User.set_follow_state_cache(actor, object, state)
|
|
|
|
|
|
|
|
activity = Activity.get_by_id(activity.id)
|
|
|
|
|
|
|
|
{:ok, activity}
|
2019-01-29 13:21:02 +01:00
|
|
|
end
|
|
|
|
|
2019-08-14 23:47:30 +02:00
|
|
|
  # Sets the "state" field of a single follow activity and, on success,
  # syncs the follow-state cache.
  def update_follow_state(
        %Activity{data: %{"actor" => actor, "object" => object}} = activity,
        state
      ) do
    new_data = Map.put(activity.data, "state", state)
    changeset = Changeset.change(activity, data: new_data)

    with {:ok, activity} <- Repo.update(changeset) do
      User.set_follow_state_cache(actor, object, state)
      {:ok, activity}
    end
  end
|
|
|
|
|
2017-05-16 15:31:11 +02:00
|
|
|
@doc """
|
|
|
|
Makes a follow activity data for the given follower and followed
|
|
|
|
"""
|
2018-05-28 20:31:48 +02:00
|
|
|
def make_follow_data(
|
|
|
|
%User{ap_id: follower_id},
|
2018-12-09 10:12:48 +01:00
|
|
|
%User{ap_id: followed_id} = _followed,
|
2018-05-28 20:31:48 +02:00
|
|
|
activity_id
|
|
|
|
) do
|
2019-08-27 15:21:03 +02:00
|
|
|
%{
|
2017-05-16 15:31:11 +02:00
|
|
|
"type" => "Follow",
|
|
|
|
"actor" => follower_id,
|
|
|
|
"to" => [followed_id],
|
2019-07-29 04:43:19 +02:00
|
|
|
"cc" => [Pleroma.Constants.as_public()],
|
2018-10-06 01:31:00 +02:00
|
|
|
"object" => followed_id,
|
|
|
|
"state" => "pending"
|
2017-05-16 15:31:11 +02:00
|
|
|
}
|
2019-08-27 15:21:03 +02:00
|
|
|
|> maybe_put("id", activity_id)
|
2017-05-16 15:31:11 +02:00
|
|
|
end
|
|
|
|
|
2018-03-30 15:01:53 +02:00
|
|
|
  # Most recent Follow activity from `follower` to `followed`, or nil.
  def fetch_latest_follow(%User{ap_id: follower_id}, %User{ap_id: followed_id}) do
    "Follow"
    |> Activity.Queries.by_type()
    |> where(actor: ^follower_id)
    # this is to use the index
    |> Activity.Queries.by_object_id(followed_id)
    |> order_by([activity], fragment("? desc nulls last", activity.id))
    |> limit(1)
    |> Repo.one()
  end
|
|
|
|
|
|
|
|
#### Announce-related helpers
|
|
|
|
|
|
|
|
@doc """
|
2018-04-17 10:13:08 +02:00
|
|
|
Returns an existing announce activity if the notice has already been announced
|
2017-05-16 15:31:11 +02:00
|
|
|
"""
|
2019-09-04 14:25:12 +02:00
|
|
|
  @spec get_existing_announce(String.t(), map()) :: Activity.t() | nil
  def get_existing_announce(actor, %{data: %{"id" => ap_id}}) do
    "Announce"
    |> Activity.Queries.by_type()
    |> where(actor: ^actor)
    # this is to use the index
    |> Activity.Queries.by_object_id(ap_id)
    |> Repo.one()
  end
|
|
|
|
|
2018-04-16 16:59:32 +02:00
|
|
|
@doc """
|
|
|
|
Make announce activity data for the given actor and object
|
|
|
|
"""
|
2018-08-06 12:37:52 +02:00
|
|
|
# for relayed messages, we only want to send to subscribers
|
|
|
|
def make_announce_data(
|
2019-01-18 00:12:42 +01:00
|
|
|
%User{ap_id: ap_id} = user,
|
2018-08-06 12:37:52 +02:00
|
|
|
%Object{data: %{"id" => id}} = object,
|
2019-01-18 00:12:42 +01:00
|
|
|
activity_id,
|
|
|
|
false
|
2018-08-06 12:37:52 +02:00
|
|
|
) do
|
2019-08-27 15:21:03 +02:00
|
|
|
%{
|
2018-08-06 12:37:52 +02:00
|
|
|
"type" => "Announce",
|
|
|
|
"actor" => ap_id,
|
|
|
|
"object" => id,
|
|
|
|
"to" => [user.follower_address],
|
|
|
|
"cc" => [],
|
|
|
|
"context" => object.data["context"]
|
|
|
|
}
|
2019-08-27 15:21:03 +02:00
|
|
|
|> maybe_put("id", activity_id)
|
2018-08-06 12:37:52 +02:00
|
|
|
end
|
|
|
|
|
2018-03-30 15:01:53 +02:00
|
|
|
def make_announce_data(
|
|
|
|
%User{ap_id: ap_id} = user,
|
|
|
|
%Object{data: %{"id" => id}} = object,
|
2019-01-18 00:12:42 +01:00
|
|
|
activity_id,
|
|
|
|
true
|
2018-03-30 15:01:53 +02:00
|
|
|
) do
|
2019-08-27 15:21:03 +02:00
|
|
|
%{
|
2017-05-16 15:31:11 +02:00
|
|
|
"type" => "Announce",
|
|
|
|
"actor" => ap_id,
|
|
|
|
"object" => id,
|
2017-07-19 19:06:49 +02:00
|
|
|
"to" => [user.follower_address, object.data["actor"]],
|
2019-07-29 04:43:19 +02:00
|
|
|
"cc" => [Pleroma.Constants.as_public()],
|
2017-05-16 15:31:11 +02:00
|
|
|
"context" => object.data["context"]
|
|
|
|
}
|
2019-08-27 15:21:03 +02:00
|
|
|
|> maybe_put("id", activity_id)
|
2017-05-16 15:31:11 +02:00
|
|
|
end
|
|
|
|
|
2018-04-17 10:13:08 +02:00
|
|
|
@doc """
|
|
|
|
Make unannounce activity data for the given actor and object
|
|
|
|
"""
|
|
|
|
def make_unannounce_data(
|
|
|
|
%User{ap_id: ap_id} = user,
|
2019-10-04 16:32:42 +02:00
|
|
|
%Activity{data: %{"context" => context, "object" => object}} = activity,
|
2018-04-23 03:28:51 +02:00
|
|
|
activity_id
|
2018-04-17 10:13:08 +02:00
|
|
|
) do
|
2019-10-04 16:32:42 +02:00
|
|
|
object = Object.normalize(object)
|
|
|
|
|
2019-08-27 15:21:03 +02:00
|
|
|
%{
|
2018-04-17 10:13:08 +02:00
|
|
|
"type" => "Undo",
|
|
|
|
"actor" => ap_id,
|
2018-04-23 03:28:51 +02:00
|
|
|
"object" => activity.data,
|
2019-10-04 16:32:42 +02:00
|
|
|
"to" => [user.follower_address, object.data["actor"]],
|
2019-07-29 04:43:19 +02:00
|
|
|
"cc" => [Pleroma.Constants.as_public()],
|
2018-04-18 02:35:07 +02:00
|
|
|
"context" => context
|
2018-04-17 10:13:08 +02:00
|
|
|
}
|
2019-08-27 15:21:03 +02:00
|
|
|
|> maybe_put("id", activity_id)
|
2018-04-17 10:13:08 +02:00
|
|
|
end
|
|
|
|
|
2018-05-19 15:22:43 +02:00
|
|
|
  # Builds Undo activity data retracting a like; mirrors make_unannounce_data/3.
  def make_unlike_data(
        %User{ap_id: ap_id} = user,
        %Activity{data: %{"context" => context, "object" => object}} = activity,
        activity_id
      ) do
    # Resolve the liked object so we can address its author.
    object = Object.normalize(object)

    %{
      "type" => "Undo",
      "actor" => ap_id,
      # The Undo wraps the full original like payload.
      "object" => activity.data,
      "to" => [user.follower_address, object.data["actor"]],
      "cc" => [Pleroma.Constants.as_public()],
      "context" => context
    }
    |> maybe_put("id", activity_id)
  end
|
|
|
|
|
2019-09-04 14:25:12 +02:00
|
|
|
@spec add_announce_to_object(Activity.t(), Object.t()) ::
|
|
|
|
{:ok, Object.t()} | {:error, Ecto.Changeset.t()}
|
2018-08-06 12:37:52 +02:00
|
|
|
def add_announce_to_object(
|
2019-10-01 21:40:35 +02:00
|
|
|
%Activity{data: %{"actor" => actor}},
|
2018-08-06 12:37:52 +02:00
|
|
|
object
|
|
|
|
) do
|
2019-09-04 16:42:27 +02:00
|
|
|
announcements = take_announcements(object)
|
2018-07-12 19:06:28 +02:00
|
|
|
|
2019-09-04 14:25:12 +02:00
|
|
|
with announcements <- Enum.uniq([actor | announcements]) do
|
2017-05-16 15:31:11 +02:00
|
|
|
update_element_in_object("announcement", announcements, object)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2018-08-06 12:37:52 +02:00
|
|
|
def add_announce_to_object(_, object), do: {:ok, object}
|
|
|
|
|
2019-09-04 14:25:12 +02:00
|
|
|
@spec remove_announce_from_object(Activity.t(), Object.t()) ::
|
|
|
|
{:ok, Object.t()} | {:error, Ecto.Changeset.t()}
|
2018-04-14 09:39:16 +02:00
|
|
|
def remove_announce_from_object(%Activity{data: %{"actor" => actor}}, object) do
|
2019-09-04 16:42:27 +02:00
|
|
|
with announcements <- List.delete(take_announcements(object), actor) do
|
2018-04-14 09:39:16 +02:00
|
|
|
update_element_in_object("announcement", announcements, object)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-09-04 16:42:27 +02:00
|
|
|
defp take_announcements(%{data: %{"announcements" => announcements}} = _)
|
2019-09-04 14:25:12 +02:00
|
|
|
when is_list(announcements),
|
|
|
|
do: announcements
|
|
|
|
|
2019-09-04 16:42:27 +02:00
|
|
|
defp take_announcements(_), do: []
|
2019-09-04 14:25:12 +02:00
|
|
|
|
2017-05-16 15:31:11 +02:00
|
|
|
#### Unfollow-related helpers
|
|
|
|
|
2018-05-21 03:01:14 +02:00
|
|
|
def make_unfollow_data(follower, followed, follow_activity, activity_id) do
|
2019-08-27 15:21:03 +02:00
|
|
|
%{
|
2017-05-16 15:31:11 +02:00
|
|
|
"type" => "Undo",
|
|
|
|
"actor" => follower.ap_id,
|
|
|
|
"to" => [followed.ap_id],
|
2018-05-21 03:01:14 +02:00
|
|
|
"object" => follow_activity.data
|
2017-05-16 15:31:11 +02:00
|
|
|
}
|
2019-08-27 15:21:03 +02:00
|
|
|
|> maybe_put("id", activity_id)
|
2017-05-16 15:31:11 +02:00
|
|
|
end
|
|
|
|
|
2018-05-19 00:09:56 +02:00
|
|
|
#### Block-related helpers
|
2019-09-04 14:25:12 +02:00
|
|
|
  @spec fetch_latest_block(User.t(), User.t()) :: Activity.t() | nil
  # Most recent Block activity from `blocker` to `blocked`, or nil.
  def fetch_latest_block(%User{ap_id: blocker_id}, %User{ap_id: blocked_id}) do
    "Block"
    |> Activity.Queries.by_type()
    |> where(actor: ^blocker_id)
    # this is to use the index
    |> Activity.Queries.by_object_id(blocked_id)
    |> order_by([activity], fragment("? desc nulls last", activity.id))
    |> limit(1)
    |> Repo.one()
  end
|
|
|
|
|
2018-05-21 03:01:14 +02:00
|
|
|
def make_block_data(blocker, blocked, activity_id) do
|
2019-08-27 15:21:03 +02:00
|
|
|
%{
|
2018-05-19 00:09:56 +02:00
|
|
|
"type" => "Block",
|
|
|
|
"actor" => blocker.ap_id,
|
|
|
|
"to" => [blocked.ap_id],
|
|
|
|
"object" => blocked.ap_id
|
|
|
|
}
|
2019-08-27 15:21:03 +02:00
|
|
|
|> maybe_put("id", activity_id)
|
2018-05-19 00:09:56 +02:00
|
|
|
end
|
|
|
|
|
2018-05-21 03:01:14 +02:00
|
|
|
def make_unblock_data(blocker, blocked, block_activity, activity_id) do
|
2019-08-27 15:21:03 +02:00
|
|
|
%{
|
2018-05-19 00:09:56 +02:00
|
|
|
"type" => "Undo",
|
|
|
|
"actor" => blocker.ap_id,
|
|
|
|
"to" => [blocked.ap_id],
|
|
|
|
"object" => block_activity.data
|
2017-05-16 15:31:11 +02:00
|
|
|
}
|
2019-08-27 15:21:03 +02:00
|
|
|
|> maybe_put("id", activity_id)
|
2017-05-16 15:31:11 +02:00
|
|
|
end
|
|
|
|
|
|
|
|
#### Create-related helpers
|
|
|
|
|
|
|
|
def make_create_data(params, additional) do
|
|
|
|
published = params.published || make_date()
|
2018-03-30 15:01:53 +02:00
|
|
|
|
2017-11-19 02:22:07 +01:00
|
|
|
%{
|
2017-05-16 15:31:11 +02:00
|
|
|
"type" => "Create",
|
2018-03-30 15:01:53 +02:00
|
|
|
"to" => params.to |> Enum.uniq(),
|
2017-05-16 15:31:11 +02:00
|
|
|
"actor" => params.actor.ap_id,
|
|
|
|
"object" => params.object,
|
|
|
|
"published" => published,
|
|
|
|
"context" => params.context
|
|
|
|
}
|
|
|
|
|> Map.merge(additional)
|
|
|
|
end
|
2019-02-20 17:51:25 +01:00
|
|
|
|
2019-09-27 14:22:35 +02:00
|
|
|
#### Listen-related helpers
|
|
|
|
def make_listen_data(params, additional) do
|
|
|
|
published = params.published || make_date()
|
|
|
|
|
|
|
|
%{
|
|
|
|
"type" => "Listen",
|
|
|
|
"to" => params.to |> Enum.uniq(),
|
|
|
|
"actor" => params.actor.ap_id,
|
|
|
|
"object" => params.object,
|
|
|
|
"published" => published,
|
|
|
|
"context" => params.context
|
|
|
|
}
|
|
|
|
|> Map.merge(additional)
|
|
|
|
end
|
|
|
|
|
2019-02-20 17:51:25 +01:00
|
|
|
#### Flag-related helpers
|
2019-09-04 14:25:12 +02:00
|
|
|
  @spec make_flag_data(map(), map()) :: map()
  # Builds Flag (report) activity data; the reported account and statuses
  # become the activity's object. Reports always start in the "open" state.
  def make_flag_data(%{actor: actor, context: context, content: content} = params, additional) do
    %{
      "type" => "Flag",
      "actor" => actor.ap_id,
      "content" => content,
      "object" => build_flag_object(params),
      "context" => context,
      "state" => "open"
    }
    |> Map.merge(additional)
  end

  # Without the required fields no report can be built.
  def make_flag_data(_, _), do: %{}
|
|
|
|
|
|
|
|
  # Object of a Flag activity: the reported account's ap_id followed by the
  # ids of any reported statuses (each given as an Activity struct, a raw
  # activity map, or a bare id string).
  defp build_flag_object(%{account: account, statuses: statuses} = _) do
    [account.ap_id] ++
      Enum.map(statuses || [], fn
        %Activity{} = act -> act.data["id"]
        act when is_map(act) -> act["id"]
        act when is_binary(act) -> act
      end)
  end

  defp build_flag_object(_), do: []
|
|
|
|
|
2019-03-06 22:13:26 +01:00
|
|
|
@doc """
|
|
|
|
Fetches the OrderedCollection/OrderedCollectionPage from `from`, limiting the amount of pages fetched after
|
|
|
|
the first one to `pages_left` pages.
|
|
|
|
If the amount of pages is higher than the collection has, it returns whatever was there.
|
|
|
|
"""
|
|
|
|
def fetch_ordered_collection(from, pages_left, acc \\ []) do
|
|
|
|
with {:ok, response} <- Tesla.get(from),
|
2019-05-13 22:37:38 +02:00
|
|
|
{:ok, collection} <- Jason.decode(response.body) do
|
2019-03-06 22:13:26 +01:00
|
|
|
case collection["type"] do
|
|
|
|
"OrderedCollection" ->
|
|
|
|
# If we've encountered the OrderedCollection and not the page,
|
|
|
|
# just call the same function on the page address
|
|
|
|
fetch_ordered_collection(collection["first"], pages_left)
|
|
|
|
|
|
|
|
"OrderedCollectionPage" ->
|
|
|
|
if pages_left > 0 do
|
|
|
|
# There are still more pages
|
|
|
|
if Map.has_key?(collection, "next") do
|
|
|
|
# There are still more pages, go deeper saving what we have into the accumulator
|
|
|
|
fetch_ordered_collection(
|
|
|
|
collection["next"],
|
|
|
|
pages_left - 1,
|
|
|
|
acc ++ collection["orderedItems"]
|
|
|
|
)
|
|
|
|
else
|
|
|
|
# No more pages left, just return whatever we already have
|
|
|
|
acc ++ collection["orderedItems"]
|
|
|
|
end
|
|
|
|
else
|
|
|
|
# Got the amount of pages needed, add them all to the accumulator
|
|
|
|
acc ++ collection["orderedItems"]
|
|
|
|
end
|
|
|
|
|
|
|
|
_ ->
|
|
|
|
{:error, "Not an OrderedCollection or OrderedCollectionPage"}
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
2019-05-16 21:09:18 +02:00
|
|
|
|
|
|
|
#### Report-related helpers
|
|
|
|
|
|
|
|
  # Transitions a report (Flag activity) into one of @supported_report_states
  # and persists the change.
  def update_report_state(%Activity{} = activity, state) when state in @supported_report_states do
    new_data = Map.put(activity.data, "state", state)

    activity
    |> Changeset.change(data: new_data)
    |> Repo.update()
  end

  # Anything outside the supported states is rejected.
  def update_report_state(_, _), do: {:error, "Unsupported state"}
|
|
|
|
|
|
|
|
  # Rewrites an activity's (and its object's) to/cc/recipients for the new
  # visibility and persists both records. The object is updated first so the
  # activity carries the already-updated object struct.
  def update_activity_visibility(activity, visibility) when visibility in @valid_visibilities do
    [to, cc, recipients] =
      activity
      |> get_updated_targets(visibility)
      |> Enum.map(&Enum.uniq/1)

    object_data =
      activity.object.data
      |> Map.put("to", to)
      |> Map.put("cc", cc)

    {:ok, object} =
      activity.object
      |> Object.change(%{data: object_data})
      |> Object.update_and_set_cache()

    activity_data =
      activity.data
      |> Map.put("to", to)
      |> Map.put("cc", cc)

    activity
    |> Map.put(:object, object)
    |> Activity.change(%{data: activity_data, recipients: recipients})
    |> Repo.update()
  end

  # Anything outside @valid_visibilities is rejected.
  def update_activity_visibility(_, _), do: {:error, "Unsupported visibility"}
|
|
|
|
|
|
|
|
  # Computes new [to, cc, recipients] lists for the requested visibility by
  # moving the public collection and the author's follower address between
  # "to" and "cc". Unknown visibilities leave the targets unchanged.
  defp get_updated_targets(
         %Activity{data: %{"to" => to} = data, recipients: recipients},
         visibility
       ) do
    cc = Map.get(data, "cc", [])
    follower_address = User.get_cached_by_ap_id(data["actor"]).follower_address
    public = Pleroma.Constants.as_public()

    case visibility do
      "public" ->
        # Public goes in "to"; followers move to "cc".
        to = [public | List.delete(to, follower_address)]
        cc = [follower_address | List.delete(cc, public)]
        recipients = [public | recipients]
        [to, cc, recipients]

      "private" ->
        # Followers-only: strip the public collection everywhere.
        to = [follower_address | List.delete(to, public)]
        cc = List.delete(cc, public)
        recipients = List.delete(recipients, public)
        [to, cc, recipients]

      "unlisted" ->
        # Followers in "to", public only in "cc" (keeps it off public timelines).
        to = [follower_address | List.delete(to, public)]
        cc = [public | List.delete(cc, follower_address)]
        recipients = recipients ++ [follower_address, public]
        [to, cc, recipients]

      _ ->
        [to, cc, recipients]
    end
  end
|
2019-06-01 15:07:01 +02:00
|
|
|
|
|
|
|
  # All poll-vote activities by `actor` on the given question: Create
  # activities whose object is an "Answer" replying to the question's id.
  def get_existing_votes(actor, %{data: %{"id" => id}}) do
    actor
    |> Activity.Queries.by_actor()
    |> Activity.Queries.by_type("Create")
    |> Activity.with_preloaded_object()
    |> where([a, object: o], fragment("(?)->>'inReplyTo' = ?", o.data, ^to_string(id)))
    |> where([a, object: o], fragment("(?)->>'type' = 'Answer'", o.data))
    |> Repo.all()
  end
|
2019-08-27 15:21:03 +02:00
|
|
|
|
2019-10-05 14:49:45 +02:00
|
|
|
def maybe_put(map, _key, nil), do: map
|
|
|
|
def maybe_put(map, key, value), do: Map.put(map, key, value)
|
2017-05-16 15:31:11 +02:00
|
|
|
end
|