2018-12-23 21:04:54 +01:00
|
|
|
# Pleroma: A lightweight social networking server
|
2020-03-02 06:08:45 +01:00
|
|
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
2018-12-23 21:04:54 +01:00
|
|
|
# SPDX-License-Identifier: AGPL-3.0-only
|
|
|
|
|
2017-05-16 15:31:11 +02:00
|
|
|
defmodule Pleroma.Web.ActivityPub.Utils do
|
2019-03-05 03:52:23 +01:00
|
|
|
alias Ecto.Changeset
|
|
|
|
alias Ecto.UUID
|
|
|
|
alias Pleroma.Activity
|
|
|
|
alias Pleroma.Notification
|
|
|
|
alias Pleroma.Object
|
2019-02-09 16:16:26 +01:00
|
|
|
alias Pleroma.Repo
|
2019-03-05 03:52:23 +01:00
|
|
|
alias Pleroma.User
|
2019-02-09 16:16:26 +01:00
|
|
|
alias Pleroma.Web
|
2019-10-07 14:01:18 +02:00
|
|
|
alias Pleroma.Web.ActivityPub.ActivityPub
|
2019-03-04 16:09:58 +01:00
|
|
|
alias Pleroma.Web.ActivityPub.Visibility
|
2019-10-23 21:27:22 +02:00
|
|
|
alias Pleroma.Web.AdminAPI.AccountView
|
2017-05-16 15:31:11 +02:00
|
|
|
alias Pleroma.Web.Endpoint
|
2019-03-05 03:52:23 +01:00
|
|
|
alias Pleroma.Web.Router.Helpers
|
2019-02-09 16:16:26 +01:00
|
|
|
|
2017-05-16 15:31:11 +02:00
|
|
|
import Ecto.Query
|
2019-02-09 16:16:26 +01:00
|
|
|
|
2018-05-26 20:03:23 +02:00
|
|
|
require Logger
|
2019-07-29 04:43:19 +02:00
|
|
|
require Pleroma.Constants
|
2017-05-16 15:31:11 +02:00
|
|
|
|
2019-12-17 16:16:21 +01:00
|
|
|
@supported_object_types [
|
|
|
|
"Article",
|
|
|
|
"Note",
|
|
|
|
"Event",
|
|
|
|
"Video",
|
|
|
|
"Page",
|
|
|
|
"Question",
|
|
|
|
"Answer",
|
|
|
|
"Audio"
|
|
|
|
]
|
2019-10-27 14:05:32 +01:00
|
|
|
@strip_status_report_states ~w(closed resolved)
|
2019-05-16 21:09:18 +02:00
|
|
|
@supported_report_states ~w(open closed resolved)
|
|
|
|
@valid_visibilities ~w(public unlisted private direct)
|
2018-11-09 14:39:44 +01:00
|
|
|
|
2018-05-19 09:03:53 +02:00
|
|
|
# Some implementations send the actor URI as the actor field, others send
# the entire actor object — extract the actor's URI from either form.
def get_ap_id(actor) do
  case actor do
    %{"id" => id} -> id
    id -> id
  end
end

# Normalizes the "actor" field of incoming params to a bare AP id string.
def normalize_params(params) do
  actor_id = get_ap_id(params["actor"])
  Map.put(params, "actor", actor_id)
end
|
|
|
|
|
2020-02-25 15:34:56 +01:00
|
|
|
@spec determine_explicit_mentions(map()) :: [any]
# Collects the href of every Mention tag on the object; entries without
# a "Mention" type or an "href" are skipped.
def determine_explicit_mentions(%{"tag" => tag}) when is_list(tag) do
  for %{"type" => "Mention", "href" => href} <- tag, do: href
end

# A single tag map is treated as a one-element tag list.
def determine_explicit_mentions(%{"tag" => tag} = object) when is_map(tag) do
  determine_explicit_mentions(%{object | "tag" => [tag]})
end

# No usable tag field: no explicit mentions.
def determine_explicit_mentions(_), do: []
|
|
|
|
|
2019-10-23 03:50:25 +02:00
|
|
|
# A "collection" may be a single AP id string or a list of ids;
# checks whether `ap_id` is (in) it.
@spec label_in_collection?(any(), any()) :: boolean()
defp label_in_collection?(ap_id, coll) do
  cond do
    is_binary(coll) -> coll == ap_id
    is_list(coll) -> Enum.member?(coll, ap_id)
    true -> false
  end
end

# True when `label` appears in any addressing field of the message.
@spec label_in_message?(String.t(), map()) :: boolean()
def label_in_message?(label, params) do
  Enum.any?(["to", "cc", "bto", "bcc"], fn field ->
    label_in_collection?(label, params[field])
  end)
end

# True when the message carries no addressing fields at all.
@spec unaddressed_message?(map()) :: boolean()
def unaddressed_message?(params) do
  Enum.all?(["to", "cc", "bto", "bcc"], &is_nil(params[&1]))
end

# A user receives a message when it is addressed to them, when it has no
# addressing at all, or when they follow the sender.
@spec recipient_in_message(User.t(), User.t(), map()) :: boolean()
def recipient_in_message(%User{ap_id: ap_id} = recipient, %User{} = actor, params) do
  cond do
    label_in_message?(ap_id, params) -> true
    unaddressed_message?(params) -> true
    true -> User.following?(recipient, actor)
  end
end

# Coerces a scalar/list/absent addressing value into a list.
defp extract_list(value) do
  cond do
    is_binary(value) -> [value]
    is_list(value) -> value
    true -> []
  end
end

# Ensures `ap_id` is addressed by the message, adding it to "cc" when it
# appears in neither "to" nor "cc".
def maybe_splice_recipient(ap_id, params) do
  already_addressed? =
    label_in_collection?(ap_id, params["to"]) or
      label_in_collection?(ap_id, params["cc"])

  if already_addressed? do
    params
  else
    Map.put(params, "cc", [ap_id | extract_list(params["cc"])])
  end
end
|
|
|
|
|
2018-03-21 18:23:27 +01:00
|
|
|
# Standard JSON-LD "@context" header attached to outgoing ActivityPub
# documents: the AS2 context, this instance's litepub schema, and an
# undetermined default language.
def make_json_ld_header do
  %{
    "@context" => [
      "https://www.w3.org/ns/activitystreams",
      "#{Web.base_url()}/schemas/litepub-0.1.jsonld",
      %{
        "@language" => "und"
      }
    ]
  }
end
|
|
|
|
|
2017-05-16 15:31:11 +02:00
|
|
|
# Current UTC time as an ISO 8601 string, used for "published" fields.
def make_date do
  DateTime.utc_now() |> DateTime.to_iso8601()
end

# Fresh AP id for an activity on this instance.
def generate_activity_id do
  generate_id("activities")
end

# Fresh AP id for a conversation context on this instance.
def generate_context_id do
  generate_id("contexts")
end

# Fresh AP id for an object, routed through the OStatus object URL helper.
def generate_object_id do
  Helpers.o_status_url(Endpoint, :object, UUID.generate())
end

# Builds "<base_url>/<type>/<uuid>" — the generic id scheme for this instance.
def generate_id(type) do
  "#{Web.base_url()}/#{type}/#{UUID.generate()}"
end
|
|
|
|
|
2018-11-09 14:39:44 +01:00
|
|
|
# For a bare object of a supported type, wraps it in a synthetic Create
# activity (keeping its addressing) so notification targets can be computed.
def get_notified_from_object(%{"type" => type} = object) when type in @supported_object_types do
  fake_create_activity = %{
    "to" => object["to"],
    "cc" => object["cc"],
    "type" => "Create",
    "object" => object
  }

  get_notified_from_object(fake_create_activity)
end

# For activity-shaped data, delegates to Notification with a transient
# Activity struct (local? = false).
def get_notified_from_object(object) do
  Notification.get_notified_from_activity(%Activity{data: object}, false)
end
|
|
|
|
|
2018-04-02 15:17:09 +02:00
|
|
|
# Returns the Object representing `context`, creating it on first use.
# A nil context gets a freshly generated context id.
def create_context(context) do
  context = context || generate_id("contexts")

  # Ecto cannot enforce the uniqueness constraint inside the jsonb column,
  # so we look the context object up explicitly before inserting.
  case Object.get_cached_by_ap_id(context) do
    nil ->
      changeset = Object.context_mapping(context)

      case Repo.insert(changeset) do
        {:ok, inserted_object} ->
          inserted_object

        # Preserves the original behaviour: a failed insert yields nil
        # (the previously fetched object, which is nil on this branch).
        _ ->
          nil
      end

    object ->
      object
  end
end
|
|
|
|
|
2017-05-16 15:31:11 +02:00
|
|
|
@doc """
|
|
|
|
Enqueues an activity for federation if it's local
|
|
|
|
"""
|
2019-08-27 15:21:03 +02:00
|
|
|
@spec maybe_federate(any()) :: :ok
|
2017-05-16 15:31:11 +02:00
|
|
|
def maybe_federate(%Activity{local: true} = activity) do
|
2019-07-09 08:30:51 +02:00
|
|
|
if Pleroma.Config.get!([:instance, :federating]) do
|
2019-08-09 19:08:01 +02:00
|
|
|
Pleroma.Web.Federator.publish(activity)
|
2019-07-09 08:30:51 +02:00
|
|
|
end
|
|
|
|
|
2017-05-16 15:31:11 +02:00
|
|
|
:ok
|
|
|
|
end
|
2018-03-30 15:01:53 +02:00
|
|
|
|
2017-05-16 15:31:11 +02:00
|
|
|
def maybe_federate(_), do: :ok
|
|
|
|
|
|
|
|
@doc """
|
|
|
|
Adds an id and a published data if they aren't there,
|
|
|
|
also adds it to an included object
|
|
|
|
"""
|
2019-09-04 14:25:12 +02:00
|
|
|
@spec lazy_put_activity_defaults(map(), boolean) :: map()
|
2019-09-15 13:07:07 +02:00
|
|
|
def lazy_put_activity_defaults(map, fake? \\ false)
|
2017-05-16 15:31:11 +02:00
|
|
|
|
2019-09-04 14:25:12 +02:00
|
|
|
def lazy_put_activity_defaults(map, true) do
|
|
|
|
map
|
|
|
|
|> Map.put_new("id", "pleroma:fakeid")
|
|
|
|
|> Map.put_new_lazy("published", &make_date/0)
|
|
|
|
|> Map.put_new("context", "pleroma:fakecontext")
|
|
|
|
|> Map.put_new("context_id", -1)
|
|
|
|
|> lazy_put_object_defaults(true)
|
2017-05-16 15:31:11 +02:00
|
|
|
end
|
|
|
|
|
2019-09-15 13:07:07 +02:00
|
|
|
def lazy_put_activity_defaults(map, _fake?) do
|
2019-09-04 14:25:12 +02:00
|
|
|
%{data: %{"id" => context}, id: context_id} = create_context(map["context"])
|
2019-04-01 11:16:51 +02:00
|
|
|
|
|
|
|
map
|
2019-09-04 14:25:12 +02:00
|
|
|
|> Map.put_new_lazy("id", &generate_activity_id/0)
|
2019-04-01 11:16:51 +02:00
|
|
|
|> Map.put_new_lazy("published", &make_date/0)
|
2019-09-04 14:25:12 +02:00
|
|
|
|> Map.put_new("context", context)
|
|
|
|
|> Map.put_new("context_id", context_id)
|
|
|
|
|> lazy_put_object_defaults(false)
|
2019-04-01 11:16:51 +02:00
|
|
|
end
|
|
|
|
|
2019-09-04 14:25:12 +02:00
|
|
|
# Adds an id and published date to the embedded object if they aren't there.
# The fake and real variants differed only in how the id is chosen and in
# the "fake" marker, so the duplicated clause bodies are unified here.
@spec lazy_put_object_defaults(map(), boolean()) :: map()
defp lazy_put_object_defaults(%{"object" => map} = activity, fake?)
     when is_map(map) do
  object =
    map
    |> put_object_id(fake?)
    |> Map.put_new_lazy("published", &make_date/0)
    |> Map.put_new("context", activity["context"])
    |> Map.put_new("context_id", activity["context_id"])
    |> maybe_mark_fake(fake?)

  %{activity | "object" => object}
end

# Activities without an embedded object map are left untouched.
defp lazy_put_object_defaults(activity, _), do: activity

# Fake objects get a placeholder id; real ones get a freshly generated URL.
defp put_object_id(map, true), do: Map.put_new(map, "id", "pleroma:fake_object_id")
defp put_object_id(map, _), do: Map.put_new_lazy(map, "id", &generate_object_id/0)

# Only fake objects are tagged with "fake" => true.
defp maybe_mark_fake(map, true), do: Map.put_new(map, "fake", true)
defp maybe_mark_fake(map, _), do: map
|
|
|
|
|
2017-05-16 15:31:11 +02:00
|
|
|
@doc """
|
|
|
|
Inserts a full object if it is contained in an activity.
|
|
|
|
"""
|
2018-11-25 22:44:03 +01:00
|
|
|
def insert_full_object(%{"object" => %{"type" => type} = object_data} = map)
|
2018-11-09 14:39:44 +01:00
|
|
|
when is_map(object_data) and type in @supported_object_types do
|
2018-11-25 22:44:03 +01:00
|
|
|
with {:ok, object} <- Object.create(object_data) do
|
2019-09-03 16:58:30 +02:00
|
|
|
map = Map.put(map, "object", object.data["id"])
|
2018-11-25 22:44:03 +01:00
|
|
|
|
2019-04-17 11:22:32 +02:00
|
|
|
{:ok, map, object}
|
2017-05-16 15:31:11 +02:00
|
|
|
end
|
|
|
|
end
|
2018-03-30 15:01:53 +02:00
|
|
|
|
2019-04-17 11:22:32 +02:00
|
|
|
def insert_full_object(map), do: {:ok, map, nil}
|
2017-05-16 15:31:11 +02:00
|
|
|
|
|
|
|
#### Like-related helpers

@doc """
Returns an existing like if a user already liked an object
"""
@spec get_existing_like(String.t(), map()) :: Activity.t() | nil
def get_existing_like(actor, %{data: %{"id" => id}}) do
  actor
  |> Activity.Queries.by_actor()
  |> Activity.Queries.by_object_id(id)
  |> Activity.Queries.by_type("Like")
  |> limit(1)
  |> Repo.one()
end
|
|
|
|
|
2019-01-11 23:34:32 +01:00
|
|
|
@doc """
|
|
|
|
Returns like activities targeting an object
|
|
|
|
"""
|
|
|
|
def get_object_likes(%{data: %{"id" => id}}) do
|
2019-08-27 15:21:03 +02:00
|
|
|
id
|
|
|
|
|> Activity.Queries.by_object_id()
|
|
|
|
|> Activity.Queries.by_type("Like")
|
|
|
|
|> Repo.all()
|
2019-01-11 23:34:32 +01:00
|
|
|
end
|
|
|
|
|
2019-08-27 15:21:03 +02:00
|
|
|
# Builds the data map for a Like activity.
# Consistency fix: uses the already-bound `object_actor_id` and `ap_id`
# instead of re-reading `object.data["actor"]` / `actor.ap_id` (the
# pattern match guarantees they are the same values).
@spec make_like_data(User.t(), map(), String.t()) :: map()
def make_like_data(
      %User{ap_id: ap_id} = actor,
      %{data: %{"actor" => object_actor_id, "id" => id}} = object,
      activity_id
    ) do
  object_actor = User.get_cached_by_ap_id(object_actor_id)

  # Public likes are addressed to the liker's followers as well as the
  # object's author; non-public ones go to the author only.
  to =
    if Visibility.is_public?(object) do
      [actor.follower_address, object_actor_id]
    else
      [object_actor_id]
    end

  # CC everyone the object addressed, minus the liker themselves and the
  # author's follower collection.
  cc =
    (object.data["to"] ++ (object.data["cc"] || []))
    |> List.delete(ap_id)
    |> List.delete(object_actor.follower_address)

  %{
    "type" => "Like",
    "actor" => ap_id,
    "object" => id,
    "to" => to,
    "cc" => cc,
    "context" => object.data["context"]
  }
  |> maybe_put("id", activity_id)
end
|
|
|
|
|
2019-10-05 10:42:29 +02:00
|
|
|
# An EmojiReact is a Like with a different type and the emoji as content.
def make_emoji_reaction_data(user, object, emoji, activity_id) do
  user
  |> make_like_data(object, activity_id)
  |> Map.merge(%{"type" => "EmojiReact", "content" => emoji})
end
|
|
|
|
|
2020-01-22 13:57:42 +01:00
|
|
|
# Stores `element` under "<property>s" on the object together with a
# "<property>_count"; the count defaults to the element list's length.
@spec update_element_in_object(String.t(), list(any), Object.t(), integer() | nil) ::
        {:ok, Object.t()} | {:error, Ecto.Changeset.t()}
def update_element_in_object(property, element, object, count \\ nil) do
  total = count || length(element)

  updated_data =
    Map.merge(object.data, %{
      "#{property}_count" => total,
      "#{property}s" => element
    })

  object
  |> Changeset.change(data: updated_data)
  |> Object.update_and_set_cache()
end
|
|
|
|
|
2019-09-03 23:50:04 +02:00
|
|
|
@spec add_emoji_reaction_to_object(Activity.t(), Object.t()) ::
        {:ok, Object.t()} | {:error, Ecto.Changeset.t()}

# Records `actor`'s `emoji` reaction on `object` and persists the updated
# reaction count. Reactions are stored as [emoji, [actor_ap_ids]] pairs.
def add_emoji_reaction_to_object(
      %Activity{data: %{"content" => emoji, "actor" => actor}},
      object
    ) do
  reactions = get_cached_emoji_reactions(object)

  new_reactions =
    case Enum.find_index(reactions, fn [candidate, _] -> emoji == candidate end) do
      nil ->
        # First use of this emoji: append a new pair.
        reactions ++ [[emoji, [actor]]]

      index ->
        # Existing emoji: add the actor to its user list, deduplicated.
        List.update_at(
          reactions,
          index,
          fn [emoji, users] -> [emoji, Enum.uniq([actor | users])] end
        )
    end

  count = emoji_count(new_reactions)

  update_element_in_object("reaction", new_reactions, object, count)
end
|
|
|
|
|
|
|
|
# Total number of reacting users across all [emoji, users] pairs.
def emoji_count(reactions_list) do
  reactions_list
  |> Enum.map(fn [_emoji, users] -> length(users) end)
  |> Enum.sum()
end
|
|
|
|
|
2019-10-02 15:08:20 +02:00
|
|
|
# Removes `actor` from the user list of the `emoji` reaction on `object`,
# dropping the pair entirely once its user list becomes empty, and persists
# the updated reaction count.
def remove_emoji_reaction_from_object(
      %Activity{data: %{"content" => emoji, "actor" => actor}},
      object
    ) do
  reactions = get_cached_emoji_reactions(object)

  new_reactions =
    case Enum.find_index(reactions, fn [candidate, _] -> emoji == candidate end) do
      nil ->
        # Nothing to remove; keep the list as-is.
        reactions

      index ->
        List.update_at(
          reactions,
          index,
          fn [emoji, users] -> [emoji, List.delete(users, actor)] end
        )
        # Drop emoji entries nobody reacts with anymore.
        |> Enum.reject(fn [_, users] -> Enum.empty?(users) end)
    end

  count = emoji_count(new_reactions)
  update_element_in_object("reaction", new_reactions, object, count)
end
|
|
|
|
|
2020-01-23 12:34:34 +01:00
|
|
|
# The object's stored reactions list, or [] when absent/malformed.
def get_cached_emoji_reactions(object) do
  case object.data["reactions"] do
    reactions when is_list(reactions) -> reactions
    _ -> []
  end
end
|
|
|
|
|
2019-08-27 15:21:03 +02:00
|
|
|
@spec add_like_to_object(Activity.t(), Object.t()) ::
        {:ok, Object.t()} | {:error, Ecto.Changeset.t()}
# Records the liking actor in the object's "likes" list (deduplicated)
# and persists the derived like count.
def add_like_to_object(%Activity{data: %{"actor" => actor}}, object) do
  [actor | fetch_likes(object)]
  |> Enum.uniq()
  |> update_likes_in_object(object)
end
|
|
|
|
|
2019-08-27 15:21:03 +02:00
|
|
|
@spec remove_like_from_object(Activity.t(), Object.t()) ::
        {:ok, Object.t()} | {:error, Ecto.Changeset.t()}
# Removes the un-liking actor from the object's "likes" list and persists
# the updated like count.
def remove_like_from_object(%Activity{data: %{"actor" => actor}}, object) do
  object
  |> fetch_likes()
  |> List.delete(actor)
  |> update_likes_in_object(object)
end

# Writes the likes list (and derived "like_count") back onto the object.
defp update_likes_in_object(likes, object) do
  update_element_in_object("like", likes, object)
end
|
|
|
|
|
|
|
|
# The object's stored likes list, or [] when absent/malformed.
defp fetch_likes(object) do
  case object.data["likes"] do
    likes when is_list(likes) -> likes
    _ -> []
  end
end
|
|
|
|
|
|
|
|
#### Follow-related helpers

@doc """
Updates a follow activity's state (for locked accounts).
"""
@spec update_follow_state_for_all(Activity.t(), String.t()) :: {:ok, Activity | nil}
def update_follow_state_for_all(
      %Activity{data: %{"actor" => actor, "object" => object}} = activity,
      state
    ) do
  # Bulk-update every pending Follow between this actor/object pair
  # directly in the database (jsonb_set avoids loading each row).
  "Follow"
  |> Activity.Queries.by_type()
  |> Activity.Queries.by_actor(actor)
  |> Activity.Queries.by_object_id(object)
  |> where(fragment("data->>'state' = 'pending'"))
  |> update(set: [data: fragment("jsonb_set(data, '{state}', ?)", ^state)])
  |> Repo.update_all([])

  User.set_follow_state_cache(actor, object, state)

  # Re-fetch so the caller sees the freshly written state.
  activity = Activity.get_by_id(activity.id)

  {:ok, activity}
end
|
|
|
|
|
2019-08-14 23:47:30 +02:00
|
|
|
# Updates the state of a single follow activity (cf.
# update_follow_state_for_all/2, which updates every pending one in bulk).
def update_follow_state(
      %Activity{data: %{"actor" => actor, "object" => object}} = activity,
      state
    ) do
  new_data = Map.put(activity.data, "state", state)
  changeset = Changeset.change(activity, data: new_data)

  with {:ok, activity} <- Repo.update(changeset) do
    # Keep the follow-state cache in sync with the database.
    User.set_follow_state_cache(actor, object, state)
    {:ok, activity}
  end
end
|
|
|
|
|
2017-05-16 15:31:11 +02:00
|
|
|
@doc """
|
|
|
|
Makes a follow activity data for the given follower and followed
|
|
|
|
"""
|
2018-05-28 20:31:48 +02:00
|
|
|
def make_follow_data(
|
|
|
|
%User{ap_id: follower_id},
|
2018-12-09 10:12:48 +01:00
|
|
|
%User{ap_id: followed_id} = _followed,
|
2018-05-28 20:31:48 +02:00
|
|
|
activity_id
|
|
|
|
) do
|
2019-08-27 15:21:03 +02:00
|
|
|
%{
|
2017-05-16 15:31:11 +02:00
|
|
|
"type" => "Follow",
|
|
|
|
"actor" => follower_id,
|
|
|
|
"to" => [followed_id],
|
2019-07-29 04:43:19 +02:00
|
|
|
"cc" => [Pleroma.Constants.as_public()],
|
2018-10-06 01:31:00 +02:00
|
|
|
"object" => followed_id,
|
|
|
|
"state" => "pending"
|
2017-05-16 15:31:11 +02:00
|
|
|
}
|
2019-08-27 15:21:03 +02:00
|
|
|
|> maybe_put("id", activity_id)
|
2017-05-16 15:31:11 +02:00
|
|
|
end
|
|
|
|
|
2018-03-30 15:01:53 +02:00
|
|
|
# Most recent Follow activity from `follower` to `followed`, or nil.
def fetch_latest_follow(%User{ap_id: follower_id}, %User{ap_id: followed_id}) do
  "Follow"
  |> Activity.Queries.by_type()
  |> where(actor: ^follower_id)
  # this is to use the index
  |> Activity.Queries.by_object_id(followed_id)
  |> order_by([activity], fragment("? desc nulls last", activity.id))
  |> limit(1)
  |> Repo.one()
end
|
|
|
|
|
2020-02-04 17:35:32 +01:00
|
|
|
# Most recent Undo activity by the given user, or nil.
def fetch_latest_undo(%User{ap_id: ap_id}) do
  "Undo"
  |> Activity.Queries.by_type()
  |> where(actor: ^ap_id)
  |> order_by([activity], fragment("? desc nulls last", activity.id))
  |> limit(1)
  |> Repo.one()
end
|
|
|
|
|
2019-10-02 15:38:57 +02:00
|
|
|
# Newest EmojiReact by `ap_id` with content `emoji` targeting the object
# of the activity identified by `internal_activity_id`, or nil.
def get_latest_reaction(internal_activity_id, %{ap_id: ap_id}, emoji) do
  %{data: %{"object" => object_ap_id}} = Activity.get_by_id(internal_activity_id)

  "EmojiReact"
  |> Activity.Queries.by_type()
  |> where(actor: ^ap_id)
  |> where([activity], fragment("?->>'content' = ?", activity.data, ^emoji))
  |> Activity.Queries.by_object_id(object_ap_id)
  |> order_by([activity], fragment("? desc nulls last", activity.id))
  |> limit(1)
  |> Repo.one()
end
|
|
|
|
|
2017-05-16 15:31:11 +02:00
|
|
|
#### Announce-related helpers

@doc """
Returns an existing announce activity if the notice has already been announced
"""
@spec get_existing_announce(String.t(), map()) :: Activity.t() | nil
def get_existing_announce(actor, %{data: %{"id" => ap_id}}) do
  "Announce"
  |> Activity.Queries.by_type()
  |> where(actor: ^actor)
  # this is to use the index
  |> Activity.Queries.by_object_id(ap_id)
  |> Repo.one()
end
|
|
|
|
|
2018-04-16 16:59:32 +02:00
|
|
|
@doc """
|
|
|
|
Make announce activity data for the given actor and object
|
|
|
|
"""
|
2018-08-06 12:37:52 +02:00
|
|
|
# for relayed messages, we only want to send to subscribers
|
|
|
|
def make_announce_data(
|
2019-01-18 00:12:42 +01:00
|
|
|
%User{ap_id: ap_id} = user,
|
2018-08-06 12:37:52 +02:00
|
|
|
%Object{data: %{"id" => id}} = object,
|
2019-01-18 00:12:42 +01:00
|
|
|
activity_id,
|
|
|
|
false
|
2018-08-06 12:37:52 +02:00
|
|
|
) do
|
2019-08-27 15:21:03 +02:00
|
|
|
%{
|
2018-08-06 12:37:52 +02:00
|
|
|
"type" => "Announce",
|
|
|
|
"actor" => ap_id,
|
|
|
|
"object" => id,
|
|
|
|
"to" => [user.follower_address],
|
|
|
|
"cc" => [],
|
|
|
|
"context" => object.data["context"]
|
|
|
|
}
|
2019-08-27 15:21:03 +02:00
|
|
|
|> maybe_put("id", activity_id)
|
2018-08-06 12:37:52 +02:00
|
|
|
end
|
|
|
|
|
2018-03-30 15:01:53 +02:00
|
|
|
def make_announce_data(
|
|
|
|
%User{ap_id: ap_id} = user,
|
|
|
|
%Object{data: %{"id" => id}} = object,
|
2019-01-18 00:12:42 +01:00
|
|
|
activity_id,
|
|
|
|
true
|
2018-03-30 15:01:53 +02:00
|
|
|
) do
|
2019-08-27 15:21:03 +02:00
|
|
|
%{
|
2017-05-16 15:31:11 +02:00
|
|
|
"type" => "Announce",
|
|
|
|
"actor" => ap_id,
|
|
|
|
"object" => id,
|
2017-07-19 19:06:49 +02:00
|
|
|
"to" => [user.follower_address, object.data["actor"]],
|
2019-07-29 04:43:19 +02:00
|
|
|
"cc" => [Pleroma.Constants.as_public()],
|
2017-05-16 15:31:11 +02:00
|
|
|
"context" => object.data["context"]
|
|
|
|
}
|
2019-08-27 15:21:03 +02:00
|
|
|
|> maybe_put("id", activity_id)
|
2017-05-16 15:31:11 +02:00
|
|
|
end
|
|
|
|
|
2018-04-17 10:13:08 +02:00
|
|
|
@doc """
|
|
|
|
Make unannounce activity data for the given actor and object
|
|
|
|
"""
|
|
|
|
def make_unannounce_data(
|
|
|
|
%User{ap_id: ap_id} = user,
|
2019-10-04 16:32:42 +02:00
|
|
|
%Activity{data: %{"context" => context, "object" => object}} = activity,
|
2018-04-23 03:28:51 +02:00
|
|
|
activity_id
|
2018-04-17 10:13:08 +02:00
|
|
|
) do
|
2019-10-04 16:32:42 +02:00
|
|
|
object = Object.normalize(object)
|
|
|
|
|
2019-08-27 15:21:03 +02:00
|
|
|
%{
|
2018-04-17 10:13:08 +02:00
|
|
|
"type" => "Undo",
|
|
|
|
"actor" => ap_id,
|
2018-04-23 03:28:51 +02:00
|
|
|
"object" => activity.data,
|
2019-10-04 16:32:42 +02:00
|
|
|
"to" => [user.follower_address, object.data["actor"]],
|
2019-07-29 04:43:19 +02:00
|
|
|
"cc" => [Pleroma.Constants.as_public()],
|
2018-04-18 02:35:07 +02:00
|
|
|
"context" => context
|
2018-04-17 10:13:08 +02:00
|
|
|
}
|
2019-08-27 15:21:03 +02:00
|
|
|
|> maybe_put("id", activity_id)
|
2018-04-17 10:13:08 +02:00
|
|
|
end
|
|
|
|
|
2018-05-19 15:22:43 +02:00
|
|
|
# Builds an Undo wrapping a Like activity; addressed to the unliker's
# followers and the liked object's author.
def make_unlike_data(
      %User{ap_id: ap_id} = user,
      %Activity{data: %{"context" => context, "object" => object}} = activity,
      activity_id
    ) do
  # Resolve the liked object so its author can be addressed.
  object = Object.normalize(object)

  %{
    "type" => "Undo",
    "actor" => ap_id,
    "object" => activity.data,
    "to" => [user.follower_address, object.data["actor"]],
    "cc" => [Pleroma.Constants.as_public()],
    "context" => context
  }
  |> maybe_put("id", activity_id)
end
|
|
|
|
|
2019-10-02 15:08:20 +02:00
|
|
|
# Generic Undo for an activity; unlike make_unannounce_data/make_unlike_data,
# it references the undone activity by id only rather than embedding its data.
def make_undo_data(
      %User{ap_id: actor, follower_address: follower_address},
      %Activity{
        data: %{"id" => undone_activity_id, "context" => context},
        actor: undone_activity_actor
      },
      activity_id \\ nil
    ) do
  %{
    "type" => "Undo",
    "actor" => actor,
    "object" => undone_activity_id,
    "to" => [follower_address, undone_activity_actor],
    "cc" => [Pleroma.Constants.as_public()],
    "context" => context
  }
  |> maybe_put("id", activity_id)
end
|
|
|
|
|
2019-09-04 14:25:12 +02:00
|
|
|
@spec add_announce_to_object(Activity.t(), Object.t()) ::
        {:ok, Object.t()} | {:error, Ecto.Changeset.t()}
# Records the announcing actor on the object (deduplicated) and persists
# the announcement count. Invisible (e.g. relay) actors are not recorded.
# Idiom fixes: `unless/else` replaced with a plain `if`, and a pointless
# single-clause `with` (its `<-` could never fail) replaced with a binding.
def add_announce_to_object(
      %Activity{data: %{"actor" => actor}},
      object
    ) do
  if actor |> User.get_cached_by_ap_id() |> User.invisible?() do
    {:ok, object}
  else
    announcements = Enum.uniq([actor | take_announcements(object)])
    update_element_in_object("announcement", announcements, object)
  end
end

# Activities without an actor leave the object untouched.
def add_announce_to_object(_, object), do: {:ok, object}
|
|
|
|
|
2019-09-04 14:25:12 +02:00
|
|
|
@spec remove_announce_from_object(Activity.t(), Object.t()) ::
        {:ok, Object.t()} | {:error, Ecto.Changeset.t()}
# Removes the un-announcing actor from the object's announcements and
# persists the updated count. Idiom fix: the single-clause `with` (whose
# `<-` could never fail) is replaced with a plain binding.
def remove_announce_from_object(%Activity{data: %{"actor" => actor}}, object) do
  announcements = List.delete(take_announcements(object), actor)
  update_element_in_object("announcement", announcements, object)
end

# The object's stored announcements list, or [] when absent/malformed.
defp take_announcements(%{data: %{"announcements" => announcements}} = _)
     when is_list(announcements),
     do: announcements

defp take_announcements(_), do: []
|
2019-09-04 14:25:12 +02:00
|
|
|
|
2017-05-16 15:31:11 +02:00
|
|
|
#### Unfollow-related helpers

# Builds an Undo wrapping the original follow activity's data, addressed
# directly to the unfollowed user.
def make_unfollow_data(follower, followed, follow_activity, activity_id) do
  %{
    "type" => "Undo",
    "actor" => follower.ap_id,
    "to" => [followed.ap_id],
    "object" => follow_activity.data
  }
  |> maybe_put("id", activity_id)
end
|
|
|
|
|
2018-05-19 00:09:56 +02:00
|
|
|
#### Block-related helpers
# Most recent Block activity from `blocker` against `blocked`, or nil.
@spec fetch_latest_block(User.t(), User.t()) :: Activity.t() | nil
def fetch_latest_block(%User{ap_id: blocker_id}, %User{ap_id: blocked_id}) do
  "Block"
  |> Activity.Queries.by_type()
  |> where(actor: ^blocker_id)
  # this is to use the index
  |> Activity.Queries.by_object_id(blocked_id)
  |> order_by([activity], fragment("? desc nulls last", activity.id))
  |> limit(1)
  |> Repo.one()
end
|
|
|
|
|
2018-05-21 03:01:14 +02:00
|
|
|
# Builds a Block activity addressed directly to the blocked user.
def make_block_data(blocker, blocked, activity_id) do
  %{
    "type" => "Block",
    "actor" => blocker.ap_id,
    "to" => [blocked.ap_id],
    "object" => blocked.ap_id
  }
  |> maybe_put("id", activity_id)
end

# Builds an Undo wrapping the original block activity's data.
def make_unblock_data(blocker, blocked, block_activity, activity_id) do
  %{
    "type" => "Undo",
    "actor" => blocker.ap_id,
    "to" => [blocked.ap_id],
    "object" => block_activity.data
  }
  |> maybe_put("id", activity_id)
end
|
|
|
|
|
|
|
|
#### Create-related helpers

# Builds a Create activity map from the given params, merged with any
# additional fields; "published" defaults to the current time.
def make_create_data(params, additional) do
  %{
    "type" => "Create",
    "to" => Enum.uniq(params.to),
    "actor" => params.actor.ap_id,
    "object" => params.object,
    "published" => params.published || make_date(),
    "context" => params.context
  }
  |> Map.merge(additional)
end
|
2019-02-20 17:51:25 +01:00
|
|
|
|
2019-09-27 14:22:35 +02:00
|
|
|
#### Listen-related helpers

# Builds a Listen activity map from the given params, merged with any
# additional fields; "published" defaults to the current time.
def make_listen_data(params, additional) do
  %{
    "type" => "Listen",
    "to" => Enum.uniq(params.to),
    "actor" => params.actor.ap_id,
    "object" => params.object,
    "published" => params.published || make_date(),
    "context" => params.context
  }
  |> Map.merge(additional)
end
|
2019-02-20 17:51:25 +01:00
|
|
|
|
|
|
|
#### Flag-related helpers
@spec make_flag_data(map(), map()) :: map()
# Builds a Flag (report) activity; reports start in the "open" state.
def make_flag_data(%{actor: actor, context: context, content: content} = params, additional) do
  %{
    "type" => "Flag",
    "actor" => actor.ap_id,
    "content" => content,
    "object" => build_flag_object(params),
    "context" => context,
    "state" => "open"
  }
  |> Map.merge(additional)
end

# Incomplete params produce an empty map rather than a crash.
def make_flag_data(_, _), do: %{}
|
|
|
|
|
|
|
|
# Builds the "object" list of a Flag activity.
#
# When both the reported account and a list of statuses are given, the
# account's AP id is the first element, followed by one entry per status.
defp build_flag_object(%{account: account, statuses: statuses} = _) do
  [account.ap_id] ++ build_flag_object(%{statuses: statuses})
end

# Maps each reported status (possibly nil list) through the clause below.
defp build_flag_object(%{statuses: statuses}) do
  Enum.map(statuses || [], &build_flag_object/1)
end

# Accepts an %Activity{}, a raw activity map, or a bare AP id string and
# renders the reported status as an embedded "Note", or a tombstone map
# when the status no longer exists locally.
defp build_flag_object(act) when is_map(act) or is_binary(act) do
  # Normalize the input down to an AP id. The %Activity{} clause must come
  # first: a struct also matches `is_map/1`.
  id =
    case act do
      %Activity{} = act -> act.data["id"]
      act when is_map(act) -> act["id"]
      act when is_binary(act) -> act
    end

  case Activity.get_by_ap_id_with_object(id) do
    %Activity{} = activity ->
      %{
        "type" => "Note",
        "id" => activity.data["id"],
        "content" => activity.object.data["content"],
        "published" => activity.object.data["published"],
        "actor" =>
          AccountView.render("show.json", %{
            user: User.get_by_ap_id(activity.object.data["actor"])
          })
      }

    _ ->
      # Status is gone; keep a tombstone so the report still references it.
      %{"id" => id, "deleted" => true}
  end
end

# Fallback for inputs that carry nothing reportable.
defp build_flag_object(_), do: []
|
|
|
|
|
2019-03-06 22:13:26 +01:00
|
|
|
@doc """
Fetches the OrderedCollection/OrderedCollectionPage from `from`, limiting the amount of pages fetched after
the first one to `pages_left` pages.
If the amount of pages is higher than the collection has, it returns whatever was there.
"""
def fetch_ordered_collection(from, pages_left, acc \\ []) do
  with {:ok, response} <- Tesla.get(from),
       {:ok, collection} <- Jason.decode(response.body) do
    case collection["type"] do
      "OrderedCollection" ->
        # The root collection only points at its first page; follow it.
        fetch_ordered_collection(collection["first"], pages_left)

      "OrderedCollectionPage" ->
        gathered = acc ++ collection["orderedItems"]

        # Descend into the next page only while there is budget left AND
        # the page actually links to a successor; in every other case the
        # items gathered so far are the final result.
        if pages_left > 0 and Map.has_key?(collection, "next") do
          fetch_ordered_collection(collection["next"], pages_left - 1, gathered)
        else
          gathered
        end

      _ ->
        {:error, "Not an OrderedCollection or OrderedCollectionPage"}
    end
  end
end
|
2019-05-16 21:09:18 +02:00
|
|
|
|
|
|
|
#### Report-related helpers

# Returns a page of Flag activities for the admin API, `page` is 1-based.
def get_reports(params, page, page_size) do
  params =
    Map.merge(params, %{
      "type" => "Flag",
      "skip_preload" => true,
      "preload_report_notes" => true,
      "total" => true,
      "limit" => page_size,
      "offset" => (page - 1) * page_size
    })

  ActivityPub.fetch_activities([], params, :offset)
end
|
|
|
|
|
2019-11-07 13:45:36 +01:00
|
|
|
# Aggregates everything shown for one reported status in the admin API:
# the date of the newest report, the reported account, the status itself,
# the distinct reporters and the raw report activities.
def parse_report_group(activity) do
  reports = get_reports_by_status_id(activity["id"])

  # BUG FIX: `Enum.max_by` on %NaiveDateTime{} structs used structural
  # term comparison, which orders map fields alphabetically (:day before
  # :month before :year) and is NOT chronological. Compare on the Erlang
  # {{y, m, d}, {h, min, s}} tuple instead, whose term order is.
  max_date =
    Enum.max_by(
      reports,
      &(&1.data["published"] |> NaiveDateTime.from_iso8601!() |> NaiveDateTime.to_erl())
    )

  actors = Enum.map(reports, & &1.user_actor)

  # Every report's "object" list starts with the reported account's AP id.
  [%{data: %{"object" => [account_id | _]}} | _] = reports

  account =
    AccountView.render("show.json", %{
      user: User.get_by_ap_id(account_id)
    })

  status = get_status_data(activity)

  %{
    date: max_date.data["published"],
    account: account,
    status: status,
    actors: Enum.uniq(actors),
    reports: reports
  }
end
|
|
|
|
|
2019-11-27 14:54:12 +01:00
|
|
|
# Resolves the status map embedded in a report group: a tombstone when the
# status was flagged as deleted, otherwise the activity from the database.
defp get_status_data(status) do
  case status["deleted"] do
    true -> %{"id" => status["id"], "deleted" => true}
    _ -> Activity.get_by_ap_id(status["id"])
  end
end
|
|
|
|
|
2019-11-06 12:25:46 +01:00
|
|
|
# Fetches every Flag activity that references the status with the given AP
# id — either as an embedded object (`[%{id: ap_id}]`) or as a bare id
# string (`[ap_id]`) — with each report's actor preloaded.
def get_reports_by_status_id(ap_id) do
  from(a in Activity,
    where: fragment("(?)->>'type' = 'Flag'", a.data),
    # jsonb containment checks cover both report "object" shapes.
    where: fragment("(?)->'object' @> ?", a.data, ^[%{id: ap_id}]),
    or_where: fragment("(?)->'object' @> ?", a.data, ^[ap_id])
  )
  |> Activity.with_preloaded_user_actor()
  |> Repo.all()
end
|
|
|
|
|
2019-11-27 14:54:12 +01:00
|
|
|
@spec get_reports_grouped_by_status([String.t()]) :: %{
        required(:groups) => [
          %{
            required(:date) => String.t(),
            required(:account) => %{},
            required(:status) => %{},
            required(:actors) => [%User{}],
            required(:reports) => [%Activity{}]
          }
        ]
      }
# Builds one report group per reported activity id for the admin API.
def get_reports_grouped_by_status(activity_ids) do
  groups =
    for id <- activity_ids do
      id
      |> build_flag_object()
      |> parse_report_group()
    end

  %{groups: groups}
end
|
|
|
|
|
2019-11-07 13:45:36 +01:00
|
|
|
@spec get_reported_activities() :: [
        %{
          required(:activity) => String.t(),
          required(:date) => String.t()
        }
      ]
# Returns the distinct AP ids of every activity referenced by some Flag's
# "object" list. The first list element (the reported account) is removed
# with `#- '{object,0}'` before unnesting, so only statuses remain.
def get_reported_activities do
  reported_activities_query =
    from(a in Activity,
      where: fragment("(?)->>'type' = 'Flag'", a.data),
      select: %{
        activity: fragment("jsonb_array_elements((? #- '{object,0}')->'object')", a.data)
      },
      group_by: fragment("activity")
    )

  from(a in subquery(reported_activities_query),
    distinct: true,
    select: %{
      # Entries can be embedded objects (use ->>'id') or bare id strings
      # (extract the scalar itself via #>> '{}').
      id: fragment("COALESCE(?->>'id'::text, ? #>> '{}')", a.activity, a.activity)
    }
  )
  |> Repo.all()
  |> Enum.map(& &1.id)
end
|
2019-05-16 21:09:18 +02:00
|
|
|
|
2019-10-27 14:05:32 +01:00
|
|
|
# For states that hide report content ("closed"/"resolved"), strip the
# reported statuses down to bare ids before persisting the new state.
def update_report_state(%Activity{} = activity, state)
    when state in @strip_status_report_states do
  {:ok, stripped_activity} = strip_report_status_data(activity)

  new_data =
    Map.merge(activity.data, %{
      "state" => state,
      "object" => stripped_activity.data["object"]
    })

  activity
  |> Changeset.change(data: new_data)
  |> Repo.update()
end
|
|
|
|
|
2019-05-16 21:09:18 +02:00
|
|
|
# Plain state transition for a single report; no content stripping needed.
def update_report_state(%Activity{} = activity, state) when state in @supported_report_states do
  changeset = Changeset.change(activity, data: Map.put(activity.data, "state", state))
  Repo.update(changeset)
end
|
|
|
|
|
2019-10-04 18:00:58 +02:00
|
|
|
# Bulk state transition: rewrites the "state" of every listed report in a
# single UPDATE, succeeding only if all requested rows were touched.
def update_report_state(activity_ids, state) when state in @supported_report_states do
  expected = length(activity_ids)

  result =
    from(a in Activity, where: a.id in ^activity_ids)
    |> update(set: [data: fragment("jsonb_set(data, '{state}', ?)", ^state)])
    |> Repo.update_all([])

  case result do
    {^expected, _} -> :ok
    _ -> {:error, activity_ids}
  end
end

def update_report_state(_, _), do: {:error, "Unsupported state"}
|
|
|
|
|
2019-10-27 14:05:32 +01:00
|
|
|
# Replaces the embedded status objects of a Flag activity with their AP
# ids, keeping the reported account (the first "object" entry) intact.
def strip_report_status_data(activity) do
  [reported_account | statuses] = activity.data["object"]

  status_ids =
    Enum.map(statuses, fn
      %{} = status -> status["id"]
      status when is_binary(status) -> status
    end)

  new_data = Map.put(activity.data, "object", [reported_account | status_ids])

  {:ok, %{activity | data: new_data}}
end
|
|
|
|
|
2019-05-16 21:09:18 +02:00
|
|
|
# Rewrites the addressing ("to"/"cc"/recipients) of an activity and its
# object to match the requested visibility, updating both the object cache
# and the database row.
def update_activity_visibility(activity, visibility) when visibility in @valid_visibilities do
  [to, cc, recipients] =
    activity
    |> get_updated_targets(visibility)
    |> Enum.map(&Enum.uniq/1)

  # The embedded object must carry the same addressing as the activity.
  object_data = Map.merge(activity.object.data, %{"to" => to, "cc" => cc})

  {:ok, object} =
    activity.object
    |> Object.change(%{data: object_data})
    |> Object.update_and_set_cache()

  activity_data = Map.merge(activity.data, %{"to" => to, "cc" => cc})

  activity
  |> Map.put(:object, object)
  |> Activity.change(%{data: activity_data, recipients: recipients})
  |> Repo.update()
end

def update_activity_visibility(_, _), do: {:error, "Unsupported visibility"}
|
|
|
|
|
|
|
|
# Computes the new [to, cc, recipients] triple for a visibility change.
# Unknown visibilities leave the addressing untouched.
defp get_updated_targets(
       %Activity{data: %{"to" => to} = data, recipients: recipients},
       visibility
     ) do
  cc = Map.get(data, "cc", [])
  follower_address = User.get_cached_by_ap_id(data["actor"]).follower_address
  public = Pleroma.Constants.as_public()

  case visibility do
    "public" ->
      # Public goes in "to", followers move to "cc".
      [
        [public | List.delete(to, follower_address)],
        [follower_address | List.delete(cc, public)],
        [public | recipients]
      ]

    "private" ->
      # Followers only: drop the public address everywhere.
      [
        [follower_address | List.delete(to, public)],
        List.delete(cc, public),
        List.delete(recipients, public)
      ]

    "unlisted" ->
      # Followers in "to", public demoted to "cc".
      [
        [follower_address | List.delete(to, public)],
        [public | List.delete(cc, follower_address)],
        recipients ++ [follower_address, public]
      ]

    _ ->
      [to, cc, recipients]
  end
end
|
2019-06-01 15:07:01 +02:00
|
|
|
|
|
|
|
# Returns the "Answer" Create activities `actor` has already issued in
# reply to the Question with the given id (used to prevent double voting).
def get_existing_votes(actor, %{data: %{"id" => id}}) do
  actor
  |> Activity.Queries.by_actor()
  |> Activity.Queries.by_type("Create")
  |> Activity.with_preloaded_object()
  |> where([a, object: o], fragment("(?)->>'inReplyTo' = ?", o.data, ^to_string(id)))
  |> where([a, object: o], fragment("(?)->>'type' = 'Answer'", o.data))
  |> Repo.all()
end
|
2019-08-27 15:21:03 +02:00
|
|
|
|
2019-10-05 14:49:45 +02:00
|
|
|
# Inserts `value` under `key`, leaving the map untouched for nil values so
# optional fields (e.g. a missing activity id) stay absent entirely.
def maybe_put(map, _key, nil), do: map

def maybe_put(map, key, value) do
  Map.put(map, key, value)
end
|
2017-05-16 15:31:11 +02:00
|
|
|
end
|