# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Activity do
  @moduledoc """
  Schema and query helpers for federated activities.

  An activity's payload lives in the jsonb `data` field; several helpers in
  this module join the referenced `Pleroma.Object` through the AP id embedded
  in that field rather than through a real foreign key.
  """
  use Ecto.Schema

  alias Pleroma.Activity
  alias Pleroma.Bookmark
  alias Pleroma.Notification
  alias Pleroma.Object
  alias Pleroma.Repo
  alias Pleroma.ThreadMute
  alias Pleroma.User

  import Ecto.Changeset
  import Ecto.Query

  @type t :: %__MODULE__{}
  # An actor is identified by its ActivityPub id (a URL string).
  @type actor :: String.t()

  # Flake ids are k-ordered, so they sort chronologically.
  @primary_key {:id, Pleroma.FlakeId, autogenerate: true}

  # Maps AP activity types to Mastodon notification type names, mirroring:
  # https://github.com/tootsuite/mastodon/blob/master/app/models/notification.rb#L19
  @mastodon_notification_types %{
    "Create" => "mention",
    "Follow" => "follow",
    "Announce" => "reblog",
    "Like" => "favourite"
  }

  # Inverse of @mastodon_notification_types: Mastodon name -> AP type.
  @mastodon_to_ap_notification_types for {k, v} <- @mastodon_notification_types,
                                         into: %{},
                                         do: {v, k}
  schema "activities" do
    # Raw ActivityPub payload.
    field(:data, :map)
    field(:local, :boolean, default: true)
    field(:actor, :string)
    field(:recipients, {:array, :string}, default: [])
    # Virtual: filled by with_set_thread_muted_field/2, never persisted.
    field(:thread_muted?, :boolean, virtual: true)
    # This is a fake relation, do not use outside of with_preloaded_bookmark/get_bookmark
    has_one(:bookmark, Bookmark)
    has_many(:notifications, Notification, on_delete: :delete_all)

    # Attention: this is a fake relation, don't try to preload it blindly and expect it to work!
    # The foreign key is embedded in a jsonb field.
    #
    # To use it, you probably want to do an inner join and a preload:
    #
    # ```
    # |> join(:inner, [activity], o in Object,
    #      on: fragment("(?->>'id') = COALESCE((?)->'object'->> 'id', (?)->>'object')",
    #        o.data, activity.data, activity.data))
    # |> preload([activity, object], [object: object])
    # ```
    #
    # As a convenience, Activity.with_preloaded_object() sets up an inner join and preload for the
    # typical case.
    has_one(:object, Object, on_delete: :nothing, foreign_key: :id)

    timestamps()
  end
  @doc """
  Inner-joins each activity's object under the named binding `:object`.

  The "foreign key" is the object's AP id embedded in the activity's jsonb
  `data`, either nested (`data->'object'->>'id'`) or as a bare string
  (`data->>'object'`); COALESCE handles both shapes.
  """
  def with_joined_object(query) do
    join(query, :inner, [activity], o in Object,
      on:
        fragment(
          "(?->>'id') = COALESCE(?->'object'->>'id', ?->>'object')",
          o.data,
          activity.data,
          activity.data
        ),
      # Named binding lets with_preloaded_object/1 detect an existing join.
      as: :object
    )
  end
def with_preloaded_object(query) do
|
|
|
|
query
|
|
|
|
|> has_named_binding?(:object)
|
|
|
|
|> if(do: query, else: with_joined_object(query))
|
|
|
|
|> preload([activity, object: object], object: object)
|
2019-03-23 01:09:56 +01:00
|
|
|
end
|
|
|
|
|
2019-05-07 17:00:50 +02:00
|
|
|
  @doc """
  Left-joins the given user's bookmark (if any) onto each activity and
  preloads it into the fake `:bookmark` relation.
  """
  def with_preloaded_bookmark(query, %User{} = user) do
    from([a] in query,
      left_join: b in Bookmark,
      on: b.user_id == ^user.id and b.activity_id == a.id,
      preload: [bookmark: b]
    )
  end

  # Without a user there is no bookmark to preload; pass the query through.
  def with_preloaded_bookmark(query, _), do: query
  @doc """
  Fills the virtual `:thread_muted?` field on each selected activity:
  `true` when the user has a `ThreadMute` for the activity's conversation
  context, `false` otherwise.
  """
  def with_set_thread_muted_field(query, %User{} = user) do
    from([a] in query,
      left_join: tm in ThreadMute,
      on: tm.user_id == ^user.id and tm.context == fragment("?->>'context'", a.data),
      # No matching mute row leaves tm.id NULL -> thread_muted? = false.
      select: %Activity{a | thread_muted?: not is_nil(tm.id)}
    )
  end

  # Without a user the field stays at its default; pass the query through.
  def with_set_thread_muted_field(query, _), do: query
def get_by_ap_id(ap_id) do
|
2018-03-30 15:01:53 +02:00
|
|
|
Repo.one(
|
|
|
|
from(
|
|
|
|
activity in Activity,
|
|
|
|
where: fragment("(?)->>'id' = ?", activity.data, ^to_string(ap_id))
|
|
|
|
)
|
|
|
|
)
|
2017-04-13 15:49:42 +02:00
|
|
|
end
|
|
|
|
|
2019-05-07 17:00:50 +02:00
|
|
|
def get_bookmark(%Activity{} = activity, %User{} = user) do
|
|
|
|
if Ecto.assoc_loaded?(activity.bookmark) do
|
|
|
|
activity.bookmark
|
|
|
|
else
|
|
|
|
Bookmark.get(user.id, activity.id)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
def get_bookmark(_, _), do: nil
|
|
|
|
|
2018-11-29 07:52:54 +01:00
|
|
|
  @doc """
  Builds a changeset for an activity.

  `:data` is required; uniqueness of the AP id is enforced by the
  `activities_unique_apid_index` database constraint.
  """
  def change(struct, params \\ %{}) do
    struct
    |> cast(params, [:data, :recipients])
    |> validate_required([:data])
    |> unique_constraint(:ap_id, name: :activities_unique_apid_index)
  end
  @doc """
  Fetches the activity whose ActivityPub `id` equals `ap_id`, preloading its
  object.

  Uses a left join so the activity is still returned when no object row
  matches. Returns `nil` when no activity is found.
  """
  def get_by_ap_id_with_object(ap_id) do
    Repo.one(
      from(
        activity in Activity,
        where: fragment("(?)->>'id' = ?", activity.data, ^to_string(ap_id)),
        left_join: o in Object,
        on:
          fragment(
            "(?->>'id') = COALESCE(?->'object'->>'id', ?->>'object')",
            o.data,
            activity.data,
            activity.data
          ),
        preload: [object: o]
      )
    )
  end
def get_by_id(id) do
|
2019-03-04 13:55:11 +01:00
|
|
|
Activity
|
|
|
|
|> where([a], a.id == ^id)
|
2019-04-11 12:22:42 +02:00
|
|
|
|> restrict_deactivated_users()
|
2019-03-04 13:55:11 +01:00
|
|
|
|> Repo.one()
|
2019-01-22 08:54:11 +01:00
|
|
|
end
|
|
|
|
|
2019-03-23 00:34:47 +01:00
|
|
|
  @doc """
  Fetches an activity by its flake id, inner-joining and preloading its
  object. Returns `nil` when the activity or its object is missing.
  """
  def get_by_id_with_object(id) do
    from(activity in Activity,
      where: activity.id == ^id,
      inner_join: o in Object,
      on:
        fragment(
          "(?->>'id') = COALESCE(?->'object'->>'id', ?->>'object')",
          o.data,
          activity.data,
          activity.data
        ),
      preload: [object: o]
    )
    |> Repo.one()
  end
  @doc """
  Builds a query for all activities whose object is `ap_id`.

  The object reference may be embedded (`data->'object'->>'id'`) or stored
  as a bare AP id string (`data->>'object'`); COALESCE covers both shapes.
  """
  def by_object_ap_id(ap_id) do
    from(
      activity in Activity,
      where:
        fragment(
          "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
          activity.data,
          activity.data,
          ^to_string(ap_id)
        )
    )
  end
  @doc """
  Builds a query for `Create` activities whose object is any of `ap_ids`.
  """
  def create_by_object_ap_id(ap_ids) when is_list(ap_ids) do
    from(
      activity in Activity,
      where:
        fragment(
          "coalesce((?)->'object'->>'id', (?)->>'object') = ANY(?)",
          activity.data,
          activity.data,
          ^ap_ids
        ),
      where: fragment("(?)->>'type' = 'Create'", activity.data)
    )
  end
  @doc """
  Builds a query for the `Create` activity whose object is `ap_id`.
  """
  def create_by_object_ap_id(ap_id) when is_binary(ap_id) do
    from(
      activity in Activity,
      where:
        fragment(
          "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
          activity.data,
          activity.data,
          ^to_string(ap_id)
        ),
      where: fragment("(?)->>'type' = 'Create'", activity.data)
    )
  end
  # Non-list, non-binary input cannot reference an object; no query to build.
  def create_by_object_ap_id(_), do: nil
def get_all_create_by_object_ap_id(ap_id) do
|
|
|
|
Repo.all(create_by_object_ap_id(ap_id))
|
|
|
|
end
|
|
|
|
|
2019-01-21 07:14:20 +01:00
|
|
|
def get_create_by_object_ap_id(ap_id) when is_binary(ap_id) do
|
2019-01-21 07:07:54 +01:00
|
|
|
create_by_object_ap_id(ap_id)
|
2019-04-11 12:22:42 +02:00
|
|
|
|> restrict_deactivated_users()
|
2018-03-30 15:01:53 +02:00
|
|
|
|> Repo.one()
|
2017-04-30 11:16:41 +02:00
|
|
|
end
|
2018-06-03 19:11:22 +02:00
|
|
|
|
2019-01-21 07:14:20 +01:00
|
|
|
def get_create_by_object_ap_id(_), do: nil
|
2018-06-18 22:54:59 +02:00
|
|
|
|
2019-03-23 00:34:47 +01:00
|
|
|
  @doc """
  Builds a query for the `Create` activity of object `ap_id`, inner-joining
  and preloading the object itself.
  """
  def create_by_object_ap_id_with_object(ap_id) when is_binary(ap_id) do
    from(
      activity in Activity,
      where:
        fragment(
          "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
          activity.data,
          activity.data,
          ^to_string(ap_id)
        ),
      where: fragment("(?)->>'type' = 'Create'", activity.data),
      inner_join: o in Object,
      on:
        fragment(
          "(?->>'id') = COALESCE(?->'object'->>'id', ?->>'object')",
          o.data,
          activity.data,
          activity.data
        ),
      preload: [object: o]
    )
  end

  # Non-binary input cannot reference an object; no query to build.
  def create_by_object_ap_id_with_object(_), do: nil
def get_create_by_object_ap_id_with_object(ap_id) when is_binary(ap_id) do
|
2019-03-23 00:34:47 +01:00
|
|
|
ap_id
|
|
|
|
|> create_by_object_ap_id_with_object()
|
|
|
|
|> Repo.one()
|
|
|
|
end
|
|
|
|
|
2019-04-18 20:40:40 +02:00
|
|
|
def get_create_by_object_ap_id_with_object(_), do: nil
|
2018-10-25 04:47:55 +02:00
|
|
|
|
2018-11-25 19:44:04 +01:00
|
|
|
  # Resolves an object's `inReplyTo` AP id to the Create activity it replies to.
  defp get_in_reply_to_activity_from_object(%Object{data: %{"inReplyTo" => ap_id}}) do
    get_create_by_object_ap_id_with_object(ap_id)
  end

  # Objects without an `inReplyTo` field (or nil) are not replies.
  defp get_in_reply_to_activity_from_object(_), do: nil
def get_in_reply_to_activity(%Activity{data: %{"object" => object}}) do
|
|
|
|
get_in_reply_to_activity_from_object(Object.normalize(object))
|
|
|
|
end
|
2018-10-25 04:47:55 +02:00
|
|
|
|
2019-04-17 11:22:32 +02:00
|
|
|
def normalize(obj) when is_map(obj), do: get_by_ap_id_with_object(obj["id"])
|
|
|
|
def normalize(ap_id) when is_binary(ap_id), do: get_by_ap_id_with_object(ap_id)
|
|
|
|
def normalize(_), do: nil
|
2018-12-10 15:50:10 +01:00
|
|
|
|
2019-03-09 12:12:15 +01:00
|
|
|
  @doc """
  Deletes every activity referencing object `id` and returns the deleted
  `Create` activity for that object, or `nil` when none was among them.
  """
  def delete_by_ap_id(id) when is_binary(id) do
    by_object_ap_id(id)
    # Select full rows so Repo.delete_all/1 can return the deleted structs.
    |> select([u], u)
    |> Repo.delete_all()
    # delete_all returns {count, returned_rows}; keep only the rows.
    |> elem(1)
    |> Enum.find(fn
      # Object stored as a bare AP id string.
      %{data: %{"type" => "Create", "object" => ap_id}} when is_binary(ap_id) -> ap_id == id
      # Object embedded as a full map.
      %{data: %{"type" => "Create", "object" => %{"id" => ap_id}}} -> ap_id == id
      # nil is falsy, so non-Create rows are skipped by Enum.find/2.
      _ -> nil
    end)
  end

  def delete_by_ap_id(_), do: nil
  # Compile one clause per known AP type, e.g.
  #   mastodon_notification_type(%Activity{data: %{"type" => "Follow"}}) #=> "follow"
  for {ap_type, type} <- @mastodon_notification_types do
    def mastodon_notification_type(%Activity{data: %{"type" => unquote(ap_type)}}),
      do: unquote(type)
  end

  # Any other activity type has no Mastodon notification equivalent.
  def mastodon_notification_type(%Activity{}), do: nil
def from_mastodon_notification_type(type) do
|
|
|
|
Map.get(@mastodon_to_ap_notification_types, type)
|
|
|
|
end
|
|
|
|
|
2019-02-20 17:51:25 +01:00
|
|
|
def all_by_actor_and_id(actor, status_ids \\ [])
|
|
|
|
def all_by_actor_and_id(_actor, []), do: []
|
|
|
|
|
|
|
|
def all_by_actor_and_id(actor, status_ids) do
|
|
|
|
Activity
|
|
|
|
|> where([s], s.id in ^status_ids)
|
|
|
|
|> where([s], s.actor == ^actor)
|
|
|
|
|> Repo.all()
|
|
|
|
end
|
2019-03-04 13:55:11 +01:00
|
|
|
|
2019-05-08 16:34:36 +02:00
|
|
|
  @doc """
  Builds a query for the pending `Follow` activities targeting the given
  user, i.e. their incoming follow requests awaiting approval.
  """
  def follow_requests_for_actor(%Pleroma.User{ap_id: ap_id}) do
    from(
      a in Activity,
      where:
        fragment(
          "? ->> 'type' = 'Follow'",
          a.data
        ),
      where:
        fragment(
          "? ->> 'state' = 'pending'",
          a.data
        ),
      # The follow target is the activity's object (embedded map or bare id).
      where:
        fragment(
          "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
          a.data,
          a.data,
          ^ap_id
        )
    )
  end
@spec query_by_actor(actor()) :: Ecto.Query.t()
|
|
|
|
def query_by_actor(actor) do
|
|
|
|
from(a in Activity, where: a.actor == ^actor)
|
|
|
|
end
|
2019-05-07 11:51:11 +02:00
|
|
|
|
2019-04-11 12:22:42 +02:00
|
|
|
  @doc """
  Narrows `query` to activities whose actor is not a deactivated user.

  Deactivation is stored in the users table's jsonb `info` field; the
  subselect collects the ap_ids of deactivated accounts.
  """
  def restrict_deactivated_users(query) do
    from(activity in query,
      where:
        fragment(
          "? not in (SELECT ap_id FROM users WHERE info->'deactivated' @> 'true')",
          activity.actor
        )
    )
  end
  # Full-text activity search is implemented in Pleroma.Activity.Search.
  defdelegate search(user, query, options \\ []), to: Pleroma.Activity.Search
end