Pleroma/lib/pleroma/web/activity_pub/activity_pub.ex

# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.ActivityPub do
alias Pleroma.Activity
alias Pleroma.Config
alias Pleroma.Conversation
alias Pleroma.Notification
alias Pleroma.Object
alias Pleroma.Object.Containment
alias Pleroma.Object.Fetcher
alias Pleroma.Pagination
alias Pleroma.Repo
alias Pleroma.Upload
alias Pleroma.User
alias Pleroma.Web.ActivityPub.MRF
alias Pleroma.Web.ActivityPub.Transmogrifier
alias Pleroma.Web.WebFinger
import Ecto.Query
import Pleroma.Web.ActivityPub.Utils
import Pleroma.Web.ActivityPub.Visibility
require Logger
require Pleroma.Constants
# For Announce activities, we filter the recipients based on following status for any actors
# that match actual users. See issue #164 for more information about why this is necessary.
defp get_recipients(%{"type" => "Announce"} = data) do
to = Map.get(data, "to", [])
cc = Map.get(data, "cc", [])
bcc = Map.get(data, "bcc", [])
actor = User.get_cached_by_ap_id(data["actor"])
recipients =
Enum.filter(Enum.concat([to, cc, bcc]), fn recipient ->
case User.get_cached_by_ap_id(recipient) do
nil -> true
user -> User.following?(user, actor)
end
end)
{recipients, to, cc}
end
defp get_recipients(%{"type" => "Create"} = data) do
to = Map.get(data, "to", [])
cc = Map.get(data, "cc", [])
bcc = Map.get(data, "bcc", [])
actor = Map.get(data, "actor", [])
recipients = [to, cc, bcc, [actor]] |> Enum.concat() |> Enum.uniq()
{recipients, to, cc}
end
defp get_recipients(data) do
to = Map.get(data, "to", [])
cc = Map.get(data, "cc", [])
bcc = Map.get(data, "bcc", [])
recipients = Enum.concat([to, cc, bcc])
{recipients, to, cc}
end
defp check_actor_is_active(actor) do
if not is_nil(actor) do
with user <- User.get_cached_by_ap_id(actor),
false <- user.info.deactivated do
true
else
_e -> false
end
else
true
end
end
defp check_remote_limit(%{"object" => %{"content" => content}}) when not is_nil(content) do
limit = Config.get([:instance, :remote_limit])
String.length(content) <= limit
end
defp check_remote_limit(_), do: true
def increase_note_count_if_public(actor, object) do
if is_public?(object), do: User.increase_note_count(actor), else: {:ok, actor}
end
def decrease_note_count_if_public(actor, object) do
if is_public?(object), do: User.decrease_note_count(actor), else: {:ok, actor}
end
def increase_replies_count_if_reply(%{
"object" => %{"inReplyTo" => reply_ap_id} = object,
"type" => "Create"
}) do
if is_public?(object) do
Object.increase_replies_count(reply_ap_id)
end
end
def increase_replies_count_if_reply(_create_data), do: :noop
def decrease_replies_count_if_reply(%Object{
data: %{"inReplyTo" => reply_ap_id} = object
}) do
if is_public?(object) do
Object.decrease_replies_count(reply_ap_id)
end
end
def decrease_replies_count_if_reply(_object), do: :noop
def increase_poll_votes_if_vote(%{
"object" => %{"inReplyTo" => reply_ap_id, "name" => name},
"type" => "Create"
}) do
Object.increase_vote_count(reply_ap_id, name)
end
def increase_poll_votes_if_vote(_create_data), do: :noop
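# Inserts an activity given as a raw map. Returns an already-known activity as-is,
# otherwise fills in defaults, checks that the actor is active (unless bypassed),
# enforces the remote content limit, runs MRF, inserts the child object, creates
# notifications and streams the result out. With `fake` set to true nothing is
# persisted and a fake activity with the id "pleroma:fakeid" is returned instead.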
def insert(map, local \\ true, fake \\ false, bypass_actor_check \\ false) when is_map(map) do
with nil <- Activity.normalize(map),
map <- lazy_put_activity_defaults(map, fake),
true <- bypass_actor_check || check_actor_is_active(map["actor"]),
{_, true} <- {:remote_limit_error, check_remote_limit(map)},
{:ok, map} <- MRF.filter(map),
{recipients, _, _} = get_recipients(map),
{:fake, false, map, recipients} <- {:fake, fake, map, recipients},
:ok <- Containment.contain_child(map),
{:ok, map, object} <- insert_full_object(map) do
{:ok, activity} =
Repo.insert(%Activity{
data: map,
local: local,
actor: map["actor"],
recipients: recipients
})
# Splice in the child object if we have one.
activity =
if not is_nil(object) do
Map.put(activity, :object, object)
else
activity
end
PleromaJobQueue.enqueue(:background, Pleroma.Web.RichMedia.Helpers, [:fetch, activity])
Notification.create_notifications(activity)
participations =
activity
|> Conversation.create_or_bump_for()
|> get_participations()
stream_out(activity)
stream_out_participations(participations)
{:ok, activity}
else
%Activity{} = activity ->
{:ok, activity}
{:fake, true, map, recipients} ->
activity = %Activity{
data: map,
local: local,
actor: map["actor"],
recipients: recipients,
id: "pleroma:fakeid"
}
Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity)
{:ok, activity}
error ->
{:error, error}
end
end
defp get_participations({:ok, %{participations: participations}}), do: participations
defp get_participations(_), do: []
def stream_out_participations(participations) do
participations =
participations
|> Repo.preload(:user)
Enum.each(participations, fn participation ->
Pleroma.Web.Streamer.stream("participation", participation)
end)
end
def stream_out_participations(%Object{data: %{"context" => context}}, user) do
with %Conversation{} = conversation <- Conversation.get_for_ap_id(context),
conversation = Repo.preload(conversation, :participations),
last_activity_id =
fetch_latest_activity_id_for_context(conversation.ap_id, %{
"user" => user,
"blocking_user" => user
}) do
if last_activity_id do
stream_out_participations(conversation.participations)
end
end
end
def stream_out_participations(_, _), do: :noop
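# Pushes Create, Announce and Delete activities to the streaming channels:
# "user" and "list", plus the public, hashtag and media channels for public
# activities, or "direct" for direct ones. Poll replies ("Answer" objects)
# are never streamed out.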
def stream_out(activity) do
if activity.data["type"] in ["Create", "Announce", "Delete"] do
object = Object.normalize(activity)
# Do not stream out poll replies
unless object.data["type"] == "Answer" do
Pleroma.Web.Streamer.stream("user", activity)
Pleroma.Web.Streamer.stream("list", activity)
if get_visibility(activity) == "public" do
Pleroma.Web.Streamer.stream("public", activity)
if activity.local do
Pleroma.Web.Streamer.stream("public:local", activity)
end
if activity.data["type"] in ["Create"] do
object.data
|> Map.get("tag", [])
|> Enum.filter(fn tag -> is_bitstring(tag) end)
|> Enum.each(fn tag -> Pleroma.Web.Streamer.stream("hashtag:" <> tag, activity) end)
if object.data["attachment"] != [] do
Pleroma.Web.Streamer.stream("public:media", activity)
if activity.local do
Pleroma.Web.Streamer.stream("public:local:media", activity)
end
end
end
else
if get_visibility(activity) == "direct",
do: Pleroma.Web.Streamer.stream("direct", activity)
end
end
end
end
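# Builds a Create activity from the given parameters, inserts it and federates it
# unless `local: false` is passed. Also bumps reply and poll-vote counters and the
# actor's note count for public posts.
#
# Illustrative call (values are placeholders):
#
#   ActivityPub.create(%{
#     to: [user.follower_address],
#     actor: user,
#     context: "https://example.com/contexts/1",
#     object: object_data
#   })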
def create(%{to: to, actor: actor, context: context, object: object} = params, fake \\ false) do
additional = params[:additional] || %{}
# only an explicit `false` counts as false
local = !(params[:local] == false)
published = params[:published]
with create_data <-
make_create_data(
%{to: to, actor: actor, published: published, context: context, object: object},
additional
),
{:ok, activity} <- insert(create_data, local, fake),
{:fake, false, activity} <- {:fake, fake, activity},
_ <- increase_replies_count_if_reply(create_data),
_ <- increase_poll_votes_if_vote(create_data),
# Changing note count prior to enqueuing federation task in order to avoid
# race conditions on updating user.info
{:ok, _actor} <- increase_note_count_if_public(actor, activity),
:ok <- maybe_federate(activity) do
{:ok, activity}
else
{:fake, true, activity} ->
{:ok, activity}
{:error, message} ->
{:error, message}
end
end
def accept(%{to: to, actor: actor, object: object} = params) do
# only an explicit `false` counts as false
local = !(params[:local] == false)
with data <- %{"to" => to, "type" => "Accept", "actor" => actor.ap_id, "object" => object},
{:ok, activity} <- insert(data, local),
:ok <- maybe_federate(activity) do
{:ok, activity}
end
end
def reject(%{to: to, actor: actor, object: object} = params) do
# only an explicit `false` counts as false
local = !(params[:local] == false)
with data <- %{"to" => to, "type" => "Reject", "actor" => actor.ap_id, "object" => object},
{:ok, activity} <- insert(data, local),
:ok <- maybe_federate(activity) do
{:ok, activity}
end
end
def update(%{to: to, cc: cc, actor: actor, object: object} = params) do
# only an explicit `false` counts as false
local = !(params[:local] == false)
with data <- %{
"to" => to,
"cc" => cc,
"type" => "Update",
"actor" => actor,
"object" => object
},
{:ok, activity} <- insert(data, local),
:ok <- maybe_federate(activity) do
{:ok, activity}
end
end
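# Likes an object on behalf of a user. If this user has already liked the object,
# the existing Like activity is returned instead of creating a duplicate.
#
# Illustrative call: ActivityPub.like(user, object)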
# TODO: This is weird, maybe we shouldn't check here if we can make the activity.
def like(
%User{ap_id: ap_id} = user,
%Object{data: %{"id" => _}} = object,
activity_id \\ nil,
local \\ true
) do
with nil <- get_existing_like(ap_id, object),
like_data <- make_like_data(user, object, activity_id),
{:ok, activity} <- insert(like_data, local),
{:ok, object} <- add_like_to_object(activity, object),
:ok <- maybe_federate(activity) do
{:ok, activity, object}
else
%Activity{} = activity -> {:ok, activity, object}
error -> {:error, error}
end
end
def unlike(%User{} = actor, %Object{} = object, activity_id \\ nil, local \\ true) do
with %Activity{} = like_activity <- get_existing_like(actor.ap_id, object),
unlike_data <- make_unlike_data(actor, like_activity, activity_id),
{:ok, unlike_activity} <- insert(unlike_data, local),
{:ok, _activity} <- Repo.delete(like_activity),
{:ok, object} <- remove_like_from_object(like_activity, object),
:ok <- maybe_federate(unlike_activity) do
{:ok, unlike_activity, like_activity, object}
else
_e -> {:ok, object}
end
end
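# Announces (repeats) a public object on behalf of a user, records the announce
# on the object and federates the activity.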
def announce(
%User{ap_id: _} = user,
%Object{data: %{"id" => _}} = object,
activity_id \\ nil,
local \\ true,
public \\ true
) do
with true <- is_public?(object),
announce_data <- make_announce_data(user, object, activity_id, public),
{:ok, activity} <- insert(announce_data, local),
{:ok, object} <- add_announce_to_object(activity, object),
:ok <- maybe_federate(activity) do
{:ok, activity, object}
else
error -> {:error, error}
end
end
def unannounce(
%User{} = actor,
%Object{} = object,
activity_id \\ nil,
local \\ true
) do
with %Activity{} = announce_activity <- get_existing_announce(actor.ap_id, object),
unannounce_data <- make_unannounce_data(actor, announce_activity, activity_id),
{:ok, unannounce_activity} <- insert(unannounce_data, local),
:ok <- maybe_federate(unannounce_activity),
{:ok, _activity} <- Repo.delete(announce_activity),
{:ok, object} <- remove_announce_from_object(announce_activity, object) do
{:ok, unannounce_activity, object}
else
_e -> {:ok, object}
end
end
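# Creates and federates a Follow activity and caches the resulting follow state
# for the follower/followed pair.
#
# Illustrative call: ActivityPub.follow(follower, followed)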
def follow(follower, followed, activity_id \\ nil, local \\ true) do
with data <- make_follow_data(follower, followed, activity_id),
{:ok, activity} <- insert(data, local),
:ok <- maybe_federate(activity),
_ <- User.set_follow_state_cache(follower.ap_id, followed.ap_id, activity.data["state"]) do
{:ok, activity}
end
end
def unfollow(follower, followed, activity_id \\ nil, local \\ true) do
with %Activity{} = follow_activity <- fetch_latest_follow(follower, followed),
{:ok, follow_activity} <- update_follow_state(follow_activity, "cancelled"),
unfollow_data <- make_unfollow_data(follower, followed, follow_activity, activity_id),
{:ok, activity} <- insert(unfollow_data, local),
:ok <- maybe_federate(activity) do
{:ok, activity}
end
end
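# Deletes an account: builds a Delete activity addressed to the user's followers,
# with the Person object as its target.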
def delete(%User{ap_id: ap_id, follower_address: follower_address} = user) do
with data <- %{
"to" => [follower_address],
"type" => "Delete",
"actor" => ap_id,
"object" => %{"type" => "Person", "id" => ap_id}
},
{:ok, activity} <- insert(data, true, true, true),
:ok <- maybe_federate(activity) do
{:ok, user}
end
end
def delete(%Object{data: %{"id" => id, "actor" => actor}} = object, local \\ true) do
user = User.get_cached_by_ap_id(actor)
to = (object.data["to"] || []) ++ (object.data["cc"] || [])
with {:ok, object, activity} <- Object.delete(object),
data <- %{
"type" => "Delete",
"actor" => actor,
"object" => id,
"to" => to,
"deleted_activity_id" => activity && activity.id
},
{:ok, activity} <- insert(data, local, false),
stream_out_participations(object, user),
_ <- decrease_replies_count_if_reply(object),
# Changing note count prior to enqueuing federation task in order to avoid
# race conditions on updating user.info
{:ok, _actor} <- decrease_note_count_if_public(user, object),
:ok <- maybe_federate(activity) do
{:ok, activity}
end
end
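# Blocks a user. Depending on the :unfollow_blocked and :outgoing_blocks settings,
# the blocked user is unfollowed first and the Block activity may be skipped
# entirely, in which case {:ok, nil} is returned.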
def block(blocker, blocked, activity_id \\ nil, local \\ true) do
outgoing_blocks = Config.get([:activitypub, :outgoing_blocks])
unfollow_blocked = Config.get([:activitypub, :unfollow_blocked])
if unfollow_blocked do
follow_activity = fetch_latest_follow(blocker, blocked)
if follow_activity, do: unfollow(blocker, blocked, nil, local)
end
with true <- outgoing_blocks,
block_data <- make_block_data(blocker, blocked, activity_id),
{:ok, activity} <- insert(block_data, local),
:ok <- maybe_federate(activity) do
{:ok, activity}
else
_e -> {:ok, nil}
end
end
def unblock(blocker, blocked, activity_id \\ nil, local \\ true) do
with %Activity{} = block_activity <- fetch_latest_block(blocker, blocked),
unblock_data <- make_unblock_data(blocker, blocked, block_activity, activity_id),
{:ok, activity} <- insert(unblock_data, local),
:ok <- maybe_federate(activity) do
{:ok, activity}
end
end
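# Creates a Flag (report) activity for the given account and statuses. When
# `forward` is not false, the report is cc'd to the reported account so it
# federates to their instance. All local superusers are notified by email.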
def flag(
%{
actor: actor,
context: context,
account: account,
statuses: statuses,
content: content
} = params
) do
# only an explicit `false` counts as false
local = !(params[:local] == false)
forward = !(params[:forward] == false)
additional = params[:additional] || %{}
params = %{
actor: actor,
context: context,
account: account,
statuses: statuses,
content: content
}
additional =
if forward do
Map.merge(additional, %{"to" => [], "cc" => [account.ap_id]})
else
Map.merge(additional, %{"to" => [], "cc" => []})
end
with flag_data <- make_flag_data(params, additional),
{:ok, activity} <- insert(flag_data, local),
:ok <- maybe_federate(activity) do
Enum.each(User.all_superusers(), fn superuser ->
superuser
|> Pleroma.Emails.AdminEmail.report(actor, account, statuses, content)
|> Pleroma.Emails.Mailer.deliver_async()
end)
{:ok, activity}
end
end
defp fetch_activities_for_context_query(context, opts) do
public = [Pleroma.Constants.as_public()]
recipients =
if opts["user"], do: [opts["user"].ap_id | opts["user"].following] ++ public, else: public
from(activity in Activity)
|> maybe_preload_objects(opts)
|> maybe_preload_bookmarks(opts)
|> maybe_set_thread_muted_field(opts)
|> restrict_blocked(opts)
|> restrict_recipients(recipients, opts["user"])
|> where(
[activity],
fragment(
"?->>'type' = ? and ?->>'context' = ?",
activity.data,
"Create",
activity.data,
^context
)
)
|> exclude_poll_votes(opts)
|> exclude_id(opts)
|> order_by([activity], desc: activity.id)
end
@spec fetch_activities_for_context(String.t(), keyword() | map()) :: [Activity.t()]
def fetch_activities_for_context(context, opts \\ %{}) do
context
|> fetch_activities_for_context_query(opts)
|> Repo.all()
end
@spec fetch_latest_activity_id_for_context(String.t(), keyword() | map()) ::
Pleroma.FlakeId.t() | nil
def fetch_latest_activity_id_for_context(context, opts \\ %{}) do
context
|> fetch_activities_for_context_query(Map.merge(%{"skip_preload" => true}, opts))
|> limit(1)
|> select([a], a.id)
|> Repo.one()
end
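# Fetches the public timeline: activities addressed to the public collection,
# excluding unlisted posts, paginated via Pagination.fetch_paginated/2 and reversed.
#
# Illustrative call: ActivityPub.fetch_public_activities(%{"local_only" => true})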
def fetch_public_activities(opts \\ %{}) do
q = fetch_activities_query([Pleroma.Constants.as_public()], opts)
q
|> restrict_unlisted()
|> Pagination.fetch_paginated(opts)
|> Enum.reverse()
end
@valid_visibilities ~w[direct unlisted public private]
defp restrict_visibility(query, %{visibility: visibility})
when is_list(visibility) do
if Enum.all?(visibility, &(&1 in @valid_visibilities)) do
query =
from(
a in query,
where:
fragment(
"activity_visibility(?, ?, ?) = ANY (?)",
a.actor,
a.recipients,
a.data,
^visibility
)
)
query
else
Logger.error("Could not restrict visibility to #{visibility}")
end
end
defp restrict_visibility(query, %{visibility: visibility})
when visibility in @valid_visibilities do
from(
a in query,
where:
fragment("activity_visibility(?, ?, ?) = ?", a.actor, a.recipients, a.data, ^visibility)
)
end
defp restrict_visibility(_query, %{visibility: visibility})
when visibility not in @valid_visibilities do
Logger.error("Could not restrict visibility to #{visibility}")
end
defp restrict_visibility(query, _visibility), do: query
defp restrict_thread_visibility(query, _, %{skip_thread_containment: true} = _),
do: query
defp restrict_thread_visibility(
query,
%{"user" => %User{info: %{skip_thread_containment: true}}},
_
),
do: query
defp restrict_thread_visibility(query, %{"user" => %User{ap_id: ap_id}}, _) do
from(
a in query,
where: fragment("thread_visibility(?, (?)->>'id') = true", ^ap_id, a.data)
)
end
defp restrict_thread_visibility(query, _, _), do: query
def fetch_user_activities(user, reading_user, params \\ %{}) do
params =
params
|> Map.put("type", ["Create", "Announce"])
|> Map.put("user", reading_user)
|> Map.put("actor_id", user.ap_id)
|> Map.put("whole_db", true)
|> Map.put("pinned_activity_ids", user.info.pinned_activities)
recipients =
user_activities_recipients(%{
"godmode" => params["godmode"],
"reading_user" => reading_user
})
fetch_activities(recipients, params)
|> Enum.reverse()
end
defp user_activities_recipients(%{"godmode" => true}) do
[]
end
defp user_activities_recipients(%{"reading_user" => reading_user}) do
if reading_user do
[Pleroma.Constants.as_public()] ++ [reading_user.ap_id | reading_user.following]
else
[Pleroma.Constants.as_public()]
end
end
defp restrict_since(query, %{"since_id" => ""}), do: query
defp restrict_since(query, %{"since_id" => since_id}) do
from(activity in query, where: activity.id > ^since_id)
end
defp restrict_since(query, _), do: query
defp restrict_tag_reject(_query, %{"tag_reject" => _tag_reject, "skip_preload" => true}) do
raise "Can't use the child object without preloading!"
end
defp restrict_tag_reject(query, %{"tag_reject" => tag_reject})
when is_list(tag_reject) and tag_reject != [] do
from(
[_activity, object] in query,
where: fragment("not (?)->'tag' \\?| (?)", object.data, ^tag_reject)
)
end
defp restrict_tag_reject(query, _), do: query
defp restrict_tag_all(_query, %{"tag_all" => _tag_all, "skip_preload" => true}) do
raise "Can't use the child object without preloading!"
end
defp restrict_tag_all(query, %{"tag_all" => tag_all})
when is_list(tag_all) and tag_all != [] do
from(
[_activity, object] in query,
where: fragment("(?)->'tag' \\?& (?)", object.data, ^tag_all)
)
end
defp restrict_tag_all(query, _), do: query
defp restrict_tag(_query, %{"tag" => _tag, "skip_preload" => true}) do
raise "Can't use the child object without preloading!"
end
defp restrict_tag(query, %{"tag" => tag}) when is_list(tag) do
from(
[_activity, object] in query,
where: fragment("(?)->'tag' \\?| (?)", object.data, ^tag)
)
end
defp restrict_tag(query, %{"tag" => tag}) when is_binary(tag) do
from(
[_activity, object] in query,
where: fragment("(?)->'tag' \\? (?)", object.data, ^tag)
)
end
defp restrict_tag(query, _), do: query
defp restrict_recipients(query, [], _user), do: query
defp restrict_recipients(query, recipients, nil) do
from(activity in query, where: fragment("? && ?", ^recipients, activity.recipients))
end
defp restrict_recipients(query, recipients, user) do
from(
activity in query,
where: fragment("? && ?", ^recipients, activity.recipients),
or_where: activity.actor == ^user.ap_id
)
end
defp restrict_local(query, %{"local_only" => true}) do
from(activity in query, where: activity.local == true)
end
defp restrict_local(query, _), do: query
defp restrict_actor(query, %{"actor_id" => actor_id}) do
from(activity in query, where: activity.actor == ^actor_id)
end
defp restrict_actor(query, _), do: query
defp restrict_type(query, %{"type" => type}) when is_binary(type) do
from(activity in query, where: fragment("?->>'type' = ?", activity.data, ^type))
end
defp restrict_type(query, %{"type" => type}) do
from(activity in query, where: fragment("?->>'type' = ANY(?)", activity.data, ^type))
end
defp restrict_type(query, _), do: query
defp restrict_state(query, %{"state" => state}) do
from(activity in query, where: fragment("?->>'state' = ?", activity.data, ^state))
end
defp restrict_state(query, _), do: query
defp restrict_favorited_by(query, %{"favorited_by" => ap_id}) do
from(
[_activity, object] in query,
where: fragment("(?)->'likes' \\? (?)", object.data, ^ap_id)
)
end
defp restrict_favorited_by(query, _), do: query
defp restrict_media(_query, %{"only_media" => _val, "skip_preload" => true}) do
raise "Can't use the child object without preloading!"
end
defp restrict_media(query, %{"only_media" => val}) when val == "true" or val == "1" do
from(
[_activity, object] in query,
where: fragment("not (?)->'attachment' = (?)", object.data, ^[])
)
2017-11-14 14:41:16 +01:00
end
2018-03-30 15:01:53 +02:00
2017-11-14 14:41:16 +01:00
defp restrict_media(query, _), do: query
defp restrict_replies(query, %{"exclude_replies" => val}) when val == "true" or val == "1" do
from(
activity in query,
where: fragment("?->'object'->>'inReplyTo' is null", activity.data)
)
end
defp restrict_replies(query, _), do: query
defp restrict_reblogs(query, %{"exclude_reblogs" => val}) when val == "true" or val == "1" do
from(activity in query, where: fragment("?->>'type' != 'Announce'", activity.data))
end
defp restrict_reblogs(query, _), do: query
defp restrict_muted(query, %{"with_muted" => val}) when val in [true, "true", "1"], do: query
defp restrict_muted(query, %{"muting_user" => %User{info: info}} = opts) do
mutes = info.mutes
query =
from([activity] in query,
where: fragment("not (? = ANY(?))", activity.actor, ^mutes),
where: fragment("not (?->'to' \\?| ?)", activity.data, ^mutes)
)
unless opts["skip_preload"] do
from([thread_mute: tm] in query, where: is_nil(tm))
else
query
end
end
defp restrict_muted(query, _), do: query
defp restrict_blocked(query, %{"blocking_user" => %User{info: info}}) do
blocks = info.blocks || []
domain_blocks = info.domain_blocks || []
query =
if has_named_binding?(query, :object), do: query, else: Activity.with_joined_object(query)
from(
[activity, object: o] in query,
where: fragment("not (? = ANY(?))", activity.actor, ^blocks),
where: fragment("not (? && ?)", activity.recipients, ^blocks),
where:
fragment(
"not (?->>'type' = 'Announce' and ?->'to' \\?| ?)",
activity.data,
activity.data,
^blocks
),
where: fragment("not (split_part(?, '/', 3) = ANY(?))", activity.actor, ^domain_blocks),
where: fragment("not (split_part(?->>'actor', '/', 3) = ANY(?))", o.data, ^domain_blocks)
)
end
defp restrict_blocked(query, _), do: query
defp restrict_unlisted(query) do
from(
activity in query,
where:
fragment(
"not (coalesce(?->'cc', '{}'::jsonb) \\?| ?)",
activity.data,
^[Pleroma.Constants.as_public()]
)
)
end
defp restrict_pinned(query, %{"pinned" => "true", "pinned_activity_ids" => ids}) do
from(activity in query, where: activity.id in ^ids)
end
defp restrict_pinned(query, _), do: query
defp restrict_muted_reblogs(query, %{"muting_user" => %User{info: info}}) do
muted_reblogs = info.muted_reblogs || []
from(
activity in query,
where:
fragment(
"not ( ?->>'type' = 'Announce' and ? = ANY(?))",
activity.data,
activity.actor,
^muted_reblogs
)
)
end
defp restrict_muted_reblogs(query, _), do: query
defp exclude_poll_votes(query, %{"include_poll_votes" => "true"}), do: query
defp exclude_poll_votes(query, _) do
if has_named_binding?(query, :object) do
from([activity, object: o] in query,
where: fragment("not(?->>'type' = ?)", o.data, "Answer")
)
else
query
end
end
defp exclude_id(query, %{"exclude_id" => id}) when is_binary(id) do
from(activity in query, where: activity.id != ^id)
end
defp exclude_id(query, _), do: query
defp maybe_preload_objects(query, %{"skip_preload" => true}), do: query
defp maybe_preload_objects(query, _) do
query
|> Activity.with_preloaded_object()
end
defp maybe_preload_bookmarks(query, %{"skip_preload" => true}), do: query
defp maybe_preload_bookmarks(query, opts) do
query
|> Activity.with_preloaded_bookmark(opts["user"])
end
defp maybe_set_thread_muted_field(query, %{"skip_preload" => true}), do: query
defp maybe_set_thread_muted_field(query, opts) do
query
|> Activity.with_set_thread_muted_field(opts["muting_user"] || opts["user"])
end
defp maybe_order(query, %{order: :desc}) do
query
|> order_by(desc: :id)
end
defp maybe_order(query, %{order: :asc}) do
query
|> order_by(asc: :id)
end
defp maybe_order(query, _), do: query
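# Builds the general timeline query. Each restrict_*/maybe_* helper above only
# narrows or decorates the query when its key is present in `opts`, so this one
# pipeline serves every timeline variant (tags, visibility, media-only, pinned,
# mutes, blocks, and so on).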
def fetch_activities_query(recipients, opts \\ %{}) do
config = %{
skip_thread_containment: Config.get([:instance, :skip_thread_containment])
}
Activity
|> maybe_preload_objects(opts)
|> maybe_preload_bookmarks(opts)
|> maybe_set_thread_muted_field(opts)
|> maybe_order(opts)
|> restrict_recipients(recipients, opts["user"])
|> restrict_tag(opts)
|> restrict_tag_reject(opts)
|> restrict_tag_all(opts)
|> restrict_since(opts)
|> restrict_local(opts)
|> restrict_actor(opts)
|> restrict_type(opts)
|> restrict_state(opts)
|> restrict_favorited_by(opts)
|> restrict_blocked(opts)
|> restrict_muted(opts)
|> restrict_media(opts)
|> restrict_visibility(opts)
|> restrict_thread_visibility(opts, config)
|> restrict_replies(opts)
|> restrict_reblogs(opts)
|> restrict_pinned(opts)
|> restrict_muted_reblogs(opts)
|> Activity.restrict_deactivated_users()
|> exclude_poll_votes(opts)
end
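# Fetches a paginated page of activities addressed to the given recipients plus
# the user's list memberships, reversed, with `cc` patched so that posts bcc'd
# to one of the reader's lists become visible to them.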
def fetch_activities(recipients, opts \\ %{}) do
list_memberships = Pleroma.List.memberships(opts["user"])
fetch_activities_query(recipients ++ list_memberships, opts)
|> Pagination.fetch_paginated(opts)
|> Enum.reverse()
|> maybe_update_cc(list_memberships, opts["user"])
end
defp maybe_update_cc(activities, list_memberships, %User{ap_id: user_ap_id})
when is_list(list_memberships) and length(list_memberships) > 0 do
Enum.map(activities, fn
%{data: %{"bcc" => bcc}} = activity when is_list(bcc) and length(bcc) > 0 ->
if Enum.any?(bcc, &(&1 in list_memberships)) do
update_in(activity.data["cc"], &[user_ap_id | &1])
else
activity
end
activity ->
activity
end)
end
defp maybe_update_cc(activities, _, _), do: activities
def fetch_activities_bounded_query(query, recipients, recipients_with_public) do
from(activity in query,
where:
fragment("? && ?", activity.recipients, ^recipients) or
(fragment("? && ?", activity.recipients, ^recipients_with_public) and
^Pleroma.Constants.as_public() in activity.recipients)
)
end
def fetch_activities_bounded(recipients, recipients_with_public, opts \\ %{}) do
fetch_activities_query([], opts)
|> fetch_activities_bounded_query(recipients, recipients_with_public)
|> Pagination.fetch_paginated(opts)
|> Enum.reverse()
end
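# Stores an uploaded file and wraps the result in an Object, optionally tagging
# it with the uploading actor.
#
# Illustrative call: ActivityPub.upload(file, actor: user.ap_id)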
def upload(file, opts \\ []) do
with {:ok, data} <- Upload.store(file, opts) do
obj_data =
if opts[:actor] do
Map.put(data, "actor", opts[:actor])
else
data
end
Repo.insert(%Object{data: obj_data})
end
end
defp object_to_user_data(data) do
avatar =
data["icon"]["url"] &&
%{
"type" => "Image",
"url" => [%{"href" => data["icon"]["url"]}]
}
banner =
data["image"]["url"] &&
%{
"type" => "Image",
"url" => [%{"href" => data["image"]["url"]}]
}
fields =
data
|> Map.get("attachment", [])
|> Enum.filter(fn %{"type" => t} -> t == "PropertyValue" end)
|> Enum.map(fn fields -> Map.take(fields, ["name", "value"]) end)
locked = data["manuallyApprovesFollowers"] || false
data = Transmogrifier.maybe_fix_user_object(data)
user_data = %{
ap_id: data["id"],
info: %{
ap_enabled: true,
source_data: data,
banner: banner,
fields: fields,
locked: locked
},
avatar: avatar,
name: data["name"],
follower_address: data["followers"],
following_address: data["following"],
bio: data["summary"]
}
# nickname can be nil because of virtual actors
user_data =
if data["preferredUsername"] do
Map.put(
user_data,
:nickname,
"#{data["preferredUsername"]}@#{URI.parse(data["id"]).host}"
)
else
Map.put(user_data, :nickname, nil)
end
{:ok, user_data}
end
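# Fetches the remote follower/following collections for a user and derives the
# counts, plus whether each collection is hidden (a 401/403 on the first page is
# treated as hidden).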
def fetch_follow_information_for_user(user) do
with {:ok, following_data} <-
Fetcher.fetch_and_contain_remote_object_from_id(user.following_address),
following_count when is_integer(following_count) <- following_data["totalItems"],
{:ok, hide_follows} <- collection_private(following_data),
{:ok, followers_data} <-
Fetcher.fetch_and_contain_remote_object_from_id(user.follower_address),
followers_count when is_integer(followers_count) <- followers_data["totalItems"],
{:ok, hide_followers} <- collection_private(followers_data) do
{:ok,
%{
hide_follows: hide_follows,
follower_count: followers_count,
following_count: following_count,
hide_followers: hide_followers
}}
else
{:error, _} = e ->
e
e ->
{:error, e}
end
end
defp maybe_update_follow_information(data) do
with {:enabled, true} <-
{:enabled, Pleroma.Config.get([:instance, :external_user_synchronization])},
{:ok, info} <- fetch_follow_information_for_user(data) do
info = Map.merge(data.info, info)
Map.put(data, :info, info)
else
{:enabled, false} ->
data
e ->
Logger.error(
"Follower/Following counter update for #{data.ap_id} failed.\n" <> inspect(e)
)
data
end
end
defp collection_private(data) do
if is_map(data["first"]) and
data["first"]["type"] in ["CollectionPage", "OrderedCollectionPage"] do
{:ok, false}
else
with {:ok, %{"type" => type}} when type in ["CollectionPage", "OrderedCollectionPage"] <-
Fetcher.fetch_and_contain_remote_object_from_id(data["first"]) do
{:ok, false}
else
{:error, {:ok, %{status: code}}} when code in [401, 403] ->
{:ok, true}
{:error, _} = e ->
e
e ->
{:error, e}
end
end
end
def user_data_from_user_object(data) do
with {:ok, data} <- MRF.filter(data),
{:ok, data} <- object_to_user_data(data) do
{:ok, data}
else
e -> {:error, e}
end
end
def fetch_and_prepare_user_from_ap_id(ap_id) do
with {:ok, data} <- Fetcher.fetch_and_contain_remote_object_from_id(ap_id),
{:ok, data} <- user_data_from_user_object(data),
data <- maybe_update_follow_information(data) do
{:ok, data}
else
e -> Logger.error("Could not decode user at fetch #{ap_id}, #{inspect(e)}")
end
end
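# Ensures a local record exists for a remote actor: upgrades an already-known
# user in place, otherwise fetches the actor document, runs it through MRF and
# inserts a new user.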
def make_user_from_ap_id(ap_id) do
if _user = User.get_cached_by_ap_id(ap_id) do
Transmogrifier.upgrade_user_from_ap_id(ap_id)
else
with {:ok, data} <- fetch_and_prepare_user_from_ap_id(ap_id) do
User.insert_or_update_user(data)
else
e -> {:error, e}
end
end
end
def make_user_from_nickname(nickname) do
with {:ok, %{"ap_id" => ap_id}} when not is_nil(ap_id) <- WebFinger.finger(nickname) do
make_user_from_ap_id(ap_id)
else
_e -> {:error, "No AP id in WebFinger"}
end
end
# filter out broken threads
def contain_broken_threads(%Activity{} = activity, %User{} = user) do
entire_thread_visible_for_user?(activity, user)
end
# do post-processing on a specific activity
def contain_activity(%Activity{} = activity, %User{} = user) do
contain_broken_threads(activity, user)
end
def fetch_direct_messages_query do
Activity
|> restrict_type(%{"type" => "Create"})
|> restrict_visibility(%{visibility: "direct"})
|> order_by([activity], asc: activity.id)
end
end