2018-12-23 21:04:54 +01:00
|
|
|
# Pleroma: A lightweight social networking server
|
2018-12-31 16:41:47 +01:00
|
|
|
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
|
2018-12-23 21:04:54 +01:00
|
|
|
# SPDX-License-Identifier: AGPL-3.0-only
|
|
|
|
|
2017-03-21 09:21:52 +01:00
|
|
|
defmodule Pleroma.Web.ActivityPub.ActivityPub do
|
2019-02-09 16:16:26 +01:00
|
|
|
alias Pleroma.Activity
|
2019-03-05 03:52:23 +01:00
|
|
|
alias Pleroma.Instances
|
|
|
|
alias Pleroma.Notification
|
2019-02-09 16:16:26 +01:00
|
|
|
alias Pleroma.Object
|
2019-03-05 03:52:23 +01:00
|
|
|
alias Pleroma.Repo
|
2019-02-09 16:16:26 +01:00
|
|
|
alias Pleroma.Upload
|
|
|
|
alias Pleroma.User
|
|
|
|
alias Pleroma.Web.ActivityPub.MRF
|
2019-03-05 03:52:23 +01:00
|
|
|
alias Pleroma.Web.ActivityPub.Transmogrifier
|
2019-02-09 16:16:26 +01:00
|
|
|
alias Pleroma.Web.Federator
|
|
|
|
alias Pleroma.Web.OStatus
|
2019-03-05 03:52:23 +01:00
|
|
|
alias Pleroma.Web.WebFinger
|
2019-02-06 20:20:02 +01:00
|
|
|
|
2017-03-21 17:53:20 +01:00
|
|
|
import Ecto.Query
|
2017-05-16 15:31:11 +02:00
|
|
|
import Pleroma.Web.ActivityPub.Utils
|
2019-02-22 13:29:52 +01:00
|
|
|
import Pleroma.Web.ActivityPub.Visibility
|
2019-02-06 20:20:02 +01:00
|
|
|
|
2017-05-07 20:16:07 +02:00
|
|
|
require Logger
|
2017-03-21 09:21:52 +01:00
|
|
|
|
2018-02-11 17:20:02 +01:00
|
|
|
@httpoison Application.get_env(:pleroma, :httpoison)
|
|
|
|
|
2018-06-18 06:33:41 +02:00
|
|
|
# For Announce activities, we filter the recipients based on following status for any actors
# that match actual users.  See issue #164 for more information about why this is necessary.
defp get_recipients(%{"type" => "Announce"} = data) do
  to = data["to"] || []
  cc = data["cc"] || []
  actor = User.get_cached_by_ap_id(data["actor"])

  # Keep addresses that do not resolve to a known user (collections, the
  # public URI, …); for known users, deliver only if they follow the
  # announcing actor.
  recipients =
    (to ++ cc)
    |> Enum.filter(fn recipient ->
      case User.get_cached_by_ap_id(recipient) do
        nil ->
          true

        user ->
          User.following?(user, actor)
      end
    end)

  {recipients, to, cc}
end

# Create activities also address their own actor, so the author receives
# their own post.
defp get_recipients(%{"type" => "Create"} = data) do
  to = data["to"] || []
  cc = data["cc"] || []
  # NOTE(review): a missing "actor" key defaults to [], which would append an
  # empty list as a recipient entry — presumably "actor" is always present on
  # Create activities; confirm with the callers.
  actor = data["actor"] || []
  recipients = (to ++ cc ++ [actor]) |> Enum.uniq()
  {recipients, to, cc}
end

# Fallback for all other activity types: recipients are simply to ++ cc.
defp get_recipients(data) do
  to = data["to"] || []
  cc = data["cc"] || []
  recipients = to ++ cc
  {recipients, to, cc}
end
|
|
|
|
|
2018-05-14 01:28:56 +02:00
|
|
|
# Checks whether the activity's actor is allowed to produce new activities.
# Returns :reject when the actor resolves to a deactivated local record,
# :ok otherwise (including a nil actor, or an actor we cannot resolve).
defp check_actor_is_active(actor) do
  if not is_nil(actor) do
    case User.get_cached_by_ap_id(actor) do
      # Deactivated users may not create new activities.
      %User{info: %{deactivated: true}} ->
        :reject

      # Active user — or an unresolvable actor. The previous `with`-based
      # version crashed on `nil.info` when the actor was unknown; treating
      # an unknown actor as acceptable restores the pre-check to a total
      # function instead of raising mid-insert.
      _ ->
        :ok
    end
  else
    # Activities without an actor are not subject to this check.
    :ok
  end
end
|
|
|
|
|
2019-01-26 03:50:49 +01:00
|
|
|
# Enforces the configured [:instance, :remote_limit] on the textual content
# length of incoming objects. Activities without textual content pass.
defp check_remote_limit(%{"object" => %{"content" => content}}) when not is_nil(content) do
  limit = Pleroma.Config.get([:instance, :remote_limit])
  # NOTE(review): if :remote_limit is unset, `limit` is nil and this
  # comparison relies on Erlang term ordering (number < atom), so every
  # length passes — effectively "no limit". Confirm that is intended.
  String.length(content) <= limit
end

defp check_remote_limit(_), do: true
|
|
|
|
|
2019-03-03 11:21:03 +01:00
|
|
|
# Bumps the actor's note counter when the object is publicly addressed;
# otherwise leaves the actor untouched while keeping the {:ok, actor} shape.
def increase_note_count_if_public(actor, object) do
  if is_public?(object) do
    User.increase_note_count(actor)
  else
    {:ok, actor}
  end
end
|
|
|
|
|
|
|
|
# Mirror of increase_note_count_if_public/2: decrements the counter for
# public objects, otherwise returns {:ok, actor} unchanged.
def decrease_note_count_if_public(actor, object) do
  if is_public?(object) do
    User.decrease_note_count(actor)
  else
    {:ok, actor}
  end
end
|
|
|
|
|
2017-05-02 10:47:04 +02:00
|
|
|
# Persists an activity map, running it through the full ingestion pipeline:
# default fields, actor activity check, remote content-size limit, MRF
# policies and object insertion. Returns {:ok, activity} or {:error, reason}.
# If an equivalent activity already exists it is returned without re-inserting.
def insert(map, local \\ true) when is_map(map) do
  # `Activity.normalize/1` returning nil means no matching activity exists
  # yet; an existing %Activity{} short-circuits to the `else` clause below.
  with nil <- Activity.normalize(map),
       map <- lazy_put_activity_defaults(map),
       :ok <- check_actor_is_active(map["actor"]),
       # Tag the boolean so the caller can distinguish this failure mode.
       {_, true} <- {:remote_limit_error, check_remote_limit(map)},
       {:ok, map} <- MRF.filter(map),
       :ok <- insert_full_object(map) do
    {recipients, _, _} = get_recipients(map)

    {:ok, activity} =
      Repo.insert(%Activity{
        data: map,
        local: local,
        actor: map["actor"],
        recipients: recipients
      })

    # Fire-and-forget rich-media prefetch; a failure here must not affect
    # the already-committed insert.
    Task.start(fn ->
      Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity)
    end)

    Notification.create_notifications(activity)
    stream_out(activity)
    {:ok, activity}
  else
    # Duplicate activity: behave idempotently.
    %Activity{} = activity -> {:ok, activity}
    error -> {:error, error}
  end
end
|
2017-03-21 17:53:20 +01:00
|
|
|
|
2017-11-19 13:47:50 +01:00
|
|
|
# Pushes a freshly inserted activity onto the relevant streaming channels
# ("user", "list", "public", hashtag and media variants, or "direct").
# Only Create/Announce/Delete activities are streamed at all.
def stream_out(activity) do
  public = "https://www.w3.org/ns/activitystreams#Public"

  if activity.data["type"] in ["Create", "Announce", "Delete"] do
    Pleroma.Web.Streamer.stream("user", activity)
    Pleroma.Web.Streamer.stream("list", activity)

    if Enum.member?(activity.data["to"], public) do
      Pleroma.Web.Streamer.stream("public", activity)

      if activity.local do
        Pleroma.Web.Streamer.stream("public:local", activity)
      end

      # Hashtag and media channels only apply to Create activities.
      if activity.data["type"] in ["Create"] do
        # Only plain string tags are hashtags; emoji tags are maps.
        activity.data["object"]
        |> Map.get("tag", [])
        |> Enum.filter(fn tag -> is_bitstring(tag) end)
        |> Enum.each(fn tag -> Pleroma.Web.Streamer.stream("hashtag:" <> tag, activity) end)

        if activity.data["object"]["attachment"] != [] do
          Pleroma.Web.Streamer.stream("public:media", activity)

          if activity.local do
            Pleroma.Web.Streamer.stream("public:local:media", activity)
          end
        end
      end
    else
      # Not public: it is "direct" only if it is also not addressed to the
      # actor's followers collection.
      # NOTE(review): `User.get_by_ap_id/1` returning nil would crash on
      # `.follower_address` here — presumably the actor always exists by
      # the time an activity is streamed; confirm.
      if !Enum.member?(activity.data["cc"] || [], public) &&
           !Enum.member?(
             activity.data["to"],
             User.get_by_ap_id(activity.data["actor"]).follower_address
           ),
         do: Pleroma.Web.Streamer.stream("direct", activity)
    end
  end
end
|
|
|
|
|
2018-02-15 19:59:03 +01:00
|
|
|
# Builds and inserts a Create activity wrapping `object`, bumps the author's
# note count for public posts, and enqueues federation. Returns {:ok, activity}
# or falls through with the failing step's return value.
def create(%{to: to, actor: actor, context: context, object: object} = params) do
  additional = params[:additional] || %{}
  # only accept false as false value
  local = !(params[:local] == false)
  published = params[:published]

  with create_data <-
         make_create_data(
           %{to: to, actor: actor, published: published, context: context, object: object},
           additional
         ),
       {:ok, activity} <- insert(create_data, local),
       # Changing note count prior to enqueuing federation task in order to avoid
       # race conditions on updating user.info
       {:ok, _actor} <- increase_note_count_if_public(actor, activity),
       :ok <- maybe_federate(activity) do
    {:ok, activity}
  end
end
|
2017-03-21 17:53:20 +01:00
|
|
|
|
2018-02-17 16:08:55 +01:00
|
|
|
# Creates and (maybe) federates an Accept activity for `object`
# (typically a Follow), authored by `actor`.
def accept(%{to: to, actor: actor, object: object} = params) do
  # Anything but an explicit `false` counts as a local activity.
  local = params[:local] != false

  data = %{"to" => to, "type" => "Accept", "actor" => actor.ap_id, "object" => object}

  with {:ok, activity} <- insert(data, local),
       :ok <- maybe_federate(activity) do
    {:ok, activity}
  end
end
|
|
|
|
|
2018-05-26 14:07:46 +02:00
|
|
|
# Creates and (maybe) federates a Reject activity for `object`
# (typically a Follow), authored by `actor`.
def reject(%{to: to, actor: actor, object: object} = params) do
  # Anything but an explicit `false` counts as a local activity.
  local = params[:local] != false

  data = %{"to" => to, "type" => "Reject", "actor" => actor.ap_id, "object" => object}

  with {:ok, activity} <- insert(data, local),
       :ok <- maybe_federate(activity) do
    {:ok, activity}
  end
end
|
|
|
|
|
2018-02-25 16:14:25 +01:00
|
|
|
# Creates and (maybe) federates an Update activity carrying the new
# version of `object`.
def update(%{to: to, cc: cc, actor: actor, object: object} = params) do
  # Anything but an explicit `false` counts as a local activity.
  local = params[:local] != false

  data = %{
    "to" => to,
    "cc" => cc,
    "type" => "Update",
    "actor" => actor,
    "object" => object
  }

  with {:ok, activity} <- insert(data, local),
       :ok <- maybe_federate(activity) do
    {:ok, activity}
  end
end
|
|
|
|
|
2017-05-16 15:31:11 +02:00
|
|
|
# TODO: This is weird, maybe we shouldn't check here if we can make the activity.
# Likes `object` on behalf of `user`: inserts a Like activity, records the
# like on the object, and federates. Idempotent — an existing like for this
# user/object pair is returned as-is.
def like(
      %User{ap_id: ap_id} = user,
      %Object{data: %{"id" => _}} = object,
      activity_id \\ nil,
      local \\ true
    ) do
  # A pre-existing like short-circuits to the `else` clause below.
  with nil <- get_existing_like(ap_id, object),
       like_data <- make_like_data(user, object, activity_id),
       {:ok, activity} <- insert(like_data, local),
       {:ok, object} <- add_like_to_object(activity, object),
       :ok <- maybe_federate(activity) do
    {:ok, activity, object}
  else
    %Activity{} = activity -> {:ok, activity, object}
    error -> {:error, error}
  end
end
|
|
|
|
|
2018-05-19 15:22:43 +02:00
|
|
|
# Undoes a previous like: inserts an Undo activity, deletes the original
# Like activity, and removes it from the object's like collection. If no
# like exists, returns {:ok, object} unchanged.
def unlike(
      %User{} = actor,
      %Object{} = object,
      activity_id \\ nil,
      local \\ true
    ) do
  with %Activity{} = like_activity <- get_existing_like(actor.ap_id, object),
       unlike_data <- make_unlike_data(actor, like_activity, activity_id),
       {:ok, unlike_activity} <- insert(unlike_data, local),
       {:ok, _activity} <- Repo.delete(like_activity),
       {:ok, object} <- remove_like_from_object(like_activity, object),
       :ok <- maybe_federate(unlike_activity) do
    {:ok, unlike_activity, like_activity, object}
  else
    # No existing like (or a failed step): report the object unchanged.
    _e -> {:ok, object}
  end
end
|
|
|
|
|
2018-03-30 15:01:53 +02:00
|
|
|
# Announces (boosts) a public object on behalf of `user`. `public` controls
# the addressing of the generated Announce activity.
def announce(
      %User{ap_id: _} = user,
      %Object{data: %{"id" => _}} = object,
      activity_id \\ nil,
      local \\ true,
      public \\ true
    ) do
  # Only public objects may be announced; a non-public object makes the
  # first clause yield `false`, which lands in `else` as {:error, false}.
  with true <- is_public?(object),
       announce_data <- make_announce_data(user, object, activity_id, public),
       {:ok, activity} <- insert(announce_data, local),
       {:ok, object} <- add_announce_to_object(activity, object),
       :ok <- maybe_federate(activity) do
    {:ok, activity, object}
  else
    error -> {:error, error}
  end
end
|
|
|
|
|
2018-04-23 03:28:51 +02:00
|
|
|
# Undoes a previous announce: inserts an Undo activity, federates it,
# deletes the original Announce and removes it from the object. If no
# announce exists, returns {:ok, object} unchanged.
def unannounce(
      %User{} = actor,
      %Object{} = object,
      activity_id \\ nil,
      local \\ true
    ) do
  with %Activity{} = announce_activity <- get_existing_announce(actor.ap_id, object),
       unannounce_data <- make_unannounce_data(actor, announce_activity, activity_id),
       {:ok, unannounce_activity} <- insert(unannounce_data, local),
       # Federate the Undo before the Announce row disappears.
       :ok <- maybe_federate(unannounce_activity),
       {:ok, _activity} <- Repo.delete(announce_activity),
       {:ok, object} <- remove_announce_from_object(announce_activity, object) do
    {:ok, unannounce_activity, object}
  else
    _e -> {:ok, object}
  end
end
|
|
|
|
|
2017-05-16 15:31:11 +02:00
|
|
|
# Creates and (maybe) federates a Follow activity from `follower`
# towards `followed`.
def follow(follower, followed, activity_id \\ nil, local \\ true) do
  data = make_follow_data(follower, followed, activity_id)

  with {:ok, activity} <- insert(data, local),
       :ok <- maybe_federate(activity) do
    {:ok, activity}
  end
end
|
|
|
|
|
2018-05-21 03:01:14 +02:00
|
|
|
# Cancels an existing follow: marks the latest Follow activity as
# "cancelled", then inserts and federates an Undo activity for it.
# Falls through with the failing step's value if no follow exists.
def unfollow(follower, followed, activity_id \\ nil, local \\ true) do
  with %Activity{} = follow_activity <- fetch_latest_follow(follower, followed),
       {:ok, follow_activity} <- update_follow_state(follow_activity, "cancelled"),
       unfollow_data <- make_unfollow_data(follower, followed, follow_activity, activity_id),
       {:ok, activity} <- insert(unfollow_data, local),
       :ok <- maybe_federate(activity) do
    {:ok, activity}
  end
end
|
|
|
|
|
2017-09-04 18:47:33 +02:00
|
|
|
# Deletes an object: tombstones it via Object.delete/1, inserts a Delete
# activity addressed to the object's original audience, adjusts the
# author's note count, and federates.
def delete(%Object{data: %{"id" => id, "actor" => actor}} = object, local \\ true) do
  user = User.get_cached_by_ap_id(actor)
  # Address the Delete to everyone the original object was addressed to.
  to = (object.data["to"] || []) ++ (object.data["cc"] || [])

  with {:ok, object, activity} <- Object.delete(object),
       data <- %{
         "type" => "Delete",
         "actor" => actor,
         "object" => id,
         "to" => to,
         # `activity` may be nil when no Create activity was found.
         "deleted_activity_id" => activity && activity.id
       },
       {:ok, activity} <- insert(data, local),
       # Changing note count prior to enqueuing federation task in order to avoid
       # race conditions on updating user.info
       {:ok, _actor} <- decrease_note_count_if_public(user, object),
       :ok <- maybe_federate(activity) do
    {:ok, activity}
  end
end
|
|
|
|
|
2018-05-21 03:01:14 +02:00
|
|
|
# Blocks `blocked` on behalf of `blocker`. Depending on :activitypub config:
# - :unfollow_blocked — also cancel any existing follow first;
# - :outgoing_blocks — actually create/federate the Block activity.
# Returns {:ok, activity}, or {:ok, nil} when outgoing blocks are disabled.
def block(blocker, blocked, activity_id \\ nil, local \\ true) do
  ap_config = Application.get_env(:pleroma, :activitypub)
  unfollow_blocked = Keyword.get(ap_config, :unfollow_blocked)
  outgoing_blocks = Keyword.get(ap_config, :outgoing_blocks)

  # `with true <-` used as an if: only runs when unfollow_blocked is true.
  with true <- unfollow_blocked do
    follow_activity = fetch_latest_follow(blocker, blocked)

    if follow_activity do
      unfollow(blocker, blocked, nil, local)
    end
  end

  with true <- outgoing_blocks,
       block_data <- make_block_data(blocker, blocked, activity_id),
       {:ok, activity} <- insert(block_data, local),
       :ok <- maybe_federate(activity) do
    {:ok, activity}
  else
    # Outgoing blocks disabled (or insert/federation failed): local-only block.
    _e -> {:ok, nil}
  end
end
|
|
|
|
|
2018-05-21 03:01:14 +02:00
|
|
|
# Undoes an existing block by inserting and federating an Undo activity
# for the latest Block between the two users. Falls through with
# fetch_latest_block/2's return value when no block exists.
def unblock(blocker, blocked, activity_id \\ nil, local \\ true) do
  case fetch_latest_block(blocker, blocked) do
    %Activity{} = block_activity ->
      data = make_unblock_data(blocker, blocked, block_activity, activity_id)

      with {:ok, activity} <- insert(data, local),
           :ok <- maybe_federate(activity) do
        {:ok, activity}
      end

    other ->
      other
  end
end
|
|
|
|
|
2019-02-20 17:51:25 +01:00
|
|
|
# Creates a Flag (report) activity against `account` and the given statuses,
# optionally forwarding it to the reported account's server, and notifies
# all local superusers by email.
def flag(
      %{
        actor: actor,
        context: context,
        account: account,
        statuses: statuses,
        content: content
      } = params
    ) do
  # only accept false as false value
  local = !(params[:local] == false)
  # Forwarding to the remote instance is on unless explicitly disabled.
  forward = !(params[:forward] == false)

  additional = params[:additional] || %{}

  # Rebuild params with only the fields make_flag_data/2 consumes.
  params = %{
    actor: actor,
    context: context,
    account: account,
    statuses: statuses,
    content: content
  }

  # When forwarding, cc the reported account so its server receives the flag.
  additional =
    if forward do
      Map.merge(additional, %{"to" => [], "cc" => [account.ap_id]})
    else
      Map.merge(additional, %{"to" => [], "cc" => []})
    end

  with flag_data <- make_flag_data(params, additional),
       {:ok, activity} <- insert(flag_data, local),
       :ok <- maybe_federate(activity) do
    Enum.each(User.all_superusers(), fn superuser ->
      superuser
      |> Pleroma.AdminEmail.report(actor, account, statuses, content)
      |> Pleroma.Mailer.deliver_async()
    end)

    {:ok, activity}
  end
end
|
|
|
|
|
2017-11-02 22:44:36 +01:00
|
|
|
# Fetches all Create activities in a conversation context, restricted to
# what opts["user"] may see (their own, followed, and public activities),
# newest first.
def fetch_activities_for_context(context, opts \\ %{}) do
  public = ["https://www.w3.org/ns/activitystreams#Public"]

  recipients =
    if opts["user"], do: [opts["user"].ap_id | opts["user"].following] ++ public, else: public

  query = from(activity in Activity)

  query =
    query
    |> restrict_blocked(opts)
    |> restrict_recipients(recipients, opts["user"])

  query =
    from(
      activity in query,
      where:
        fragment(
          "?->>'type' = ? and ?->>'context' = ?",
          activity.data,
          "Create",
          activity.data,
          ^context
        ),
      order_by: [desc: :id]
    )

  Repo.all(query)
end
|
|
|
|
|
2017-03-21 20:31:48 +01:00
|
|
|
# Fetches the public timeline: activities addressed to the public
# collection, excluding unlisted ones, in chronological order.
def fetch_public_activities(opts \\ %{}) do
  ["https://www.w3.org/ns/activitystreams#Public"]
  |> fetch_activities_query(opts)
  |> restrict_unlisted()
  |> Repo.all()
  |> Enum.reverse()
end
|
|
|
|
|
2018-05-11 04:17:33 +02:00
|
|
|
@valid_visibilities ~w[direct unlisted public private]

# Restricts the query to the given visibilities, using the
# activity_visibility/3 SQL function. Accepts either a single visibility
# string or a list of them; invalid values are logged and the query is
# returned unfiltered so callers piping through this function still get a
# valid queryable. (Previously the error branches returned the :ok from
# Logger.error/1 into the pipeline, and two leftover debug
# Ecto.Adapters.SQL.to_sql/3 calls ran — and were discarded — on every
# request; both removed.)
defp restrict_visibility(query, %{visibility: visibility})
     when is_list(visibility) do
  if Enum.all?(visibility, &(&1 in @valid_visibilities)) do
    from(
      a in query,
      where:
        fragment(
          "activity_visibility(?, ?, ?) = ANY (?)",
          a.actor,
          a.recipients,
          a.data,
          ^visibility
        )
    )
  else
    Logger.error("Could not restrict visibility to #{visibility}")
    query
  end
end

defp restrict_visibility(query, %{visibility: visibility})
     when visibility in @valid_visibilities do
  from(
    a in query,
    where:
      fragment("activity_visibility(?, ?, ?) = ?", a.actor, a.recipients, a.data, ^visibility)
  )
end

defp restrict_visibility(query, %{visibility: visibility})
     when visibility not in @valid_visibilities do
  Logger.error("Could not restrict visibility to #{visibility}")
  query
end

defp restrict_visibility(query, _visibility), do: query
|
|
|
|
|
2018-05-20 16:15:18 +02:00
|
|
|
# Fetches `user`'s posts and boosts as visible to `reading_user` (or only
# public ones when reading_user is nil), in chronological order.
def fetch_user_activities(user, reading_user, params \\ %{}) do
  params =
    params
    |> Map.put("type", ["Create", "Announce"])
    |> Map.put("actor_id", user.ap_id)
    |> Map.put("whole_db", true)
    # Needed so restrict_pinned/2 can filter when params["pinned"] is set.
    |> Map.put("pinned_activity_ids", user.info.pinned_activities)

  recipients =
    if reading_user do
      ["https://www.w3.org/ns/activitystreams#Public"] ++
        [reading_user.ap_id | reading_user.following]
    else
      ["https://www.w3.org/ns/activitystreams#Public"]
    end

  fetch_activities(recipients, params)
  |> Enum.reverse()
end
|
|
|
|
|
2019-01-15 16:39:23 +01:00
|
|
|
# An empty since_id (as sent by some clients) is treated as absent.
defp restrict_since(query, %{"since_id" => ""}), do: query

defp restrict_since(query, %{"since_id" => since_id}),
  do: from(a in query, where: a.id > ^since_id)

defp restrict_since(query, _), do: query
|
2017-03-21 17:53:20 +01:00
|
|
|
|
2019-01-10 16:44:28 +01:00
|
|
|
# Excludes activities whose object carries any of the given hashtags
# (jsonb `?|` on data #> '{"object","tag"}').
defp restrict_tag_reject(query, %{"tag_reject" => tag_reject})
     when is_list(tag_reject) and tag_reject != [] do
  from(
    activity in query,
    where: fragment(~s(\(not \(? #> '{"object","tag"}'\) \\?| ?\)), activity.data, ^tag_reject)
  )
end

defp restrict_tag_reject(query, _), do: query
|
|
|
|
|
|
|
|
# Keeps only activities whose object carries ALL of the given hashtags
# (jsonb `?&` on data #> '{"object","tag"}').
defp restrict_tag_all(query, %{"tag_all" => tag_all})
     when is_list(tag_all) and tag_all != [] do
  from(
    activity in query,
    where: fragment(~s(\(? #> '{"object","tag"}'\) \\?& ?), activity.data, ^tag_all)
  )
end

defp restrict_tag_all(query, _), do: query
|
|
|
|
|
2018-12-21 18:24:13 +01:00
|
|
|
# Keeps activities whose object carries ANY of the given hashtags
# (jsonb `?|` for a list of tags).
defp restrict_tag(query, %{"tag" => tag}) when is_list(tag) do
  from(
    activity in query,
    where: fragment(~s(\(? #> '{"object","tag"}'\) \\?| ?), activity.data, ^tag)
  )
end

# Single-tag variant: jsonb containment (`<@`) against the tag array.
defp restrict_tag(query, %{"tag" => tag}) when is_binary(tag) do
  from(
    activity in query,
    where: fragment(~s(? <@ (? #> '{"object","tag"}'\)), ^tag, activity.data)
  )
end

defp restrict_tag(query, _), do: query
|
|
|
|
|
2018-08-29 10:51:23 +02:00
|
|
|
# Keeps activities whose "to" overlaps recipients_to OR whose "cc"
# overlaps recipients_cc (jsonb `?|` existence-of-any operator).
defp restrict_to_cc(query, recipients_to, recipients_cc) do
  from(
    activity in query,
    where:
      fragment(
        "(?->'to' \\?| ?) or (?->'cc' \\?| ?)",
        activity.data,
        ^recipients_to,
        activity.data,
        ^recipients_cc
      )
  )
end
|
|
|
|
|
2018-05-04 23:16:02 +02:00
|
|
|
# An empty recipients list means no recipient restriction.
defp restrict_recipients(query, [], _user), do: query

# No user: only match on the precomputed recipients column (`&&` = overlap).
defp restrict_recipients(query, recipients, nil) do
  from(activity in query, where: fragment("? && ?", ^recipients, activity.recipients))
end

# With a user: also include that user's own activities.
defp restrict_recipients(query, recipients, user) do
  from(
    activity in query,
    where: fragment("? && ?", ^recipients, activity.recipients),
    or_where: activity.actor == ^user.ap_id
  )
end
|
2017-03-24 00:09:08 +01:00
|
|
|
|
2018-03-22 05:47:18 +01:00
|
|
|
# Applies an explicit result limit when one was requested.
defp restrict_limit(query, %{"limit" => limit}), do: from(a in query, limit: ^limit)

defp restrict_limit(query, _), do: query
|
|
|
|
|
2017-05-16 15:31:11 +02:00
|
|
|
# Keeps only locally created activities when local_only is requested.
defp restrict_local(query, %{"local_only" => true}),
  do: from(a in query, where: a.local == true)

defp restrict_local(query, _), do: query
|
2017-04-15 12:11:20 +02:00
|
|
|
|
2019-01-15 16:39:23 +01:00
|
|
|
# An empty max_id (as sent by some clients) is treated as absent.
defp restrict_max(query, %{"max_id" => ""}), do: query

defp restrict_max(query, %{"max_id" => max_id}),
  do: from(a in query, where: a.id < ^max_id)

defp restrict_max(query, _), do: query
|
2017-05-07 19:28:23 +02:00
|
|
|
|
2017-05-16 15:31:11 +02:00
|
|
|
# Restricts to activities from a single actor (AP id).
defp restrict_actor(query, %{"actor_id" => actor_id}),
  do: from(a in query, where: a.actor == ^actor_id)

defp restrict_actor(query, _), do: query
|
2017-05-07 19:28:23 +02:00
|
|
|
|
2017-09-17 14:20:54 +02:00
|
|
|
# Restricts by activity type: a single type as a string…
defp restrict_type(query, %{"type" => type}) when is_binary(type),
  do: from(a in query, where: fragment("?->>'type' = ?", a.data, ^type))

# …or a list of acceptable types.
defp restrict_type(query, %{"type" => type}),
  do: from(a in query, where: fragment("?->>'type' = ANY(?)", a.data, ^type))

defp restrict_type(query, _), do: query
|
|
|
|
|
2017-09-17 13:09:49 +02:00
|
|
|
# Keeps activities whose object's "likes" array contains `ap_id`
# (jsonb containment `<@`).
defp restrict_favorited_by(query, %{"favorited_by" => ap_id}) do
  from(
    activity in query,
    where: fragment(~s(? <@ (? #> '{"object","likes"}'\)), ^ap_id, activity.data)
  )
end

defp restrict_favorited_by(query, _), do: query
|
|
|
|
|
2017-11-14 14:50:23 +01:00
|
|
|
# Keeps only activities whose object has a non-empty attachment array.
defp restrict_media(query, %{"only_media" => val}) when val == "true" or val == "1" do
  from(
    activity in query,
    where: fragment(~s(not (? #> '{"object","attachment"}' = ?\)), activity.data, ^[])
  )
end

defp restrict_media(query, _), do: query
|
|
|
|
|
2018-06-18 05:18:39 +02:00
|
|
|
# Drops replies (objects with an inReplyTo) when exclude_replies is set.
defp restrict_replies(query, %{"exclude_replies" => val}) when val in ["true", "1"] do
  from(
    a in query,
    where: fragment("?->'object'->>'inReplyTo' is null", a.data)
  )
end

defp restrict_replies(query, _), do: query
|
|
|
|
|
2018-12-27 06:30:01 +01:00
|
|
|
# Drops Announce (boost) activities when exclude_reblogs is set.
defp restrict_reblogs(query, %{"exclude_reblogs" => val}) when val in ["true", "1"],
  do: from(a in query, where: fragment("?->>'type' != 'Announce'", a.data))

defp restrict_reblogs(query, _), do: query
|
|
|
|
|
2019-02-27 16:37:42 +01:00
|
|
|
# with_muted explicitly disables mute filtering.
defp restrict_muted(query, %{"with_muted" => val}) when val in [true, "true", "1"], do: query

# Hides activities authored by or addressed to muted users.
defp restrict_muted(query, %{"muting_user" => %User{info: info}}) do
  mutes = info.mutes

  from(
    activity in query,
    where: fragment("not (? = ANY(?))", activity.actor, ^mutes),
    where: fragment("not (?->'to' \\?| ?)", activity.data, ^mutes)
  )
end

defp restrict_muted(query, _), do: query
|
|
|
|
|
2017-11-02 22:47:11 +01:00
|
|
|
# Hides activities authored by or addressed to blocked users, and
# activities from actors on blocked domains (matched on the host part
# of the actor AP id via split_part on '/').
defp restrict_blocked(query, %{"blocking_user" => %User{info: info}}) do
  blocks = info.blocks || []
  domain_blocks = info.domain_blocks || []

  from(
    activity in query,
    where: fragment("not (? = ANY(?))", activity.actor, ^blocks),
    where: fragment("not (?->'to' \\?| ?)", activity.data, ^blocks),
    where: fragment("not (split_part(?, '/', 3) = ANY(?))", activity.actor, ^domain_blocks)
  )
end

defp restrict_blocked(query, _), do: query
|
|
|
|
|
2018-04-29 04:53:19 +02:00
|
|
|
# Excludes unlisted activities: those cc'ing (rather than to'ing) the
# public collection. A missing "cc" coalesces to an empty jsonb object.
defp restrict_unlisted(query) do
  from(
    activity in query,
    where:
      fragment(
        "not (coalesce(?->'cc', '{}'::jsonb) \\?| ?)",
        activity.data,
        ^["https://www.w3.org/ns/activitystreams#Public"]
      )
  )
end
|
|
|
|
|
2019-01-07 14:45:33 +01:00
|
|
|
# When only pinned posts are requested, restrict to the given activity ids
# (supplied by the caller from user.info.pinned_activities).
defp restrict_pinned(query, %{"pinned" => "true", "pinned_activity_ids" => ids}),
  do: from(a in query, where: a.id in ^ids)

defp restrict_pinned(query, _), do: query
|
|
|
|
|
2019-03-11 16:57:54 +01:00
|
|
|
# Hides Announce activities from actors whose reblogs the user has muted;
# their non-Announce activities still show.
defp restrict_muted_reblogs(query, %{"muting_user" => %User{info: info}}) do
  muted_reblogs = info.muted_reblogs || []

  from(
    activity in query,
    where: fragment("not ?->>'type' = 'Announce'", activity.data),
    where: fragment("not ? = ANY(?)", activity.actor, ^muted_reblogs)
  )
end

defp restrict_muted_reblogs(query, _), do: query
|
|
|
|
|
2018-02-18 15:32:11 +01:00
|
|
|
# Builds the base activity query (newest first, capped at 20 rows) and
# layers every opts-driven restriction on top of it. Each restrict_* step
# is a no-op unless its corresponding key is present in `opts`.
def fetch_activities_query(recipients, opts \\ %{}) do
  from(a in Activity,
    limit: 20,
    order_by: [fragment("? desc nulls last", a.id)]
  )
  |> restrict_recipients(recipients, opts["user"])
  |> restrict_tag(opts)
  |> restrict_tag_reject(opts)
  |> restrict_tag_all(opts)
  |> restrict_since(opts)
  |> restrict_local(opts)
  |> restrict_limit(opts)
  |> restrict_max(opts)
  |> restrict_actor(opts)
  |> restrict_type(opts)
  |> restrict_favorited_by(opts)
  |> restrict_blocked(opts)
  |> restrict_muted(opts)
  |> restrict_media(opts)
  |> restrict_visibility(opts)
  |> restrict_replies(opts)
  |> restrict_reblogs(opts)
  |> restrict_pinned(opts)
  |> restrict_muted_reblogs(opts)
end
|
|
|
|
|
|
|
|
# Runs the composed activity query and returns the results oldest-first
# (the query orders descending; the reverse restores chronological order).
def fetch_activities(recipients, opts \\ %{}) do
  recipients
  |> fetch_activities_query(opts)
  |> Repo.all()
  |> Enum.reverse()
end
|
|
|
|
|
2018-08-29 10:51:23 +02:00
|
|
|
# Like fetch_activities/2, but bounds the results to activities whose
# to/cc addressing intersects the given recipient lists.
def fetch_activities_bounded(recipients_to, recipients_cc, opts \\ %{}) do
  query = restrict_to_cc(fetch_activities_query([], opts), recipients_to, recipients_cc)

  query
  |> Repo.all()
  |> Enum.reverse()
end
|
|
|
|
|
2018-11-23 17:40:45 +01:00
|
|
|
# Stores an uploaded file and persists its metadata as an Object.
# When an :actor option is given it is recorded on the object data.
# Returns Upload.store/2's error tuple unchanged on failure.
def upload(file, opts \\ []) do
  with {:ok, data} <- Upload.store(file, opts) do
    obj_data = if actor = opts[:actor], do: Map.put(data, "actor", actor), else: data

    Repo.insert(%Object{data: obj_data})
  end
end
|
2017-12-12 18:07:14 +01:00
|
|
|
|
2018-02-25 16:14:25 +01:00
|
|
|
# Builds the Pleroma user attribute map from a raw ActivityPub actor object.
#
# Returns `{:ok, user_data}`. The nickname is `nil` for virtual actors
# that carry no "preferredUsername"; otherwise it is
# "preferredUsername@host-of-the-actor-id".
def user_data_from_user_object(data) do
  # Avatar/banner/locked are read from the raw object, before
  # maybe_fix_user_object/1 rewrites it below.
  avatar = image_object(data["icon"]["url"])
  banner = image_object(data["image"]["url"])
  locked = data["manuallyApprovesFollowers"] || false
  data = Transmogrifier.maybe_fix_user_object(data)

  user_data = %{
    ap_id: data["id"],
    info: %{
      "ap_enabled" => true,
      "source_data" => data,
      "banner" => banner,
      "locked" => locked
    },
    avatar: avatar,
    name: data["name"],
    follower_address: data["followers"],
    bio: data["summary"]
  }

  # nickname can be nil because of virtual actors
  nickname =
    if username = data["preferredUsername"] do
      "#{username}@#{URI.parse(data["id"]).host}"
    end

  {:ok, Map.put(user_data, :nickname, nickname)}
end

# Wraps a bare image URL in the AS2 "Image" object shape used for
# avatars and banners; falsy (nil) URLs pass through unchanged.
defp image_object(url) do
  url &&
    %{
      "type" => "Image",
      "url" => [%{"href" => url}]
    }
end
|
|
|
|
|
2018-02-21 22:21:40 +01:00
|
|
|
# Fetches a remote actor object by AP id and converts it to Pleroma
# user attributes. Returns `{:ok, user_data}` or `{:error, reason}`.
def fetch_and_prepare_user_from_ap_id(ap_id) do
  with {:ok, data} <- fetch_and_contain_remote_object_from_id(ap_id) do
    user_data_from_user_object(data)
  else
    e ->
      Logger.error("Could not decode user at fetch #{ap_id}, #{inspect(e)}")
      # Bug fix: previously the Logger.error/1 result (:ok) leaked out as
      # the return value, so callers matching {:ok, _} got a bare :ok and
      # lost the failure reason. Return an error tuple instead.
      {:error, e}
  end
end
|
|
|
|
|
|
|
|
# Creates (or upgrades) a local User record for the actor at `ap_id`.
# Existing users are upgraded via the Transmogrifier; otherwise the
# actor is fetched remotely and inserted.
def make_user_from_ap_id(ap_id) do
  if User.get_by_ap_id(ap_id) do
    Transmogrifier.upgrade_user_from_ap_id(ap_id)
  else
    case fetch_and_prepare_user_from_ap_id(ap_id) do
      {:ok, data} -> User.insert_or_update_user(data)
      e -> {:error, e}
    end
  end
end
|
2018-02-11 20:43:33 +01:00
|
|
|
|
2018-02-18 12:27:05 +01:00
|
|
|
# Resolves a nickname through WebFinger and creates the corresponding
# user from the discovered AP id, if any.
def make_user_from_nickname(nickname) do
  case WebFinger.finger(nickname) do
    {:ok, %{"ap_id" => ap_id}} when not is_nil(ap_id) -> make_user_from_ap_id(ap_id)
    _ -> {:error, "No AP id in WebFinger"}
  end
end
|
|
|
|
|
2018-06-07 05:26:41 +02:00
|
|
|
# Public activities federate everywhere; non-public payloads are withheld
# from hosts listed in the :quarantined_instances config.
def should_federate?(inbox, public) do
  if public do
    true
  else
    host = URI.parse(inbox).host
    quarantined = Pleroma.Config.get([:instance, :quarantined_instances], [])

    not Enum.member?(quarantined, host)
  end
end
|
|
|
|
|
2018-02-11 20:43:33 +01:00
|
|
|
# Queues signed HTTP delivery of `activity` (as `actor`) to every relevant
# remote inbox: the activity's addressed remote users plus — when it is
# addressed to the actor's followers collection — all remote followers.
def publish(actor, activity) do
  remote_followers =
    if actor.follower_address in activity.recipients do
      {:ok, followers} = User.get_followers(actor)
      followers |> Enum.filter(&(!&1.local))
    else
      []
    end

  # Non-public activities are filtered per-inbox by should_federate?/2 below.
  public = is_public?(activity)

  {:ok, data} = Transmogrifier.prepare_outgoing(activity.data)
  json = Jason.encode!(data)

  (Pleroma.Web.Salmon.remote_users(activity) ++ remote_followers)
  |> Enum.filter(fn user -> User.ap_enabled?(user) end)
  # Prefer a shared inbox when the actor advertises one in its endpoints;
  # otherwise fall back to the personal inbox.
  |> Enum.map(fn %{info: %{source_data: data}} ->
    (is_map(data["endpoints"]) && Map.get(data["endpoints"], "sharedInbox")) || data["inbox"]
  end)
  |> Enum.uniq()
  |> Enum.filter(fn inbox -> should_federate?(inbox, public) end)
  # filter_reachable/1 yields {inbox, unreachable_since} pairs; the
  # timestamp is forwarded so publish_one/1 can update reachability state.
  |> Instances.filter_reachable()
  |> Enum.each(fn {inbox, unreachable_since} ->
    Federator.publish_single_ap(%{
      inbox: inbox,
      json: json,
      actor: actor,
      id: activity.data["id"],
      unreachable_since: unreachable_since
    })
  end)
end
|
2018-02-17 21:56:33 +01:00
|
|
|
|
2019-02-03 10:41:27 +01:00
|
|
|
# Performs a single signed HTTP POST of `json` to `inbox`.
# `params` carries :inbox, :json, :actor, :id and optionally
# :unreachable_since (the instance's last-known unreachable timestamp,
# used for reachability bookkeeping). Returns the HTTP client result on
# success, {:error, response} otherwise.
def publish_one(%{inbox: inbox, json: json, actor: actor, id: id} = params) do
  Logger.info("Federating #{id} to #{inbox}")
  host = URI.parse(inbox).host

  # Body digest for the HTTP Signature ("Digest" header).
  digest = "SHA-256=" <> (:crypto.hash(:sha256, json) |> Base.encode64())

  # HTTP-date format expected by signature verification on the remote end.
  date =
    NaiveDateTime.utc_now()
    |> Timex.format!("{WDshort}, {0D} {Mshort} {YYYY} {h24}:{m}:{s} GMT")

  signature =
    Pleroma.Web.HTTPSignatures.sign(actor, %{
      host: host,
      "content-length": byte_size(json),
      digest: digest,
      date: date
    })

  # `result =` inside the `with` clause binds the full response so a
  # successful delivery can return it unchanged below.
  with {:ok, %{status: code}} when code in 200..299 <-
         result =
           @httpoison.post(
             inbox,
             json,
             [
               {"Content-Type", "application/activity+json"},
               {"Date", date},
               {"signature", signature},
               {"digest", digest}
             ]
           ) do
    # Mark the instance reachable when the caller isn't tracking
    # reachability (no :unreachable_since key) or it was previously
    # flagged unreachable.
    if !Map.has_key?(params, :unreachable_since) || params[:unreachable_since],
      do: Instances.set_reachable(inbox)

    result
  else
    {_post_result, response} ->
      # Only record unreachability on the transition — presumably to avoid
      # redundant writes when already flagged; NOTE(review): confirm intent.
      unless params[:unreachable_since], do: Instances.set_unreachable(inbox)
      {:error, response}
  end
end
|
|
|
|
|
2018-02-18 11:24:54 +01:00
|
|
|
# TODO:
# This will create a Create activity, which we need internally at the moment.
# Fetches an object by AP id, returning {:ok, object}. Serves it from the
# cache when available; otherwise fetches remotely, wraps the payload in a
# synthetic "Create" and feeds it through the normal incoming pipeline,
# falling back to OStatus fetching when the AP path fails.
def fetch_object_from_id(id) do
  if object = Object.get_cached_by_ap_id(id) do
    {:ok, object}
  else
    # `nil <- Object.normalize(data)` only proceeds when the object is not
    # already known locally; a known %Object{} falls through to `else`.
    with {:ok, data} <- fetch_and_contain_remote_object_from_id(id),
         nil <- Object.normalize(data),
         params <- %{
           "type" => "Create",
           "to" => data["to"],
           "cc" => data["cc"],
           "actor" => data["actor"] || data["attributedTo"],
           "object" => data
         },
         # Refuse cross-origin payloads (object id vs. fetch URL).
         :ok <- Transmogrifier.contain_origin(id, params),
         {:ok, activity} <- Transmogrifier.handle_incoming(params) do
      {:ok, Object.normalize(activity.data["object"])}
    else
      # MRF rejected the activity; propagate the rejection marker.
      {:error, {:reject, nil}} ->
        {:reject, nil}

      # Object turned out to already exist locally (see normalize above).
      object = %Object{} ->
        {:ok, object}

      _e ->
        Logger.info("Couldn't get object via AP, trying out OStatus fetching...")

        case OStatus.fetch_activity_from_url(id) do
          {:ok, [activity | _]} -> {:ok, Object.normalize(activity.data["object"])}
          e -> e
        end
    end
  end
end
|
2018-02-18 15:50:34 +01:00
|
|
|
|
2018-11-17 21:02:02 +01:00
|
|
|
# Fetches a remote AP object over HTTP, decodes it, and verifies that the
# payload's id stays on the origin of the fetch URL. Any failure along the
# way is wrapped as {:error, e}.
def fetch_and_contain_remote_object_from_id(id) do
  Logger.info("Fetching object #{id} via AP")

  headers = [{:Accept, "application/activity+json"}]

  with true <- String.starts_with?(id, "http"),
       {:ok, %{body: body, status: code}} when code in 200..299 <- @httpoison.get(id, headers),
       {:ok, data} <- Jason.decode(body),
       :ok <- Transmogrifier.contain_origin_from_id(id, data) do
    {:ok, data}
  else
    e -> {:error, e}
  end
end
|
|
|
|
|
2018-10-26 08:16:51 +02:00
|
|
|
# Filter out broken threads: keep an activity only when the entire thread
# it belongs to is visible to `user`.
def contain_broken_threads(%Activity{} = activity, %User{} = user),
  do: entire_thread_visible_for_user?(activity, user)
|
|
|
|
|
|
|
|
# Post-processing for a single activity; currently just the broken-thread
# containment check.
def contain_activity(%Activity{} = activity, %User{} = user),
  do: contain_broken_threads(activity, user)
|
|
|
|
|
|
|
|
# Post-processing for a whole timeline: keeps only activities that pass
# contain_activity/2 for the given user.
def contain_timeline(timeline, user) do
  Enum.filter(timeline, &contain_activity(&1, user))
end
|
2017-03-21 09:21:52 +01:00
|
|
|
end
|