defmodule Pleroma.Web.ActivityPub.ActivityPub do
  alias Pleroma.{Activity, Repo, Object, Upload, User, Notification}
  alias Pleroma.Web.ActivityPub.Transmogrifier
  alias Pleroma.Web.WebFinger
  alias Pleroma.Web.Federator
  alias Pleroma.Web.OStatus
  import Ecto.Query
  import Pleroma.Web.ActivityPub.Utils
  require Logger

  @httpoison Application.get_env(:pleroma, :httpoison)
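
  # An activity's recipients are everyone listed in its "to" and "cc" fields.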
  def get_recipients(data) do
    (data["to"] || []) ++ (data["cc"] || [])
  end
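
  # Inserts an activity unless one with the same AP id already exists, filling in defaults
  # and storing the full object, then creates notifications and streams the activity out.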
  def insert(map, local \\ true) when is_map(map) do
    with nil <- Activity.get_by_ap_id(map["id"]),
         map <- lazy_put_activity_defaults(map),
         :ok <- insert_full_object(map) do
      {:ok, activity} = Repo.insert(%Activity{data: map, local: local, actor: map["actor"], recipients: get_recipients(map)})
      Notification.create_notifications(activity)
      stream_out(activity)
      {:ok, activity}
    else
      %Activity{} = activity -> {:ok, activity}
      error -> {:error, error}
    end
  end
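
  # Pushes Create and Announce activities to the streaming channels: always to "user",
  # and to "public" (plus "public:local" for local activities) when addressed to the
  # public collection.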
  def stream_out(activity) do
    if activity.data["type"] in ["Create", "Announce"] do
      Pleroma.Web.Streamer.stream("user", activity)

      if Enum.member?(activity.data["to"], "https://www.w3.org/ns/activitystreams#Public") do
        Pleroma.Web.Streamer.stream("public", activity)

        if activity.local do
          Pleroma.Web.Streamer.stream("public:local", activity)
        end
      end
    end
  end
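
  # Wraps the given object in a Create activity, inserts it and federates it when appropriate.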
  def create(%{to: to, actor: actor, context: context, object: object} = params) do
    additional = params[:additional] || %{}
    # only accept false as false value
    local = !(params[:local] == false)
    published = params[:published]

    with create_data <- make_create_data(%{to: to, actor: actor, published: published, context: context, object: object}, additional),
         {:ok, activity} <- insert(create_data, local),
         :ok <- maybe_federate(activity) do
      {:ok, activity}
    end
  end
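
  # Builds and inserts an Accept activity (e.g. accepting a follow request) and federates it when appropriate.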
  def accept(%{to: to, actor: actor, object: object} = params) do
    # only accept false as false value
    local = !(params[:local] == false)

    with data <- %{"to" => to, "type" => "Accept", "actor" => actor, "object" => object},
         {:ok, activity} <- insert(data, local),
         :ok <- maybe_federate(activity) do
      {:ok, activity}
    end
  end
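
  # Likes an object on behalf of the user, unless an existing like by this actor is found.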
  # TODO: This is weird, maybe we shouldn't check here if we can make the activity.
  def like(%User{ap_id: ap_id} = user, %Object{data: %{"id" => _}} = object, activity_id \\ nil, local \\ true) do
    with nil <- get_existing_like(ap_id, object),
         like_data <- make_like_data(user, object, activity_id),
         {:ok, activity} <- insert(like_data, local),
         {:ok, object} <- add_like_to_object(activity, object),
         :ok <- maybe_federate(activity) do
      {:ok, activity, object}
    else
      %Activity{} = activity -> {:ok, activity, object}
      error -> {:error, error}
    end
  end
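
  # Removes the user's existing like, if any, deleting the Like activity and updating the
  # object's like collection.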
  def unlike(%User{} = actor, %Object{} = object) do
    with %Activity{} = activity <- get_existing_like(actor.ap_id, object),
         {:ok, _activity} <- Repo.delete(activity),
         {:ok, object} <- remove_like_from_object(activity, object) do
      {:ok, object}
    else
      _e -> {:ok, object}
    end
  end
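
  # Announces (repeats) an object, which is only allowed for public objects.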
  def announce(%User{ap_id: _} = user, %Object{data: %{"id" => _}} = object, activity_id \\ nil, local \\ true) do
    with true <- is_public?(object),
         announce_data <- make_announce_data(user, object, activity_id),
         {:ok, activity} <- insert(announce_data, local),
         {:ok, object} <- add_announce_to_object(activity, object),
         :ok <- maybe_federate(activity) do
      {:ok, activity, object}
    else
      error -> {:error, error}
    end
  end
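
  # Builds and inserts a Follow activity and federates it when appropriate.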
  def follow(follower, followed, activity_id \\ nil, local \\ true) do
    with data <- make_follow_data(follower, followed, activity_id),
         {:ok, activity} <- insert(data, local),
         :ok <- maybe_federate(activity) do
      {:ok, activity}
    end
  end
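
  # Undoes the latest Follow of the given account and federates the result when appropriate.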
  def unfollow(follower, followed, local \\ true) do
    with %Activity{} = follow_activity <- fetch_latest_follow(follower, followed),
         unfollow_data <- make_unfollow_data(follower, followed, follow_activity),
         {:ok, activity} <- insert(unfollow_data, local),
         :ok <- maybe_federate(activity) do
      {:ok, activity}
    end
  end
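
  # Deletes the object and all non-Create activities referencing it, then inserts a Delete
  # activity addressed to the author's followers and the public collection.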
  def delete(%Object{data: %{"id" => id, "actor" => actor}} = object, local \\ true) do
    user = User.get_cached_by_ap_id(actor)

    data = %{
      "type" => "Delete",
      "actor" => actor,
      "object" => id,
      "to" => [user.follower_address, "https://www.w3.org/ns/activitystreams#Public"]
    }

    with Repo.delete(object),
         Repo.delete_all(Activity.all_non_create_by_object_ap_id_q(id)),
         {:ok, activity} <- insert(data, local),
         :ok <- maybe_federate(activity) do
      {:ok, activity}
    end
  end
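
  # Fetches the Create activities in the given context, restricted to recipients visible to
  # the optional "user" in opts.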
  def fetch_activities_for_context(context, opts \\ %{}) do
    public = ["https://www.w3.org/ns/activitystreams#Public"]
    recipients = if opts["user"], do: [opts["user"].ap_id | opts["user"].following] ++ public, else: public

    query = from activity in Activity

    query =
      query
      |> restrict_blocked(opts)
      |> restrict_recipients(recipients, opts["user"])

    query = from activity in query,
      where: fragment("?->>'type' = ? and ?->>'context' = ?", activity.data, "Create", activity.data, ^context),
      order_by: [desc: :id]

    Repo.all(query)
  end
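
  # Fetches activities addressed to the public collection.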
  def fetch_public_activities(opts \\ %{}) do
    public = %{to: ["https://www.w3.org/ns/activitystreams#Public"]}

    q = fetch_activities_query([], opts)
    q = from activity in q,
      where: fragment(~s(? @> ?), activity.data, ^public)

    q
    |> Repo.all
    |> Enum.reverse
  end
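
  # The restrict_* helpers below narrow an activity query according to the passed options.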
  defp restrict_since(query, %{"since_id" => since_id}) do
    from activity in query, where: activity.id > ^since_id
  end
  defp restrict_since(query, _), do: query

  defp restrict_tag(query, %{"tag" => tag}) do
    from activity in query,
      where: fragment("? <@ (? #> '{\"object\",\"tag\"}')", ^tag, activity.data)
  end
  defp restrict_tag(query, _), do: query

  defp restrict_recipients(query, [], _user), do: query
  defp restrict_recipients(query, recipients, nil) do
    from activity in query,
      where: fragment("? && ?", ^recipients, activity.recipients)
  end
  defp restrict_recipients(query, recipients, user) do
    from activity in query,
      where: fragment("? && ?", ^recipients, activity.recipients),
      or_where: activity.actor == ^user.ap_id
  end

  defp restrict_local(query, %{"local_only" => true}) do
    from activity in query, where: activity.local == true
  end
  defp restrict_local(query, _), do: query

  defp restrict_max(query, %{"max_id" => max_id}) do
    from activity in query, where: activity.id < ^max_id
  end
  defp restrict_max(query, _), do: query

  defp restrict_actor(query, %{"actor_id" => actor_id}) do
    from activity in query,
      where: activity.actor == ^actor_id
  end
  defp restrict_actor(query, _), do: query

  defp restrict_type(query, %{"type" => type}) when is_binary(type) do
    restrict_type(query, %{"type" => [type]})
  end
  defp restrict_type(query, %{"type" => type}) do
    from activity in query,
      where: fragment("?->>'type' = ANY(?)", activity.data, ^type)
  end
  defp restrict_type(query, _), do: query

  defp restrict_favorited_by(query, %{"favorited_by" => ap_id}) do
    from activity in query,
      where: fragment("? <@ (? #> '{\"object\",\"likes\"}')", ^ap_id, activity.data)
  end
  defp restrict_favorited_by(query, _), do: query

  defp restrict_media(query, %{"only_media" => val}) when val == "true" or val == "1" do
    from activity in query,
      where: fragment("not (? #> '{\"object\",\"attachment\"}' = ?)", activity.data, ^[])
  end
  defp restrict_media(query, _), do: query

  # Only search through last 100_000 activities by default
  defp restrict_recent(query, %{"whole_db" => true}), do: query
  defp restrict_recent(query, _) do
    since = (Repo.aggregate(Activity, :max, :id) || 0) - 100_000

    from activity in query,
      where: activity.id > ^since
  end

  defp restrict_blocked(query, %{"blocking_user" => %User{info: info}}) do
    blocks = info["blocks"] || []

    from activity in query,
      where: fragment("not (? = ANY(?))", activity.actor, ^blocks)
  end
  defp restrict_blocked(query, _), do: query
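
  # Builds the base activity query (20 activities, newest first) and applies all the
  # restrict_* filters from opts.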
  def fetch_activities_query(recipients, opts \\ %{}) do
    base_query = from activity in Activity,
      limit: 20,
      order_by: [fragment("? desc nulls last", activity.id)]

    base_query
    |> restrict_recipients(recipients, opts["user"])
    |> restrict_tag(opts)
    |> restrict_since(opts)
    |> restrict_local(opts)
    |> restrict_max(opts)
    |> restrict_actor(opts)
    |> restrict_type(opts)
    |> restrict_favorited_by(opts)
    |> restrict_recent(opts)
    |> restrict_blocked(opts)
    |> restrict_media(opts)
  end

  def fetch_activities(recipients, opts \\ %{}) do
    fetch_activities_query(recipients, opts)
    |> Repo.all
    |> Enum.reverse
  end

  def upload(file) do
    data = Upload.store(file)
    Repo.insert(%Object{data: data})
  end
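
  # Fetches an actor document over HTTP and maps it to the attributes used for a Pleroma user.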
  def fetch_and_prepare_user_from_ap_id(ap_id) do
    with {:ok, %{status_code: 200, body: body}} <- @httpoison.get(ap_id, [Accept: "application/activity+json"]),
         {:ok, data} <- Poison.decode(body) do
      avatar = %{
        "type" => "Image",
        "url" => [%{"href" => data["icon"]["url"]}]
      }

      banner = %{
        "type" => "Image",
        "url" => [%{"href" => data["image"]["url"]}]
      }

      user_data = %{
        ap_id: data["id"],
        info: %{
          "ap_enabled" => true,
          "source_data" => data,
          "banner" => banner
        },
        avatar: avatar,
        nickname: "#{data["preferredUsername"]}@#{URI.parse(ap_id).host}",
        name: data["name"],
        follower_address: data["followers"]
      }

      {:ok, user_data}
    else
      e -> Logger.error("Could not fetch user at #{ap_id}, #{inspect(e)}")
    end
  end
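
  # If the user is already known locally, upgrades them to ActivityPub; otherwise fetches
  # the remote actor and inserts or updates the local record.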
  def make_user_from_ap_id(ap_id) do
    if User.get_by_ap_id(ap_id) do
      Transmogrifier.upgrade_user_from_ap_id(ap_id)
    else
      with {:ok, data} <- fetch_and_prepare_user_from_ap_id(ap_id) do
        User.insert_or_update_user(data)
      else
        e -> e
      end
    end
  end

  def make_user_from_nickname(nickname) do
    with {:ok, %{"ap_id" => ap_id}} when not is_nil(ap_id) <- WebFinger.finger(nickname) do
      make_user_from_ap_id(ap_id)
    end
  end
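
  # Delivers an activity to other servers: collects the inboxes of the AP-enabled
  # recipients, preferring a sharedInbox when one is advertised, and enqueues one
  # federation job per inbox.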
  def publish(actor, activity) do
    followers =
      if actor.follower_address in activity.recipients do
        {:ok, followers} = User.get_followers(actor)
        followers
      else
        []
      end

    remote_inboxes =
      (Pleroma.Web.Salmon.remote_users(activity) ++ followers)
      |> Enum.filter(fn user -> User.ap_enabled?(user) end)
      |> Enum.map(fn %{info: %{"source_data" => data}} ->
        (data["endpoints"] && data["endpoints"]["sharedInbox"]) || data["inbox"]
      end)
      |> Enum.uniq()

    {:ok, data} = Transmogrifier.prepare_outgoing(activity.data)
    json = Poison.encode!(data)

    Enum.each(remote_inboxes, fn inbox ->
      Federator.enqueue(:publish_single_ap, %{inbox: inbox, json: json, actor: actor, id: activity.data["id"]})
    end)
  end
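
  # Delivers the activity to a single inbox, signing the request with HTTP Signatures.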
  def publish_one(%{inbox: inbox, json: json, actor: actor, id: id}) do
    Logger.info("Federating #{id} to #{inbox}")
    host = URI.parse(inbox).host
    signature = Pleroma.Web.HTTPSignatures.sign(actor, %{host: host, "content-length": byte_size(json)})
    @httpoison.post(inbox, json, [{"Content-Type", "application/activity+json"}, {"signature", signature}])
  end
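
  # Fetches an object by AP id: first from the local cache, then via ActivityPub, and
  # finally falls back to OStatus fetching.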
  # TODO:
  # This will create a Create activity, which we need internally at the moment.
  def fetch_object_from_id(id) do
    if object = Object.get_cached_by_ap_id(id) do
      {:ok, object}
    else
      Logger.info("Fetching #{id} via AP")

      with {:ok, %{body: body, status_code: code}} when code in 200..299 <- @httpoison.get(id, [Accept: "application/activity+json"], follow_redirect: true, timeout: 10000, recv_timeout: 20000),
           {:ok, data} <- Poison.decode(body),
           nil <- Object.get_by_ap_id(data["id"]),
           params <- %{"type" => "Create", "to" => data["to"], "cc" => data["cc"], "actor" => data["attributedTo"], "object" => data},
           {:ok, activity} <- Transmogrifier.handle_incoming(params) do
        {:ok, Object.get_by_ap_id(activity.data["object"]["id"])}
      else
        object = %Object{} ->
          {:ok, object}

        _e ->
          Logger.info("Couldn't get object via AP, trying out OStatus fetching...")

          case OStatus.fetch_activity_from_url(id) do
            {:ok, [activity | _]} -> {:ok, Object.get_by_ap_id(activity.data["object"]["id"])}
            e -> e
          end
      end
    end
  end
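
  # Visibility helpers: an activity is public when it is addressed to the ActivityStreams
  # public collection; it is visible to a user when it is public or when its recipients
  # overlap the user's ap_id and following list.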
  def is_public?(activity) do
    "https://www.w3.org/ns/activitystreams#Public" in (activity.data["to"] ++ (activity.data["cc"] || []))
  end

  def visible_for_user?(activity, nil) do
    is_public?(activity)
  end
  def visible_for_user?(activity, user) do
    x = [user.ap_id | user.following]
    y = activity.data["to"] ++ (activity.data["cc"] || [])
    visible_for_user?(activity, nil) || Enum.any?(x, &(&1 in y))
  end
end