2019-07-10 07:13:23 +02:00
|
|
|
# Pleroma: A lightweight social networking server
|
|
|
|
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
|
|
|
|
# SPDX-License-Identifier: AGPL-3.0-only
|
|
|
|
|
2018-12-01 23:53:10 +01:00
|
|
|
defmodule Pleroma.Object.Fetcher do
|
2019-05-25 06:24:21 +02:00
|
|
|
alias Pleroma.HTTP
|
2019-04-17 11:22:32 +02:00
|
|
|
alias Pleroma.Object
|
2018-12-01 23:53:10 +01:00
|
|
|
alias Pleroma.Object.Containment
|
2019-07-18 00:41:42 +02:00
|
|
|
alias Pleroma.Signature
|
2019-09-18 17:13:21 +02:00
|
|
|
alias Pleroma.Repo
|
2019-07-18 00:41:42 +02:00
|
|
|
alias Pleroma.Web.ActivityPub.InternalFetchActor
|
2018-12-01 23:53:10 +01:00
|
|
|
alias Pleroma.Web.ActivityPub.Transmogrifier
|
|
|
|
alias Pleroma.Web.OStatus
|
|
|
|
|
|
|
|
require Logger
|
2019-09-18 18:53:51 +02:00
|
|
|
require Pleroma.Constants
|
2018-12-01 23:53:10 +01:00
|
|
|
|
2019-09-18 18:07:25 +02:00
|
|
|
defp touch_changeset(changeset) do
|
|
|
|
updated_at =
|
|
|
|
NaiveDateTime.utc_now()
|
|
|
|
|> NaiveDateTime.truncate(:second)
|
|
|
|
|
|
|
|
Ecto.Changeset.put_change(changeset, :updated_at, updated_at)
|
|
|
|
end
|
|
|
|
|
2019-09-18 18:53:51 +02:00
|
|
|
defp maybe_reinject_internal_fields(data, %{data: %{} = old_data}) do
|
|
|
|
internal_fields = Map.take(old_data, Pleroma.Constants.object_internal_fields())
|
|
|
|
|
|
|
|
Map.merge(data, internal_fields)
|
|
|
|
end
|
|
|
|
|
|
|
|
defp maybe_reinject_internal_fields(data, _), do: data
|
|
|
|
|
2019-09-18 17:13:21 +02:00
|
|
|
defp reinject_object(struct, data) do
|
2019-05-21 02:41:58 +02:00
|
|
|
Logger.debug("Reinjecting object #{data["id"]}")
|
|
|
|
|
|
|
|
with data <- Transmogrifier.fix_object(data),
|
2019-09-18 18:53:51 +02:00
|
|
|
data <- maybe_reinject_internal_fields(data, struct),
|
2019-09-18 17:13:21 +02:00
|
|
|
changeset <- Object.change(struct, %{data: data}),
|
2019-09-18 18:07:25 +02:00
|
|
|
changeset <- touch_changeset(changeset),
|
2019-09-18 17:13:21 +02:00
|
|
|
{:ok, object} <- Repo.insert_or_update(changeset) do
|
2019-05-21 02:41:58 +02:00
|
|
|
{:ok, object}
|
|
|
|
else
|
|
|
|
e ->
|
|
|
|
Logger.error("Error while processing object: #{inspect(e)}")
|
|
|
|
{:error, e}
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-09-18 17:13:21 +02:00
|
|
|
def refetch_object(%Object{data: %{"id" => id}} = object) do
|
|
|
|
with {:ok, data} <- fetch_and_contain_remote_object_from_id(id),
|
|
|
|
{:ok, object} <- reinject_object(object, data) do
|
|
|
|
{:ok, object}
|
|
|
|
else
|
|
|
|
e -> {:error, e}
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2018-12-01 23:53:10 +01:00
|
|
|
  # TODO:
  # This will create a Create activity, which we need internally at the moment.
  #
  # Fetches the object at AP id `id`, returning the cached copy when one
  # exists. A freshly fetched object is wrapped in a synthetic "Create"
  # activity and run through `Transmogrifier.handle_incoming/2`; when the AP
  # path fails, fetching falls back to OStatus.
  #
  # Returns `{:ok, %Object{}}`, `{:error, reason}`, `{:reject, nil}` when the
  # synthetic activity was rejected, or whatever the OStatus fallback returns.
  def fetch_object_from_id(id, options \\ []) do
    if object = Object.get_cached_by_ap_id(id) do
      # Already known locally — no network round-trip needed.
      {:ok, object}
    else
      Logger.info("Fetching #{id} via AP")

      # Each step is tagged so the `else` clauses below can tell which step
      # failed (fetch / normalize / containment / final object lookup).
      with {:fetch, {:ok, data}} <- {:fetch, fetch_and_contain_remote_object_from_id(id)},
           # `nil` means the fetched data does not normalize to an existing
           # local object; a struct here is handled in `else` below.
           {:normalize, nil} <- {:normalize, Object.normalize(data, false)},
           params <- %{
             "type" => "Create",
             "to" => data["to"],
             "cc" => data["cc"],
             # Should we seriously keep this attributedTo thing?
             "actor" => data["actor"] || data["attributedTo"],
             "object" => data
           },
           # Anti-spoofing check: the payload must belong to the origin it
           # was fetched from.
           {:containment, :ok} <- {:containment, Containment.contain_origin(id, params)},
           {:ok, activity} <- Transmogrifier.handle_incoming(params, options),
           {:object, _data, %Object{} = object} <-
             {:object, data, Object.normalize(activity, false)} do
        {:ok, object}
      else
        {:containment, _} ->
          {:error, "Object containment failed."}

        {:error, {:reject, nil}} ->
          # handle_incoming rejected the activity (presumably via MRF —
          # confirm); propagate the rejection as-is.
          {:reject, nil}

        {:object, data, nil} ->
          # handle_incoming succeeded but no Object record materialized —
          # reinject the fetched data directly.
          reinject_object(%Object{}, data)

        {:normalize, object = %Object{}} ->
          # The fetched data normalized to an object we already have.
          {:ok, object}

        _e ->
          # Only fallback when receiving a fetch/normalization error with ActivityPub
          Logger.info("Couldn't get object via AP, trying out OStatus fetching...")

          # FIXME: OStatus Object Containment?
          case OStatus.fetch_activity_from_url(id) do
            {:ok, [activity | _]} -> {:ok, Object.normalize(activity, false)}
            e -> e
          end
      end
    end
  end
|
|
|
|
|
2019-06-29 19:04:50 +02:00
|
|
|
def fetch_object_from_id!(id, options \\ []) do
|
|
|
|
with {:ok, object} <- fetch_object_from_id(id, options) do
|
2018-12-04 04:17:25 +01:00
|
|
|
object
|
|
|
|
else
|
|
|
|
_e ->
|
|
|
|
nil
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-07-18 00:41:42 +02:00
|
|
|
defp make_signature(id, date) do
|
|
|
|
uri = URI.parse(id)
|
|
|
|
|
|
|
|
signature =
|
|
|
|
InternalFetchActor.get_actor()
|
|
|
|
|> Signature.sign(%{
|
|
|
|
"(request-target)": "get #{uri.path}",
|
|
|
|
host: uri.host,
|
|
|
|
date: date
|
|
|
|
})
|
|
|
|
|
|
|
|
[{:Signature, signature}]
|
|
|
|
end
|
|
|
|
|
|
|
|
defp sign_fetch(headers, id, date) do
|
|
|
|
if Pleroma.Config.get([:activitypub, :sign_object_fetches]) do
|
|
|
|
headers ++ make_signature(id, date)
|
|
|
|
else
|
|
|
|
headers
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
defp maybe_date_fetch(headers, date) do
|
|
|
|
if Pleroma.Config.get([:activitypub, :sign_object_fetches]) do
|
|
|
|
headers ++ [{:Date, date}]
|
|
|
|
else
|
|
|
|
headers
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-07-13 18:17:57 +02:00
|
|
|
  # Fetches and JSON-decodes the remote object at `id`, verifying that the
  # payload's claimed origin matches the URL it was fetched from.
  #
  # Returns `{:ok, data}` on success, `{:error, "Object has been deleted"}`
  # for 404/410 responses, and `{:error, term}` for any other failure.
  def fetch_and_contain_remote_object_from_id(id) when is_binary(id) do
    Logger.info("Fetching object #{id} via AP")

    date = Pleroma.Signature.signed_date()

    # Optionally adds Date and HTTP-signature headers, each gated on the
    # :sign_object_fetches config flag.
    headers =
      [{:Accept, "application/activity+json"}]
      |> maybe_date_fetch(date)
      |> sign_fetch(id, date)

    Logger.debug("Fetch headers: #{inspect(headers)}")

    with true <- String.starts_with?(id, "http"),
         {:ok, %{body: body, status: code}} when code in 200..299 <- HTTP.get(id, headers),
         {:ok, data} <- Jason.decode(body),
         :ok <- Containment.contain_origin_from_id(id, data) do
      {:ok, data}
    else
      {:ok, %{status: code}} when code in [404, 410] ->
        {:error, "Object has been deleted"}

      e ->
        # NOTE(review): a non-http `id` fails the first clause and surfaces
        # here as `{:error, false}`, which is not very descriptive — consider
        # tagging that check.
        {:error, e}
    end
  end
|
2019-07-13 18:17:57 +02:00
|
|
|
|
2019-07-20 21:04:47 +02:00
|
|
|
  # Convenience clause: accepts an already-decoded AP map and fetches by its
  # "id" property.
  def fetch_and_contain_remote_object_from_id(%{"id" => id}),
    do: fetch_and_contain_remote_object_from_id(id)

  # Anything that is neither a binary id nor a map carrying an "id" is not a
  # fetchable reference.
  def fetch_and_contain_remote_object_from_id(_id), do: {:error, "id must be a string"}
|
2018-12-01 23:53:10 +01:00
|
|
|
end
|