2017-04-18 18:41:51 +02:00
|
|
|
defmodule Pleroma.Web.OStatus do
|
2017-05-05 16:07:44 +02:00
|
|
|
@httpoison Application.get_env(:pleroma, :httpoison)
|
|
|
|
|
2017-04-24 18:46:34 +02:00
|
|
|
import Ecto.Query
|
2017-04-27 09:43:58 +02:00
|
|
|
import Pleroma.Web.XML
|
2017-04-24 18:46:34 +02:00
|
|
|
require Logger
|
|
|
|
|
2017-05-05 16:27:03 +02:00
|
|
|
alias Pleroma.{Repo, User, Web, Object, Activity}
|
2017-04-24 18:46:34 +02:00
|
|
|
alias Pleroma.Web.ActivityPub.ActivityPub
|
2017-04-29 17:51:59 +02:00
|
|
|
alias Pleroma.Web.{WebFinger, Websub}
|
2018-05-21 10:36:20 +02:00
|
|
|
alias Pleroma.Web.OStatus.{FollowHandler, UnfollowHandler, NoteHandler, DeleteHandler}
|
2018-02-24 13:11:39 +01:00
|
|
|
alias Pleroma.Web.ActivityPub.Transmogrifier
|
2017-04-18 18:41:51 +02:00
|
|
|
|
2018-11-10 11:05:41 +01:00
|
|
|
# Tells whether an activity can be rendered over OStatus: only activities
# whose normalized object exists and is of type "Note" qualify.
def is_representable?(%Activity{data: data}) do
  case Object.normalize(data["object"]) do
    # No object could be resolved — nothing to render.
    nil -> false
    # Anything but a "Note" is not representable in OStatus.
    object -> object.data["type"] == "Note"
  end
end
|
|
|
|
|
2018-12-10 13:25:33 +01:00
|
|
|
@doc """
Returns the HTML `<link>` metadata for `url` — currently just the
oembed discovery links (see `oembed_links/1`).
"""
def metadata(url), do: oembed_links(url)
|
|
|
|
|
|
|
|
@doc """
Builds the oembed discovery `<link>` tags for `url`, one per supported
format ("xml" and "json"), joined with CRLF for embedding in an HTML head.
"""
def oembed_links(url) do
  # map_join avoids building an intermediate list before joining.
  Enum.map_join(["xml", "json"], "\r\n", fn format ->
    href = oembed_path(url, format)
    "<link rel=\"alternate\" type=\"application/#{format}+oembed\" href=\"#{href}\">"
  end)
end
|
|
|
|
|
2017-04-18 18:41:51 +02:00
|
|
|
@doc """
Atom feed URL for the given user, derived from their ActivityPub id
(`ap_id` is a URL string).
"""
def feed_path(user) do
  user.ap_id <> "/feed.atom"
end
|
|
|
|
|
2017-04-20 17:47:33 +02:00
|
|
|
@doc """
WebSub/PuSH hub endpoint advertised for the given user's feed.
"""
def pubsub_path(user) do
  Enum.join([Web.base_url(), "/push/hub/", user.nickname])
end
|
|
|
|
|
2017-04-24 18:46:34 +02:00
|
|
|
@doc """
Salmon endpoint URL for the given user, derived from their ActivityPub id.
"""
def salmon_path(user) do
  user.ap_id <> "/salmon"
end
|
|
|
|
|
2018-01-18 17:42:32 +01:00
|
|
|
@doc """
URL template for the remote-follow endpoint; the literal `{uri}`
placeholder is filled in by the remote client.
"""
def remote_follow_path do
  Web.base_url() <> "/ostatus_subscribe?acct={uri}"
end
|
|
|
|
|
2018-12-10 13:25:33 +01:00
|
|
|
@doc """
Local oembed endpoint URL for `url` in the given `format` ("xml"/"json");
both values are carried in the query string.
"""
def oembed_path(url, format) do
  params = URI.encode_query(%{url: url, format: format})
  Web.base_url() <> "/oembed?" <> params
end
|
|
|
|
|
2017-04-24 18:46:34 +02:00
|
|
|
# Parses an incoming OStatus/Atom XML document and converts each <entry>
# into internal activities.
#
# Returns `{:ok, activities}` — entries that failed to parse or raised are
# dropped — or `{:error, []}` when the document itself cannot be parsed.
def handle_incoming(xml_string) do
  with doc when doc != :error <- parse_document(xml_string) do
    entries = :xmerl_xpath.string('//entry', doc)

    activities =
      Enum.map(entries, fn entry ->
        # The activitystreams object-type and verb (charlists, since they
        # come from xmerl) decide which handler processes the entry.
        {:xmlObj, :string, object_type} =
          :xmerl_xpath.string('string(/entry/activity:object-type[1])', entry)

        {:xmlObj, :string, verb} = :xmerl_xpath.string('string(/entry/activity:verb[1])', entry)
        Logger.debug("Handling #{verb}")

        # NOTE(review): broad rescue — any handler crash is logged and the
        # entry is mapped to nil, then filtered out below.
        try do
          case verb do
            'http://activitystrea.ms/schema/1.0/delete' ->
              with {:ok, activity} <- DeleteHandler.handle_delete(entry, doc), do: activity

            'http://activitystrea.ms/schema/1.0/follow' ->
              with {:ok, activity} <- FollowHandler.handle(entry, doc), do: activity

            'http://activitystrea.ms/schema/1.0/unfollow' ->
              with {:ok, activity} <- UnfollowHandler.handle(entry, doc), do: activity

            # Shares and favorites produce two activities: the new one and
            # the activity it targets.
            'http://activitystrea.ms/schema/1.0/share' ->
              with {:ok, activity, retweeted_activity} <- handle_share(entry, doc),
                do: [activity, retweeted_activity]

            'http://activitystrea.ms/schema/1.0/favorite' ->
              with {:ok, activity, favorited_activity} <- handle_favorite(entry, doc),
                do: [activity, favorited_activity]

            _ ->
              # Unknown verb — fall back to dispatching on the object type.
              case object_type do
                'http://activitystrea.ms/schema/1.0/note' ->
                  with {:ok, activity} <- NoteHandler.handle_note(entry, doc), do: activity

                'http://activitystrea.ms/schema/1.0/comment' ->
                  with {:ok, activity} <- NoteHandler.handle_note(entry, doc), do: activity

                _ ->
                  Logger.error("Couldn't parse incoming document")
                  nil
              end
          end
        rescue
          e ->
            Logger.error("Error occured while handling activity")
            Logger.error(xml_string)
            Logger.error(inspect(e))
            nil
        end
      end)
      |> Enum.filter(& &1)

    {:ok, activities}
  else
    _e -> {:error, []}
  end
end
|
|
|
|
|
2017-05-07 20:05:03 +02:00
|
|
|
# Builds an Announce activity for a remote share (retweet).
#
# Resolves/updates the sharing actor from the document, normalizes the
# shared object and reads the entry id, then announces without federating
# (`false`). Returns `{:ok, activity}`, or the failing clause's value.
def make_share(entry, doc, retweeted_activity) do
  with {:ok, actor} <- find_make_or_update_user(doc),
       %Object{} = object <- Object.normalize(retweeted_activity.data["object"]),
       id when not is_nil(id) <- string_from_xpath("/entry/id", entry),
       # NOTE(review): `=` (not `<-`) — if announce fails this raises a
       # MatchError instead of falling through the `with`; confirm intended.
       {:ok, activity, _object} = ActivityPub.announce(actor, object, id, false) do
    {:ok, activity}
  end
end
|
|
|
|
|
|
|
|
# Handles an incoming share entry: fetches (or builds) the shared activity
# and creates the corresponding Announce.
#
# Returns `{:ok, activity, retweeted_activity}` on success, otherwise
# `{:error, reason}` wrapping the failing step's value.
def handle_share(entry, doc) do
  case get_or_build_object(entry) do
    {:ok, retweeted_activity} ->
      case make_share(entry, doc, retweeted_activity) do
        {:ok, activity} -> {:ok, activity, retweeted_activity}
        failure -> {:error, failure}
      end

    failure ->
      {:error, failure}
  end
end
|
|
|
|
|
2017-05-07 20:05:03 +02:00
|
|
|
# Builds a Like activity for a remote favorite.
#
# Mirrors `make_share/3`: resolves the acting user, normalizes the liked
# object, reads the entry id, and likes without federating (`false`).
# Returns `{:ok, activity}`, or the failing clause's value.
def make_favorite(entry, doc, favorited_activity) do
  with {:ok, actor} <- find_make_or_update_user(doc),
       %Object{} = object <- Object.normalize(favorited_activity.data["object"]),
       id when not is_nil(id) <- string_from_xpath("/entry/id", entry),
       # NOTE(review): `=` (not `<-`) — a failed like raises a MatchError
       # instead of falling through the `with`; confirm intended.
       {:ok, activity, _object} = ActivityPub.like(actor, object, id, false) do
    {:ok, activity}
  end
end
|
|
|
|
|
2017-05-19 16:08:46 +02:00
|
|
|
# Returns the activity referenced by the entry, either from the database /
# by fetching it, or — failing that — by building it from the inline
# activity:object element of the entry itself.
def get_or_build_object(entry) do
  case get_or_try_fetching(entry) do
    {:ok, activity} ->
      {:ok, activity}

    _e ->
      # Fall back to the embedded object; handle_note gets the object node
      # both as entry and as document context.
      with [object] <- :xmerl_xpath.string('/entry/activity:object', entry) do
        NoteHandler.handle_note(object, object)
      end
  end
end
|
|
|
|
|
2017-05-05 16:27:03 +02:00
|
|
|
# Looks up the activity referenced by the entry's activity:object id in the
# database; if absent, tries to fetch it from the object's text/html link.
#
# Returns `{:ok, activity}` on success.
def get_or_try_fetching(entry) do
  Logger.debug("Trying to get entry from db")

  with id when not is_nil(id) <- string_from_xpath("//activity:object[1]/id", entry),
       %Activity{} = activity <- Activity.get_create_activity_by_object_ap_id(id) do
    {:ok, activity}
  else
    _ ->
      Logger.debug("Couldn't get, will try to fetch")

      with href when not is_nil(href) <-
             string_from_xpath("//activity:object[1]/link[@type=\"text/html\"]/@href", entry),
           {:ok, [favorited_activity]} <- fetch_activity_from_url(href) do
        {:ok, favorited_activity}
      else
        # NOTE(review): this branch returns Logger.debug's result (`:ok`),
        # not an error tuple — callers only match on `{:ok, _}`, so it acts
        # as a failure, but the shape is inconsistent; confirm intended.
        e -> Logger.debug("Couldn't find href: #{inspect(e)}")
      end
  end
end
|
|
|
|
|
2017-05-05 16:07:44 +02:00
|
|
|
# Handles an incoming favorite entry: resolves the favorited activity and
# creates the corresponding Like.
#
# Returns `{:ok, activity, favorited_activity}` on success, otherwise
# `{:error, reason}` wrapping the failing step's value.
def handle_favorite(entry, doc) do
  case get_or_try_fetching(entry) do
    {:ok, favorited_activity} ->
      case make_favorite(entry, doc, favorited_activity) do
        {:ok, activity} -> {:ok, activity, favorited_activity}
        failure -> {:error, failure}
      end

    failure ->
      {:error, failure}
  end
end
|
|
|
|
|
2017-05-03 14:26:49 +02:00
|
|
|
# Extracts enclosure links from the entry and maps each to an AS
# "Attachment" object; enclosures missing href or type are skipped.
def get_attachments(entry) do
  '/entry/link[@rel="enclosure"]'
  |> :xmerl_xpath.string(entry)
  |> Enum.map(fn link ->
    href = string_from_xpath("/link/@href", link)
    type = string_from_xpath("/link/@type", link)

    # Both attributes are required; otherwise yield nil and drop below.
    if href && type do
      %{
        "type" => "Attachment",
        "url" => [
          %{
            "type" => "Link",
            "mediaType" => type,
            "href" => href
          }
        ]
      }
    end
  end)
  |> Enum.filter(& &1)
end
|
|
|
|
|
2017-05-20 13:35:22 +02:00
|
|
|
@doc """
|
2017-10-31 17:30:46 +01:00
|
|
|
Gets the content from a an entry.
|
2017-05-20 13:35:22 +02:00
|
|
|
"""
|
2017-05-10 18:46:23 +02:00
|
|
|
def get_content(entry) do
|
2017-10-31 17:30:46 +01:00
|
|
|
string_from_xpath("//content", entry)
|
|
|
|
end
|
2017-05-10 18:46:23 +02:00
|
|
|
|
2017-10-31 17:30:46 +01:00
|
|
|
@doc """
|
|
|
|
Get the cw that mastodon uses.
|
|
|
|
"""
|
|
|
|
def get_cw(entry) do
|
2018-01-29 17:44:35 +01:00
|
|
|
with cw when not is_nil(cw) <- string_from_xpath("/*/summary", entry) do
|
2017-10-31 17:30:46 +01:00
|
|
|
cw
|
2018-03-30 15:01:53 +02:00
|
|
|
else
|
|
|
|
_e -> nil
|
2017-05-10 18:46:23 +02:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2017-05-18 15:58:18 +02:00
|
|
|
# Collects the entry's category terms as a list of lowercased tag strings,
# dropping categories without a term attribute.
def get_tags(entry) do
  '//category'
  |> :xmerl_xpath.string(entry)
  |> Enum.map(&string_from_xpath("/category/@term", &1))
  |> Enum.filter(& &1)
  |> Enum.map(&String.downcase/1)
end
|
|
|
|
|
2017-05-24 17:34:38 +02:00
|
|
|
# Refreshes a remote user's profile from the document. If the author
# advertises ActivityPub support (ap_enabled), upgrade them to AP;
# otherwise update their OStatus profile data.
def maybe_update(doc, user) do
  case string_from_xpath("//author[1]/ap_enabled", doc) do
    "true" -> Transmogrifier.upgrade_user_from_ap_id(user.ap_id)
    _ -> maybe_update_ostatus(doc, user)
  end
end
|
2018-03-30 15:01:53 +02:00
|
|
|
|
2018-02-24 13:06:53 +01:00
|
|
|
# Updates a remote user's avatar, bio and display name from the author
# section of an OStatus document.
#
# Only acts on non-local users and only persists when something actually
# changed; otherwise (or on local users) returns `{:ok, user}` unchanged.
def maybe_update_ostatus(doc, user) do
  old_data = %{
    avatar: user.avatar,
    bio: user.bio,
    name: user.name
  }

  with false <- user.local,
       avatar <- make_avatar_object(doc),
       bio <- string_from_xpath("//author[1]/summary", doc),
       name <- string_from_xpath("//author[1]/poco:displayName", doc),
       # Missing fields in the document keep their current values.
       new_data <- %{
         avatar: avatar || old_data.avatar,
         name: name || old_data.name,
         bio: bio || old_data.bio
       },
       false <- new_data == old_data do
    change = Ecto.Changeset.change(user, new_data)
    User.update_and_set_cache(change)
  else
    _ ->
      {:ok, user}
  end
end
|
|
|
|
|
2017-05-02 17:13:41 +02:00
|
|
|
# Resolves the document's author (by their uri) to a user — creating them
# if unknown — and refreshes their profile from the document.
def find_make_or_update_user(doc) do
  author_uri = string_from_xpath("//author/uri[1]", doc)

  case find_or_make_user(author_uri) do
    {:ok, user} -> maybe_update(doc, user)
    error -> error
  end
end
|
|
|
|
|
2017-04-29 19:06:01 +02:00
|
|
|
# Finds the user whose ap_id equals `uri`, or creates them from remote
# information when no such user exists. Returns `{:ok, user}` on success.
def find_or_make_user(uri) do
  query = from(user in User, where: user.ap_id == ^uri)

  case Repo.one(query) do
    nil -> make_user(uri)
    user -> {:ok, user}
  end
end
|
|
|
|
|
2017-11-09 08:32:54 +01:00
|
|
|
# Builds (or updates, when `update` is true) a user record from remotely
# gathered info (WebFinger + feed data) for the given uri.
#
# When `update` is false and a user with that ap_id already exists, the
# existing user is returned untouched; otherwise the gathered data is
# inserted/updated via `User.insert_or_update_user/1`.
def make_user(uri, update \\ false) do
  with {:ok, info} <- gather_user_info(uri) do
    data = %{
      name: info["name"],
      # Remote users are addressed as nickname@host.
      nickname: info["nickname"] <> "@" <> info["host"],
      ap_id: info["uri"],
      info: info,
      avatar: info["avatar"],
      bio: info["bio"]
    }

    with false <- update,
         %User{} = user <- User.get_by_ap_id(data.ap_id) do
      {:ok, user}
    else
      _e -> User.insert_or_update_user(data)
    end
  end
end
|
|
|
|
|
|
|
|
# TODO: Just takes the first one for now.
# Builds an AS "Image" object from the first author link with the given
# rel (default "avatar"); returns nil when the document has no such link.
def make_avatar_object(author_doc, rel \\ "avatar") do
  href = string_from_xpath("//author[1]/link[@rel=\"#{rel}\"]/@href", author_doc)
  type = string_from_xpath("//author[1]/link[@rel=\"#{rel}\"]/@type", author_doc)

  if href do
    %{
      "type" => "Image",
      "url" => [
        %{
          "type" => "Link",
          "mediaType" => type,
          "href" => href
        }
      ]
    }
  else
    nil
  end
end
|
2017-04-29 17:51:59 +02:00
|
|
|
|
|
|
|
# Gathers remote user information: WebFinger data merged with the feed
# data found at the WebFinger "topic" URL, plus the fully qualified name.
#
# Returns `{:ok, info_map}` or `{:error, reason}`.
def gather_user_info(username) do
  with {:ok, webfinger_data} <- WebFinger.finger(username),
       {:ok, feed_data} <- Websub.gather_feed_data(webfinger_data["topic"]) do
    info =
      webfinger_data
      |> Map.merge(feed_data)
      |> Map.put("fqn", username)

    {:ok, info}
  else
    e ->
      Logger.debug(fn -> "Couldn't gather info for #{username}" end)
      {:error, e}
  end
end
|
2017-05-05 16:07:44 +02:00
|
|
|
|
|
|
|
# Regex-based 'parsing' so we don't have to pull in a full html parser
|
|
|
|
# It's a hack anyway. Maybe revisit this in the future
|
|
|
|
@mastodon_regex ~r/<link href='(.*)' rel='alternate' type='application\/atom\+xml'>/
|
|
|
|
@gs_regex ~r/<link title=.* href="(.*)" type="application\/atom\+xml" rel="alternate">/
|
2017-05-05 20:15:26 +02:00
|
|
|
@gs_classic_regex ~r/<link rel="alternate" href="(.*)" type="application\/atom\+xml" title=.*>/
|
2017-05-05 16:07:44 +02:00
|
|
|
# Finds the alternate Atom feed link in an HTML body, trying the Mastodon,
# GNU Social, and classic GNU Social link formats in that order.
#
# Returns `{:ok, url}` or `{:error, message}` when no link matches.
def get_atom_url(body) do
  cond do
    Regex.match?(@mastodon_regex, body) ->
      extract_atom_link(@mastodon_regex, body)

    Regex.match?(@gs_regex, body) ->
      extract_atom_link(@gs_regex, body)

    Regex.match?(@gs_classic_regex, body) ->
      extract_atom_link(@gs_classic_regex, body)

    true ->
      Logger.debug(fn -> "Couldn't find Atom link in #{inspect(body)}" end)
      {:error, "Couldn't find the Atom link"}
  end
end

# Asserts exactly one match in the body (raises MatchError otherwise) and
# returns the captured href.
defp extract_atom_link(regex, body) do
  [[_, match]] = Regex.scan(regex, body)
  {:ok, match}
end
|
|
|
|
|
2017-08-04 16:57:38 +02:00
|
|
|
# Fetches an Atom document from `url` (http/https only, 2xx responses
# only) and feeds it through `handle_incoming/1`.
#
# On failure the failing step's value is logged and returned as-is.
def fetch_activity_from_atom_url(url) do
  with true <- String.starts_with?(url, "http"),
       {:ok, %{body: body, status: code}} when code in 200..299 <-
         @httpoison.get(
           url,
           [{:Accept, "application/atom+xml"}]
         ) do
    Logger.debug("Got document from #{url}, handling...")
    handle_incoming(body)
  else
    e ->
      Logger.debug("Couldn't get #{url}: #{inspect(e)}")
      e
  end
end
|
|
|
|
|
2017-05-05 16:07:44 +02:00
|
|
|
# Fetches an HTML page, discovers its alternate Atom link via
# `get_atom_url/1`, and then fetches and handles that Atom document.
#
# On failure the failing step's value is logged and returned as-is.
def fetch_activity_from_html_url(url) do
  Logger.debug("Trying to fetch #{url}")

  with true <- String.starts_with?(url, "http"),
       {:ok, %{body: body}} <- @httpoison.get(url, []),
       {:ok, atom_url} <- get_atom_url(body) do
    fetch_activity_from_atom_url(atom_url)
  else
    e ->
      Logger.debug("Couldn't get #{url}: #{inspect(e)}")
      e
  end
end
|
2017-08-04 16:57:38 +02:00
|
|
|
|
|
|
|
# Fetches activities from `url`, first treating it as an Atom document and
# falling back to HTML link discovery when that yields nothing.
#
# Returns `{:ok, activities}` (non-empty list) or `{:error, message}`.
# The def-level rescue catches any exception raised while fetching.
def fetch_activity_from_url(url) do
  with {:ok, [_ | _] = activities} <- fetch_activity_from_atom_url(url) do
    {:ok, activities}
  else
    _e -> fetch_activity_from_html_url(url)
  end
rescue
  e ->
    Logger.debug("Couldn't get #{url}: #{inspect(e)}")
    {:error, "Couldn't get #{url}: #{inspect(e)}"}
end
|
2017-04-18 18:41:51 +02:00
|
|
|
end
|