2017-03-21 17:53:20 +01:00
|
|
|
defmodule Pleroma.Web.TwitterAPI.TwitterAPI do
|
2017-03-30 17:07:03 +02:00
|
|
|
alias Pleroma.{User, Activity, Repo, Object}
|
2017-03-21 17:53:20 +01:00
|
|
|
alias Pleroma.Web.ActivityPub.ActivityPub
|
2017-06-19 23:12:37 +02:00
|
|
|
alias Pleroma.Web.TwitterAPI.Representers.ActivityRepresenter
|
|
|
|
alias Pleroma.Web.TwitterAPI.UserView
|
2017-09-09 17:48:57 +02:00
|
|
|
alias Pleroma.Web.{OStatus, CommonAPI}
|
2017-05-17 18:00:20 +02:00
|
|
|
alias Pleroma.Formatter
|
2017-09-16 14:33:47 +02:00
|
|
|
import Ecto.Query
|
2017-03-21 17:53:20 +01:00
|
|
|
|
2017-06-18 17:20:39 +02:00
|
|
|
@httpoison Application.get_env(:pleroma, :httpoison)
|
|
|
|
|
2017-05-05 12:07:38 +02:00
|
|
|
# Creates a new status for `user` by delegating to CommonAPI.post/2.
#
# `data` must contain a "status" key (enforced by the pattern); the whole
# map is forwarded, so the value is matched with an underscore to avoid
# the compiler's unused-variable warning the original produced.
def create_status(%User{} = user, %{"status" => _status} = data) do
  CommonAPI.post(user, data)
end
|
|
|
|
|
2017-03-22 16:51:20 +01:00
|
|
|
# Returns the home timeline for `user`: activities by the user and
# everyone they follow, filtered through the user's blocks.
def fetch_friend_statuses(user, opts \\ %{}) do
  [user.ap_id | user.following]
  |> ActivityPub.fetch_activities(Map.put(opts, "blocking_user", user))
  |> activities_to_statuses(%{for: user})
end
|
|
|
|
|
2017-03-23 15:51:34 +01:00
|
|
|
# Returns the local-only public timeline, filtered through `user`'s blocks.
def fetch_public_statuses(user, opts \\ %{}) do
  opts
  |> Map.merge(%{"local_only" => true, "blocking_user" => user})
  |> ActivityPub.fetch_public_activities()
  |> activities_to_statuses(%{for: user})
end
|
|
|
|
|
|
|
|
# Returns the federated (local + remote) public timeline, filtered
# through `user`'s blocks. Unlike fetch_public_statuses/2 this does not
# set "local_only".
def fetch_public_and_external_statuses(user, opts \\ %{}) do
  opts
  |> Map.put("blocking_user", user)
  |> ActivityPub.fetch_public_activities()
  |> activities_to_statuses(%{for: user})
end
|
2017-03-21 17:53:20 +01:00
|
|
|
|
2017-04-14 15:09:13 +02:00
|
|
|
# Returns statuses for a single user's timeline. The recipient list is
# empty — the target user is selected via `opts` by the caller.
def fetch_user_statuses(user, opts \\ %{}) do
  []
  |> ActivityPub.fetch_activities(opts)
  |> activities_to_statuses(%{for: user})
end
|
|
|
|
|
2017-04-20 12:53:53 +02:00
|
|
|
# Returns activities addressed to `user` (mentions timeline).
def fetch_mentions(user, opts \\ %{}) do
  [user.ap_id]
  |> ActivityPub.fetch_activities(opts)
  |> activities_to_statuses(%{for: user})
end
|
|
|
|
|
2017-03-28 17:22:44 +02:00
|
|
|
# Returns all statuses in the conversation identified by `id`, rendered
# for `user`. Returns [] when the conversation id cannot be resolved to
# a context URI. Only the context lookup can fail; the remaining steps
# are plain bindings.
def fetch_conversation(user, id) do
  case conversation_id_to_context(id) do
    context when is_binary(context) ->
      context
      |> ActivityPub.fetch_activities_for_context(%{"blocking_user" => user})
      |> activities_to_statuses(%{for: user})

    _error ->
      []
  end
end
|
|
|
|
|
|
|
|
# Fetches a single activity by database id and renders it as a status
# for `user`. Mirrors the original `with`-without-`else`: a miss
# (Repo.get returning nil) is passed through unchanged.
def fetch_status(user, id) do
  case Repo.get(Activity, id) do
    %Activity{} = activity -> activity_to_status(activity, %{for: user})
    other -> other
  end
end
|
|
|
|
|
2017-04-13 14:32:13 +02:00
|
|
|
# Makes `follower` follow the user identified by `params` (see get_user/2),
# both locally and via an ActivityPub Follow activity.
#
# Returns {:ok, follower, followed, follow_activity} on success; any
# failing step's error value falls through unchanged (the original's
# `else err -> err` was a no-op and is omitted).
def follow(%User{} = follower, params) do
  with {:ok, %User{} = followed} <- get_user(params),
       {:ok, follower} <- User.follow(follower, followed),
       {:ok, follow_activity} <- ActivityPub.follow(follower, followed) do
    {:ok, follower, followed, follow_activity}
  end
end
|
|
|
|
|
2017-04-20 09:57:37 +02:00
|
|
|
# Makes `follower` unfollow the user identified by `params`, recording an
# Undo activity that references the latest Follow between the two users.
#
# Returns {:ok, follower, unfollowed} on success; a failing step's error
# value falls through unchanged.
def unfollow(%User{} = follower, params) do
  with {:ok, %User{} = unfollowed} <- get_user(params),
       {:ok, follower, follow_activity} <- User.unfollow(follower, unfollowed),
       undo_data = %{
         "type" => "Undo",
         "actor" => follower.ap_id,
         # get latest Follow for these users
         "object" => follow_activity.data["id"],
         "published" => make_date()
       },
       {:ok, _activity} <- ActivityPub.insert(undo_data) do
    {:ok, follower, unfollowed}
  end
end
|
|
|
|
|
2017-09-09 17:48:57 +02:00
|
|
|
# Repeats (announces) the object behind `ap_id_or_id` as `user` and
# returns the repeated status rendered for that user.
#
# Fix: the first `with` clause used `=` instead of `<-`, so an error
# tuple from CommonAPI.repeat/2 raised a MatchError instead of falling
# through as the `with` result. With `<-`, the error value is returned
# to the caller unchanged.
def repeat(%User{} = user, ap_id_or_id) do
  with {:ok, _announce, %{data: %{"id" => id}}} <- CommonAPI.repeat(ap_id_or_id, user),
       %Activity{} = activity <- Activity.get_create_activity_by_object_ap_id(id),
       status <- activity_to_status(activity, %{for: user}) do
    {:ok, status}
  end
end
|
|
|
|
|
2017-09-09 18:09:37 +02:00
|
|
|
# Favorites (likes) the object behind `ap_id_or_id` as `user` and returns
# the liked status rendered for that user.
#
# Fix: the first `with` clause used `=` instead of `<-`, so an error
# tuple from CommonAPI.favorite/2 raised a MatchError instead of falling
# through as the `with` result.
def fav(%User{} = user, ap_id_or_id) do
  with {:ok, _like, %{data: %{"id" => id}}} <- CommonAPI.favorite(ap_id_or_id, user),
       %Activity{} = activity <- Activity.get_create_activity_by_object_ap_id(id),
       status <- activity_to_status(activity, %{for: user}) do
    {:ok, status}
  end
end
|
|
|
|
|
2017-09-09 18:30:02 +02:00
|
|
|
# Removes `user`'s favorite from the object behind `ap_id_or_id` and
# returns the (no longer liked) status rendered for that user.
#
# Fix: the first `with` clause used `=` instead of `<-`, so an error
# tuple from CommonAPI.unfavorite/2 raised a MatchError instead of
# falling through as the `with` result.
def unfav(%User{} = user, ap_id_or_id) do
  with {:ok, %{data: %{"id" => id}}} <- CommonAPI.unfavorite(ap_id_or_id, user),
       %Activity{} = activity <- Activity.get_create_activity_by_object_ap_id(id),
       status <- activity_to_status(activity, %{for: user}) do
    {:ok, status}
  end
end
|
|
|
|
|
2017-04-14 16:13:34 +02:00
|
|
|
# Stores an uploaded file via ActivityPub.upload/1 and renders the result
# in the legacy Twitter media-upload response format.
#
# `format` is "xml" (default) or "json"; returns the rendered body as a
# string. Raises (via the `case`) on any other format value, and via the
# `=` match if the upload itself fails — both as in the original.
#
# Fix: the JSON branch emitted `media_id_string` as "#{object.id}}" with
# a stray trailing brace, corrupting the id string clients read back.
def upload(%Plug.Upload{} = file, format \\ "xml") do
  {:ok, object} = ActivityPub.upload(file)

  # The object's "url" is a list of link maps; only the first is reported.
  url = List.first(object.data["url"])
  href = url["href"]
  type = url["mediaType"]

  case format do
    "xml" ->
      # Fake this as good as possible...
      """
      <?xml version="1.0" encoding="UTF-8"?>
      <rsp stat="ok" xmlns:atom="http://www.w3.org/2005/Atom">
      <mediaid>#{object.id}</mediaid>
      <media_id>#{object.id}</media_id>
      <media_id_string>#{object.id}</media_id_string>
      <media_url>#{href}</media_url>
      <mediaurl>#{href}</mediaurl>
      <atom:link rel="enclosure" href="#{href}" type="#{type}"></atom:link>
      </rsp>
      """

    "json" ->
      %{
        media_id: object.id,
        media_id_string: "#{object.id}",
        media_url: href,
        size: 0
      } |> Poison.encode!
  end
end
|
|
|
|
|
2017-04-16 10:25:27 +02:00
|
|
|
# Registers a new local user from the web-form style `params` map.
#
# Returns {:ok, %User{}} on success, or {:error, %{error: json}} where
# `json` is a Poison-encoded map of changeset validation errors.
def register_user(params) do
  attrs = %{
    nickname: params["nickname"],
    name: params["fullname"],
    bio: params["bio"],
    email: params["email"],
    password: params["password"],
    password_confirmation: params["confirm"]
  }

  changeset = User.register_changeset(%User{}, attrs)

  case Repo.insert(changeset) do
    {:ok, user} ->
      {:ok, user}

    {:error, changeset} ->
      errors =
        changeset
        |> Ecto.Changeset.traverse_errors(fn {msg, _opts} -> msg end)
        |> Poison.encode!()

      {:error, %{error: errors}}
  end
end
|
|
|
|
|
2017-06-12 17:12:55 +02:00
|
|
|
# Looks a user up by numeric id or by nickname.
#
# Anything that is an integer, or a string with a numeric prefix per
# Integer.parse/1, goes through Repo.get/2; everything else is treated
# as a nickname. Returns the %User{} or nil.
def get_by_id_or_nickname(id_or_nickname) do
  numeric? =
    is_integer(id_or_nickname) or Integer.parse(id_or_nickname) != :error

  if numeric? do
    Repo.get(User, id_or_nickname)
  else
    Repo.get_by(User, nickname: id_or_nickname)
  end
end
|
|
|
|
|
2017-04-20 09:39:18 +02:00
|
|
|
# Resolves the target user from request `params`.
#
# Accepts "user_id" (id or nickname) or "screen_name" (nickname only);
# when neither is present, falls back to the given `user` (e.g. the
# authenticated account). Returns {:ok, %User{}} or {:error, message}.
def get_user(user \\ nil, params) do
  case params do
    %{"user_id" => user_id} ->
      case get_by_id_or_nickname(user_id) do
        nil -> {:error, "No user with such user_id"}
        found -> {:ok, found}
      end

    %{"screen_name" => nickname} ->
      case Repo.get_by(User, nickname: nickname) do
        nil -> {:error, "No user with such screen_name"}
        found -> {:ok, found}
      end

    _ ->
      if user do
        {:ok, user}
      else
        {:error, "You need to specify screen_name or user_id"}
      end
  end
end
|
|
|
|
|
2017-09-16 14:33:47 +02:00
|
|
|
# Parses a binary into an integer, returning `default` for non-binary
# input or a string with no leading digits. Note: "12abc" parses to 12
# (Integer.parse semantics), matching the original behavior.
defp parse_int(string, default \\ nil)

defp parse_int(string, default) when is_binary(string) do
  case Integer.parse(string) do
    {value, _rest} -> value
    :error -> default
  end
end

defp parse_int(_, default), do: default
|
|
|
|
|
|
|
|
# Full-text search over local Create activities.
#
# Params:
#   "q"    - required search query (plaintext, fed to plainto_tsquery).
#   "rpp"  - results per page, default 20.
#   "page" - 1-based page number, default 1.
#
# Returns a list of status maps rendered for `user`.
#
# NOTE(review): the tsvector expression is computed per-row here; whether
# a matching functional index exists is not visible from this file.
def search(user, %{"q" => query} = params) do
  limit = parse_int(params["rpp"], 20)
  page = parse_int(params["page"], 1)
  offset = (page - 1) * limit

  q = from a in Activity,
    where: fragment("?->>'type' = 'Create'", a.data),
    where: fragment("to_tsvector('english', ?->'object'->>'content') @@ plainto_tsquery('english', ?)", a.data, ^query),
    limit: ^limit,
    offset: ^offset,
    order_by: [desc: :inserted_at] # this one isn't indexed so psql won't take the wrong index.

  activities = Repo.all(q)
  activities_to_statuses(activities, %{for: user})
end
|
|
|
|
|
2017-03-23 15:51:34 +01:00
|
|
|
# Renders each activity in the list as a status map using the same opts.
defp activities_to_statuses(activities, opts) do
  Enum.map(activities, &activity_to_status(&1, opts))
end
|
2017-03-24 01:16:28 +01:00
|
|
|
|
2017-04-13 17:05:53 +02:00
|
|
|
# For likes, fetch the liked activity, too.
# Raises (via the bare match on a one-element list) if the liked object
# does not resolve to exactly one activity — as in the original.
defp activity_to_status(%Activity{data: %{"type" => "Like"}} = activity, opts) do
  liker = User.get_cached_by_ap_id(activity.data["actor"])
  [liked_activity] = Activity.all_by_object_ap_id(activity.data["object"])

  extra = %{user: liker, liked_activity: liked_activity}
  ActivityRepresenter.to_map(activity, Map.merge(opts, extra))
end
|
|
|
|
|
2017-04-15 13:54:46 +02:00
|
|
|
# For announces, fetch the announced activity and the user.
# Raises if the announced object does not resolve to exactly one
# activity — as in the original.
defp activity_to_status(%Activity{data: %{"type" => "Announce"}} = activity, opts) do
  announcer = User.get_cached_by_ap_id(activity.data["actor"])
  [announced_activity] = Activity.all_by_object_ap_id(activity.data["object"])
  announced_actor = User.get_cached_by_ap_id(announced_activity.data["actor"])

  extra = %{users: [announcer, announced_actor], announced_activity: announced_activity}
  ActivityRepresenter.to_map(activity, Map.merge(opts, extra))
end
|
|
|
|
|
2017-09-04 20:48:29 +02:00
|
|
|
# Deletions only need the acting user resolved; the representer handles
# the rest.
defp activity_to_status(%Activity{data: %{"type" => "Delete"}} = activity, opts) do
  user = User.get_cached_by_ap_id(activity.data["actor"])
  ActivityRepresenter.to_map(activity, Map.merge(opts, %{user: user}))
end
|
|
|
|
|
2017-03-24 01:16:28 +01:00
|
|
|
# Fallback clause: renders any other activity, resolving the actor and
# all mentioned users from the "to" list. Nil entries (and failed cache
# lookups) are dropped.
defp activity_to_status(activity, opts) do
  user = User.get_cached_by_ap_id(activity.data["actor"])

  # mentioned_users = Repo.all(from user in User, where: user.ap_id in ^activity.data["to"])
  mentioned_users =
    (activity.data["to"] || [])
    |> Enum.map(fn ap_id ->
      if ap_id, do: User.get_cached_by_ap_id(ap_id), else: nil
    end)
    |> Enum.filter(& &1)

  extra = %{user: user, mentioned: mentioned_users}
  ActivityRepresenter.to_map(activity, Map.merge(opts, extra))
end
|
2017-04-05 01:04:54 +02:00
|
|
|
|
|
|
|
# Returns the current UTC time as an ISO 8601 string, used for
# "published" fields on generated activities.
defp make_date do
  DateTime.to_iso8601(DateTime.utc_now())
end
|
2017-04-30 13:53:26 +02:00
|
|
|
|
|
|
|
# Maps an ActivityPub context URI to a numeric conversation id by looking
# up (or lazily creating) a context-mapping Object row.
def context_to_conversation_id(context) do
  case Object.get_cached_by_ap_id(context) do
    %Object{id: id} ->
      id

    _missing ->
      changeset = Object.context_mapping(context)

      case Repo.insert(changeset) do
        {:ok, %{id: id}} ->
          id

        # This should be solved by an upsert, but it seems ecto
        # has problems accessing the constraint inside the jsonb,
        # so on a race we re-read the row another process inserted.
        {:error, _} ->
          Object.get_cached_by_ap_id(context).id
      end
  end
end
|
|
|
|
|
|
|
|
# Maps a numeric conversation id back to its ActivityPub context URI.
# Returns the URI string, or {:error, message} when no such row exists.
def conversation_id_to_context(id) do
  case Repo.get(Object, id) do
    %Object{data: %{"id" => context}} -> context
    _ -> {:error, "No such conversation"}
  end
end
|
2017-05-10 18:44:57 +02:00
|
|
|
|
|
|
|
# Resolves (or creates) a remote user for `uri` via OStatus and renders
# their profile for `for_user`.
#
# As a best-effort side step, the user's feed is fetched from
# user.info["topic"] and ingested; failures there are deliberately
# swallowed by the inner `with` and do not affect the result.
# Returns {:ok, rendered_user} or {:error, "Couldn't find user"}.
def get_external_profile(for_user, uri) do
  case OStatus.find_or_make_user(uri) do
    {:ok, %User{} = user} ->
      with url <- user.info["topic"],
           {:ok, %{body: body}} <- @httpoison.get(url, [], follow_redirect: true, timeout: 10000, recv_timeout: 20000) do
        OStatus.handle_incoming(body)
      end

      {:ok, UserView.render("show.json", %{user: user, for: for_user})}

    _error ->
      {:error, "Couldn't find user"}
  end
end
|
2017-03-21 17:53:20 +01:00
|
|
|
end
|