2018-12-23 21:04:54 +01:00
|
|
|
# Pleroma: A lightweight social networking server
|
2019-01-04 16:35:41 +01:00
|
|
|
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
|
2018-12-23 21:04:54 +01:00
|
|
|
# SPDX-License-Identifier: AGPL-3.0-only
|
|
|
|
|
2017-09-15 14:17:36 +02:00
|
|
|
defmodule Pleroma.Web.CommonAPI.Utils do
|
2018-12-11 13:31:52 +01:00
|
|
|
alias Calendar.Strftime
|
|
|
|
alias Comeonin.Pbkdf2
|
2019-02-09 16:16:26 +01:00
|
|
|
alias Pleroma.Activity
|
2019-03-05 03:52:23 +01:00
|
|
|
alias Pleroma.Config
|
2019-02-09 16:16:26 +01:00
|
|
|
alias Pleroma.Formatter
|
|
|
|
alias Pleroma.Object
|
|
|
|
alias Pleroma.Repo
|
2018-12-11 13:31:52 +01:00
|
|
|
alias Pleroma.User
|
2019-03-05 03:52:23 +01:00
|
|
|
alias Pleroma.Web.ActivityPub.Utils
|
2019-04-07 16:11:29 +02:00
|
|
|
alias Pleroma.Web.ActivityPub.Visibility
|
2018-08-12 21:24:10 +02:00
|
|
|
alias Pleroma.Web.Endpoint
|
2018-10-29 18:26:15 +01:00
|
|
|
alias Pleroma.Web.MediaProxy
|
2017-05-17 18:00:20 +02:00
|
|
|
|
2019-04-02 11:25:51 +02:00
|
|
|
require Logger
|
|
|
|
|
2017-09-15 14:17:36 +02:00
|
|
|
# This is a hack for twidere.
def get_by_id_or_ap_id(id) do
  # Try the database id first, then fall back to the AP object id.
  found =
    Activity.get_by_id_with_object(id) || Activity.get_create_by_object_ap_id_with_object(id)

  case found do
    # Already a Create activity: return it directly.
    %Activity{data: %{"type" => "Create"}} ->
      found

    # Any other activity type: resolve the Create for its object.
    %Activity{data: data} ->
      Activity.get_create_by_object_ap_id_with_object(data["object"])

    _ ->
      nil
  end
end
|
|
|
|
|
2018-11-02 17:33:51 +01:00
|
|
|
# Resolves the activity a status replies to. Blank ids, nil, or anything
# else unparseable yields nil.
def get_replied_to_activity(""), do: nil

def get_replied_to_activity(id) when not is_nil(id), do: Activity.get_by_id(id)

def get_replied_to_activity(_), do: nil
|
|
|
|
|
2019-01-04 17:27:46 +01:00
|
|
|
# Builds attachment objects from the submitted media ids, honoring
# per-attachment descriptions when the client supplied them.
def attachments_from_ids(data) do
  case data do
    %{"descriptions" => descs} ->
      attachments_from_ids_descs(data["media_ids"], descs)

    _ ->
      attachments_from_ids_no_descs(data["media_ids"])
  end
end
|
|
|
|
|
|
|
|
# Loads the stored object data for each media id; a missing media_ids
# parameter is treated as an empty list.
def attachments_from_ids_no_descs(ids) do
  for media_id <- ids || [] do
    Repo.get(Object, media_id).data
  end
end
|
|
|
|
|
|
|
|
# Loads attachment objects and attaches the client-supplied description
# (keyed by media id) as the "name" field.
def attachments_from_ids_descs(ids, descs_str) do
  # `descs_str` comes straight from the client. The previous
  # `{_, descs} = Jason.decode(descs_str)` bound the %Jason.DecodeError{}
  # struct on malformed input, making `descs[media_id]` below raise.
  # Fall back to no descriptions unless we decoded a real JSON object.
  descs =
    case Jason.decode(descs_str) do
      {:ok, %{} = decoded} -> decoded
      _ -> %{}
    end

  Enum.map(ids || [], fn media_id ->
    Map.put(Repo.get(Object, media_id).data, "name", descs[media_id])
  end)
end
|
|
|
|
|
2018-02-18 14:45:08 +01:00
|
|
|
# "public": addressed to the public collection plus mentioned users,
# cc'd to the author's followers. Replied-to actors are folded into "to".
def to_for_user_and_mentions(user, mentions, inReplyTo, "public") do
  mentioned = Enum.map(mentions, fn {_, %{ap_id: ap_id}} -> ap_id end)

  base_to = ["https://www.w3.org/ns/activitystreams#Public" | mentioned]

  to =
    if inReplyTo,
      do: Enum.uniq([inReplyTo.data["actor"] | base_to]),
      else: base_to

  {to, [user.follower_address]}
end
|
|
|
|
|
|
|
|
# "unlisted": followers and mentioned users in "to", with the public
# collection only in "cc" (keeps the post out of public timelines).
def to_for_user_and_mentions(user, mentions, inReplyTo, "unlisted") do
  mentioned = Enum.map(mentions, fn {_, %{ap_id: ap_id}} -> ap_id end)

  base_to = [user.follower_address | mentioned]

  to =
    if inReplyTo,
      do: Enum.uniq([inReplyTo.data["actor"] | base_to]),
      else: base_to

  {to, ["https://www.w3.org/ns/activitystreams#Public"]}
end
|
|
|
|
|
|
|
|
# "private": identical addressing to a DM, plus the author's followers.
def to_for_user_and_mentions(user, mentions, inReplyTo, "private") do
  {dm_to, dm_cc} = to_for_user_and_mentions(user, mentions, inReplyTo, "direct")
  {[user.follower_address | dm_to], dm_cc}
end
|
|
|
|
|
2018-05-04 22:59:01 +02:00
|
|
|
# "direct": only the mentioned users (and the replied-to actor) are
# addressed; "cc" stays empty.
def to_for_user_and_mentions(_user, mentions, inReplyTo, "direct") do
  mentioned = Enum.map(mentions, fn {_, %{ap_id: ap_id}} -> ap_id end)

  to =
    if inReplyTo,
      do: Enum.uniq([inReplyTo.data["actor"] | mentioned]),
      else: mentioned

  {to, []}
end
|
|
|
|
|
2018-09-02 02:14:25 +02:00
|
|
|
# Renders a status into {html, mentions, tags}, appending attachment links
# and an #nsfw tag as configured.
def make_content_html(status, attachments, data, visibility) do
  # The per-post flag wins over the instance default; clients may send the
  # flag as a boolean or as the string "true".
  no_links_default = Config.get([:instance, :no_attachment_links])
  no_attachment_links = Map.get(data, "no_attachment_links", no_links_default) in [true, "true"]

  content_type = get_content_type(data["content_type"])

  # Safe DM mentions: only explicitly leading mentions are linkified.
  safe_dm? = visibility == "direct" && Config.get([:instance, :safe_dm_mentions])
  options = if safe_dm?, do: [safe_mention: true], else: []

  status
  |> format_input(content_type, options)
  |> maybe_add_attachments(attachments, no_attachment_links)
  |> maybe_add_nsfw_tag(data)
end
|
|
|
|
|
|
|
|
# Falls back to plain text when the requested post format is not in the
# instance's allowed list.
defp get_content_type(content_type) do
  allowed = Config.get([:instance, :allowed_post_formats])

  if content_type in allowed, do: content_type, else: "text/plain"
end
|
|
|
|
|
2019-02-27 00:32:26 +01:00
|
|
|
# Prepends a #nsfw tag when the post is flagged sensitive; several truthy
# spellings are accepted from clients.
defp maybe_add_nsfw_tag({text, mentions, tags}, %{"sensitive" => flag})
     when flag in [true, "True", "true", "1"] do
  {text, mentions, [{"#nsfw", "nsfw"} | tags]}
end

defp maybe_add_nsfw_tag(result, _), do: result
|
|
|
|
|
2017-09-15 14:17:36 +02:00
|
|
|
# Reuses the context of the activity being replied to; otherwise mints a
# fresh context id.
def make_context(%Activity{data: %{"context" => context}}), do: context
def make_context(_), do: Utils.generate_context_id()
|
2017-09-15 14:17:36 +02:00
|
|
|
|
2019-02-27 00:32:26 +01:00
|
|
|
# Appends attachment links to the rendered text unless link generation is
# disabled via the no_links flag.
def maybe_add_attachments(parsed, _attachments, true = _no_links), do: parsed

def maybe_add_attachments({text, mentions, tags}, attachments, _no_links) do
  {add_attachments(text, attachments), mentions, tags}
end
|
2018-03-30 15:01:53 +02:00
|
|
|
|
2017-05-17 18:00:20 +02:00
|
|
|
# Renders each attachment as an HTML link (proxied through MediaProxy) and
# joins everything onto the text with <br>. Attachments without an href
# contribute an empty string.
def add_attachments(text, attachments) do
  rendered =
    Enum.map(attachments, fn
      %{"url" => [%{"href" => href} | _]} = attachment ->
        # Prefer the user-supplied name, falling back to the file name.
        name = attachment["name"] || URI.decode(Path.basename(href))
        "<a href=\"#{MediaProxy.url(href)}\" class='attachment'>#{shortname(name)}</a>"

      _ ->
        ""
    end)

  Enum.join([text | rendered], "<br>")
end
|
|
|
|
|
2019-02-27 00:32:26 +01:00
|
|
|
def format_input(text, format, options \\ [])

@doc """
Formatting text to plain text.
"""
def format_input(text, "text/plain", options) do
  # Escape first, then linkify; finally convert newlines to <br> so the
  # plain-text line structure survives in the rendered HTML.
  {html, mentions, tags} =
    text
    |> Formatter.html_escape("text/plain")
    |> Formatter.linkify(options)

  {String.replace(html, ~r/\r?\n/, "<br>"), mentions, tags}
end
|
|
|
|
|
2018-12-14 10:41:55 +01:00
|
|
|
@doc """
Formatting text to html.
"""
def format_input(text, "text/html", options) do
  escaped = Formatter.html_escape(text, "text/html")
  Formatter.linkify(escaped, options)
end
|
|
|
|
|
2018-12-14 10:41:55 +01:00
|
|
|
@doc """
Formatting text to markdown.
"""
def format_input(text, "text/markdown", options) do
  # Mentions are escaped before Earmark so markdown can't mangle them;
  # the trailing html_escape sanitizes whatever HTML Earmark produced.
  escaped = Formatter.mentions_escape(text, options)
  html = Earmark.as_html!(escaped)

  html
  |> Formatter.linkify(options)
  |> Formatter.html_escape("text/html")
end
|
|
|
|
|
2018-03-30 15:01:53 +02:00
|
|
|
# Assembles the AS2 Note object map for a new status. When replying,
# inReplyTo / inReplyToStatusId are filled in from the parent activity.
def make_note_data(
      actor,
      to,
      context,
      content_html,
      attachments,
      inReplyTo,
      tags,
      cw \\ nil,
      cc \\ []
    ) do
  # Tags arrive as {name, tag} tuples; only the AS2 tag part is embedded.
  tag_objects = tags |> Enum.map(fn {_, tag} -> tag end) |> Enum.uniq()

  object = %{
    "type" => "Note",
    "to" => to,
    "cc" => cc,
    "content" => content_html,
    "summary" => cw,
    "context" => context,
    "attachment" => attachments,
    "actor" => actor,
    "tag" => tag_objects
  }

  if inReplyTo do
    Map.merge(object, %{
      "inReplyTo" => inReplyTo.data["object"]["id"],
      "inReplyToStatusId" => inReplyTo.id
    })
  else
    object
  end
end
|
2017-06-19 23:12:37 +02:00
|
|
|
|
|
|
|
# Interprets a naive datetime as UTC and renders it in asctime format.
def format_naive_asctime(date) do
  date
  |> DateTime.from_naive!("Etc/UTC")
  |> format_asctime()
end
|
|
|
|
|
|
|
|
# Renders a datetime in Twitter's asctime style,
# e.g. "Fri Sep 15 14:17:36 +0000 2017".
def format_asctime(date) do
  Strftime.strftime!(date, "%a %b %d %H:%M:%S %z %Y")
end
|
|
|
|
|
2019-04-02 11:25:51 +02:00
|
|
|
# Parses an ISO 8601 string and renders it in asctime format; anything
# unparseable is logged and rendered as an empty string.
def date_to_asctime(date) when is_binary(date) do
  case DateTime.from_iso8601(date) do
    {:ok, parsed, _offset} ->
      format_asctime(parsed)

    _ ->
      Logger.warn("Date #{date} in wrong format, must be ISO 8601")
      ""
  end
end
|
2017-09-15 14:17:36 +02:00
|
|
|
|
2017-06-19 23:12:37 +02:00
|
|
|
# Non-binary input can never be a valid ISO 8601 date: warn and return "".
def date_to_asctime(date) do
  Logger.warn("Date #{date} in wrong format, must be ISO 8601")

  ""
end
|
2017-09-15 14:17:36 +02:00
|
|
|
|
2017-09-15 17:50:47 +02:00
|
|
|
# Renders a NaiveDateTime the way Mastodon's API expects: ISO 8601 with a
# fixed millisecond part and a "Z" suffix.
def to_masto_date(%NaiveDateTime{} = date) do
  iso = NaiveDateTime.to_iso8601(date)

  # Replace any existing fractional seconds (or append) with ".000Z".
  String.replace(iso, ~r/(\.\d+)?$/, ".000Z", global: false)
end
|
|
|
|
|
|
|
|
# Same as the NaiveDateTime clause but for arbitrary input: parses ISO 8601
# strings, and returns "" for anything unparseable. The rescue also covers
# non-binary input (from_iso8601! raises FunctionClauseError there), so it
# must stay a try/rescue rather than the non-bang variant.
def to_masto_date(date) do
  try do
    parsed = NaiveDateTime.from_iso8601!(date)

    String.replace(NaiveDateTime.to_iso8601(parsed), ~r/(\.\d+)?$/, ".000Z", global: false)
  rescue
    _ -> ""
  end
end
|
|
|
|
|
2017-09-15 14:17:36 +02:00
|
|
|
# Truncates an attachment name to at most 30 graphemes, appending an
# ellipsis when it was actually shortened.
#
# Fixes an off-by-one: the old `String.length(name) < 30` check combined
# with `String.slice(name, 0..30)` (an inclusive range = 31 graphemes)
# made 30- and 31-character names come back LONGER than the input,
# with the full name plus a trailing "…".
defp shortname(name) do
  if String.length(name) <= 30 do
    name
  else
    String.slice(name, 0, 30) <> "…"
  end
end
|
2018-05-11 13:32:59 +02:00
|
|
|
|
2018-05-21 23:17:34 +02:00
|
|
|
# Verifies the given plaintext password against the user's stored hash.
# The user is re-fetched so the current hash is checked, and only local
# users have a password hash to verify against.
def confirm_current_password(user, password) do
  db_user = User.get_by_id(user.id)

  case db_user do
    %User{local: true} ->
      if Pbkdf2.checkpw(password, db_user.password_hash),
        do: {:ok, db_user},
        else: {:error, "Invalid password."}

    _ ->
      {:error, "Invalid password."}
  end
end
|
2018-08-12 21:24:10 +02:00
|
|
|
|
2018-12-09 10:12:48 +01:00
|
|
|
# Collects every custom emoji used in the user's bio and display name and
# emits AS2 Emoji tag objects pointing at this instance's copies.
def emoji_from_profile(%{info: _info} = user) do
  emoji = Formatter.get_emoji(user.bio) ++ Formatter.get_emoji(user.name)

  Enum.map(emoji, fn {shortcode, url, _} ->
    %{
      "type" => "Emoji",
      "icon" => %{"type" => "Image", "url" => "#{Endpoint.url()}#{url}"},
      "name" => ":#{shortcode}:"
    }
  end)
end
|
2019-01-24 21:30:43 +01:00
|
|
|
|
|
|
|
# Appends an activity's "to" addressing to the notification recipients.
def maybe_notify_to_recipients(recipients, %Activity{data: %{"to" => to, "type" => _}}),
  do: recipients ++ to
|
|
|
|
|
|
|
|
# For Create activities, adds every user mentioned in the created object's
# tags to the notification recipients. Other activity types pass through.
def maybe_notify_mentioned_recipients(
      recipients,
      %Activity{data: %{"to" => _to, "type" => type} = data} = activity
    )
    when type == "Create" do
  object = Object.normalize(activity)

  # Prefer the normalized object; fall back to the embedded object map,
  # and finally to an empty map when neither is available.
  object_data =
    cond do
      !is_nil(object) -> object.data
      is_map(data["object"]) -> data["object"]
      true -> %{}
    end

  recipients ++ maybe_extract_mentions(object_data)
end

def maybe_notify_mentioned_recipients(recipients, _), do: recipients
|
|
|
|
|
2019-04-05 15:20:13 +02:00
|
|
|
# For Create activities, adds the actor's subscribers (those who can see
# the activity) to the notification recipients. Other types pass through.
def maybe_notify_subscribers(
      recipients,
      %Activity{data: %{"actor" => actor, "type" => type}} = activity
    )
    when type == "Create" do
  with %User{} = user <- User.get_cached_by_ap_id(actor) do
    subscriber_ids =
      user
      |> User.subscribers()
      |> Enum.filter(&Visibility.visible_for_user?(activity, &1))
      |> Enum.map(& &1.ap_id)

    recipients ++ subscriber_ids
  else
    # Without this clause, an unresolvable actor made the `with` return the
    # failed lookup result (e.g. nil) instead of the original list, silently
    # dropping every existing recipient.
    _ -> recipients
  end
end

def maybe_notify_subscribers(recipients, _), do: recipients
|
|
|
|
|
2019-01-24 21:30:43 +01:00
|
|
|
# Pulls the href of every Mention entry out of an object's tag list;
# non-map entries (e.g. plain hashtag strings) are skipped.
def maybe_extract_mentions(%{"tag" => tag}) do
  for entry <- tag, is_map(entry), entry["type"] == "Mention", do: entry["href"]
end

def maybe_extract_mentions(_), do: []
|
2019-02-20 17:51:25 +01:00
|
|
|
|
2019-02-27 00:32:26 +01:00
|
|
|
# Renders a report comment as plain-text HTML, enforcing the configured
# maximum length. A nil comment yields an empty formatted result.
def make_report_content_html(nil), do: {:ok, {nil, [], []}}

def make_report_content_html(comment) do
  # Use the module-wide `Config` alias (Pleroma.Config) for consistency
  # with every other call site in this module.
  max_size = Config.get([:instance, :max_report_comment_size], 1000)

  if String.length(comment) <= max_size do
    {:ok, format_input(comment, "text/plain")}
  else
    {:error, "Comment must be up to #{max_size} characters"}
  end
end
|
|
|
|
|
|
|
|
# When the report names specific statuses, loads the ones actually authored
# by the reported account; otherwise there is nothing to attach.
def get_report_statuses(%User{ap_id: actor}, %{"status_ids" => status_ids}) do
  {:ok, Activity.all_by_actor_and_id(actor, status_ids)}
end

def get_report_statuses(_, _), do: {:ok, nil}
|
2019-03-22 00:17:53 +01:00
|
|
|
|
|
|
|
# DEPRECATED mostly, context objects are now created at insertion time.
def context_to_conversation_id(context) do
  case Object.get_cached_by_ap_id(context) do
    %Object{id: id} ->
      id

    _ ->
      # No mapping yet: create one. A concurrent insert may beat us, in
      # which case we re-read the mapping that won.
      changeset = Object.context_mapping(context)

      case Repo.insert(changeset) do
        {:ok, %{id: id}} ->
          id

        # This should be solved by an upsert, but it seems ecto
        # has problems accessing the constraint inside the jsonb.
        {:error, _} ->
          Object.get_cached_by_ap_id(context).id
      end
  end
end
|
|
|
|
|
|
|
|
# Looks up the context URI for a conversation id (the database id of the
# context-mapping object).
def conversation_id_to_context(id) do
  case Repo.get(Object, id) do
    %Object{data: %{"id" => context}} -> context
    _ -> {:error, "No such conversation"}
  end
end
|
2017-05-17 18:00:20 +02:00
|
|
|
end
|