# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.Utils do
  alias Ecto.Changeset
  alias Ecto.UUID
  alias Pleroma.Activity
  alias Pleroma.Notification
  alias Pleroma.Object
  alias Pleroma.Repo
  alias Pleroma.User
  alias Pleroma.Web
  alias Pleroma.Web.ActivityPub.Visibility
  alias Pleroma.Web.Endpoint
  alias Pleroma.Web.Router.Helpers

  import Ecto.Query

  require Logger

  @supported_object_types ["Article", "Note", "Video", "Page"]
  @supported_report_states ~w(open closed resolved)
  @valid_visibilities ~w(public unlisted private direct)

  # Some implementations send the actor URI as the actor field, others send the entire actor object,
  # so figure out what the actor's URI is based on what we have.
  def get_ap_id(object) do
    case object do
      %{"id" => id} -> id
      id -> id
    end
  end

  def normalize_params(params) do
    Map.put(params, "actor", get_ap_id(params["actor"]))
  end
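
  # Illustrative usage (not part of the original module; the URL is made up): whether the
  # "actor" field holds a bare URI or a full actor object, `normalize_params/1` reduces it
  # to the URI via `get_ap_id/1`.
  #
  #   get_ap_id("https://example.social/users/alice")
  #   #=> "https://example.social/users/alice"
  #
  #   normalize_params(%{"actor" => %{"id" => "https://example.social/users/alice"}})
  #   #=> %{"actor" => "https://example.social/users/alice"}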

  def determine_explicit_mentions(%{"tag" => tag} = _object) when is_list(tag) do
    tag
    |> Enum.filter(fn x -> is_map(x) end)
    |> Enum.filter(fn x -> x["type"] == "Mention" end)
    |> Enum.map(fn x -> x["href"] end)
  end

  def determine_explicit_mentions(%{"tag" => tag} = object) when is_map(tag) do
    Map.put(object, "tag", [tag])
    |> determine_explicit_mentions()
  end

  def determine_explicit_mentions(_), do: []
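
  # Illustrative usage (not from the original source; URLs are made up): only tags of type
  # "Mention" contribute their "href", and a single tag map is wrapped into a list first.
  #
  #   determine_explicit_mentions(%{
  #     "tag" => [
  #       %{"type" => "Mention", "href" => "https://example.social/users/bob"},
  #       %{"type" => "Hashtag", "href" => "https://example.social/tags/ap"}
  #     ]
  #   })
  #   #=> ["https://example.social/users/bob"]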

  defp recipient_in_collection(ap_id, coll) when is_binary(coll), do: ap_id == coll
  defp recipient_in_collection(ap_id, coll) when is_list(coll), do: ap_id in coll
  defp recipient_in_collection(_, _), do: false

  def recipient_in_message(%User{ap_id: ap_id} = recipient, %User{} = actor, params) do
    cond do
      recipient_in_collection(ap_id, params["to"]) ->
        true

      recipient_in_collection(ap_id, params["cc"]) ->
        true

      recipient_in_collection(ap_id, params["bto"]) ->
        true

      recipient_in_collection(ap_id, params["bcc"]) ->
        true

      # if the message is unaddressed at all, then assume it is directly addressed
      # to the recipient
      !params["to"] && !params["cc"] && !params["bto"] && !params["bcc"] ->
        true

      # if the message is sent from somebody the user is following, then assume it
      # is addressed to the recipient
      User.following?(recipient, actor) ->
        true

      true ->
        false
    end
  end
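
  # Illustrative usage (not from the original source; the actors are hypothetical): a message
  # counts as addressed to the recipient if their ap_id appears in to/cc/bto/bcc, if the
  # message carries no addressing at all, or if the recipient follows the actor.
  #
  #   recipient_in_message(
  #     %User{ap_id: "https://example.social/users/alice"},
  #     %User{ap_id: "https://example.social/users/bob"},
  #     %{"to" => ["https://example.social/users/alice"]}
  #   )
  #   #=> true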

  defp extract_list(target) when is_binary(target), do: [target]
  defp extract_list(lst) when is_list(lst), do: lst
  defp extract_list(_), do: []

  def maybe_splice_recipient(ap_id, params) do
    need_splice =
      !recipient_in_collection(ap_id, params["to"]) &&
        !recipient_in_collection(ap_id, params["cc"])

    cc_list = extract_list(params["cc"])

    if need_splice do
      params
      |> Map.put("cc", [ap_id | cc_list])
    else
      params
    end
  end

  def make_json_ld_header do
    %{
      "@context" => [
        "https://www.w3.org/ns/activitystreams",
        "#{Web.base_url()}/schemas/litepub-0.1.jsonld",
        %{
          "@language" => "und"
        }
      ]
    }
  end
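
  # For reference (illustrative, assuming `Web.base_url()` returns "https://example.social"),
  # the generated header looks roughly like:
  #
  #   %{
  #     "@context" => [
  #       "https://www.w3.org/ns/activitystreams",
  #       "https://example.social/schemas/litepub-0.1.jsonld",
  #       %{"@language" => "und"}
  #     ]
  #   }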

  def make_date do
    DateTime.utc_now() |> DateTime.to_iso8601()
  end

  def generate_activity_id do
    generate_id("activities")
  end

  def generate_context_id do
    generate_id("contexts")
  end

  def generate_object_id do
    Helpers.o_status_url(Endpoint, :object, UUID.generate())
  end

  def generate_id(type) do
    "#{Web.base_url()}/#{type}/#{UUID.generate()}"
  end

  def get_notified_from_object(%{"type" => type} = object) when type in @supported_object_types do
    fake_create_activity = %{
      "to" => object["to"],
      "cc" => object["cc"],
      "type" => "Create",
      "object" => object
    }

    Notification.get_notified_from_activity(%Activity{data: fake_create_activity}, false)
  end

  def get_notified_from_object(object) do
    Notification.get_notified_from_activity(%Activity{data: object}, false)
  end

  def create_context(context) do
    context = context || generate_id("contexts")
    changeset = Object.context_mapping(context)

    case Repo.insert(changeset) do
      {:ok, object} ->
        object

      # This should be solved by an upsert, but it seems ecto
      # has problems accessing the constraint inside the jsonb.
      {:error, _} ->
        Object.get_cached_by_ap_id(context)
    end
  end

  @doc """
  Enqueues an activity for federation if it's local
  """
  def maybe_federate(%Activity{local: true} = activity) do
    priority =
      case activity.data["type"] do
        "Delete" -> 10
        "Create" -> 1
        _ -> 5
      end

    Pleroma.Web.Federator.publish(activity, priority)
    :ok
  end

  def maybe_federate(_), do: :ok

  @doc """
  Adds an id and a published date if they aren't there;
  also adds defaults to the included object, if present.
  """
  def lazy_put_activity_defaults(map, fake \\ false) do
    map =
      unless fake do
        %{data: %{"id" => context}, id: context_id} = create_context(map["context"])

        map
        |> Map.put_new_lazy("id", &generate_activity_id/0)
        |> Map.put_new_lazy("published", &make_date/0)
        |> Map.put_new("context", context)
        |> Map.put_new("context_id", context_id)
      else
        map
        |> Map.put_new("id", "pleroma:fakeid")
        |> Map.put_new_lazy("published", &make_date/0)
        |> Map.put_new("context", "pleroma:fakecontext")
        |> Map.put_new("context_id", -1)
      end

    if is_map(map["object"]) do
      object = lazy_put_object_defaults(map["object"], map, fake)
      %{map | "object" => object}
    else
      map
    end
  end
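
  # Illustrative sketch (not from the original source): for a real activity the missing fields
  # are filled with a fresh id, the current date and a context from `create_context/1`, while
  # `fake: true` stubs them out so nothing touches the database.
  #
  #   lazy_put_activity_defaults(%{}, true)
  #   #=> %{
  #   #     "id" => "pleroma:fakeid",
  #   #     "published" => "...",          # current UTC timestamp
  #   #     "context" => "pleroma:fakecontext",
  #   #     "context_id" => -1
  #   #   }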
@doc """
Adds an id and published date if they aren ' t there.
"""
2019-04-01 11:16:51 +02:00
def lazy_put_object_defaults ( map , activity \\ %{ } , fake )
def lazy_put_object_defaults ( map , activity , true = _fake ) do
map
|> Map . put_new_lazy ( " published " , & make_date / 0 )
2019-04-01 11:25:53 +02:00
|> Map . put_new ( " id " , " pleroma:fake_object_id " )
2019-04-01 11:16:51 +02:00
|> Map . put_new ( " context " , activity [ " context " ] )
2019-04-01 11:25:53 +02:00
|> Map . put_new ( " fake " , true )
2019-04-01 11:16:51 +02:00
|> Map . put_new ( " context_id " , activity [ " context_id " ] )
end
def lazy_put_object_defaults ( map , activity , _fake ) do
2017-05-16 15:31:11 +02:00
map
|> Map . put_new_lazy ( " id " , & generate_object_id / 0 )
|> Map . put_new_lazy ( " published " , & make_date / 0 )
2018-04-02 15:17:09 +02:00
|> Map . put_new ( " context " , activity [ " context " ] )
|> Map . put_new ( " context_id " , activity [ " context_id " ] )
2017-05-16 15:31:11 +02:00
end
@doc """
Inserts a full object if it is contained in an activity .
"""
2018-11-25 22:44:03 +01:00
def insert_full_object ( %{ " object " = > %{ " type " = > type } = object_data } = map )
2018-11-09 14:39:44 +01:00
when is_map ( object_data ) and type in @supported_object_types do
2018-11-25 22:44:03 +01:00
with { :ok , object } <- Object . create ( object_data ) do
map =
map
|> Map . put ( " object " , object . data [ " id " ] )
2019-04-17 11:22:32 +02:00
{ :ok , map , object }
2017-05-16 15:31:11 +02:00
end
end
2018-03-30 15:01:53 +02:00
2019-04-17 11:22:32 +02:00
def insert_full_object ( map ) , do : { :ok , map , nil }
2017-05-16 15:31:11 +02:00

  def update_object_in_activities(%{data: %{"id" => id}} = object) do
    # TODO
    # Update activities that already had this. Could be done in a separate process.
    # Alternatively, just don't do this and fetch the current object each time. Most
    # could probably be taken from cache.
    relevant_activities = Activity.get_all_create_by_object_ap_id(id)

    Enum.map(relevant_activities, fn activity ->
      new_activity_data = activity.data |> Map.put("object", object.data)
      changeset = Changeset.change(activity, data: new_activity_data)
      Repo.update(changeset)
    end)
  end

  #### Like-related helpers

  @doc """
  Returns an existing like if a user already liked an object
  """
  def get_existing_like(actor, %{data: %{"id" => id}}) do
    query =
      from(
        activity in Activity,
        where: fragment("(?)->>'actor' = ?", activity.data, ^actor),
        # this is to use the index
        where:
          fragment(
            "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
            activity.data,
            activity.data,
            ^id
          ),
        where: fragment("(?)->>'type' = 'Like'", activity.data)
      )

    Repo.one(query)
  end

  @doc """
  Returns like activities targeting an object
  """
  def get_object_likes(%{data: %{"id" => id}}) do
    query =
      from(
        activity in Activity,
        # this is to use the index
        where:
          fragment(
            "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
            activity.data,
            activity.data,
            ^id
          ),
        where: fragment("(?)->>'type' = 'Like'", activity.data)
      )

    Repo.all(query)
  end

  def make_like_data(
        %User{ap_id: ap_id} = actor,
        %{data: %{"actor" => object_actor_id, "id" => id}} = object,
        activity_id
      ) do
    object_actor = User.get_cached_by_ap_id(object_actor_id)

    to =
      if Visibility.is_public?(object) do
        [actor.follower_address, object.data["actor"]]
      else
        [object.data["actor"]]
      end

    cc =
      (object.data["to"] ++ (object.data["cc"] || []))
      |> List.delete(actor.ap_id)
      |> List.delete(object_actor.follower_address)

    data = %{
      "type" => "Like",
      "actor" => ap_id,
      "object" => id,
      "to" => to,
      "cc" => cc,
      "context" => object.data["context"]
    }

    if activity_id, do: Map.put(data, "id", activity_id), else: data
  end
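
  # Illustrative shape of the result (not from the original source; addresses are hypothetical,
  # assuming the liked object was addressed to the public collection and the author's followers):
  # for a public object the like goes to the liker's followers and the object's author, with the
  # object's original audience (minus the liker and the author's followers) kept in "cc".
  #
  #   %{
  #     "type" => "Like",
  #     "actor" => "https://example.social/users/alice",
  #     "object" => "https://remote.example/objects/1",
  #     "to" => ["https://example.social/users/alice/followers",
  #              "https://remote.example/users/bob"],
  #     "cc" => ["https://www.w3.org/ns/activitystreams#Public"],
  #     "context" => "https://remote.example/contexts/1"
  #   }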

  def update_element_in_object(property, element, object) do
    with new_data <-
           object.data
           |> Map.put("#{property}_count", length(element))
           |> Map.put("#{property}s", element),
         changeset <- Changeset.change(object, data: new_data),
         {:ok, object} <- Object.update_and_set_cache(changeset),
         _ <- update_object_in_activities(object) do
      {:ok, object}
    end
  end

  def update_likes_in_object(likes, object) do
    update_element_in_object("like", likes, object)
  end

  def add_like_to_object(%Activity{data: %{"actor" => actor}}, object) do
    likes = if is_list(object.data["likes"]), do: object.data["likes"], else: []

    with likes <- [actor | likes] |> Enum.uniq() do
      update_likes_in_object(likes, object)
    end
  end

  def remove_like_from_object(%Activity{data: %{"actor" => actor}}, object) do
    likes = if is_list(object.data["likes"]), do: object.data["likes"], else: []

    with likes <- likes |> List.delete(actor) do
      update_likes_in_object(likes, object)
    end
  end

  #### Follow-related helpers

  @doc """
  Updates a follow activity's state (for locked accounts).
  """
  def update_follow_state(
        %Activity{data: %{"actor" => actor, "object" => object, "state" => "pending"}} = activity,
        state
      ) do
    try do
      Ecto.Adapters.SQL.query!(
        Repo,
        "UPDATE activities SET data = jsonb_set(data, '{state}', $1) WHERE data->>'type' = 'Follow' AND data->>'actor' = $2 AND data->>'object' = $3 AND data->>'state' = 'pending'",
        [state, actor, object]
      )

      activity = Activity.get_by_id(activity.id)
      {:ok, activity}
    rescue
      e ->
        {:error, e}
    end
  end

  def update_follow_state(%Activity{} = activity, state) do
    with new_data <-
           activity.data
           |> Map.put("state", state),
         changeset <- Changeset.change(activity, data: new_data),
         {:ok, activity} <- Repo.update(changeset) do
      {:ok, activity}
    end
  end

  @doc """
  Makes follow activity data for the given follower and followed
  """
  def make_follow_data(
        %User{ap_id: follower_id},
        %User{ap_id: followed_id} = _followed,
        activity_id
      ) do
    data = %{
      "type" => "Follow",
      "actor" => follower_id,
      "to" => [followed_id],
      "cc" => ["https://www.w3.org/ns/activitystreams#Public"],
      "object" => followed_id,
      "state" => "pending"
    }

    data = if activity_id, do: Map.put(data, "id", activity_id), else: data

    data
  end
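
  # Illustrative usage (not from the original source; the ids are hypothetical):
  #
  #   make_follow_data(
  #     %User{ap_id: "https://example.social/users/alice"},
  #     %User{ap_id: "https://remote.example/users/bob"},
  #     nil
  #   )
  #   #=> %{
  #   #     "type" => "Follow",
  #   #     "actor" => "https://example.social/users/alice",
  #   #     "to" => ["https://remote.example/users/bob"],
  #   #     "cc" => ["https://www.w3.org/ns/activitystreams#Public"],
  #   #     "object" => "https://remote.example/users/bob",
  #   #     "state" => "pending"
  #   #   }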

  def fetch_latest_follow(%User{ap_id: follower_id}, %User{ap_id: followed_id}) do
    query =
      from(
        activity in Activity,
        where:
          fragment(
            "? ->> 'type' = 'Follow'",
            activity.data
          ),
        where: activity.actor == ^follower_id,
        # this is to use the index
        where:
          fragment(
            "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
            activity.data,
            activity.data,
            ^followed_id
          ),
        order_by: [fragment("? desc nulls last", activity.id)],
        limit: 1
      )

    Repo.one(query)
  end

  #### Announce-related helpers

  @doc """
  Returns an existing announce activity if the notice has already been announced
  """
  def get_existing_announce(actor, %{data: %{"id" => id}}) do
    query =
      from(
        activity in Activity,
        where: activity.actor == ^actor,
        # this is to use the index
        where:
          fragment(
            "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
            activity.data,
            activity.data,
            ^id
          ),
        where: fragment("(?)->>'type' = 'Announce'", activity.data)
      )

    Repo.one(query)
  end

  @doc """
  Make announce activity data for the given actor and object
  """
  # for relayed messages, we only want to send to subscribers
  def make_announce_data(
        %User{ap_id: ap_id} = user,
        %Object{data: %{"id" => id}} = object,
        activity_id,
        false
      ) do
    data = %{
      "type" => "Announce",
      "actor" => ap_id,
      "object" => id,
      "to" => [user.follower_address],
      "cc" => [],
      "context" => object.data["context"]
    }

    if activity_id, do: Map.put(data, "id", activity_id), else: data
  end

  def make_announce_data(
        %User{ap_id: ap_id} = user,
        %Object{data: %{"id" => id}} = object,
        activity_id,
        true
      ) do
    data = %{
      "type" => "Announce",
      "actor" => ap_id,
      "object" => id,
      "to" => [user.follower_address, object.data["actor"]],
      "cc" => ["https://www.w3.org/ns/activitystreams#Public"],
      "context" => object.data["context"]
    }

    if activity_id, do: Map.put(data, "id", activity_id), else: data
  end
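
  # Illustrative difference between the two clauses (not from the original source; addresses
  # are hypothetical): with the last argument `false` (relayed messages) the announce is
  # addressed only to the user's followers with an empty "cc"; with `true` it also addresses
  # the object's author and the public collection, e.g.
  #
  #   "to" => ["https://example.social/users/alice/followers",
  #            "https://remote.example/users/bob"],
  #   "cc" => ["https://www.w3.org/ns/activitystreams#Public"]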

  @doc """
  Make unannounce activity data for the given actor and object
  """
  def make_unannounce_data(
        %User{ap_id: ap_id} = user,
        %Activity{data: %{"context" => context}} = activity,
        activity_id
      ) do
    data = %{
      "type" => "Undo",
      "actor" => ap_id,
      "object" => activity.data,
      "to" => [user.follower_address, activity.data["actor"]],
      "cc" => ["https://www.w3.org/ns/activitystreams#Public"],
      "context" => context
    }

    if activity_id, do: Map.put(data, "id", activity_id), else: data
  end

  def make_unlike_data(
        %User{ap_id: ap_id} = user,
        %Activity{data: %{"context" => context}} = activity,
        activity_id
      ) do
    data = %{
      "type" => "Undo",
      "actor" => ap_id,
      "object" => activity.data,
      "to" => [user.follower_address, activity.data["actor"]],
      "cc" => ["https://www.w3.org/ns/activitystreams#Public"],
      "context" => context
    }

    if activity_id, do: Map.put(data, "id", activity_id), else: data
  end

  def add_announce_to_object(
        %Activity{
          data: %{"actor" => actor, "cc" => ["https://www.w3.org/ns/activitystreams#Public"]}
        },
        object
      ) do
    announcements =
      if is_list(object.data["announcements"]), do: object.data["announcements"], else: []

    with announcements <- [actor | announcements] |> Enum.uniq() do
      update_element_in_object("announcement", announcements, object)
    end
  end

  def add_announce_to_object(_, object), do: {:ok, object}

  def remove_announce_from_object(%Activity{data: %{"actor" => actor}}, object) do
    announcements =
      if is_list(object.data["announcements"]), do: object.data["announcements"], else: []

    with announcements <- announcements |> List.delete(actor) do
      update_element_in_object("announcement", announcements, object)
    end
  end

  #### Unfollow-related helpers

  def make_unfollow_data(follower, followed, follow_activity, activity_id) do
    data = %{
      "type" => "Undo",
      "actor" => follower.ap_id,
      "to" => [followed.ap_id],
      "object" => follow_activity.data
    }

    if activity_id, do: Map.put(data, "id", activity_id), else: data
  end

  #### Block-related helpers

  def fetch_latest_block(%User{ap_id: blocker_id}, %User{ap_id: blocked_id}) do
    query =
      from(
        activity in Activity,
        where:
          fragment(
            "? ->> 'type' = 'Block'",
            activity.data
          ),
        where: activity.actor == ^blocker_id,
        # this is to use the index
        where:
          fragment(
            "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
            activity.data,
            activity.data,
            ^blocked_id
          ),
        order_by: [fragment("? desc nulls last", activity.id)],
        limit: 1
      )

    Repo.one(query)
  end

  def make_block_data(blocker, blocked, activity_id) do
    data = %{
      "type" => "Block",
      "actor" => blocker.ap_id,
      "to" => [blocked.ap_id],
      "object" => blocked.ap_id
    }

    if activity_id, do: Map.put(data, "id", activity_id), else: data
  end

  def make_unblock_data(blocker, blocked, block_activity, activity_id) do
    data = %{
      "type" => "Undo",
      "actor" => blocker.ap_id,
      "to" => [blocked.ap_id],
      "object" => block_activity.data
    }

    if activity_id, do: Map.put(data, "id", activity_id), else: data
  end

  #### Create-related helpers

  def make_create_data(params, additional) do
    published = params.published || make_date()

    %{
      "type" => "Create",
      "to" => params.to |> Enum.uniq(),
      "actor" => params.actor.ap_id,
      "object" => params.object,
      "published" => published,
      "context" => params.context
    }
    |> Map.merge(additional)
  end
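
  # Illustrative usage (not from the original source; the params map mirrors what callers in
  # this codebase pass in, but the concrete values are made up):
  #
  #   make_create_data(
  #     %{
  #       to: ["https://www.w3.org/ns/activitystreams#Public"],
  #       actor: %User{ap_id: "https://example.social/users/alice"},
  #       published: nil,
  #       context: "https://example.social/contexts/1",
  #       object: %{"type" => "Note", "content" => "hello"}
  #     },
  #     %{"cc" => ["https://example.social/users/alice/followers"]}
  #   )
  #   # builds a "Create" activity map with those fields plus a "published" timestamp,
  #   # merged with the `additional` map.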

  #### Flag-related helpers

  def make_flag_data(params, additional) do
    status_ap_ids =
      Enum.map(params.statuses || [], fn
        %Activity{} = act -> act.data["id"]
        act when is_map(act) -> act["id"]
        act when is_binary(act) -> act
      end)

    object = [params.account.ap_id] ++ status_ap_ids

    %{
      "type" => "Flag",
      "actor" => params.actor.ap_id,
      "content" => params.content,
      "object" => object,
      "context" => params.context,
      "state" => "open"
    }
    |> Map.merge(additional)
  end

  @doc """
  Fetches the OrderedCollection/OrderedCollectionPage from `from`, limiting the amount of
  pages fetched after the first one to `pages_left` pages.
  If the amount of pages is higher than the collection has, it returns whatever was there.
  """
  def fetch_ordered_collection(from, pages_left, acc \\ []) do
    with {:ok, response} <- Tesla.get(from),
         {:ok, collection} <- Jason.decode(response.body) do
      case collection["type"] do
        "OrderedCollection" ->
          # If we've encountered the OrderedCollection and not the page,
          # just call the same function on the page address
          fetch_ordered_collection(collection["first"], pages_left)

        "OrderedCollectionPage" ->
          if pages_left > 0 do
            # There are still more pages
            if Map.has_key?(collection, "next") do
              # There are still more pages, go deeper saving what we have into the accumulator
              fetch_ordered_collection(
                collection["next"],
                pages_left - 1,
                acc ++ collection["orderedItems"]
              )
            else
              # No more pages left, just return whatever we already have
              acc ++ collection["orderedItems"]
            end
          else
            # Got the amount of pages needed, add them all to the accumulator
            acc ++ collection["orderedItems"]
          end

        _ ->
          {:error, "Not an OrderedCollection or OrderedCollectionPage"}
      end
    end
  end
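
  # Illustrative usage (not from the original source; the URL is hypothetical): fetch the first
  # page of a remote collection plus at most two follow-up pages, gathering "orderedItems".
  #
  #   fetch_ordered_collection("https://remote.example/users/bob/outbox", 2)
  #   #=> [%{"type" => "Create", ...}, ...]
  #   # or an {:error, _} tuple if the fetch, the JSON decoding, or the type check fails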

  #### Report-related helpers

  def update_report_state(%Activity{} = activity, state) when state in @supported_report_states do
    with new_data <- Map.put(activity.data, "state", state),
         changeset <- Changeset.change(activity, data: new_data),
         {:ok, activity} <- Repo.update(changeset) do
      {:ok, activity}
    end
  end

  def update_report_state(_, _), do: {:error, "Unsupported state"}

  def update_activity_visibility(activity, visibility) when visibility in @valid_visibilities do
    [to, cc, recipients] =
      activity
      |> get_updated_targets(visibility)
      |> Enum.map(&Enum.uniq/1)

    object_data =
      activity.object.data
      |> Map.put("to", to)
      |> Map.put("cc", cc)

    {:ok, object} =
      activity.object
      |> Object.change(%{data: object_data})
      |> Object.update_and_set_cache()

    activity_data =
      activity.data
      |> Map.put("to", to)
      |> Map.put("cc", cc)

    activity
    |> Map.put(:object, object)
    |> Activity.change(%{data: activity_data, recipients: recipients})
    |> Repo.update()
  end

  def update_activity_visibility(_, _), do: {:error, "Unsupported visibility"}

  defp get_updated_targets(
         %Activity{data: %{"to" => to} = data, recipients: recipients},
         visibility
       ) do
    cc = Map.get(data, "cc", [])
    follower_address = User.get_cached_by_ap_id(data["actor"]).follower_address
    public = "https://www.w3.org/ns/activitystreams#Public"

    case visibility do
      "public" ->
        to = [public | List.delete(to, follower_address)]
        cc = [follower_address | List.delete(cc, public)]
        recipients = [public | recipients]
        [to, cc, recipients]

      "private" ->
        to = [follower_address | List.delete(to, public)]
        cc = List.delete(cc, public)
        recipients = List.delete(recipients, public)
        [to, cc, recipients]

      "unlisted" ->
        to = [follower_address | List.delete(to, public)]
        cc = [public | List.delete(cc, follower_address)]
        recipients = recipients ++ [follower_address, public]
        [to, cc, recipients]

      _ ->
        [to, cc, recipients]
    end
  end
end