# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.ActivityPub.Utils do
  alias Ecto.Changeset
  alias Ecto.UUID
  alias Pleroma.Activity
  alias Pleroma.Notification
  alias Pleroma.Object
  alias Pleroma.Repo
  alias Pleroma.User
  alias Pleroma.Web
  alias Pleroma.Web.ActivityPub.Visibility
  alias Pleroma.Web.Endpoint
  alias Pleroma.Web.Router.Helpers

  import Ecto.Query

  require Logger

  @supported_object_types ["Article", "Note", "Video", "Page"]

  # Some implementations send the actor URI as the actor field, others send the entire actor object,
  # so figure out what the actor's URI is based on what we have.
  def get_ap_id(object) do
    case object do
      %{"id" => id} -> id
      id -> id
    end
  end

  def normalize_params(params) do
    Map.put(params, "actor", get_ap_id(params["actor"]))
  end
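
  # Example (illustrative; the example.com actor id is hypothetical): both a bare
  # actor URI and a full actor object normalize to the same "actor" value.
  #
  #     normalize_params(%{"actor" => "https://example.com/users/alice"})
  #     #=> %{"actor" => "https://example.com/users/alice"}
  #
  #     normalize_params(%{"actor" => %{"id" => "https://example.com/users/alice"}})
  #     #=> %{"actor" => "https://example.com/users/alice"}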

  def determine_explicit_mentions(%{"tag" => tag} = _object) when is_list(tag) do
    tag
    |> Enum.filter(fn x -> is_map(x) end)
    |> Enum.filter(fn x -> x["type"] == "Mention" end)
    |> Enum.map(fn x -> x["href"] end)
  end

  def determine_explicit_mentions(%{"tag" => tag} = object) when is_map(tag) do
    Map.put(object, "tag", [tag])
    |> determine_explicit_mentions()
  end

  def determine_explicit_mentions(_), do: []
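
  # Example (illustrative tag entries): only "Mention" tags contribute hrefs to the
  # result; a single tag given as a bare map is wrapped in a list first.
  #
  #     determine_explicit_mentions(%{
  #       "tag" => [
  #         %{"type" => "Mention", "href" => "https://example.com/users/bob"},
  #         %{"type" => "Hashtag", "href" => "https://example.com/tags/cats"}
  #       ]
  #     })
  #     #=> ["https://example.com/users/bob"]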

  defp recipient_in_collection(ap_id, coll) when is_binary(coll), do: ap_id == coll
  defp recipient_in_collection(ap_id, coll) when is_list(coll), do: ap_id in coll
  defp recipient_in_collection(_, _), do: false

  def recipient_in_message(ap_id, params) do
    cond do
      recipient_in_collection(ap_id, params["to"]) ->
        true

      recipient_in_collection(ap_id, params["cc"]) ->
        true

      recipient_in_collection(ap_id, params["bto"]) ->
        true

      recipient_in_collection(ap_id, params["bcc"]) ->
        true

      # if the message is unaddressed at all, then assume it is directly addressed
      # to the recipient
      !params["to"] && !params["cc"] && !params["bto"] && !params["bcc"] ->
        true

      true ->
        false
    end
  end
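
  # Example (illustrative addresses): the recipient may appear in any of the four
  # addressing fields, and a message with no addressing at all counts as a match.
  #
  #     recipient_in_message("https://example.com/users/bob", %{"to" => ["https://example.com/users/bob"]})
  #     #=> true
  #
  #     recipient_in_message("https://example.com/users/bob", %{"to" => ["https://example.com/users/carol"]})
  #     #=> false
  #
  #     recipient_in_message("https://example.com/users/bob", %{})
  #     #=> true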

  defp extract_list(target) when is_binary(target), do: [target]
  defp extract_list(lst) when is_list(lst), do: lst
  defp extract_list(_), do: []

  def maybe_splice_recipient(ap_id, params) do
    need_splice =
      !recipient_in_collection(ap_id, params["to"]) &&
        !recipient_in_collection(ap_id, params["cc"])

    cc_list = extract_list(params["cc"])

    if need_splice do
      params
      |> Map.put("cc", [ap_id | cc_list])
    else
      params
    end
  end
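
  # Example (illustrative addresses): the recipient is only spliced into "cc" when it
  # is missing from both "to" and "cc".
  #
  #     maybe_splice_recipient("https://example.com/users/bob", %{"to" => [], "cc" => []})
  #     #=> %{"to" => [], "cc" => ["https://example.com/users/bob"]}
  #
  #     maybe_splice_recipient("https://example.com/users/bob", %{"to" => ["https://example.com/users/bob"], "cc" => []})
  #     #=> %{"to" => ["https://example.com/users/bob"], "cc" => []}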

  def make_json_ld_header do
    %{
      "@context" => [
        "https://www.w3.org/ns/activitystreams",
        "#{Web.base_url()}/schemas/litepub-0.1.jsonld"
      ]
    }
  end

  def make_date do
    DateTime.utc_now() |> DateTime.to_iso8601()
  end

  def generate_activity_id do
    generate_id("activities")
  end

  def generate_context_id do
    generate_id("contexts")
  end

  def generate_object_id do
    Helpers.o_status_url(Endpoint, :object, UUID.generate())
  end

  def generate_id(type) do
    "#{Web.base_url()}/#{type}/#{UUID.generate()}"
  end

  def get_notified_from_object(%{"type" => type} = object) when type in @supported_object_types do
    fake_create_activity = %{
      "to" => object["to"],
      "cc" => object["cc"],
      "type" => "Create",
      "object" => object
    }

    Notification.get_notified_from_activity(%Activity{data: fake_create_activity}, false)
  end

  def get_notified_from_object(object) do
    Notification.get_notified_from_activity(%Activity{data: object}, false)
  end

  def create_context(context) do
    context = context || generate_id("contexts")
    changeset = Object.context_mapping(context)

    case Repo.insert(changeset) do
      {:ok, object} ->
        object

      # This should be solved by an upsert, but it seems ecto
      # has problems accessing the constraint inside the jsonb.
      {:error, _} ->
        Object.get_cached_by_ap_id(context)
    end
  end

  @doc """
  Enqueues an activity for federation if it's local
  """
  def maybe_federate(%Activity{local: true} = activity) do
    priority =
      case activity.data["type"] do
        "Delete" -> 10
        "Create" -> 1
        _ -> 5
      end

    Pleroma.Web.Federator.publish(activity, priority)
    :ok
  end

  def maybe_federate(_), do: :ok

  @doc """
  Adds an id and a published date if they aren't there,
  also adds them to an included object
  """
  def lazy_put_activity_defaults(map) do
    %{data: %{"id" => context}, id: context_id} = create_context(map["context"])

    map =
      map
      |> Map.put_new_lazy("id", &generate_activity_id/0)
      |> Map.put_new_lazy("published", &make_date/0)
      |> Map.put_new("context", context)
      |> Map.put_new("context_id", context_id)

    if is_map(map["object"]) do
      object = lazy_put_object_defaults(map["object"], map)
      %{map | "object" => object}
    else
      map
    end
  end
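
  # Sketch of the result shape (illustrative; `create_context/1` touches the database
  # and the generated ids depend on the instance's base URL, so exact values vary):
  #
  #     lazy_put_activity_defaults(%{"object" => %{"type" => "Note"}})
  #     #=> %{
  #     #     "id" => "<base_url>/activities/<uuid>",
  #     #     "published" => "<ISO 8601 timestamp>",
  #     #     "context" => "<base_url>/contexts/<uuid>",
  #     #     "context_id" => <database id of the context object>,
  #     #     "object" => %{"type" => "Note", "id" => ..., "published" => ..., "context" => ..., "context_id" => ...}
  #     #   }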

  @doc """
  Adds an id and published date if they aren't there.
  """
  def lazy_put_object_defaults(map, activity \\ %{}) do
    map
    |> Map.put_new_lazy("id", &generate_object_id/0)
    |> Map.put_new_lazy("published", &make_date/0)
    |> Map.put_new("context", activity["context"])
    |> Map.put_new("context_id", activity["context_id"])
  end

  @doc """
  Inserts a full object if it is contained in an activity.
  """
  def insert_full_object(%{"object" => %{"type" => type} = object_data})
      when is_map(object_data) and type in @supported_object_types do
    with {:ok, object} <- Object.create(object_data) do
      {:ok, object}
    end
  end

  def insert_full_object(_), do: {:ok, nil}

  def update_object_in_activities(%{data: %{"id" => id}} = object) do
    # TODO
    # Update activities that already had this. Could be done in a separate process.
    # Alternatively, just don't do this and fetch the current object each time. Most
    # could probably be taken from cache.
    relevant_activities = Activity.get_all_create_by_object_ap_id(id)

    Enum.map(relevant_activities, fn activity ->
      new_activity_data = activity.data |> Map.put("object", object.data)
      changeset = Changeset.change(activity, data: new_activity_data)
      Repo.update(changeset)
    end)
  end

  # Only federate the user's icon if it is not nil.
  # This prevents federating instance default avatars.
  def maybe_make_icon(user) do
    if User.avatar_url_ap(user) do
      %{
        "icon" => %{
          "type" => "Image",
          "url" => User.avatar_url_ap(user)
        }
      }
    else
      []
    end
  end
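
  # Example (illustrative; `user_with_avatar` and `user_without_avatar` stand for
  # hypothetical %User{} structs): a user with an uploaded avatar yields an "icon"
  # entry for their actor document, anyone else yields [] so the instance default
  # avatar is never federated.
  #
  #     maybe_make_icon(user_with_avatar)
  #     #=> %{"icon" => %{"type" => "Image", "url" => ...}}
  #
  #     maybe_make_icon(user_without_avatar)
  #     #=> []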

  #### Like-related helpers

  @doc """
  Returns an existing like if a user already liked an object
  """
  def get_existing_like(actor, %{data: %{"id" => id}}) do
    query =
      from(
        activity in Activity,
        where: fragment("(?)->>'actor' = ?", activity.data, ^actor),
        # this is to use the index
        where:
          fragment(
            "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
            activity.data,
            activity.data,
            ^id
          ),
        where: fragment("(?)->>'type' = 'Like'", activity.data)
      )

    Repo.one(query)
  end
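
  # The coalesce fragment above matches both shapes the "object" field can take, an
  # embedded object map (take its "id") or a bare id string, and keeps the lookup in
  # a form that an index on the same expression can serve (that is what the
  # "this is to use the index" comment refers to). Roughly the resulting WHERE
  # clause (illustrative SQL):
  #
  #     coalesce((data)->'object'->>'id', (data)->>'object') = 'https://example.com/objects/1'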

  @doc """
  Returns like activities targeting an object
  """
  def get_object_likes(%{data: %{"id" => id}}) do
    query =
      from(
        activity in Activity,
        # this is to use the index
        where:
          fragment(
            "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
            activity.data,
            activity.data,
            ^id
          ),
        where: fragment("(?)->>'type' = 'Like'", activity.data)
      )

    Repo.all(query)
  end

  def make_like_data(
        %User{ap_id: ap_id} = actor,
        %{data: %{"actor" => object_actor_id, "id" => id}} = object,
        activity_id
      ) do
    object_actor = User.get_cached_by_ap_id(object_actor_id)

    to =
      if Visibility.is_public?(object) do
        [actor.follower_address, object.data["actor"]]
      else
        [object.data["actor"]]
      end

    cc =
      (object.data["to"] ++ (object.data["cc"] || []))
      |> List.delete(actor.ap_id)
      |> List.delete(object_actor.follower_address)

    data = %{
      "type" => "Like",
      "actor" => ap_id,
      "object" => id,
      "to" => to,
      "cc" => cc,
      "context" => object.data["context"]
    }

    if activity_id, do: Map.put(data, "id", activity_id), else: data
  end
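
  # Addressing sketch (illustrative; `liker`, `public_note` and `private_note` are
  # hypothetical structs): a like on a public object is addressed to the liker's
  # followers and the object's author, with the object's original audience in "cc"
  # (minus the liker and the author's follower collection); for a non-public object
  # only the author goes in "to".
  #
  #     make_like_data(liker, public_note, nil)["to"]
  #     #=> [liker.follower_address, "https://example.com/users/author"]
  #
  #     make_like_data(liker, private_note, nil)["to"]
  #     #=> ["https://example.com/users/author"]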

  def update_element_in_object(property, element, object) do
    with new_data <-
           object.data
           |> Map.put("#{property}_count", length(element))
           |> Map.put("#{property}s", element),
         changeset <- Changeset.change(object, data: new_data),
         {:ok, object} <- Object.update_and_set_cache(changeset),
         _ <- update_object_in_activities(object) do
      {:ok, object}
    end
  end

  def update_likes_in_object(likes, object) do
    update_element_in_object("like", likes, object)
  end

  def add_like_to_object(%Activity{data: %{"actor" => actor}}, object) do
    likes = if is_list(object.data["likes"]), do: object.data["likes"], else: []

    with likes <- [actor | likes] |> Enum.uniq() do
      update_likes_in_object(likes, object)
    end
  end

  def remove_like_from_object(%Activity{data: %{"actor" => actor}}, object) do
    likes = if is_list(object.data["likes"]), do: object.data["likes"], else: []

    with likes <- likes |> List.delete(actor) do
      update_likes_in_object(likes, object)
    end
  end

  #### Follow-related helpers

  @doc """
  Updates a follow activity's state (for locked accounts).
  """
  def update_follow_state(
        %Activity{data: %{"actor" => actor, "object" => object, "state" => "pending"}} = activity,
        state
      ) do
    try do
      Ecto.Adapters.SQL.query!(
        Repo,
        "UPDATE activities SET data = jsonb_set(data, '{state}', $1) WHERE data->>'type' = 'Follow' AND data->>'actor' = $2 AND data->>'object' = $3 AND data->>'state' = 'pending'",
        [state, actor, object]
      )

      activity = Repo.get(Activity, activity.id)
      {:ok, activity}
    rescue
      e ->
        {:error, e}
    end
  end

  def update_follow_state(%Activity{} = activity, state) do
    with new_data <-
           activity.data
           |> Map.put("state", state),
         changeset <- Changeset.change(activity, data: new_data),
         {:ok, activity} <- Repo.update(changeset) do
      {:ok, activity}
    end
  end

  @doc """
  Makes follow activity data for the given follower and followed
  """
  def make_follow_data(
        %User{ap_id: follower_id},
        %User{ap_id: followed_id} = _followed,
        activity_id
      ) do
    data = %{
      "type" => "Follow",
      "actor" => follower_id,
      "to" => [followed_id],
      "cc" => ["https://www.w3.org/ns/activitystreams#Public"],
      "object" => followed_id,
      "state" => "pending"
    }

    data = if activity_id, do: Map.put(data, "id", activity_id), else: data

    data
  end
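
  # Example (illustrative; `follower` and `followed` are hypothetical %User{}
  # structs): the follow starts out in the "pending" state, and an "id" key is only
  # added when an activity_id is given.
  #
  #     make_follow_data(follower, followed, nil)
  #     #=> %{
  #     #     "type" => "Follow",
  #     #     "actor" => follower.ap_id,
  #     #     "to" => [followed.ap_id],
  #     #     "cc" => ["https://www.w3.org/ns/activitystreams#Public"],
  #     #     "object" => followed.ap_id,
  #     #     "state" => "pending"
  #     #   }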

  def fetch_latest_follow(%User{ap_id: follower_id}, %User{ap_id: followed_id}) do
    query =
      from(
        activity in Activity,
        where:
          fragment(
            "? ->> 'type' = 'Follow'",
            activity.data
          ),
        where: activity.actor == ^follower_id,
        where:
          fragment(
            "? @> ?",
            activity.data,
            ^%{object: followed_id}
          ),
        order_by: [desc: :id],
        limit: 1
      )

    Repo.one(query)
  end

  #### Announce-related helpers

  @doc """
  Returns an existing announce activity if the notice has already been announced
  """
  def get_existing_announce(actor, %{data: %{"id" => id}}) do
    query =
      from(
        activity in Activity,
        where: activity.actor == ^actor,
        # this is to use the index
        where:
          fragment(
            "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
            activity.data,
            activity.data,
            ^id
          ),
        where: fragment("(?)->>'type' = 'Announce'", activity.data)
      )

    Repo.one(query)
  end

  @doc """
  Make announce activity data for the given actor and object
  """
  # for relayed messages, we only want to send to subscribers
  def make_announce_data(
        %User{ap_id: ap_id} = user,
        %Object{data: %{"id" => id}} = object,
        activity_id,
        false
      ) do
    data = %{
      "type" => "Announce",
      "actor" => ap_id,
      "object" => id,
      "to" => [user.follower_address],
      "cc" => [],
      "context" => object.data["context"]
    }

    if activity_id, do: Map.put(data, "id", activity_id), else: data
  end

  def make_announce_data(
        %User{ap_id: ap_id} = user,
        %Object{data: %{"id" => id}} = object,
        activity_id,
        true
      ) do
    data = %{
      "type" => "Announce",
      "actor" => ap_id,
      "object" => id,
      "to" => [user.follower_address, object.data["actor"]],
      "cc" => ["https://www.w3.org/ns/activitystreams#Public"],
      "context" => object.data["context"]
    }

    if activity_id, do: Map.put(data, "id", activity_id), else: data
  end
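
  # The fourth argument is a public/visibility flag. With `true` the announce is
  # addressed publicly (follower collection plus the original author in "to", the
  # public collection in "cc"); with `false`, used for relayed messages where only
  # subscribers should be reached, it goes to the announcer's followers alone and
  # "cc" stays empty. Illustrative comparison (with hypothetical `user` and `object`):
  #
  #     make_announce_data(user, object, nil, true)["cc"]
  #     #=> ["https://www.w3.org/ns/activitystreams#Public"]
  #
  #     make_announce_data(user, object, nil, false)["cc"]
  #     #=> []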

  @doc """
  Make unannounce activity data for the given actor and object
  """
  def make_unannounce_data(
        %User{ap_id: ap_id} = user,
        %Activity{data: %{"context" => context}} = activity,
        activity_id
      ) do
    data = %{
      "type" => "Undo",
      "actor" => ap_id,
      "object" => activity.data,
      "to" => [user.follower_address, activity.data["actor"]],
      "cc" => ["https://www.w3.org/ns/activitystreams#Public"],
      "context" => context
    }

    if activity_id, do: Map.put(data, "id", activity_id), else: data
  end

  def make_unlike_data(
        %User{ap_id: ap_id} = user,
        %Activity{data: %{"context" => context}} = activity,
        activity_id
      ) do
    data = %{
      "type" => "Undo",
      "actor" => ap_id,
      "object" => activity.data,
      "to" => [user.follower_address, activity.data["actor"]],
      "cc" => ["https://www.w3.org/ns/activitystreams#Public"],
      "context" => context
    }

    if activity_id, do: Map.put(data, "id", activity_id), else: data
  end

  def add_announce_to_object(
        %Activity{
          data: %{"actor" => actor, "cc" => ["https://www.w3.org/ns/activitystreams#Public"]}
        },
        object
      ) do
    announcements =
      if is_list(object.data["announcements"]), do: object.data["announcements"], else: []

    with announcements <- [actor | announcements] |> Enum.uniq() do
      update_element_in_object("announcement", announcements, object)
    end
  end

  def add_announce_to_object(_, object), do: {:ok, object}

  def remove_announce_from_object(%Activity{data: %{"actor" => actor}}, object) do
    announcements =
      if is_list(object.data["announcements"]), do: object.data["announcements"], else: []

    with announcements <- announcements |> List.delete(actor) do
      update_element_in_object("announcement", announcements, object)
    end
  end

  #### Unfollow-related helpers

  def make_unfollow_data(follower, followed, follow_activity, activity_id) do
    data = %{
      "type" => "Undo",
      "actor" => follower.ap_id,
      "to" => [followed.ap_id],
      "object" => follow_activity.data
    }

    if activity_id, do: Map.put(data, "id", activity_id), else: data
  end

  #### Block-related helpers

  def fetch_latest_block(%User{ap_id: blocker_id}, %User{ap_id: blocked_id}) do
    query =
      from(
        activity in Activity,
        where:
          fragment(
            "? ->> 'type' = 'Block'",
            activity.data
          ),
        where: activity.actor == ^blocker_id,
        where:
          fragment(
            "? @> ?",
            activity.data,
            ^%{object: blocked_id}
          ),
        order_by: [desc: :id],
        limit: 1
      )

    Repo.one(query)
  end

  def make_block_data(blocker, blocked, activity_id) do
    data = %{
      "type" => "Block",
      "actor" => blocker.ap_id,
      "to" => [blocked.ap_id],
      "object" => blocked.ap_id
    }

    if activity_id, do: Map.put(data, "id", activity_id), else: data
  end

  def make_unblock_data(blocker, blocked, block_activity, activity_id) do
    data = %{
      "type" => "Undo",
      "actor" => blocker.ap_id,
      "to" => [blocked.ap_id],
      "object" => block_activity.data
    }

    if activity_id, do: Map.put(data, "id", activity_id), else: data
  end

  #### Create-related helpers

  def make_create_data(params, additional) do
    published = params.published || make_date()

    %{
      "type" => "Create",
      "to" => params.to |> Enum.uniq(),
      "actor" => params.actor.ap_id,
      "object" => params.object,
      "published" => published,
      "context" => params.context
    }
    |> Map.merge(additional)
  end

  #### Flag-related helpers

  def make_flag_data(params, additional) do
    status_ap_ids =
      Enum.map(params.statuses || [], fn
        %Activity{} = act -> act.data["id"]
        act when is_map(act) -> act["id"]
        act when is_binary(act) -> act
      end)

    object = [params.account.ap_id] ++ status_ap_ids

    %{
      "type" => "Flag",
      "actor" => params.actor.ap_id,
      "content" => params.content,
      "object" => object,
      "context" => params.context
    }
    |> Map.merge(additional)
  end
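
  # Example (illustrative params; `reporter` and `account` are hypothetical %User{}
  # structs): reported statuses may be passed as %Activity{} structs, as activity
  # maps, or as bare ap_ids, and the flag's "object" always lists the reported
  # account first, followed by the status ids.
  #
  #     make_flag_data(
  #       %{
  #         actor: reporter,
  #         account: account,
  #         statuses: ["https://example.com/activities/1"],
  #         content: "spam",
  #         context: "https://example.com/contexts/abc"
  #       },
  #       %{}
  #     )["object"]
  #     #=> [account.ap_id, "https://example.com/activities/1"]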

  @doc """
  Fetches the OrderedCollection/OrderedCollectionPage from `from`, limiting the number of pages
  fetched after the first one to `pages_left` pages.
  If the collection has fewer pages than that, it returns whatever was there.
  """
  def fetch_ordered_collection(from, pages_left, acc \\ []) do
    with {:ok, response} <- Tesla.get(from),
         {:ok, collection} <- Poison.decode(response.body) do
      case collection["type"] do
        "OrderedCollection" ->
          # If we've encountered the OrderedCollection and not the page,
          # just call the same function on the page address
          fetch_ordered_collection(collection["first"], pages_left)

        "OrderedCollectionPage" ->
          if pages_left > 0 do
            # We are still allowed to fetch more pages
            if Map.has_key?(collection, "next") do
              # There are still more pages, go deeper saving what we have into the accumulator
              fetch_ordered_collection(
                collection["next"],
                pages_left - 1,
                acc ++ collection["orderedItems"]
              )
            else
              # No more pages left, just return whatever we already have
              acc ++ collection["orderedItems"]
            end
          else
            # Got the number of pages needed, add this page's items and stop
            acc ++ collection["orderedItems"]
          end

        _ ->
          {:error, "Not an OrderedCollection or OrderedCollectionPage"}
      end
    end
  end
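
  # Usage sketch (illustrative URL): fetch the first page of a remote ordered
  # collection plus at most two follow-up pages, accumulating their "orderedItems".
  #
  #     fetch_ordered_collection("https://example.com/users/alice/outbox", 2)
  #     #=> [%{"type" => "Create", ...}, ...]
  #
  # On success the accumulated item list is returned directly; if the HTTP request or
  # the JSON decoding fails, the `with` falls through and returns the error value
  # from Tesla or Poison instead.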
end