# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.ActivityPub.Utils do
  alias Ecto.Changeset
  alias Ecto.UUID
  alias Pleroma.Activity
  alias Pleroma.Notification
  alias Pleroma.Object
  alias Pleroma.Repo
  alias Pleroma.User
  alias Pleroma.Web
  alias Pleroma.Web.ActivityPub.Visibility
  alias Pleroma.Web.Endpoint
  alias Pleroma.Web.Router.Helpers

  import Ecto.Query

  require Logger

  @supported_object_types ["Article", "Note", "Video", "Page"]

  # Some implementations send the actor URI as the actor field, others send the entire actor object,
  # so figure out what the actor's URI is based on what we have.
  def get_ap_id(object) do
    case object do
      %{"id" => id} -> id
      id -> id
    end
  end

  def normalize_params(params) do
    Map.put(params, "actor", get_ap_id(params["actor"]))
  end
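
  # Example (illustrative sketch; the actor id below is a hypothetical value):
  #
  #     normalize_params(%{"actor" => %{"id" => "https://example.com/users/alice"}, "type" => "Create"})
  #     #=> %{"actor" => "https://example.com/users/alice", "type" => "Create"}
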
  def determine_explicit_mentions(%{"tag" => tag} = _object) when is_list(tag) do
    tag
    |> Enum.filter(fn x -> is_map(x) end)
    |> Enum.filter(fn x -> x["type"] == "Mention" end)
    |> Enum.map(fn x -> x["href"] end)
  end

  def determine_explicit_mentions(%{"tag" => tag} = object) when is_map(tag) do
    Map.put(object, "tag", [tag])
    |> determine_explicit_mentions()
  end

  def determine_explicit_mentions(_), do: []
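
  # Example (illustrative sketch; values are hypothetical): only map entries with type
  # "Mention" contribute their "href"; hashtags and bare strings are ignored.
  #
  #     determine_explicit_mentions(%{
  #       "tag" => [
  #         %{"type" => "Mention", "href" => "https://example.com/users/bob"},
  #         %{"type" => "Hashtag", "name" => "#pleroma"},
  #         "not-a-map"
  #       ]
  #     })
  #     #=> ["https://example.com/users/bob"]
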
  defp recipient_in_collection(ap_id, coll) when is_binary(coll), do: ap_id == coll
  defp recipient_in_collection(ap_id, coll) when is_list(coll), do: ap_id in coll
  defp recipient_in_collection(_, _), do: false

  def recipient_in_message(ap_id, params) do
    cond do
      recipient_in_collection(ap_id, params["to"]) ->
        true

      recipient_in_collection(ap_id, params["cc"]) ->
        true

      recipient_in_collection(ap_id, params["bto"]) ->
        true

      recipient_in_collection(ap_id, params["bcc"]) ->
        true

      # if the message is unaddressed at all, then assume it is directly addressed
      # to the recipient
      !params["to"] && !params["cc"] && !params["bto"] && !params["bcc"] ->
        true

      true ->
        false
    end
  end
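
  # Examples (illustrative sketch; addresses are hypothetical): an ap_id counts as a
  # recipient if it appears in any addressing field, or if the message has no addressing at all.
  #
  #     recipient_in_message("https://example.com/users/carol", %{"to" => ["https://example.com/users/carol"]})
  #     #=> true
  #
  #     recipient_in_message("https://example.com/users/carol", %{"to" => ["https://example.com/users/dave"]})
  #     #=> false
  #
  #     recipient_in_message("https://example.com/users/carol", %{})
  #     #=> true
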
  defp extract_list(target) when is_binary(target), do: [target]
  defp extract_list(lst) when is_list(lst), do: lst
  defp extract_list(_), do: []

  def maybe_splice_recipient(ap_id, params) do
    need_splice =
      !recipient_in_collection(ap_id, params["to"]) &&
        !recipient_in_collection(ap_id, params["cc"])

    cc_list = extract_list(params["cc"])

    if need_splice do
      params
      |> Map.put("cc", [ap_id | cc_list])
    else
      params
    end
  end
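
  # Example (illustrative sketch; addresses are hypothetical): if the ap_id appears in
  # neither "to" nor "cc", it is spliced into "cc".
  #
  #     maybe_splice_recipient("https://example.com/users/eve", %{"to" => [], "cc" => ["https://example.com/users/frank"]})
  #     #=> %{"to" => [], "cc" => ["https://example.com/users/eve", "https://example.com/users/frank"]}
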
  def make_json_ld_header do
    %{
      "@context" => [
        "https://www.w3.org/ns/activitystreams",
        "#{Web.base_url()}/schemas/litepub-0.1.jsonld",
        %{
          "@language" => "und"
        }
      ]
    }
  end
  def make_date do
    DateTime.utc_now() |> DateTime.to_iso8601()
  end

  def generate_activity_id do
    generate_id("activities")
  end

  def generate_context_id do
    generate_id("contexts")
  end

  def generate_object_id do
    Helpers.o_status_url(Endpoint, :object, UUID.generate())
  end

  def generate_id(type) do
    "#{Web.base_url()}/#{type}/#{UUID.generate()}"
  end
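
  # Example (illustrative sketch; the base URL and UUID are hypothetical): generated ids
  # are URLs under the instance's base URL.
  #
  #     generate_id("activities")
  #     #=> "https://example.com/activities/9c6d9f4f-..." (random UUID, value varies)
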
  def get_notified_from_object(%{"type" => type} = object) when type in @supported_object_types do
    fake_create_activity = %{
      "to" => object["to"],
      "cc" => object["cc"],
      "type" => "Create",
      "object" => object
    }

    Notification.get_notified_from_activity(%Activity{data: fake_create_activity}, false)
  end

  def get_notified_from_object(object) do
    Notification.get_notified_from_activity(%Activity{data: object}, false)
  end

  def create_context(context) do
    context = context || generate_id("contexts")
    changeset = Object.context_mapping(context)

    case Repo.insert(changeset) do
      {:ok, object} ->
        object

      # This should be solved by an upsert, but it seems ecto
      # has problems accessing the constraint inside the jsonb.
      {:error, _} ->
        Object.get_cached_by_ap_id(context)
    end
  end

  @doc """
  Enqueues an activity for federation if it's local
  """
  def maybe_federate(%Activity{local: true} = activity) do
    priority =
      case activity.data["type"] do
        "Delete" -> 10
        "Create" -> 1
        _ -> 5
      end

    Pleroma.Web.Federator.publish(activity, priority)
    :ok
  end

  def maybe_federate(_), do: :ok

  @doc """
  Adds an id and a published date if they aren't there,
  also adds it to an included object
  """
  def lazy_put_activity_defaults(map, fake \\ false) do
    map =
      unless fake do
        %{data: %{"id" => context}, id: context_id} = create_context(map["context"])

        map
        |> Map.put_new_lazy("id", &generate_activity_id/0)
        |> Map.put_new_lazy("published", &make_date/0)
        |> Map.put_new("context", context)
        |> Map.put_new("context_id", context_id)
      else
        map
        |> Map.put_new("id", "pleroma:fakeid")
        |> Map.put_new_lazy("published", &make_date/0)
        |> Map.put_new("context", "pleroma:fakecontext")
        |> Map.put_new("context_id", -1)
      end

    if is_map(map["object"]) do
      object = lazy_put_object_defaults(map["object"], map, fake)
      %{map | "object" => object}
    else
      map
    end
  end
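
  # Example (illustrative sketch; input values are hypothetical): with `fake` set to true,
  # no context row is created and placeholder values are used instead.
  #
  #     lazy_put_activity_defaults(%{"type" => "Create", "object" => %{"type" => "Note"}}, true)
  #     #=> %{
  #     #     "type" => "Create",
  #     #     "id" => "pleroma:fakeid",
  #     #     "context" => "pleroma:fakecontext",
  #     #     "context_id" => -1,
  #     #     "published" => "...",
  #     #     "object" => %{"type" => "Note", "id" => "pleroma:fake_object_id", "fake" => true, ...}
  #     #   }
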

  @doc """
  Adds an id and published date if they aren't there.
  """
  def lazy_put_object_defaults(map, activity \\ %{}, fake)

  def lazy_put_object_defaults(map, activity, true = _fake) do
    map
    |> Map.put_new_lazy("published", &make_date/0)
    |> Map.put_new("id", "pleroma:fake_object_id")
    |> Map.put_new("context", activity["context"])
    |> Map.put_new("fake", true)
    |> Map.put_new("context_id", activity["context_id"])
  end

  def lazy_put_object_defaults(map, activity, _fake) do
    map
    |> Map.put_new_lazy("id", &generate_object_id/0)
    |> Map.put_new_lazy("published", &make_date/0)
    |> Map.put_new("context", activity["context"])
    |> Map.put_new("context_id", activity["context_id"])
  end

  @doc """
  Inserts a full object if it is contained in an activity.
  """
  def insert_full_object(%{"object" => %{"type" => type} = object_data})
      when is_map(object_data) and type in @supported_object_types do
    with {:ok, object} <- Object.create(object_data) do
      {:ok, object}
    end
  end

  def insert_full_object(_), do: {:ok, nil}

  def update_object_in_activities(%{data: %{"id" => id}} = object) do
    # TODO
    # Update activities that already had this. Could be done in a separate process.
    # Alternatively, just don't do this and fetch the current object each time. Most
    # could probably be taken from cache.
    relevant_activities = Activity.get_all_create_by_object_ap_id(id)

    Enum.map(relevant_activities, fn activity ->
      new_activity_data = activity.data |> Map.put("object", object.data)
      changeset = Changeset.change(activity, data: new_activity_data)
      Repo.update(changeset)
    end)
  end

  #### Like-related helpers

  @doc """
  Returns an existing like if a user already liked an object
  """
  def get_existing_like(actor, %{data: %{"id" => id}}) do
    query =
      from(
        activity in Activity,
        where: fragment("(?)->>'actor' = ?", activity.data, ^actor),
        # this is to use the index
        where:
          fragment(
            "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
            activity.data,
            activity.data,
            ^id
          ),
        where: fragment("(?)->>'type' = 'Like'", activity.data)
      )

    Repo.one(query)
  end

  @doc """
  Returns like activities targeting an object
  """
  def get_object_likes(%{data: %{"id" => id}}) do
    query =
      from(
        activity in Activity,
        # this is to use the index
        where:
          fragment(
            "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
            activity.data,
            activity.data,
            ^id
          ),
        where: fragment("(?)->>'type' = 'Like'", activity.data)
      )

    Repo.all(query)
  end

  def make_like_data(
        %User{ap_id: ap_id} = actor,
        %{data: %{"actor" => object_actor_id, "id" => id}} = object,
        activity_id
      ) do
    object_actor = User.get_cached_by_ap_id(object_actor_id)

    to =
      if Visibility.is_public?(object) do
        [actor.follower_address, object.data["actor"]]
      else
        [object.data["actor"]]
      end

    cc =
      (object.data["to"] ++ (object.data["cc"] || []))
      |> List.delete(actor.ap_id)
      |> List.delete(object_actor.follower_address)

    data = %{
      "type" => "Like",
      "actor" => ap_id,
      "object" => id,
      "to" => to,
      "cc" => cc,
      "context" => object.data["context"]
    }

    if activity_id, do: Map.put(data, "id", activity_id), else: data
  end

  def update_element_in_object(property, element, object) do
    with new_data <-
           object.data
           |> Map.put("#{property}_count", length(element))
           |> Map.put("#{property}s", element),
         changeset <- Changeset.change(object, data: new_data),
         {:ok, object} <- Object.update_and_set_cache(changeset),
         _ <- update_object_in_activities(object) do
      {:ok, object}
    end
  end

  def update_likes_in_object(likes, object) do
    update_element_in_object("like", likes, object)
  end

  def add_like_to_object(%Activity{data: %{"actor" => actor}}, object) do
    likes = if is_list(object.data["likes"]), do: object.data["likes"], else: []

    with likes <- [actor | likes] |> Enum.uniq() do
      update_likes_in_object(likes, object)
    end
  end

  def remove_like_from_object(%Activity{data: %{"actor" => actor}}, object) do
    likes = if is_list(object.data["likes"]), do: object.data["likes"], else: []

    with likes <- likes |> List.delete(actor) do
      update_likes_in_object(likes, object)
    end
  end

  #### Follow-related helpers

  @doc """
  Updates a follow activity's state (for locked accounts).
  """
  def update_follow_state(
        %Activity{data: %{"actor" => actor, "object" => object, "state" => "pending"}} = activity,
        state
      ) do
    try do
      Ecto.Adapters.SQL.query!(
        Repo,
        "UPDATE activities SET data = jsonb_set(data, '{state}', $1) WHERE data->>'type' = 'Follow' AND data->>'actor' = $2 AND data->>'object' = $3 AND data->>'state' = 'pending'",
        [state, actor, object]
      )

      activity = Activity.get_by_id(activity.id)
      {:ok, activity}
    rescue
      e ->
        {:error, e}
    end
  end

  def update_follow_state(%Activity{} = activity, state) do
    with new_data <-
           activity.data
           |> Map.put("state", state),
         changeset <- Changeset.change(activity, data: new_data),
         {:ok, activity} <- Repo.update(changeset) do
      {:ok, activity}
    end
  end

  @doc """
  Makes follow activity data for the given follower and followed
  """
  def make_follow_data(
        %User{ap_id: follower_id},
        %User{ap_id: followed_id} = _followed,
        activity_id
      ) do
    data = %{
      "type" => "Follow",
      "actor" => follower_id,
      "to" => [followed_id],
      "cc" => ["https://www.w3.org/ns/activitystreams#Public"],
      "object" => followed_id,
      "state" => "pending"
    }

    data = if activity_id, do: Map.put(data, "id", activity_id), else: data

    data
  end
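
  # Example (illustrative sketch; the ap_ids are hypothetical): no "id" key is added
  # when activity_id is nil.
  #
  #     make_follow_data(%User{ap_id: "https://a.example/users/ann"}, %User{ap_id: "https://b.example/users/ben"}, nil)
  #     #=> %{
  #     #     "type" => "Follow",
  #     #     "actor" => "https://a.example/users/ann",
  #     #     "to" => ["https://b.example/users/ben"],
  #     #     "cc" => ["https://www.w3.org/ns/activitystreams#Public"],
  #     #     "object" => "https://b.example/users/ben",
  #     #     "state" => "pending"
  #     #   }
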

  def fetch_latest_follow(%User{ap_id: follower_id}, %User{ap_id: followed_id}) do
    query =
      from(
        activity in Activity,
        where:
          fragment(
            "? ->> 'type' = 'Follow'",
            activity.data
          ),
        where: activity.actor == ^follower_id,
        # this is to use the index
        where:
          fragment(
            "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
            activity.data,
            activity.data,
            ^followed_id
          ),
        order_by: [fragment("? desc nulls last", activity.id)],
        limit: 1
      )

    Repo.one(query)
  end

  #### Announce-related helpers

  @doc """
  Returns an existing announce activity if the notice has already been announced
  """
  def get_existing_announce(actor, %{data: %{"id" => id}}) do
    query =
      from(
        activity in Activity,
        where: activity.actor == ^actor,
        # this is to use the index
        where:
          fragment(
            "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
            activity.data,
            activity.data,
            ^id
          ),
        where: fragment("(?)->>'type' = 'Announce'", activity.data)
      )

    Repo.one(query)
  end

  @doc """
  Make announce activity data for the given actor and object
  """
  # for relayed messages, we only want to send to subscribers
  def make_announce_data(
        %User{ap_id: ap_id} = user,
        %Object{data: %{"id" => id}} = object,
        activity_id,
        false
      ) do
    data = %{
      "type" => "Announce",
      "actor" => ap_id,
      "object" => id,
      "to" => [user.follower_address],
      "cc" => [],
      "context" => object.data["context"]
    }

    if activity_id, do: Map.put(data, "id", activity_id), else: data
  end

  def make_announce_data(
        %User{ap_id: ap_id} = user,
        %Object{data: %{"id" => id}} = object,
        activity_id,
        true
      ) do
    data = %{
      "type" => "Announce",
      "actor" => ap_id,
      "object" => id,
      "to" => [user.follower_address, object.data["actor"]],
      "cc" => ["https://www.w3.org/ns/activitystreams#Public"],
      "context" => object.data["context"]
    }

    if activity_id, do: Map.put(data, "id", activity_id), else: data
  end

  @doc """
  Make unannounce activity data for the given actor and object
  """
  def make_unannounce_data(
        %User{ap_id: ap_id} = user,
        %Activity{data: %{"context" => context}} = activity,
        activity_id
      ) do
    data = %{
      "type" => "Undo",
      "actor" => ap_id,
      "object" => activity.data,
      "to" => [user.follower_address, activity.data["actor"]],
      "cc" => ["https://www.w3.org/ns/activitystreams#Public"],
      "context" => context
    }

    if activity_id, do: Map.put(data, "id", activity_id), else: data
  end

  def make_unlike_data(
        %User{ap_id: ap_id} = user,
        %Activity{data: %{"context" => context}} = activity,
        activity_id
      ) do
    data = %{
      "type" => "Undo",
      "actor" => ap_id,
      "object" => activity.data,
      "to" => [user.follower_address, activity.data["actor"]],
      "cc" => ["https://www.w3.org/ns/activitystreams#Public"],
      "context" => context
    }

    if activity_id, do: Map.put(data, "id", activity_id), else: data
  end

  def add_announce_to_object(
        %Activity{
          data: %{"actor" => actor, "cc" => ["https://www.w3.org/ns/activitystreams#Public"]}
        },
        object
      ) do
    announcements =
      if is_list(object.data["announcements"]), do: object.data["announcements"], else: []

    with announcements <- [actor | announcements] |> Enum.uniq() do
      update_element_in_object("announcement", announcements, object)
    end
  end

  def add_announce_to_object(_, object), do: {:ok, object}

  def remove_announce_from_object(%Activity{data: %{"actor" => actor}}, object) do
    announcements =
      if is_list(object.data["announcements"]), do: object.data["announcements"], else: []

    with announcements <- announcements |> List.delete(actor) do
      update_element_in_object("announcement", announcements, object)
    end
  end

  #### Unfollow-related helpers

  def make_unfollow_data(follower, followed, follow_activity, activity_id) do
    data = %{
      "type" => "Undo",
      "actor" => follower.ap_id,
      "to" => [followed.ap_id],
      "object" => follow_activity.data
    }

    if activity_id, do: Map.put(data, "id", activity_id), else: data
  end

  #### Block-related helpers

  def fetch_latest_block(%User{ap_id: blocker_id}, %User{ap_id: blocked_id}) do
    query =
      from(
        activity in Activity,
        where:
          fragment(
            "? ->> 'type' = 'Block'",
            activity.data
          ),
        where: activity.actor == ^blocker_id,
        # this is to use the index
        where:
          fragment(
            "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
            activity.data,
            activity.data,
            ^blocked_id
          ),
        order_by: [fragment("? desc nulls last", activity.id)],
        limit: 1
      )

    Repo.one(query)
  end

  def make_block_data(blocker, blocked, activity_id) do
    data = %{
      "type" => "Block",
      "actor" => blocker.ap_id,
      "to" => [blocked.ap_id],
      "object" => blocked.ap_id
    }

    if activity_id, do: Map.put(data, "id", activity_id), else: data
  end

  def make_unblock_data(blocker, blocked, block_activity, activity_id) do
    data = %{
      "type" => "Undo",
      "actor" => blocker.ap_id,
      "to" => [blocked.ap_id],
      "object" => block_activity.data
    }

    if activity_id, do: Map.put(data, "id", activity_id), else: data
  end
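
  # Example (illustrative sketch; the activity id is a hypothetical value):
  #
  #     make_block_data(blocker, blocked, "https://a.example/activities/1")
  #     #=> %{
  #     #     "type" => "Block",
  #     #     "actor" => blocker.ap_id,
  #     #     "to" => [blocked.ap_id],
  #     #     "object" => blocked.ap_id,
  #     #     "id" => "https://a.example/activities/1"
  #     #   }
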

  #### Create-related helpers

  def make_create_data(params, additional) do
    published = params.published || make_date()

    %{
      "type" => "Create",
      "to" => params.to |> Enum.uniq(),
      "actor" => params.actor.ap_id,
      "object" => params.object,
      "published" => published,
      "context" => params.context
    }
    |> Map.merge(additional)
  end

  #### Flag-related helpers

  def make_flag_data(params, additional) do
    status_ap_ids =
      Enum.map(params.statuses || [], fn
        %Activity{} = act -> act.data["id"]
        act when is_map(act) -> act["id"]
        act when is_binary(act) -> act
      end)

    object = [params.account.ap_id] ++ status_ap_ids

    %{
      "type" => "Flag",
      "actor" => params.actor.ap_id,
      "content" => params.content,
      "object" => object,
      "context" => params.context
    }
    |> Map.merge(additional)
  end

  @doc """
  Fetches the OrderedCollection/OrderedCollectionPage from `from`, limiting the amount of pages fetched after
  the first one to `pages_left` pages.
  If the amount of pages is higher than the collection has, it returns whatever was there.
  """
  def fetch_ordered_collection(from, pages_left, acc \\ []) do
    with {:ok, response} <- Tesla.get(from),
         {:ok, collection} <- Poison.decode(response.body) do
      case collection["type"] do
        "OrderedCollection" ->
          # If we've encountered the OrderedCollection and not the page,
          # just call the same function on the page address
          fetch_ordered_collection(collection["first"], pages_left)

        "OrderedCollectionPage" ->
          if pages_left > 0 do
            # There are still more pages
            if Map.has_key?(collection, "next") do
              # There are still more pages, go deeper saving what we have into the accumulator
              fetch_ordered_collection(
                collection["next"],
                pages_left - 1,
                acc ++ collection["orderedItems"]
              )
            else
              # No more pages left, just return whatever we already have
              acc ++ collection["orderedItems"]
            end
          else
            # Got the amount of pages needed, add them all to the accumulator
            acc ++ collection["orderedItems"]
          end

        _ ->
          {:error, "Not an OrderedCollection or OrderedCollectionPage"}
      end
    end
  end
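
  # Example (illustrative sketch; the URL is hypothetical): fetch an outbox-style collection
  # and at most one page after the first one. The result is the concatenated "orderedItems",
  # or an {:error, _} tuple if the endpoint does not return an ordered collection.
  #
  #     fetch_ordered_collection("https://example.com/users/grace/outbox", 1)
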
end