2018-12-23 13:04:54 -07:00
|
|
|
# Pleroma: A lightweight social networking server
|
2020-03-01 22:08:45 -07:00
|
|
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
2018-12-23 13:04:54 -07:00
|
|
|
# SPDX-License-Identifier: AGPL-3.0-only
|
|
|
|
|
2017-09-15 06:17:36 -06:00
|
|
|
defmodule Pleroma.Web.CommonAPI.Utils do
|
2019-07-10 03:25:58 -06:00
|
|
|
import Pleroma.Web.Gettext
|
2019-09-24 03:10:54 -06:00
|
|
|
import Pleroma.Web.ControllerHelper, only: [truthy_param?: 1]
|
2019-07-10 03:25:58 -06:00
|
|
|
|
2018-12-11 05:31:52 -07:00
|
|
|
alias Calendar.Strftime
|
2019-02-09 08:16:26 -07:00
|
|
|
alias Pleroma.Activity
|
2019-03-04 19:52:23 -07:00
|
|
|
alias Pleroma.Config
|
2019-08-02 07:05:27 -06:00
|
|
|
alias Pleroma.Conversation.Participation
|
2019-08-29 13:01:37 -06:00
|
|
|
alias Pleroma.Emoji
|
2019-02-09 08:16:26 -07:00
|
|
|
alias Pleroma.Formatter
|
|
|
|
alias Pleroma.Object
|
2019-07-14 10:48:42 -06:00
|
|
|
alias Pleroma.Plugs.AuthenticationPlug
|
2019-02-09 08:16:26 -07:00
|
|
|
alias Pleroma.Repo
|
2018-12-11 05:31:52 -07:00
|
|
|
alias Pleroma.User
|
2017-05-17 10:00:20 -06:00
|
|
|
alias Pleroma.Web.ActivityPub.Utils
|
2019-04-07 08:11:29 -06:00
|
|
|
alias Pleroma.Web.ActivityPub.Visibility
|
2018-08-12 13:24:10 -06:00
|
|
|
alias Pleroma.Web.Endpoint
|
2018-10-29 11:26:15 -06:00
|
|
|
alias Pleroma.Web.MediaProxy
|
2017-05-17 10:00:20 -06:00
|
|
|
|
2019-04-02 03:25:51 -06:00
|
|
|
require Logger
|
2019-07-28 20:43:19 -06:00
|
|
|
require Pleroma.Constants
|
2017-05-17 10:00:20 -06:00
|
|
|
|
2017-09-15 06:17:36 -06:00
|
|
|
# This is a hack for twidere.
|
|
|
|
  # Resolves `id` either as a FlakeId (database id) or as an ActivityPub
  # object id, then normalizes the result to the enclosing "Create"
  # activity when possible. Returns nil when nothing matches.
  def get_by_id_or_ap_id(id) do
    # Only valid FlakeIds can be direct database ids; anything else (or a
    # miss) is retried as an object AP id.
    activity =
      with true <- FlakeId.flake_id?(id),
           %Activity{} = activity <- Activity.get_by_id_with_object(id) do
        activity
      else
        _ -> Activity.get_create_by_object_ap_id_with_object(id)
      end

    # `activity &&` short-circuits to nil when nothing was found; for
    # non-"Create" activities, resolve their object to its "Create".
    activity &&
      if activity.data["type"] == "Create" do
        activity
      else
        Activity.get_create_by_object_ap_id_with_object(activity.data["object"])
      end
  end
|
|
|
|
|
2019-08-05 09:37:05 -06:00
|
|
|
def attachments_from_ids(%{"media_ids" => ids, "descriptions" => desc} = _) do
|
|
|
|
attachments_from_ids_descs(ids, desc)
|
|
|
|
end
|
|
|
|
|
|
|
|
def attachments_from_ids(%{"media_ids" => ids} = _) do
|
|
|
|
attachments_from_ids_no_descs(ids)
|
2019-01-04 09:27:46 -07:00
|
|
|
end
|
|
|
|
|
2019-08-05 09:37:05 -06:00
|
|
|
def attachments_from_ids(_), do: []
|
|
|
|
|
|
|
|
def attachments_from_ids_no_descs([]), do: []
|
|
|
|
|
2019-01-04 09:27:46 -07:00
|
|
|
def attachments_from_ids_no_descs(ids) do
|
2019-08-05 09:37:05 -06:00
|
|
|
Enum.map(ids, fn media_id ->
|
|
|
|
case Repo.get(Object, media_id) do
|
|
|
|
%Object{data: data} = _ -> data
|
|
|
|
_ -> nil
|
|
|
|
end
|
2017-05-17 10:00:20 -06:00
|
|
|
end)
|
2019-08-05 09:37:05 -06:00
|
|
|
|> Enum.filter(& &1)
|
2017-05-17 10:00:20 -06:00
|
|
|
end
|
|
|
|
|
2019-08-05 09:37:05 -06:00
|
|
|
def attachments_from_ids_descs([], _), do: []
|
|
|
|
|
2019-01-04 09:27:46 -07:00
|
|
|
def attachments_from_ids_descs(ids, descs_str) do
|
2019-01-04 08:35:41 -07:00
|
|
|
{_, descs} = Jason.decode(descs_str)
|
|
|
|
|
2019-08-05 09:37:05 -06:00
|
|
|
Enum.map(ids, fn media_id ->
|
|
|
|
case Repo.get(Object, media_id) do
|
|
|
|
%Object{data: data} = _ ->
|
|
|
|
Map.put(data, "name", descs[media_id])
|
|
|
|
|
|
|
|
_ ->
|
|
|
|
nil
|
|
|
|
end
|
2017-05-17 10:00:20 -06:00
|
|
|
end)
|
2019-08-05 09:37:05 -06:00
|
|
|
|> Enum.filter(& &1)
|
2017-05-17 10:00:20 -06:00
|
|
|
end
|
|
|
|
|
2019-08-02 07:05:27 -06:00
|
|
|
  # NOTE(review): the visibility argument can also be a `{:list, id}` tuple
  # (see the final clause), which this spec does not cover — confirm before
  # tightening it.
  @spec get_to_and_cc(
          User.t(),
          list(String.t()),
          Activity.t() | nil,
          String.t(),
          Participation.t() | nil
        ) :: {list(String.t()), list(String.t())}

  # Conversation (direct-conversation) replies: only the participants of
  # the conversation are addressed; "cc" stays empty.
  def get_to_and_cc(_, _, _, _, %Participation{} = participation) do
    participation = Repo.preload(participation, :recipients)
    {Enum.map(participation.recipients, & &1.ap_id), []}
  end
|
|
|
|
|
|
|
|
def get_to_and_cc(user, mentioned_users, inReplyTo, "public", _) do
|
2019-07-28 20:43:19 -06:00
|
|
|
to = [Pleroma.Constants.as_public() | mentioned_users]
|
2018-11-08 12:17:01 -07:00
|
|
|
cc = [user.follower_address]
|
2018-03-30 07:01:53 -06:00
|
|
|
|
2017-09-15 06:17:36 -06:00
|
|
|
if inReplyTo do
|
2018-11-08 12:17:01 -07:00
|
|
|
{Enum.uniq([inReplyTo.data["actor"] | to]), cc}
|
2017-08-28 11:17:38 -06:00
|
|
|
else
|
2018-02-18 06:45:08 -07:00
|
|
|
{to, cc}
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-08-02 07:05:27 -06:00
|
|
|
def get_to_and_cc(user, mentioned_users, inReplyTo, "unlisted", _) do
|
2018-11-08 12:17:01 -07:00
|
|
|
to = [user.follower_address | mentioned_users]
|
2019-07-28 20:43:19 -06:00
|
|
|
cc = [Pleroma.Constants.as_public()]
|
2018-11-08 12:17:01 -07:00
|
|
|
|
|
|
|
if inReplyTo do
|
|
|
|
{Enum.uniq([inReplyTo.data["actor"] | to]), cc}
|
|
|
|
else
|
|
|
|
{to, cc}
|
|
|
|
end
|
2018-02-18 06:45:08 -07:00
|
|
|
end
|
|
|
|
|
2019-08-02 07:05:27 -06:00
|
|
|
def get_to_and_cc(user, mentioned_users, inReplyTo, "private", _) do
|
|
|
|
{to, cc} = get_to_and_cc(user, mentioned_users, inReplyTo, "direct", nil)
|
2018-02-18 06:45:08 -07:00
|
|
|
{[user.follower_address | to], cc}
|
|
|
|
end
|
|
|
|
|
2019-08-02 07:05:27 -06:00
|
|
|
def get_to_and_cc(_user, mentioned_users, inReplyTo, "direct", _) do
|
2018-02-18 06:45:08 -07:00
|
|
|
if inReplyTo do
|
|
|
|
{Enum.uniq([inReplyTo.data["actor"] | mentioned_users]), []}
|
|
|
|
else
|
|
|
|
{mentioned_users, []}
|
2017-08-28 11:17:38 -06:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-08-02 07:05:27 -06:00
|
|
|
def get_to_and_cc(_user, mentions, _inReplyTo, {:list, _}, _), do: {mentions, []}
|
2019-06-05 04:55:00 -06:00
|
|
|
|
2019-06-03 10:17:08 -06:00
|
|
|
def get_addressed_users(_, to) when is_list(to) do
|
|
|
|
User.get_ap_ids_by_nicknames(to)
|
|
|
|
end
|
|
|
|
|
|
|
|
def get_addressed_users(mentioned_users, _), do: mentioned_users
|
2019-05-01 03:11:17 -06:00
|
|
|
|
2019-07-15 01:00:29 -06:00
|
|
|
  # For list-addressed posts: bcc's the list's AP id and tags both the
  # activity and its object with "listMessage" so recipients can tell the
  # post belongs to a list. An unknown list id (or a list not visible to
  # `user`) leaves the params untouched.
  def maybe_add_list_data(activity_params, user, {:list, list_id}) do
    case Pleroma.List.get(list_id, user) do
      %Pleroma.List{} = list ->
        activity_params
        |> put_in([:additional, "bcc"], [list.ap_id])
        |> put_in([:additional, "listMessage"], list.ap_id)
        |> put_in([:object, "listMessage"], list.ap_id)

      _ ->
        activity_params
    end
  end

  def maybe_add_list_data(activity_params, _, _), do: activity_params
|
2019-05-01 03:11:17 -06:00
|
|
|
|
2019-09-24 03:10:54 -06:00
|
|
|
  # Normalizes a string "expires_in" to an integer and re-dispatches.
  def make_poll_data(%{"poll" => %{"expires_in" => expires_in}} = data)
      when is_binary(expires_in) do
    # In some cases mastofe sends out strings instead of integers
    # (NOTE: String.to_integer/1 raises if the string is not an integer).
    data
    |> put_in(["poll", "expires_in"], String.to_integer(expires_in))
    |> make_poll_data()
  end
|
|
|
|
|
2019-05-19 08:06:44 -06:00
|
|
|
  # Builds the AS2 "Question" representation of a poll, returning
  # {:ok, {poll_map, emoji_map}}. Validation failures fall through the
  # `with` and are returned unchanged as {:error, reason}.
  def make_poll_data(%{"poll" => %{"options" => options, "expires_in" => expires_in}} = data)
      when is_list(options) do
    limits = Pleroma.Config.get([:instance, :poll_limits])

    with :ok <- validate_poll_expiration(expires_in, limits),
         :ok <- validate_poll_options_amount(options, limits),
         :ok <- validate_poll_options_length(options, limits) do
      # Each option becomes a "Note" with an (initially empty) replies
      # collection; custom emoji used in option text are accumulated.
      {option_notes, emoji} =
        Enum.map_reduce(options, %{}, fn option, emoji ->
          note = %{
            "name" => option,
            "type" => "Note",
            "replies" => %{"type" => "Collection", "totalItems" => 0}
          }

          {note, Map.merge(emoji, Emoji.Formatter.get_emoji_map(option))}
        end)

      end_time =
        DateTime.utc_now()
        |> DateTime.add(expires_in)
        |> DateTime.to_iso8601()

      # "anyOf" marks a multiple-choice poll, "oneOf" a single-choice one.
      key = if truthy_param?(data["poll"]["multiple"]), do: "anyOf", else: "oneOf"
      poll = %{"type" => "Question", key => option_notes, "closed" => end_time}

      {:ok, {poll, emoji}}
    end
  end
|
|
|
|
|
2019-05-21 05:19:03 -06:00
|
|
|
def make_poll_data(%{"poll" => poll}) when is_map(poll) do
|
2019-09-24 03:10:54 -06:00
|
|
|
{:error, "Invalid poll"}
|
2019-05-18 04:29:28 -06:00
|
|
|
end
|
|
|
|
|
2019-05-19 08:06:44 -06:00
|
|
|
def make_poll_data(_data) do
|
2019-09-24 03:10:54 -06:00
|
|
|
{:ok, {%{}, %{}}}
|
|
|
|
end
|
|
|
|
|
|
|
|
defp validate_poll_options_amount(options, %{max_options: max_options}) do
|
|
|
|
if Enum.count(options) > max_options do
|
|
|
|
{:error, "Poll can't contain more than #{max_options} options"}
|
|
|
|
else
|
|
|
|
:ok
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
defp validate_poll_options_length(options, %{max_option_chars: max_option_chars}) do
|
|
|
|
if Enum.any?(options, &(String.length(&1) > max_option_chars)) do
|
|
|
|
{:error, "Poll options cannot be longer than #{max_option_chars} characters each"}
|
|
|
|
else
|
|
|
|
:ok
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
defp validate_poll_expiration(expires_in, %{min_expiration: min, max_expiration: max}) do
|
|
|
|
cond do
|
|
|
|
expires_in > max -> {:error, "Expiration date is too far in the future"}
|
|
|
|
expires_in < min -> {:error, "Expiration date is too soon"}
|
|
|
|
true -> :ok
|
|
|
|
end
|
2019-05-18 04:29:28 -06:00
|
|
|
end
|
|
|
|
|
2018-09-01 18:14:25 -06:00
|
|
|
  # Renders a status into {html, mentions, tags}: honors the requested
  # content type, optionally appends attachment links, and adds the #nsfw
  # tag for sensitive posts.
  def make_content_html(
        status,
        attachments,
        data,
        visibility
      ) do
    # A per-request "attachment_links" value overrides the instance-wide
    # setting.
    attachment_links =
      data
      |> Map.get("attachment_links", Config.get([:instance, :attachment_links]))
      |> truthy_param?()

    content_type = get_content_type(data["content_type"])

    # Safe DM mode: only link mentions already in the address list, so a
    # direct message cannot notify bystanders merely named in the text.
    options =
      if visibility == "direct" && Config.get([:instance, :safe_dm_mentions]) do
        [safe_mention: true]
      else
        []
      end

    status
    |> format_input(content_type, options)
    |> maybe_add_attachments(attachments, attachment_links)
    |> maybe_add_nsfw_tag(data)
  end
|
|
|
|
|
|
|
|
defp get_content_type(content_type) do
|
|
|
|
if Enum.member?(Config.get([:instance, :allowed_post_formats]), content_type) do
|
|
|
|
content_type
|
|
|
|
else
|
|
|
|
"text/plain"
|
|
|
|
end
|
2017-09-15 06:17:36 -06:00
|
|
|
end
|
|
|
|
|
2019-02-26 16:32:26 -07:00
|
|
|
  # Tags sensitive posts with #nsfw; accepts the boolean and the common
  # string spellings of "true" that clients send.
  defp maybe_add_nsfw_tag({text, mentions, tags}, %{"sensitive" => sensitive})
       when sensitive in [true, "True", "true", "1"] do
    {text, mentions, [{"#nsfw", "nsfw"} | tags]}
  end

  defp maybe_add_nsfw_tag(data, _), do: data
|
|
|
|
|
2019-08-05 07:33:22 -06:00
|
|
|
  # Picks the context for a new activity: conversation replies reuse the
  # conversation's AP id, ordinary replies inherit the parent's context,
  # and fresh posts get a newly generated one.
  def make_context(_, %Participation{} = participation) do
    Repo.preload(participation, :conversation).conversation.ap_id
  end

  def make_context(%Activity{data: %{"context" => context}}, _), do: context
  def make_context(_, _), do: Utils.generate_context_id()
|
2017-09-15 06:17:36 -06:00
|
|
|
|
2020-02-11 14:39:19 -07:00
|
|
|
def maybe_add_attachments(parsed, _attachments, false = _no_links), do: parsed
|
2018-03-30 07:01:53 -06:00
|
|
|
|
2019-02-26 16:32:26 -07:00
|
|
|
def maybe_add_attachments({text, mentions, tags}, attachments, _no_links) do
|
|
|
|
text = add_attachments(text, attachments)
|
|
|
|
{text, mentions, tags}
|
2017-12-07 11:44:09 -07:00
|
|
|
end
|
2018-03-30 07:01:53 -06:00
|
|
|
|
2017-05-17 10:00:20 -06:00
|
|
|
def add_attachments(text, attachments) do
|
2019-08-05 09:37:05 -06:00
|
|
|
attachment_text = Enum.map(attachments, &build_attachment_link/1)
|
2017-11-18 05:46:54 -07:00
|
|
|
Enum.join([text | attachment_text], "<br>")
|
2017-05-17 10:00:20 -06:00
|
|
|
end
|
|
|
|
|
2019-08-05 09:37:05 -06:00
|
|
|
defp build_attachment_link(%{"url" => [%{"href" => href} | _]} = attachment) do
|
|
|
|
name = attachment["name"] || URI.decode(Path.basename(href))
|
|
|
|
href = MediaProxy.url(href)
|
|
|
|
"<a href=\"#{href}\" class='attachment'>#{shortname(name)}</a>"
|
|
|
|
end
|
|
|
|
|
|
|
|
defp build_attachment_link(_), do: ""
|
|
|
|
|
2019-02-26 16:32:26 -07:00
|
|
|
  def format_input(text, format, options \\ [])

  @doc """
  Formatting text to plain text.
  """
  def format_input(text, "text/plain", options) do
    # Escape HTML, link mentions/hashtags/URLs, then turn newlines into
    # <br> so plain text keeps its line breaks when rendered.
    text
    |> Formatter.html_escape("text/plain")
    |> Formatter.linkify(options)
    |> (fn {text, mentions, tags} ->
          {String.replace(text, ~r/\r?\n/, "<br>"), mentions, tags}
        end).()
  end
|
|
|
|
|
2019-04-26 04:17:57 -06:00
|
|
|
  @doc """
  Formatting text as BBCode.
  """
  def format_input(text, "text/bbcode", options) do
    # Strip carriage returns, escape as plain text, render BBCode to HTML,
    # then link mentions/hashtags/URLs in the result.
    text
    |> String.replace(~r/\r/, "")
    |> Formatter.html_escape("text/plain")
    |> BBCode.to_html()
    |> (fn {:ok, html} -> html end).()
    |> Formatter.linkify(options)
  end
|
|
|
|
|
2018-12-14 02:41:55 -07:00
|
|
|
  @doc """
  Formatting text to html.
  """
  def format_input(text, "text/html", options) do
    # html_escape here sanitizes rather than fully escapes: markup the
    # instance allows for "text/html" is preserved.
    text
    |> Formatter.html_escape("text/html")
    |> Formatter.linkify(options)
  end
|
|
|
|
|
2018-12-14 02:41:55 -07:00
|
|
|
  @doc """
  Formatting text to markdown.
  """
  def format_input(text, "text/markdown", options) do
    # Mentions are escaped first so the Markdown renderer does not mangle
    # them; linkify runs on the rendered HTML, then the result is
    # sanitized as HTML.
    text
    |> Formatter.mentions_escape(options)
    |> Earmark.as_html!(%Earmark.Options{renderer: Pleroma.EarmarkRenderer})
    |> Formatter.linkify(options)
    |> Formatter.html_escape("text/html")
  end
|
|
|
|
|
2018-03-30 07:01:53 -06:00
|
|
|
  # Assembles the AS2 "Note" object map for a new status.
  # `extra_params` is merged last and can override any of the base fields.
  def make_note_data(
        actor,
        to,
        context,
        content_html,
        attachments,
        in_reply_to,
        tags,
        summary \\ nil,
        cc \\ [],
        sensitive \\ false,
        extra_params \\ %{}
      ) do
    %{
      "type" => "Note",
      "to" => to,
      "cc" => cc,
      "content" => content_html,
      "summary" => summary,
      "sensitive" => truthy_param?(sensitive),
      "context" => context,
      "attachment" => attachments,
      "actor" => actor,
      # `tags` is a keyword list; only its values end up in "tag"
      "tag" => Keyword.values(tags) |> Enum.uniq()
    }
    |> add_in_reply_to(in_reply_to)
    |> Map.merge(extra_params)
  end
|
2017-05-17 10:00:20 -06:00
|
|
|
|
2019-08-05 09:37:05 -06:00
|
|
|
  # Sets "inReplyTo" to the parent object's AP id when the parent can be
  # normalized to an Object; otherwise the map is returned unchanged.
  defp add_in_reply_to(object, nil), do: object

  defp add_in_reply_to(object, in_reply_to) do
    with %Object{} = in_reply_to_object <- Object.normalize(in_reply_to) do
      Map.put(object, "inReplyTo", in_reply_to_object.data["id"])
    else
      _ -> object
    end
  end
|
2017-06-19 15:12:37 -06:00
|
|
|
|
|
|
|
def format_naive_asctime(date) do
|
|
|
|
date |> DateTime.from_naive!("Etc/UTC") |> format_asctime
|
|
|
|
end
|
|
|
|
|
|
|
|
  # asctime-style timestamp with zone offset and year,
  # e.g. "Mon Sep 17 10:21:44 +0000 2018".
  def format_asctime(date) do
    Strftime.strftime!(date, "%a %b %d %H:%M:%S %z %Y")
  end
|
|
|
|
|
2019-04-02 03:25:51 -06:00
|
|
|
  # Converts an ISO 8601 timestamp to asctime format; logs a warning and
  # returns "" for anything that doesn't parse.
  def date_to_asctime(date) when is_binary(date) do
    with {:ok, date, _offset} <- DateTime.from_iso8601(date) do
      format_asctime(date)
    else
      _e ->
        Logger.warn("Date #{date} in wrong format, must be ISO 8601")
        ""
    end
  end

  # Non-binary input is rejected outright.
  # NOTE(review): the interpolation assumes `date` implements
  # String.Chars — confirm callers never pass maps/structs here.
  def date_to_asctime(date) do
    Logger.warn("Date #{date} in wrong format, must be ISO 8601")
    ""
  end
|
2017-09-15 06:17:36 -06:00
|
|
|
|
2017-09-15 09:50:47 -06:00
|
|
|
def to_masto_date(%NaiveDateTime{} = date) do
|
|
|
|
date
|
2018-03-30 07:01:53 -06:00
|
|
|
|> NaiveDateTime.to_iso8601()
|
2017-09-15 09:50:47 -06:00
|
|
|
|> String.replace(~r/(\.\d+)?$/, ".000Z", global: false)
|
|
|
|
end
|
|
|
|
|
2019-08-05 09:37:05 -06:00
|
|
|
def to_masto_date(date) when is_binary(date) do
|
|
|
|
with {:ok, date} <- NaiveDateTime.from_iso8601(date) do
|
|
|
|
to_masto_date(date)
|
|
|
|
else
|
|
|
|
_ -> ""
|
2017-09-15 09:50:47 -06:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-08-05 09:37:05 -06:00
|
|
|
def to_masto_date(_), do: ""
|
|
|
|
|
2017-09-15 06:17:36 -06:00
|
|
|
defp shortname(name) do
|
|
|
|
if String.length(name) < 30 do
|
|
|
|
name
|
|
|
|
else
|
|
|
|
String.slice(name, 0..30) <> "…"
|
|
|
|
end
|
|
|
|
end
|
2018-05-11 05:32:59 -06:00
|
|
|
|
2018-05-21 15:17:34 -06:00
|
|
|
  # Re-checks the user's password (used before sensitive account actions).
  # Only local users can be verified (remote users have no password hash
  # here); returns the freshly loaded user on success so callers operate
  # on current data.
  def confirm_current_password(user, password) do
    with %User{local: true} = db_user <- User.get_cached_by_id(user.id),
         true <- AuthenticationPlug.checkpw(password, db_user.password_hash) do
      {:ok, db_user}
    else
      _ -> {:error, dgettext("errors", "Invalid password.")}
    end
  end
|
2018-08-12 13:24:10 -06:00
|
|
|
|
2019-09-24 02:56:20 -06:00
|
|
|
  # Collects the custom emoji used in a user's bio and display name as
  # AS2 "Emoji" tag objects with absolute icon URLs.
  def emoji_from_profile(%User{bio: bio, name: name}) do
    [bio, name]
    |> Enum.map(&Emoji.Formatter.get_emoji/1)
    |> Enum.concat()
    |> Enum.map(fn {shortcode, %Emoji{file: path}} ->
      %{
        "type" => "Emoji",
        "icon" => %{"type" => "Image", "url" => "#{Endpoint.url()}#{path}"},
        "name" => ":#{shortcode}:"
      }
    end)
  end
|
2019-01-24 13:30:43 -07:00
|
|
|
|
|
|
|
def maybe_notify_to_recipients(
|
|
|
|
recipients,
|
|
|
|
%Activity{data: %{"to" => to, "type" => _type}} = _activity
|
|
|
|
) do
|
|
|
|
recipients ++ to
|
|
|
|
end
|
|
|
|
|
2019-11-12 04:48:14 -07:00
|
|
|
def maybe_notify_to_recipients(recipients, _), do: recipients
|
|
|
|
|
2019-01-24 13:30:43 -07:00
|
|
|
  # Adds the users mentioned in a "Create" activity's object to the
  # notification recipients.
  def maybe_notify_mentioned_recipients(
        recipients,
        %Activity{data: %{"to" => _to, "type" => type} = data} = activity
      )
      when type == "Create" do
    object = Object.normalize(activity)

    # Prefer the normalized Object; fall back to an embedded object map,
    # and finally to no data at all.
    object_data =
      cond do
        not is_nil(object) ->
          object.data

        is_map(data["object"]) ->
          data["object"]

        true ->
          %{}
      end

    tagged_mentions = maybe_extract_mentions(object_data)

    recipients ++ tagged_mentions
  end

  def maybe_notify_mentioned_recipients(recipients, _), do: recipients
|
|
|
|
|
2019-07-23 12:53:05 -06:00
|
|
|
  # Do not notify subscribers if author is making a reply
  def maybe_notify_subscribers(recipients, %Activity{
        object: %Object{data: %{"inReplyTo" => _ap_id}}
      }) do
    recipients
  end

  # For new top-level posts, notify the author's subscribers who are
  # allowed to see the activity.
  def maybe_notify_subscribers(
        recipients,
        %Activity{data: %{"actor" => actor, "type" => type}} = activity
      )
      when type == "Create" do
    # NOTE(review): this `with` has no else — if the actor cannot be
    # resolved, the non-matching value (e.g. nil) is returned instead of
    # `recipients`, which looks unintended; confirm with callers.
    with %User{} = user <- User.get_cached_by_ap_id(actor) do
      subscriber_ids =
        user
        |> User.subscriber_users()
        |> Enum.filter(&Visibility.visible_for_user?(activity, &1))
        |> Enum.map(& &1.ap_id)

      recipients ++ subscriber_ids
    end
  end

  def maybe_notify_subscribers(recipients, _), do: recipients
|
|
|
|
|
2019-11-12 04:48:14 -07:00
|
|
|
  # "Move" activities additionally notify the moved account's followers.
  def maybe_notify_followers(recipients, %Activity{data: %{"type" => "Move"}} = activity) do
    # NOTE(review): like maybe_notify_subscribers/2 in the original file,
    # this `with` has no else — an unresolvable actor returns the
    # non-matching value instead of `recipients`; confirm intent.
    with %User{} = user <- User.get_cached_by_ap_id(activity.actor) do
      user
      |> User.get_followers()
      |> Enum.map(& &1.ap_id)
      |> Enum.concat(recipients)
    end
  end

  def maybe_notify_followers(recipients, _), do: recipients
|
|
|
|
|
2019-01-24 13:30:43 -07:00
|
|
|
def maybe_extract_mentions(%{"tag" => tag}) do
|
|
|
|
tag
|
2019-08-05 09:37:05 -06:00
|
|
|
|> Enum.filter(fn x -> is_map(x) && x["type"] == "Mention" end)
|
2019-01-24 13:30:43 -07:00
|
|
|
|> Enum.map(fn x -> x["href"] end)
|
2019-08-05 09:37:05 -06:00
|
|
|
|> Enum.uniq()
|
2019-01-24 13:30:43 -07:00
|
|
|
end
|
|
|
|
|
|
|
|
def maybe_extract_mentions(_), do: []
|
2019-02-20 09:51:25 -07:00
|
|
|
|
2019-02-26 16:32:26 -07:00
|
|
|
  # Renders a report comment as HTML; a missing comment is allowed, and
  # comments over the configured size limit are rejected.
  def make_report_content_html(nil), do: {:ok, {nil, [], []}}

  def make_report_content_html(comment) do
    max_size = Pleroma.Config.get([:instance, :max_report_comment_size], 1000)

    if String.length(comment) <= max_size do
      {:ok, format_input(comment, "text/plain")}
    else
      {:error,
       dgettext("errors", "Comment must be up to %{max_size} characters", max_size: max_size)}
    end
  end
|
|
|
|
|
|
|
|
def get_report_statuses(%User{ap_id: actor}, %{"status_ids" => status_ids}) do
|
|
|
|
{:ok, Activity.all_by_actor_and_id(actor, status_ids)}
|
|
|
|
end
|
|
|
|
|
|
|
|
def get_report_statuses(_, _), do: {:ok, nil}
|
2019-03-21 17:17:53 -06:00
|
|
|
|
|
|
|
  # DEPRECATED mostly, context objects are now created at insertion time.
  # Maps an AP context URI to a numeric conversation id, creating the
  # context-mapping object the first time the context is seen.
  def context_to_conversation_id(context) do
    with %Object{id: id} <- Object.get_cached_by_ap_id(context) do
      id
    else
      _e ->
        changeset = Object.context_mapping(context)

        case Repo.insert(changeset) do
          {:ok, %{id: id}} ->
            id

          # This should be solved by an upsert, but it seems ecto
          # has problems accessing the constraint inside the jsonb.
          {:error, _} ->
            Object.get_cached_by_ap_id(context).id
        end
    end
  end
|
|
|
|
|
|
|
|
  # Resolves a numeric conversation id back to its AP context URI.
  def conversation_id_to_context(id) do
    with %Object{data: %{"id" => context}} <- Repo.get(Object, id) do
      context
    else
      _e ->
        {:error, dgettext("errors", "No such conversation")}
    end
  end
|
2019-06-01 07:07:01 -06:00
|
|
|
|
|
|
|
def make_answer_data(%User{ap_id: ap_id}, object, name) do
|
|
|
|
%{
|
|
|
|
"type" => "Answer",
|
|
|
|
"actor" => ap_id,
|
|
|
|
"cc" => [object.data["actor"]],
|
|
|
|
"to" => [],
|
|
|
|
"name" => name,
|
|
|
|
"inReplyTo" => object.data["id"]
|
|
|
|
}
|
|
|
|
end
|
2019-06-17 20:05:05 -06:00
|
|
|
|
2019-09-24 03:10:54 -06:00
|
|
|
  # Rejects statuses that exceed the instance character limit, and
  # statuses that are empty while carrying no attachments.
  def validate_character_limit("" = _full_payload, [] = _attachments) do
    {:error, dgettext("errors", "Cannot post an empty status without attachments")}
  end

  def validate_character_limit(full_payload, _attachments) do
    limit = Pleroma.Config.get([:instance, :limit])
    length = String.length(full_payload)

    if length <= limit do
      :ok
    else
      {:error, dgettext("errors", "The status is over the character limit")}
    end
  end
|
2017-05-17 10:00:20 -06:00
|
|
|
end
|