# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.CommonAPI.Utils do
  alias Calendar.Strftime
  alias Comeonin.Pbkdf2
  alias Pleroma.Activity
  alias Pleroma.Formatter
  alias Pleroma.Object
  alias Pleroma.Repo
  alias Pleroma.User
  alias Pleroma.Web
  alias Pleroma.Web.ActivityPub.Utils
  alias Pleroma.Web.Endpoint
  alias Pleroma.Web.MediaProxy

  # This is a hack for Twidere: accept either a status id or an ActivityPub
  # object id and resolve it to the corresponding Create activity.
  def get_by_id_or_ap_id(id) do
    activity = Repo.get(Activity, id) || Activity.get_create_by_object_ap_id(id)

    activity &&
      if activity.data["type"] == "Create" do
        activity
      else
        Activity.get_create_by_object_ap_id(activity.data["object"])
      end
  end
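  # Looks up the activity being replied to by its database id; an empty or
  # missing id means the status is not a reply.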
  def get_replied_to_activity(""), do: nil

  def get_replied_to_activity(id) when not is_nil(id) do
    Repo.get(Activity, id)
  end

  def get_replied_to_activity(_), do: nil
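  # Builds the attachment list for a new status from the submitted media ids,
  # with or without client-supplied descriptions.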
  def attachments_from_ids(data) do
    if Map.has_key?(data, "descriptions") do
      attachments_from_ids_descs(data["media_ids"], data["descriptions"])
    else
      attachments_from_ids_no_descs(data["media_ids"])
    end
  end

  def attachments_from_ids_no_descs(ids) do
    Enum.map(ids || [], fn media_id ->
      Repo.get(Object, media_id).data
    end)
  end
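  # The descriptions arrive as a JSON-encoded map of media id => description;
  # each description is stored under the attachment's "name" key.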
  def attachments_from_ids_descs(ids, descs_str) do
    {_, descs} = Jason.decode(descs_str)

    Enum.map(ids || [], fn media_id ->
      Map.put(Repo.get(Object, media_id).data, "name", descs[media_id])
    end)
  end
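  # Computes the AP "to"/"cc" addressing for a new status from its visibility
  # scope: "public", "unlisted", "private" or "direct".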
  def to_for_user_and_mentions(user, mentions, inReplyTo, "public") do
    mentioned_users = Enum.map(mentions, fn {_, %{ap_id: ap_id}} -> ap_id end)

    to = ["https://www.w3.org/ns/activitystreams#Public" | mentioned_users]
    cc = [user.follower_address]

    if inReplyTo do
      {Enum.uniq([inReplyTo.data["actor"] | to]), cc}
    else
      {to, cc}
    end
  end

  def to_for_user_and_mentions(user, mentions, inReplyTo, "unlisted") do
    mentioned_users = Enum.map(mentions, fn {_, %{ap_id: ap_id}} -> ap_id end)

    to = [user.follower_address | mentioned_users]
    cc = ["https://www.w3.org/ns/activitystreams#Public"]

    if inReplyTo do
      {Enum.uniq([inReplyTo.data["actor"] | to]), cc}
    else
      {to, cc}
    end
  end

  def to_for_user_and_mentions(user, mentions, inReplyTo, "private") do
    {to, cc} = to_for_user_and_mentions(user, mentions, inReplyTo, "direct")
    {[user.follower_address | to], cc}
  end

  def to_for_user_and_mentions(_user, mentions, inReplyTo, "direct") do
    mentioned_users = Enum.map(mentions, fn {_, %{ap_id: ap_id}} -> ap_id end)

    if inReplyTo do
      {Enum.uniq([inReplyTo.data["actor"] | mentioned_users]), []}
    else
      {mentioned_users, []}
    end
  end
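  # Renders the status text for the given content type and optionally appends
  # links to the attachments.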
  def make_content_html(
        status,
        mentions,
        attachments,
        tags,
        content_type,
        no_attachment_links \\ false
      ) do
    status
    |> format_input(mentions, tags, content_type)
    |> maybe_add_attachments(attachments, no_attachment_links)
  end
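  # Reuses the conversation context of the replied-to activity, or generates a
  # fresh context id.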
  def make_context(%Activity{data: %{"context" => context}}), do: context
  def make_context(_), do: Utils.generate_context_id()

  def maybe_add_attachments(text, _attachments, true = _no_links), do: text

  def maybe_add_attachments(text, attachments, _no_links) do
    add_attachments(text, attachments)
  end
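  # Appends an HTML link (proxied through MediaProxy) for each attachment to
  # the status text, joined with <br>.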
  def add_attachments(text, attachments) do
    attachment_text =
      Enum.map(attachments, fn
        %{"url" => [%{"href" => href} | _]} = attachment ->
          name = attachment["name"] || URI.decode(Path.basename(href))
          href = MediaProxy.url(href)
          "<a href=\"#{href}\" class='attachment'>#{shortname(name)}</a>"

        _ ->
          ""
      end)

    Enum.join([text | attachment_text], "<br>")
  end
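  # Function head declaring the default for `options`; the following clauses
  # dispatch on the requested content type.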
  def format_input(text, mentions, tags, format, options \\ [])

  @doc """
  Formats plain-text input: escapes HTML, turns newlines into <br>, and links
  URLs, mentions and hashtags.
  """
  def format_input(text, mentions, tags, "text/plain", options) do
    text
    |> Formatter.html_escape("text/plain")
    |> String.replace(~r/\r?\n/, "<br>")
    |> (&{[], &1}).()
    |> Formatter.add_links()
    |> Formatter.add_user_links(mentions, options[:user_links] || [])
    |> Formatter.add_hashtag_links(tags)
    |> Formatter.finalize()
  end
  @doc """
  Formats HTML input: sanitizes the markup and links mentions.
  """
  def format_input(text, mentions, _tags, "text/html", options) do
    text
    |> Formatter.html_escape("text/html")
    |> (&{[], &1}).()
    |> Formatter.add_user_links(mentions, options[:user_links] || [])
    |> Formatter.finalize()
  end
  @doc """
  Formats markdown input: renders it to HTML with Earmark, then sanitizes the
  result and links mentions and hashtags.
  """
  def format_input(text, mentions, tags, "text/markdown", options) do
    text
    |> Formatter.mentions_escape(mentions)
    |> Earmark.as_html!()
    |> Formatter.html_escape("text/html")
    |> (&{[], &1}).()
    |> Formatter.add_user_links(mentions, options[:user_links] || [])
    |> Formatter.add_hashtag_links(tags)
    |> Formatter.finalize()
  end
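  # Replaces hashtags in the text with links to the tag page; longer tags are
  # substituted first so they are not clobbered by shorter prefixes.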
  def add_tag_links(text, tags) do
    tags =
      tags
      |> Enum.sort_by(fn {tag, _} -> -String.length(tag) end)

    Enum.reduce(tags, text, fn {full, tag}, text ->
      url = "<a href='#{Web.base_url()}/tag/#{tag}' rel='tag'>##{tag}</a>"
      String.replace(text, full, url)
    end)
  end
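  # Assembles the ActivityStreams "Note" object for a new status, adding the
  # inReplyTo fields when the status is a reply.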
  def make_note_data(
        actor,
        to,
        context,
        content_html,
        attachments,
        inReplyTo,
        tags,
        cw \\ nil,
        cc \\ []
      ) do
    object = %{
      "type" => "Note",
      "to" => to,
      "cc" => cc,
      "content" => content_html,
      "summary" => cw,
      "context" => context,
      "attachment" => attachments,
      "actor" => actor,
      "tag" => tags |> Enum.map(fn {_, tag} -> tag end) |> Enum.uniq()
    }

    if inReplyTo do
      object
      |> Map.put("inReplyTo", inReplyTo.data["object"]["id"])
      |> Map.put("inReplyToStatusId", inReplyTo.id)
    else
      object
    end
  end
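  # Date helpers: asctime-style timestamps for the Twitter-compatible API.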
  def format_naive_asctime(date) do
    date |> DateTime.from_naive!("Etc/UTC") |> format_asctime
  end

  def format_asctime(date) do
    Strftime.strftime!(date, "%a %b %d %H:%M:%S %z %Y")
  end

  def date_to_asctime(date) do
    with {:ok, date, _offset} <- date |> DateTime.from_iso8601() do
      format_asctime(date)
    else
      _e ->
        ""
    end
  end
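  # Normalizes a datetime into the ISO 8601 form Mastodon clients expect,
  # forcing a ".000Z" suffix; unparsable input yields an empty string.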
  def to_masto_date(%NaiveDateTime{} = date) do
    date
    |> NaiveDateTime.to_iso8601()
    |> String.replace(~r/(\.\d+)?$/, ".000Z", global: false)
  end

  def to_masto_date(date) do
    try do
      date
      |> NaiveDateTime.from_iso8601!()
      |> NaiveDateTime.to_iso8601()
      |> String.replace(~r/(\.\d+)?$/, ".000Z", global: false)
    rescue
      _e -> ""
    end
  end
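  # Truncates long attachment names used in the generated link text.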
  defp shortname(name) do
    if String.length(name) < 30 do
      name
    else
      String.slice(name, 0..30) <> "…"
    end
  end
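  # Checks the given password against the stored hash of the (local) user.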
  def confirm_current_password(user, password) do
    with %User{local: true} = db_user <- Repo.get(User, user.id),
         true <- Pbkdf2.checkpw(password, db_user.password_hash) do
      {:ok, db_user}
    else
      _ -> {:error, "Invalid password."}
    end
  end
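  # Collects the custom emoji used in the user's bio and display name as
  # ActivityPub "Emoji" tags.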
  def emoji_from_profile(%{info: _info} = user) do
    (Formatter.get_emoji(user.bio) ++ Formatter.get_emoji(user.name))
    |> Enum.map(fn {shortcode, url} ->
      %{
        "type" => "Emoji",
        "icon" => %{"type" => "Image", "url" => "#{Endpoint.url()}#{url}"},
        "name" => ":#{shortcode}:"
      }
    end)
  end
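  # Notification helpers: expand a recipient list with the activity's
  # addressees and any actors mentioned in its object.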
  def maybe_notify_to_recipients(
        recipients,
        %Activity{data: %{"to" => to, "type" => _type}} = _activity
      ) do
    recipients ++ to
  end

  def maybe_notify_mentioned_recipients(
        recipients,
        %Activity{data: %{"to" => _to, "type" => type} = data} = _activity
      )
      when type == "Create" do
    object = Object.normalize(data["object"])

    object_data =
      cond do
        !is_nil(object) ->
          object.data

        is_map(data["object"]) ->
          data["object"]

        true ->
          %{}
      end

    tagged_mentions = maybe_extract_mentions(object_data)

    recipients ++ tagged_mentions
  end

  def maybe_notify_mentioned_recipients(recipients, _), do: recipients

  def maybe_extract_mentions(%{"tag" => tag}) do
    tag
    |> Enum.filter(fn x -> is_map(x) end)
    |> Enum.filter(fn x -> x["type"] == "Mention" end)
    |> Enum.map(fn x -> x["href"] end)
  end

  def maybe_extract_mentions(_), do: []
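  # Report helpers: validate and render the comment attached to a report and
  # load the statuses it references.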
  def make_report_content_html(nil), do: {:ok, nil}

  def make_report_content_html(comment) do
    max_size = Pleroma.Config.get([:instance, :max_report_comment_size], 1000)

    if String.length(comment) <= max_size do
      {:ok, format_input(comment, [], [], "text/plain")}
    else
      {:error, "Comment must be up to #{max_size} characters"}
    end
  end

  def get_report_statuses(%User{ap_id: actor}, %{"status_ids" => status_ids}) do
    {:ok, Activity.all_by_actor_and_id(actor, status_ids)}
  end

  def get_report_statuses(_, _), do: {:ok, nil}
end