2018-12-23 13:04:54 -07:00
|
|
|
|
# Pleroma: A lightweight social networking server
|
2021-01-12 23:49:20 -07:00
|
|
|
|
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
|
2018-12-23 13:04:54 -07:00
|
|
|
|
# SPDX-License-Identifier: AGPL-3.0-only
|
|
|
|
|
|
2017-09-15 06:17:36 -06:00
|
|
|
|
defmodule Pleroma.Web.CommonAPI.Utils do
|
2019-07-10 03:25:58 -06:00
|
|
|
|
import Pleroma.Web.Gettext
|
|
|
|
|
|
2018-12-11 05:31:52 -07:00
|
|
|
|
alias Calendar.Strftime
|
2019-02-09 08:16:26 -07:00
|
|
|
|
alias Pleroma.Activity
|
2019-03-04 19:52:23 -07:00
|
|
|
|
alias Pleroma.Config
|
2019-08-02 07:05:27 -06:00
|
|
|
|
alias Pleroma.Conversation.Participation
|
2019-02-09 08:16:26 -07:00
|
|
|
|
alias Pleroma.Formatter
|
|
|
|
|
alias Pleroma.Object
|
|
|
|
|
alias Pleroma.Repo
|
2018-12-11 05:31:52 -07:00
|
|
|
|
alias Pleroma.User
|
2017-05-17 10:00:20 -06:00
|
|
|
|
alias Pleroma.Web.ActivityPub.Utils
|
2019-04-07 08:11:29 -06:00
|
|
|
|
alias Pleroma.Web.ActivityPub.Visibility
|
2020-10-02 11:00:50 -06:00
|
|
|
|
alias Pleroma.Web.CommonAPI.ActivityDraft
|
2018-10-29 11:26:15 -06:00
|
|
|
|
alias Pleroma.Web.MediaProxy
|
2021-06-07 15:45:33 -06:00
|
|
|
|
alias Pleroma.Web.Utils.Params
|
2017-05-17 10:00:20 -06:00
|
|
|
|
|
2019-04-02 03:25:51 -06:00
|
|
|
|
require Logger
|
2019-07-28 20:43:19 -06:00
|
|
|
|
require Pleroma.Constants
|
2017-05-17 10:00:20 -06:00
|
|
|
|
|
Restrict media usage to owners
In Mastodon media can only be used by owners and only be associated with
a single post. We currently allow media to be associated with several
posts and until now did not limit their usage in posts to media owners.
However, media update and GET lookup was already limited to owners.
(In accordance with allowing media reuse, we also still allow GET
lookups of media already used in a post unlike Mastodon)
Allowing reuse isn’t problematic per se, but allowing use by non-owners
can be problematic if media ids of private-scoped posts can be guessed
since creating a new post with this media id will reveal the uploaded
file content and alt text.
Given media ids are currently just part of a sequential series shared
with some other objects, guessing media ids is with some persistence
indeed feasible.
E.g. sampling some public media ids from a real-world
instance with 112 total and 61 monthly-active users:
17.465.096 at t0
17.472.673 at t1 = t0 + 4h
17.473.248 at t2 = t1 + 20min
This gives about 30 new ids per minute of which most won't be
local media but remote and local posts, poll answers etc.
Assuming the default ratelimit of 15 post actions per 10s, scraping all
media for the 4h interval takes about 84 minutes and scraping the 20min
range mere 6.3 minutes. (Until the preceding commit, post updates were
not rate limited at all, allowing even faster scraping.)
If an attacker can infer (e.g. via reply to a follower-only post not
accessible to the attacker) some sensitive information was uploaded
during a specific time interval and has some pointers regarding the
nature of the information, identifying the specific upload out of all
scraped media for this timerange is not impossible.
Thus restrict media usage to owners.
Checking ownership just in ActivityDraft would already be sufficient,
since when a scheduled status actually gets posted it goes through
ActivityDraft again, but would erroneously return a success status
when scheduling an illegal post.
Independently discovered and fixed by mint in Pleroma
https://git.pleroma.social/pleroma/pleroma/-/commit/1afde067b12ad0062c1820091ea9b0a680819281
2024-04-24 09:46:18 -06:00
|
|
|
|
# Resolves the given `media_ids` to their attachment objects' data maps.
# Each id must resolve to an attachment object the user is authorized to
# use (Object.authorize_access/2 — media ownership check); ids that do
# not resolve to an attachment are silently skipped.
# Returns a list of attachment data maps in input order, or
# {:error, reason} when an authorization check fails.
def attachments_from_ids(user, %{media_ids: ids}) do
  attachments_from_ids(user, ids, [])
end

# No media_ids key in the params: no attachments.
def attachments_from_ids(_, _), do: []

# Accumulator was built by prepending; reverse to restore input order.
defp attachments_from_ids(_user, [], acc), do: Enum.reverse(acc)

defp attachments_from_ids(user, [media_id | ids], acc) do
  with {_, %Object{} = object} <- {:get, get_attachment(media_id)},
       :ok <- Object.authorize_access(object, user) do
    attachments_from_ids(user, ids, [object.data | acc])
  else
    # Unknown id, or object is not an attachment: skip this id.
    {:get, _} -> attachments_from_ids(user, ids, acc)
    # Authorization failed: abort the whole resolution.
    {:error, reason} -> {:error, reason}
  end
end
|
2017-05-17 10:00:20 -06:00
|
|
|
|
|
2024-04-25 10:16:21 -06:00
|
|
|
|
# Fetches the object for `media_id` and returns it only when its "type"
# is one of the attachment types (Pleroma.Constants.attachment_types()).
# Returns nil for unknown ids and for non-attachment objects.
def get_attachment(media_id) do
  with %Object{} = object <- Repo.get(Object, media_id),
       true <- object.data["type"] in Pleroma.Constants.attachment_types() do
    object
  else
    _ -> nil
  end
end
|
|
|
|
|
|
2020-10-02 11:00:50 -06:00
|
|
|
|
@spec get_to_and_cc(ActivityDraft.t()) :: {list(String.t()), list(String.t())}

# Computes the AP {to, cc} addressing for a draft.
# Replying inside a conversation: address exactly the conversation's
# recipients, with an empty cc.
def get_to_and_cc(%{in_reply_to_conversation: %Participation{} = participation}) do
  participation = Repo.preload(participation, :recipients)
  {Enum.map(participation.recipients, & &1.ap_id), []}
end

# "public"/"local": public collection (or the local-public variant) plus
# mentions go in to; the author's followers collection goes in cc.
def get_to_and_cc(%{visibility: visibility} = draft) when visibility in ["public", "local"] do
  to =
    case visibility do
      "public" -> [Pleroma.Constants.as_public() | draft.mentions]
      "local" -> [Utils.as_local_public() | draft.mentions]
    end

  cc = [draft.user.follower_address]

  # When replying, also address the parent's author (deduplicated).
  if draft.in_reply_to do
    {Enum.uniq([draft.in_reply_to.data["actor"] | to]), cc}
  else
    {to, cc}
  end
end

# "unlisted": followers + mentions in to, public collection in cc
# (the to/cc swap relative to "public" is what makes it unlisted).
def get_to_and_cc(%{visibility: "unlisted"} = draft) do
  to = [draft.user.follower_address | draft.mentions]
  cc = [Pleroma.Constants.as_public()]

  if draft.in_reply_to do
    {Enum.uniq([draft.in_reply_to.data["actor"] | to]), cc}
  else
    {to, cc}
  end
end

# "private": direct addressing plus the author's followers collection.
def get_to_and_cc(%{visibility: "private"} = draft) do
  {to, cc} = get_to_and_cc(struct(draft, visibility: "direct"))
  {[draft.user.follower_address | to], cc}
end

def get_to_and_cc(%{visibility: "direct"} = draft) do
  # If the OP is a DM already, add the implicit actor.
  if draft.in_reply_to && Visibility.is_direct?(draft.in_reply_to) do
    {Enum.uniq([draft.in_reply_to.data["actor"] | draft.mentions]), []}
  else
    {draft.mentions, []}
  end
end

# List visibility: addressing is handled via bcc in maybe_add_list_data/3;
# only mentions go in to here.
def get_to_and_cc(%{visibility: {:list, _}, mentions: mentions}), do: {mentions, []}
|
|
|
|
|
|
2019-06-03 10:17:08 -06:00
|
|
|
|
# When an explicit `to` list (of nicknames) is given, resolve it to ap_ids;
# otherwise fall back to the mentioned users collected from the status text.
def get_addressed_users(_, to) when is_list(to) do
  User.get_ap_ids_by_nicknames(to)
end

def get_addressed_users(mentioned_users, _), do: mentioned_users
|
2019-05-01 03:11:17 -06:00
|
|
|
|
|
2019-07-15 01:00:29 -06:00
|
|
|
|
# For list-visibility posts, bcc's the list's AP id and records it as
# "listMessage" on both the activity and the object. If the list does not
# exist or is not owned by `user`, the params are returned unchanged.
def maybe_add_list_data(activity_params, user, {:list, list_id}) do
  case Pleroma.List.get(list_id, user) do
    %Pleroma.List{} = list ->
      activity_params
      |> put_in([:additional, "bcc"], [list.ap_id])
      |> put_in([:additional, "listMessage"], list.ap_id)
      |> put_in([:object, "listMessage"], list.ap_id)

    _ ->
      activity_params
  end
end

# Non-list visibilities: nothing to add.
def maybe_add_list_data(activity_params, _, _), do: activity_params
|
2019-05-01 03:11:17 -06:00
|
|
|
|
|
2019-09-24 03:10:54 -06:00
|
|
|
|
# Builds the AP "Question" data for a poll from the request params.
# Returns {:ok, {poll_map, emoji_map}} or {:error, message}.

# String "expires_in": normalize to an integer, then re-dispatch.
def make_poll_data(%{"poll" => %{"expires_in" => expires_in}} = data)
    when is_binary(expires_in) do
  # In some cases mastofe sends out strings instead of integers
  data
  |> put_in(["poll", "expires_in"], String.to_integer(expires_in))
  |> make_poll_data()
end

def make_poll_data(%{poll: %{options: options, expires_in: expires_in}} = data)
    when is_list(options) do
  limits = Config.get([:instance, :poll_limits])

  # Duplicate options are collapsed before validation.
  options = options |> Enum.uniq()

  with :ok <- validate_poll_expiration(expires_in, limits),
       :ok <- validate_poll_options_amount(options, limits),
       :ok <- validate_poll_options_length(options, limits) do
    # One "Note" per option, while collecting the emoji used across all
    # option texts into a single map.
    {option_notes, emoji} =
      Enum.map_reduce(options, %{}, fn option, emoji ->
        note = %{
          "name" => option,
          "type" => "Note",
          "replies" => %{"type" => "Collection", "totalItems" => 0}
        }

        {note, Map.merge(emoji, Pleroma.Emoji.Formatter.get_emoji_map(option))}
      end)

    end_time =
      DateTime.utc_now()
      |> DateTime.add(expires_in)
      |> DateTime.to_iso8601()

    # Multiple-choice polls use "anyOf", single-choice use "oneOf".
    key = if Params.truthy_param?(data.poll[:multiple]), do: "anyOf", else: "oneOf"
    poll = %{"type" => "Question", key => option_notes, "closed" => end_time}

    {:ok, {poll, emoji}}
  end
end

# A poll map was supplied but did not match the expected shape.
def make_poll_data(%{"poll" => poll}) when is_map(poll) do
  {:error, "Invalid poll"}
end

# No poll in the params: empty poll and emoji maps.
def make_poll_data(_data) do
  {:ok, {%{}, %{}}}
end
|
|
|
|
|
|
|
|
|
|
# Checks that a poll has between 2 and `max_options` options (inclusive).
# Returns :ok or {:error, message}.
defp validate_poll_options_amount(options, %{max_options: max_options}) do
  amount = Enum.count(options)

  cond do
    amount < 2 ->
      {:error, "Poll must contain at least 2 options"}

    amount > max_options ->
      {:error, "Poll can't contain more than #{max_options} options"}

    true ->
      :ok
  end
end
|
|
|
|
|
|
|
|
|
|
# Checks that no poll option text exceeds `max_option_chars` graphemes.
# Returns :ok or {:error, message}.
defp validate_poll_options_length(options, %{max_option_chars: max_option_chars}) do
  too_long? = fn option -> String.length(option) > max_option_chars end

  case Enum.any?(options, too_long?) do
    true -> {:error, "Poll options cannot be longer than #{max_option_chars} characters each"}
    false -> :ok
  end
end
|
|
|
|
|
|
|
|
|
|
# Checks that the poll expiration (seconds) lies within the configured
# [min_expiration, max_expiration] bounds. Returns :ok or {:error, message}.
defp validate_poll_expiration(expires_in, %{min_expiration: _, max_expiration: max})
     when expires_in > max do
  {:error, "Expiration date is too far in the future"}
end

defp validate_poll_expiration(expires_in, %{min_expiration: min, max_expiration: _})
     when expires_in < min do
  {:error, "Expiration date is too soon"}
end

defp validate_poll_expiration(_expires_in, %{min_expiration: _, max_expiration: _}), do: :ok
|
|
|
|
|
|
2020-10-02 11:00:50 -06:00
|
|
|
|
# Renders a draft's status text to HTML: formats according to the
# requested content type, then optionally appends links for attachments.
# Returns the {text, mentions, tags} triple produced by format_input/3.
def make_content_html(%ActivityDraft{} = draft) do
  # Per-request override of the instance-wide attachment_links setting.
  attachment_links =
    draft.params
    |> Map.get("attachment_links", Config.get([:instance, :attachment_links]))
    |> Params.truthy_param?()

  # Falls back to "text/plain" when the requested type is not allowed.
  content_type = get_content_type(draft.params[:content_type])

  # safe_dm_mentions: don't linkify mentions in DMs, so the formatter
  # only addresses the users mentioned at the start of the text.
  options =
    if draft.visibility == "direct" && Config.get([:instance, :safe_dm_mentions]) do
      [safe_mention: true]
    else
      []
    end

  draft.status
  |> format_input(content_type, options)
  |> maybe_add_attachments(draft.attachments, attachment_links)
end
|
|
|
|
|
|
2022-09-06 13:24:02 -06:00
|
|
|
|
# Returns `content_type` when the instance allows posting in that format,
# otherwise falls back to "text/plain".
def get_content_type(content_type) do
  allowed_formats = Config.get([:instance, :allowed_post_formats])

  if content_type in allowed_formats do
    content_type
  else
    "text/plain"
  end
end
|
|
|
|
|
|
2022-12-31 11:09:27 -07:00
|
|
|
|
# Determines the AP "context" for a new activity: the conversation's ap_id
# when replying inside a conversation, the parent's (or quoted post's)
# context when replying/quoting, or a freshly generated context id.
def make_context(%{in_reply_to_conversation: %Participation{} = participation}) do
  Repo.preload(participation, :conversation).conversation.ap_id
end

def make_context(%{in_reply_to: %Activity{data: %{"context" => context}}}), do: context
def make_context(%{quote: %Activity{data: %{"context" => context}}}), do: context
def make_context(_), do: Utils.generate_context_id()
|
2017-09-15 06:17:36 -06:00
|
|
|
|
|
2020-02-11 14:39:19 -07:00
|
|
|
|
# Appends attachment links to formatted status text, unless attachment
# links are disabled (third argument false).
def maybe_add_attachments(parsed, _attachments, false = _no_links), do: parsed

def maybe_add_attachments({text, mentions, tags}, attachments, _no_links) do
  {add_attachments(text, attachments), mentions, tags}
end
|
2018-03-30 07:01:53 -06:00
|
|
|
|
|
2017-05-17 10:00:20 -06:00
|
|
|
|
# Appends an HTML link for every attachment to `text`, separated by <br>.
# Attachments without a usable URL contribute an empty link string.
def add_attachments(text, attachments) do
  Enum.reduce(attachments, text, fn attachment, acc ->
    acc <> "<br>" <> build_attachment_link(attachment)
  end)
end
|
|
|
|
|
|
2019-08-05 09:37:05 -06:00
|
|
|
|
# Builds an HTML anchor for an attachment, using its first URL. The link
# text is the attachment's name (or the decoded file basename), shortened
# for display, and the href is rewritten through the media proxy.
defp build_attachment_link(%{"url" => [%{"href" => href} | _]} = attachment) do
  name = attachment["name"] || URI.decode(Path.basename(href))
  href = MediaProxy.url(href)
  "<a href=\"#{href}\" class='attachment'>#{shortname(name)}</a>"
end

# No usable URL: contribute nothing.
defp build_attachment_link(_), do: ""
|
|
|
|
|
|
2019-02-26 16:32:26 -07:00
|
|
|
|
def format_input(text, format, options \\ [])

@doc """
Formatting text to plain text, BBCode, HTML, or Markdown

Returns a `{html, mentions, tags}` triple as produced by
`Pleroma.Formatter.linkify/2`.
"""
def format_input(text, "text/plain", options) do
  text
  |> Formatter.html_escape("text/plain")
  |> Formatter.linkify(options)
  # Plain text keeps its line breaks: convert newlines to <br>.
  |> (fn {text, mentions, tags} ->
        {String.replace(text, ~r/\r?\n/, "<br>"), mentions, tags}
      end).()
end
|
|
|
|
|
|
2019-04-26 04:17:57 -06:00
|
|
|
|
# BBCode: escape first, render BBCode to HTML, then linkify.
def format_input(text, "text/bbcode", options) do
  text
  |> String.replace(~r/\r/, "")
  |> Formatter.html_escape("text/plain")
  |> BBCode.to_html()
  # BBCode.to_html/1 returns {:ok, html}; unwrap it.
  |> (fn {:ok, html} -> html end).()
  |> Formatter.linkify(options)
end

# HTML input: sanitize, then linkify.
def format_input(text, "text/html", options) do
  text
  |> Formatter.html_escape("text/html")
  |> Formatter.linkify(options)
end

# Misskey-flavoured markdown (MFM): render markdown, then MFM extensions,
# linkify, and finally sanitize the resulting HTML.
def format_input(text, "text/x.misskeymarkdown", options) do
  text
  |> Formatter.markdown_to_html(%{breaks: true})
  |> MfmParser.Parser.parse()
  |> MfmParser.Encoder.to_html()
  |> Formatter.linkify(options)
  |> Formatter.html_escape("text/html")
end

# Markdown: escape mentions so the markdown renderer doesn't mangle them,
# render, linkify, then sanitize the resulting HTML.
def format_input(text, "text/markdown", options) do
  text
  |> Formatter.mentions_escape(options)
  |> Formatter.markdown_to_html()
  |> Formatter.linkify(options)
  |> Formatter.html_escape("text/html")
end
|
|
|
|
|
|
2017-06-19 15:12:37 -06:00
|
|
|
|
# Formats a NaiveDateTime (assumed UTC) in asctime-like form.
def format_naive_asctime(date) do
  date |> DateTime.from_naive!("Etc/UTC") |> format_asctime
end

# Formats a DateTime like "Mon Jan 01 00:00:00 +0000 2018".
def format_asctime(date) do
  Strftime.strftime!(date, "%a %b %d %H:%M:%S %z %Y")
end
|
|
|
|
|
|
2019-04-02 03:25:51 -06:00
|
|
|
|
# Converts an ISO 8601 date string to asctime format; logs a warning and
# returns "" for anything unparsable.
def date_to_asctime(date) when is_binary(date) do
  with {:ok, date, _offset} <- DateTime.from_iso8601(date) do
    format_asctime(date)
  else
    _e ->
      Logger.warning("Date #{date} in wrong format, must be ISO 8601")
      ""
  end
end

# Non-binary input is always rejected.
def date_to_asctime(date) do
  Logger.warning("Date #{date} in wrong format, must be ISO 8601")
  ""
end
|
2017-09-15 06:17:36 -06:00
|
|
|
|
|
2017-09-15 09:50:47 -06:00
|
|
|
|
# Converts a date to Mastodon-style ISO 8601 with exactly millisecond
# precision and a trailing "Z" (e.g. "2015-01-23T23:50:07.000Z").
# Anything unrepresentable collapses to the Unix epoch string.
def to_masto_date(%NaiveDateTime{} = date) do
  # NOTE: Elixir’s ISO 8601 format is a superset of the real standard;
  # it supports negative years, for example, while ISO 8601 only covers
  # years before 1583 by mutual agreement. Clamp those to the epoch.
  if date.year >= 1583 do
    iso = NaiveDateTime.to_iso8601(date)

    # Replace any fractional seconds (or append, when absent) with ".000Z".
    String.replace(iso, ~r/(\.\d+)?$/, ".000Z", global: false)
  else
    "1970-01-01T00:00:00Z"
  end
end

def to_masto_date(date) when is_binary(date) do
  case NaiveDateTime.from_iso8601(date) do
    {:ok, naive} -> to_masto_date(naive)
    _ -> "1970-01-01T00:00:00Z"
  end
end

def to_masto_date(_), do: "1970-01-01T00:00:00Z"
|
2019-08-05 09:37:05 -06:00
|
|
|
|
|
2017-09-15 06:17:36 -06:00
|
|
|
|
# Shortens an attachment (file) name for display, appending an ellipsis
# when it exceeds the configured maximum. A configured max length of 0
# (or less) disables shortening entirely.
defp shortname(name) do
  with max_length when max_length > 0 <-
         Config.get([Pleroma.Upload, :filename_display_max_length], 30),
       true <- String.length(name) > max_length do
    # String.slice/3 takes exactly max_length graphemes; the previous
    # 0..max_length range form was off by one and kept max_length + 1.
    String.slice(name, 0, max_length) <> "…"
  else
    _ -> name
  end
end
|
2018-05-11 05:32:59 -06:00
|
|
|
|
|
2020-05-07 02:14:54 -06:00
|
|
|
|
@spec confirm_current_password(User.t(), String.t()) :: {:ok, User.t()} | {:error, String.t()}
# Verifies `password` against the cached local user's stored hash. Remote
# users (local: false) always fail, as does a wrong password.
def confirm_current_password(user, password) do
  with %User{local: true} = db_user <- User.get_cached_by_id(user.id),
       true <- Pleroma.Password.checkpw(password, db_user.password_hash) do
    {:ok, db_user}
  else
    _ -> {:error, dgettext("errors", "Invalid password.")}
  end
end
|
2018-08-12 13:24:10 -06:00
|
|
|
|
|
2019-01-24 13:30:43 -07:00
|
|
|
|
# Extends the notification recipient list with the activity's "to"
# addressees; activities without a "to" field leave it unchanged.
def maybe_notify_to_recipients(
      recipients,
      %Activity{data: %{"to" => to, "type" => _type}} = _activity
    ),
    do: recipients ++ to

def maybe_notify_to_recipients(recipients, _), do: recipients
|
|
|
|
|
|
2019-01-24 13:30:43 -07:00
|
|
|
|
# For Create activities, extends the notification recipient list with the
# users mentioned (via "Mention" tags) in the created object.
def maybe_notify_mentioned_recipients(
      recipients,
      %Activity{data: %{"to" => _to, "type" => type} = data} = activity
    )
    when type == "Create" do
  object = Object.normalize(activity, fetch: false)

  # Prefer the normalized object; fall back to the embedded object map,
  # then to an empty map (yielding no extra mentions).
  object_data =
    cond do
      not is_nil(object) ->
        object.data

      is_map(data["object"]) ->
        data["object"]

      true ->
        %{}
    end

  tagged_mentions = maybe_extract_mentions(object_data)

  recipients ++ tagged_mentions
end

def maybe_notify_mentioned_recipients(recipients, _), do: recipients
|
|
|
|
|
|
2019-04-05 07:20:13 -06:00
|
|
|
|
# For top-level Create activities, extends the notification recipient list
# with the ap_ids of users subscribed to the author (limited to those who
# can actually see the activity).
def maybe_notify_subscribers(
      recipients,
      %Activity{data: %{"actor" => actor, "type" => "Create"}} = activity
    ) do
  # Do not notify subscribers if author is making a reply
  with %Object{data: object} <- Object.normalize(activity, fetch: false),
       nil <- object["inReplyTo"],
       %User{} = user <- User.get_cached_by_ap_id(actor) do
    subscriber_ids =
      user
      |> User.subscriber_users()
      |> Enum.filter(&Visibility.visible_for_user?(activity, &1))
      |> Enum.map(& &1.ap_id)

    recipients ++ subscriber_ids
  else
    _e -> recipients
  end
end

def maybe_notify_subscribers(recipients, _), do: recipients
|
|
|
|
|
|
2019-11-12 04:48:14 -07:00
|
|
|
|
# For Move activities, extends the notification recipient list with the
# ap_ids of the moving account's followers, so they learn about the move.
def maybe_notify_followers(recipients, %Activity{data: %{"type" => "Move"}} = activity) do
  with %User{} = user <- User.get_cached_by_ap_id(activity.actor) do
    user
    |> User.get_followers()
    |> Enum.map(& &1.ap_id)
    |> Enum.concat(recipients)
  else
    _e -> recipients
  end
end

def maybe_notify_followers(recipients, _), do: recipients
|
|
|
|
|
|
2019-01-24 13:30:43 -07:00
|
|
|
|
# Extracts the deduplicated hrefs of all "Mention" tags from an object's
# tag list; any other input yields an empty list.
def maybe_extract_mentions(%{"tag" => tag}) do
  for %{"type" => "Mention"} = mention <- tag, uniq: true do
    mention["href"]
  end
end

def maybe_extract_mentions(_), do: []
|
2019-02-20 09:51:25 -07:00
|
|
|
|
|
2019-02-26 16:32:26 -07:00
|
|
|
|
# Renders a report comment as plain-text HTML. No comment is valid and
# yields an empty parse triple; over-long comments are rejected.
def make_report_content_html(nil), do: {:ok, {nil, [], []}}

def make_report_content_html(comment) do
  max_size = Config.get([:instance, :max_report_comment_size], 1000)

  if String.length(comment) <= max_size do
    {:ok, format_input(comment, "text/plain")}
  else
    {:error,
     dgettext("errors", "Comment must be up to %{max_size} characters", max_size: max_size)}
  end
end
|
|
|
|
|
|
2020-04-28 06:50:37 -06:00
|
|
|
|
# Loads the statuses referenced by a report, restricted to activities
# actually authored by the reported user (prevents attaching third-party
# statuses to a report). Without status_ids, returns {:ok, nil}.
def get_report_statuses(%User{ap_id: actor}, %{status_ids: status_ids})
    when is_list(status_ids) do
  {:ok, Activity.all_by_actor_and_id(actor, status_ids)}
end

def get_report_statuses(_, _), do: {:ok, nil}
|
2019-03-21 17:17:53 -06:00
|
|
|
|
|
2019-09-24 03:10:54 -06:00
|
|
|
|
# Validates the full status payload against the instance character limit.
# An empty status is only allowed when it carries at least one attachment.
def validate_character_limit("" = _full_payload, [] = _attachments) do
  {:error, dgettext("errors", "Cannot post an empty status without attachments")}
end

def validate_character_limit(full_payload, _attachments) do
  limit = Config.get([:instance, :limit])
  length = String.length(full_payload)

  if length <= limit do
    :ok
  else
    {:error, dgettext("errors", "The status is over the character limit")}
  end
end
|
2017-05-17 10:00:20 -06:00
|
|
|
|
end
|