2017-05-17 10:00:09 -06:00
|
|
|
defmodule Pleroma.Formatter do
|
2017-06-14 05:58:56 -06:00
|
|
|
alias Pleroma.User
|
2017-12-12 04:30:24 -07:00
|
|
|
alias Pleroma.Web.MediaProxy
|
2017-05-17 10:00:09 -06:00
|
|
|
|
|
|
|
# Matches a "#" followed by one or more word characters (Unicode-aware).
@tag_regex ~r/\#\w+/u

@doc """
Extracts hashtags from `text` as `{full_tag, downcased_name}` tuples.

If `data["sensitive"]` is truthy (`true`, `"True"`, `"true"` or `"1"`),
an implicit `{"#nsfw", "nsfw"}` tag is prepended to the result.
"""
def parse_tags(text, data \\ %{}) do
  tags =
    @tag_regex
    |> Regex.scan(text)
    |> Enum.map(fn ["#" <> tag = full_tag] -> {full_tag, String.downcase(tag)} end)

  # Replaces the original pipe-into-anonymous-function hack with a plain `if`.
  if data["sensitive"] in [true, "True", "true", "1"] do
    [{"#nsfw", "nsfw"} | tags]
  else
    tags
  end
end
|
2017-06-14 05:58:56 -06:00
|
|
|
|
|
|
|
@doc """
Finds `@nickname` / `@nickname@host` mentions in `text` and resolves each
against the local user cache, returning `{full_match, %User{}}` tuples.
Mentions that do not resolve to a known user are dropped.
"""
def parse_mentions(text) do
  # Modified from https://www.w3.org/TR/html5/forms.html#valid-e-mail-address
  mention_regex =
    ~r/@[a-zA-Z0-9.!#$%&'*+\/=?^_`{|}~-]+@?[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*/u

  mention_regex
  |> Regex.scan(text)
  |> List.flatten()
  |> Enum.uniq()
  |> Enum.map(fn "@" <> nickname = full_match ->
    {full_match, User.get_cached_by_nickname(nickname)}
  end)
  |> Enum.filter(fn {_full_match, user} -> user end)
end
|
2017-06-19 09:51:43 -06:00
|
|
|
|
2017-09-16 07:47:45 -06:00
|
|
|
# Names of the bundled Finnish-themed "finmoji" emoji set. Each name is
# paired with its 128px PNG asset path via @finmoji_with_filenames below.
@finmoji [
  "a_trusted_friend",
  "alandislands",
  "association",
  "auroraborealis",
  "baby_in_a_box",
  "bear",
  "black_gold",
  "christmasparty",
  "crosscountryskiing",
  "cupofcoffee",
  "education",
  "fashionista_finns",
  "finnishlove",
  "flag",
  "forest",
  "four_seasons_of_bbq",
  "girlpower",
  "handshake",
  "happiness",
  "headbanger",
  "icebreaker",
  "iceman",
  "joulutorttu",
  "kaamos",
  "kalsarikannit_f",
  "kalsarikannit_m",
  "karjalanpiirakka",
  "kicksled",
  "kokko",
  "lavatanssit",
  "losthopes_f",
  "losthopes_m",
  "mattinykanen",
  "meanwhileinfinland",
  "moominmamma",
  "nordicfamily",
  "out_of_office",
  "peacemaker",
  "perkele",
  "pesapallo",
  "polarbear",
  "pusa_hispida_saimensis",
  "reindeer",
  "sami",
  "sauna_f",
  "sauna_m",
  "sauna_whisk",
  "sisu",
  "stuck",
  "suomimainittu",
  "superfood",
  "swan",
  "the_cap",
  "the_conductor",
  "the_king",
  "the_voice",
  "theoriginalsanta",
  "tomoffinland",
  "torillatavataan",
  "unbreakable",
  "waiting",
  "white_nights",
  "woollysocks"
]
|
|
|
|
|
|
|
|
# Pair each finmoji name with the path of its 128px PNG asset.
@finmoji_with_filenames (for name <- @finmoji do
  {name, "/finmoji/128px/#{name}-128.png"}
end)
|
2017-06-19 09:51:43 -06:00
|
|
|
|
2018-01-31 07:23:31 -07:00
|
|
|
# Emoji loaded at COMPILE time from config/emoji.txt, optionally extended by
# config/custom_emoji.txt. Each non-empty line is "name,file" (comma plus
# optional whitespace). If the main file is missing, the list is empty; a
# missing custom file is silently treated as empty.
@emoji_from_file (case File.read("config/emoji.txt") do
  {:ok, default} ->
    custom =
      case File.read("config/custom_emoji.txt") do
        {:ok, contents} -> contents
        _ -> ""
      end

    (default <> "\n" <> custom)
    |> String.trim()
    |> String.split(~r/\n+/)
    |> Enum.map(fn line ->
      [name, file] = String.split(line, ~r/,\s*/)
      {name, file}
    end)

  _ ->
    []
end)

# Every emoji known to the formatter: bundled finmoji plus file-configured ones.
@emoji @finmoji_with_filenames ++ @emoji_from_file
|
2017-09-16 07:47:45 -06:00
|
|
|
|
2018-03-28 01:56:32 -06:00
|
|
|
@doc """
Replaces `:name:` shortcodes in `text` with `<img>` tags for each known
emoji. Passing `nil` as the emoji list returns the text unchanged.
Names and file paths are stripped of HTML tags before being interpolated.
"""
def emojify(text, emoji \\ @emoji)

def emojify(text, nil), do: text

def emojify(text, emoji) do
  Enum.reduce(emoji, text, fn {name, file}, acc ->
    # Sanitize both pieces before they are interpolated into markup.
    # (Renamed from the original, which shadowed `emoji` and `text`.)
    clean_name = HtmlSanitizeEx.strip_tags(name)
    clean_file = HtmlSanitizeEx.strip_tags(file)

    String.replace(
      acc,
      ":#{clean_name}:",
      "<img height='32px' width='32px' alt='#{clean_name}' title='#{clean_name}' src='#{MediaProxy.url(clean_file)}' />"
    )
  end)
end
|
2017-09-16 08:14:23 -06:00
|
|
|
|
|
|
|
@doc "Returns the known emoji whose `:name:` shortcode occurs in `text`."
def get_emoji(text) do
  for {name, _file} = pair <- @emoji, String.contains?(text, ":#{name}:"), do: pair
end
|
2017-10-19 13:51:56 -06:00
|
|
|
|
|
|
|
# NOTE(review): despite the name, this returns the FULL emoji list (finmoji
# plus file-configured), not only the custom ones — callers appear to rely
# on that; confirm before renaming or narrowing.
def get_custom_emoji(), do: @emoji
|
2018-03-24 04:03:10 -06:00
|
|
|
|
|
|
|
# Matches http/https URLs; the trailing class keeps the final character a
# word or slash so trailing punctuation is not swallowed.
@link_regex ~r/https?:\/\/[\w\.\/?=\-#%&@~\(\)]+[\w\/]/u

@doc """
HTML-escapes `text` while leaving anything matching `@link_regex` intact.

Splitting with `include_captures: true` yields alternating non-link/link
chunks, so escaping every second chunk (the even indices) touches only
the non-link text.
"""
def html_escape(text) do
  @link_regex
  |> Regex.split(text, include_captures: true)
  |> Enum.map_every(2, fn chunk ->
    chunk
    |> Phoenix.HTML.html_escape()
    |> elem(1)
  end)
  |> Enum.join()
end
|
|
|
|
|
|
|
|
@doc "changes http:... links to html links"
def add_links({subs, text}) do
  # Each found URL gets a unique placeholder so later escaping passes
  # cannot mangle it; `finalize/1` swaps the placeholders back.
  links =
    @link_regex
    |> Regex.scan(text)
    |> Enum.map(fn [url] -> {Ecto.UUID.generate(), url} end)

  uuid_text =
    Enum.reduce(links, text, fn {uuid, url}, acc -> String.replace(acc, url, uuid) end)

  link_subs = Enum.map(links, fn {uuid, url} -> {uuid, "<a href='#{url}'>#{url}</a>"} end)

  {subs ++ link_subs, uuid_text}
end
|
|
|
|
|
|
|
|
@doc "Adds the links to mentioned users"
def add_user_links({subs, text}, mentions) do
  # Longest names first, so "@foobar" is never partially clobbered by "@foo".
  mentions =
    mentions
    |> Enum.sort_by(fn {name, _user} -> -String.length(name) end)
    |> Enum.map(fn {name, user} -> {name, user, Ecto.UUID.generate()} end)

  uuid_text =
    Enum.reduce(mentions, text, fn {match, _user, uuid}, acc ->
      String.replace(acc, match, uuid)
    end)

  mention_subs =
    Enum.map(mentions, fn {match, %User{ap_id: ap_id}, uuid} ->
      # "@nick@host" -> "nick": drop the leading "@", keep only the name part.
      [_, short_match | _] = String.split(match, "@")
      {uuid, "<span><a href='#{ap_id}'>@<span>#{short_match}</span></a></span>"}
    end)

  {subs ++ mention_subs, uuid_text}
end
|
|
|
|
|
|
|
|
@doc "Adds the hashtag links"
def add_hashtag_links({subs, text}, tags) do
  # Longest tags first, so "#foobar" is never partially clobbered by "#foo".
  tags =
    tags
    |> Enum.sort_by(fn {name, _short} -> -String.length(name) end)
    |> Enum.map(fn {name, short} -> {name, short, Ecto.UUID.generate()} end)

  uuid_text =
    Enum.reduce(tags, text, fn {match, _short, uuid}, acc ->
      String.replace(acc, match, uuid)
    end)

  tag_subs =
    Enum.map(tags, fn {_full, tag, uuid} ->
      {uuid, "<a href='#{Pleroma.Web.base_url}/tag/#{tag}' rel='tag'>##{tag}</a>"}
    end)

  {subs ++ tag_subs, uuid_text}
end
|
|
|
|
|
|
|
|
@doc """
Replaces every placeholder UUID recorded in `subs` with its final markup,
producing the finished text.
"""
def finalize({subs, text}) do
  Enum.reduce(subs, text, fn {uuid, markup}, acc ->
    String.replace(acc, uuid, markup)
  end)
end
|
2017-05-17 10:00:09 -06:00
|
|
|
end
|