# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.RichMedia.Parser do
  @moduledoc """
  Fetches a remote page and extracts rich-media metadata (title, image, …)
  from its HTML via the configured parser modules, caching successful
  results in the `:rich_media_cache` Cachex cache.
  """

  # HTTP client options for the metadata fetch: dedicated connection pool,
  # short receive timeout, and a 2 MB body cap so a slow or oversized page
  # cannot stall or exhaust the server.
  @hackney_options [
    pool: :media,
    recv_timeout: 2_000,
    max_body: 2_000_000,
    with_body: true
  ]

  # Parser modules are read from config at call time so they can be
  # changed without recompiling this module.
  defp parsers do
    Pleroma.Config.get([:rich_media, :parsers])
  end

  def parse(nil), do: {:error, "No URL provided"}

  # NOTE: this branch is decided at compile time. In the :test env the
  # cache is bypassed entirely so tests stay deterministic.
  if Pleroma.Config.get(:env) == :test do
    def parse(url), do: parse_url(url)
  else
    def parse(url) do
      try do
        # Cache whatever parse_url/1 returns (including {:error, _} tuples,
        # so repeated failures are not re-fetched), then adjust the TTL.
        Cachex.fetch!(:rich_media_cache, url, fn _ ->
          {:commit, parse_url(url)}
        end)
        |> set_ttl_based_on_image(url)
      rescue
        e ->
          {:error, "Cachex error: #{inspect(e)}"}
      end
    end
  end

  @doc """
  Set the rich media cache based on the expiration time of image.

  Adopt behaviour `Pleroma.Web.RichMedia.Parser.TTL`

  ## Example

      defmodule MyModule do
        @behaviour Pleroma.Web.RichMedia.Parser.TTL
        def ttl(data, url) do
          image_url = Map.get(data, :image)
          # do some parsing in the url and get the ttl of the image
          # and return ttl is unix time
          parse_ttl_from_url(image_url)
        end
      end

  Define the module in the config

      config :pleroma, :rich_media,
        ttl_setters: [MyModule]
  """
  def set_ttl_based_on_image({:ok, data}, url) do
    # Only set an expiry when the cache entry has none yet ({:ok, nil})
    # and a configured TTL setter produced a numeric unix time (seconds);
    # Cachex.expire_at/3 expects milliseconds, hence * 1000.
    with {:ok, nil} <- Cachex.ttl(:rich_media_cache, url),
         ttl when is_number(ttl) <- get_ttl_from_image(data, url) do
      Cachex.expire_at(:rich_media_cache, url, ttl * 1000)
      {:ok, data}
    else
      _ ->
        {:ok, data}
    end
  end

  # Fix: pass non-{:ok, _} results (e.g. {:error, msg} from parse_url/1)
  # through unchanged. Previously these raised FunctionClauseError, which
  # the caller's rescue then mislabeled as a "Cachex error".
  def set_ttl_based_on_image(data, _url), do: data

  # Runs the configured TTL setters in order; the first one to return a
  # non-{:ok, _} value (i.e. a concrete ttl or an error) short-circuits
  # the chain via the accumulator.
  defp get_ttl_from_image(data, url) do
    Pleroma.Config.get([:rich_media, :ttl_setters])
    |> Enum.reduce({:ok, nil}, fn
      module, {:ok, _ttl} ->
        module.ttl(data, url)

      _, error ->
        error
    end)
  end

  # Fetches the URL and runs the parser pipeline. Any exception (HTTP
  # failure, malformed HTML, parser crash) is converted to an {:error, _}
  # tuple at this boundary.
  defp parse_url(url) do
    try do
      {:ok, %Tesla.Env{body: html}} = Pleroma.HTTP.get(url, [], adapter: @hackney_options)

      html
      |> parse_html()
      |> maybe_parse()
      |> Map.put(:url, url)
      |> clean_parsed_data()
      |> check_parsed_data()
    rescue
      e ->
        {:error, "Parsing error: #{inspect(e)}"}
    end
  end

  defp parse_html(html), do: Floki.parse(html)

  # Tries each configured parser in turn, stopping at the first success;
  # falls back to an empty map when none matches.
  defp maybe_parse(html) do
    Enum.reduce_while(parsers(), %{}, fn parser, acc ->
      case parser.parse(html, acc) do
        {:ok, data} -> {:halt, data}
        {:error, _msg} -> {:cont, acc}
      end
    end)
  end

  # Metadata is only considered usable when it carries a non-empty title.
  defp check_parsed_data(%{title: title} = data)
       when is_binary(title) and byte_size(title) > 0 do
    {:ok, data}
  end

  defp check_parsed_data(data) do
    {:error, "Found metadata was invalid or incomplete: #{inspect(data)}"}
  end

  # Drops any key/value pair that cannot be JSON-encoded, so the result
  # is always safe to serialize for API responses.
  defp clean_parsed_data(data) do
    data
    |> Enum.reject(fn {key, val} ->
      with {:ok, _} <- Jason.encode(%{key => val}) do
        false
      else
        _ -> true
      end
    end)
    |> Map.new()
  end
end