# Pleroma: A lightweight social networking server
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.RichMedia.Parser do
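  @moduledoc """
  Fetches a URL and parses rich media metadata out of it for link previews
  ("cards"), caching both successful and failed results in Cachex. The cache
  implementation is read from the `[:cachex, :provider]` config, which lets
  tests substitute an alternative provider.
  """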

  require Logger

  @cachex Pleroma.Config.get([:cachex, :provider], Cachex)
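
  # Parser modules are read at call time (rather than stored in a module
  # attribute) so runtime config changes take effect without recompilation.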
  defp parsers do
    Pleroma.Config.get([:rich_media, :parsers])
  end

  def parse(nil), do: {:error, "No URL provided"}
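
  # In the test environment parse/1 skips the cache and TTL handling and
  # calls parse_with_timeout/1 directly; the branch below is decided at
  # compile time.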
  if Pleroma.Config.get(:env) == :test do
    @spec parse(String.t()) :: {:ok, map()} | {:error, any()}
    def parse(url), do: parse_with_timeout(url)
  else
    @spec parse(String.t()) :: {:ok, map()} | {:error, any()}
    def parse(url) do
      with {:ok, data} <- get_cached_or_parse(url),
           {:ok, _} <- set_ttl_based_on_image(data, url) do
        {:ok, data}
      end
    end

    defp get_cached_or_parse(url) do
      case @cachex.fetch(:rich_media_cache, url, fn ->
             case parse_with_timeout(url) do
               {:ok, _} = res ->
                 {:commit, res}

               {:error, reason} = e ->
                 # Errors have to be logged here instead of together with the
                 # TTL handling at the bottom. Otherwise, every process that
                 # was waiting for the rich media card while it was being
                 # generated would log the error again, producing log spam.
                 # Ideally the TTL would be set here as well, so it isn't
                 # overridden once per waiter, but a TTL can't be set on an
                 # entry that doesn't exist yet, and Cachex doesn't support
                 # returning a TTL from the fetch callback.
                 log_error(url, reason)
                 {:commit, e}
             end
           end) do
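        # Cachex returns `{:commit, value}` when the fallback above just ran
        # and `{:ok, value}` on a plain cache hit, so the failure TTL is only
        # set once, for freshly generated errors.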
        {action, res} when action in [:commit, :ok] ->
          case res do
            {:ok, _data} = res ->
              res

            {:error, reason} = e ->
              if action == :commit, do: set_error_ttl(url, reason)
              e
          end

        {:error, e} ->
          {:error, {:cachex_error, e}}
      end
    end

    defp set_error_ttl(_url, :body_too_large), do: :ok
    defp set_error_ttl(_url, {:content_type, _}), do: :ok

    # The TTL is not set for the errors above, since they are unlikely to
    # change with time.
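
    # Other failures get a `:failure_backoff` TTL (one minute by default) so
    # transient errors can be retried once the entry expires.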
    defp set_error_ttl(url, _reason) do
      ttl = Pleroma.Config.get([:rich_media, :failure_backoff], 60_000)
      @cachex.expire(:rich_media_cache, url, ttl)
      :ok
    end

    defp log_error(url, {:invalid_metadata, data}) do
      Logger.debug(fn -> "Incomplete or invalid metadata for #{url}: #{inspect(data)}" end)
    end

    defp log_error(url, reason) do
      Logger.warn(fn -> "Rich media error for #{url}: #{inspect(reason)}" end)
    end
  end

  @doc """
  Sets the rich media cache TTL based on the expiration time of the image.

  To supply a TTL, adopt the `Pleroma.Web.RichMedia.Parser.TTL` behaviour:

  ## Example

      defmodule MyModule do
        @behaviour Pleroma.Web.RichMedia.Parser.TTL

        def ttl(data, url) do
          image_url = Map.get(data, :image)
          # parse the image URL to find when it expires
          # and return that time as a unix timestamp
          parse_ttl_from_url(image_url)
        end
      end

  Then register the module in the config:

      config :pleroma, :rich_media,
        ttl_setters: [MyModule]
  """
  @spec set_ttl_based_on_image(map(), String.t()) ::
          {:ok, Integer.t() | :noop} | {:error, :no_key}
  def set_ttl_based_on_image(data, url) do
    case get_ttl_from_image(data, url) do
      {:ok, ttl} when is_number(ttl) ->
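        # TTL setters return a unix timestamp in seconds, while
        # `Cachex.expire_at/3` expects milliseconds, hence the conversion.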
        ttl = ttl * 1000

        case @cachex.expire_at(:rich_media_cache, url, ttl) do
          {:ok, true} -> {:ok, ttl}
          {:ok, false} -> {:error, :no_key}
        end

      _ ->
        {:ok, :noop}
    end
  end

  defp get_ttl_from_image(data, url) do
    [:rich_media, :ttl_setters]
    |> Pleroma.Config.get()
    |> Enum.reduce({:ok, nil}, fn
      module, {:ok, _ttl} ->
        module.ttl(data, url)

      _, error ->
        error
    end)
  end

  def parse_url(url) do
    with {:ok, %Tesla.Env{body: html}} <- Pleroma.Web.RichMedia.Helpers.rich_media_get(url),
         {:ok, html} <- Floki.parse_document(html) do
      html
      |> maybe_parse()
      |> Map.put("url", url)
      |> clean_parsed_data()
      |> check_parsed_data()
    end
  end
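
  # The fetch runs in an unlinked supervised task so a crash doesn't take the
  # caller down with it; `Task.await/2` exits after five seconds, which the
  # `catch` clause turns into `{:error, :timeout}`.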
  def parse_with_timeout(url) do
    try do
      task =
        Task.Supervisor.async_nolink(Pleroma.TaskSupervisor, fn ->
          parse_url(url)
        end)

      Task.await(task, 5000)
    catch
      :exit, {:timeout, _} ->
        Logger.warn("Timeout while fetching rich media for #{url}")
        {:error, :timeout}
    end
  end
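
  # Tries the configured parsers in order, stopping at the first one that
  # returns a non-empty map.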
  defp maybe_parse(html) do
    Enum.reduce_while(parsers(), %{}, fn parser, acc ->
      case parser.parse(html, acc) do
        data when data != %{} -> {:halt, data}
        _ -> {:cont, acc}
      end
    end)
  end
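
  # A card is only considered valid if it has a non-empty string title.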
  defp check_parsed_data(%{"title" => title} = data)
       when is_binary(title) and title != "" do
    {:ok, data}
  end

  defp check_parsed_data(data) do
    {:error, {:invalid_metadata, data}}
  end
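
  # Drops any entry that cannot be encoded as JSON.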
  defp clean_parsed_data(data) do
    data
    |> Enum.reject(fn {key, val} ->
      not match?({:ok, _}, Jason.encode(%{key => val}))
    end)
    |> Map.new()
  end
end