2018-12-23 13:04:54 -07:00
|
|
|
# Pleroma: A lightweight social networking server
|
2019-01-04 08:35:41 -07:00
|
|
|
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
|
2018-12-23 13:04:54 -07:00
|
|
|
# SPDX-License-Identifier: AGPL-3.0-only
|
|
|
|
|
2017-09-15 06:17:36 -06:00
|
|
|
defmodule Pleroma.Web.CommonAPI.Utils do
|
2018-12-11 05:31:52 -07:00
|
|
|
alias Calendar.Strftime
|
|
|
|
alias Comeonin.Pbkdf2
|
2019-02-09 08:16:26 -07:00
|
|
|
alias Pleroma.Activity
|
2019-03-04 19:52:23 -07:00
|
|
|
alias Pleroma.Config
|
2019-02-09 08:16:26 -07:00
|
|
|
alias Pleroma.Formatter
|
|
|
|
alias Pleroma.Object
|
|
|
|
alias Pleroma.Repo
|
2018-12-11 05:31:52 -07:00
|
|
|
alias Pleroma.User
|
2017-05-17 10:00:20 -06:00
|
|
|
alias Pleroma.Web.ActivityPub.Utils
|
2019-04-07 08:11:29 -06:00
|
|
|
alias Pleroma.Web.ActivityPub.Visibility
|
2018-08-12 13:24:10 -06:00
|
|
|
alias Pleroma.Web.Endpoint
|
2018-10-29 11:26:15 -06:00
|
|
|
alias Pleroma.Web.MediaProxy
|
2017-05-17 10:00:20 -06:00
|
|
|
|
2019-04-02 03:25:51 -06:00
|
|
|
require Logger
|
2017-05-17 10:00:20 -06:00
|
|
|
|
2017-09-15 06:17:36 -06:00
|
|
|
# This is a hack for twidere.
|
|
|
|
def get_by_id_or_ap_id(id) do
|
2019-03-22 18:28:16 -06:00
|
|
|
activity =
|
|
|
|
Activity.get_by_id_with_object(id) || Activity.get_create_by_object_ap_id_with_object(id)
|
2018-03-30 07:01:53 -06:00
|
|
|
|
2018-06-03 11:11:22 -06:00
|
|
|
activity &&
|
|
|
|
if activity.data["type"] == "Create" do
|
|
|
|
activity
|
|
|
|
else
|
2019-03-22 18:22:14 -06:00
|
|
|
Activity.get_create_by_object_ap_id_with_object(activity.data["object"])
|
2018-06-03 11:11:22 -06:00
|
|
|
end
|
2017-09-15 06:17:36 -06:00
|
|
|
end
|
|
|
|
|
2018-11-02 10:33:51 -06:00
|
|
|
def get_replied_to_activity(""), do: nil
|
|
|
|
|
2017-09-15 06:17:36 -06:00
|
|
|
def get_replied_to_activity(id) when not is_nil(id) do
|
2019-04-02 03:50:31 -06:00
|
|
|
Activity.get_by_id(id)
|
2017-09-15 06:17:36 -06:00
|
|
|
end
|
2018-03-30 07:01:53 -06:00
|
|
|
|
2017-09-15 06:17:36 -06:00
|
|
|
def get_replied_to_activity(_), do: nil
|
|
|
|
|
2019-01-04 09:27:46 -07:00
|
|
|
def attachments_from_ids(data) do
|
|
|
|
if Map.has_key?(data, "descriptions") do
|
|
|
|
attachments_from_ids_descs(data["media_ids"], data["descriptions"])
|
|
|
|
else
|
|
|
|
attachments_from_ids_no_descs(data["media_ids"])
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
def attachments_from_ids_no_descs(ids) do
|
2018-03-30 07:01:53 -06:00
|
|
|
Enum.map(ids || [], fn media_id ->
|
2017-05-17 10:00:20 -06:00
|
|
|
Repo.get(Object, media_id).data
|
|
|
|
end)
|
|
|
|
end
|
|
|
|
|
2019-01-04 09:27:46 -07:00
|
|
|
def attachments_from_ids_descs(ids, descs_str) do
|
2019-01-04 08:35:41 -07:00
|
|
|
{_, descs} = Jason.decode(descs_str)
|
|
|
|
|
|
|
|
Enum.map(ids || [], fn media_id ->
|
|
|
|
Map.put(Repo.get(Object, media_id).data, "name", descs[media_id])
|
2017-05-17 10:00:20 -06:00
|
|
|
end)
|
|
|
|
end
|
|
|
|
|
2018-02-18 06:45:08 -07:00
|
|
|
def to_for_user_and_mentions(user, mentions, inReplyTo, "public") do
|
2018-03-30 07:01:53 -06:00
|
|
|
mentioned_users = Enum.map(mentions, fn {_, %{ap_id: ap_id}} -> ap_id end)
|
2018-11-08 12:17:01 -07:00
|
|
|
|
|
|
|
to = ["https://www.w3.org/ns/activitystreams#Public" | mentioned_users]
|
|
|
|
cc = [user.follower_address]
|
2018-03-30 07:01:53 -06:00
|
|
|
|
2017-09-15 06:17:36 -06:00
|
|
|
if inReplyTo do
|
2018-11-08 12:17:01 -07:00
|
|
|
{Enum.uniq([inReplyTo.data["actor"] | to]), cc}
|
2017-08-28 11:17:38 -06:00
|
|
|
else
|
2018-02-18 06:45:08 -07:00
|
|
|
{to, cc}
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
def to_for_user_and_mentions(user, mentions, inReplyTo, "unlisted") do
|
2018-11-08 12:17:01 -07:00
|
|
|
mentioned_users = Enum.map(mentions, fn {_, %{ap_id: ap_id}} -> ap_id end)
|
|
|
|
|
|
|
|
to = [user.follower_address | mentioned_users]
|
|
|
|
cc = ["https://www.w3.org/ns/activitystreams#Public"]
|
|
|
|
|
|
|
|
if inReplyTo do
|
|
|
|
{Enum.uniq([inReplyTo.data["actor"] | to]), cc}
|
|
|
|
else
|
|
|
|
{to, cc}
|
|
|
|
end
|
2018-02-18 06:45:08 -07:00
|
|
|
end
|
|
|
|
|
|
|
|
def to_for_user_and_mentions(user, mentions, inReplyTo, "private") do
|
|
|
|
{to, cc} = to_for_user_and_mentions(user, mentions, inReplyTo, "direct")
|
|
|
|
{[user.follower_address | to], cc}
|
|
|
|
end
|
|
|
|
|
2018-05-04 14:59:01 -06:00
|
|
|
def to_for_user_and_mentions(_user, mentions, inReplyTo, "direct") do
|
2018-03-30 07:01:53 -06:00
|
|
|
mentioned_users = Enum.map(mentions, fn {_, %{ap_id: ap_id}} -> ap_id end)
|
|
|
|
|
2018-02-18 06:45:08 -07:00
|
|
|
if inReplyTo do
|
|
|
|
{Enum.uniq([inReplyTo.data["actor"] | mentioned_users]), []}
|
|
|
|
else
|
|
|
|
{mentioned_users, []}
|
2017-08-28 11:17:38 -06:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2018-09-01 18:14:25 -06:00
|
|
|
def make_content_html(
|
|
|
|
status,
|
|
|
|
attachments,
|
2019-03-20 14:09:36 -06:00
|
|
|
data,
|
|
|
|
visibility
|
2018-09-01 18:14:25 -06:00
|
|
|
) do
|
2019-02-26 16:32:26 -07:00
|
|
|
no_attachment_links =
|
|
|
|
data
|
|
|
|
|> Map.get("no_attachment_links", Config.get([:instance, :no_attachment_links]))
|
|
|
|
|> Kernel.in([true, "true"])
|
|
|
|
|
|
|
|
content_type = get_content_type(data["content_type"])
|
|
|
|
|
2019-03-20 14:09:36 -06:00
|
|
|
options =
|
|
|
|
if visibility == "direct" && Config.get([:instance, :safe_dm_mentions]) do
|
|
|
|
[safe_mention: true]
|
|
|
|
else
|
|
|
|
[]
|
|
|
|
end
|
|
|
|
|
2017-09-15 06:17:36 -06:00
|
|
|
status
|
2019-03-20 14:09:36 -06:00
|
|
|
|> format_input(content_type, options)
|
2017-12-07 11:44:09 -07:00
|
|
|
|> maybe_add_attachments(attachments, no_attachment_links)
|
2019-02-26 16:32:26 -07:00
|
|
|
|> maybe_add_nsfw_tag(data)
|
|
|
|
end
|
|
|
|
|
|
|
|
defp get_content_type(content_type) do
|
|
|
|
if Enum.member?(Config.get([:instance, :allowed_post_formats]), content_type) do
|
|
|
|
content_type
|
|
|
|
else
|
|
|
|
"text/plain"
|
|
|
|
end
|
2017-09-15 06:17:36 -06:00
|
|
|
end
|
|
|
|
|
2019-02-26 16:32:26 -07:00
|
|
|
defp maybe_add_nsfw_tag({text, mentions, tags}, %{"sensitive" => sensitive})
|
|
|
|
when sensitive in [true, "True", "true", "1"] do
|
|
|
|
{text, mentions, [{"#nsfw", "nsfw"} | tags]}
|
|
|
|
end
|
|
|
|
|
|
|
|
defp maybe_add_nsfw_tag(data, _), do: data
|
|
|
|
|
2017-09-15 06:17:36 -06:00
|
|
|
  # Reuses the replied-to activity's conversation context when present;
  # otherwise generates a fresh context id.
  def make_context(%Activity{data: %{"context" => context}}), do: context
  def make_context(_), do: Utils.generate_context_id()
|
2017-09-15 06:17:36 -06:00
|
|
|
|
2019-02-26 16:32:26 -07:00
|
|
|
def maybe_add_attachments(parsed, _attachments, true = _no_links), do: parsed
|
2018-03-30 07:01:53 -06:00
|
|
|
|
2019-02-26 16:32:26 -07:00
|
|
|
def maybe_add_attachments({text, mentions, tags}, attachments, _no_links) do
|
|
|
|
text = add_attachments(text, attachments)
|
|
|
|
{text, mentions, tags}
|
2017-12-07 11:44:09 -07:00
|
|
|
end
|
2018-03-30 07:01:53 -06:00
|
|
|
|
2017-05-17 10:00:20 -06:00
|
|
|
def add_attachments(text, attachments) do
|
2018-03-30 07:01:53 -06:00
|
|
|
attachment_text =
|
|
|
|
Enum.map(attachments, fn
|
2018-10-29 11:59:24 -06:00
|
|
|
%{"url" => [%{"href" => href} | _]} = attachment ->
|
|
|
|
name = attachment["name"] || URI.decode(Path.basename(href))
|
2018-10-29 11:26:15 -06:00
|
|
|
href = MediaProxy.url(href)
|
2018-03-30 07:01:53 -06:00
|
|
|
"<a href=\"#{href}\" class='attachment'>#{shortname(name)}</a>"
|
|
|
|
|
|
|
|
_ ->
|
|
|
|
""
|
|
|
|
end)
|
|
|
|
|
2017-11-18 05:46:54 -07:00
|
|
|
Enum.join([text | attachment_text], "<br>")
|
2017-05-17 10:00:20 -06:00
|
|
|
end
|
|
|
|
|
2019-02-26 16:32:26 -07:00
|
|
|
def format_input(text, format, options \\ [])
|
2019-01-17 23:30:16 -07:00
|
|
|
|
2018-12-14 02:41:55 -07:00
|
|
|
@doc """
|
|
|
|
Formatting text to plain text.
|
|
|
|
"""
|
2019-02-26 16:32:26 -07:00
|
|
|
def format_input(text, "text/plain", options) do
|
2017-12-07 12:34:25 -07:00
|
|
|
text
|
2018-09-01 18:14:25 -06:00
|
|
|
|> Formatter.html_escape("text/plain")
|
2019-02-26 16:32:26 -07:00
|
|
|
|> Formatter.linkify(options)
|
|
|
|
|> (fn {text, mentions, tags} ->
|
|
|
|
{String.replace(text, ~r/\r?\n/, "<br>"), mentions, tags}
|
|
|
|
end).()
|
2017-09-17 07:21:44 -06:00
|
|
|
end
|
|
|
|
|
2018-12-14 02:41:55 -07:00
|
|
|
@doc """
|
|
|
|
Formatting text to html.
|
|
|
|
"""
|
2019-02-26 16:32:26 -07:00
|
|
|
def format_input(text, "text/html", options) do
|
2018-09-01 18:14:25 -06:00
|
|
|
text
|
|
|
|
|> Formatter.html_escape("text/html")
|
2019-02-26 16:32:26 -07:00
|
|
|
|> Formatter.linkify(options)
|
2018-09-01 18:14:25 -06:00
|
|
|
end
|
|
|
|
|
2018-12-14 02:41:55 -07:00
|
|
|
@doc """
|
|
|
|
Formatting text to markdown.
|
|
|
|
"""
|
2019-02-26 16:32:26 -07:00
|
|
|
def format_input(text, "text/markdown", options) do
|
2018-09-01 18:14:25 -06:00
|
|
|
text
|
2019-04-12 13:25:53 -06:00
|
|
|
|> Formatter.mentions_escape(options)
|
2018-09-01 18:14:25 -06:00
|
|
|
|> Earmark.as_html!()
|
2019-02-26 16:32:26 -07:00
|
|
|
|> Formatter.linkify(options)
|
2018-09-01 18:14:25 -06:00
|
|
|
|> Formatter.html_escape("text/html")
|
2017-05-17 10:00:20 -06:00
|
|
|
end
|
|
|
|
|
2018-03-30 07:01:53 -06:00
|
|
|
def make_note_data(
|
|
|
|
actor,
|
|
|
|
to,
|
|
|
|
context,
|
|
|
|
content_html,
|
|
|
|
attachments,
|
2019-04-17 05:52:01 -06:00
|
|
|
in_reply_to,
|
2018-03-30 07:01:53 -06:00
|
|
|
tags,
|
|
|
|
cw \\ nil,
|
|
|
|
cc \\ []
|
|
|
|
) do
|
|
|
|
object = %{
|
|
|
|
"type" => "Note",
|
|
|
|
"to" => to,
|
|
|
|
"cc" => cc,
|
|
|
|
"content" => content_html,
|
|
|
|
"summary" => cw,
|
|
|
|
"context" => context,
|
|
|
|
"attachment" => attachments,
|
|
|
|
"actor" => actor,
|
2018-05-23 09:25:24 -06:00
|
|
|
"tag" => tags |> Enum.map(fn {_, tag} -> tag end) |> Enum.uniq()
|
2018-03-30 07:01:53 -06:00
|
|
|
}
|
2017-05-17 10:00:20 -06:00
|
|
|
|
2019-04-17 05:52:01 -06:00
|
|
|
if in_reply_to do
|
2019-04-17 05:55:26 -06:00
|
|
|
in_reply_to_object = Object.normalize(in_reply_to.data["object"])
|
2018-11-25 12:00:05 -07:00
|
|
|
|
2017-05-17 10:00:20 -06:00
|
|
|
object
|
2019-04-17 05:52:01 -06:00
|
|
|
|> Map.put("inReplyTo", in_reply_to_object.data["id"])
|
2017-05-17 10:00:20 -06:00
|
|
|
else
|
|
|
|
object
|
|
|
|
end
|
|
|
|
end
|
2017-06-19 15:12:37 -06:00
|
|
|
|
|
|
|
def format_naive_asctime(date) do
|
|
|
|
date |> DateTime.from_naive!("Etc/UTC") |> format_asctime
|
|
|
|
end
|
|
|
|
|
|
|
|
def format_asctime(date) do
|
|
|
|
Strftime.strftime!(date, "%a %b %d %H:%M:%S %z %Y")
|
|
|
|
end
|
|
|
|
|
2019-04-02 03:25:51 -06:00
|
|
|
def date_to_asctime(date) when is_binary(date) do
|
|
|
|
with {:ok, date, _offset} <- DateTime.from_iso8601(date) do
|
2017-06-19 15:12:37 -06:00
|
|
|
format_asctime(date)
|
2018-03-30 07:01:53 -06:00
|
|
|
else
|
|
|
|
_e ->
|
2019-04-02 03:25:51 -06:00
|
|
|
Logger.warn("Date #{date} in wrong format, must be ISO 8601")
|
2017-06-19 15:12:37 -06:00
|
|
|
""
|
|
|
|
end
|
|
|
|
end
|
2017-09-15 06:17:36 -06:00
|
|
|
|
2017-06-19 15:12:37 -06:00
|
|
|
def date_to_asctime(date) do
|
2019-04-02 03:25:51 -06:00
|
|
|
Logger.warn("Date #{date} in wrong format, must be ISO 8601")
|
|
|
|
""
|
2017-06-19 15:12:37 -06:00
|
|
|
end
|
2017-09-15 06:17:36 -06:00
|
|
|
|
2017-09-15 09:50:47 -06:00
|
|
|
def to_masto_date(%NaiveDateTime{} = date) do
|
|
|
|
date
|
2018-03-30 07:01:53 -06:00
|
|
|
|> NaiveDateTime.to_iso8601()
|
2017-09-15 09:50:47 -06:00
|
|
|
|> String.replace(~r/(\.\d+)?$/, ".000Z", global: false)
|
|
|
|
end
|
|
|
|
|
|
|
|
def to_masto_date(date) do
|
|
|
|
try do
|
|
|
|
date
|
2018-03-30 07:01:53 -06:00
|
|
|
|> NaiveDateTime.from_iso8601!()
|
|
|
|
|> NaiveDateTime.to_iso8601()
|
2017-09-15 09:50:47 -06:00
|
|
|
|> String.replace(~r/(\.\d+)?$/, ".000Z", global: false)
|
|
|
|
rescue
|
|
|
|
_e -> ""
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2017-09-15 06:17:36 -06:00
|
|
|
defp shortname(name) do
|
|
|
|
if String.length(name) < 30 do
|
|
|
|
name
|
|
|
|
else
|
|
|
|
String.slice(name, 0..30) <> "…"
|
|
|
|
end
|
|
|
|
end
|
2018-05-11 05:32:59 -06:00
|
|
|
|
2018-05-21 15:17:34 -06:00
|
|
|
  @doc """
  Verifies `password` against the stored hash of the user's current
  password.

  Returns `{:ok, db_user}` on success, `{:error, "Invalid password."}`
  when the user is not a known local user or the password does not match.
  """
  def confirm_current_password(user, password) do
    # Re-fetch so the check runs against the current DB state, and require
    # `local: true` — remote users have no local password hash.
    with %User{local: true} = db_user <- User.get_by_id(user.id),
         true <- Pbkdf2.checkpw(password, db_user.password_hash) do
      {:ok, db_user}
    else
      _ -> {:error, "Invalid password."}
    end
  end
|
2018-08-12 13:24:10 -06:00
|
|
|
|
2018-12-09 02:12:48 -07:00
|
|
|
def emoji_from_profile(%{info: _info} = user) do
|
2018-08-12 13:24:10 -06:00
|
|
|
(Formatter.get_emoji(user.bio) ++ Formatter.get_emoji(user.name))
|
2019-04-01 04:17:57 -06:00
|
|
|
|> Enum.map(fn {shortcode, url, _} ->
|
2018-08-12 13:24:10 -06:00
|
|
|
%{
|
|
|
|
"type" => "Emoji",
|
2018-08-13 07:21:18 -06:00
|
|
|
"icon" => %{"type" => "Image", "url" => "#{Endpoint.url()}#{url}"},
|
2018-08-12 13:24:10 -06:00
|
|
|
"name" => ":#{shortcode}:"
|
|
|
|
}
|
|
|
|
end)
|
|
|
|
end
|
2019-01-24 13:30:43 -07:00
|
|
|
|
|
|
|
def maybe_notify_to_recipients(
|
|
|
|
recipients,
|
|
|
|
%Activity{data: %{"to" => to, "type" => _type}} = _activity
|
|
|
|
) do
|
|
|
|
recipients ++ to
|
|
|
|
end
|
|
|
|
|
|
|
|
def maybe_notify_mentioned_recipients(
|
|
|
|
recipients,
|
2019-03-22 18:22:14 -06:00
|
|
|
%Activity{data: %{"to" => _to, "type" => type} = data} = activity
|
2019-01-24 13:30:43 -07:00
|
|
|
)
|
|
|
|
when type == "Create" do
|
2019-03-22 18:22:14 -06:00
|
|
|
object = Object.normalize(activity)
|
2019-01-24 13:30:43 -07:00
|
|
|
|
|
|
|
object_data =
|
|
|
|
cond do
|
|
|
|
!is_nil(object) ->
|
|
|
|
object.data
|
|
|
|
|
|
|
|
is_map(data["object"]) ->
|
|
|
|
data["object"]
|
|
|
|
|
|
|
|
true ->
|
|
|
|
%{}
|
|
|
|
end
|
|
|
|
|
|
|
|
tagged_mentions = maybe_extract_mentions(object_data)
|
|
|
|
|
|
|
|
recipients ++ tagged_mentions
|
|
|
|
end
|
|
|
|
|
|
|
|
def maybe_notify_mentioned_recipients(recipients, _), do: recipients
|
|
|
|
|
2019-04-05 07:20:13 -06:00
|
|
|
def maybe_notify_subscribers(
|
2019-04-05 07:59:34 -06:00
|
|
|
recipients,
|
2019-04-07 08:11:29 -06:00
|
|
|
%Activity{data: %{"actor" => actor, "type" => type}} = activity
|
2019-04-05 07:59:34 -06:00
|
|
|
)
|
|
|
|
when type == "Create" do
|
2019-04-08 08:56:14 -06:00
|
|
|
with %User{} = user <- User.get_cached_by_ap_id(actor) do
|
2019-04-05 07:20:13 -06:00
|
|
|
subscriber_ids =
|
|
|
|
user
|
2019-04-05 09:51:45 -06:00
|
|
|
|> User.subscribers()
|
2019-04-07 08:11:29 -06:00
|
|
|
|> Enum.filter(&Visibility.visible_for_user?(activity, &1))
|
2019-04-05 07:20:13 -06:00
|
|
|
|> Enum.map(& &1.ap_id)
|
|
|
|
|
|
|
|
recipients ++ subscriber_ids
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
def maybe_notify_subscribers(recipients, _), do: recipients
|
|
|
|
|
2019-01-24 13:30:43 -07:00
|
|
|
def maybe_extract_mentions(%{"tag" => tag}) do
|
|
|
|
tag
|
|
|
|
|> Enum.filter(fn x -> is_map(x) end)
|
|
|
|
|> Enum.filter(fn x -> x["type"] == "Mention" end)
|
|
|
|
|> Enum.map(fn x -> x["href"] end)
|
|
|
|
end
|
|
|
|
|
|
|
|
def maybe_extract_mentions(_), do: []
|
2019-02-20 09:51:25 -07:00
|
|
|
|
2019-02-26 16:32:26 -07:00
|
|
|
def make_report_content_html(nil), do: {:ok, {nil, [], []}}
|
2019-02-20 09:51:25 -07:00
|
|
|
|
|
|
|
def make_report_content_html(comment) do
|
|
|
|
max_size = Pleroma.Config.get([:instance, :max_report_comment_size], 1000)
|
|
|
|
|
|
|
|
if String.length(comment) <= max_size do
|
2019-02-26 16:32:26 -07:00
|
|
|
{:ok, format_input(comment, "text/plain")}
|
2019-02-20 09:51:25 -07:00
|
|
|
else
|
|
|
|
{:error, "Comment must be up to #{max_size} characters"}
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
def get_report_statuses(%User{ap_id: actor}, %{"status_ids" => status_ids}) do
|
|
|
|
{:ok, Activity.all_by_actor_and_id(actor, status_ids)}
|
|
|
|
end
|
|
|
|
|
|
|
|
def get_report_statuses(_, _), do: {:ok, nil}
|
2019-03-21 17:17:53 -06:00
|
|
|
|
|
|
|
  # DEPRECATED mostly, context objects are now created at insertion time.
  @doc """
  Maps an ActivityPub `context` string to a numeric conversation id,
  creating a context-mapping object the first time the context is seen.
  """
  def context_to_conversation_id(context) do
    with %Object{id: id} <- Object.get_cached_by_ap_id(context) do
      id
    else
      _e ->
        # First time we see this context: persist a mapping object so its
        # row id can serve as the conversation id.
        changeset = Object.context_mapping(context)

        case Repo.insert(changeset) do
          {:ok, %{id: id}} ->
            id

          # This should be solved by an upsert, but it seems ecto
          # has problems accessing the constraint inside the jsonb.
          {:error, _} ->
            Object.get_cached_by_ap_id(context).id
        end
    end
  end
|
|
|
|
|
|
|
|
def conversation_id_to_context(id) do
|
|
|
|
with %Object{data: %{"id" => context}} <- Repo.get(Object, id) do
|
|
|
|
context
|
|
|
|
else
|
|
|
|
_e ->
|
|
|
|
{:error, "No such conversation"}
|
|
|
|
end
|
|
|
|
end
|
2017-05-17 10:00:20 -06:00
|
|
|
end
|