# lib/pleroma/web/ostatus/ostatus.ex
defmodule Pleroma.Web.OStatus do
import Ecto.Query
2017-04-27 01:43:58 -06:00
import Pleroma.Web.XML
require Logger
2017-04-30 04:36:47 -06:00
alias Pleroma.{Repo, User, Web, Object}
alias Pleroma.Web.ActivityPub.ActivityPub
2017-04-29 09:51:59 -06:00
alias Pleroma.Web.{WebFinger, Websub}
2017-04-18 10:41:51 -06:00
def feed_path(user) do
"#{user.ap_id}/feed.atom"
end
2017-04-20 09:47:33 -06:00
def pubsub_path(user) do
"#{Web.base_url}/push/hub/#{user.nickname}"
2017-04-18 10:41:51 -06:00
end
def salmon_path(user) do
"#{user.ap_id}/salmon"
end
def handle_incoming(xml_string) do
2017-04-27 01:43:58 -06:00
doc = parse_document(xml_string)
2017-04-29 13:13:21 -06:00
entries = :xmerl_xpath.string('//entry', doc)
activities = Enum.map(entries, fn (entry) ->
{:xmlObj, :string, object_type } = :xmerl_xpath.string('string(/entry/activity:object-type[1])', entry)
case object_type do
'http://activitystrea.ms/schema/1.0/note' ->
2017-04-30 04:36:47 -06:00
with {:ok, activity} <- handle_note(entry, doc), do: activity
2017-04-30 03:39:27 -06:00
'http://activitystrea.ms/schema/1.0/comment' ->
2017-04-30 04:36:47 -06:00
with {:ok, activity} <- handle_note(entry, doc), do: activity
2017-04-29 13:13:21 -06:00
_ ->
Logger.error("Couldn't parse incoming document")
nil
end
end)
{:ok, activities}
end
2017-04-29 13:13:21 -06:00
def handle_note(entry, doc \\ nil) do
content_html = string_from_xpath("/entry/content[1]", entry)
2017-04-29 13:13:21 -06:00
uri = string_from_xpath("/entry/author/uri[1]", entry) || string_from_xpath("/feed/author/uri[1]", doc)
2017-04-29 11:06:01 -06:00
{:ok, actor} = find_or_make_user(uri)
2017-04-29 13:13:21 -06:00
context = string_from_xpath("/entry/ostatus:conversation[1]", entry) |> String.trim
2017-04-25 09:45:34 -06:00
context = if String.length(context) > 0 do
context
else
ActivityPub.generate_context_id
end
to = [
"https://www.w3.org/ns/activitystreams#Public"
]
2017-04-29 13:13:21 -06:00
mentions = :xmerl_xpath.string('/entry/link[@rel="mentioned" and @ostatus:object-type="http://activitystrea.ms/schema/1.0/person"]', entry)
2017-04-25 10:03:14 -06:00
|> Enum.map(fn(person) -> string_from_xpath("@href", person) end)
to = to ++ mentions
2017-04-29 13:13:21 -06:00
date = string_from_xpath("/entry/published", entry)
2017-04-30 01:25:46 -06:00
id = string_from_xpath("/entry/id", entry)
object = %{
"id" => id,
"type" => "Note",
"to" => to,
"content" => content_html,
"published" => date,
"context" => context,
"actor" => actor.ap_id
}
2017-04-30 03:39:27 -06:00
inReplyTo = string_from_xpath("/entry/thr:in-reply-to[1]/@ref", entry)
2017-04-26 02:22:51 -06:00
object = if inReplyTo do
Map.put(object, "inReplyTo", inReplyTo)
else
object
end
2017-04-30 04:36:47 -06:00
# TODO: Bail out sooner and use transaction.
if Object.get_by_ap_id(id) do
{:error, "duplicate activity"}
else
ActivityPub.create(to, actor, context, object, %{}, date)
end
end
2017-04-29 11:06:01 -06:00
def find_or_make_user(uri) do
query = from user in User,
2017-04-29 11:06:01 -06:00
where: user.local == false and fragment("? @> ?", user.info, ^%{uri: uri})
user = Repo.one(query)
if is_nil(user) do
2017-04-29 11:06:01 -06:00
make_user(uri)
else
{:ok, user}
end
end
2017-04-29 11:06:01 -06:00
def make_user(uri) do
with {:ok, info} <- gather_user_info(uri) do
data = %{
local: false,
name: info.name,
nickname: info.nickname <> "@" <> info.host,
2017-04-29 11:06:01 -06:00
ap_id: info.uri,
2017-04-30 04:53:49 -06:00
info: info,
avatar: info.avatar
2017-04-29 11:06:01 -06:00
}
# TODO: Make remote user changeset
# SHould enforce fqn nickname
2017-04-29 11:06:01 -06:00
Repo.insert(Ecto.Changeset.change(%User{}, data))
end
end
# TODO: Just takes the first one for now.
2017-04-30 04:53:49 -06:00
def make_avatar_object(author_doc) do
href = string_from_xpath("/feed/author[1]/link[@rel=\"avatar\"]/@href", author_doc)
type = string_from_xpath("/feed/author[1]/link[@rel=\"avatar\"]/@type", author_doc)
if href do
%{
"type" => "Image",
"url" =>
[%{
"type" => "Link",
"mediaType" => type,
"href" => href
}]
}
else
nil
end
2017-04-18 10:41:51 -06:00
end
2017-04-29 09:51:59 -06:00
def gather_user_info(username) do
with {:ok, webfinger_data} <- WebFinger.finger(username),
{:ok, feed_data} <- Websub.gather_feed_data(webfinger_data.topic) do
{:ok, Map.merge(webfinger_data, feed_data) |> Map.put(:fqn, username)}
else e ->
Logger.debug("Couldn't gather info for #{username}")
{:error, e}
end
end
2017-04-18 10:41:51 -06:00
end