2017-04-18 16:41:51 +00:00
|
|
|
defmodule Pleroma.Web.OStatus do
|
2017-05-05 14:07:44 +00:00
|
|
|
@httpoison Application.get_env(:pleroma, :httpoison)
|
|
|
|
|
2017-04-24 16:46:34 +00:00
|
|
|
import Ecto.Query
|
2017-04-27 07:43:58 +00:00
|
|
|
import Pleroma.Web.XML
|
2017-04-24 16:46:34 +00:00
|
|
|
require Logger
|
|
|
|
|
2017-05-05 14:27:03 +00:00
|
|
|
alias Pleroma.{Repo, User, Web, Object, Activity}
|
2017-04-24 16:46:34 +00:00
|
|
|
alias Pleroma.Web.ActivityPub.ActivityPub
|
2017-04-29 15:51:59 +00:00
|
|
|
alias Pleroma.Web.{WebFinger, Websub}
|
2017-04-18 16:41:51 +00:00
|
|
|
|
|
|
|
# Atom feed URL for the given user, derived from the user's ActivityPub id.
def feed_path(user), do: "#{user.ap_id}/feed.atom"
|
|
|
|
|
2017-04-20 15:47:33 +00:00
|
|
|
# WebSub/PubSubHubbub hub URL for the given user, rooted at this instance.
def pubsub_path(user), do: "#{Web.base_url}/push/hub/#{user.nickname}"
|
|
|
|
|
2017-04-24 16:46:34 +00:00
|
|
|
# Salmon endpoint URL for the given user, derived from the ActivityPub id.
def salmon_path(user), do: "#{user.ap_id}/salmon"
|
|
|
|
|
|
|
|
# Parses an incoming OStatus (Atom) document and converts each <entry>
# into ActivityPub activities.
#
# Returns {:ok, activities}. Depending on the entry, each element of
# `activities` is: a single created activity (notes/comments), a
# two-element list [activity, referenced_activity] (shares/favorites),
# or nil when the entry couldn't be parsed.
#
# NOTE(review): when handle_share/handle_favorite/handle_note return an
# error, the `with … do:` form falls through and the error value itself
# ends up in the list instead of nil — callers should be aware. TODO confirm
# this is intended.
def handle_incoming(xml_string) do
  doc = parse_document(xml_string)
  # One Atom <entry> per activity; each is handled independently.
  entries = :xmerl_xpath.string('//entry', doc)

  activities = Enum.map(entries, fn (entry) ->
    # activity:object-type and activity:verb decide how the entry is
    # interpreted. xmerl returns them as charlists.
    {:xmlObj, :string, object_type} = :xmerl_xpath.string('string(/entry/activity:object-type[1])', entry)
    {:xmlObj, :string, verb} = :xmerl_xpath.string('string(/entry/activity:verb[1])', entry)

    case verb do
      'http://activitystrea.ms/schema/1.0/share' ->
        # Share (announce/retweet): yields both the announce and the
        # announced activity.
        with {:ok, activity, retweeted_activity} <- handle_share(entry, doc), do: [activity, retweeted_activity]

      'http://activitystrea.ms/schema/1.0/favorite' ->
        # Favorite (like): yields both the like and the liked activity.
        with {:ok, activity, favorited_activity} <- handle_favorite(entry, doc), do: [activity, favorited_activity]

      _ ->
        case object_type do
          'http://activitystrea.ms/schema/1.0/note' ->
            with {:ok, activity} <- handle_note(entry, doc), do: activity

          'http://activitystrea.ms/schema/1.0/comment' ->
            # Comments are handled exactly like notes.
            with {:ok, activity} <- handle_note(entry, doc), do: activity

          _ ->
            # Unknown verb/object-type combination.
            Logger.error("Couldn't parse incoming document")
            nil
        end
    end
  end)

  {:ok, activities}
end
|
|
|
|
|
2017-05-05 12:16:54 +00:00
|
|
|
# Creates an Announce activity for a remote share, attributed to the
# author of `doc` and pointing at the object of `retweeted_activity`.
# Returns {:ok, activity}, or the non-matching value of the first failing
# step (standard `with` fall-through).
#
# Fix: the last clause used `=` instead of `<-`, so a failed
# ActivityPub.announce/3 would raise a MatchError instead of letting the
# `with` fall through like the other clauses do.
def make_share(_entry, doc, retweeted_activity) do
  with {:ok, actor} <- find_make_or_update_user(doc),
       %Object{} = object <- Object.get_cached_by_ap_id(retweeted_activity.data["object"]["id"]),
       # `false`: don't federate the announce back out — TODO confirm flag meaning.
       {:ok, activity, _object} <- ActivityPub.announce(actor, object, false) do
    {:ok, activity}
  end
end
|
|
|
|
|
|
|
|
# Handles a share entry: parses the embedded activity:object as a note,
# then wraps it in an Announce by the sharing actor.
# Returns {:ok, announce, original_activity} or {:error, reason}.
def handle_share(entry, doc) do
  with [shared_object] <- :xmerl_xpath.string('/entry/activity:object', entry),
       # The embedded object is a full entry in its own right, so it acts
       # as both entry and document when parsed as a note.
       {:ok, original_activity} <- handle_note(shared_object, shared_object),
       {:ok, announce} <- make_share(entry, doc, original_activity) do
    {:ok, announce, original_activity}
  else
    error -> {:error, error}
  end
end
|
|
|
|
|
2017-05-05 14:07:44 +00:00
|
|
|
# Creates a Like activity for a remote favorite, attributed to the author
# of `doc` and pointing at the object of `favorited_activity`.
# Returns {:ok, activity}, or the non-matching value of the first failing
# step (standard `with` fall-through).
#
# Fix: the last clause used `=` instead of `<-`, so a failed
# ActivityPub.like/3 would raise a MatchError instead of letting the
# `with` fall through like the other clauses do.
def make_favorite(_entry, doc, favorited_activity) do
  with {:ok, actor} <- find_make_or_update_user(doc),
       %Object{} = object <- Object.get_cached_by_ap_id(favorited_activity.data["object"]["id"]),
       # `false`: don't federate the like back out — TODO confirm flag meaning.
       {:ok, activity, _object} <- ActivityPub.like(actor, object, false) do
    {:ok, activity}
  end
end
|
|
|
|
|
2017-05-05 14:27:03 +00:00
|
|
|
# Finds the activity referenced by the entry's activity:object id locally;
# when it isn't known yet, falls back to fetching it from the object's
# text/html alternate link.
def get_or_try_fetching(entry) do
  with object_id when not is_nil(object_id) <- string_from_xpath("//activity:object[1]/id", entry),
       %Activity{} = known <- Activity.get_create_activity_by_object_ap_id(object_id) do
    {:ok, known}
  else
    _e -> fetch_referenced_activity(entry)
  end
end

# Fallback: follow the object's text/html link and pull the activity
# over HTTP. Returns the non-matching step value when it fails.
defp fetch_referenced_activity(entry) do
  with href when not is_nil(href) <- string_from_xpath("//activity:object[1]/link[@type=\"text/html\"]/@href", entry),
       {:ok, [fetched]} <- fetch_activity_from_html_url(href) do
    {:ok, fetched}
  end
end
|
|
|
|
|
2017-05-05 14:07:44 +00:00
|
|
|
# Handles a favorite entry: resolves (or fetches) the liked activity and
# creates the corresponding Like.
# Returns {:ok, like, liked_activity} or {:error, reason}.
def handle_favorite(entry, doc) do
  with {:ok, liked} <- get_or_try_fetching(entry),
       {:ok, like} <- make_favorite(entry, doc, liked) do
    {:ok, like, liked}
  else
    error -> {:error, error}
  end
end
|
|
|
|
|
2017-05-03 12:26:49 +00:00
|
|
|
# Collects the entry's enclosure links as ActivityStreams attachment maps.
# Enclosures missing an href or a media type are skipped.
def get_attachments(entry) do
  # `href = …` / `type = …` act as bind-and-filter: a nil value is falsy,
  # so incomplete enclosures drop out of the comprehension.
  for enclosure <- :xmerl_xpath.string('/entry/link[@rel="enclosure"]', entry),
      href = string_from_xpath("/link/@href", enclosure),
      type = string_from_xpath("/link/@type", enclosure) do
    %{
      "type" => "Attachment",
      "url" => [
        %{
          "type" => "Link",
          "mediaType" => type,
          "href" => href
        }
      ]
    }
  end
end
|
|
|
|
|
2017-04-29 19:13:21 +00:00
|
|
|
# Converts an OStatus note/comment entry into an ActivityPub Create
# activity via ActivityPub.create/7.
#
# `entry` is the xmerl node of the <entry>; `doc` is the surrounding
# document (used to find the author) and may equal `entry` for embedded
# objects. Returns ActivityPub.create's result, or
# {:error, "duplicate activity"} when the object id is already known.
def handle_note(entry, doc \\ nil) do
  content_html = string_from_xpath("//content[1]", entry)

  # Author lookup crashes (MatchError) if the doc has no <author> —
  # assumed to always be present in valid feeds. TODO confirm.
  [author] = :xmerl_xpath.string('//author[1]', doc)
  {:ok, actor} = find_make_or_update_user(author)
  inReplyTo = string_from_xpath("//thr:in-reply-to[1]/@ref", entry)

  # If the replied-to object isn't cached yet, try to fetch it from its
  # html href so the conversation can be threaded. Result is deliberately
  # ignored (best effort); the cache lookup below is retried afterwards.
  if !Object.get_cached_by_ap_id(inReplyTo) do
    inReplyToHref = string_from_xpath("//thr:in-reply-to[1]/@href", entry)
    if inReplyToHref do
      fetch_activity_from_html_url(inReplyToHref)
    end
  end

  context = (string_from_xpath("//ostatus:conversation[1]", entry) || "") |> String.trim

  attachments = get_attachments(entry)

  # Prefer the context of the replied-to object when it is cached; fall
  # back to the entry's own ostatus:conversation, then to a generated id.
  context = with %{data: %{"context" => context}} <- Object.get_cached_by_ap_id(inReplyTo) do
    context
  else _e ->
    if String.length(context) > 0 do
      context
    else
      ActivityPub.generate_context_id
    end
  end

  # Incoming OStatus notes are treated as public.
  to = [
    "https://www.w3.org/ns/activitystreams#Public"
  ]

  # Mentioned persons are addressed as additional recipients.
  mentions = :xmerl_xpath.string('//link[@rel="mentioned" and @ostatus:object-type="http://activitystrea.ms/schema/1.0/person"]', entry)
  |> Enum.map(fn(person) -> string_from_xpath("@href", person) end)

  to = to ++ mentions

  date = string_from_xpath("//published", entry)
  id = string_from_xpath("//id", entry)

  object = %{
    "id" => id,
    "type" => "Note",
    "to" => to,
    "content" => content_html,
    "published" => date,
    "context" => context,
    "actor" => actor.ap_id,
    "attachment" => attachments
  }

  # Only set inReplyTo when the entry actually is a reply.
  object = if inReplyTo do
    Map.put(object, "inReplyTo", inReplyTo)
  else
    object
  end

  # TODO: Bail out sooner and use transaction.
  if Object.get_by_ap_id(id) do
    {:error, "duplicate activity"}
  else
    # `false`: don't federate back out — TODO confirm flag meaning.
    ActivityPub.create(to, actor, context, object, %{}, date, false)
  end
end
|
|
|
|
|
2017-05-02 15:13:41 +00:00
|
|
|
# Resolves the author of `doc` to a stored remote user, refreshing the
# stored avatar when the document advertises a different one.
# Returns {:ok, user} or, on update, Repo.update's result.
def find_make_or_update_user(doc) do
  author_uri = string_from_xpath("//author/uri[1]", doc)

  with {:ok, user} <- find_or_make_user(author_uri) do
    advertised_avatar = make_avatar_object(doc)

    if user.avatar == advertised_avatar do
      {:ok, user}
    else
      user
      |> Ecto.Changeset.change(%{avatar: advertised_avatar})
      |> Repo.update()
    end
  end
end
|
|
|
|
|
2017-04-29 17:06:01 +00:00
|
|
|
# Looks up a remote (non-local) user by profile URI stored in `info`,
# creating a new record via make_user/1 when none exists yet.
def find_or_make_user(uri) do
  query =
    from user in User,
      where: user.local == false and fragment("? @> ?", user.info, ^%{uri: uri})

  case Repo.one(query) do
    nil -> make_user(uri)
    user -> {:ok, user}
  end
end
|
|
|
|
|
2017-04-29 17:06:01 +00:00
|
|
|
# Gathers remote profile data for `uri` and inserts a new non-local user.
# Returns Repo.insert's result, or gather_user_info's error fall-through.
def make_user(uri) do
  with {:ok, info} <- gather_user_info(uri) do
    attrs = %{
      local: false,
      name: info["name"],
      # Nickname is stored fully qualified ("name@host").
      nickname: info["nickname"] <> "@" <> info["host"],
      ap_id: info["uri"],
      info: info,
      avatar: info["avatar"]
    }

    # TODO: Make remote user changeset
    # Should enforce fqn nickname
    %User{}
    |> Ecto.Changeset.change(attrs)
    |> Repo.insert()
  end
end
|
|
|
|
|
|
|
|
# TODO: Just takes the first one for now.
|
2017-04-30 10:53:49 +00:00
|
|
|
# Builds an ActivityStreams "Image" map from the author's first avatar
# link, or nil when the document has no avatar link.
def make_avatar_object(author_doc) do
  href = string_from_xpath("//author[1]/link[@rel=\"avatar\"]/@href", author_doc)
  type = string_from_xpath("//author[1]/link[@rel=\"avatar\"]/@type", author_doc)

  case href do
    nil ->
      nil

    _ ->
      %{
        "type" => "Image",
        "url" => [
          %{
            "type" => "Link",
            "mediaType" => type,
            "href" => href
          }
        ]
      }
  end
end
|
2017-04-29 15:51:59 +00:00
|
|
|
|
|
|
|
# Combines WebFinger data with data gathered from the user's feed.
# Returns {:ok, info} with "fqn" set to the queried username, or
# {:error, reason} (logged at debug level).
def gather_user_info(username) do
  with {:ok, webfinger_data} <- WebFinger.finger(username),
       {:ok, feed_data} <- Websub.gather_feed_data(webfinger_data["topic"]) do
    info =
      webfinger_data
      |> Map.merge(feed_data)
      |> Map.put("fqn", username)

    {:ok, info}
  else
    e ->
      Logger.debug(fn -> "Couldn't gather info for #{username}" end)
      {:error, e}
  end
end
|
2017-05-05 14:07:44 +00:00
|
|
|
|
|
|
|
# Regex-based 'parsing' so we don't have to pull in a full html parser.
# It's a hack anyway. Maybe revisit this in the future.
@mastodon_regex ~r/<link href='(.*)' rel='alternate' type='application\/atom\+xml'>/
@gs_regex ~r/<link title=.* href="(.*)" type="application\/atom\+xml" rel="alternate">/
@gs_classic_regex ~r/<link rel="alternate" href="(.*)" type="application\/atom\+xml" title=.*>/

# Extracts the atom feed URL from an HTML page body, trying the known
# Mastodon and GNU Social (new and classic) markup shapes in order.
# Returns {:ok, url} or {:error, message}.
def get_atom_url(body) do
  # Order matters: it mirrors the original cond chain.
  matched =
    Enum.find([@mastodon_regex, @gs_regex, @gs_classic_regex], fn regex ->
      Regex.match?(regex, body)
    end)

  case matched do
    nil ->
      Logger.debug(fn -> "Couldn't find atom link in #{inspect(body)}" end)
      {:error, "Couldn't find the atom link"}

    regex ->
      # Exactly one scan hit is expected; anything else is a bug upstream.
      [[_, url]] = Regex.scan(regex, body)
      {:ok, url}
  end
end
|
|
|
|
|
|
|
|
# Given the URL of an HTML status page, discovers its atom representation
# and feeds that document into handle_incoming/1.
# Non-matching steps (HTTP failure, missing atom link, non-2xx status)
# fall through and are returned as-is.
def fetch_activity_from_html_url(url) do
  with {:ok, %{body: html}} <- @httpoison.get(url),
       {:ok, atom_url} <- get_atom_url(html),
       {:ok, %{status_code: status, body: atom_body}} when status in 200..299 <- @httpoison.get(atom_url) do
    handle_incoming(atom_body)
  end
end
|
2017-04-18 16:41:51 +00:00
|
|
|
end
|