akkoma/lib/pleroma/web/activity_pub/transmogrifier.ex


# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.Transmogrifier do
@moduledoc """
A module to handle conversion from the internal format to wire ActivityPub and back.
"""
alias Pleroma.Activity
alias Pleroma.EarmarkRenderer
alias Pleroma.EctoType.ActivityPub.ObjectValidators
alias Pleroma.FollowingRelationship
alias Pleroma.Maps
alias Pleroma.Notification
alias Pleroma.Object
alias Pleroma.Object.Containment
alias Pleroma.Repo
alias Pleroma.User
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.ActivityPub.Builder
alias Pleroma.Web.ActivityPub.ObjectValidator
alias Pleroma.Web.ActivityPub.Pipeline
alias Pleroma.Web.ActivityPub.Utils
alias Pleroma.Web.ActivityPub.Visibility
alias Pleroma.Web.Federator
alias Pleroma.Workers.TransmogrifierWorker
import Ecto.Query
require Logger
require Pleroma.Constants
@doc """
Translates an incoming AP object (Mastodon format) into our internal format.
"""
def fix_object(object, options \\ []) do
object
|> strip_internal_fields
|> fix_actor
|> fix_url
|> fix_attachments
|> fix_context
|> fix_in_reply_to(options)
|> fix_emoji
|> fix_tag
|> fix_content_map
|> fix_addressing
|> fix_summary
|> fix_type(options)
|> fix_content
end
def fix_summary(%{"summary" => nil} = object) do
Map.put(object, "summary", "")
end
def fix_summary(%{"summary" => _} = object) do
# summary is present, nothing to do
object
end
def fix_summary(object), do: Map.put(object, "summary", "")
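# Normalizes a single addressing field ("to", "cc", "bto", "bcc") to a list of AP id strings.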
def fix_addressing_list(map, field) do
addrs = map[field]
cond do
is_list(addrs) ->
Map.put(map, field, Enum.filter(addrs, &is_binary/1))
is_binary(addrs) ->
Map.put(map, field, [addrs])
true ->
Map.put(map, field, [])
end
end
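# Keeps only explicit mentions in "to" and moves the remaining recipients to "cc",
# dropping follower collections that are not the actor's own.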
def fix_explicit_addressing(
%{"to" => to, "cc" => cc} = object,
explicit_mentions,
follower_collection
) do
explicit_to = Enum.filter(to, fn x -> x in explicit_mentions end)
explicit_cc = Enum.filter(to, fn x -> x not in explicit_mentions end)
final_cc =
(cc ++ explicit_cc)
|> Enum.reject(fn x -> String.ends_with?(x, "/followers") and x != follower_collection end)
|> Enum.uniq()
object
|> Map.put("to", explicit_to)
|> Map.put("cc", final_cc)
end
def fix_explicit_addressing(object, _explicit_mentions, _followers_collection), do: object
# if directMessage flag is set to true, leave the addressing alone
def fix_explicit_addressing(%{"directMessage" => true} = object), do: object
def fix_explicit_addressing(object) do
explicit_mentions = Utils.determine_explicit_mentions(object)
%User{follower_address: follower_collection} =
object
|> Containment.get_actor()
|> User.get_cached_by_ap_id()
explicit_mentions =
explicit_mentions ++
[
Pleroma.Constants.as_public(),
follower_collection
]
fix_explicit_addressing(object, explicit_mentions, follower_collection)
end
# if as:Public is addressed, then make sure the followers collection is also addressed
# so that the activities will be delivered to local users.
def fix_implicit_addressing(%{"to" => to, "cc" => cc} = object, followers_collection) do
recipients = to ++ cc
if followers_collection not in recipients do
cond do
Pleroma.Constants.as_public() in cc ->
to = to ++ [followers_collection]
Map.put(object, "to", to)
Pleroma.Constants.as_public() in to ->
cc = cc ++ [followers_collection]
Map.put(object, "cc", cc)
true ->
object
end
else
object
end
end
def fix_implicit_addressing(object, _), do: object
def fix_addressing(object) do
{:ok, %User{} = user} = User.get_or_fetch_by_ap_id(object["actor"])
followers_collection = User.ap_followers(user)
object
|> fix_addressing_list("to")
|> fix_addressing_list("cc")
|> fix_addressing_list("bto")
|> fix_addressing_list("bcc")
|> fix_explicit_addressing()
|> fix_implicit_addressing(followers_collection)
end
def fix_actor(%{"attributedTo" => actor} = object) do
actor = Containment.get_actor(%{"actor" => actor})
# TODO: Remove actor field for Objects
object
|> Map.put("actor", actor)
|> Map.put("attributedTo", actor)
end
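# Resolves inReplyTo to the replied-to object's canonical id and inherits its context,
# as long as the reply is within the allowed thread distance.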
def fix_in_reply_to(object, options \\ [])
def fix_in_reply_to(%{"inReplyTo" => in_reply_to} = object, options)
when not is_nil(in_reply_to) do
in_reply_to_id = prepare_in_reply_to(in_reply_to)
object = Map.put(object, "inReplyToAtomUri", in_reply_to_id)
depth = (options[:depth] || 0) + 1
if Federator.allowed_thread_distance?(depth) do
with {:ok, replied_object} <- get_obj_helper(in_reply_to_id, options),
%Activity{} <- Activity.get_create_by_object_ap_id(replied_object.data["id"]) do
object
|> Map.put("inReplyTo", replied_object.data["id"])
|> Map.put("inReplyToAtomUri", object["inReplyToAtomUri"] || in_reply_to_id)
|> Map.put("context", replied_object.data["context"] || object["conversation"])
|> Map.drop(["conversation"])
else
e ->
Logger.error("Couldn't fetch #{inspect(in_reply_to_id)}, error: #{inspect(e)}")
object
end
else
object
end
end
def fix_in_reply_to(object, _options), do: object
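# inReplyTo may arrive as a string, an object with an "id", or a list; extract a single AP id.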
defp prepare_in_reply_to(in_reply_to) do
cond do
is_bitstring(in_reply_to) ->
in_reply_to
is_map(in_reply_to) && is_bitstring(in_reply_to["id"]) ->
in_reply_to["id"]
is_list(in_reply_to) && is_bitstring(Enum.at(in_reply_to, 0)) ->
Enum.at(in_reply_to, 0)
true ->
""
end
end
def fix_context(object) do
context = object["context"] || object["conversation"] || Utils.generate_context_id()
object
|> Map.put("context", context)
|> Map.drop(["conversation"])
end
def fix_attachments(%{"attachment" => attachment} = object) when is_list(attachment) do
attachments =
Enum.map(attachment, fn data ->
url =
cond do
is_list(data["url"]) -> List.first(data["url"])
is_map(data["url"]) -> data["url"]
true -> nil
end
media_type =
cond do
is_map(url) && MIME.valid?(url["mediaType"]) -> url["mediaType"]
MIME.valid?(data["mediaType"]) -> data["mediaType"]
MIME.valid?(data["mimeType"]) -> data["mimeType"]
true -> nil
end
href =
cond do
is_map(url) && is_binary(url["href"]) -> url["href"]
is_binary(data["url"]) -> data["url"]
is_binary(data["href"]) -> data["href"]
true -> nil
end
if href do
attachment_url =
%{
"href" => href,
"type" => Map.get(url || %{}, "type", "Link")
}
|> Maps.put_if_present("mediaType", media_type)
%{
"url" => [attachment_url],
"type" => data["type"] || "Document"
}
|> Maps.put_if_present("mediaType", media_type)
|> Maps.put_if_present("name", data["name"])
else
nil
end
end)
|> Enum.filter(& &1)
Map.put(object, "attachment", attachments)
end
def fix_attachments(%{"attachment" => attachment} = object) when is_map(attachment) do
object
|> Map.put("attachment", [attachment])
|> fix_attachments()
end
def fix_attachments(object), do: object
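# Normalizes "url" to a single href; for Video and Audio objects the media link becomes
# an attachment and the text/html link becomes the url.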
def fix_url(%{"url" => url} = object) when is_map(url) do
Map.put(object, "url", url["href"])
end
def fix_url(%{"type" => object_type, "url" => url} = object)
when object_type in ["Video", "Audio"] and is_list(url) do
attachment =
Enum.find(url, fn x ->
media_type = x["mediaType"] || x["mimeType"] || ""
is_map(x) and String.starts_with?(media_type, ["audio/", "video/"])
end)
link_element =
Enum.find(url, fn x -> is_map(x) and (x["mediaType"] || x["mimeType"]) == "text/html" end)
object
|> Map.put("attachment", [attachment])
|> Map.put("url", link_element["href"])
end
def fix_url(%{"type" => object_type, "url" => url} = object)
when object_type != "Video" and is_list(url) do
first_element = Enum.at(url, 0)
url_string =
cond do
is_bitstring(first_element) -> first_element
is_map(first_element) -> first_element["href"] || ""
true -> ""
end
Map.put(object, "url", url_string)
end
def fix_url(object), do: object
def fix_emoji(%{"tag" => tags} = object) when is_list(tags) do
emoji =
tags
|> Enum.filter(fn data -> data["type"] == "Emoji" and data["icon"] end)
|> Enum.reduce(%{}, fn data, mapping ->
name = String.trim(data["name"], ":")
Map.put(mapping, name, data["icon"]["url"])
end)
# we merge mastodon and pleroma emoji into a single mapping, to allow for both wire formats
emoji = Map.merge(object["emoji"] || %{}, emoji)
Map.put(object, "emoji", emoji)
end
def fix_emoji(%{"tag" => %{"type" => "Emoji"} = tag} = object) do
name = String.trim(tag["name"], ":")
emoji = %{name => tag["icon"]["url"]}
Map.put(object, "emoji", emoji)
end
def fix_emoji(object), do: object
def fix_tag(%{"tag" => tag} = object) when is_list(tag) do
tags =
tag
|> Enum.filter(fn data -> data["type"] == "Hashtag" and data["name"] end)
|> Enum.map(fn data -> String.slice(data["name"], 1..-1) end)
Map.put(object, "tag", tag ++ tags)
end
def fix_tag(%{"tag" => %{"type" => "Hashtag", "name" => hashtag} = tag} = object) do
combined = [tag, String.slice(hashtag, 1..-1)]
Map.put(object, "tag", combined)
end
def fix_tag(%{"tag" => %{} = tag} = object), do: Map.put(object, "tag", [tag])
def fix_tag(object), do: object
# content map usually only has one language so this will do for now.
def fix_content_map(%{"contentMap" => content_map} = object) do
content_groups = Map.to_list(content_map)
{_, content} = Enum.at(content_groups, 0)
Map.put(object, "content", content)
end
def fix_content_map(object), do: object
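# A named reply to a Question is a poll vote, so rewrite its type to "Answer".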
def fix_type(object, options \\ [])
def fix_type(%{"inReplyTo" => reply_id, "name" => _} = object, options)
when is_binary(reply_id) do
with true <- Federator.allowed_thread_distance?(options[:depth]),
{:ok, %{data: %{"type" => "Question"} = _} = _} <- get_obj_helper(reply_id, options) do
Map.put(object, "type", "Answer")
else
_ -> object
end
end
def fix_type(object, _), do: object
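# Incoming markdown is rendered to HTML and run through the HTML scrubber.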
defp fix_content(%{"mediaType" => "text/markdown", "content" => content} = object)
when is_binary(content) do
html_content =
content
|> Earmark.as_html!(%Earmark.Options{renderer: EarmarkRenderer})
|> Pleroma.HTML.filter_tags()
Map.merge(object, %{"content" => html_content, "mediaType" => "text/html"})
end
defp fix_content(object), do: object
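# Mastodon Accepts may reference a follow id we never received; if the id looks like a
# follow and the follower is local, fall back to the latest Follow between the two users.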
defp mastodon_follow_hack(%{"id" => id, "actor" => follower_id}, followed) do
with true <- id =~ "follows",
%User{local: true} = follower <- User.get_cached_by_ap_id(follower_id),
%Activity{} = activity <- Utils.fetch_latest_follow(follower, followed) do
{:ok, activity}
else
_ -> {:error, nil}
end
end
defp mastodon_follow_hack(_, _), do: {:error, nil}
defp get_follow_activity(follow_object, followed) do
with object_id when not is_nil(object_id) <- Utils.get_ap_id(follow_object),
{_, %Activity{} = activity} <- {:activity, Activity.get_by_ap_id(object_id)} do
{:ok, activity}
else
# Can't find the activity. This might be a Mastodon 2.3 "Accept"
{:activity, nil} ->
mastodon_follow_hack(follow_object, followed)
_ ->
{:error, nil}
end
end
# Reduce the object list to find the reported user.
defp get_reported(objects) do
Enum.reduce_while(objects, nil, fn ap_id, _ ->
with %User{} = user <- User.get_cached_by_ap_id(ap_id) do
{:halt, user}
else
_ -> {:cont, nil}
end
end)
end
# Compatibility wrapper for Mastodon votes
defp handle_create(%{"object" => %{"type" => "Answer"}} = data, _user) do
handle_incoming(data)
end
defp handle_create(%{"object" => object} = data, user) do
%{
to: data["to"],
object: object,
actor: user,
context: object["context"],
local: false,
published: data["published"],
additional:
Map.take(data, [
"cc",
"directMessage",
"id"
])
}
|> ActivityPub.create()
end
def handle_incoming(data, options \\ [])
# Flag objects are placed ahead of the ID check because Mastodon 2.8 and earlier send them
# with nil ID.
def handle_incoming(%{"type" => "Flag", "object" => objects, "actor" => actor} = data, _options) do
with context <- data["context"] || Utils.generate_context_id(),
content <- data["content"] || "",
%User{} = actor <- User.get_cached_by_ap_id(actor),
# Reduce the object list to find the reported user.
%User{} = account <- get_reported(objects),
# Remove the reported user from the object list.
statuses <- Enum.filter(objects, fn ap_id -> ap_id != account.ap_id end) do
%{
actor: actor,
context: context,
account: account,
statuses: statuses,
content: content,
additional: %{"cc" => [account.ap_id]}
}
|> ActivityPub.flag()
end
end
# disallow objects with bogus IDs
def handle_incoming(%{"id" => nil}, _options), do: :error
def handle_incoming(%{"id" => ""}, _options), do: :error
# length of https:// = 8, should validate better, but good enough for now.
def handle_incoming(%{"id" => id}, _options) when is_binary(id) and byte_size(id) < 8,
do: :error
# TODO: validate those with an Ecto schema
# - tags
# - emoji
def handle_incoming(
%{"type" => "Create", "object" => %{"type" => objtype} = object} = data,
options
)
when objtype in ["Article", "Event", "Note", "Video", "Page", "Audio"] do
actor = Containment.get_actor(data)
with nil <- Activity.get_create_by_object_ap_id(object["id"]),
{:ok, %User{} = user} <- User.get_or_fetch_by_ap_id(actor) do
data =
data
|> Map.put("object", fix_object(object, options))
|> Map.put("actor", actor)
|> fix_addressing()
with {:ok, created_activity} <- handle_create(data, user) do
reply_depth = (options[:depth] || 0) + 1
if Federator.allowed_thread_distance?(reply_depth) do
for reply_id <- replies(object) do
Pleroma.Workers.RemoteFetcherWorker.enqueue("fetch_remote", %{
"id" => reply_id,
"depth" => reply_depth
})
end
end
{:ok, created_activity}
end
else
%Activity{} = activity -> {:ok, activity}
_e -> :error
end
end
def handle_incoming(
%{"type" => "Listen", "object" => %{"type" => "Audio"} = object} = data,
options
) do
actor = Containment.get_actor(data)
data =
Map.put(data, "actor", actor)
|> fix_addressing
with {:ok, %User{} = user} <- User.get_or_fetch_by_ap_id(data["actor"]) do
reply_depth = (options[:depth] || 0) + 1
options = Keyword.put(options, :depth, reply_depth)
object = fix_object(object, options)
params = %{
to: data["to"],
object: object,
actor: user,
context: nil,
local: false,
published: data["published"],
additional: Map.take(data, ["cc", "id"])
}
ActivityPub.listen(params)
else
_e -> :error
end
end
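# Accept of one of our Follow requests: mark the follow as accepted, update follower and
# following counts, and record the Accept activity.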
def handle_incoming(
%{"type" => "Accept", "object" => follow_object, "actor" => _actor, "id" => id} = data,
_options
) do
with actor <- Containment.get_actor(data),
{:ok, %User{} = followed} <- User.get_or_fetch_by_ap_id(actor),
{:ok, follow_activity} <- get_follow_activity(follow_object, followed),
{:ok, follow_activity} <- Utils.update_follow_state_for_all(follow_activity, "accept"),
%User{local: true} = follower <- User.get_cached_by_ap_id(follow_activity.data["actor"]),
{:ok, _relationship} <- FollowingRelationship.update(follower, followed, :follow_accept) do
User.update_follower_count(followed)
User.update_following_count(follower)
Notification.update_notification_type(followed, follow_activity)
ActivityPub.accept(%{
to: follow_activity.data["to"],
type: "Accept",
actor: followed,
object: follow_activity.data["id"],
local: false,
activity_id: id
})
else
_e ->
:error
end
end
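# Reject of one of our Follow requests: mark the follow as rejected and record the Reject activity.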
def handle_incoming(
%{"type" => "Reject", "object" => follow_object, "actor" => _actor, "id" => id} = data,
_options
) do
with actor <- Containment.get_actor(data),
{:ok, %User{} = followed} <- User.get_or_fetch_by_ap_id(actor),
{:ok, follow_activity} <- get_follow_activity(follow_object, followed),
{:ok, follow_activity} <- Utils.update_follow_state_for_all(follow_activity, "reject"),
%User{local: true} = follower <- User.get_cached_by_ap_id(follow_activity.data["actor"]),
{:ok, _relationship} <- FollowingRelationship.update(follower, followed, :follow_reject),
{:ok, activity} <-
ActivityPub.reject(%{
to: follow_activity.data["to"],
type: "Reject",
actor: followed,
object: follow_activity.data["id"],
local: false,
activity_id: id
}) do
{:ok, activity}
else
_e -> :error
end
end
@misskey_reactions %{
"like" => "👍",
"love" => "❤️",
"laugh" => "😆",
"hmm" => "🤔",
"surprise" => "😮",
"congrats" => "🎉",
"angry" => "💢",
"confused" => "😥",
"rip" => "😇",
"pudding" => "🍮",
"star" => ""
}
@doc "Rewrite misskey likes into EmojiReacts"
def handle_incoming(
%{
"type" => "Like",
"_misskey_reaction" => reaction
} = data,
options
) do
data
|> Map.put("type", "EmojiReact")
|> Map.put("content", @misskey_reactions[reaction] || reaction)
|> handle_incoming(options)
end
def handle_incoming(
%{"type" => "Create", "object" => %{"type" => objtype} = object} = data,
_options
)
when objtype in ["Question", "Answer"] do
data =
data
|> Map.put("object", fix_object(object))
|> fix_addressing()
with {:ok, %User{}} <- ObjectValidator.fetch_actor(data),
{:ok, activity, _} <- Pipeline.common_pipeline(data, local: false) do
{:ok, activity}
end
end
def handle_incoming(
%{"type" => "Create", "object" => %{"type" => "ChatMessage"}} = data,
_options
) do
with {:ok, %User{}} <- ObjectValidator.fetch_actor(data),
{:ok, activity, _} <- Pipeline.common_pipeline(data, local: false) do
{:ok, activity}
end
end
def handle_incoming(%{"type" => type} = data, _options)
when type in ~w{Like EmojiReact Announce} do
with :ok <- ObjectValidator.fetch_actor_and_object(data),
{:ok, activity, _meta} <-
Pipeline.common_pipeline(data, local: false) do
{:ok, activity}
else
e -> {:error, e}
end
end
def handle_incoming(
%{"type" => type} = data,
_options
)
when type in ~w{Update Block Follow} do
with {:ok, %User{}} <- ObjectValidator.fetch_actor(data),
{:ok, activity, _} <- Pipeline.common_pipeline(data, local: false) do
{:ok, activity}
end
end
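# Deletes go through the common pipeline; if the object fails validation but we hold a
# Create for it, insert a tombstone and retry.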
def handle_incoming(
%{"type" => "Delete"} = data,
_options
) do
with {:ok, activity, _} <- Pipeline.common_pipeline(data, local: false) do
{:ok, activity}
else
{:error, {:validate_object, _}} = e ->
# Check if we have a create activity for this
with {:ok, object_id} <- ObjectValidators.ObjectID.cast(data["object"]),
%Activity{data: %{"actor" => actor}} <-
Activity.create_by_object_ap_id(object_id) |> Repo.one(),
# We have one, insert a tombstone and retry
{:ok, tombstone_data, _} <- Builder.tombstone(actor, object_id),
{:ok, _tombstone} <- Object.create(tombstone_data) do
handle_incoming(data)
else
_ -> e
end
end
end
def handle_incoming(
%{
"type" => "Undo",
"object" => %{"type" => "Follow", "object" => followed},
"actor" => follower,
"id" => id
} = _data,
_options
) do
with %User{local: true} = followed <- User.get_cached_by_ap_id(followed),
{:ok, %User{} = follower} <- User.get_or_fetch_by_ap_id(follower),
{:ok, activity} <- ActivityPub.unfollow(follower, followed, id, false) do
User.unfollow(follower, followed)
{:ok, activity}
else
_e -> :error
end
end
def handle_incoming(
%{
"type" => "Undo",
"object" => %{"type" => type}
} = data,
_options
)
when type in ["Like", "EmojiReact", "Announce", "Block"] do
with {:ok, activity, _} <- Pipeline.common_pipeline(data, local: false) do
{:ok, activity}
end
end
# For Undos that don't have the complete object attached, try to find it in our database.
def handle_incoming(
%{
"type" => "Undo",
"object" => object
} = activity,
options
)
when is_binary(object) do
with %Activity{data: data} <- Activity.get_by_ap_id(object) do
activity
|> Map.put("object", data)
|> handle_incoming(options)
else
_e -> :error
2018-05-21 08:35:43 +00:00
end
end
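# Account Move: only honored when the target account lists the origin actor in alsoKnownAs.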
def handle_incoming(
%{
"type" => "Move",
"actor" => origin_actor,
"object" => origin_actor,
"target" => target_actor
},
_options
) do
with %User{} = origin_user <- User.get_cached_by_ap_id(origin_actor),
{:ok, %User{} = target_user} <- User.get_or_fetch_by_ap_id(target_actor),
true <- origin_actor in target_user.also_known_as do
ActivityPub.move(origin_user, target_user, false)
else
_e -> :error
end
end
def handle_incoming(_, _), do: :error
@spec get_obj_helper(String.t(), Keyword.t()) :: {:ok, Object.t()} | nil
def get_obj_helper(id, options \\ []) do
case Object.normalize(id, true, options) do
%Object{} = object -> {:ok, object}
_ -> nil
end
end
@spec get_embedded_obj_helper(String.t() | Object.t(), User.t()) :: {:ok, Object.t()} | nil
def get_embedded_obj_helper(%{"attributedTo" => attributed_to, "id" => object_id} = data, %User{
ap_id: ap_id
})
when attributed_to == ap_id do
with {:ok, activity} <-
handle_incoming(%{
"type" => "Create",
"to" => data["to"],
"cc" => data["cc"],
"actor" => attributed_to,
"object" => data
}) do
{:ok, Object.normalize(activity)}
else
_ -> get_obj_helper(object_id)
end
end
def get_embedded_obj_helper(object_id, _) do
get_obj_helper(object_id)
end
def set_reply_to_uri(%{"inReplyTo" => in_reply_to} = object) when is_binary(in_reply_to) do
with false <- String.starts_with?(in_reply_to, "http"),
{:ok, %{data: replied_to_object}} <- get_obj_helper(in_reply_to) do
Map.put(object, "inReplyTo", replied_to_object["external_url"] || in_reply_to)
else
_e -> object
end
end
def set_reply_to_uri(obj), do: obj
@doc """
Serializes a Mastodon-compatible `replies` collection containing _self-replies_ into the object.
Based on Mastodon's ActivityPub::NoteSerializer#replies.
"""
def set_replies(obj_data) do
replies_uris =
with limit when limit > 0 <-
Pleroma.Config.get([:activitypub, :note_replies_output_limit], 0),
%Object{} = object <- Object.get_cached_by_ap_id(obj_data["id"]) do
object
|> Object.self_replies()
|> select([o], fragment("?->>'id'", o.data))
|> limit(^limit)
|> Repo.all()
else
_ -> []
end
set_replies(obj_data, replies_uris)
end
defp set_replies(obj, []) do
obj
end
defp set_replies(obj, replies_uris) do
replies_collection = %{
"type" => "Collection",
"items" => replies_uris
}
Map.merge(obj, %{"replies" => replies_collection})
end
def replies(%{"replies" => %{"first" => %{"items" => items}}}) when not is_nil(items) do
items
end
def replies(%{"replies" => %{"items" => items}}) when not is_nil(items) do
items
end
def replies(_), do: []
# Prepares the object of an outgoing create activity.
def prepare_object(object) do
object
|> set_sensitive
|> add_hashtags
|> add_mention_tags
|> add_emoji_tags
|> add_attributed_to
|> prepare_attachments
|> set_conversation
|> set_reply_to_uri
|> set_replies
|> strip_internal_fields
|> strip_internal_tags
|> set_type
end
# @doc
# """
# internal -> Mastodon
# """
def prepare_outgoing(%{"type" => activity_type, "object" => object_id} = data)
when activity_type in ["Create", "Listen"] do
object =
object_id
|> Object.normalize()
|> Map.get(:data)
|> prepare_object
data =
data
|> Map.put("object", object)
|> Map.merge(Utils.make_json_ld_header())
|> Map.delete("bcc")
{:ok, data}
end
def prepare_outgoing(%{"type" => "Announce", "actor" => ap_id, "object" => object_id} = data) do
object =
object_id
|> Object.normalize()
data =
if Visibility.is_private?(object) && object.data["actor"] == ap_id do
data |> Map.put("object", object |> Map.get(:data) |> prepare_object)
else
data |> maybe_fix_object_url
end
data =
data
|> strip_internal_fields
|> Map.merge(Utils.make_json_ld_header())
|> Map.delete("bcc")
{:ok, data}
end
# Mastodon Accept/Reject requires a non-normalized object containing the actor URIs,
# because of course it does.
def prepare_outgoing(%{"type" => "Accept"} = data) do
with follow_activity <- Activity.normalize(data["object"]) do
object = %{
"actor" => follow_activity.actor,
"object" => follow_activity.data["object"],
"id" => follow_activity.data["id"],
"type" => "Follow"
}
data =
data
|> Map.put("object", object)
|> Map.merge(Utils.make_json_ld_header())
{:ok, data}
end
end
def prepare_outgoing(%{"type" => "Reject"} = data) do
with follow_activity <- Activity.normalize(data["object"]) do
object = %{
"actor" => follow_activity.actor,
"object" => follow_activity.data["object"],
"id" => follow_activity.data["id"],
"type" => "Follow"
}
data =
data
|> Map.put("object", object)
|> Map.merge(Utils.make_json_ld_header())
{:ok, data}
end
end
def prepare_outgoing(%{"type" => _type} = data) do
data =
data
|> strip_internal_fields
|> maybe_fix_object_url
|> Map.merge(Utils.make_json_ld_header())
{:ok, data}
end
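# If the object reference is not an http(s) URL, try to resolve it locally and substitute
# the object's external_url.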
def maybe_fix_object_url(%{"object" => object} = data) when is_binary(object) do
with false <- String.starts_with?(object, "http"),
{:fetch, {:ok, relative_object}} <- {:fetch, get_obj_helper(object)},
%{data: %{"external_url" => external_url}} when not is_nil(external_url) <-
relative_object do
Map.put(data, "object", external_url)
else
{:fetch, e} ->
Logger.error("Couldn't fetch #{object} #{inspect(e)}")
data
_ ->
data
end
end
def maybe_fix_object_url(data), do: data
def add_hashtags(object) do
tags =
(object["tag"] || [])
|> Enum.map(fn
# Expand internal representation tags into AS2 tags.
tag when is_binary(tag) ->
%{
"href" => Pleroma.Web.Endpoint.url() <> "/tags/#{tag}",
"name" => "##{tag}",
"type" => "Hashtag"
}
# Do not process tags which are already AS2 tag objects.
tag when is_map(tag) ->
tag
end)
Map.put(object, "tag", tags)
end
# TODO: These should be added on our side on insertion; it doesn't make much
# sense to regenerate them all the time.
def add_mention_tags(object) do
to = object["to"] || []
cc = object["cc"] || []
mentioned = User.get_users_from_set(to ++ cc, local_only: false)
mentions = Enum.map(mentioned, &build_mention_tag/1)
tags = object["tag"] || []
Map.put(object, "tag", tags ++ mentions)
end
defp build_mention_tag(%{ap_id: ap_id, nickname: nickname} = _) do
%{"type" => "Mention", "href" => ap_id, "name" => "@#{nickname}"}
end
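# Builds AS2 Emoji tag objects from a user's custom emoji map.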
def take_emoji_tags(%User{emoji: emoji}) do
emoji
|> Map.to_list()
|> Enum.map(&build_emoji_tag/1)
end
# TODO: we should probably send mtime instead of unix epoch time for updated
def add_emoji_tags(%{"emoji" => emoji} = object) do
tags = object["tag"] || []
out = Enum.map(emoji, &build_emoji_tag/1)
Map.put(object, "tag", tags ++ out)
end
def add_emoji_tags(object), do: object
defp build_emoji_tag({name, url}) do
%{
"icon" => %{"url" => url, "type" => "Image"},
"name" => ":" <> name <> ":",
"type" => "Emoji",
"updated" => "1970-01-01T00:00:00Z",
"id" => url
}
end
def set_conversation(object) do
Map.put(object, "conversation", object["context"])
end
def set_sensitive(%{"sensitive" => true} = object) do
object
end
def set_sensitive(object) do
tags = object["tag"] || []
Map.put(object, "sensitive", "nsfw" in tags)
end
def set_type(%{"type" => "Answer"} = object) do
Map.put(object, "type", "Note")
end
def set_type(object), do: object
def add_attributed_to(object) do
attributed_to = object["attributedTo"] || object["actor"]
Map.put(object, "attributedTo", attributed_to)
end
# TODO: Revisit this
def prepare_attachments(%{"type" => "ChatMessage"} = object), do: object
def prepare_attachments(object) do
attachments =
object
|> Map.get("attachment", [])
|> Enum.map(fn data ->
[%{"mediaType" => media_type, "href" => href} | _] = data["url"]
%{
"url" => href,
"mediaType" => media_type,
"name" => data["name"],
"type" => "Document"
}
end)
Map.put(object, "attachment", attachments)
end
def strip_internal_fields(object) do
Map.drop(object, Pleroma.Constants.object_internal_fields())
end
defp strip_internal_tags(%{"tag" => tags} = object) do
tags = Enum.filter(tags, fn x -> is_map(x) end)
Map.put(object, "tag", tags)
end
defp strip_internal_tags(object), do: object
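# Rewrites recipients of existing activities, replacing the user's old follower address
# with the current one.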
def perform(:user_upgrade, user) do
# we pass a fake user so that the followers collection is stripped away
old_follower_address = User.ap_followers(%User{nickname: user.nickname})
from(
a in Activity,
where: ^old_follower_address in a.recipients,
update: [
set: [
recipients:
fragment(
"array_replace(?,?,?)",
a.recipients,
^old_follower_address,
^user.follower_address
)
]
]
)
|> Repo.update_all([])
end
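# Refetches a remote user's ActivityPub representation, updates the local record, and
# queues the activity upgrade above.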
def upgrade_user_from_ap_id(ap_id) do
with %User{local: false} = user <- User.get_cached_by_ap_id(ap_id),
{:ok, data} <- ActivityPub.fetch_and_prepare_user_from_ap_id(ap_id),
{:ok, user} <- update_user(user, data) do
TransmogrifierWorker.enqueue("user_upgrade", %{"user_id" => user.id})
{:ok, user}
else
%User{} = user -> {:ok, user}
e -> e
end
end
defp update_user(user, data) do
user
|> User.remote_user_changeset(data)
|> User.update_and_set_cache()
end
def maybe_fix_user_url(%{"url" => url} = data) when is_map(url) do
Map.put(data, "url", url["href"])
end
def maybe_fix_user_url(data), do: data
def maybe_fix_user_object(data), do: maybe_fix_user_url(data)
end