akkoma/lib/pleroma/web/common_api.ex

# Pleroma: A lightweight social networking server
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.CommonAPI do
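# High-level helpers for common user actions: blocking, following, posting,
# deleting, favouriting, reacting, voting, reporting, muting and pinning.
# Most helpers build an activity with Builder and run it through
# Pipeline.common_pipeline/2.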
alias Pleroma.Activity
alias Pleroma.Conversation.Participation
alias Pleroma.Object
alias Pleroma.ThreadMute
alias Pleroma.User
alias Pleroma.UserRelationship
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.ActivityPub.Builder
alias Pleroma.Web.ActivityPub.Pipeline
alias Pleroma.Web.ActivityPub.Utils
alias Pleroma.Web.ActivityPub.Visibility
alias Pleroma.Web.CommonAPI.ActivityDraft
import Pleroma.Web.Gettext
import Pleroma.Web.CommonAPI.Utils
require Pleroma.Constants
require Logger
def block(blocker, blocked) do
with {:ok, block_data, _} <- Builder.block(blocker, blocked),
{:ok, block, _} <- Pipeline.common_pipeline(block_data, local: true) do
{:ok, block}
end
end
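# Undoes the latest Block activity against `blocked`. If no block activity
# exists but the user-level block relationship does, only the relationship is
# removed and {:ok, :no_activity} is returned.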
def unblock(blocker, blocked) do
with {_, %Activity{} = block} <- {:fetch_block, Utils.fetch_latest_block(blocker, blocked)},
{:ok, unblock_data, _} <- Builder.undo(blocker, block),
{:ok, unblock, _} <- Pipeline.common_pipeline(unblock_data, local: true) do
{:ok, unblock}
else
{:fetch_block, nil} ->
if User.blocks?(blocker, blocked) do
User.unblock(blocker, blocked)
{:ok, :no_activity}
else
{:error, :not_blocking}
end
e ->
e
end
end
def follow(follower, followed) do
timeout = Pleroma.Config.get([:activitypub, :follow_handshake_timeout])
with {:ok, follow_data, _} <- Builder.follow(follower, followed),
{:ok, activity, _} <- Pipeline.common_pipeline(follow_data, local: true),
{:ok, follower, followed} <- User.wait_and_refresh(timeout, follower, followed) do
if activity.data["state"] == "reject" do
{:error, :rejected}
else
{:ok, follower, followed, activity}
end
end
end
def unfollow(follower, unfollowed) do
with {:ok, follower, _follow_activity} <- User.unfollow(follower, unfollowed),
{:ok, _activity} <- ActivityPub.unfollow(follower, unfollowed),
{:ok, _subscription} <- User.unsubscribe(follower, unfollowed) do
{:ok, follower}
end
end
def accept_follow_request(follower, followed) do
with %Activity{} = follow_activity <- Utils.fetch_latest_follow(follower, followed),
{:ok, accept_data, _} <- Builder.accept(followed, follow_activity),
{:ok, _activity, _} <- Pipeline.common_pipeline(accept_data, local: true) do
{:ok, follower}
end
end
def reject_follow_request(follower, followed) do
with %Activity{} = follow_activity <- Utils.fetch_latest_follow(follower, followed),
{:ok, reject_data, _} <- Builder.reject(followed, follow_activity),
{:ok, _activity, _} <- Pipeline.common_pipeline(reject_data, local: true) do
{:ok, follower}
end
end
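# Deletes the object behind a Create activity. Allowed for superusers and for
# the object's own actor. If the object itself is missing (e.g. pruned), a
# tombstone is inserted first and the deletion is retried.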
def delete(activity_id, user) do
with {_, %Activity{data: %{"object" => _, "type" => "Create"}} = activity} <-
{:find_activity, Activity.get_by_id(activity_id)},
{_, %Object{} = object, _} <-
{:find_object, Object.normalize(activity, fetch: false), activity},
true <- User.superuser?(user) || user.ap_id == object.data["actor"],
{:ok, delete_data, _} <- Builder.delete(user, object.data["id"]),
{:ok, delete, _} <- Pipeline.common_pipeline(delete_data, local: true) do
{:ok, delete}
else
{:find_activity, _} ->
{:error, :not_found}
{:find_object, nil, %Activity{data: %{"actor" => actor, "object" => object}}} ->
# We have the create activity but not the object; it was probably pruned.
# Insert a tombstone and try again.
with {:ok, tombstone_data, _} <- Builder.tombstone(actor, object),
{:ok, _tombstone} <- Object.create(tombstone_data) do
delete(activity_id, user)
else
_ ->
Logger.error(
"Could not insert tombstone for missing object on deletion. Object is #{object}."
)
{:error, dgettext("errors", "Could not delete")}
end
_ ->
{:error, dgettext("errors", "Could not delete")}
end
end
def repeat(id, user, params \\ %{}) do
with %Activity{data: %{"type" => "Create"}} = activity <- Activity.get_by_id(id),
object = %Object{} <- Object.normalize(activity, fetch: false),
{_, nil} <- {:existing_announce, Utils.get_existing_announce(user.ap_id, object)},
public = public_announce?(object, params),
{:ok, announce, _} <- Builder.announce(user, object, public: public),
{:ok, activity, _} <- Pipeline.common_pipeline(announce, local: true) do
{:ok, activity}
else
{:existing_announce, %Activity{} = announce} ->
{:ok, announce}
_ ->
{:error, :not_found}
end
end
def unrepeat(id, user) do
with {_, %Activity{data: %{"type" => "Create"}} = activity} <-
{:find_activity, Activity.get_by_id(id)},
%Object{} = note <- Object.normalize(activity, fetch: false),
%Activity{} = announce <- Utils.get_existing_announce(user.ap_id, note),
{:ok, undo, _} <- Builder.undo(user, announce),
{:ok, activity, _} <- Pipeline.common_pipeline(undo, local: true) do
{:ok, activity}
else
{:find_activity, _} -> {:error, :not_found}
_ -> {:error, dgettext("errors", "Could not unrepeat")}
end
end
@spec favorite(User.t(), binary()) :: {:ok, Activity.t() | :already_liked} | {:error, any()}
def favorite(%User{} = user, id) do
case favorite_helper(user, id) do
{:ok, _} = res ->
res
{:error, :not_found} = res ->
res
{:error, e} ->
Logger.error("Could not favorite #{id}. Error: #{inspect(e, pretty: true)}")
{:error, dgettext("errors", "Could not favorite")}
end
end
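# Builds and runs a Like through the pipeline. An "already liked by this
# actor" validation error is reported as {:ok, :already_liked} rather than a
# failure.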
def favorite_helper(user, id) do
with {_, %Activity{object: object}} <- {:find_object, Activity.get_by_id_with_object(id)},
{_, {:ok, like_object, meta}} <- {:build_object, Builder.like(user, object)},
{_, {:ok, %Activity{} = activity, _meta}} <-
{:common_pipeline,
Pipeline.common_pipeline(like_object, Keyword.put(meta, :local, true))} do
{:ok, activity}
else
{:find_object, _} ->
{:error, :not_found}
{:common_pipeline, {:error, {:validate, {:error, changeset}}}} = e ->
if {:object, {"already liked by this actor", []}} in changeset.errors do
{:ok, :already_liked}
else
{:error, e}
end
e ->
{:error, e}
end
end
def unfavorite(id, user) do
with {_, %Activity{data: %{"type" => "Create"}} = activity} <-
{:find_activity, Activity.get_by_id(id)},
%Object{} = note <- Object.normalize(activity, fetch: false),
%Activity{} = like <- Utils.get_existing_like(user.ap_id, note),
{:ok, undo, _} <- Builder.undo(user, like),
{:ok, activity, _} <- Pipeline.common_pipeline(undo, local: true) do
{:ok, activity}
else
{:find_activity, _} -> {:error, :not_found}
_ -> {:error, dgettext("errors", "Could not unfavorite")}
end
end
def react_with_emoji(id, user, emoji) do
with %Activity{} = activity <- Activity.get_by_id(id),
object <- Object.normalize(activity, fetch: false),
{:ok, emoji_react, _} <- Builder.emoji_react(user, object, emoji),
{:ok, activity, _} <- Pipeline.common_pipeline(emoji_react, local: true) do
{:ok, activity}
else
_ -> {:error, dgettext("errors", "Could not add reaction emoji")}
end
end
def unreact_with_emoji(id, user, emoji) do
with %Activity{} = reaction_activity <- Utils.get_latest_reaction(id, user, emoji),
{:ok, undo, _} <- Builder.undo(user, reaction_activity),
{:ok, activity, _} <- Pipeline.common_pipeline(undo, local: true) do
{:ok, activity}
else
_ ->
{:error, dgettext("errors", "Could not remove reaction emoji")}
end
end
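# Casts a vote on a Question object: validates that the user is not the
# poll's author and has not voted yet, then creates one Answer activity per
# chosen option.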
def vote(user, %{data: %{"type" => "Question"}} = object, choices) do
with :ok <- validate_not_author(object, user),
:ok <- validate_existing_votes(user, object),
{:ok, options, choices} <- normalize_and_validate_choices(choices, object) do
answer_activities =
Enum.map(choices, fn index ->
{:ok, answer_object, _meta} =
Builder.answer(user, object, Enum.at(options, index)["name"])
{:ok, activity_data, _meta} = Builder.create(user, answer_object, [])
{:ok, activity, _meta} =
activity_data
|> Map.put("cc", answer_object["cc"])
|> Map.put("context", answer_object["context"])
|> Pipeline.common_pipeline(local: true)
# TODO: Do preload of Pleroma.Object in Pipeline
Activity.normalize(activity.data)
end)
object = Object.get_cached_by_ap_id(object.data["id"])
{:ok, answer_activities, object}
end
end
defp validate_not_author(%{data: %{"actor" => ap_id}}, %{ap_id: ap_id}),
do: {:error, dgettext("errors", "Poll's author can't vote")}
defp validate_not_author(_, _), do: :ok
defp validate_existing_votes(%{ap_id: ap_id}, object) do
if Utils.get_existing_votes(ap_id, object) == [] do
:ok
else
{:error, dgettext("errors", "Already voted")}
end
end
defp get_options_and_max_count(%{data: %{"anyOf" => any_of}})
when is_list(any_of) and any_of != [],
do: {any_of, Enum.count(any_of)}
defp get_options_and_max_count(%{data: %{"oneOf" => one_of}})
when is_list(one_of) and one_of != [],
do: {one_of, 1}
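# Converts choice indices to integers and validates them against the poll's
# options and the maximum number of allowed choices (all of "anyOf", exactly
# one of "oneOf").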
defp normalize_and_validate_choices(choices, object) do
choices = Enum.map(choices, fn i -> if is_binary(i), do: String.to_integer(i), else: i end)
{options, max_count} = get_options_and_max_count(object)
count = Enum.count(options)
with {_, true} <- {:valid_choice, Enum.all?(choices, &(&1 < count))},
{_, true} <- {:count_check, Enum.count(choices) <= max_count} do
{:ok, options, choices}
else
{:valid_choice, _} -> {:error, dgettext("errors", "Invalid indices")}
{:count_check, _} -> {:error, dgettext("errors", "Too many choices")}
end
end
def public_announce?(_, %{visibility: visibility})
when visibility in ~w{public unlisted private direct},
do: visibility in ~w(public unlisted)
def public_announce?(object, _) do
Visibility.is_public?(object)
end
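# Returns {visibility, in_reply_to_visibility} for a new post, derived from
# the given params, the replied-to activity and (for conversation replies)
# the participation.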
def get_visibility(_, _, %Participation{}), do: {"direct", "direct"}
def get_visibility(%{visibility: visibility}, in_reply_to, _)
when visibility in ~w{public local unlisted private direct},
do: {visibility, get_replied_to_visibility(in_reply_to)}
def get_visibility(%{visibility: "list:" <> list_id}, in_reply_to, _) do
visibility = {:list, String.to_integer(list_id)}
{visibility, get_replied_to_visibility(in_reply_to)}
end
def get_visibility(_, in_reply_to, _) when not is_nil(in_reply_to) do
visibility = get_replied_to_visibility(in_reply_to)
{visibility, visibility}
end
def get_visibility(_, in_reply_to, _), do: {"public", get_replied_to_visibility(in_reply_to)}
def get_replied_to_visibility(nil), do: nil
def get_replied_to_visibility(activity) do
with %Object{} = object <- Object.normalize(activity, fetch: false) do
Visibility.get_visibility(object)
end
end
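# Normalizes an expiry given in seconds (integer or string) and rejects
# values that PurgeExpiredActivity.expires_late_enough?/1 considers too soon.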
def check_expiry_date({:ok, nil} = res), do: res
def check_expiry_date({:ok, in_seconds}) do
expiry = DateTime.add(DateTime.utc_now(), in_seconds)
if Pleroma.Workers.PurgeExpiredActivity.expires_late_enough?(expiry) do
{:ok, expiry}
else
{:error, "Expiry date is too soon"}
end
end
def check_expiry_date(expiry_str) do
Ecto.Type.cast(:integer, expiry_str)
|> check_expiry_date()
end
def post(user, %{status: _} = data) do
with {:ok, draft} <- ActivityDraft.create(user, data) do
ActivityPub.create(draft.changes, draft.preview?)
end
end
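# Pins one of the user's own public Note/Article/Question statuses to their
# profile (featured collection).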
@spec pin(String.t(), User.t()) :: {:ok, Activity.t()} | {:error, term()}
def pin(id, %User{} = user) do
with %Activity{} = activity <- create_activity_by_id(id),
true <- activity_belongs_to_actor(activity, user.ap_id),
true <- object_type_is_allowed_for_pin(activity.object),
true <- activity_is_public(activity),
{:ok, pin_data, _} <- Builder.pin(user, activity.object),
{:ok, _pin, _} <-
Pipeline.common_pipeline(pin_data,
local: true,
activity_id: id
) do
{:ok, activity}
else
{:error, {:side_effects, error}} -> error
error -> error
end
end
defp create_activity_by_id(id) do
with nil <- Activity.create_by_id_with_object(id) do
{:error, :not_found}
end
end
defp activity_belongs_to_actor(%{actor: actor}, actor), do: true
defp activity_belongs_to_actor(_, _), do: {:error, :ownership_error}
defp object_type_is_allowed_for_pin(%{data: %{"type" => type}}) do
with false <- type in ["Note", "Article", "Question"] do
{:error, :not_allowed}
end
end
defp activity_is_public(activity) do
with false <- Visibility.is_public?(activity) do
{:error, :visibility_error}
end
end
@spec unpin(String.t(), User.t()) :: {:ok, User.t()} | {:error, term()}
def unpin(id, user) do
with %Activity{} = activity <- create_activity_by_id(id),
{:ok, unpin_data, _} <- Builder.unpin(user, activity.object),
{:ok, _unpin, _} <-
Pipeline.common_pipeline(unpin_data,
local: true,
activity_id: activity.id,
expires_at: activity.data["expires_at"],
featured_address: user.featured_address
) do
{:ok, activity}
end
end
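# Mutes the thread (context) the activity belongs to and marks its existing
# notifications as read. A positive :expires_in (seconds) schedules an
# automatic unmute via MuteExpireWorker.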
def add_mute(user, activity, params \\ %{}) do
expires_in = Map.get(params, :expires_in, 0)
with {:ok, _} <- ThreadMute.add_mute(user.id, activity.data["context"]),
_ <- Pleroma.Notification.mark_context_as_read(user, activity.data["context"]) do
if expires_in > 0 do
Pleroma.Workers.MuteExpireWorker.enqueue(
"unmute_conversation",
%{"user_id" => user.id, "activity_id" => activity.id},
schedule_in: expires_in
)
end
{:ok, activity}
else
{:error, _} -> {:error, dgettext("errors", "conversation is already muted")}
end
end
def remove_mute(%User{} = user, %Activity{} = activity) do
ThreadMute.remove_mute(user.id, activity.data["context"])
{:ok, activity}
end
def remove_mute(user_id, activity_id) do
with {:user, %User{} = user} <- {:user, User.get_by_id(user_id)},
{:activity, %Activity{} = activity} <- {:activity, Activity.get_by_id(activity_id)} do
remove_mute(user, activity)
else
{what, result} = error ->
Logger.warn(
"CommonAPI.remove_mute/2 failed. #{what}: #{result}, user_id: #{user_id}, activity_id: #{activity_id}"
)
{:error, error}
end
end
def thread_muted?(%User{id: user_id}, %{data: %{"context" => context}})
when is_binary(context) do
ThreadMute.exists?(user_id, context)
end
def thread_muted?(_, _), do: false
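# Files a report (Flag activity) against an account, optionally covering
# specific statuses; :forward controls whether the report is also sent to the
# remote instance.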
def report(user, data) do
with {:ok, account} <- get_reported_account(data.account_id),
{:ok, {content_html, _, _}} <- make_report_content_html(data[:comment]),
{:ok, statuses} <- get_report_statuses(account, data) do
ActivityPub.flag(%{
context: Utils.generate_context_id(),
actor: user,
account: account,
statuses: statuses,
content: content_html,
forward: Map.get(data, :forward, false)
})
end
end
defp get_reported_account(account_id) do
case User.get_cached_by_id(account_id) do
%User{} = account -> {:ok, account}
_ -> {:error, dgettext("errors", "Account not found")}
end
end
def update_report_state(activity_ids, state) when is_list(activity_ids) do
case Utils.update_report_state(activity_ids, state) do
:ok -> {:ok, activity_ids}
_ -> {:error, dgettext("errors", "Could not update state")}
end
end
def update_report_state(activity_id, state) do
with %Activity{} = activity <- Activity.get_by_id(activity_id) do
Utils.update_report_state(activity, state)
else
nil -> {:error, :not_found}
_ -> {:error, dgettext("errors", "Could not update state")}
end
end
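# Moderation helper: optionally toggles the object's "sensitive" flag and
# updates the activity's visibility according to the given opts.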
def update_activity_scope(activity_id, opts \\ %{}) do
with %Activity{} = activity <- Activity.get_by_id_with_object(activity_id),
{:ok, activity} <- toggle_sensitive(activity, opts) do
set_visibility(activity, opts)
else
nil -> {:error, :not_found}
{:error, reason} -> {:error, reason}
end
end
defp toggle_sensitive(activity, %{sensitive: sensitive}) when sensitive in ~w(true false) do
toggle_sensitive(activity, %{sensitive: String.to_existing_atom(sensitive)})
end
defp toggle_sensitive(%Activity{object: object} = activity, %{sensitive: sensitive})
when is_boolean(sensitive) do
new_data = Map.put(object.data, "sensitive", sensitive)
{:ok, object} =
object
|> Object.change(%{data: new_data})
|> Object.update_and_set_cache()
{:ok, Map.put(activity, :object, object)}
end
defp toggle_sensitive(activity, _), do: {:ok, activity}
defp set_visibility(activity, %{visibility: visibility}) do
Utils.update_activity_visibility(activity, visibility)
end
defp set_visibility(activity, _), do: {:ok, activity}
def hide_reblogs(%User{} = user, %User{} = target) do
UserRelationship.create_reblog_mute(user, target)
end
def show_reblogs(%User{} = user, %User{} = target) do
UserRelationship.delete_reblog_mute(user, target)
end
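# Resolves an AP id to a user: tries the cache, then a nickname guess, and
# finally (unless disabled) falls back to a fake error-user record.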
def get_user(ap_id, fake_record_fallback \\ true) do
cond do
user = User.get_cached_by_ap_id(ap_id) ->
user
user = User.get_by_guessed_nickname(ap_id) ->
user
fake_record_fallback ->
# TODO: refactor (fake records are never a good idea)
User.error_user(ap_id)
true ->
nil
end
end
end