# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Object do
use Ecto.Schema

import Ecto.Changeset
import Ecto.Query

alias Pleroma.Activity
alias Pleroma.Config
alias Pleroma.Object
alias Pleroma.Object.Fetcher
alias Pleroma.ObjectTombstone
alias Pleroma.Repo
alias Pleroma.User
alias Pleroma.Workers.AttachmentsCleanupWorker

require Logger
@type t() :: %__MODULE__{}

# Only the ActivityPub data map is serialized to JSON.
@derive {Jason.Encoder, only: [:data]}

schema "objects" do
  # The full ActivityPub object is stored as a JSONB map in the `data` column.
  field(:data, :map)

  # inserted_at / updated_at; updated_at also drives refetch staleness checks
  # in get_by_id_and_maybe_refetch/2.
  timestamps()
end
# Joins, onto `query`, the Activity of the given `activity_type` (default
# "Create") that targets each object, bound under the alias `:object_activity`.
#
# The COALESCE in the join condition covers both activity shapes: an embedded
# object map (data->'object'->>'id') and a bare ap_id string (data->>'object').
def with_joined_activity(query, activity_type \\ "Create", join_type \\ :inner) do
  # Reuse the query's :object named binding when present, else position 0.
  object_position = Map.get(query.aliases, :object, 0)

  join(query, join_type, [{object, object_position}], a in Activity,
    on:
      fragment(
        "COALESCE(?->'object'->>'id', ?->>'object') = (? ->> 'id') AND (?->>'type' = ?) ",
        a.data,
        a.data,
        object.data,
        a.data,
        ^activity_type
      ),
    as: :object_activity
  )
end
@doc "Inserts a new object row wrapping the given ActivityPub `data` map."
def create(data) do
  %Object{}
  |> Object.change(%{data: data})
  |> Repo.insert()
end
@doc """
Builds a changeset for `struct`: casts `:data`, requires it, and maps the
`objects_unique_apid_index` DB constraint onto the `:ap_id` field.
"""
def change(struct, params \\ %{}) do
  changeset =
    struct
    |> cast(params, [:data])
    |> validate_required([:data])

  unique_constraint(changeset, :ap_id, name: :objects_unique_apid_index)
end
# Primary-key lookup; a `nil` id short-circuits to `nil` without touching the DB.
def get_by_id(nil), do: nil

def get_by_id(id) do
  Repo.get(Object, id)
end
@doc """
Fetches an object by database id; when `opts[:interval]` (seconds) is given
and the row is older than that, tries to refetch it from the remote instance.

Returns the (possibly refreshed) object, or `nil` when no object with the
given id exists. (The previous version raised a `MatchError` on a missing
object because it pattern-matched `updated_at` out of a `nil` result.)
"""
def get_by_id_and_maybe_refetch(id, opts \\ []) do
  case get_by_id(id) do
    nil ->
      nil

    %Object{updated_at: updated_at} = object ->
      stale? =
        opts[:interval] &&
          NaiveDateTime.diff(NaiveDateTime.utc_now(), updated_at) > opts[:interval]

      if stale? do
        case Fetcher.refetch_object(object) do
          {:ok, %Object{} = refreshed} ->
            refreshed

          e ->
            # Refetch failures are logged but non-fatal; serve the stale copy.
            Logger.error("Couldn't refresh #{object.data["id"]}:\n#{inspect(e)}")
            object
        end
      else
        object
      end
  end
end
# Fetches the object whose AP id (data->>'id') equals `ap_id`.
def get_by_ap_id(nil), do: nil

def get_by_ap_id(ap_id) do
  query = from(object in Object, where: fragment("(?)->>'id' = ?", object.data, ^ap_id))
  Repo.one(query)
end
@doc """
Get a single attachment by it's name and href
"""
@spec get_attachment_by_name_and_href(String.t(), String.t()) :: Object.t() | nil
def get_attachment_by_name_and_href(name, href) do
query =
from(o in Object,
where: fragment("(?)->>'name' = ?", o.data, ^name),
where: fragment("(?)->>'href' = ?", o.data, ^href)
)
Repo.one(query)
end
# Emits debug log lines (message + current stacktrace) when normalize/3 has
# to hit the database because the caller did not preload the object.
defp warn_on_no_object_preloaded(ap_id) do
  Logger.debug(
    "Object.normalize() called without preloaded object (#{inspect(ap_id)}). Consider preloading the object"
  )

  Logger.debug("Backtrace: #{inspect(Process.info(:erlang.self(), :current_stacktrace))}")
end
@doc """
Resolves many input shapes — `%Object{}`, `%Activity{}`, a raw AP map, or an
ap_id binary — to an `%Object{}`, or `nil` when nothing can be resolved.
"""
def normalize(_, fetch_remote \\ true, options \\ [])

# Fast paths: the object is already loaded (or preloaded on the activity).
# Use these whenever possible, especially when walking graphs in an O(N) loop!
def normalize(%Object{} = object, _, _), do: object
def normalize(%Activity{object: %Object{} = object}, _, _), do: object

# Fake (preview) activities carry their object inline; wrap it in a stub.
def normalize(%Activity{data: %{"object" => %{"fake" => true} = data}}, _, _) do
  %Object{id: "pleroma:fake_object_id", data: data}
end

# Activity with an embedded object map but no preload: warn, resolve by id.
def normalize(%Activity{data: %{"object" => %{"id" => ap_id}}}, fetch_remote, _) do
  warn_on_no_object_preloaded(ap_id)
  normalize(ap_id, fetch_remote)
end

# Activity whose "object" field is a bare ap_id: warn, resolve by id.
def normalize(%Activity{data: %{"object" => ap_id}}, fetch_remote, _) do
  warn_on_no_object_preloaded(ap_id)
  normalize(ap_id, fetch_remote)
end

# Raw AP map: resolve through its id, possibly via the cache.
def normalize(%{"id" => ap_id}, fetch_remote, _), do: normalize(ap_id, fetch_remote)

# Binary ap_id: cache-only lookup, or a full remote fetch.
def normalize(ap_id, false, _) when is_binary(ap_id), do: get_cached_by_ap_id(ap_id)

def normalize(ap_id, true, options) when is_binary(ap_id) do
  Fetcher.fetch_object_from_id!(ap_id, options)
end

# Anything else cannot be normalized.
def normalize(_, _, _), do: nil
@doc """
Authorizes access to an object: objects carrying an "actor" field are only
accessible to that actor; legacy objects without one are open to anybody.
"""
# Owner: the object's actor matches the user's ap_id (same binding twice).
def authorize_access(%Object{data: %{"actor" => ap_id}}, %User{ap_id: ap_id}), do: :ok
# Owned by someone else.
def authorize_access(%Object{data: %{"actor" => _}}, %User{}), do: {:error, :forbidden}
# Legacy object with no actor.
def authorize_access(%Object{}, %User{}), do: :ok
# Cache-through lookup by AP id: serve from :object_cache when present,
# otherwise load from the DB and populate the cache on the way out.
@spec get_cached_by_ap_id(String.t()) :: Object.t() | nil
def get_cached_by_ap_id(ap_id) do
  cache_key = "object:#{ap_id}"

  case Cachex.get(:object_cache, cache_key) do
    # Cache miss: hit the database and store the result on success.
    {:ok, nil} ->
      with object when not is_nil(object) <- get_by_ap_id(ap_id),
           {:ok, true} <- Cachex.put(:object_cache, cache_key, object) do
        object
      else
        nil -> nil
        {:ok, other} -> other
      end

    # Cache hit.
    {:ok, object} ->
      object
  end
end
# Builds a changeset for a bare context object whose data is %{"id" => context}.
def context_mapping(context) do
  %Object{}
  |> Object.change(%{data: %{"id" => context}})
end
# Converts an object into a plain tombstone map preserving its id, former
# type, and deletion time (defaults to now).
def make_tombstone(%Object{data: %{"id" => id, "type" => type}}, deleted \\ DateTime.utc_now()) do
  tombstone = %ObjectTombstone{id: id, formerType: type, deleted: deleted}
  Map.from_struct(tombstone)
end
# Replaces the object's data with its tombstone representation in the DB.
def swap_object_with_tombstone(object) do
  object
  |> Object.change(%{data: make_tombstone(object)})
  |> Repo.update()
end
@doc """
Deletes an object: swaps it for a tombstone, deletes all activities that
refer to it, invalidates the caches, and (when the instance is configured
for it) schedules attachment cleanup.

Returns `{:ok, object, deleted_activity}` on success, or the failing step's
error tuple.
"""
def delete(%Object{data: %{"id" => id}} = object) do
  # `<-` (not `=`) so a failed tombstone swap falls through as an error tuple
  # instead of raising a MatchError inside `with`.
  with {:ok, _obj} <- swap_object_with_tombstone(object),
       deleted_activity = Activity.delete_all_by_object_ap_id(id),
       {:ok, _} <- invalid_object_cache(object) do
    cleanup_attachments(
      Config.get([:instance, :cleanup_attachments]),
      %{"object" => object}
    )

    {:ok, object, deleted_activity}
  end
end
# Enqueues attachment cleanup only when the first argument (the instance's
# :cleanup_attachments setting) is true; otherwise a no-op returning {:ok, nil}.
# NOTE(review): the spec's `:object` atom key does not match the "object"
# string key pattern-matched below — confirm which one callers pass.
@spec cleanup_attachments(boolean(), %{required(:object) => map()}) ::
        {:ok, Oban.Job.t() | nil}
def cleanup_attachments(true, %{"object" => _} = params) do
  AttachmentsCleanupWorker.enqueue("cleanup_attachments", params)
end

def cleanup_attachments(_, _), do: {:ok, nil}
# Hard-deletes the object row and drops it from the caches.
def prune(%Object{data: %{"id" => _id}} = object) do
  with {:ok, deleted} <- Repo.delete(object),
       {:ok, _} <- invalid_object_cache(deleted) do
    {:ok, deleted}
  end
end
# Evicts the object from :object_cache and its rendered page from
# :web_resp_cache. NOTE(review): the name reads like a typo for
# `invalidate_object_cache`, but it is public API and kept as-is.
def invalid_object_cache(%Object{data: %{"id" => id}}) do
  case Cachex.del(:object_cache, "object:#{id}") do
    {:ok, true} -> Cachex.del(:web_resp_cache, URI.parse(id).path)
    other -> other
  end
end
# Stores `object` in the object cache keyed by its AP id. Always returns
# `{:ok, object}` — the cache write result is intentionally ignored.
def set_cache(%Object{data: %{"id" => ap_id}} = object) do
  cache_key = "object:#{ap_id}"
  Cachex.put(:object_cache, cache_key, object)
  {:ok, object}
end
# Applies the changeset and refreshes the cache; DB errors pass through.
def update_and_set_cache(changeset) do
  case Repo.update(changeset) do
    {:ok, object} -> set_cache(object)
    error -> error
  end
end
# Atomically increments the `repliesCount` JSON field (missing counts as 0)
# of the object whose AP id matches `ap_id`, entirely in SQL via
# safe_jsonb_set. Returns the updated, re-cached object, or
# {:error, "Not found"} when not exactly one row was updated.
def increase_replies_count(ap_id) do
  Object
  |> where([o], fragment("?->>'id' = ?::text", o.data, ^to_string(ap_id)))
  |> update([o],
    set: [
      data:
        fragment(
          """
          safe_jsonb_set(?, '{repliesCount}',
          (coalesce((?->>'repliesCount')::int, 0) + 1)::varchar::jsonb, true)
          """,
          o.data,
          o.data
        )
    ]
  )
  |> Repo.update_all([])
  |> case do
    {1, [object]} -> set_cache(object)
    _ -> {:error, "Not found"}
  end
end
# A poll is multiple-choice when its data carries a non-empty "anyOf" list;
# single-choice polls keep options under "oneOf" instead.
defp poll_is_multiple?(%Object{data: %{"anyOf" => [_ | _]}}), do: true
defp poll_is_multiple?(_), do: false
# Atomically decrements the `repliesCount` JSON field of the object whose AP
# id matches `ap_id`, clamped at 0 via greatest(), entirely in SQL. Returns
# the updated, re-cached object, or {:error, "Not found"} when not exactly
# one row was updated.
def decrease_replies_count(ap_id) do
  Object
  |> where([o], fragment("?->>'id' = ?::text", o.data, ^to_string(ap_id)))
  |> update([o],
    set: [
      data:
        fragment(
          """
          safe_jsonb_set(?, '{repliesCount}',
          (greatest(0, (?->>'repliesCount')::int - 1))::varchar::jsonb, true)
          """,
          o.data,
          o.data
        )
    ]
  )
  |> Repo.update_all([])
  |> case do
    {1, [object]} -> set_cache(object)
    _ -> {:error, "Not found"}
  end
end
@doc """
Records a vote by `actor` for option `name` on the Question object behind
`ap_id`: bumps that option's reply tally, adds the actor to "voters"
(deduplicated), and persists + re-caches the result.

Returns `:noop` when the object cannot be normalized or is not a Question.
"""
def increase_vote_count(ap_id, name, actor) do
  with %Object{} = object <- Object.normalize(ap_id),
       "Question" <- object.data["type"] do
    # Multiple-choice polls keep options under "anyOf", single-choice under "oneOf".
    options_key = if poll_is_multiple?(object), do: "anyOf", else: "oneOf"

    updated_options =
      Enum.map(object.data[options_key], fn
        # Only the voted-for option gets its totalItems incremented.
        %{"name" => ^name} = option ->
          update_in(option, ["replies", "totalItems"], &(&1 + 1))

        option ->
          option
      end)

    voters = Enum.uniq([actor | object.data["voters"] || []])

    new_data =
      object.data
      |> Map.put(options_key, updated_options)
      |> Map.put("voters", voters)

    object
    |> Object.change(%{data: new_data})
    |> update_and_set_cache()
  else
    _ -> :noop
  end
end
@doc "Updates data field of an object"
def update_data(%Object{data: data} = object, attrs \\ %{}) do
object
|> Object.change(%{data: Map.merge(data || %{}, attrs)})
|> Repo.update()
end
# True when the object's AP id lives under this instance's base URL.
def local?(%Object{data: %{"id" => id}}) do
  local_prefix = Pleroma.Web.base_url() <> "/"
  String.starts_with?(id, local_prefix)
end
# Returns a query for all objects replying to `object`, oldest first.
# With `self_only: true`, restricts results to the original author's replies.
def replies(object, opts \\ []) do
  object = Object.normalize(object)

  base_query =
    Object
    |> where([o], fragment("(?)->>'inReplyTo' = ?", o.data, ^object.data["id"]))
    |> order_by([o], asc: o.id)

  if opts[:self_only] do
    self_actor = object.data["actor"]
    where(base_query, [o], fragment("(?)->>'actor' = ?", o.data, ^self_actor))
  else
    base_query
  end
end
# Convenience wrapper: replies/2 limited to the author's own replies.
def self_replies(object, opts \\ []) do
  replies(object, Keyword.put(opts, :self_only, true))
end
end