Ensure deletes are handled after everything else

FloatingGhost 2022-10-11 14:30:08 +01:00
parent cb9b0d3720
commit 856c57208b
6 changed files with 27 additions and 17 deletions


@@ -8,6 +8,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 ## Changes
 - Follows no longer override domain blocks, a domain block is final
+- Deletes are now the lowest priority to publish and will be handled after creates
 ## 2022.10


@@ -569,7 +569,10 @@
     mute_expire: 5,
     search_indexing: 10
   ],
-  plugins: [Oban.Plugins.Pruner],
+  plugins: [
+    Oban.Plugins.Pruner,
+    {Oban.Plugins.Reindexer, schedule: "@weekly"}
+  ],
   crontab: [
     {"0 0 * * 0", Pleroma.Workers.Cron.DigestEmailsWorker},
     {"0 0 * * *", Pleroma.Workers.Cron.NewUsersDigestWorker}


@@ -538,6 +538,12 @@ def run(["fix_follow_state", local_user, remote_user]) do
     end
   end
 
+  def run(["convert_id", id]) do
+    {:ok, uuid} = FlakeId.Ecto.Type.dump(id)
+    {:ok, raw_id} = Ecto.UUID.load(uuid)
+    shell_info(raw_id)
+  end
+
   defp refetch_public_keys(query) do
     query
     |> Pleroma.Repo.chunk_stream(50, :batches)
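
The new convert_id task takes a flake id (the ids Akkoma exposes in its API) and prints the underlying UUID. The same conversion can be done by hand; a minimal sketch, runnable in iex -S mix inside a checkout (the id below is a made-up example):

# Convert a flake id to its raw UUID representation -- same steps as the task above.
flake_id = "9vCxUPIdqr7zSMFGNs"                        # made-up example id
{:ok, uuid_binary} = FlakeId.Ecto.Type.dump(flake_id)  # 128-bit binary form
{:ok, uuid_string} = Ecto.UUID.load(uuid_binary)       # dashed UUID string
IO.puts(uuid_string)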


@@ -323,8 +323,6 @@ def handle(%{data: %{"type" => "Delete", "object" => deleted_object}} = object,
       end
 
     if result == :ok do
-      Notification.create_notifications(object)
-
       # Only remove from index when deleting actual objects, not users or anything else
       with %Pleroma.Object{} <- deleted_object do
         Pleroma.Search.remove_from_index(deleted_object)


@@ -53,12 +53,19 @@ def publish(%{data: %{"object" => object}} = activity) when is_binary(object) do
   @impl true
   def publish(%{data: %{"object" => object}} = activity) when is_map(object) or is_list(object) do
-    PublisherWorker.enqueue("publish", %{
-      "activity_id" => activity.id,
-      "object_data" => Jason.encode!(object)
-    })
+    PublisherWorker.enqueue(
+      "publish",
+      %{
+        "activity_id" => activity.id,
+        "object_data" => Jason.encode!(object)
+      },
+      priority: publish_priority(activity)
+    )
   end
 
+  defp publish_priority(%{type: "Delete"}), do: 3
+  defp publish_priority(_), do: 0
+
   # Job Worker Callbacks
 
   @spec perform(atom(), module(), any()) :: {:ok, any()} | {:error, any()}
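
The priority option is handed through to Oban, which dispatches jobs within a queue lowest-number-first, so Delete activities (priority 3) are only published after the default-priority (0) jobs such as creates have been picked up. A minimal, generic sketch of the mechanism, assuming a plain Oban worker; the module and queue below are illustrative, not Akkoma's PublisherWorker:

defmodule MyApp.ExampleWorker do
  # Illustrative worker; the queue name is an assumption for the sketch.
  use Oban.Worker, queue: :default

  @impl Oban.Worker
  def perform(%Oban.Job{args: args}) do
    IO.inspect(args, label: "publishing")
    :ok
  end
end

# Deletes go in at the lowest priority, everything else at the default (0):
%{"op" => "publish", "activity_id" => "some-id"}
|> MyApp.ExampleWorker.new(priority: 3)
|> Oban.insert()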


@@ -63,16 +63,11 @@ def get_friends(user, params \\ %{}) do
   def get_notifications(user, params \\ %{}) do
     options = cast_params(params)
 
-    query =
     user
     |> Notification.for_user_query(options)
     |> restrict(:include_types, options)
     |> restrict(:exclude_types, options)
     |> restrict(:account_ap_id, options)
-
-    IO.inspect(Pleroma.Repo.to_sql(:all, query))
-
-    query
     |> Pagination.fetch_paginated(params)
   end