forked from AkkomaGang/akkoma
retry behaviour
This commit is contained in:
parent 790c498818
commit 706223c2d8
5 changed files with 114 additions and 40 deletions
@@ -4,6 +4,9 @@ defmodule Pleroma.Broadway do
   require Logger
 
   @queue "akkoma"
+  @exchange "akkoma_exchange"
+  @retry_header "x-retries"
+  @delay_header "x-delay"
 
   def start_link(_args) do
     Broadway.start_link(__MODULE__,
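These attributes name the delayed-message exchange and the two custom headers the rest of the commit relies on. The mechanism is RabbitMQ's delayed-message exchange plugin: a message published to an exchange of type x-delayed-message that carries an x-delay header (milliseconds) is held by the broker and only routed to its bound queues once the delay elapses. A minimal sketch of that interaction, illustrative only, assuming the plugin is enabled on the broker and reusing the names declared here:

# Illustrative sketch, not part of the commit: publishing through the delayed
# exchange with an x-delay header makes RabbitMQ hold the message for ~5s
# before routing it to the "akkoma" queue bound further down in this diff.
{:ok, conn} = AMQP.Connection.open()
{:ok, chan} = AMQP.Channel.open(conn)

:ok =
  AMQP.Basic.publish(chan, "akkoma_exchange", "some.routing.key", ~s({"op": "noop"}),
    headers: [{"x-delay", :long, 5_000}]
  )

:ok = AMQP.Connection.close(conn)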
@@ -12,16 +15,9 @@ def start_link(_args) do
       module:
         {BroadwayRabbitMQ.Producer,
          queue: @queue,
-         declare: [
-           durable: true,
-           auto_delete: false,
-           exclusive: true,
-           arguments: [
-             {"x-delayed-type", "direct"}
-           ]
-         ],
-         on_failure: :reject_and_requeue
-        }
+         after_connect: &declare_rabbitmq/1,
+         metadata: [:routing_key, :headers],
+         on_failure: :reject}
       ],
       processors: [
         default: [
@@ -38,23 +34,61 @@ def start_link(_args) do
     )
   end
 
+  defp declare_rabbitmq(amqp_channel) do
+    declare_exchanges(amqp_channel)
+    declare_queues(amqp_channel)
+    declare_bindings(amqp_channel)
+  end
+
+  defp declare_exchanges(amqp_channel) do
+    # Main exchange, all messages go here
+    :ok =
+      AMQP.Exchange.declare(amqp_channel, @exchange, :"x-delayed-message",
+        durable: true,
+        arguments: [{"x-delayed-type", :longstr, "topic"}]
+      )
+  end
+
+  defp declare_queues(amqp_channel) do
+    # Main queue, bound to main exchange
+    {:ok, _} = AMQP.Queue.declare(amqp_channel, @queue, durable: true)
+  end
+
+  defp declare_bindings(amqp_channel) do
+    :ok = AMQP.Queue.bind(amqp_channel, @queue, @exchange, routing_key: "#")
+  end
+
+  defp retry_count(:undefined), do: 0
+
+  defp retry_count(headers) do
+    match = Enum.find(headers, fn {k, _t, _v} -> k == @retry_header end)
+
+    if is_nil(match) do
+      0
+    else
+      elem(match, 2)
+    end
+  end
+
   @impl true
-  def handle_message(_, %Message{data: data} = message, _) do
+  def handle_message(_, %Message{data: data, metadata: %{routing_key: routing_key, headers: headers}} = message, _) do
+    Logger.debug("Received message on #{routing_key}")
+
     with {:ok, data} <- Jason.decode(data),
          {module, data} <- Map.pop(data, "__module__"),
          module <- String.to_existing_atom(module),
         :ok <- perform_message(module, data) do
-      Logger.debug("Received message: #{inspect(data)}")
       message
     else
       err ->
-        IO.inspect(err)
-        Message.failed(message, err)
+        retries = retry_count(headers)
+
+        if retries > 5 do
+          Message.failed(message, err)
+        else
+          {:ok, _} = produce(routing_key, data, scheduled_in: 5000, retry_count: retries + 1)
+          message
+        end
     end
   end
 
   defp perform_message(module, args) do
-    IO.inspect(args)
     case module.perform(%Oban.Job{args: args}) do
       :ok ->
         :ok
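The retry loop above keys off the x-retries header: retry_count/1 returns 0 when the header table is :undefined or the header is missing, and otherwise returns the stored value; a failed message is re-published on its own routing key with the count bumped and a 5000 ms delay, and only once the stored count exceeds 5 is it handed to Message.failed/2. A small self-contained mirror of that bookkeeping (illustrative, not the module itself):

# Headers arrive from BroadwayRabbitMQ as {name, type, value} tuples, or :undefined
# when the message carries no header table at all.
retry_header = "x-retries"

retry_count = fn
  :undefined ->
    0

  headers ->
    case Enum.find(headers, fn {k, _t, _v} -> k == retry_header end) do
      nil -> 0
      {_k, _t, count} -> count
    end
end

0 = retry_count.(:undefined)
0 = retry_count.([{"x-delay", :long, 5_000}])
3 = retry_count.([{"x-retries", :long, 3}])

# On failure the hunk above re-publishes with the bumped count and a 5s delay:
#   produce(routing_key, data, scheduled_in: 5000, retry_count: retries + 1)
# so a message is only marked failed once x-retries has climbed past 5.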
@@ -74,7 +108,12 @@ def handle_batch(_, batch, _, _) do
 
   @impl true
   def handle_failed(messages, _) do
-    Logger.error("Failed messages: #{inspect(messages)}")
+    for message <- messages do
+      %Message{data: data, metadata: %{routing_key: topic}, status: {:failed, reason}} = message
+      {:ok, %{"op" => op}} = Jason.decode(data)
+      Logger.error("Processing task on #{topic}(#{op}) failed: #{inspect(reason)}")
+    end
+
     messages
   end
@@ -87,35 +126,65 @@ def children do
     [Pleroma.Broadway]
   end
 
-  defp maybe_add_headers([headers: headers] = opts, key, value) when is_list(headers) do
-    Keyword.put(opts, :headers, [{key, value} | headers])
-  end
-
-  defp maybe_add_headers(opts, key, value) do
-    Keyword.put(opts, :headers, [{key, value}])
-  end
-
-  defp maybe_with_priority(opts, [priority: priority]) when is_integer(priority) do
-    Keyword.put(opts, :priority, priority)
-  end
-
-  defp maybe_with_priority(opts, _), do: opts
-
-  defp maybe_with_delay(opts, [scheduled_at: scheduled_at]) do
-    time_in_ms = DateTime.diff(DateTime.utc_now(), scheduled_at)
-
-    opts
-    |> maybe_add_headers("x-delay", to_string(time_in_ms))
-  end
-
-  defp maybe_with_delay(opts, _), do: opts
+  defp add_headers([headers: headers] = opts, key, type, value) when is_list(headers) do
+    Keyword.put(opts, :headers, [{key, type, value} | headers])
+  end
+
+  defp add_headers(opts, key, type, value) do
+    Keyword.put(opts, :headers, [{key, type, value}])
+  end
+
+  defp maybe_with_priority(opts, params) do
+    if !is_nil(params[:priority]) do
+      Keyword.put(opts, :priority, params[:priority])
+    else
+      opts
+    end
+  end
+
+  defp maybe_schedule_at(opts, params) do
+    if !is_nil(params[:scheduled_at]) do
+      time_in_ms = DateTime.diff(params[:scheduled_at], DateTime.utc_now())
+
+      opts
+      |> add_headers(@delay_header, :long, time_in_ms)
+    else
+      opts
+    end
+  end
+
+  defp maybe_schedule_in(opts, params) do
+    if !is_nil(params[:scheduled_in]) do
+      opts
+      |> add_headers(@delay_header, :long, params[:scheduled_in])
+    else
+      opts
+    end
+  end
+
+  defp maybe_with_retry_count(opts, params) do
+    if !is_nil(params[:retry_count]) do
+      opts
+      |> add_headers(@retry_header, :long, params[:retry_count])
+    else
+      opts
+    end
+  end
 
   def produce(topic, args, opts \\ []) do
-    IO.puts("Producing message on #{topic}: #{inspect(args)}")
     {:ok, connection} = AMQP.Connection.open()
     {:ok, channel} = AMQP.Channel.open(connection)
 
     publish_options =
       []
       |> maybe_with_priority(opts)
-      |> maybe_with_delay(opts)
+      |> maybe_schedule_at(opts)
+      |> maybe_schedule_in(opts)
+      |> maybe_with_retry_count(opts)
 
-    AMQP.Basic.publish(channel, "", @queue, args, publish_options)
-    AMQP.Connection.close(connection)
+    Logger.debug("Sending to #{topic} with #{inspect(publish_options)}")
+    :ok = AMQP.Basic.publish(channel, @exchange, topic, args, publish_options)
+    :ok = AMQP.Connection.close(connection)
+    {:ok, args}
   end
 end
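Taken together, produce/3 now turns its keyword options into AMQP publish options instead of per-queue arguments. A rough walk-through of the option pipeline for a delayed retry, illustrative only, with option names and header types taken from the diff above:

# Illustrative only: what produce/3's publish_options pipeline builds for a retried
# message, e.g. opts = [scheduled_in: 5_000, retry_count: 1]:
#
#   []
#   |> maybe_with_priority(opts)     # no :priority, passes [] through
#   |> maybe_schedule_at(opts)       # no :scheduled_at, passes [] through
#   |> maybe_schedule_in(opts)       # add_headers/4 puts {"x-delay", :long, 5000}
#   |> maybe_with_retry_count(opts)  # add_headers/4 prepends {"x-retries", :long, 1}
#
# which evaluates to:
[headers: [{"x-retries", :long, 1}, {"x-delay", :long, 5_000}]]

A worker-style call exercising the same path would look like Pleroma.Broadway.produce("activity_expiration", Jason.encode!(params), scheduled_in: 5_000), with the queue name taken from the other files in this commit.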
@@ -15,7 +15,6 @@ def backoff(%Job{attempt: attempt}) when is_integer(attempt) do
   @impl Oban.Worker
   def perform(%Job{args: %{"op" => "publish", "activity_id" => activity_id, "object_data" => nil}}) do
     activity = Activity.get_by_id(activity_id)
-    IO.inspect(activity)
     Federator.perform(:publish, activity)
   end
 
@@ -7,7 +7,10 @@ defmodule Pleroma.Workers.PurgeExpiredActivity do
   Worker which purges expired activity.
   """
 
-  use Pleroma.Workers.WorkerHelper, queue: "activity_expiration", max_attempts: 1, unique: [period: :infinity]
+  use Pleroma.Workers.WorkerHelper,
+    queue: "activity_expiration",
+    max_attempts: 1,
+    unique: [period: :infinity]
 
   import Ecto.Query
@@ -7,7 +7,10 @@ defmodule Pleroma.Workers.PurgeExpiredFilter do
   Worker which purges expired filters
   """
 
-  use Pleroma.Workers.WorkerHelper, queue: "filter_expiration", max_attempts: 1, unique: [period: :infinity]
+  use Pleroma.Workers.WorkerHelper,
+    queue: "filter_expiration",
+    max_attempts: 1,
+    unique: [period: :infinity]
 
   import Ecto.Query
@@ -71,7 +71,7 @@ def enqueue(op, params, worker_args \\ []) do
         |> Map.put("__module__", worker)
         |> Map.put("op", op)
 
-      Pleroma.Broadway.produce(unquote(queue), Jason.encode!(params))
+      Pleroma.Broadway.produce(unquote(queue), Jason.encode!(params), worker_args)
     end
   end
 end
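Because worker_args is now forwarded, whatever options a caller hands to a worker's enqueue/3 reach Pleroma.Broadway.produce/3 as publish options, so scheduling survives the move off Oban. A hypothetical call (the op string and params below are placeholders, not taken from this commit):

# Hypothetical usage sketch: op and params are placeholders. worker_args such as
# scheduled_at now reach produce/3, where maybe_schedule_at/2 converts the
# timestamp into an x-delay header on the published message.
Pleroma.Workers.PurgeExpiredActivity.enqueue(
  "some_op",
  %{"activity_id" => "placeholder-id"},
  scheduled_at: DateTime.add(DateTime.utc_now(), 3600, :second)
)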