From 97037c0b53472f61f17263f318166f986a25baa5 Mon Sep 17 00:00:00 2001
From: Bryan Fink
Date: Fri, 7 Jul 2023 11:35:01 -0500
Subject: [PATCH 01/12] do not fetch if limit_to_local_content is enabled

Prior to this change, anyone, authenticated or not, could submit a search
query for an activity by URL, and cause the fetcher to go fetch it. That
shouldn't happen if `limit_to_local_content` is set to `:all` or if it's
set to `:unauthenticated` and the query came from an unauthenticated
source.
---
 lib/pleroma/search/database_search.ex | 20 ++++++++++++++------
 1 file changed, 14 insertions(+), 6 deletions(-)

diff --git a/lib/pleroma/search/database_search.ex b/lib/pleroma/search/database_search.ex
index 3735a5fab..8f6bf30b4 100644
--- a/lib/pleroma/search/database_search.ex
+++ b/lib/pleroma/search/database_search.ex
@@ -132,21 +132,29 @@ defp query_with(q, :rum, search_query, :websearch) do
     )
   end
 
-  def maybe_restrict_local(q, user) do
+  def should_restrict_local(user) do
     limit = Pleroma.Config.get([:instance, :limit_to_local_content], :unauthenticated)
 
     case {limit, user} do
-      {:all, _} -> restrict_local(q)
-      {:unauthenticated, %User{}} -> q
-      {:unauthenticated, _} -> restrict_local(q)
-      {false, _} -> q
+      {:all, _} -> true
+      {:unauthenticated, %User{}} -> false
+      {:unauthenticated, _} -> true
+      {false, _} -> false
+    end
+  end
+
+  def maybe_restrict_local(q, user) do
+    case should_restrict_local(user) do
+      true -> restrict_local(q)
+      false -> q
     end
   end
 
   defp restrict_local(q), do: where(q, local: true)
 
   def maybe_fetch(activities, user, search_query) do
-    with true <- Regex.match?(~r/https?:/, search_query),
+    with false <- should_restrict_local(user),
+         true <- Regex.match?(~r/https?:/, search_query),
          {:ok, object} <- Fetcher.fetch_object_from_id(search_query),
          %Activity{} = activity <- Activity.get_create_by_object_ap_id(object.data["id"]),
          true <- Visibility.visible_for_user?(activity, user) do

From a2e397a79d05af35211f313a12ccb7e5ad800571 Mon Sep 17 00:00:00 2001
From: Norm
Date: Tue, 22 Oct 2024 15:56:22 -0400
Subject: [PATCH 02/12] Update asdf install docs in Debian install guide

Instead of trying to update the version of asdf being used, just point
users to the guide on their website.

Ideally we'd do this for Elixir and Erlang as well, but new versions of
those packages may sometimes have compatibility issues with Akkoma. For
now, update those to the latest OTP and Elixir versions known to be
compatible with Akkoma.
---
 docs/docs/installation/debian_based_en.md | 26 ++++++++---------------
 1 file changed, 9 insertions(+), 17 deletions(-)

diff --git a/docs/docs/installation/debian_based_en.md b/docs/docs/installation/debian_based_en.md
index 5dddabe7f..442849e69 100644
--- a/docs/docs/installation/debian_based_en.md
+++ b/docs/docs/installation/debian_based_en.md
@@ -35,32 +35,24 @@ sudo useradd -r -s /bin/false -m -d /var/lib/akkoma -U akkoma
 
 ### Install Elixir and Erlang
 
+#### Using `apt`
 If your distribution packages a recent enough version of Elixir, you can install it directly from the distro repositories and skip to the next section of the guide:
 
 ```shell
 sudo apt install elixir erlang-dev erlang-nox
 ```
 
-Otherwise use [asdf](https://github.com/asdf-vm/asdf) to install the latest versions of Elixir and Erlang.
+#### Using `asdf`
+If your distribution does not have a recent version of Elixir in its repositories, you can use [asdf](https://asdf-vm.com/) to install a newer version of Elixir and Erlang.
First, install some dependencies needed to build Elixir and Erlang: ```shell sudo apt install curl unzip build-essential autoconf m4 libncurses5-dev libssh-dev unixodbc-dev xsltproc libxml2-utils libncurses-dev ``` -Then login to the `akkoma` user and install asdf: -```shell -git clone https://github.com/asdf-vm/asdf.git ~/.asdf --branch v0.11.3 -``` +Then login to the `akkoma` user. -Add the following lines to `~/.bashrc`: -```shell -. "$HOME/.asdf/asdf.sh" -# asdf completions -. "$HOME/.asdf/completions/asdf.bash" -``` - -Restart the shell: +Install asdf by following steps 1 to 3 on [their website](https://asdf-vm.com/guide/getting-started.html), then restart the shell to load asdf: ```shell exec $SHELL ``` @@ -69,15 +61,15 @@ Next install Erlang: ```shell asdf plugin add erlang https://github.com/asdf-vm/asdf-erlang.git export KERL_CONFIGURE_OPTIONS="--disable-debug --without-javac" -asdf install erlang 25.3.2.5 -asdf global erlang 25.3.2.5 +asdf install erlang 26.2.5.4 +asdf global erlang 26.2.5.4 ``` Now install Elixir: ```shell asdf plugin-add elixir https://github.com/asdf-vm/asdf-elixir.git -asdf install elixir 1.15.4-otp-25 -asdf global elixir 1.15.4-otp-25 +asdf install elixir 1.17.3-otp-26 +asdf global elixir 1.17.3-otp-26 ``` Confirm that Elixir is installed correctly by checking the version: From 40da4e88ea24e85ffbd805fdc8d81921a9893cee Mon Sep 17 00:00:00 2001 From: Norm Date: Fri, 25 Oct 2024 11:09:20 -0400 Subject: [PATCH 03/12] Update hashtag prune to account for followed hashtags Currently pruning hashtags with the prune_objects task only accounts for whether that hashtag is associated with an object, but this may lead to a foreign key constraint violation if that hashtag has no objects but is followed by a local user. This adds an additional check to see if that hashtag has any followers before proceeding to delete it. 
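For illustration, the snippet below is a sketch only (not part of the patch): it assumes the `hashtags`, `hashtags_objects` and `user_follows_hashtag` tables referenced in the diff below, the usual `name` column on `hashtags`, and an `iex -S mix` session with the application started. It lists hashtags that have no associated objects but are still followed, i.e. exactly the rows the old prune query would have tried to delete:

```elixir
# Sketch: find hashtags that have no associated objects but are still
# followed by at least one user. Deleting these rows is what previously
# raised the foreign key constraint violation during pruning.
sql = """
SELECT DISTINCT ht.id, ht.name
FROM hashtags AS ht
LEFT JOIN hashtags_objects AS hto ON ht.id = hto.hashtag_id
LEFT JOIN user_follows_hashtag AS ufht ON ht.id = ufht.hashtag_id
WHERE hto.hashtag_id IS NULL
  AND ufht.hashtag_id IS NOT NULL
"""

%{rows: rows} = Pleroma.Repo.query!(sql)

Enum.each(rows, fn [id, name] ->
  IO.puts("hashtag ##{name} (id #{id}) has no objects but is still followed")
end)
```

With the extra UNION branch in the NOT EXISTS subquery below, such hashtags are simply kept instead of tripping the constraint.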
--- lib/mix/tasks/pleroma/database.ex | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/lib/mix/tasks/pleroma/database.ex b/lib/mix/tasks/pleroma/database.ex index 87ccfdff1..f85fe5bea 100644 --- a/lib/mix/tasks/pleroma/database.ex +++ b/lib/mix/tasks/pleroma/database.ex @@ -346,7 +346,10 @@ def run(["prune_objects" | args]) do DELETE FROM hashtags AS ht WHERE NOT EXISTS ( SELECT 1 FROM hashtags_objects hto - WHERE ht.id = hto.hashtag_id) + WHERE ht.id = hto.hashtag_id + UNION + SELECT 1 FROM user_follows_hashtag ufht + WHERE ht.id = ufht.hashtag_id) """ |> Repo.query!() From 88a8086ad38d7d0fc0992c06aa5dff71597fbb0b Mon Sep 17 00:00:00 2001 From: Norm Date: Fri, 25 Oct 2024 12:25:18 -0400 Subject: [PATCH 04/12] Use LEFT JOIN instead of UNION for hashtag pruning --- lib/mix/tasks/pleroma/database.ex | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/lib/mix/tasks/pleroma/database.ex b/lib/mix/tasks/pleroma/database.ex index f85fe5bea..0a09a1c4a 100644 --- a/lib/mix/tasks/pleroma/database.ex +++ b/lib/mix/tasks/pleroma/database.ex @@ -343,13 +343,16 @@ def run(["prune_objects" | args]) do %{:num_rows => del_hashtags} = """ - DELETE FROM hashtags AS ht - WHERE NOT EXISTS ( - SELECT 1 FROM hashtags_objects hto - WHERE ht.id = hto.hashtag_id - UNION - SELECT 1 FROM user_follows_hashtag ufht - WHERE ht.id = ufht.hashtag_id) + DELETE FROM hashtags + USING hashtags AS ht + LEFT JOIN hashtags_objects hto + ON ht.id = hto.hashtag_id + LEFT JOIN user_follows_hashtag ufht + ON ht.id = ufht.hashtag_id + WHERE + hashtags.id = ht.id + AND hto.hashtag_id is NULL + AND ufht.hashtag_id is NULL """ |> Repo.query!() From 6ed5be61ff0cf126968c7e76f7c2f2574735ed31 Mon Sep 17 00:00:00 2001 From: Norm Date: Sat, 26 Oct 2024 18:51:45 -0400 Subject: [PATCH 05/12] docs: Note that Elixir 1.17 has been tested as working --- docs/docs/installation/generic_dependencies.include | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/docs/installation/generic_dependencies.include b/docs/docs/installation/generic_dependencies.include index 87669bd23..f3a347ccd 100644 --- a/docs/docs/installation/generic_dependencies.include +++ b/docs/docs/installation/generic_dependencies.include @@ -1,7 +1,7 @@ ## Required dependencies * PostgreSQL 12+ -* Elixir 1.14+ (currently tested up to 1.16) +* Elixir 1.14+ (currently tested up to 1.17) * Erlang OTP 25+ (currently tested up to OTP26) * git * file / libmagic From f048e0cf1bc7a2ca4c415bb29ae80bdea13d1d4b Mon Sep 17 00:00:00 2001 From: Calvin Lee Date: Mon, 25 Nov 2024 23:18:52 +0000 Subject: [PATCH 06/12] Allow MathML core tags in sanitized content --- config/config.exs | 1 + priv/scrubbers/default.ex | 113 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 114 insertions(+) diff --git a/config/config.exs b/config/config.exs index e919910b3..bca7211d5 100644 --- a/config/config.exs +++ b/config/config.exs @@ -302,6 +302,7 @@ allow_headings: false, allow_tables: false, allow_fonts: false, + allow_math: true, scrub_policy: [ Pleroma.HTML.Scrubber.Default, Pleroma.HTML.Transform.MediaProxy diff --git a/priv/scrubbers/default.ex b/priv/scrubbers/default.ex index 74de910fd..96473203e 100644 --- a/priv/scrubbers/default.ex +++ b/priv/scrubbers/default.ex @@ -124,6 +124,119 @@ defmodule Pleroma.HTML.Scrubber.Default do Meta.allow_tag_with_these_attributes(:font, ["face"]) end + if Pleroma.Config.get!([:markup, :allow_math]) do + Meta.allow_tag_with_these_attributes("annotation", ["encoding"]) + 
Meta.allow_tag_with_these_attributes(:"annotation-xml", ["encoding"]) + + Meta.allow_tag_with_these_attributes(:math, [ + "display", + "displaystyle", + "mathvariant", + "scriptlevel" + ]) + + basic_math_tags = [ + "maction", + "merror", + :mi, + "mmultiscripts", + :mn, + "mphantom", + "mprescripts", + "mroot", + "mrow", + "ms", + "msqrt", + "mstyle", + "msub", + "msubsup", + "msup", + "mtable", + "mtext", + "mtr", + "semantics" + ] + + for tag <- basic_math_tags do + Meta.allow_tag_with_these_attributes(unquote(tag), [ + "mathvariant", + "displaystyle", + "scriptlevel" + ]) + end + + Meta.allow_tag_with_these_attributes("mfrac", [ + "displaystyle", + "linethickness", + "mathvariant", + "scriptlevel" + ]) + + Meta.allow_tag_with_these_attributes(:mo, [ + "displaystyle", + "form", + "largeop", + "lspace", + "mathvariant", + "minsize", + "movablelimits", + "rspace", + "scriptlevel", + "stretchy", + "symmetric" + ]) + + Meta.allow_tag_with_these_attributes("mover", [ + "accent", + "displaystyle", + "mathvariant", + "scriptlevel" + ]) + + Meta.allow_tag_with_these_attributes("mpadded", [ + "depth", + "displaystyle", + "height", + "lspace", + "mathvariant", + "scriptlevel", + "voffset", + "width" + ]) + + Meta.allow_tag_with_these_attributes("mspace", [ + "depth", + "displaystyle", + "height", + "mathvariant", + "scriptlevel", + "width" + ]) + + Meta.allow_tag_with_these_attributes("mtd", [ + "columnspan", + "displaystyle", + "mathvariant", + "rowspan", + "scriptlevel" + ]) + + Meta.allow_tag_with_these_attributes("munder", [ + "accentunder", + "displaystyle", + "mathvariant", + "scriptlevel" + ]) + + Meta.allow_tag_with_these_attributes("munderover", [ + "accent", + "accentunder", + "displaystyle", + "mathvariant", + "scriptlevel" + ]) + end + Meta.allow_tag_with_these_attributes(:center, []) Meta.allow_tag_with_these_attributes(:small, []) From f19d5d13809f044580018d1ff65fa41e0335fa31 Mon Sep 17 00:00:00 2001 From: Norm Date: Tue, 17 Dec 2024 18:30:01 -0500 Subject: [PATCH 07/12] Set customize_hostname_check for Swoosh.Adapters.SMTP This should hopefully fix issues with connecting to SMTP servers with wildcard TLS certificates. Taken from https://erlef.github.io/security-wg/secure_coding_and_deployment_hardening/ssl Fixes https://akkoma.dev/AkkomaGang/akkoma/issues/660 --- lib/pleroma/emails/mailer.ex | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/lib/pleroma/emails/mailer.ex b/lib/pleroma/emails/mailer.ex index 6a79a7694..af513f1f1 100644 --- a/lib/pleroma/emails/mailer.ex +++ b/lib/pleroma/emails/mailer.ex @@ -84,8 +84,14 @@ defp default_config(Swoosh.Adapters.SMTP, conf, _) do cacerts: os_cacerts, versions: [:"tlsv1.2", :"tlsv1.3"], verify: :verify_peer, - # some versions have supposedly issues verifying wildcard certs without this server_name_indication: relay, + # This allows wildcard ceritifcates to be verified properly. + # The :https parameter simply means to use the HTTPS wildcard format + # (as opposed to say LDAP). SMTP servers tend to use the same type of + # certs as HTTPS ones so this should work for most. 
+ customize_hostname_check: [ + match_fun: :public_key.pkix_verify_hostname_match_fun(:https) + ], # the default of 10 is too restrictive depth: 32 ] From 7615a11a1ef826ccf12455e4ad149f9da7f0f7bb Mon Sep 17 00:00:00 2001 From: Oneric Date: Fri, 3 Jan 2025 20:33:41 +0100 Subject: [PATCH 08/12] changelog: fix shuffled and add missing entries --- CHANGELOG.md | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 744e77dc8..3306f47c4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,14 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). ## Fixed - Media proxy no longer attempts to proxy embedded images +- ObjectAge policy no longer lets unlisted posts slip through +- ObjectAge policy no longer leaks belated DMs and follower-only posts +- the NodeINfo endpoint now uses the correct content type + +## Changed +- Anonymous objects now federate completely without an id + adopting a proposed AP spec errata and restoring federation + with e.g. IceShrimp.NET and fedify-based implementations ## 3.13.3 @@ -28,8 +36,6 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - AP objects with additional JSON-LD profiles beyond ActivityStreams can now be fetched - Single-selection polls no longer expose the voter_count; MastoAPI demands it be null and this confused some clients leading to vote distributions >100% -- ObjectAge policy no longer lets unlisted posts slip through -- ObjectAge policy no longer leaks belated DMs and follower-only posts ## Changed - Refactored Rich Media to cache the content in the database. Fetching operations that could block status rendering have been eliminated. From f2e45d4d4bd7a87b5b6aa5abb975244c5eff31b5 Mon Sep 17 00:00:00 2001 From: Oneric Date: Thu, 10 Oct 2024 00:22:40 +0000 Subject: [PATCH 09/12] Teach admin-fe about custom source URLs Matching https://akkoma.dev/AkkomaGang/akkoma-fe/pulls/421 --- CHANGELOG.md | 4 ++++ config/description.exs | 16 +++++++++++++++- 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 744e77dc8..238e23ef8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,10 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). 
## UNRELEASED +## Added +- It is now possible to display custom source URLs in akkoma-fe; + the settings are part of the frontend configuration + ## Fixed - Media proxy no longer attempts to proxy embedded images diff --git a/config/description.exs b/config/description.exs index b69478fdb..63113439a 100644 --- a/config/description.exs +++ b/config/description.exs @@ -1194,7 +1194,9 @@ showInstanceSpecificPanel: false, subjectLineBehavior: "email", theme: "pleroma-dark", - webPushNotifications: false + webPushNotifications: false, + backendCommitUrl: "", + frontendCommitUrl: "" } ], children: [ @@ -1398,6 +1400,18 @@ label: "Stop Gifs", type: :boolean, description: "Whether to pause animated images until they're hovered on" + }, + %{ + key: :backendCommitUrl, + label: "Backend Commit URL", + type: :string, + description: "URL prefix for backend commit hashes" + }, + %{ + key: :frontendCommitUrl, + label: "Frontend Commit URL", + type: :string, + description: "URL prefix for frontend commit hashes" } ] }, From bcfbfbcff594d3b4dc9241ad38df5c1ca5729145 Mon Sep 17 00:00:00 2001 From: Oneric Date: Sun, 2 Jun 2024 21:42:36 +0200 Subject: [PATCH 10/12] Don't try to cleanup remote attachments MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The cleanup attachment worker was run for every deleted post, even if it’s a remote post whose attachments we don't even store. This was especially bad due to attachment cleanup involving a particularly heavy query wasting a bunch of database perf for nil. This was uncovered by comparing statistics from https://akkoma.dev/AkkomaGang/akkoma/issues/784 and https://akkoma.dev/AkkomaGang/akkoma/issues/765#issuecomment-12256 --- CHANGELOG.md | 2 + lib/pleroma/object.ex | 15 +---- .../workers/attachments_cleanup_worker.ex | 49 ++++++++++++--- .../attachments_cleanup_worker_test.exs | 60 +++++++++++++++++++ 4 files changed, 103 insertions(+), 23 deletions(-) create mode 100644 test/pleroma/workers/attachments_cleanup_worker_test.exs diff --git a/CHANGELOG.md b/CHANGELOG.md index 744e77dc8..04186f771 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). 
## Fixed - Media proxy no longer attempts to proxy embedded images +- Fix significant uneccessary overhead of attachment cleanup; + it no longer attempts to cleanup attachments of deleted remote posts ## 3.13.3 diff --git a/lib/pleroma/object.ex b/lib/pleroma/object.ex index 379b361f8..5d84bb286 100644 --- a/lib/pleroma/object.ex +++ b/lib/pleroma/object.ex @@ -9,7 +9,6 @@ defmodule Pleroma.Object do import Ecto.Changeset alias Pleroma.Activity - alias Pleroma.Config alias Pleroma.Hashtag alias Pleroma.Object alias Pleroma.Object.Fetcher @@ -241,23 +240,11 @@ def delete(%Object{data: %{"id" => id}} = object) do with {:ok, _obj} = swap_object_with_tombstone(object), deleted_activity = Activity.delete_all_by_object_ap_id(id), {:ok, _} <- invalid_object_cache(object) do - cleanup_attachments( - Config.get([:instance, :cleanup_attachments]), - %{object: object} - ) - + AttachmentsCleanupWorker.enqueue_if_needed(object.data) {:ok, object, deleted_activity} end end - @spec cleanup_attachments(boolean(), %{required(:object) => map()}) :: - {:ok, Oban.Job.t() | nil} - def cleanup_attachments(true, %{object: _} = params) do - AttachmentsCleanupWorker.enqueue("cleanup_attachments", params) - end - - def cleanup_attachments(_, _), do: {:ok, nil} - def prune(%Object{data: %{"id" => _id}} = object) do with {:ok, object} <- Repo.delete(object), {:ok, _} <- invalid_object_cache(object) do diff --git a/lib/pleroma/workers/attachments_cleanup_worker.ex b/lib/pleroma/workers/attachments_cleanup_worker.ex index f5090dae7..58bbda94b 100644 --- a/lib/pleroma/workers/attachments_cleanup_worker.ex +++ b/lib/pleroma/workers/attachments_cleanup_worker.ex @@ -5,30 +5,61 @@ defmodule Pleroma.Workers.AttachmentsCleanupWorker do import Ecto.Query + alias Pleroma.Config alias Pleroma.Object alias Pleroma.Repo use Pleroma.Workers.WorkerHelper, queue: "attachments_cleanup" + @doc """ + Takes object data and if necessary enqueues a job, + deleting all attachments of the post eligible for cleanup + """ + @spec enqueue_if_needed(map()) :: {:ok, Oban.Job.t()} | {:ok, :skip} | {:error, any()} + def enqueue_if_needed(%{ + "actor" => actor, + "attachment" => [_ | _] = attachments + }) do + with true <- Config.get([:instance, :cleanup_attachments]), + true <- URI.parse(actor).host == Pleroma.Web.Endpoint.host(), + [_ | _] <- attachments do + enqueue("cleanup_attachments", %{"actor" => actor, "attachments" => attachments}) + else + _ -> {:ok, :skip} + end + end + + def enqueue_if_needed(_), do: {:ok, :skip} + @impl Oban.Worker def perform(%Job{ args: %{ "op" => "cleanup_attachments", - "object" => %{"data" => %{"attachment" => [_ | _] = attachments, "actor" => actor}} + "attachments" => [_ | _] = attachments, + "actor" => actor } }) do - if Pleroma.Config.get([:instance, :cleanup_attachments], false) do - attachments - |> Enum.flat_map(fn item -> Enum.map(item["url"], & &1["href"]) end) - |> fetch_objects - |> prepare_objects(actor, Enum.map(attachments, & &1["name"])) - |> filter_objects - |> do_clean - end + attachments + |> Enum.flat_map(fn item -> Enum.map(item["url"], & &1["href"]) end) + |> fetch_objects + |> prepare_objects(actor, Enum.map(attachments, & &1["name"])) + |> filter_objects + |> do_clean {:ok, :success} end + # Left over already enqueued jobs in the old format + # This function clause can be deleted once sufficient time passed after 3.14 + def perform(%Job{ + args: %{ + "op" => "cleanup_attachments", + "object" => %{"data" => data} + } + }) do + enqueue_if_needed(data) + end + def perform(%Job{args: %{"op" => 
"cleanup_attachments", "object" => _object}}), do: {:ok, :skip} defp do_clean({object_ids, attachment_urls}) do diff --git a/test/pleroma/workers/attachments_cleanup_worker_test.exs b/test/pleroma/workers/attachments_cleanup_worker_test.exs new file mode 100644 index 000000000..2212db927 --- /dev/null +++ b/test/pleroma/workers/attachments_cleanup_worker_test.exs @@ -0,0 +1,60 @@ +# Akkoma: Magically expressive social media +# Copyright © 2024 Akkoma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Workers.AttachmentsCleanupWorkerTest do + use Pleroma.DataCase, async: false + use Oban.Testing, repo: Pleroma.Repo + + import Pleroma.Factory + + alias Pleroma.Workers.AttachmentsCleanupWorker + + setup do + clear_config([:instance, :cleanup_attachments], true) + + file = %Plug.Upload{ + content_type: "image/jpeg", + path: Path.absname("test/fixtures/image.jpg"), + filename: "an_image.jpg" + } + + user = insert(:user) + + {:ok, %Pleroma.Object{} = attachment} = + Pleroma.Web.ActivityPub.ActivityPub.upload(file, actor: user.ap_id) + + {:ok, attachment: attachment, user: user} + end + + test "does not enqueue remote post" do + remote_data = %{ + "id" => "https://remote.example/obj/123", + "actor" => "https://remote.example/user/1", + "content" => "content", + "attachment" => [ + %{ + "type" => "Document", + "mediaType" => "image/png", + "name" => "marvellous image", + "url" => "https://remote.example/files/image.png" + } + ] + } + + assert {:ok, :skip} = AttachmentsCleanupWorker.enqueue_if_needed(remote_data) + end + + test "enqueues local post", %{attachment: attachment, user: user} do + local_url = Pleroma.Web.Endpoint.url() + + local_data = %{ + "id" => local_url <> "/obj/123", + "actor" => user.ap_id, + "content" => "content", + "attachment" => [attachment.data] + } + + assert {:ok, %Oban.Job{}} = AttachmentsCleanupWorker.enqueue_if_needed(local_data) + end +end From e8bf4422ff6440d4404ba6a5ed4092e717649f5e Mon Sep 17 00:00:00 2001 From: Oneric Date: Mon, 3 Jun 2024 23:07:10 +0200 Subject: [PATCH 11/12] Delay attachment deletion MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Otherwise attachments have a high chance to disappear with akkoma-fe’s “delete & redraft” feature when cleanup is enabled in the backend. Since we don't know whether a deletion was intended to be part of a redraft process or even if whether the redraft was abandoned we still have to delete attachments eventually. A thirty minute delay should provide sufficient time for redrafting. Fixes: https://akkoma.dev/AkkomaGang/akkoma/issues/775 --- CHANGELOG.md | 4 +++ config/config.exs | 1 + docs/docs/configuration/cheatsheet.md | 1 + .../workers/attachments_cleanup_worker.ex | 6 ++++- .../attachments_cleanup_worker_test.exs | 26 +++++++++++++++++++ 5 files changed, 37 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 04186f771..bd4bcccf9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,10 +6,14 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). 
 ## UNRELEASED
 
+## Added
+- New config option `:instance, :cleanup_attachments_delay`
+
 ## Fixed
 - Media proxy no longer attempts to proxy embedded images
 - Fix significant uneccessary overhead of attachment cleanup;
   it no longer attempts to cleanup attachments of deleted remote posts
+- Fix “Delete & Redraft” often losing attachments if attachment cleanup was enabled
 
 ## 3.13.3
 
diff --git a/config/config.exs b/config/config.exs
index e919910b3..39b53a010 100644
--- a/config/config.exs
+++ b/config/config.exs
@@ -255,6 +255,7 @@
   external_user_synchronization: true,
   extended_nickname_format: true,
   cleanup_attachments: false,
+  cleanup_attachments_delay: 1800,
   multi_factor_authentication: [
     totp: [
       # digits 6 or 8
diff --git a/docs/docs/configuration/cheatsheet.md b/docs/docs/configuration/cheatsheet.md
index 916e1cc0c..9a50fc2bb 100644
--- a/docs/docs/configuration/cheatsheet.md
+++ b/docs/docs/configuration/cheatsheet.md
@@ -58,6 +58,7 @@ To add configuration to your config file, you can copy it from the base config.
 * `registration_reason_length`: Maximum registration reason length (default: `500`).
 * `external_user_synchronization`: Enabling following/followers counters synchronization for external users.
 * `cleanup_attachments`: Remove attachments along with statuses. Does not affect duplicate files and attachments without status. Enabling this will increase load to database when deleting statuses on larger instances.
+* `cleanup_attachments_delay`: How many seconds to wait after post deletion before attempting deletion; useful for “delete & redraft” functionality (default: `1800`)
 * `show_reactions`: Let favourites and emoji reactions be viewed through the API (default: `true`).
 * `password_reset_token_validity`: The time after which reset tokens aren't accepted anymore, in seconds (default: one day).
 * `local_bubble`: Array of domains representing instances closely related to yours. Used to populate the `bubble` timeline.
e.g `["example.com"]`, (default: `[]`) diff --git a/lib/pleroma/workers/attachments_cleanup_worker.ex b/lib/pleroma/workers/attachments_cleanup_worker.ex index 58bbda94b..f1204a861 100644 --- a/lib/pleroma/workers/attachments_cleanup_worker.ex +++ b/lib/pleroma/workers/attachments_cleanup_worker.ex @@ -23,7 +23,11 @@ def enqueue_if_needed(%{ with true <- Config.get([:instance, :cleanup_attachments]), true <- URI.parse(actor).host == Pleroma.Web.Endpoint.host(), [_ | _] <- attachments do - enqueue("cleanup_attachments", %{"actor" => actor, "attachments" => attachments}) + enqueue( + "cleanup_attachments", + %{"actor" => actor, "attachments" => attachments}, + schedule_in: Config.get!([:instance, :cleanup_attachments_delay]) + ) else _ -> {:ok, :skip} end diff --git a/test/pleroma/workers/attachments_cleanup_worker_test.exs b/test/pleroma/workers/attachments_cleanup_worker_test.exs index 2212db927..d180763fb 100644 --- a/test/pleroma/workers/attachments_cleanup_worker_test.exs +++ b/test/pleroma/workers/attachments_cleanup_worker_test.exs @@ -8,7 +8,9 @@ defmodule Pleroma.Workers.AttachmentsCleanupWorkerTest do import Pleroma.Factory + alias Pleroma.Object alias Pleroma.Workers.AttachmentsCleanupWorker + alias Pleroma.Tests.ObanHelpers setup do clear_config([:instance, :cleanup_attachments], true) @@ -57,4 +59,28 @@ test "enqueues local post", %{attachment: attachment, user: user} do assert {:ok, %Oban.Job{}} = AttachmentsCleanupWorker.enqueue_if_needed(local_data) end + + test "doesn't delete immediately", %{attachment: attachment, user: user} do + delay = 6000 + clear_config([:instance, :cleanup_attachments_delay], delay) + + note = insert(:note, %{user: user, data: %{"attachment" => [attachment.data]}}) + + uploads_dir = Pleroma.Config.get!([Pleroma.Uploaders.Local, :uploads]) + %{"url" => [%{"href" => href}]} = attachment.data + path = "#{uploads_dir}/#{Path.basename(href)}" + + assert File.exists?(path) + + Object.delete(note) + Process.sleep(2000) + + assert File.exists?(path) + + ObanHelpers.perform(all_enqueued(worker: Pleroma.Workers.AttachmentsCleanupWorker)) + + assert Object.get_by_id(note.id).data["deleted"] + assert Object.get_by_id(attachment.id) == nil + refute File.exists?(path) + end end From d8c7ed70d0152059d60cd7d492927a7a407675ff Mon Sep 17 00:00:00 2001 From: eviloatmeal Date: Fri, 3 Jan 2025 21:17:32 +0100 Subject: [PATCH 12/12] openbsd: update service file Changes suggested and tested by eviloatmeal Fixes: https://akkoma.dev/AkkomaGang/akkoma/issues/864 --- installation/openbsd/rc.d/akkomad | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/installation/openbsd/rc.d/akkomad b/installation/openbsd/rc.d/akkomad index 68be46c9a..fa3c19b2b 100755 --- a/installation/openbsd/rc.d/akkomad +++ b/installation/openbsd/rc.d/akkomad @@ -11,11 +11,13 @@ # daemon="/usr/local/bin/elixir" -daemon_flags="--detached -S /usr/local/bin/mix phx.server" +daemon_flags="-S /usr/local/bin/mix phx.server" daemon_user="_akkoma" +daemon_execdir="/home/_akkoma/akkoma" . /etc/rc.d/rc.subr +rc_bg="YES" rc_reload=NO pexp="phx.server" @@ -24,7 +26,7 @@ rc_check() { } rc_start() { - ${rcexec} "cd akkoma; ${daemon} ${daemon_flags}" + rc_exec "${daemon} ${daemon_flags}" } rc_stop() {