WIP: Prebuilt docker image #803

Draft
floatingghost wants to merge 37 commits from customizable-docker-db into develop
11 changed files with 110 additions and 40 deletions
Showing only changes of commit 06dbb96b28

View file

@@ -14,6 +14,12 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
## Added
- Implement [FEP-67ff](https://codeberg.org/fediverse/fep/src/branch/main/fep/67ff/fep-67ff.md) (federation documentation)
## Added
- Meilisearch: it is now possible to use separate keys for search and admin actions
## Fixed
- Meilisearch: order of results returned from our REST API now actually matches how Meilisearch ranks results
## 2024.04
## Added

View file

@@ -33,6 +33,7 @@ indexes faster when it can process many posts in a single batch.
> config :pleroma, Pleroma.Search.Meilisearch,
> url: "http://127.0.0.1:7700/",
> private_key: "private key",
> search_key: "search key",
> initial_indexing_chunk_size: 100_000
Information about setting up meilisearch can be found in the
@@ -45,7 +46,7 @@ is hardly usable on a somewhat big instance.
### Private key authentication (optional)
To set the private key, use the `MEILI_MASTER_KEY` environment variable when starting. After setting the _master key_,
you have to get the _private key_, which is actually used for authentication.
you have to get the _private key_ and possibly _search key_, which are actually used for authentication.
=== "OTP"
```sh
@@ -57,7 +58,11 @@ you have to get the _private key_, which is actually used for authentication.
mix pleroma.search.meilisearch show-keys <your master key here>
```
You will see a "Default Admin API Key", this is the key you actually put into your configuration file.
You will see a "Default Admin API Key", this is the key you actually put into
your configuration file as `private_key`. You should also see a
"Default Search API key", put this into your config as `search_key`.
If your version of Meilisearch only showed the former,
just leave `search_key` completely unset in Akkoma's config.
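For reference, a minimal sketch of what the resulting search section of your config could look like; the key values below are placeholders standing in for the keys printed by `show-keys`:
```elixir
config :pleroma, Pleroma.Search.Meilisearch,
  url: "http://127.0.0.1:7700/",
  # placeholder: paste your "Default Admin API Key" here
  private_key: "DEFAULT_ADMIN_API_KEY",
  # placeholder: paste your "Default Search API Key" here
  search_key: "DEFAULT_SEARCH_API_KEY"
```
If `search_key` is left unset, the `private_key` is also used for search requests, so older Meilisearch versions that only expose the admin key keep working.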
### Initial indexing

View file

@@ -16,7 +16,7 @@ defmodule Mix.Pleroma do
:fast_html,
:oban
]
@cachex_children ["object", "user", "scrubber", "web_resp"]
@cachex_children ["object", "user", "scrubber", "web_resp", "http_backoff"]
@doc "Common functions to be reused in mix tasks"
def start_pleroma do
Pleroma.Config.Holder.save_default()

View file

@ -17,6 +17,13 @@ def run(["http", url]) do
|> IO.inspect()
end
def run(["fetch_object", url]) do
start_pleroma()
Pleroma.Object.Fetcher.fetch_object_from_id(url)
|> IO.inspect()
end
def run(["home_timeline", nickname]) do
start_pleroma()
user = Repo.get_by!(User, nickname: nickname)

View file

@ -126,8 +126,12 @@ def run(["show-keys", master_key]) do
decoded = Jason.decode!(result.body)
if decoded["results"] do
Enum.each(decoded["results"], fn %{"description" => desc, "key" => key} ->
IO.puts("#{desc}: #{key}")
Enum.each(decoded["results"], fn
%{"name" => name, "key" => key} ->
IO.puts("#{name}: #{key}")
%{"description" => desc, "key" => key} ->
IO.puts("#{desc}: #{key}")
end)
else
IO.puts("Error fetching the keys, check the master key is correct: #{inspect(decoded)}")

View file

@@ -258,6 +258,27 @@ def get_create_by_object_ap_id(ap_id) when is_binary(ap_id) do
def get_create_by_object_ap_id(_), do: nil
@doc """
Accepts a list of `ap_id`.
Returns a query yielding Create activities for the given objects,
in the same order as they were specified in the input list.
"""
@spec get_presorted_create_by_object_ap_id([String.t()]) :: Ecto.Queryable.t()
def get_presorted_create_by_object_ap_id(ap_ids) do
from(
a in Activity,
join:
ids in fragment(
"SELECT * FROM UNNEST(?::text[]) WITH ORDINALITY AS ids(ap_id, ord)",
^ap_ids
),
on:
ids.ap_id == fragment("?->>'object'", a.data) and
fragment("?->>'type'", a.data) == "Create",
order_by: [asc: ids.ord]
)
end
@doc """
Accepts `ap_id` or list of `ap_id`.
Returns a query.
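A usage sketch for the new `get_presorted_create_by_object_ap_id/1` above (the ids are hypothetical; the pipeline mirrors how the Meilisearch backend uses this query later in this commit). The `UNNEST ... WITH ORDINALITY` join tags every input id with its position (`ord`), and ordering by that column keeps the caller's ranking intact:
```elixir
# Hypothetical ids, already ranked by an external search backend.
ranked_ap_ids = [
  "https://example.com/objects/3",
  "https://example.com/objects/1",
  "https://example.com/objects/2"
]

activities =
  ranked_ap_ids
  |> Pleroma.Activity.get_presorted_create_by_object_ap_id()
  |> Pleroma.Activity.with_preloaded_object()
  |> Pleroma.Repo.all()

# `activities` now holds the Create activities for objects 3, 1 and 2, in exactly that order.
```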

View file

@@ -5,15 +5,27 @@ defmodule Pleroma.Search.Meilisearch do
alias Pleroma.Activity
import Pleroma.Search.DatabaseSearch
import Ecto.Query
@behaviour Pleroma.Search.SearchBackend
defp meili_headers do
private_key = Pleroma.Config.get([Pleroma.Search.Meilisearch, :private_key])
defp meili_headers(key) do
key_header =
if is_nil(key), do: [], else: [{"Authorization", "Bearer #{key}"}]
[{"Content-Type", "application/json"}] ++
if is_nil(private_key), do: [], else: [{"Authorization", "Bearer #{private_key}"}]
[{"Content-Type", "application/json"} | key_header]
end
defp meili_headers_admin do
private_key = Pleroma.Config.get([Pleroma.Search.Meilisearch, :private_key])
meili_headers(private_key)
end
defp meili_headers_search do
search_key =
Pleroma.Config.get([Pleroma.Search.Meilisearch, :search_key]) ||
Pleroma.Config.get([Pleroma.Search.Meilisearch, :private_key])
meili_headers(search_key)
end
def meili_get(path) do
@@ -22,7 +34,7 @@ def meili_get(path) do
result =
Pleroma.HTTP.get(
Path.join(endpoint, path),
meili_headers()
meili_headers_admin()
)
with {:ok, res} <- result do
@@ -30,14 +42,14 @@ def meili_get(path) do
end
end
def meili_post(path, params) do
defp meili_search(params) do
endpoint = Pleroma.Config.get([Pleroma.Search.Meilisearch, :url])
result =
Pleroma.HTTP.post(
Path.join(endpoint, path),
Path.join(endpoint, "/indexes/objects/search"),
Jason.encode!(params),
meili_headers()
meili_headers_search()
)
with {:ok, res} <- result do
@@ -53,7 +65,7 @@ def meili_put(path, params) do
:put,
Path.join(endpoint, path),
Jason.encode!(params),
meili_headers(),
meili_headers_admin(),
[]
)
@@ -70,7 +82,7 @@ def meili_delete!(path) do
:delete,
Path.join(endpoint, path),
"",
meili_headers(),
meili_headers_admin(),
[]
)
end
@@ -81,25 +93,20 @@ def search(user, query, options \\ []) do
author = Keyword.get(options, :author)
res =
meili_post(
"/indexes/objects/search",
%{q: query, offset: offset, limit: limit}
)
meili_search(%{q: query, offset: offset, limit: limit})
with {:ok, result} <- res do
hits = result["hits"] |> Enum.map(& &1["ap"])
try do
hits
|> Activity.create_by_object_ap_id()
|> Activity.with_preloaded_object()
|> Activity.get_presorted_create_by_object_ap_id()
|> Activity.with_preloaded_object()
|> Activity.restrict_deactivated_users()
|> maybe_restrict_local(user)
|> maybe_restrict_author(author)
|> maybe_restrict_blocked(user)
|> maybe_fetch(user, query)
|> order_by([object: obj], desc: obj.data["published"])
|> Pleroma.Repo.all()
rescue
_ -> maybe_fetch([], user, query)

View file

@@ -1824,19 +1824,20 @@ def fetch_and_prepare_featured_from_ap_id(ap_id) do
end
end
def pinned_fetch_task(nil), do: nil
def pinned_fetch_task(%{pinned_objects: pins}) do
if Enum.all?(pins, fn {ap_id, _} ->
Object.get_cached_by_ap_id(ap_id) ||
match?({:ok, _object}, Fetcher.fetch_object_from_id(ap_id))
end) do
:ok
else
:error
end
def enqueue_pin_fetches(%{pinned_objects: pins}) do
# enqueue a task to fetch all pinned objects
Enum.each(pins, fn {ap_id, _} ->
if is_nil(Object.get_cached_by_ap_id(ap_id)) do
Pleroma.Workers.RemoteFetcherWorker.enqueue("fetch_remote", %{
"id" => ap_id,
"depth" => 1
})
end
end)
end
def enqueue_pin_fetches(_), do: nil
def make_user_from_ap_id(ap_id, additional \\ []) do
user = User.get_cached_by_ap_id(ap_id)
@@ -1844,8 +1845,6 @@ def make_user_from_ap_id(ap_id, additional \\ []) do
Transmogrifier.upgrade_user_from_ap_id(ap_id)
else
with {:ok, data} <- fetch_and_prepare_user_from_ap_id(ap_id, additional) do
{:ok, _pid} = Task.start(fn -> pinned_fetch_task(data) end)
user =
if data.ap_id != ap_id do
User.get_cached_by_ap_id(data.ap_id)
@@ -1857,6 +1856,7 @@ def make_user_from_ap_id(ap_id, additional \\ []) do
user
|> User.remote_user_changeset(data)
|> User.update_and_set_cache()
|> tap(fn _ -> enqueue_pin_fetches(data) end)
else
maybe_handle_clashing_nickname(data)
@@ -1864,6 +1864,7 @@ def make_user_from_ap_id(ap_id, additional \\ []) do
|> User.remote_user_changeset()
|> Repo.insert()
|> User.set_cache()
|> tap(fn _ -> enqueue_pin_fetches(data) end)
end
end
end

View file

@@ -1034,7 +1034,7 @@ def upgrade_user_from_ap_id(ap_id) do
with %User{local: false} = user <- User.get_cached_by_ap_id(ap_id),
{:ok, data} <- ActivityPub.fetch_and_prepare_user_from_ap_id(ap_id),
{:ok, user} <- update_user(user, data) do
{:ok, _pid} = Task.start(fn -> ActivityPub.pinned_fetch_task(user) end)
ActivityPub.enqueue_pin_fetches(user)
TransmogrifierWorker.enqueue("user_upgrade", %{"user_id" => user.id})
{:ok, user}
else

View file

@ -41,6 +41,26 @@ test "returns the activity that created an object" do
assert activity == found_activity
end
test "returns activities by object's AP id in requested presorted order" do
a1 = insert(:note_activity)
o1 = Object.normalize(a1, fetch: false).data["id"]
a2 = insert(:note_activity)
o2 = Object.normalize(a2, fetch: false).data["id"]
a3 = insert(:note_activity)
o3 = Object.normalize(a3, fetch: false).data["id"]
a4 = insert(:note_activity)
o4 = Object.normalize(a4, fetch: false).data["id"]
found_activities =
Activity.get_presorted_create_by_object_ap_id([o3, o2, o4, o1])
|> Repo.all()
assert found_activities == [a3, a2, a4, a1]
end
test "preloading a bookmark" do
user = insert(:user)
user2 = insert(:user)

View file

@ -325,9 +325,7 @@ test "fetches user featured collection" do
body: featured_data,
headers: [{"content-type", "application/activity+json"}]
}
end)
Tesla.Mock.mock_global(fn
%{
method: :get,
url: ^object_url
@ -340,7 +338,8 @@ test "fetches user featured collection" do
end)
{:ok, user} = ActivityPub.make_user_from_ap_id(ap_id)
Process.sleep(50)
# wait for oban
Pleroma.Tests.ObanHelpers.perform_all()
assert user.featured_address == featured_url
assert Map.has_key?(user.pinned_objects, object_url)