forked from YokaiRick/akkoma
integrate search endpoint with ES
This commit is contained in:
parent 9002e5155b
commit 449d8ff165
5 changed files with 69 additions and 2 deletions
@@ -852,6 +852,9 @@
  {Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy, [max_running: 5, max_waiting: 5]}
]

config :pleroma, :search,
  provider: :builtin

# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"
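The new :search section defaults to the builtin (database-backed) provider. Judging from the Config.get([:search, :provider]) == :elasticsearch checks added in the other files of this commit, switching an instance over to Elasticsearch would presumably just mean overriding this key in an environment-specific config file; a minimal sketch, assuming that atom is the only value the code checks for:

# Hypothetical override in an environment-specific config file; the
# :elasticsearch atom matches the checks in CommonAPI and the search controller.
config :pleroma, :search,
  provider: :elasticsearch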
@@ -4,6 +4,7 @@ defmodule Pleroma.Elasticsearch.DocumentMappings.Activity do
  def id(obj), do: obj.id
  def encode(%{object: %{data: %{ "type" => "Note" }}} = activity) do
    %{
      _timestamp: activity.inserted_at,
      user: activity.user_actor.nickname,
      content: activity.object.data["content"],
      instance: URI.parse(activity.user_actor.ap_id).host,
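encode/1 flattens a Note activity into the document that gets indexed; the hunk is truncated, so further fields may follow instance. For orientation, a sketch of the kind of map it would produce, with purely illustrative values and only the field names visible in the hunk above:

# Illustrative values only; field names taken from the encode/1 hunk.
%{
  _timestamp: ~N[2022-07-08 12:00:00],
  user: "alice",
  content: "<p>hello fediverse</p>",
  instance: "example.social"
}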
@@ -26,11 +26,20 @@ def bulk_post(data, :activities) do
    end)
    |> List.flatten()

    IO.inspect Elastix.Bulk.post(
    Elastix.Bulk.post(
      url(),
      d,
      index: "activities",
      type: "activity"
    )
  end

  def search(query) do
    Elastix.Search.search(
      url(),
      "activities",
      ["activity"],
      %{query: %{term: %{content: query}}}
    )
  end
end
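The two adjacent Elastix.Bulk.post lines appear to be this file's removed/added pair (the commit has only two deletions overall): the debugging IO.inspect wrapper is dropped in favour of a plain call. The new search/1 simply forwards a term query on the content field to Elastix and hands back the raw HTTP response. A rough usage sketch, assuming url/0 resolves to a reachable Elasticsearch node and the "activities" index has been populated by bulk_post/2:

# Elastix wraps the HTTP call, so a successful search comes back as
# {:ok, %HTTPoison.Response{}}; the parsed hits live under body["hits"]["hits"].
case Pleroma.Elasticsearch.search("fediverse") do
  {:ok, %{body: body}} -> body["hits"]["hits"]
  {:error, reason} -> {:error, reason}
end

Note that a term query does exact-term matching, so a multi-word query string will generally match nothing against an analyzed text field; a match query is the usual choice for full-text search.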
@@ -16,6 +16,8 @@ defmodule Pleroma.Web.CommonAPI do
  alias Pleroma.Web.ActivityPub.Utils
  alias Pleroma.Web.ActivityPub.Visibility
  alias Pleroma.Web.CommonAPI.ActivityDraft
  alias Pleroma.Elasticsearch
  alias Pleroma.Config

  import Pleroma.Web.Gettext
  import Pleroma.Web.CommonAPI.Utils
@@ -395,9 +397,24 @@ def listen(user, data) do
    end
  end

  def maybe_put_into_elasticsearch({:ok, activity}) do
    if Config.get([:search, :provider]) == :elasticsearch do
      actor = Pleroma.Activity.user_actor(activity)

      activity
      |> Map.put(:user_actor, actor)
      |> Elasticsearch.put()
    end
  end

  def maybe_put_into_elasticsearch(_) do
    {:ok, :skipped}
  end

  def post(user, %{status: _} = data) do
    with {:ok, draft} <- ActivityDraft.create(user, data) do
      ActivityPub.create(draft.changes, draft.preview?)
      activity = ActivityPub.create(draft.changes, draft.preview?)
      maybe_put_into_elasticsearch(activity)
      activity
    end
  end
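In post/2 the bare ActivityPub.create call appears to be the file's one deleted line, replaced by a version that binds the result so the freshly created activity can be handed to maybe_put_into_elasticsearch/1; the create result is still what post/2 returns. Indexing only happens when the configured provider is :elasticsearch, and anything other than an {:ok, activity} tuple falls through to the {:ok, :skipped} clause. A minimal usage sketch, assuming user is a %Pleroma.User{} and ActivityPub.create/2 returns {:ok, activity} on success:

# With provider: :elasticsearch the new Note is also pushed to the
# "activities" index as a side effect; the return value is unchanged.
{:ok, activity} = Pleroma.Web.CommonAPI.post(user, %{status: "hello search"})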
@@ -45,6 +45,43 @@ def search(conn, params), do: do_search(:v1, conn, params)

  defp do_search(version, %{assigns: %{user: user}} = conn, %{q: query} = params) do
    query = String.trim(query)
    options = search_options(params, user)

    if Pleroma.Config.get([:search, :provider]) == :elasticsearch do
      elasticsearch_search(conn, query, options)
    else
      builtin_search(version, conn, params)
    end
  end

  defp elasticsearch_search(%{assigns: %{user: user}} = conn, query, options) do
    with {:ok, raw_results} <- Pleroma.Elasticsearch.search(query) do
      results = raw_results
      |> Map.get(:body)
      |> Map.get("hits")
      |> Map.get("hits")
      |> Enum.map(fn result -> result["_id"] end)
      |> Pleroma.Activity.all_by_ids_with_object()

      json(
        conn,
        %{
          accounts: [],
          hashtags: [],
          statuses: StatusView.render("index.json",
            activities: results,
            for: user,
            as: :activity
          )}
      )
    else
      {:error, _} ->
        conn
        |> put_status(:internal_server_error)
        |> json(%{error: "Search failed"})
    end
  end

  defp builtin_search(version, %{assigns: %{user: user}} = conn, %{q: query} = params) do
    options = search_options(params, user)
    timeout = Keyword.get(Repo.config(), :timeout, 15_000)
    default_values = %{"statuses" => [], "accounts" => [], "hashtags" => []}
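On the Elasticsearch path the controller unpacks the standard search response, keeps only the hit IDs, and rehydrates them from the database with Pleroma.Activity.all_by_ids_with_object/1; accounts and hashtags are always returned empty, and the options argument goes unused in the branch shown. A standalone sketch of the hit-extraction step, with a hand-written body standing in for what Elastix would return (values illustrative):

# Illustrative response body in the standard Elasticsearch shape.
body = %{
  "hits" => %{
    "hits" => [
      %{"_id" => "AM7aiSPcdcTVGpSZ", "_score" => 1.2},
      %{"_id" => "AM7ajA5vXb93kXBk", "_score" => 0.9}
    ]
  }
}

body
|> Map.get("hits")
|> Map.get("hits")
|> Enum.map(fn hit -> hit["_id"] end)
# => ["AM7aiSPcdcTVGpSZ", "AM7ajA5vXb93kXBk"]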