Add tasker to get nodeinfo

FloatingGhost 2022-10-29 21:51:45 +01:00
parent e272430ef4
commit 881dbea92e
5 changed files with 26 additions and 16 deletions


@@ -32,7 +32,7 @@ defmodule Pleroma.Instances.Instance do
   def changeset(struct, params \\ %{}) do
     struct
-    |> cast(params, [:host, :unreachable_since, :favicon, :favicon_updated_at])
+    |> cast(params, [:host, :unreachable_since, :favicon, :nodeinfo, :metadata_updated_at])
     |> validate_required([:host])
     |> unique_constraint(:host)
   end
@@ -141,34 +141,41 @@ defmodule Pleroma.Instances.Instance do
   def needs_update(nil), do: true

+  def needs_update(%Instance{metadata_updated_at: nil}), do: true
+
   def needs_update(%Instance{metadata_updated_at: metadata_updated_at}) do
     now = NaiveDateTime.utc_now()
     NaiveDateTime.diff(now, metadata_updated_at) > 86_400
   end

   def update_metadata(%URI{host: host} = uri) do
+    Logger.info("Checking metadata for #{host}")
     existing_record = Repo.get_by(Instance, %{host: host})

     if existing_record do
       if needs_update(existing_record) do
+        Logger.info("Updating metadata for #{host}")
         favicon = scrape_favicon(uri)
         nodeinfo = scrape_nodeinfo(uri)

-        %Instance{}
-        |> changeset(%{host: host, favicon: favicon, nodeinfo: nodeinfo})
+        existing_record
+        |> changeset(%{host: host, favicon: favicon, nodeinfo: nodeinfo, metadata_updated_at: NaiveDateTime.utc_now()})
         |> Repo.update()
+      else
+        {:discard, "Does not require update"}
       end
     else
       favicon = scrape_favicon(uri)
       nodeinfo = scrape_nodeinfo(uri)

+      Logger.info("Creating metadata for #{host}")
       %Instance{}
-      |> changeset(%{host: host, favicon: favicon, nodeinfo: nodeinfo})
+      |> changeset(%{host: host, favicon: favicon, nodeinfo: nodeinfo, metadata_updated_at: NaiveDateTime.utc_now()})
       |> Repo.insert()
     end
   end

-  def get_favicon(%URI{host: host} = instance_uri) do
+  def get_favicon(%URI{host: host}) do
     existing_record = Repo.get_by(Instance, %{host: host})

     if existing_record do
@@ -191,7 +198,7 @@ defmodule Pleroma.Instances.Instance do
           {:ok,
            Enum.find(links, &(&1["rel"] == "http://nodeinfo.diaspora.software/ns/schema/2.0"))},
          {:ok, %Tesla.Env{body: data}} <-
-           Pleroma.HTTP.get(ref, [{"accept", "application/json"}], []),
+           Pleroma.HTTP.get(href, [{"accept", "application/json"}], []),
          {:ok, nodeinfo} <- Jason.decode(data) do
       nodeinfo
     else
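
Reviewer note: a rough sketch of how update_metadata/1 behaves after this hunk, based only on the code above. "example.tld" is a placeholder host and the results are abbreviated; this is not part of the commit.

    # Illustrative only; assumes the host is reachable so scraping succeeds.
    uri = URI.parse("https://example.tld/")

    # First sight of the host: favicon and nodeinfo are scraped and a row is inserted,
    # with metadata_updated_at set to NaiveDateTime.utc_now().
    {:ok, %Pleroma.Instances.Instance{}} = Pleroma.Instances.Instance.update_metadata(uri)

    # Called again within 86_400 seconds, the existing record counts as fresh:
    {:discard, "Does not require update"} = Pleroma.Instances.Instance.update_metadata(uri)

The {:discard, reason} tuple becomes the worker's perform/1 return value, presumably so Oban records the job as discarded rather than failed.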
else else


@@ -209,6 +209,8 @@ defmodule Pleroma.Web.ActivityPub.SideEffects do
     reply_depth = (meta[:depth] || 0) + 1

+    IO.puts("QUEUE!")
+    Pleroma.Workers.NodeInfoFetcherWorker.enqueue("process", %{"domain" => activity.data["actor"]})
     # FIXME: Force inReplyTo to replies
     if Pleroma.Web.Federator.allowed_thread_distance?(reply_depth) and
          object.data["replies"] != nil do
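
Reviewer note: enqueue/2 here is the helper generated by Pleroma.Workers.WorkerHelper (see the worker and WorkerHelper hunks below); it builds the Oban job args, including the "op", that the worker's perform/1 pattern-matches on. A minimal sketch with a made-up actor id, not part of the commit:

    # "https://example.tld/users/alice" is an illustrative ActivityPub actor id;
    # the worker later reduces it to the instance root before scraping.
    Pleroma.Workers.NodeInfoFetcherWorker.enqueue(
      "process",
      %{"domain" => "https://example.tld/users/alice"}
    )
    # inserts an Oban job on the "nodeinfo_fetcher" queue with
    # args: %{"op" => "process", "domain" => "https://example.tld/users/alice"}

The IO.puts("QUEUE!") above only prints debug output and does not affect the result.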


@@ -1,19 +1,17 @@
 defmodule Pleroma.Workers.NodeInfoFetcherWorker do
-  use Oban.Worker, queue: :backup, max_attempts: 1
+  use Pleroma.Workers.WorkerHelper, queue: "nodeinfo_fetcher"

   alias Oban.Job
-  alias Pleroma.Instance
+  alias Pleroma.Instances.Instance

-  def process(domain) do
-    %{"op" => "process", "domain" => domain}
-    |> new()
-    |> Oban.insert()
-  end
-
+  @impl Oban.Worker
   def perform(%Job{
         args: %{"op" => "process", "domain" => domain}
       }) do
-    uri = URI.parse(domain)
-    Instance.get_or_update_favicon(uri)
+    uri = domain
+      |> URI.parse()
+      |> URI.merge("/")
+
+    Instance.update_metadata(uri)
   end
 end
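
Reviewer note: switching from use Oban.Worker to use Pleroma.Workers.WorkerHelper, queue: "nodeinfo_fetcher" is what provides the enqueue/2 used in SideEffects above, so the hand-rolled process/1 is dropped. The pipeline in perform/1 normalises the actor id down to the instance root before Instance.update_metadata/1 scrapes it; roughly (illustrative value, not part of the commit):

    "https://example.tld/users/alice"
    |> URI.parse()
    |> URI.merge("/")
    # => %URI{scheme: "https", host: "example.tld", path: "/"}  (other fields elided)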


@@ -42,6 +42,7 @@ defmodule Pleroma.Workers.WorkerHelper do
         unquote(caller_module)
         |> apply(:new, [params, worker_args])
         |> Oban.insert()
+        |> IO.inspect()
       end
     end
   end
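
Reviewer note: the added IO.inspect/1 is a pass-through, so callers of enqueue/2 still receive the Oban.insert/1 result; the job is merely echoed to stdout. Illustrative value, not part of the commit:

    result = %{"op" => "process", "domain" => "example.tld"} |> IO.inspect()
    # prints the map; result is still %{"op" => "process", "domain" => "example.tld"}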


@@ -4,12 +4,14 @@ defmodule Pleroma.Repo.Migrations.AddNodeinfo do
   def up do
     alter table(:instances) do
       add_if_not_exists(:nodeinfo, :map, default: %{})
+      add_if_not_exists(:metadata_updated_at, :naive_datetime)
     end
   end

   def down do
     alter table(:instances) do
       remove_if_exists(:nodeinfo, :map)
+      remove_if_exists(:metadata_updated_at, :naive_datetime)
     end
   end
 end
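
Reviewer note: with these two columns in place, the changeset at the top of this diff can cast nodeinfo and metadata_updated_at. A minimal sketch of the params the tasker now persists (field values are illustrative, not taken from a real instance):

    params = %{
      host: "example.tld",
      nodeinfo: %{"software" => %{"name" => "akkoma", "version" => "3.3.1"}},
      metadata_updated_at: NaiveDateTime.utc_now()
    }

    %Pleroma.Instances.Instance{}
    |> Pleroma.Instances.Instance.changeset(params)
    |> Pleroma.Repo.insert()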