Don't mess with the cache on metadata update

FloatingGhost 2022-11-08 10:39:01 +00:00
parent 7bbaa8f8e0
commit a0b8e3c842
4 changed files with 16 additions and 21 deletions


@@ -157,7 +157,7 @@ defp cachex_children do
       build_cachex("failed_proxy_url", limit: 2500),
       build_cachex("banned_urls", default_ttl: :timer.hours(24 * 30), limit: 5_000),
       build_cachex("translations", default_ttl: :timer.hours(24 * 30), limit: 2500),
-      build_cachex("instances", default_ttl: :timer.hours(24), limit: 2500)
+      build_cachex("instances", default_ttl: :timer.hours(24), ttl_interval: 1000, limit: 2500)
     ]
   end
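
With a ttl_interval configured, expired entries in the instances cache are purged on a timer instead of lingering until something overwrites them. The sketch below shows the read-through pattern this relies on: on a cache miss (first read, or after a purge), the value is loaded from the database and committed back into the cache. The helper name get_cached/1 is an assumption for illustration; the project's actual Instance.get_cached_by_url/1 may be implemented differently.

# Hypothetical read-through helper built on Cachex.fetch/3; not the
# project's actual implementation.
defmodule InstanceCacheSketch do
  alias Pleroma.Instances.Instance
  alias Pleroma.Repo

  def get_cached(host) do
    Cachex.fetch(:instances_cache, "instances:#{host}", fn _key ->
      # Miss: load from the database and commit into the cache, where it
      # lives until the default TTL expires and the janitor purges it.
      case Repo.get_by(Instance, host: host) do
        nil -> {:ignore, nil}
        %Instance{} = instance -> {:commit, instance}
      end
    end)
  end
end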


@@ -176,7 +176,6 @@ defp do_update_metadata(%URI{host: host} = uri, existing_record) do
         favicon = scrape_favicon(uri)
         nodeinfo = scrape_nodeinfo(uri)

-        {:ok, instance} =
         existing_record
         |> changeset(%{
           host: host,
@@ -185,8 +184,6 @@ defp do_update_metadata(%URI{host: host} = uri, existing_record) do
           metadata_updated_at: NaiveDateTime.utc_now()
         })
         |> Repo.update()
-
-        @cachex.put(:instances_cache, "instances:#{host}", instance)
       else
         {:discard, "Does not require update"}
       end
@@ -205,8 +202,6 @@ defp do_update_metadata(%URI{host: host} = uri, existing_record) do
         metadata_updated_at: NaiveDateTime.utc_now()
       })
       |> Repo.insert()
-
-      @cachex.put(:instances_cache, "instances:#{host}", instance)
     end
   end
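
With the {:ok, instance} = match and the @cachex.put/3 calls gone, each branch of do_update_metadata/2 now ends on the Repo.update/1 or Repo.insert/1 pipeline, so the function returns {:ok, %Instance{}} on success and no longer touches the cache; the stale cached copy simply ages out under the TTL configured above. A rough sketch of how a caller might consume the new return value; the surrounding flow is an assumption, not the project's actual worker code.

# Illustrative caller only; the flow and error handling are assumed.
case Pleroma.Instances.Instance.update_metadata(URI.parse("https://example.social/")) do
  {:ok, %Pleroma.Instances.Instance{} = instance} ->
    # Metadata was refreshed and persisted; nothing is written to the
    # cache here -- the cached entry expires on its own.
    {:ok, instance}

  {:discard, reason} ->
    # The record was fresh enough; an Oban-style job would be discarded.
    {:discard, reason}

  {:error, changeset} ->
    # Validation failed; surface the changeset to the caller.
    {:error, changeset}
end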


@@ -136,7 +136,7 @@ test "Scrapes favicon URLs and nodeinfo" do
       }
     end)

-    assert {:ok, true} ==
+    assert {:ok, %Instance{host: "favicon.example.org"}} =
             Instance.update_metadata(URI.parse("https://favicon.example.org/"))

     {:ok, instance} = Instance.get_cached_by_url("https://favicon.example.org/")
@@ -177,7 +177,7 @@ test "Does not retain favicons that are too long" do
       }
     end)

-    assert {:ok, true} ==
+    assert {:ok, %Instance{host: "long-favicon.example.org"}} =
             Instance.update_metadata(URI.parse("https://long-favicon.example.org/"))

     {:ok, instance} = Instance.get_cached_by_url("https://long-favicon.example.org/")
@@ -214,7 +214,7 @@ test "Handles not getting a favicon URL properly" do
     end)

     refute capture_log(fn ->
-             assert {:ok, true} =
+             assert {:ok, %Instance{host: "no-favicon.example.org"}} =
                      Instance.update_metadata(URI.parse("https://no-favicon.example.org/"))
           end) =~ "Instance.update_metadata(\"https://no-favicon.example.org/\") error: "
   end
@@ -241,7 +241,7 @@ test "doesn't continue scraping nodeinfo if we can't find a link" do
       }
     end)

-    assert {:ok, true} ==
+    assert {:ok, %Instance{host: "bad-nodeinfo.example.org"}} =
             Instance.update_metadata(URI.parse("https://bad-nodeinfo.example.org/"))

     {:ok, instance} = Instance.get_cached_by_url("https://bad-nodeinfo.example.org/")
@@ -277,7 +277,7 @@ test "doesn't store bad json in the nodeinfo" do
       }
     end)

-    assert {:ok, true} ==
+    assert {:ok, %Instance{host: "bad-nodeinfo.example.org"}} =
             Instance.update_metadata(URI.parse("https://bad-nodeinfo.example.org/"))

     {:ok, instance} = Instance.get_cached_by_url("https://bad-nodeinfo.example.org/")
@@ -315,7 +315,7 @@ test "doesn't store incredibly long json nodeinfo" do
       }
     end)

-    assert {:ok, true} ==
+    assert {:ok, %Instance{host: "bad-nodeinfo.example.org"}} =
             Instance.update_metadata(URI.parse("https://bad-nodeinfo.example.org/"))

     {:ok, instance} = Instance.get_cached_by_url("https://bad-nodeinfo.example.org/")
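
The assertions change in two ways: the expected value is no longer {:ok, true} (the result of the old cache write) but the {:ok, %Instance{}} tuple coming straight from Repo.update/Repo.insert, and the check uses the match operator = instead of == so only the named fields have to agree. A small, self-contained illustration of that difference, using a plain map rather than the real schema:

# Equality vs. pattern-match assertions in ExUnit (toy example, not the
# project's test data).
defmodule AssertionStyleTest do
  use ExUnit.Case, async: true

  test "a pattern match ignores fields the test does not pin" do
    result = {:ok, %{host: "favicon.example.org", favicon: "/favicon.png"}}

    # Strict equality fails: the right-hand side carries extra fields.
    refute {:ok, %{host: "favicon.example.org"}} == result

    # The match succeeds: only the fields named in the pattern are checked.
    assert {:ok, %{host: "favicon.example.org"}} = result
  end
end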


@@ -45,7 +45,7 @@ test "renders a report" do
       ReportView.render("show.json", Report.extract_report_info(activity))
       |> Map.delete(:created_at)

-    assert result == expected
+    assert Jason.encode!(result) == Jason.encode!(expected)
   end

   test "includes reported statuses" do