forked from AkkomaGang/akkoma
fix invalidates media url's
parent f9dcf15ecb
commit 2e8a236cef
16 changed files with 346 additions and 114 deletions
@@ -406,6 +406,13 @@
  ],
  whitelist: []

config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Http,
  method: :purge,
  headers: [],
  options: []

config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Script, script_path: nil

config :pleroma, :chat, enabled: true

config :phoenix, :format_encoders, json: Jason

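For orientation, the defaults above ship with invalidation effectively off (empty `Http` options, `script_path: nil`). A minimal sketch of how an operator might turn the new feature on, assuming the Script provider and the nginx purge script path that this commit suggests elsewhere (these are illustrative values, not defaults the commit enables):

```elixir
# Hypothetical operator configuration, e.g. in prod.secret.exs:
config :pleroma, :media_proxy,
  enabled: true,
  invalidation: [
    enabled: true,
    provider: Pleroma.Web.MediaProxy.Invalidation.Script
  ]

config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Script,
  script_path: "./installation/nginx-cache-purge.sh.example"
```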
@@ -1637,6 +1637,31 @@
          "The base URL to access a user-uploaded file. Useful when you want to proxy the media files via another host/CDN fronts.",
        suggestions: ["https://example.com"]
      },
      %{
        key: :invalidation,
        type: :keyword,
        descpiption: "",
        suggestions: [
          enabled: true,
          provider: Pleroma.Web.MediaProxy.Invalidation.Script
        ],
        children: [
          %{
            key: :enabled,
            type: :boolean,
            description: "Enables invalidate media cache"
          },
          %{
            key: :provider,
            type: :module,
            description: "Module which will be used to cache purge.",
            suggestions: [
              Pleroma.Web.MediaProxy.Invalidation.Script,
              Pleroma.Web.MediaProxy.Invalidation.Http
            ]
          }
        ]
      },
      %{
        key: :proxy_opts,
        type: :keyword,

@@ -1709,6 +1734,45 @@
          }
        ]
      },
      %{
        group: :pleroma,
        key: Pleroma.Web.MediaProxy.Invalidation.Http,
        type: :group,
        description: "HTTP invalidate settings",
        children: [
          %{
            key: :method,
            type: :atom,
            description: "HTTP method of request. Default: :purge"
          },
          %{
            key: :headers,
            type: {:list, :tuple},
            description: "HTTP headers of request.",
            suggestions: [{"x-refresh", 1}]
          },
          %{
            key: :options,
            type: :keyword,
            description: "Request options.",
            suggestions: [params: %{ts: "xxx"}]
          }
        ]
      },
      %{
        group: :pleroma,
        key: Pleroma.Web.MediaProxy.Invalidation.Script,
        type: :group,
        description: "Script invalidate settings",
        children: [
          %{
            key: :script_path,
            type: :string,
            description: "Path to shell script. Which will run purge cache.",
            suggestions: ["./installation/nginx-cache-purge.sh.example"]
          }
        ]
      },
      %{
        group: :pleroma,
        key: :gopher,

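The HTTP provider settings described above map onto a config block like the following; the values are simply the suggestions from this hunk (`:purge`, the `x-refresh` header, the `ts` request param), not defaults shipped by the commit:

```elixir
# Assumed example assembled from the suggestions above:
config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Http,
  method: :purge,
  headers: [{"x-refresh", 1}],
  options: [params: %{ts: "xxx"}]
```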
@@ -262,7 +262,7 @@ This section describe PWA manifest instance-specific values. Currently this opti

#### Pleroma.Web.MediaProxy.Invalidation.Script

This strategy allow perform external bash script to purge cache.
This strategy allow perform external shell script to purge cache.
Urls of attachments pass to script as arguments.

* `script_path`: path to external script.

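As a rough sketch of what this strategy does under the hood (based on the `Invalidation.Script` implementation later in this diff; URLs and script path are illustrative): the attachment URLs are wrapped, deduplicated, joined into one space-separated argument and passed to the expanded script path via `System.cmd/2`.

```elixir
# Illustrative only; mirrors Pleroma.Web.MediaProxy.Invalidation.Script further down.
urls = ["https://example.com/media/a.png", "https://example.com/media/b.png"]
args = urls |> List.wrap() |> Enum.uniq() |> Enum.join(" ")
System.cmd(Path.expand("./installation/nginx-cache-purge.sh.example"), [args])
```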
@@ -278,8 +278,8 @@ config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Script,
This strategy allow perform custom http request to purge cache.

* `method`: http method. default is `purge`
* `headers`: http headers. default is empty
* `options`: request options. default is empty
* `headers`: http headers.
* `options`: request options.

Example:
```elixir

@@ -13,7 +13,7 @@ CACHE_DIRECTORY="/tmp/pleroma-media-cache"
## $3 - (optional) the number of parallel processes to run for grep.
get_cache_files() {
    local max_parallel=${3-16}
    find $2 -maxdepth 2 -type d | xargs -P $max_parallel -n 1 grep -E Rl "^KEY:.*$1" | sort -u
    find $2 -maxdepth 2 -type d | xargs -P $max_parallel -n 1 grep -E -Rl "^KEY:.*$1" | sort -u
}

## Removes an item from the given cache zone.

@@ -37,4 +37,4 @@ purge() {

}

purge $1
purge $@

@@ -148,7 +148,8 @@ defp cachex_children do
      build_cachex("idempotency", expiration: idempotency_expiration(), limit: 2500),
      build_cachex("web_resp", limit: 2500),
      build_cachex("emoji_packs", expiration: emoji_packs_expiration(), limit: 10),
      build_cachex("failed_proxy_url", limit: 2500)
      build_cachex("failed_proxy_url", limit: 2500),
      build_cachex("deleted_urls", default_ttl: :timer.hours(24 * 30), limit: 5_000)
    ]
  end

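The new `deleted_urls` cache is a plain Cachex table with a 30-day TTL; the media proxy module later in this diff reads and writes it roughly like this (the URL is illustrative):

```elixir
# Illustrative use of the :deleted_urls_cache added above:
Cachex.put(:deleted_urls_cache, "https://example.com/media/foo.png", true)
Cachex.exists?(:deleted_urls_cache, "https://example.com/media/foo.png")
#=> {:ok, true}
```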
@@ -10,6 +10,8 @@ defmodule Pleroma.Plugs.UploadedMedia do
  import Pleroma.Web.Gettext
  require Logger

  alias Pleroma.Web.MediaProxy

  @behaviour Plug
  # no slashes
  @path "media"

@@ -35,8 +37,7 @@ def call(%{request_path: <<"/", @path, "/", file::binary>>} = conn, opts) do
      %{query_params: %{"name" => name}} = conn ->
        name = String.replace(name, "\"", "\\\"")

        conn
        |> put_resp_header("content-disposition", "filename=\"#{name}\"")
        put_resp_header(conn, "content-disposition", "filename=\"#{name}\"")

      conn ->
        conn

@@ -47,7 +48,8 @@ def call(%{request_path: <<"/", @path, "/", file::binary>>} = conn, opts) do

    with uploader <- Keyword.fetch!(config, :uploader),
         proxy_remote = Keyword.get(config, :proxy_remote, false),
         {:ok, get_method} <- uploader.get_file(file) do
         {:ok, get_method} <- uploader.get_file(file),
         false <- media_is_deleted(conn, get_method) do
      get_media(conn, get_method, proxy_remote, opts)
    else
      _ ->

@@ -59,6 +61,14 @@ def call(%{request_path: <<"/", @path, "/", file::binary>>} = conn, opts) do

  def call(conn, _opts), do: conn

  defp media_is_deleted(%{request_path: path} = _conn, {:static_dir, _}) do
    MediaProxy.in_deleted_urls(Pleroma.Web.base_url() <> path)
  end

  defp media_is_deleted(_, {:url, url}), do: MediaProxy.in_deleted_urls(url)

  defp media_is_deleted(_, _), do: false

  defp get_media(conn, {:static_dir, directory}, _, opts) do
    static_opts =
      Map.get(opts, :static_plug_opts)

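In effect, once a file's URL has been purged the plug refuses to serve it again: `media_is_deleted/2` asks `MediaProxy.in_deleted_urls/1` with either the locally reconstructed URL (static uploads) or the upload's remote URL, and a `true` result drops the request out of the `with` into its error branch. A small sketch of the lookups involved (values illustrative; `media_is_deleted/2` itself is private, this only shows its inputs):

```elixir
# Static uploads: keyed by the local URL rebuilt from the request path.
Pleroma.Web.MediaProxy.in_deleted_urls(Pleroma.Web.base_url() <> "/media/foo.png")

# Remote uploader backends: keyed by the upload's own URL.
Pleroma.Web.MediaProxy.in_deleted_urls("https://cdn.example.com/media/foo.png")
```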
@@ -5,22 +5,33 @@
defmodule Pleroma.Web.MediaProxy.Invalidation do
  @moduledoc false

  @callback purge(list(String.t()), map()) :: {:ok, String.t()} | {:error, String.t()}
  @callback purge(list(String.t()), Keyword.t()) :: {:ok, list(String.t())} | {:error, String.t()}

  alias Pleroma.Config
  alias Pleroma.Web.MediaProxy

  @spec purge(list(String.t())) :: {:ok, String.t()} | {:error, String.t()}
  @spec enabled?() :: boolean()
  def enabled?, do: Config.get([:media_proxy, :invalidation, :enabled])

  @spec purge(list(String.t()) | String.t()) :: {:ok, list(String.t())} | {:error, String.t()}
  def purge(urls) do
    [:media_proxy, :invalidation, :enabled]
    |> Config.get()
    |> do_purge(urls)
    prepared_urls = prepare_urls(urls)

    if enabled?() do
      do_purge(prepared_urls)
    else
      {:ok, prepared_urls}
    end
  end

  defp do_purge(true, urls) do
  defp do_purge(urls) do
    provider = Config.get([:media_proxy, :invalidation, :provider])
    options = Config.get(provider)
    provider.purge(urls, options)
    provider.purge(urls, Config.get(provider))
  end

  defp do_purge(_, _), do: :ok
  def prepare_urls(urls) do
    urls
    |> List.wrap()
    |> Enum.map(&MediaProxy.url(&1))
  end
end

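Usage sketch for the reworked entry point (assuming the media proxy itself is disabled, so `MediaProxy.url/1` returns the input unchanged; the URLs are illustrative): with invalidation disabled the call is a no-op that still returns the prepared URLs, and with it enabled the configured provider is invoked with its own config section as options.

```elixir
alias Pleroma.Web.MediaProxy.Invalidation

# Accepts a single URL or a list; the result always carries the prepared list.
Invalidation.purge("https://example.com/media/foo.png")
#=> {:ok, ["https://example.com/media/foo.png"]}

Invalidation.purge(["https://example.com/media/a.png", "https://example.com/media/b.png"])
#=> {:ok, ["https://example.com/media/a.png", "https://example.com/media/b.png"]}
```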
@@ -10,9 +10,9 @@ defmodule Pleroma.Web.MediaProxy.Invalidation.Http do

  @impl Pleroma.Web.MediaProxy.Invalidation
  def purge(urls, opts) do
    method = Map.get(opts, :method, :purge)
    headers = Map.get(opts, :headers, [])
    options = Map.get(opts, :options, [])
    method = Keyword.get(opts, :method, :purge)
    headers = Keyword.get(opts, :headers, [])
    options = Keyword.get(opts, :options, [])

    Logger.debug("Running cache purge: #{inspect(urls)}")

@@ -22,7 +22,7 @@ def purge(urls, opts) do
      end
    end)

    {:ok, "success"}
    {:ok, urls}
  end

  defp do_purge(method, url, headers, options) do

@@ -10,32 +10,34 @@ defmodule Pleroma.Web.MediaProxy.Invalidation.Script do
  require Logger

  @impl Pleroma.Web.MediaProxy.Invalidation
  def purge(urls, %{script_path: script_path} = _options) do
  def purge(urls, opts) do
    args =
      urls
      |> List.wrap()
      |> Enum.uniq()
      |> Enum.join(" ")

    opts
    |> Keyword.get(:script_path, nil)
    |> do_purge([args])
    |> handle_result(urls)
  end

  defp do_purge(script_path, args) when is_binary(script_path) do
    path = Path.expand(script_path)

    Logger.debug("Running cache purge: #{inspect(urls)}, #{path}")

    case do_purge(path, [args]) do
      {result, exit_status} when exit_status > 0 ->
        Logger.error("Error while cache purge: #{inspect(result)}")
        {:error, inspect(result)}

      _ ->
        {:ok, "success"}
    end
  end

  def purge(_, _), do: {:error, "not found script path"}

  defp do_purge(path, args) do
    Logger.debug("Running cache purge: #{inspect(args)}, #{inspect(path)}")
    System.cmd(path, args)
  rescue
    error -> {inspect(error), 1}
    error -> error
  end

  defp do_purge(_, _), do: {:error, "not found script path"}

  defp handle_result({_result, 0}, urls), do: {:ok, urls}
  defp handle_result({:error, error}, urls), do: handle_result(error, urls)

  defp handle_result(error, _) do
    Logger.error("Error while cache purge: #{inspect(error)}")
    {:error, inspect(error)}
  end
end

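One behavioural consequence worth noting: with no `script_path` configured the provider now falls through to the `do_purge(nil, _)` clause and reports an error tuple via `handle_result/2`, instead of relying on a separate `purge/2` head as before. This is exactly what the updated ScriptTest near the end of this diff asserts:

```elixir
# Mirrors the updated test further down; the URL is only an example.
Pleroma.Web.MediaProxy.Invalidation.Script.purge(
  ["http://example.com/media/example.jpg"],
  []
)
#=> {:error, "\"not found script path\""}
```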
@@ -6,20 +6,53 @@ defmodule Pleroma.Web.MediaProxy do
  alias Pleroma.Config
  alias Pleroma.Upload
  alias Pleroma.Web
  alias Pleroma.Web.MediaProxy.Invalidation

  @base64_opts [padding: false]

  @spec in_deleted_urls(String.t()) :: boolean()
  def in_deleted_urls(url), do: elem(Cachex.exists?(:deleted_urls_cache, url(url)), 1)

  def remove_from_deleted_urls(urls) when is_list(urls) do
    Cachex.execute!(:deleted_urls_cache, fn cache ->
      Enum.each(Invalidation.prepare_urls(urls), &Cachex.del(cache, &1))
    end)
  end

  def remove_from_deleted_urls(url) when is_binary(url) do
    Cachex.del(:deleted_urls_cache, url(url))
  end

  def put_in_deleted_urls(urls) when is_list(urls) do
    Cachex.execute!(:deleted_urls_cache, fn cache ->
      Enum.each(Invalidation.prepare_urls(urls), &Cachex.put(cache, &1, true))
    end)
  end

  def put_in_deleted_urls(url) when is_binary(url) do
    Cachex.put(:deleted_urls_cache, url(url), true)
  end

  def url(url) when is_nil(url) or url == "", do: nil
  def url("/" <> _ = url), do: url

  def url(url) do
    if disabled?() or local?(url) or whitelisted?(url) do
    if disabled?() or not is_url_proxiable?(url) do
      url
    else
      encode_url(url)
    end
  end

  @spec is_url_proxiable?(String.t()) :: boolean()
  def is_url_proxiable?(url) do
    if local?(url) or whitelisted?(url) do
      false
    else
      true
    end
  end

  defp disabled?, do: !Config.get([:media_proxy, :enabled], false)

  defp local?(url), do: String.starts_with?(url, Pleroma.Web.base_url())

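The new cache helpers accept either a single URL or a list, and always key the entry through `url/1`, so the proxied and direct forms of the same attachment resolve to one cache entry. A quick sketch (URL illustrative):

```elixir
alias Pleroma.Web.MediaProxy

url = "https://example.com/media/foo.png"
MediaProxy.put_in_deleted_urls(url)
MediaProxy.in_deleted_urls(url)        #=> true
MediaProxy.remove_from_deleted_urls(url)
MediaProxy.in_deleted_urls(url)        #=> false
```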
@@ -14,10 +14,11 @@ def remote(conn, %{"sig" => sig64, "url" => url64} = params) do
    with config <- Pleroma.Config.get([:media_proxy], []),
         true <- Keyword.get(config, :enabled, false),
         {:ok, url} <- MediaProxy.decode_url(sig64, url64),
         {_, false} <- {:in_deleted_urls, MediaProxy.in_deleted_urls(url)},
         :ok <- filename_matches(params, conn.request_path, url) do
      ReverseProxy.call(conn, url, Keyword.get(config, :proxy_opts, @default_proxy_opts))
    else
      false ->
      error when error in [false, {:in_deleted_urls, true}] ->
        send_resp(conn, 404, Plug.Conn.Status.reason_phrase(404))

      {:error, :invalid_signature} ->

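The extra `with` clause means a signed proxy URL whose target has been purged is answered with 404 by the existing error branch instead of being re-proxied; the tagged tuple only exists so the `else` clause can tell this case apart from the `enabled: false` case. Sketch of the purged path (URL illustrative):

```elixir
alias Pleroma.Web.MediaProxy

MediaProxy.put_in_deleted_urls("https://example.com/media/gone.png")
MediaProxy.in_deleted_urls("https://example.com/media/gone.png")
#=> true, so the with-chain yields {:in_deleted_urls, true} and send_resp(conn, 404, ...) runs
```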
@@ -23,8 +23,25 @@ def perform(
        Enum.map(attachment["url"], & &1["href"])
      end)

    names = Enum.map(attachments, & &1["name"])
    # find all objects for copies of the attachments, name and actor doesn't matter here
    hrefs
    |> fetch_objects
    |> prepare_objects(actor, Enum.map(attachments, & &1["name"]))
    |> Enum.reduce({[], []}, fn {href, %{id: id, count: count}}, {ids, hrefs} ->
      with 1 <- count do
        {ids ++ [id], hrefs ++ [href]}
      else
        _ -> {ids ++ [id], hrefs}
      end
    end)
    |> do_clean

    {:ok, :success}
  end

  def perform(%{"op" => "cleanup_attachments", "object" => _object}, _job), do: {:ok, :skip}

  defp do_clean({object_ids, attachment_urls}) do
    uploader = Pleroma.Config.get([Pleroma.Upload, :uploader])

    prefix =

@@ -39,22 +56,25 @@ def perform(
          "/"
        )

    # find all objects for copies of the attachments, name and actor doesn't matter here
    object_ids_and_hrefs =
      from(o in Object,
        where:
          fragment(
            "to_jsonb(array(select jsonb_array_elements((?)#>'{url}') ->> 'href' where jsonb_typeof((?)#>'{url}') = 'array'))::jsonb \\?| (?)",
            o.data,
            o.data,
            ^hrefs
          )
      )
      # The query above can be time consumptive on large instances until we
      # refactor how uploads are stored
      |> Repo.all(timeout: :infinity)
    Enum.each(attachment_urls, fn href ->
      href
      |> String.trim_leading("#{base_url}/#{prefix}")
      |> uploader.delete_file()
    end)

    delete_objects(object_ids)
  end

  defp delete_objects([_ | _] = object_ids) do
    Repo.delete_all(from(o in Object, where: o.id in ^object_ids))
  end

  defp delete_objects(_), do: :ok

  # we should delete 1 object for any given attachment, but don't delete
  # files if there are more than 1 object for it
  def prepare_objects(objects, actor, names) do
    objects
    |> Enum.reduce(%{}, fn %{
                             id: id,
                             data: %{

@@ -76,31 +96,20 @@ def perform(
        end
      end)
    end)
    |> Enum.map(fn {href, %{id: id, count: count}} ->
      # only delete files that have single instance
      with 1 <- count do
        href
        |> String.trim_leading("#{base_url}/#{prefix}")
        |> uploader.delete_file()

        {id, href}
      else
        _ -> {id, nil}
      end
    end)

    object_ids = Enum.map(object_ids_and_hrefs, fn {id, _} -> id end)

    from(o in Object, where: o.id in ^object_ids)
    |> Repo.delete_all()

    object_ids_and_hrefs
    |> Enum.filter(fn {_, href} -> not is_nil(href) end)
    |> Enum.map(&elem(&1, 1))
    |> Pleroma.Web.MediaProxy.Invalidation.purge()

    {:ok, :success}
  end

  def perform(%{"op" => "cleanup_attachments", "object" => _object}, _job), do: {:ok, :skip}
  def fetch_objects(hrefs) do
    from(o in Object,
      where:
        fragment(
          "to_jsonb(array(select jsonb_array_elements((?)#>'{url}') ->> 'href' where jsonb_typeof((?)#>'{url}') = 'array'))::jsonb \\?| (?)",
          o.data,
          o.data,
          ^hrefs
        )
    )
    # The query above can be time consumptive on large instances until we
    # refactor how uploads are stored
    |> Repo.all(timeout: :infinity)
  end
end

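The reduce in the first hunk partitions the fetched objects: every object id is collected for deletion, but an href is only queued for file removal when exactly one object references it. Illustrative input and output (the map shape matches what `prepare_objects/3` hands to the reduce; ids and URLs are made up):

```elixir
objects = %{
  "https://example.com/media/a.png" => %{id: 1, count: 1},
  "https://example.com/media/b.png" => %{id: 2, count: 3}
}

Enum.reduce(objects, {[], []}, fn {href, %{id: id, count: count}}, {ids, hrefs} ->
  with 1 <- count do
    {ids ++ [id], hrefs ++ [href]}
  else
    _ -> {ids ++ [id], hrefs}
  end
end)
#=> {[1, 2], ["https://example.com/media/a.png"]}
```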
test/web/media_proxy/invalidation_test.exs (new file, 65 additions)

@@ -0,0 +1,65 @@
defmodule Pleroma.Web.MediaProxy.InvalidationTest do
  use ExUnit.Case
  use Pleroma.Tests.Helpers

  alias Pleroma.Config
  alias Pleroma.Web.MediaProxy.Invalidation

  import ExUnit.CaptureLog
  import Mock
  import Tesla.Mock

  setup do: clear_config([:media_proxy])

  setup do
    on_exit(fn -> Cachex.clear(:deleted_urls_cache) end)
    :ok
  end

  describe "Invalidation.Http" do
    test "perform request to clear cache" do
      Config.put([:media_proxy, :enabled], false)
      Config.put([:media_proxy, :invalidation, :enabled], true)
      Config.put([:media_proxy, :invalidation, :provider], Invalidation.Http)

      Config.put([Invalidation.Http], method: :purge, headers: [{"x-refresh", 1}])
      image_url = "http://example.com/media/example.jpg"
      Pleroma.Web.MediaProxy.put_in_deleted_urls(image_url)

      mock(fn
        %{
          method: :purge,
          url: "http://example.com/media/example.jpg",
          headers: [{"x-refresh", 1}]
        } ->
          %Tesla.Env{status: 200}
      end)

      assert capture_log(fn ->
               assert Pleroma.Web.MediaProxy.in_deleted_urls(image_url)
               assert Invalidation.purge([image_url]) == {:ok, [image_url]}
               assert Pleroma.Web.MediaProxy.in_deleted_urls(image_url)
             end) =~ "Running cache purge: [\"#{image_url}\"]"
    end
  end

  describe "Invalidation.Script" do
    test "run script to clear cache" do
      Config.put([:media_proxy, :enabled], false)
      Config.put([:media_proxy, :invalidation, :enabled], true)
      Config.put([:media_proxy, :invalidation, :provider], Invalidation.Script)
      Config.put([Invalidation.Script], script_path: "purge-nginx")

      image_url = "http://example.com/media/example.jpg"
      Pleroma.Web.MediaProxy.put_in_deleted_urls(image_url)

      with_mocks [{System, [], [cmd: fn _, _ -> {"ok", 0} end]}] do
        assert capture_log(fn ->
                 assert Pleroma.Web.MediaProxy.in_deleted_urls(image_url)
                 assert Invalidation.purge([image_url]) == {:ok, [image_url]}
                 assert Pleroma.Web.MediaProxy.in_deleted_urls(image_url)
               end) =~ "Running cache purge: [\"#{image_url}\"]"
      end
    end
  end
end

@@ -5,6 +5,11 @@ defmodule Pleroma.Web.MediaProxy.Invalidation.HttpTest do
  import ExUnit.CaptureLog
  import Tesla.Mock

  setup do
    on_exit(fn -> Cachex.clear(:deleted_urls_cache) end)
    :ok
  end

  test "logs hasn't error message when request is valid" do
    mock(fn
      %{method: :purge, url: "http://example.com/media/example.jpg"} ->

@@ -14,8 +19,8 @@ test "logs hasn't error message when request is valid" do
    refute capture_log(fn ->
             assert Invalidation.Http.purge(
                      ["http://example.com/media/example.jpg"],
                      %{}
                    ) == {:ok, "success"}
                      []
                    ) == {:ok, ["http://example.com/media/example.jpg"]}
           end) =~ "Error while cache purge"
  end

@@ -28,8 +33,8 @@ test "it write error message in logs when request invalid" do
    assert capture_log(fn ->
             assert Invalidation.Http.purge(
                      ["http://example.com/media/example1.jpg"],
                      %{}
                    ) == {:ok, "success"}
                      []
                    ) == {:ok, ["http://example.com/media/example1.jpg"]}
           end) =~ "Error while cache purge: url - http://example.com/media/example1.jpg"
  end
end

@@ -4,17 +4,24 @@ defmodule Pleroma.Web.MediaProxy.Invalidation.ScriptTest do

  import ExUnit.CaptureLog

  setup do
    on_exit(fn -> Cachex.clear(:deleted_urls_cache) end)
    :ok
  end

  test "it logger error when script not found" do
    assert capture_log(fn ->
             assert Invalidation.Script.purge(
                      ["http://example.com/media/example.jpg"],
                      %{script_path: "./example"}
                    ) == {:error, "\"%ErlangError{original: :enoent}\""}
           end) =~ "Error while cache purge: \"%ErlangError{original: :enoent}\""
                      script_path: "./example"
                    ) == {:error, "%ErlangError{original: :enoent}"}
           end) =~ "Error while cache purge: %ErlangError{original: :enoent}"

    capture_log(fn ->
      assert Invalidation.Script.purge(
               ["http://example.com/media/example.jpg"],
               %{}
             ) == {:error, "not found script path"}
               []
             ) == {:error, "\"not found script path\""}
    end)
  end
end

@@ -10,6 +10,11 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyControllerTest do
  setup do: clear_config(:media_proxy)
  setup do: clear_config([Pleroma.Web.Endpoint, :secret_key_base])

  setup do
    on_exit(fn -> Cachex.clear(:deleted_urls_cache) end)
    :ok
  end

  test "it returns 404 when MediaProxy disabled", %{conn: conn} do
    Config.put([:media_proxy, :enabled], false)

@@ -66,4 +71,16 @@ test "it performs ReverseProxy.call when signature valid", %{conn: conn} do
      assert %Plug.Conn{status: :success} = get(conn, url)
    end
  end

  test "it returns 404 when url contains in deleted_urls cache", %{conn: conn} do
    Config.put([:media_proxy, :enabled], true)
    Config.put([Pleroma.Web.Endpoint, :secret_key_base], "00000000000")
    url = Pleroma.Web.MediaProxy.encode_url("https://google.fn/test.png")
    Pleroma.Web.MediaProxy.put_in_deleted_urls("https://google.fn/test.png")

    with_mock Pleroma.ReverseProxy,
      call: fn _conn, _url, _opts -> %Plug.Conn{status: :success} end do
      assert %Plug.Conn{status: 404, resp_body: "Not Found"} = get(conn, url)
    end
  end
end