forked from AkkomaGang/akkoma
Merge branch 'develop' of git.pleroma.social:pleroma/pleroma into unlisted
commit 21efda2edb
34 changed files with 256 additions and 125 deletions
@@ -5,31 +5,32 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).

 ## Unreleased

-### Added
+### Changed

-- MRF policy to rewrite bot posts scope from public to unlisted
+- Renamed `:await_up_timeout` in `:connections_pool` namespace to `:connect_timeout`, old name is deprecated.
+- Renamed `:timeout` in `pools` namespace to `:recv_timeout`, old name is deprecated.

 ### Removed

 - **Breaking:** Removed `Pleroma.Workers.Cron.StatsWorker` setting from Oban `:crontab`.


 ## unreleased-patch - ???

 ### Added

 - Rich media failure tracking (along with `:failure_backoff` option)
+- MRF policy to rewrite bot posts scope from public to unlisted

 ### Fixed

 - Possible OOM errors with the default HTTP adapter
+- Fixed uploading webp images when the Exiftool Upload Filter is enabled by skipping them
 - Mastodon API: Search parameter `following` now correctly returns the followings rather than the followers
 - Mastodon API: Timelines hanging for (`number of posts with links * rich media timeout`) in the worst case.
   Reduced to just rich media timeout.
 - Mastodon API: Cards being wrong for preview statuses due to cache key collision
 - Password resets no longer processed for deactivated accounts


 ## [2.1.0] - 2020-08-28

 ### Changed
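For readers following the two renames announced above, here is a minimal before/after sketch of the affected settings. It is not part of the commit; the values are the defaults from the config hunk below, and both old names keep working for now while logging a deprecation warning.

```elixir
# Old, now-deprecated option names
config :pleroma, :connections_pool, await_up_timeout: 5_000
config :pleroma, :pools, federation: [size: 50, max_waiting: 10, timeout: 10_000]

# New option names introduced by this change
config :pleroma, :connections_pool, connect_timeout: 5_000
config :pleroma, :pools, federation: [size: 50, max_waiting: 10, recv_timeout: 10_000]
```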
@@ -735,28 +735,28 @@
   max_connections: 250,
   max_idle_time: 30_000,
   retry: 0,
-  await_up_timeout: 5_000
+  connect_timeout: 5_000

 config :pleroma, :pools,
   federation: [
     size: 50,
     max_waiting: 10,
-    timeout: 10_000
+    recv_timeout: 10_000
   ],
   media: [
     size: 50,
     max_waiting: 10,
-    timeout: 10_000
+    recv_timeout: 10_000
   ],
   upload: [
     size: 25,
     max_waiting: 5,
-    timeout: 15_000
+    recv_timeout: 15_000
   ],
   default: [
     size: 10,
     max_waiting: 2,
-    timeout: 5_000
+    recv_timeout: 5_000
   ]

 config :pleroma, :hackney_pools,
@@ -3386,7 +3386,7 @@
           suggestions: [250]
         },
         %{
-          key: :await_up_timeout,
+          key: :connect_timeout,
           type: :integer,
           description: "Timeout while `gun` will wait until connection is up. Default: 5000ms.",
           suggestions: [5000]
@@ -3424,6 +3424,12 @@
           description:
             "Maximum number of requests waiting for other requests to finish. After this number is reached, the pool will start returning errrors when a new request is made",
           suggestions: [10]
+        },
+        %{
+          key: :recv_timeout,
+          type: :integer,
+          description: "Timeout for the pool while gun will wait for response",
+          suggestions: [10_000]
         }
       ]
     }
@@ -114,7 +114,7 @@

 config :pleroma, Pleroma.Web.ApiSpec.CastAndValidate, strict: true

-config :pleroma, :instances_favicons, enabled: true
+config :pleroma, :instances_favicons, enabled: false

 config :pleroma, Pleroma.Uploaders.S3,
   bucket: nil,
@@ -499,7 +499,7 @@ Settings for HTTP connection pool.
 * `:connection_acquisition_wait` - Timeout to acquire a connection from pool.The total max time is this value multiplied by the number of retries.
 * `connection_acquisition_retries` - Number of attempts to acquire the connection from the pool if it is overloaded. Each attempt is timed `:connection_acquisition_wait` apart.
 * `:max_connections` - Maximum number of connections in the pool.
-* `:await_up_timeout` - Timeout to connect to the host.
+* `:connect_timeout` - Timeout to connect to the host.
 * `:reclaim_multiplier` - Multiplied by `:max_connections` this will be the maximum number of idle connections that will be reclaimed in case the pool is overloaded.

 ### :pools
@@ -518,7 +518,7 @@ There are four pools used:
 For each pool, the options are:

 * `:size` - limit to how much requests can be concurrently executed.
-* `:timeout` - timeout while `gun` will wait for response
+* `:recv_timeout` - timeout while `gun` will wait for response
 * `:max_waiting` - limit to how much requests can be waiting for others to finish, after this is reached, subsequent requests will be dropped.

 ## Captcha
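Alongside the renamed pool option, per-request overrides are now passed to `Pleroma.HTTP` as a flat keyword list rather than being wrapped in `adapter: [...]`, as the updated call sites later in this diff show. An illustrative call, not taken verbatim from the documentation (the URL is a placeholder):

```elixir
# Fetch through the :media pool with a longer receive timeout,
# using the flattened per-request option style from this commit.
Pleroma.HTTP.get("https://example.com/large-file", [], pool: :media, recv_timeout: 120_000)
```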
@@ -91,20 +91,17 @@ def run(["adapters"]) do
       "Without conn and without pool" => fn ->
         {:ok, %Tesla.Env{}} =
           Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500", [],
-            adapter: [pool: :no_pool, receive_conn: false]
+            pool: :no_pool,
+            receive_conn: false
           )
       end,
       "Without conn and with pool" => fn ->
         {:ok, %Tesla.Env{}} =
-          Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500", [],
-            adapter: [receive_conn: false]
-          )
+          Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500", [], receive_conn: false)
       end,
       "With reused conn and without pool" => fn ->
         {:ok, %Tesla.Env{}} =
-          Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500", [],
-            adapter: [pool: :no_pool]
-          )
+          Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500", [], pool: :no_pool)
       end,
       "With reused conn and with pool" => fn ->
         {:ok, %Tesla.Env{}} = Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500")
@@ -124,9 +124,7 @@ defp download_build(frontend_info, dest) do
     url = String.replace(frontend_info["build_url"], "${ref}", frontend_info["ref"])

     with {:ok, %{status: 200, body: zip_body}} <-
-           Pleroma.HTTP.get(url, [],
-             adapter: [pool: :media, timeout: 120_000, recv_timeout: 120_000]
-           ) do
+           Pleroma.HTTP.get(url, [], pool: :media, recv_timeout: 120_000) do
       unzip(zip_body, dest)
     else
       e -> {:error, e}
@@ -56,6 +56,7 @@ def warn do
     check_old_mrf_config()
     check_media_proxy_whitelist_config()
     check_welcome_message_config()
+    check_gun_pool_options()
   end

   def check_welcome_message_config do
@@ -115,4 +116,46 @@ def check_media_proxy_whitelist_config do
       """)
     end
   end
+
+  def check_gun_pool_options do
+    pool_config = Config.get(:connections_pool)
+
+    if timeout = pool_config[:await_up_timeout] do
+      Logger.warn("""
+      !!!DEPRECATION WARNING!!!
+      Your config is using old setting name `await_up_timeout` instead of `connect_timeout`. Setting should work for now, but you are advised to change format to scheme with port to prevent possible issues later.
+      """)
+
+      Config.put(:connections_pool, Keyword.put_new(pool_config, :connect_timeout, timeout))
+    end
+
+    pools_configs = Config.get(:pools)
+
+    warning_preface = """
+    !!!DEPRECATION WARNING!!!
+    Your config is using old setting name `timeout` instead of `recv_timeout` in pool settings. Setting should work for now, but you are advised to change format to scheme with port to prevent possible issues later.
+    """
+
+    updated_config =
+      Enum.reduce(pools_configs, [], fn {pool_name, config}, acc ->
+        if timeout = config[:timeout] do
+          Keyword.put(acc, pool_name, Keyword.put_new(config, :recv_timeout, timeout))
+        else
+          acc
+        end
+      end)
+
+    if updated_config != [] do
+      pool_warnings =
+        updated_config
+        |> Keyword.keys()
+        |> Enum.map(fn pool_name ->
+          "\n* `:timeout` options in #{pool_name} pool is now `:recv_timeout`"
+        end)
+
+      Logger.warn(Enum.join([warning_preface | pool_warnings]))
+
+      Config.put(:pools, updated_config)
+    end
+  end
 end
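As a rough sketch of what the new check does at runtime (this snippet is illustrative and not part of the commit): when the deprecated key is still present, its value is copied to the new key with `Keyword.put_new/3`, so the old setting keeps working while the warning is logged.

```elixir
# Assume the runtime config still carries the deprecated key:
#   Pleroma.Config.get(:connections_pool)[:await_up_timeout] == 5_000
Pleroma.Config.DeprecationWarnings.check_gun_pool_options()
# Logs the "!!!DEPRECATION WARNING!!!" message shown above, then:
Pleroma.Config.get(:connections_pool)[:connect_timeout]
# => 5_000
```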
@@ -13,7 +13,7 @@ def open(%URI{} = uri, opts) do
     opts =
       opts
       |> Enum.into(%{})
-      |> Map.put_new(:await_up_timeout, pool_opts[:await_up_timeout] || 5_000)
+      |> Map.put_new(:connect_timeout, pool_opts[:connect_timeout] || 5_000)
      |> Map.put_new(:supervise, false)
      |> maybe_add_tls_opts(uri)

@@ -50,7 +50,7 @@ defp do_open(uri, %{proxy: {proxy_host, proxy_port}} = opts) do

     with open_opts <- Map.delete(opts, :tls_opts),
          {:ok, conn} <- Gun.open(proxy_host, proxy_port, open_opts),
-         {:ok, _} <- Gun.await_up(conn, opts[:await_up_timeout]),
+         {:ok, _} <- Gun.await_up(conn, opts[:connect_timeout]),
          stream <- Gun.connect(conn, connect_opts),
          {:response, :fin, 200, _} <- Gun.await(conn, stream) do
       {:ok, conn}
@@ -88,7 +88,7 @@ defp do_open(uri, %{proxy: {proxy_type, proxy_host, proxy_port}} = opts) do
       |> Map.put(:socks_opts, socks_opts)

     with {:ok, conn} <- Gun.open(proxy_host, proxy_port, opts),
-         {:ok, _} <- Gun.await_up(conn, opts[:await_up_timeout]) do
+         {:ok, _} <- Gun.await_up(conn, opts[:connect_timeout]) do
       {:ok, conn}
     else
       error ->
@@ -106,7 +106,7 @@ defp do_open(%URI{host: host, port: port} = uri, opts) do
     host = Pleroma.HTTP.AdapterHelper.parse_host(host)

     with {:ok, conn} <- Gun.open(host, port, opts),
-         {:ok, _} <- Gun.await_up(conn, opts[:await_up_timeout]) do
+         {:ok, _} <- Gun.await_up(conn, opts[:connect_timeout]) do
       {:ok, conn}
     else
       error ->
@@ -6,7 +6,7 @@ defmodule Pleroma.HTTP.AdapterHelper do
   @moduledoc """
   Configure Tesla.Client with default and customized adapter options.
   """
-  @defaults [pool: :federation]
+  @defaults [pool: :federation, connect_timeout: 5_000, recv_timeout: 5_000]

   @type proxy_type() :: :socks4 | :socks5
   @type host() :: charlist() | :inet.ip_address()
@@ -11,12 +11,8 @@ defmodule Pleroma.HTTP.AdapterHelper.Gun do
   require Logger

   @defaults [
-    connect_timeout: 5_000,
-    domain_lookup_timeout: 5_000,
-    tls_handshake_timeout: 5_000,
     retry: 1,
-    retry_timeout: 1000,
-    await_up_timeout: 5_000
+    retry_timeout: 1_000
   ]

   @type pool() :: :federation | :upload | :media | :default
@@ -45,15 +41,17 @@ defp add_scheme_opts(opts, %{scheme: "https"}) do
   end

   defp put_timeout(opts) do
+    {recv_timeout, opts} = Keyword.pop(opts, :recv_timeout, pool_timeout(opts[:pool]))
     # this is the timeout to receive a message from Gun
-    Keyword.put_new(opts, :timeout, pool_timeout(opts[:pool]))
+    # `:timeout` key is used in Tesla
+    Keyword.put(opts, :timeout, recv_timeout)
   end

   @spec pool_timeout(pool()) :: non_neg_integer()
   def pool_timeout(pool) do
-    default = Config.get([:pools, :default, :timeout], 5_000)
+    default = Config.get([:pools, :default, :recv_timeout], 5_000)

-    Config.get([:pools, pool, :timeout], default)
+    Config.get([:pools, pool, :recv_timeout], default)
   end

   @prefix Pleroma.Gun.ConnectionPool
@@ -2,11 +2,8 @@ defmodule Pleroma.HTTP.AdapterHelper.Hackney do
   @behaviour Pleroma.HTTP.AdapterHelper

   @defaults [
-    connect_timeout: 10_000,
-    recv_timeout: 20_000,
     follow_redirect: true,
-    force_redirect: true,
-    pool: :federation
+    force_redirect: true
   ]

   @spec options(keyword(), URI.t()) :: keyword()
@@ -19,6 +16,7 @@ def options(connection_opts \\ [], %URI{} = uri) do
     |> Keyword.merge(config_opts)
     |> Keyword.merge(connection_opts)
     |> add_scheme_opts(uri)
+    |> maybe_add_with_body()
     |> Pleroma.HTTP.AdapterHelper.maybe_add_proxy(proxy)
   end
@@ -27,4 +25,12 @@ defp add_scheme_opts(opts, %URI{scheme: "https"}) do
   end

   defp add_scheme_opts(opts, _), do: opts
+
+  defp maybe_add_with_body(opts) do
+    if opts[:max_body] do
+      Keyword.put(opts, :with_body, true)
+    else
+      opts
+    end
+  end
 end
@@ -11,7 +11,7 @@ defmodule Pleroma.HTTP.ExAws do

   @impl true
   def request(method, url, body \\ "", headers \\ [], http_opts \\ []) do
-    http_opts = Keyword.put_new(http_opts, :adapter, pool: :upload)
+    http_opts = Keyword.put_new(http_opts, :pool, :upload)

     case HTTP.request(method, url, body, headers, http_opts) do
       {:ok, env} ->
@@ -60,7 +60,7 @@ def post(url, body, headers \\ [], options \\ []),
           {:ok, Env.t()} | {:error, any()}
   def request(method, url, body, headers, options) when is_binary(url) do
     uri = URI.parse(url)
-    adapter_opts = AdapterHelper.options(uri, options[:adapter] || [])
+    adapter_opts = AdapterHelper.options(uri, options || [])

     options = put_in(options[:adapter], adapter_opts)
     params = options[:params] || []
@@ -11,7 +11,7 @@ defmodule Pleroma.HTTP.Tzdata do

   @impl true
   def get(url, headers, options) do
-    options = Keyword.put_new(options, :adapter, pool: :default)
+    options = Keyword.put_new(options, :pool, :default)

     with {:ok, %Tesla.Env{} = env} <- HTTP.get(url, headers, options) do
       {:ok, {env.status, env.headers, env.body}}
@@ -20,7 +20,7 @@ def get(url, headers, options) do

   @impl true
   def head(url, headers, options) do
-    options = Keyword.put_new(options, :adapter, pool: :default)
+    options = Keyword.put_new(options, :pool, :default)

     with {:ok, %Tesla.Env{} = env} <- HTTP.head(url, headers, options) do
       {:ok, {env.status, env.headers}}
@@ -15,7 +15,11 @@ defmodule Pleroma.Upload.Filter do

   require Logger

-  @callback filter(Pleroma.Upload.t()) :: :ok | {:ok, Pleroma.Upload.t()} | {:error, any()}
+  @callback filter(Pleroma.Upload.t()) ::
+              {:ok, :filtered}
+              | {:ok, :noop}
+              | {:ok, :filtered, Pleroma.Upload.t()}
+              | {:error, any()}

   @spec filter([module()], Pleroma.Upload.t()) :: {:ok, Pleroma.Upload.t()} | {:error, any()}
@@ -25,10 +29,13 @@ def filter([], upload) do

   def filter([filter | rest], upload) do
     case filter.filter(upload) do
-      :ok ->
+      {:ok, :filtered} ->
         filter(rest, upload)

-      {:ok, upload} ->
+      {:ok, :filtered, upload} ->
+        filter(rest, upload)
+
+      {:ok, :noop} ->
         filter(rest, upload)

       error ->
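To make the new filter contract concrete, here is a minimal custom upload filter written against the callback shown above. The module name and its trivial logic are illustrative only and are not part of this commit.

```elixir
defmodule MyInstance.Upload.Filter.Example do
  @behaviour Pleroma.Upload.Filter

  # Returning {:ok, :filtered, upload} passes a (possibly modified) upload on
  # to the next filter, mirroring AnonymizeFilename/Dedupe in this diff.
  @impl true
  def filter(%Pleroma.Upload{content_type: "image" <> _} = upload) do
    {:ok, :filtered, upload}
  end

  # Anything this filter does not care about is skipped with {:ok, :noop}.
  def filter(_upload), do: {:ok, :noop}
end
```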
@@ -16,9 +16,11 @@ defmodule Pleroma.Upload.Filter.AnonymizeFilename do
   def filter(%Upload{name: name} = upload) do
     extension = List.last(String.split(name, "."))
     name = predefined_name(extension) || random(extension)
-    {:ok, %Upload{upload | name: name}}
+    {:ok, :filtered, %Upload{upload | name: name}}
   end

+  def filter(_), do: {:ok, :noop}
+
   @spec predefined_name(String.t()) :: String.t() | nil
   defp predefined_name(extension) do
     with name when not is_nil(name) <- Config.get([__MODULE__, :text]),
@@ -17,8 +17,8 @@ def filter(%Upload{name: name, tempfile: tempfile} = upload) do
       |> Base.encode16(case: :lower)

     filename = shasum <> "." <> extension
-    {:ok, %Upload{upload | id: shasum, path: filename}}
+    {:ok, :filtered, %Upload{upload | id: shasum, path: filename}}
   end

-  def filter(_), do: :ok
+  def filter(_), do: {:ok, :noop}
 end
@@ -9,11 +9,15 @@ defmodule Pleroma.Upload.Filter.Exiftool do
   """
   @behaviour Pleroma.Upload.Filter

-  @spec filter(Pleroma.Upload.t()) :: :ok | {:error, String.t()}
+  @spec filter(Pleroma.Upload.t()) :: {:ok, any()} | {:error, String.t()}
+
+  # webp is not compatible with exiftool at this time
+  def filter(%Pleroma.Upload{content_type: "image/webp"}), do: {:ok, :noop}
+
   def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do
     try do
       case System.cmd("exiftool", ["-overwrite_original", "-gps:all=", file], parallelism: true) do
-        {_response, 0} -> :ok
+        {_response, 0} -> {:ok, :filtered}
         {error, 1} -> {:error, error}
       end
     rescue
@@ -22,5 +26,5 @@ def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do
     end
   end

-  def filter(_), do: :ok
+  def filter(_), do: {:ok, :noop}
 end
@@ -38,16 +38,16 @@ defmodule Pleroma.Upload.Filter.Mogrifun do
     [{"fill", "yellow"}, {"tint", "40"}]
   ]

-  @spec filter(Pleroma.Upload.t()) :: :ok | {:error, String.t()}
+  @spec filter(Pleroma.Upload.t()) :: {:ok, atom()} | {:error, String.t()}
   def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do
     try do
       Filter.Mogrify.do_filter(file, [Enum.random(@filters)])
-      :ok
+      {:ok, :filtered}
     rescue
       _e in ErlangError ->
         {:error, "mogrify command not found"}
     end
   end

-  def filter(_), do: :ok
+  def filter(_), do: {:ok, :noop}
 end
@@ -8,18 +8,18 @@ defmodule Pleroma.Upload.Filter.Mogrify do
   @type conversion :: action :: String.t() | {action :: String.t(), opts :: String.t()}
   @type conversions :: conversion() | [conversion()]

-  @spec filter(Pleroma.Upload.t()) :: :ok | {:error, String.t()}
+  @spec filter(Pleroma.Upload.t()) :: {:ok, :atom} | {:error, String.t()}
   def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do
     try do
       do_filter(file, Pleroma.Config.get!([__MODULE__, :args]))
-      :ok
+      {:ok, :filtered}
     rescue
       _e in ErlangError ->
         {:error, "mogrify command not found"}
     end
   end

-  def filter(_), do: :ok
+  def filter(_), do: {:ok, :noop}

   def do_filter(file, filters) do
     file
@@ -13,22 +13,16 @@ defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy do
   require Logger

   @options [
-    pool: :media
+    pool: :media,
+    recv_timeout: 10_000
   ]

   def perform(:prefetch, url) do
     Logger.debug("Prefetching #{inspect(url)}")

-    opts =
-      if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Hackney do
-        Keyword.put(@options, :recv_timeout, 10_000)
-      else
-        @options
-      end
-
     url
     |> MediaProxy.url()
-    |> HTTP.get([], adapter: opts)
+    |> HTTP.get([], @options)
   end

   def perform(:preload, %{"object" => %{"attachment" => attachments}} = _message) do
@@ -5,7 +5,8 @@
 defmodule Pleroma.Web.RelMe do
   @options [
     pool: :media,
-    max_body: 2_000_000
+    max_body: 2_000_000,
+    recv_timeout: 2_000
   ]

   if Pleroma.Config.get(:env) == :test do
@@ -23,18 +24,8 @@ def parse(url) when is_binary(url) do
   def parse(_), do: {:error, "No URL provided"}

   defp parse_url(url) do
-    opts =
-      if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Hackney do
-        Keyword.merge(@options,
-          recv_timeout: 2_000,
-          with_body: true
-        )
-      else
-        @options
-      end
-
     with {:ok, %Tesla.Env{body: html, status: status}} when status in 200..299 <-
-           Pleroma.HTTP.get(url, [], adapter: opts),
+           Pleroma.HTTP.get(url, [], @options),
          {:ok, html_tree} <- Floki.parse_document(html),
          data <-
            Floki.attribute(html_tree, "link[rel~=me]", "href") ++
@@ -9,14 +9,15 @@ defmodule Pleroma.Web.RichMedia.Helpers do
   alias Pleroma.Object
   alias Pleroma.Web.RichMedia.Parser

-  @rich_media_options [
+  @options [
     pool: :media,
-    max_body: 2_000_000
+    max_body: 2_000_000,
+    recv_timeout: 2_000
   ]

   @spec validate_page_url(URI.t() | binary()) :: :ok | :error
   defp validate_page_url(page_url) when is_binary(page_url) do
-    validate_tld = Pleroma.Config.get([Pleroma.Formatter, :validate_tld])
+    validate_tld = Config.get([Pleroma.Formatter, :validate_tld])

     page_url
     |> Linkify.Parser.url?(validate_tld: validate_tld)
@@ -86,16 +87,6 @@ def perform(:fetch, %Activity{} = activity) do
   def rich_media_get(url) do
     headers = [{"user-agent", Pleroma.Application.user_agent() <> "; Bot"}]

-    options =
-      if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Hackney do
-        Keyword.merge(@rich_media_options,
-          recv_timeout: 2_000,
-          with_body: true
-        )
-      else
-        @rich_media_options
-      end
-
-    Pleroma.HTTP.get(url, headers, adapter: options)
+    Pleroma.HTTP.get(url, headers, @options)
   end
 end
@@ -0,0 +1,13 @@
+defmodule Pleroma.Repo.Migrations.RenameAwaitUpTimeoutInConnectionsPool do
+  use Ecto.Migration
+
+  def change do
+    with %Pleroma.ConfigDB{} = config <-
+           Pleroma.ConfigDB.get_by_params(%{group: :pleroma, key: :connections_pool}),
+         {timeout, value} when is_integer(timeout) <- Keyword.pop(config.value, :await_up_timeout) do
+      config
+      |> Ecto.Changeset.change(value: Keyword.put(value, :connect_timeout, timeout))
+      |> Pleroma.Repo.update()
+    end
+  end
+end
@@ -0,0 +1,19 @@
+defmodule Pleroma.Repo.Migrations.RenameTimeoutInPools do
+  use Ecto.Migration
+
+  def change do
+    with %Pleroma.ConfigDB{} = config <-
+           Pleroma.ConfigDB.get_by_params(%{group: :pleroma, key: :pools}) do
+      updated_value =
+        Enum.map(config.value, fn {pool, pool_value} ->
+          with {timeout, value} when is_integer(timeout) <- Keyword.pop(pool_value, :timeout) do
+            {pool, Keyword.put(value, :recv_timeout, timeout)}
+          end
+        end)
+
+      config
+      |> Ecto.Changeset.change(value: updated_value)
+      |> Pleroma.Repo.update()
+    end
+  end
+end
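As a rough before/after illustration of what the second migration does to a stored ConfigDB `:pools` value (the pool and numbers are borrowed from the defaults earlier in this diff, not from a real database):

```elixir
# Hypothetical stored value before the migration
[federation: [size: 50, max_waiting: 10, timeout: 10_000]]

# After: `:timeout` is popped and re-inserted as `:recv_timeout`
[federation: [size: 50, max_waiting: 10, recv_timeout: 10_000]]
```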
@@ -4,12 +4,15 @@ defmodule Pleroma.Config.DeprecationWarningsTest do

   import ExUnit.CaptureLog

+  alias Pleroma.Config
+  alias Pleroma.Config.DeprecationWarnings
+
   test "check_old_mrf_config/0" do
     clear_config([:instance, :rewrite_policy], Pleroma.Web.ActivityPub.MRF.NoOpPolicy)
     clear_config([:instance, :mrf_transparency], true)
     clear_config([:instance, :mrf_transparency_exclusions], [])

-    assert capture_log(fn -> Pleroma.Config.DeprecationWarnings.check_old_mrf_config() end) =~
+    assert capture_log(fn -> DeprecationWarnings.check_old_mrf_config() end) =~
             """
             !!!DEPRECATION WARNING!!!
             Your config is using old namespaces for MRF configuration. They should work for now, but you are advised to change to new namespaces to prevent possible issues later:
@@ -44,22 +47,66 @@ test "move_namespace_and_warn/2" do
     ]

     assert capture_log(fn ->
-             Pleroma.Config.DeprecationWarnings.move_namespace_and_warn(
+             DeprecationWarnings.move_namespace_and_warn(
               config_map,
               "Warning preface"
             )
           end) =~ "Warning preface\n error :key\n error :key2\n error :key3"

-    assert Pleroma.Config.get(new_group1) == 1
-    assert Pleroma.Config.get(new_group2) == 2
-    assert Pleroma.Config.get(new_group3) == 3
+    assert Config.get(new_group1) == 1
+    assert Config.get(new_group2) == 2
+    assert Config.get(new_group3) == 3
   end

   test "check_media_proxy_whitelist_config/0" do
     clear_config([:media_proxy, :whitelist], ["https://example.com", "example2.com"])

     assert capture_log(fn ->
-             Pleroma.Config.DeprecationWarnings.check_media_proxy_whitelist_config()
+             DeprecationWarnings.check_media_proxy_whitelist_config()
           end) =~ "Your config is using old format (only domain) for MediaProxy whitelist option"
   end
+
+  describe "check_gun_pool_options/0" do
+    test "await_up_timeout" do
+      config = Config.get(:connections_pool)
+      clear_config(:connections_pool, Keyword.put(config, :await_up_timeout, 5_000))
+
+      assert capture_log(fn ->
+               DeprecationWarnings.check_gun_pool_options()
+             end) =~
+               "Your config is using old setting name `await_up_timeout` instead of `connect_timeout`"
+    end
+
+    test "pool timeout" do
+      old_config = [
+        federation: [
+          size: 50,
+          max_waiting: 10,
+          timeout: 10_000
+        ],
+        media: [
+          size: 50,
+          max_waiting: 10,
+          timeout: 10_000
+        ],
+        upload: [
+          size: 25,
+          max_waiting: 5,
+          timeout: 15_000
+        ],
+        default: [
+          size: 10,
+          max_waiting: 2,
+          timeout: 5_000
+        ]
+      ]
+
+      clear_config(:pools, old_config)
+
+      assert capture_log(fn ->
+               DeprecationWarnings.check_gun_pool_options()
+             end) =~
+               "Your config is using old setting name `timeout` instead of `recv_timeout` in pool settings"
+    end
+  end
 end
@@ -24,18 +24,18 @@ defmodule Pleroma.Upload.Filter.AnonymizeFilenameTest do

   test "it replaces filename on pre-defined text", %{upload_file: upload_file} do
     Config.put([Upload.Filter.AnonymizeFilename, :text], "custom-file.png")
-    {:ok, %Upload{name: name}} = Upload.Filter.AnonymizeFilename.filter(upload_file)
+    {:ok, :filtered, %Upload{name: name}} = Upload.Filter.AnonymizeFilename.filter(upload_file)
     assert name == "custom-file.png"
   end

   test "it replaces filename on pre-defined text expression", %{upload_file: upload_file} do
     Config.put([Upload.Filter.AnonymizeFilename, :text], "custom-file.{extension}")
-    {:ok, %Upload{name: name}} = Upload.Filter.AnonymizeFilename.filter(upload_file)
+    {:ok, :filtered, %Upload{name: name}} = Upload.Filter.AnonymizeFilename.filter(upload_file)
     assert name == "custom-file.jpg"
   end

   test "it replaces filename on random text", %{upload_file: upload_file} do
-    {:ok, %Upload{name: name}} = Upload.Filter.AnonymizeFilename.filter(upload_file)
+    {:ok, :filtered, %Upload{name: name}} = Upload.Filter.AnonymizeFilename.filter(upload_file)
     assert <<_::bytes-size(14)>> <> ".jpg" = name
     refute name == "an… image.jpg"
   end
@@ -25,6 +25,7 @@ test "adds shasum" do

     assert {
              :ok,
+             :filtered,
              %Pleroma.Upload{id: @shasum, path: @shasum <> ".jpg"}
            } = Dedupe.filter(upload)
   end
@@ -21,7 +21,7 @@ test "apply exiftool filter" do
       tempfile: Path.absname("test/fixtures/DSCN0010_tmp.jpg")
     }

-    assert Filter.Exiftool.filter(upload) == :ok
+    assert Filter.Exiftool.filter(upload) == {:ok, :filtered}

     {exif_original, 0} = System.cmd("exiftool", ["test/fixtures/DSCN0010.jpg"])
     {exif_filtered, 0} = System.cmd("exiftool", ["test/fixtures/DSCN0010_tmp.jpg"])
@@ -30,4 +30,13 @@ test "apply exiftool filter" do
     assert String.match?(exif_original, ~r/GPS/)
     refute String.match?(exif_filtered, ~r/GPS/)
   end
+
+  test "verify webp files are skipped" do
+    upload = %Pleroma.Upload{
+      name: "sample.webp",
+      content_type: "image/webp"
+    }
+
+    assert Filter.Exiftool.filter(upload) == {:ok, :noop}
+  end
 end
@@ -36,7 +36,7 @@ test "apply mogrify filter" do
          save: fn _f, _o -> :ok end
        ]}
     ]) do
-      assert Filter.Mogrifun.filter(upload) == :ok
+      assert Filter.Mogrifun.filter(upload) == {:ok, :filtered}
     end

     Task.await(task)
@@ -33,7 +33,7 @@ test "apply mogrify filter" do
       custom: fn _m, _a -> :ok end,
       custom: fn m, a, o -> send(task.pid, {:apply_filter, {m, a, o}}) end,
       save: fn _f, _o -> :ok end do
-      assert Filter.Mogrify.filter(upload) == :ok
+      assert Filter.Mogrify.filter(upload) == {:ok, :filtered}
     end

     Task.await(task)
@@ -112,6 +112,8 @@ test "Scrapes favicon URLs" do
   end

   test "Returns nil on too long favicon URLs" do
+    clear_config([:instances_favicons, :enabled], true)
+
     long_favicon_url =
       "https://Lorem.ipsum.dolor.sit.amet/consecteturadipiscingelit/Praesentpharetrapurusutaliquamtempus/Mauriseulaoreetarcu/atfacilisisorci/Nullamporttitor/nequesedfeugiatmollis/dolormagnaefficiturlorem/nonpretiumsapienorcieurisus/Nullamveleratsem/Maecenassedaccumsanexnam/favicon.png"

@@ -5,7 +5,6 @@
 defmodule Pleroma.Web.MastodonAPI.AccountViewTest do
   use Pleroma.DataCase

-  alias Pleroma.Config
   alias Pleroma.User
   alias Pleroma.UserRelationship
   alias Pleroma.Web.CommonAPI
@@ -19,8 +18,6 @@ defmodule Pleroma.Web.MastodonAPI.AccountViewTest do
     :ok
   end

-  setup do: clear_config([:instances_favicons, :enabled])
-
   test "Represent a user account" do
     background_image = %{
       "url" => [%{"href" => "https://example.com/images/asuka_hospital.png"}]
@@ -78,8 +75,7 @@ test "Represent a user account" do
       pleroma: %{
         ap_id: user.ap_id,
         background_image: "https://example.com/images/asuka_hospital.png",
-        favicon:
-          "https://shitposter.club/plugins/Qvitter/img/gnusocial-favicons/favicon-16x16.png",
+        favicon: nil,
         confirmation_pending: false,
         tags: [],
         is_admin: false,
@@ -98,22 +94,29 @@ test "Represent a user account" do
     assert expected == AccountView.render("show.json", %{user: user, skip_visibility_check: true})
   end

-  test "Favicon is nil when :instances_favicons is disabled" do
+  describe "favicon" do
-    user = insert(:user)
+    setup do
+      [user: insert(:user)]
+    end

-    Config.put([:instances_favicons, :enabled], true)
+    test "is parsed when :instance_favicons is enabled", %{user: user} do
+      clear_config([:instances_favicons, :enabled], true)

       assert %{
                pleroma: %{
                  favicon:
                    "https://shitposter.club/plugins/Qvitter/img/gnusocial-favicons/favicon-16x16.png"
                }
              } = AccountView.render("show.json", %{user: user, skip_visibility_check: true})
+    end

-    Config.put([:instances_favicons, :enabled], false)
+    test "is nil when :instances_favicons is disabled", %{user: user} do
+      assert %{pleroma: %{favicon: nil}} =
+               AccountView.render("show.json", %{user: user, skip_visibility_check: true})
+    end
+  end

-    assert %{pleroma: %{favicon: nil}} =
-             AccountView.render("show.json", %{user: user, skip_visibility_check: true})
+  test "Favicon when :instance_favicons is enabled" do
   end

   test "Represent the user account for the account owner" do
@@ -173,8 +176,7 @@ test "Represent a Service(bot) account" do
       pleroma: %{
         ap_id: user.ap_id,
         background_image: nil,
-        favicon:
-          "https://shitposter.club/plugins/Qvitter/img/gnusocial-favicons/favicon-16x16.png",
+        favicon: nil,
         confirmation_pending: false,
         tags: [],
         is_admin: false,