forked from YokaiRick/akkoma

Merge branch 'develop' of git.pleroma.social:pleroma/pleroma into unlisted

commit 21efda2edb

34 changed files with 256 additions and 125 deletions
@@ -5,31 +5,32 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).

## Unreleased

### Added
### Changed

- MRF policy to rewrite bot posts scope from public to unlisted
- Renamed `:await_up_timeout` in `:connections_pool` namespace to `:connect_timeout`, old name is deprecated.
- Renamed `:timeout` in `pools` namespace to `:recv_timeout`, old name is deprecated.

### Removed

- **Breaking:** Removed `Pleroma.Workers.Cron.StatsWorker` setting from Oban `:crontab`.


## unreleased-patch - ???

### Added

- Rich media failure tracking (along with `:failure_backoff` option)
- MRF policy to rewrite bot posts scope from public to unlisted

### Fixed

- Possible OOM errors with the default HTTP adapter
- Fixed uploading webp images when the Exiftool Upload Filter is enabled by skipping them
- Mastodon API: Search parameter `following` now correctly returns the followings rather than the followers
- Mastodon API: Timelines hanging for (`number of posts with links * rich media timeout`) in the worst case.
  Reduced to just rich media timeout.
- Mastodon API: Cards being wrong for preview statuses due to cache key collision
- Password resets no longer processed for deactivated accounts


## [2.1.0] - 2020-08-28

### Changed

@@ -735,28 +735,28 @@
  max_connections: 250,
  max_idle_time: 30_000,
  retry: 0,
  await_up_timeout: 5_000
  connect_timeout: 5_000

config :pleroma, :pools,
  federation: [
    size: 50,
    max_waiting: 10,
    timeout: 10_000
    recv_timeout: 10_000
  ],
  media: [
    size: 50,
    max_waiting: 10,
    timeout: 10_000
    recv_timeout: 10_000
  ],
  upload: [
    size: 25,
    max_waiting: 5,
    timeout: 15_000
    recv_timeout: 15_000
  ],
  default: [
    size: 10,
    max_waiting: 2,
    timeout: 5_000
    recv_timeout: 5_000
  ]

config :pleroma, :hackney_pools,

@@ -3386,7 +3386,7 @@
        suggestions: [250]
      },
      %{
        key: :await_up_timeout,
        key: :connect_timeout,
        type: :integer,
        description: "Timeout while `gun` will wait until connection is up. Default: 5000ms.",
        suggestions: [5000]

@@ -3424,6 +3424,12 @@
        description:
          "Maximum number of requests waiting for other requests to finish. After this number is reached, the pool will start returning errrors when a new request is made",
        suggestions: [10]
      },
      %{
        key: :recv_timeout,
        type: :integer,
        description: "Timeout for the pool while gun will wait for response",
        suggestions: [10_000]
      }
    ]
  }

@@ -114,7 +114,7 @@

config :pleroma, Pleroma.Web.ApiSpec.CastAndValidate, strict: true

config :pleroma, :instances_favicons, enabled: true
config :pleroma, :instances_favicons, enabled: false

config :pleroma, Pleroma.Uploaders.S3,
  bucket: nil,

@@ -499,7 +499,7 @@ Settings for HTTP connection pool.
* `:connection_acquisition_wait` - Timeout to acquire a connection from pool.The total max time is this value multiplied by the number of retries.
* `connection_acquisition_retries` - Number of attempts to acquire the connection from the pool if it is overloaded. Each attempt is timed `:connection_acquisition_wait` apart.
* `:max_connections` - Maximum number of connections in the pool.
* `:await_up_timeout` - Timeout to connect to the host.
* `:connect_timeout` - Timeout to connect to the host.
* `:reclaim_multiplier` - Multiplied by `:max_connections` this will be the maximum number of idle connections that will be reclaimed in case the pool is overloaded.

### :pools

@@ -518,7 +518,7 @@ There are four pools used:
For each pool, the options are:

* `:size` - limit to how much requests can be concurrently executed.
* `:timeout` - timeout while `gun` will wait for response
* `:recv_timeout` - timeout while `gun` will wait for response
* `:max_waiting` - limit to how much requests can be waiting for others to finish, after this is reached, subsequent requests will be dropped.

## Captcha

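Taken together, the renamed options from this commit would look roughly like the sketch below in an instance configuration. This is a minimal illustration, assuming the usual `config/prod.secret.exs` layout; the numeric values are examples, not recommendations:

```elixir
import Config

# Connection-level settings: `connect_timeout` replaces the deprecated `await_up_timeout`.
config :pleroma, :connections_pool,
  max_connections: 250,
  connect_timeout: 5_000

# Per-pool settings: `recv_timeout` replaces the deprecated `timeout`.
config :pleroma, :pools,
  media: [
    size: 50,
    max_waiting: 10,
    recv_timeout: 10_000
  ]
```
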
@@ -91,20 +91,17 @@ def run(["adapters"]) do
      "Without conn and without pool" => fn ->
        {:ok, %Tesla.Env{}} =
          Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500", [],
            adapter: [pool: :no_pool, receive_conn: false]
            pool: :no_pool,
            receive_conn: false
          )
      end,
      "Without conn and with pool" => fn ->
        {:ok, %Tesla.Env{}} =
          Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500", [],
            adapter: [receive_conn: false]
          )
          Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500", [], receive_conn: false)
      end,
      "With reused conn and without pool" => fn ->
        {:ok, %Tesla.Env{}} =
          Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500", [],
            adapter: [pool: :no_pool]
          )
          Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500", [], pool: :no_pool)
      end,
      "With reused conn and with pool" => fn ->
        {:ok, %Tesla.Env{}} = Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500")

@@ -124,9 +124,7 @@ defp download_build(frontend_info, dest) do
    url = String.replace(frontend_info["build_url"], "${ref}", frontend_info["ref"])

    with {:ok, %{status: 200, body: zip_body}} <-
           Pleroma.HTTP.get(url, [],
             adapter: [pool: :media, timeout: 120_000, recv_timeout: 120_000]
           ) do
           Pleroma.HTTP.get(url, [], pool: :media, recv_timeout: 120_000) do
      unzip(zip_body, dest)
    else
      e -> {:error, e}

@@ -56,6 +56,7 @@ def warn do
    check_old_mrf_config()
    check_media_proxy_whitelist_config()
    check_welcome_message_config()
    check_gun_pool_options()
  end

  def check_welcome_message_config do

@@ -115,4 +116,46 @@ def check_media_proxy_whitelist_config do
    """)
  end
end

  def check_gun_pool_options do
    pool_config = Config.get(:connections_pool)

    if timeout = pool_config[:await_up_timeout] do
      Logger.warn("""
      !!!DEPRECATION WARNING!!!
      Your config is using old setting name `await_up_timeout` instead of `connect_timeout`. Setting should work for now, but you are advised to change format to scheme with port to prevent possible issues later.
      """)

      Config.put(:connections_pool, Keyword.put_new(pool_config, :connect_timeout, timeout))
    end

    pools_configs = Config.get(:pools)

    warning_preface = """
    !!!DEPRECATION WARNING!!!
    Your config is using old setting name `timeout` instead of `recv_timeout` in pool settings. Setting should work for now, but you are advised to change format to scheme with port to prevent possible issues later.
    """

    updated_config =
      Enum.reduce(pools_configs, [], fn {pool_name, config}, acc ->
        if timeout = config[:timeout] do
          Keyword.put(acc, pool_name, Keyword.put_new(config, :recv_timeout, timeout))
        else
          acc
        end
      end)

    if updated_config != [] do
      pool_warnings =
        updated_config
        |> Keyword.keys()
        |> Enum.map(fn pool_name ->
          "\n* `:timeout` options in #{pool_name} pool is now `:recv_timeout`"
        end)

      Logger.warn(Enum.join([warning_preface | pool_warnings]))

      Config.put(:pools, updated_config)
    end
  end
end

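The rewrite in `check_gun_pool_options/0` is non-destructive: the old key stays in place and the new key is only added when it is missing. A standalone sketch of the same transformation on a sample `:pools` value (plain Elixir, no Pleroma dependencies; the data is made up for illustration):

```elixir
pools = [
  media: [size: 50, max_waiting: 10, timeout: 10_000],
  default: [size: 10, max_waiting: 2, recv_timeout: 5_000]
]

updated =
  Enum.reduce(pools, [], fn {pool_name, config}, acc ->
    if timeout = config[:timeout] do
      # Copy the deprecated :timeout value into :recv_timeout, keeping the old key.
      Keyword.put(acc, pool_name, Keyword.put_new(config, :recv_timeout, timeout))
    else
      acc
    end
  end)

# Only pools that still use :timeout end up in `updated`:
# [media: [recv_timeout: 10_000, size: 50, max_waiting: 10, timeout: 10_000]]
IO.inspect(updated)
```
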
@@ -13,7 +13,7 @@ def open(%URI{} = uri, opts) do
    opts =
      opts
      |> Enum.into(%{})
      |> Map.put_new(:await_up_timeout, pool_opts[:await_up_timeout] || 5_000)
      |> Map.put_new(:connect_timeout, pool_opts[:connect_timeout] || 5_000)
      |> Map.put_new(:supervise, false)
      |> maybe_add_tls_opts(uri)

@@ -50,7 +50,7 @@ defp do_open(uri, %{proxy: {proxy_host, proxy_port}} = opts) do

    with open_opts <- Map.delete(opts, :tls_opts),
         {:ok, conn} <- Gun.open(proxy_host, proxy_port, open_opts),
         {:ok, _} <- Gun.await_up(conn, opts[:await_up_timeout]),
         {:ok, _} <- Gun.await_up(conn, opts[:connect_timeout]),
         stream <- Gun.connect(conn, connect_opts),
         {:response, :fin, 200, _} <- Gun.await(conn, stream) do
      {:ok, conn}

@@ -88,7 +88,7 @@ defp do_open(uri, %{proxy: {proxy_type, proxy_host, proxy_port}} = opts) do
      |> Map.put(:socks_opts, socks_opts)

    with {:ok, conn} <- Gun.open(proxy_host, proxy_port, opts),
         {:ok, _} <- Gun.await_up(conn, opts[:await_up_timeout]) do
         {:ok, _} <- Gun.await_up(conn, opts[:connect_timeout]) do
      {:ok, conn}
    else
      error ->

@@ -106,7 +106,7 @@ defp do_open(%URI{host: host, port: port} = uri, opts) do
    host = Pleroma.HTTP.AdapterHelper.parse_host(host)

    with {:ok, conn} <- Gun.open(host, port, opts),
         {:ok, _} <- Gun.await_up(conn, opts[:await_up_timeout]) do
         {:ok, _} <- Gun.await_up(conn, opts[:connect_timeout]) do
      {:ok, conn}
    else
      error ->

@@ -6,7 +6,7 @@ defmodule Pleroma.HTTP.AdapterHelper do
  @moduledoc """
  Configure Tesla.Client with default and customized adapter options.
  """
  @defaults [pool: :federation]
  @defaults [pool: :federation, connect_timeout: 5_000, recv_timeout: 5_000]

  @type proxy_type() :: :socks4 | :socks5
  @type host() :: charlist() | :inet.ip_address()

@@ -11,12 +11,8 @@ defmodule Pleroma.HTTP.AdapterHelper.Gun do
  require Logger

  @defaults [
    connect_timeout: 5_000,
    domain_lookup_timeout: 5_000,
    tls_handshake_timeout: 5_000,
    retry: 1,
    retry_timeout: 1000,
    await_up_timeout: 5_000
    retry_timeout: 1_000
  ]

  @type pool() :: :federation | :upload | :media | :default

@@ -45,15 +41,17 @@ defp add_scheme_opts(opts, %{scheme: "https"}) do
  end

  defp put_timeout(opts) do
    {recv_timeout, opts} = Keyword.pop(opts, :recv_timeout, pool_timeout(opts[:pool]))
    # this is the timeout to receive a message from Gun
    Keyword.put_new(opts, :timeout, pool_timeout(opts[:pool]))
    # `:timeout` key is used in Tesla
    Keyword.put(opts, :timeout, recv_timeout)
  end

  @spec pool_timeout(pool()) :: non_neg_integer()
  def pool_timeout(pool) do
    default = Config.get([:pools, :default, :timeout], 5_000)
    default = Config.get([:pools, :default, :recv_timeout], 5_000)

    Config.get([:pools, pool, :timeout], default)
    Config.get([:pools, pool, :recv_timeout], default)
  end

  @prefix Pleroma.Gun.ConnectionPool

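In effect `put_timeout/1` gives a per-request `:recv_timeout` precedence over the pool configuration and copies the winning value into the `:timeout` key that the Tesla gun adapter reads. A rough standalone sketch of that precedence chain, with the config represented as a plain map rather than `Pleroma.Config` (assumption for illustration only):

```elixir
# Stand-in for the :pools configuration.
pools_config = %{media: [recv_timeout: 10_000], default: [recv_timeout: 5_000]}

pool_timeout = fn pool ->
  default = get_in(pools_config, [:default, :recv_timeout]) || 5_000
  get_in(pools_config, [pool, :recv_timeout]) || default
end

put_timeout = fn opts ->
  {recv_timeout, opts} = Keyword.pop(opts, :recv_timeout, pool_timeout.(opts[:pool]))
  # :timeout is what Tesla passes on to gun.
  Keyword.put(opts, :timeout, recv_timeout)
end

put_timeout.(pool: :media)                       # => [timeout: 10_000, pool: :media]
put_timeout.(pool: :media, recv_timeout: 2_000)  # => [timeout: 2_000, pool: :media]
```
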
@@ -2,11 +2,8 @@ defmodule Pleroma.HTTP.AdapterHelper.Hackney do
  @behaviour Pleroma.HTTP.AdapterHelper

  @defaults [
    connect_timeout: 10_000,
    recv_timeout: 20_000,
    follow_redirect: true,
    force_redirect: true,
    pool: :federation
    force_redirect: true
  ]

  @spec options(keyword(), URI.t()) :: keyword()

@@ -19,6 +16,7 @@ def options(connection_opts \\ [], %URI{} = uri) do
    |> Keyword.merge(config_opts)
    |> Keyword.merge(connection_opts)
    |> add_scheme_opts(uri)
    |> maybe_add_with_body()
    |> Pleroma.HTTP.AdapterHelper.maybe_add_proxy(proxy)
  end

@@ -27,4 +25,12 @@ defp add_scheme_opts(opts, %URI{scheme: "https"}) do
  end

  defp add_scheme_opts(opts, _), do: opts

  defp maybe_add_with_body(opts) do
    if opts[:max_body] do
      Keyword.put(opts, :with_body, true)
    else
      opts
    end
  end
end

@@ -11,7 +11,7 @@ defmodule Pleroma.HTTP.ExAws do

  @impl true
  def request(method, url, body \\ "", headers \\ [], http_opts \\ []) do
    http_opts = Keyword.put_new(http_opts, :adapter, pool: :upload)
    http_opts = Keyword.put_new(http_opts, :pool, :upload)

    case HTTP.request(method, url, body, headers, http_opts) do
      {:ok, env} ->

@@ -60,7 +60,7 @@ def post(url, body, headers \\ [], options \\ []),
          {:ok, Env.t()} | {:error, any()}
  def request(method, url, body, headers, options) when is_binary(url) do
    uri = URI.parse(url)
    adapter_opts = AdapterHelper.options(uri, options[:adapter] || [])
    adapter_opts = AdapterHelper.options(uri, options || [])

    options = put_in(options[:adapter], adapter_opts)
    params = options[:params] || []

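With this change the per-request options are passed flat instead of being nested under an `adapter:` keyword, which is what the call sites touched elsewhere in this commit switch to. A hedged usage sketch (the URL and values are taken from the diff, purely illustrative):

```elixir
url = "https://httpbin.org/stream-bytes/1500"

# Before: adapter options nested under the :adapter key.
Pleroma.HTTP.get(url, [], adapter: [pool: :media, recv_timeout: 120_000])

# After: options passed flat; AdapterHelper.options/2 now receives them directly.
Pleroma.HTTP.get(url, [], pool: :media, recv_timeout: 120_000)
```
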
@@ -11,7 +11,7 @@ defmodule Pleroma.HTTP.Tzdata do

  @impl true
  def get(url, headers, options) do
    options = Keyword.put_new(options, :adapter, pool: :default)
    options = Keyword.put_new(options, :pool, :default)

    with {:ok, %Tesla.Env{} = env} <- HTTP.get(url, headers, options) do
      {:ok, {env.status, env.headers, env.body}}

@@ -20,7 +20,7 @@ def get(url, headers, options) do

  @impl true
  def head(url, headers, options) do
    options = Keyword.put_new(options, :adapter, pool: :default)
    options = Keyword.put_new(options, :pool, :default)

    with {:ok, %Tesla.Env{} = env} <- HTTP.head(url, headers, options) do
      {:ok, {env.status, env.headers}}

@@ -15,7 +15,11 @@ defmodule Pleroma.Upload.Filter do

  require Logger

  @callback filter(Pleroma.Upload.t()) :: :ok | {:ok, Pleroma.Upload.t()} | {:error, any()}
  @callback filter(Pleroma.Upload.t()) ::
              {:ok, :filtered}
              | {:ok, :noop}
              | {:ok, :filtered, Pleroma.Upload.t()}
              | {:error, any()}

  @spec filter([module()], Pleroma.Upload.t()) :: {:ok, Pleroma.Upload.t()} | {:error, any()}

@@ -25,10 +29,13 @@ def filter([], upload) do

  def filter([filter | rest], upload) do
    case filter.filter(upload) do
      :ok ->
      {:ok, :filtered} ->
        filter(rest, upload)

      {:ok, upload} ->
      {:ok, :filtered, upload} ->
        filter(rest, upload)

      {:ok, :noop} ->
        filter(rest, upload)

      error ->

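Under the new callback contract a filter reports whether it actually touched the upload instead of returning a bare `:ok`. A minimal sketch of a custom filter written against that contract (the module name and rejection rule are made up for illustration):

```elixir
defmodule MyInstance.Upload.Filter.RejectSvg do
  @behaviour Pleroma.Upload.Filter

  # Example-only filter: rejects SVG uploads, leaves everything else untouched.
  @impl true
  def filter(%Pleroma.Upload{content_type: "image/svg+xml"}),
    do: {:error, "SVG uploads are not allowed"}

  def filter(%Pleroma.Upload{}), do: {:ok, :noop}
end
```

Returning `{:ok, :filtered, upload}` instead would hand a modified `%Pleroma.Upload{}` on to the next filter in the chain, as the `AnonymizeFilename` and `Dedupe` filters below now do.
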
@@ -16,9 +16,11 @@ defmodule Pleroma.Upload.Filter.AnonymizeFilename do
  def filter(%Upload{name: name} = upload) do
    extension = List.last(String.split(name, "."))
    name = predefined_name(extension) || random(extension)
    {:ok, %Upload{upload | name: name}}
    {:ok, :filtered, %Upload{upload | name: name}}
  end

  def filter(_), do: {:ok, :noop}

  @spec predefined_name(String.t()) :: String.t() | nil
  defp predefined_name(extension) do
    with name when not is_nil(name) <- Config.get([__MODULE__, :text]),

@@ -17,8 +17,8 @@ def filter(%Upload{name: name, tempfile: tempfile} = upload) do
    |> Base.encode16(case: :lower)

    filename = shasum <> "." <> extension
    {:ok, %Upload{upload | id: shasum, path: filename}}
    {:ok, :filtered, %Upload{upload | id: shasum, path: filename}}
  end

  def filter(_), do: :ok
  def filter(_), do: {:ok, :noop}
end

@@ -9,11 +9,15 @@ defmodule Pleroma.Upload.Filter.Exiftool do
  """
  @behaviour Pleroma.Upload.Filter

  @spec filter(Pleroma.Upload.t()) :: :ok | {:error, String.t()}
  @spec filter(Pleroma.Upload.t()) :: {:ok, any()} | {:error, String.t()}

  # webp is not compatible with exiftool at this time
  def filter(%Pleroma.Upload{content_type: "image/webp"}), do: {:ok, :noop}

  def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do
    try do
      case System.cmd("exiftool", ["-overwrite_original", "-gps:all=", file], parallelism: true) do
        {_response, 0} -> :ok
        {_response, 0} -> {:ok, :filtered}
        {error, 1} -> {:error, error}
      end
    rescue

@@ -22,5 +26,5 @@ def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do
    end
  end

  def filter(_), do: :ok
  def filter(_), do: {:ok, :noop}
end

@@ -38,16 +38,16 @@ defmodule Pleroma.Upload.Filter.Mogrifun do
    [{"fill", "yellow"}, {"tint", "40"}]
  ]

  @spec filter(Pleroma.Upload.t()) :: :ok | {:error, String.t()}
  @spec filter(Pleroma.Upload.t()) :: {:ok, atom()} | {:error, String.t()}
  def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do
    try do
      Filter.Mogrify.do_filter(file, [Enum.random(@filters)])
      :ok
      {:ok, :filtered}
    rescue
      _e in ErlangError ->
        {:error, "mogrify command not found"}
    end
  end

  def filter(_), do: :ok
  def filter(_), do: {:ok, :noop}
end

@@ -8,18 +8,18 @@ defmodule Pleroma.Upload.Filter.Mogrify do
  @type conversion :: action :: String.t() | {action :: String.t(), opts :: String.t()}
  @type conversions :: conversion() | [conversion()]

  @spec filter(Pleroma.Upload.t()) :: :ok | {:error, String.t()}
  @spec filter(Pleroma.Upload.t()) :: {:ok, :atom} | {:error, String.t()}
  def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do
    try do
      do_filter(file, Pleroma.Config.get!([__MODULE__, :args]))
      :ok
      {:ok, :filtered}
    rescue
      _e in ErlangError ->
        {:error, "mogrify command not found"}
    end
  end

  def filter(_), do: :ok
  def filter(_), do: {:ok, :noop}

  def do_filter(file, filters) do
    file

@@ -13,22 +13,16 @@ defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy do
  require Logger

  @options [
    pool: :media
    pool: :media,
    recv_timeout: 10_000
  ]

  def perform(:prefetch, url) do
    Logger.debug("Prefetching #{inspect(url)}")

    opts =
      if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Hackney do
        Keyword.put(@options, :recv_timeout, 10_000)
      else
        @options
      end

    url
    |> MediaProxy.url()
    |> HTTP.get([], adapter: opts)
    |> HTTP.get([], @options)
  end

  def perform(:preload, %{"object" => %{"attachment" => attachments}} = _message) do

@@ -5,7 +5,8 @@
defmodule Pleroma.Web.RelMe do
  @options [
    pool: :media,
    max_body: 2_000_000
    max_body: 2_000_000,
    recv_timeout: 2_000
  ]

  if Pleroma.Config.get(:env) == :test do

@@ -23,18 +24,8 @@ def parse(url) when is_binary(url) do
  def parse(_), do: {:error, "No URL provided"}

  defp parse_url(url) do
    opts =
      if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Hackney do
        Keyword.merge(@options,
          recv_timeout: 2_000,
          with_body: true
        )
      else
        @options
      end

    with {:ok, %Tesla.Env{body: html, status: status}} when status in 200..299 <-
           Pleroma.HTTP.get(url, [], adapter: opts),
           Pleroma.HTTP.get(url, [], @options),
         {:ok, html_tree} <- Floki.parse_document(html),
         data <-
           Floki.attribute(html_tree, "link[rel~=me]", "href") ++

@@ -9,14 +9,15 @@ defmodule Pleroma.Web.RichMedia.Helpers do
  alias Pleroma.Object
  alias Pleroma.Web.RichMedia.Parser

  @rich_media_options [
  @options [
    pool: :media,
    max_body: 2_000_000
    max_body: 2_000_000,
    recv_timeout: 2_000
  ]

  @spec validate_page_url(URI.t() | binary()) :: :ok | :error
  defp validate_page_url(page_url) when is_binary(page_url) do
    validate_tld = Pleroma.Config.get([Pleroma.Formatter, :validate_tld])
    validate_tld = Config.get([Pleroma.Formatter, :validate_tld])

    page_url
    |> Linkify.Parser.url?(validate_tld: validate_tld)

@@ -86,16 +87,6 @@ def perform(:fetch, %Activity{} = activity) do
  def rich_media_get(url) do
    headers = [{"user-agent", Pleroma.Application.user_agent() <> "; Bot"}]

    options =
      if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Hackney do
        Keyword.merge(@rich_media_options,
          recv_timeout: 2_000,
          with_body: true
        )
      else
        @rich_media_options
      end

    Pleroma.HTTP.get(url, headers, adapter: options)
    Pleroma.HTTP.get(url, headers, @options)
  end
end

@@ -0,0 +1,13 @@
defmodule Pleroma.Repo.Migrations.RenameAwaitUpTimeoutInConnectionsPool do
  use Ecto.Migration

  def change do
    with %Pleroma.ConfigDB{} = config <-
           Pleroma.ConfigDB.get_by_params(%{group: :pleroma, key: :connections_pool}),
         {timeout, value} when is_integer(timeout) <- Keyword.pop(config.value, :await_up_timeout) do
      config
      |> Ecto.Changeset.change(value: Keyword.put(value, :connect_timeout, timeout))
      |> Pleroma.Repo.update()
    end
  end
end

@@ -0,0 +1,19 @@
defmodule Pleroma.Repo.Migrations.RenameTimeoutInPools do
  use Ecto.Migration

  def change do
    with %Pleroma.ConfigDB{} = config <-
           Pleroma.ConfigDB.get_by_params(%{group: :pleroma, key: :pools}) do
      updated_value =
        Enum.map(config.value, fn {pool, pool_value} ->
          with {timeout, value} when is_integer(timeout) <- Keyword.pop(pool_value, :timeout) do
            {pool, Keyword.put(value, :recv_timeout, timeout)}
          end
        end)

      config
      |> Ecto.Changeset.change(value: updated_value)
      |> Pleroma.Repo.update()
    end
  end
end

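Both migrations only apply to instances that keep their settings in ConfigDB; the value rewrite itself is a plain keyword-list transformation. A standalone sketch of what happens to a stored `:connections_pool` value (sample data only, no database involved):

```elixir
# Example stored value using the deprecated key.
value = [max_connections: 250, await_up_timeout: 5_000, retry: 0]

with {timeout, rest} when is_integer(timeout) <- Keyword.pop(value, :await_up_timeout) do
  # The deprecated key is dropped and its value carried over to the new key.
  Keyword.put(rest, :connect_timeout, timeout)
end
# => [connect_timeout: 5_000, max_connections: 250, retry: 0]
```
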
@@ -4,12 +4,15 @@ defmodule Pleroma.Config.DeprecationWarningsTest do

  import ExUnit.CaptureLog

  alias Pleroma.Config
  alias Pleroma.Config.DeprecationWarnings

  test "check_old_mrf_config/0" do
    clear_config([:instance, :rewrite_policy], Pleroma.Web.ActivityPub.MRF.NoOpPolicy)
    clear_config([:instance, :mrf_transparency], true)
    clear_config([:instance, :mrf_transparency_exclusions], [])

    assert capture_log(fn -> Pleroma.Config.DeprecationWarnings.check_old_mrf_config() end) =~
    assert capture_log(fn -> DeprecationWarnings.check_old_mrf_config() end) =~
             """
             !!!DEPRECATION WARNING!!!
             Your config is using old namespaces for MRF configuration. They should work for now, but you are advised to change to new namespaces to prevent possible issues later:

@@ -44,22 +47,66 @@ test "move_namespace_and_warn/2" do
    ]

    assert capture_log(fn ->
             Pleroma.Config.DeprecationWarnings.move_namespace_and_warn(
             DeprecationWarnings.move_namespace_and_warn(
               config_map,
               "Warning preface"
             )
           end) =~ "Warning preface\n error :key\n error :key2\n error :key3"

    assert Pleroma.Config.get(new_group1) == 1
    assert Pleroma.Config.get(new_group2) == 2
    assert Pleroma.Config.get(new_group3) == 3
    assert Config.get(new_group1) == 1
    assert Config.get(new_group2) == 2
    assert Config.get(new_group3) == 3
  end

  test "check_media_proxy_whitelist_config/0" do
    clear_config([:media_proxy, :whitelist], ["https://example.com", "example2.com"])

    assert capture_log(fn ->
             Pleroma.Config.DeprecationWarnings.check_media_proxy_whitelist_config()
             DeprecationWarnings.check_media_proxy_whitelist_config()
           end) =~ "Your config is using old format (only domain) for MediaProxy whitelist option"
  end

  describe "check_gun_pool_options/0" do
    test "await_up_timeout" do
      config = Config.get(:connections_pool)
      clear_config(:connections_pool, Keyword.put(config, :await_up_timeout, 5_000))

      assert capture_log(fn ->
               DeprecationWarnings.check_gun_pool_options()
             end) =~
               "Your config is using old setting name `await_up_timeout` instead of `connect_timeout`"
    end

    test "pool timeout" do
      old_config = [
        federation: [
          size: 50,
          max_waiting: 10,
          timeout: 10_000
        ],
        media: [
          size: 50,
          max_waiting: 10,
          timeout: 10_000
        ],
        upload: [
          size: 25,
          max_waiting: 5,
          timeout: 15_000
        ],
        default: [
          size: 10,
          max_waiting: 2,
          timeout: 5_000
        ]
      ]

      clear_config(:pools, old_config)

      assert capture_log(fn ->
               DeprecationWarnings.check_gun_pool_options()
             end) =~
               "Your config is using old setting name `timeout` instead of `recv_timeout` in pool settings"
    end
  end
end

@@ -24,18 +24,18 @@ defmodule Pleroma.Upload.Filter.AnonymizeFilenameTest do

  test "it replaces filename on pre-defined text", %{upload_file: upload_file} do
    Config.put([Upload.Filter.AnonymizeFilename, :text], "custom-file.png")
    {:ok, %Upload{name: name}} = Upload.Filter.AnonymizeFilename.filter(upload_file)
    {:ok, :filtered, %Upload{name: name}} = Upload.Filter.AnonymizeFilename.filter(upload_file)
    assert name == "custom-file.png"
  end

  test "it replaces filename on pre-defined text expression", %{upload_file: upload_file} do
    Config.put([Upload.Filter.AnonymizeFilename, :text], "custom-file.{extension}")
    {:ok, %Upload{name: name}} = Upload.Filter.AnonymizeFilename.filter(upload_file)
    {:ok, :filtered, %Upload{name: name}} = Upload.Filter.AnonymizeFilename.filter(upload_file)
    assert name == "custom-file.jpg"
  end

  test "it replaces filename on random text", %{upload_file: upload_file} do
    {:ok, %Upload{name: name}} = Upload.Filter.AnonymizeFilename.filter(upload_file)
    {:ok, :filtered, %Upload{name: name}} = Upload.Filter.AnonymizeFilename.filter(upload_file)
    assert <<_::bytes-size(14)>> <> ".jpg" = name
    refute name == "an… image.jpg"
  end

@@ -25,6 +25,7 @@ test "adds shasum" do

    assert {
             :ok,
             :filtered,
             %Pleroma.Upload{id: @shasum, path: @shasum <> ".jpg"}
           } = Dedupe.filter(upload)
  end

@@ -21,7 +21,7 @@ test "apply exiftool filter" do
      tempfile: Path.absname("test/fixtures/DSCN0010_tmp.jpg")
    }

    assert Filter.Exiftool.filter(upload) == :ok
    assert Filter.Exiftool.filter(upload) == {:ok, :filtered}

    {exif_original, 0} = System.cmd("exiftool", ["test/fixtures/DSCN0010.jpg"])
    {exif_filtered, 0} = System.cmd("exiftool", ["test/fixtures/DSCN0010_tmp.jpg"])

@@ -30,4 +30,13 @@ test "apply exiftool filter" do
    assert String.match?(exif_original, ~r/GPS/)
    refute String.match?(exif_filtered, ~r/GPS/)
  end

  test "verify webp files are skipped" do
    upload = %Pleroma.Upload{
      name: "sample.webp",
      content_type: "image/webp"
    }

    assert Filter.Exiftool.filter(upload) == {:ok, :noop}
  end
end

@@ -36,7 +36,7 @@ test "apply mogrify filter" do
        save: fn _f, _o -> :ok end
      ]}
    ]) do
      assert Filter.Mogrifun.filter(upload) == :ok
      assert Filter.Mogrifun.filter(upload) == {:ok, :filtered}
    end

    Task.await(task)

@@ -33,7 +33,7 @@ test "apply mogrify filter" do
      custom: fn _m, _a -> :ok end,
      custom: fn m, a, o -> send(task.pid, {:apply_filter, {m, a, o}}) end,
      save: fn _f, _o -> :ok end do
      assert Filter.Mogrify.filter(upload) == :ok
      assert Filter.Mogrify.filter(upload) == {:ok, :filtered}
    end

    Task.await(task)

@@ -112,6 +112,8 @@ test "Scrapes favicon URLs" do
  end

  test "Returns nil on too long favicon URLs" do
    clear_config([:instances_favicons, :enabled], true)

    long_favicon_url =
      "https://Lorem.ipsum.dolor.sit.amet/consecteturadipiscingelit/Praesentpharetrapurusutaliquamtempus/Mauriseulaoreetarcu/atfacilisisorci/Nullamporttitor/nequesedfeugiatmollis/dolormagnaefficiturlorem/nonpretiumsapienorcieurisus/Nullamveleratsem/Maecenassedaccumsanexnam/favicon.png"

@@ -5,7 +5,6 @@
defmodule Pleroma.Web.MastodonAPI.AccountViewTest do
  use Pleroma.DataCase

  alias Pleroma.Config
  alias Pleroma.User
  alias Pleroma.UserRelationship
  alias Pleroma.Web.CommonAPI

@@ -19,8 +18,6 @@ defmodule Pleroma.Web.MastodonAPI.AccountViewTest do
    :ok
  end

  setup do: clear_config([:instances_favicons, :enabled])

  test "Represent a user account" do
    background_image = %{
      "url" => [%{"href" => "https://example.com/images/asuka_hospital.png"}]

@@ -78,8 +75,7 @@ test "Represent a user account" do
      pleroma: %{
        ap_id: user.ap_id,
        background_image: "https://example.com/images/asuka_hospital.png",
        favicon:
          "https://shitposter.club/plugins/Qvitter/img/gnusocial-favicons/favicon-16x16.png",
        favicon: nil,
        confirmation_pending: false,
        tags: [],
        is_admin: false,

@@ -98,22 +94,29 @@ test "Represent a user account" do
    assert expected == AccountView.render("show.json", %{user: user, skip_visibility_check: true})
  end

  test "Favicon is nil when :instances_favicons is disabled" do
    user = insert(:user)
  describe "favicon" do
    setup do
      [user: insert(:user)]
    end

    Config.put([:instances_favicons, :enabled], true)
    test "is parsed when :instance_favicons is enabled", %{user: user} do
      clear_config([:instances_favicons, :enabled], true)

    assert %{
             pleroma: %{
               favicon:
                 "https://shitposter.club/plugins/Qvitter/img/gnusocial-favicons/favicon-16x16.png"
             }
           } = AccountView.render("show.json", %{user: user, skip_visibility_check: true})
      assert %{
               pleroma: %{
                 favicon:
                   "https://shitposter.club/plugins/Qvitter/img/gnusocial-favicons/favicon-16x16.png"
               }
             } = AccountView.render("show.json", %{user: user, skip_visibility_check: true})
    end

    Config.put([:instances_favicons, :enabled], false)
    test "is nil when :instances_favicons is disabled", %{user: user} do
      assert %{pleroma: %{favicon: nil}} =
               AccountView.render("show.json", %{user: user, skip_visibility_check: true})
    end
  end

    assert %{pleroma: %{favicon: nil}} =
             AccountView.render("show.json", %{user: user, skip_visibility_check: true})
  test "Favicon when :instance_favicons is enabled" do
  end

  test "Represent the user account for the account owner" do

@@ -173,8 +176,7 @@ test "Represent a Service(bot) account" do
      pleroma: %{
        ap_id: user.ap_id,
        background_image: nil,
        favicon:
          "https://shitposter.club/plugins/Qvitter/img/gnusocial-favicons/favicon-16x16.png",
        favicon: nil,
        confirmation_pending: false,
        tags: [],
        is_admin: false,