Merge branch 'develop' of git.pleroma.social:pleroma/pleroma into seanking/pleroma-fix_install_fe_bug
commit f26b580e80

31 changed files with 283 additions and 157 deletions
CHANGELOG.md (11 changes)

@@ -3,6 +3,17 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 
+## unreleased-patch - ???
+
+### Added
+- Rich media failure tracking (along with `:failure_backoff` option)
+
+### Fixed
+- Mastodon API: Search parameter `following` now correctly returns the followings rather than the followers
+- Mastodon API: Timelines hanging for (`number of posts with links * rich media timeout`) in the worst case.
+  Reduced to just rich media timeout.
+- Password resets no longer processed for deactivated accounts
+
 ## [2.1.0] - 2020-08-28
 
 ### Changed

@@ -412,6 +412,7 @@
     Pleroma.Web.RichMedia.Parsers.TwitterCard,
     Pleroma.Web.RichMedia.Parsers.OEmbed
   ],
+  failure_backoff: 60_000,
   ttl_setters: [Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl]
 
 config :pleroma, :media_proxy,
@@ -740,19 +741,23 @@
 config :pleroma, :pools,
   federation: [
     size: 50,
-    max_waiting: 10
+    max_waiting: 10,
+    timeout: 10_000
   ],
   media: [
     size: 50,
-    max_waiting: 10
+    max_waiting: 10,
+    timeout: 10_000
   ],
   upload: [
     size: 25,
-    max_waiting: 5
+    max_waiting: 5,
+    timeout: 15_000
  ],
   default: [
     size: 10,
-    max_waiting: 2
+    max_waiting: 2,
+    timeout: 5_000
   ]
 
 config :pleroma, :hackney_pools,

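Note: the new per-pool `timeout` values are what `pool_timeout/1` (added to `Pleroma.HTTP.AdapterHelper.Gun` further down in this commit) reads back at request time. A minimal sketch of how an operator could override a single pool, assuming a standard instance config file; the file location and the 15 s value are hypothetical:

```elixir
# Hypothetical override in an instance's secret config: raise the federation
# pool's Gun receive timeout to 15 s; the other pools keep the defaults above.
import Config

config :pleroma, :pools,
  federation: [
    size: 50,
    max_waiting: 10,
    timeout: 15_000
  ]
```
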
@@ -2385,6 +2385,13 @@
         suggestions: [
           Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl
         ]
       },
+      %{
+        key: :failure_backoff,
+        type: :integer,
+        description:
+          "Amount of milliseconds after request failure, during which the request will not be retried.",
+        suggestions: [60_000]
+      }
     ]
   },

@@ -361,6 +361,7 @@ config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Http,
 * `ignore_hosts`: list of hosts which will be ignored by the metadata parser. For example `["accounts.google.com", "xss.website"]`, defaults to `[]`.
 * `ignore_tld`: list TLDs (top-level domains) which will ignore for parse metadata. default is ["local", "localdomain", "lan"].
 * `parsers`: list of Rich Media parsers.
+* `failure_backoff`: Amount of milliseconds after request failure, during which the request will not be retried.
 
 ## HTTP server

@@ -124,7 +124,9 @@ defp download_build(frontend_info, dest) do
   url = String.replace(frontend_info["build_url"], "${ref}", frontend_info["ref"])
 
   with {:ok, %{status: 200, body: zip_body}} <-
-         Pleroma.HTTP.get(url, [], timeout: 120_000, recv_timeout: 120_000) do
+         Pleroma.HTTP.get(url, [],
+           adapter: [pool: :media, timeout: 120_000, recv_timeout: 120_000]
+         ) do
     unzip(zip_body, dest)
   else
     e -> {:error, e}

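Note: the same pattern recurs throughout this commit: HTTP calls now pass `adapter: [pool: ..., ...]` so each request is routed through a named connection pool with that pool's limits. A hedged sketch of the call shape (URL hypothetical, options as in the hunk above):

```elixir
# Hypothetical call: fetch a large archive through the :media pool; everything
# under :adapter is handed to the Tesla adapter (Gun), including the pool name.
{:ok, %Tesla.Env{status: 200, body: body}} =
  Pleroma.HTTP.get("https://example.com/frontend.zip", [],
    adapter: [pool: :media, timeout: 120_000, recv_timeout: 120_000]
  )
```
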
@@ -22,13 +22,18 @@ def named_version, do: @name <> " " <> @version
   def repository, do: @repository
 
   def user_agent do
-    case Config.get([:http, :user_agent], :default) do
-      :default ->
-        info = "#{Pleroma.Web.base_url()} <#{Config.get([:instance, :email], "")}>"
-        named_version() <> "; " <> info
+    if Process.whereis(Pleroma.Web.Endpoint) do
+      case Config.get([:http, :user_agent], :default) do
+        :default ->
+          info = "#{Pleroma.Web.base_url()} <#{Config.get([:instance, :email], "")}>"
+          named_version() <> "; " <> info
 
-      custom ->
-        custom
+        custom ->
+          custom
+      end
+    else
+      # fallback, if endpoint is not started yet
+      "Pleroma Data Loader"
     end
   end

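Note: `Process.whereis/1` returns the pid of a locally registered process, or `nil` if nothing is registered under that name, which is what makes it a cheap "has the endpoint started yet?" probe. Illustrative snippet (names hypothetical):

```elixir
# No process registered under this name yet:
Process.whereis(:my_endpoint)
#=> nil

# After something registers under the name, whereis/1 returns its pid:
{:ok, pid} = Agent.start_link(fn -> :ok end, name: :my_endpoint)
^pid = Process.whereis(:my_endpoint)
```
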
@@ -83,17 +83,25 @@ def handle_call(:remove_client, {client_pid, _}, %{key: key} = state) do
       end)
 
     {ref, state} = pop_in(state.client_monitors[client_pid])
-    Process.demonitor(ref)
-
-    timer =
-      if used_by == [] do
-        max_idle = Pleroma.Config.get([:connections_pool, :max_idle_time], 30_000)
-        Process.send_after(self(), :idle_close, max_idle)
-      else
-        nil
-      end
+    # DOWN message can receive right after `remove_client` call and cause worker to terminate
+    state =
+      if is_nil(ref) do
+        state
+      else
+        Process.demonitor(ref)
+
+        timer =
+          if used_by == [] do
+            max_idle = Pleroma.Config.get([:connections_pool, :max_idle_time], 30_000)
+            Process.send_after(self(), :idle_close, max_idle)
+          else
+            nil
+          end
+
+        %{state | timer: timer}
+      end
 
-    {:reply, :ok, %{state | timer: timer}, :hibernate}
+    {:reply, :ok, state, :hibernate}
   end
 
   @impl true

@@ -103,16 +111,21 @@ def handle_info(:idle_close, state) do
     {:stop, :normal, state}
   end
 
+  @impl true
+  def handle_info({:gun_up, _pid, _protocol}, state) do
+    {:noreply, state, :hibernate}
+  end
+
   # Gracefully shutdown if the connection got closed without any streams left
   @impl true
   def handle_info({:gun_down, _pid, _protocol, _reason, []}, state) do
     {:stop, :normal, state}
   end
 
-  # Otherwise, shutdown with an error
+  # Otherwise, wait for retry
   @impl true
-  def handle_info({:gun_down, _pid, _protocol, _reason, _killed_streams} = down_message, state) do
-    {:stop, {:error, down_message}, state}
+  def handle_info({:gun_down, _pid, _protocol, _reason, _killed_streams}, state) do
+    {:noreply, state, :hibernate}
   end
 
   @impl true

@@ -109,8 +109,9 @@ def extract_first_external_url(object, content) do
       result =
         content
         |> Floki.parse_fragment!()
-        |> Floki.filter_out("a.mention,a.hashtag,a.attachment,a[rel~=\"tag\"]")
-        |> Floki.attribute("a", "href")
+        |> Floki.find("a:not(.mention,.hashtag,.attachment,[rel~=\"tag\"])")
+        |> Enum.take(1)
+        |> Floki.attribute("href")
         |> Enum.at(0)
 
       {:commit, {:ok, result}}

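Note: the old pipeline removed unwanted anchors with `Floki.filter_out/2` and then collected every remaining `href`; the new one selects only qualifying anchors and stops after the first match. A behavior sketch on a toy fragment (values illustrative):

```elixir
# A mention link is skipped; the first plain link's href is returned.
html =
  ~s(<a class="mention" href="/users/lain">@lain</a><a href="https://example.com">x</a>)

html
|> Floki.parse_fragment!()
|> Floki.find("a:not(.mention,.hashtag,.attachment,[rel~=\"tag\"])")
|> Enum.take(1)
|> Floki.attribute("href")
|> Enum.at(0)
#=> "https://example.com"
```
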
@@ -11,7 +11,6 @@ defmodule Pleroma.HTTP.AdapterHelper do
   @type proxy_type() :: :socks4 | :socks5
   @type host() :: charlist() | :inet.ip_address()
 
-  alias Pleroma.Config
   alias Pleroma.HTTP.AdapterHelper
   require Logger
 
@@ -44,27 +43,13 @@ def maybe_add_proxy(opts, proxy), do: Keyword.put_new(opts, :proxy, proxy)
   @spec options(URI.t(), keyword()) :: keyword()
   def options(%URI{} = uri, opts \\ []) do
     @defaults
-    |> put_timeout()
     |> Keyword.merge(opts)
     |> adapter_helper().options(uri)
   end
 
-  # For Hackney, this is the time a connection can stay idle in the pool.
-  # For Gun, this is the timeout to receive a message from Gun.
-  defp put_timeout(opts) do
-    {config_key, default} =
-      if adapter() == Tesla.Adapter.Gun do
-        {:pools, Config.get([:pools, :default, :timeout], 5_000)}
-      else
-        {:hackney_pools, 10_000}
-      end
-
-    timeout = Config.get([config_key, opts[:pool], :timeout], default)
-
-    Keyword.merge(opts, timeout: timeout)
-  end
-
   @spec get_conn(URI.t(), keyword()) :: {:ok, keyword()} | {:error, atom()}
   def get_conn(uri, opts), do: adapter_helper().get_conn(uri, opts)
 
   defp adapter, do: Application.get_env(:tesla, :adapter)
 
   defp adapter_helper do

@@ -5,6 +5,7 @@
 defmodule Pleroma.HTTP.AdapterHelper.Gun do
   @behaviour Pleroma.HTTP.AdapterHelper
 
+  alias Pleroma.Config
   alias Pleroma.Gun.ConnectionPool
   alias Pleroma.HTTP.AdapterHelper
 
@@ -14,31 +15,46 @@ defmodule Pleroma.HTTP.AdapterHelper.Gun do
     connect_timeout: 5_000,
     domain_lookup_timeout: 5_000,
     tls_handshake_timeout: 5_000,
-    retry: 0,
+    retry: 1,
     retry_timeout: 1000,
     await_up_timeout: 5_000
   ]
 
+  @type pool() :: :federation | :upload | :media | :default
+
   @spec options(keyword(), URI.t()) :: keyword()
   def options(incoming_opts \\ [], %URI{} = uri) do
     proxy =
-      Pleroma.Config.get([:http, :proxy_url])
+      [:http, :proxy_url]
+      |> Config.get()
       |> AdapterHelper.format_proxy()
 
-    config_opts = Pleroma.Config.get([:http, :adapter], [])
+    config_opts = Config.get([:http, :adapter], [])
 
     @defaults
     |> Keyword.merge(config_opts)
     |> add_scheme_opts(uri)
     |> AdapterHelper.maybe_add_proxy(proxy)
     |> Keyword.merge(incoming_opts)
+    |> put_timeout()
   end
 
   defp add_scheme_opts(opts, %{scheme: "http"}), do: opts
 
   defp add_scheme_opts(opts, %{scheme: "https"}) do
-    opts
-    |> Keyword.put(:certificates_verification, true)
+    Keyword.put(opts, :certificates_verification, true)
   end
 
+  defp put_timeout(opts) do
+    # this is the timeout to receive a message from Gun
+    Keyword.put_new(opts, :timeout, pool_timeout(opts[:pool]))
+  end
+
+  @spec pool_timeout(pool()) :: non_neg_integer()
+  def pool_timeout(pool) do
+    default = Config.get([:pools, :default, :timeout], 5_000)
+
+    Config.get([:pools, pool, :timeout], default)
+  end
+
   @spec get_conn(URI.t(), keyword()) :: {:ok, keyword()} | {:error, atom()}

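Note: because `put_timeout/1` uses `Keyword.put_new/3`, an explicit `:timeout` supplied by the caller survives and the pool value only fills the gap. Quick illustration:

```elixir
# Caller-supplied timeout wins over the pool default:
Keyword.put_new([pool: :media, timeout: 30_000], :timeout, 10_000)
#=> [pool: :media, timeout: 30_000]

# No explicit timeout: the pool's configured value is filled in (prepended):
Keyword.put_new([pool: :media], :timeout, 10_000)
#=> [timeout: 10_000, pool: :media]
```
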
@@ -51,11 +67,11 @@ def get_conn(uri, opts) do
 
   @prefix Pleroma.Gun.ConnectionPool
   def limiter_setup do
-    wait = Pleroma.Config.get([:connections_pool, :connection_acquisition_wait])
-    retries = Pleroma.Config.get([:connections_pool, :connection_acquisition_retries])
+    wait = Config.get([:connections_pool, :connection_acquisition_wait])
+    retries = Config.get([:connections_pool, :connection_acquisition_retries])
 
     :pools
-    |> Pleroma.Config.get([])
+    |> Config.get([])
     |> Enum.each(fn {name, opts} ->
       max_running = Keyword.get(opts, :size, 50)
       max_waiting = Keyword.get(opts, :max_waiting, 10)
@@ -69,7 +85,6 @@ def limiter_setup do
       case result do
         :ok -> :ok
        {:error, :existing} -> :ok
-        e -> raise e
       end
     end)
 

@@ -11,6 +11,8 @@ defmodule Pleroma.HTTP.ExAws do
 
   @impl true
   def request(method, url, body \\ "", headers \\ [], http_opts \\ []) do
+    http_opts = Keyword.put_new(http_opts, :adapter, pool: :upload)
+
     case HTTP.request(method, url, body, headers, http_opts) do
       {:ok, env} ->
         {:ok, %{status_code: env.status, headers: env.headers, body: env.body}}

@@ -11,6 +11,8 @@ defmodule Pleroma.HTTP.Tzdata do
 
   @impl true
   def get(url, headers, options) do
+    options = Keyword.put_new(options, :adapter, pool: :default)
+
     with {:ok, %Tesla.Env{} = env} <- HTTP.get(url, headers, options) do
       {:ok, {env.status, env.headers, env.body}}
     end
@@ -18,6 +20,8 @@ def get(url, headers, options) do
 
   @impl true
   def head(url, headers, options) do
+    options = Keyword.put_new(options, :adapter, pool: :default)
+
     with {:ok, %Tesla.Env{} = env} <- HTTP.head(url, headers, options) do
       {:ok, {env.status, env.headers}}
     end

@@ -150,7 +150,9 @@ def get_or_update_favicon(%URI{host: host} = instance_uri) do
   defp scrape_favicon(%URI{} = instance_uri) do
     try do
       with {:ok, %Tesla.Env{body: html}} <-
-             Pleroma.HTTP.get(to_string(instance_uri), [{:Accept, "text/html"}]),
+             Pleroma.HTTP.get(to_string(instance_uri), [{"accept", "text/html"}],
+               adapter: [pool: :media]
+             ),
            favicon_rel <-
              html
              |> Floki.parse_document!()

@@ -164,12 +164,12 @@ defp make_signature(id, date) do
       date: date
     })
 
-    [{"signature", signature}]
+    {"signature", signature}
   end
 
   defp sign_fetch(headers, id, date) do
     if Pleroma.Config.get([:activitypub, :sign_object_fetches]) do
-      headers ++ make_signature(id, date)
+      [make_signature(id, date) | headers]
     else
       headers
     end
@@ -177,7 +177,7 @@ defp sign_fetch(headers, id, date) do
 
   defp maybe_date_fetch(headers, date) do
     if Pleroma.Config.get([:activitypub, :sign_object_fetches]) do
-      headers ++ [{"date", date}]
+      [{"date", date} | headers]
     else
       headers
     end

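Note: swapping `headers ++ [...]` for `[... | headers]` is a small but sound optimization: prepending to a linked list is O(1), while `++` copies the entire left-hand list. Relative order between distinct header names does not matter here. Sketch (values hypothetical):

```elixir
date = "Fri, 28 Aug 2020 16:00:00 GMT"
headers = [{"user-agent", "Pleroma"}, {"accept", "application/activity+json"}]

headers ++ [{"date", date}]   # O(length(headers)): copies the left-hand list
[{"date", date} | headers]    # O(1): adds one new head cell
```
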
@@ -46,12 +46,23 @@ def put_file(%Pleroma.Upload{} = upload) do
 
     op =
       if streaming do
-        upload.tempfile
-        |> ExAws.S3.Upload.stream_file()
-        |> ExAws.S3.upload(bucket, s3_name, [
-          {:acl, :public_read},
-          {:content_type, upload.content_type}
-        ])
+        op =
+          upload.tempfile
+          |> ExAws.S3.Upload.stream_file()
+          |> ExAws.S3.upload(bucket, s3_name, [
+            {:acl, :public_read},
+            {:content_type, upload.content_type}
+          ])
+
+        if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Gun do
+          # set s3 upload timeout to respect :upload pool timeout
+          # timeout should be slightly larger, so s3 can retry upload on fail
+          timeout = Pleroma.HTTP.AdapterHelper.Gun.pool_timeout(:upload) + 1_000
+          opts = Keyword.put(op.opts, :timeout, timeout)
+          Map.put(op, :opts, opts)
+        else
+          op
+        end
       else
         {:ok, file_data} = File.read(upload.tempfile)
 

@@ -116,7 +116,7 @@ defp trigram_rank(query, query_string) do
   end
 
   defp base_query(_user, false), do: User
-  defp base_query(user, true), do: User.get_followers_query(user)
+  defp base_query(user, true), do: User.get_friends_query(user)
 
   defp filter_invisible_users(query) do
     from(q in query, where: q.invisible == false)

@@ -114,7 +114,7 @@ def add_to_list_operation do
       description: "Add accounts to the given list.",
       operationId: "ListController.add_to_list",
       parameters: [id_param()],
-      requestBody: add_remove_accounts_request(),
+      requestBody: add_remove_accounts_request(true),
       security: [%{"oAuth" => ["write:lists"]}],
       responses: %{
         200 => Operation.response("Empty object", "application/json", %Schema{type: :object})
@@ -127,8 +127,16 @@ def remove_from_list_operation do
       tags: ["Lists"],
       summary: "Remove accounts from list",
       operationId: "ListController.remove_from_list",
-      parameters: [id_param()],
-      requestBody: add_remove_accounts_request(),
+      parameters: [
+        id_param(),
+        Operation.parameter(
+          :account_ids,
+          :query,
+          %Schema{type: :array, items: %Schema{type: :string}},
+          "Array of account IDs"
+        )
+      ],
+      requestBody: add_remove_accounts_request(false),
       security: [%{"oAuth" => ["write:lists"]}],
       responses: %{
         200 => Operation.response("Empty object", "application/json", %Schema{type: :object})
@@ -171,7 +179,7 @@ defp create_update_request do
     )
   end
 
-  defp add_remove_accounts_request do
+  defp add_remove_accounts_request(required) when is_boolean(required) do
     request_body(
       "Parameters",
       %Schema{
@@ -180,9 +188,9 @@ defp add_remove_accounts_request do
         properties: %{
           account_ids: %Schema{type: :array, description: "Array of account IDs", items: FlakeID}
         },
-        required: [:account_ids]
+        required: required && [:account_ids]
       },
-      required: true
+      required: required
     )
   end
 end

@@ -59,17 +59,11 @@ def logout(conn, _) do
   def password_reset(conn, params) do
     nickname_or_email = params["email"] || params["nickname"]
 
-    with {:ok, _} <- TwitterAPI.password_reset(nickname_or_email) do
-      conn
-      |> put_status(:no_content)
-      |> json("")
-    else
-      {:error, "unknown user"} ->
-        send_resp(conn, :not_found, "")
+    TwitterAPI.password_reset(nickname_or_email)
 
-      {:error, _} ->
-        send_resp(conn, :bad_request, "")
-    end
+    conn
+    |> put_status(:no_content)
+    |> json("")
   end
 
   defp local_mastodon_root_path(conn) do

@@ -74,7 +74,7 @@ def add_to_list(%{assigns: %{list: list}, body_params: %{account_ids: account_id
 
   # DELETE /api/v1/lists/:id/accounts
   def remove_from_list(
-        %{assigns: %{list: list}, body_params: %{account_ids: account_ids}} = conn,
+        %{assigns: %{list: list}, params: %{account_ids: account_ids}} = conn,
         _
       ) do
     Enum.each(account_ids, fn account_id ->
@@ -86,6 +86,10 @@ def remove_from_list(
     json(conn, %{})
   end
 
+  def remove_from_list(%{body_params: params} = conn, _) do
+    remove_from_list(%{conn | params: params}, %{})
+  end
+
   defp list_by_id_and_user(%{assigns: %{user: user}, params: %{id: id}} = conn, _) do
     case Pleroma.List.get(id, user) do
       %Pleroma.List{} = list -> assign(conn, :list, list)

@@ -23,6 +23,17 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do
 
   import Pleroma.Web.ActivityPub.Visibility, only: [get_visibility: 1, visible_for_user?: 2]
 
+  # This is a naive way to do this, just spawning a process per activity
+  # to fetch the preview. However it should be fine considering
+  # pagination is restricted to 40 activities at a time
+  defp fetch_rich_media_for_activities(activities) do
+    Enum.each(activities, fn activity ->
+      spawn(fn ->
+        Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity)
+      end)
+    end)
+  end
+
   # TODO: Add cached version.
   defp get_replied_to_activities([]), do: %{}
 
@@ -80,6 +91,11 @@ def render("index.json", opts) do
 
     # To do: check AdminAPIControllerTest on the reasons behind nil activities in the list
     activities = Enum.filter(opts.activities, & &1)
+
+    # Start fetching rich media before doing anything else, so that later calls to get the cards
+    # only block for timeout in the worst case, as opposed to
+    # length(activities_with_links) * timeout
+    fetch_rich_media_for_activities(activities)
     replied_to_activities = get_replied_to_activities(activities)
 
     parent_activities =

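Note: `spawn/1` starts an unlinked, unsupervised process, so a crashing prefetch cannot take the render down with it; results are shared through the rich media cache rather than sent back as messages. Sketch of the idea, using the helper from this codebase:

```elixir
# Fire-and-forget warm-up: each fetch populates :rich_media_cache as a side
# effect; the later sequential card rendering then hits a warm cache and blocks
# for at most one rich-media timeout overall.
Enum.each(activities, fn activity ->
  spawn(fn -> Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity) end)
end)
```
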
@@ -96,6 +96,6 @@ def rich_media_get(url) do
       @rich_media_options
     end
 
-    Pleroma.HTTP.get(url, headers, options)
+    Pleroma.HTTP.get(url, headers, adapter: options)
   end
 end

@@ -3,6 +3,8 @@
 # SPDX-License-Identifier: AGPL-3.0-only
 
 defmodule Pleroma.Web.RichMedia.Parser do
+  require Logger
+
   defp parsers do
     Pleroma.Config.get([:rich_media, :parsers])
   end
@@ -10,17 +12,29 @@ defp parsers do
   def parse(nil), do: {:error, "No URL provided"}
 
   if Pleroma.Config.get(:env) == :test do
     @spec parse(String.t()) :: {:ok, map()} | {:error, any()}
     def parse(url), do: parse_url(url)
   else
     @spec parse(String.t()) :: {:ok, map()} | {:error, any()}
     def parse(url) do
-      try do
-        Cachex.fetch!(:rich_media_cache, url, fn _ ->
-          {:commit, parse_url(url)}
-        end)
-        |> set_ttl_based_on_image(url)
-      rescue
-        e ->
-          {:error, "Cachex error: #{inspect(e)}"}
+      with {:ok, data} <- get_cached_or_parse(url),
+           {:ok, _} <- set_ttl_based_on_image(data, url) do
+        {:ok, data}
+      else
+        error ->
+          Logger.error(fn -> "Rich media error: #{inspect(error)}" end)
       end
     end
+
+    defp get_cached_or_parse(url) do
+      case Cachex.fetch!(:rich_media_cache, url, fn _ -> {:commit, parse_url(url)} end) do
+        {:ok, _data} = res ->
+          res
+
+        {:error, _} = e ->
+          ttl = Pleroma.Config.get([:rich_media, :failure_backoff], 60_000)
+          Cachex.expire(:rich_media_cache, url, ttl)
+          e
+      end
+    end
   end

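Note: how the failure backoff plays out, assuming the `/error` mock added to the tests below: the first parse fails and the error value itself is cached, then `Cachex.expire/3` gives that entry a `:failure_backoff` TTL, so repeat lookups are served from the cache without a network hit until the entry lapses. Illustrative timeline (URL and return shape taken from the new test):

```elixir
{:error, :overload} = Parser.parse("http://example.com/error") # fetch fails; error cached
{:error, :overload} = Parser.parse("http://example.com/error") # within backoff: cache hit, no HTTP call
# After :failure_backoff (60_000 ms by default) the cached error expires and
# the next Parser.parse/1 performs a fresh fetch.
```
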
@@ -47,19 +61,26 @@ def ttl(data, url) do
       config :pleroma, :rich_media,
         ttl_setters: [MyModule]
   """
-  def set_ttl_based_on_image({:ok, data}, url) do
-    with {:ok, nil} <- Cachex.ttl(:rich_media_cache, url),
-         ttl when is_number(ttl) <- get_ttl_from_image(data, url) do
-      Cachex.expire_at(:rich_media_cache, url, ttl * 1000)
-      {:ok, data}
-    else
+  @spec set_ttl_based_on_image(map(), String.t()) ::
+          {:ok, Integer.t() | :noop} | {:error, :no_key}
+  def set_ttl_based_on_image(data, url) do
+    case get_ttl_from_image(data, url) do
+      {:ok, ttl} when is_number(ttl) ->
+        ttl = ttl * 1000
+
+        case Cachex.expire_at(:rich_media_cache, url, ttl) do
+          {:ok, true} -> {:ok, ttl}
+          {:ok, false} -> {:error, :no_key}
+        end
+
       _ ->
-        {:ok, data}
+        {:ok, :noop}
     end
   end
 
   defp get_ttl_from_image(data, url) do
-    Pleroma.Config.get([:rich_media, :ttl_setters])
+    [:rich_media, :ttl_setters]
+    |> Pleroma.Config.get()
     |> Enum.reduce({:ok, nil}, fn
       module, {:ok, _ttl} ->
         module.ttl(data, url)

@@ -70,23 +91,16 @@ defp get_ttl_from_image(data, url) do
   end
 
   defp parse_url(url) do
-    try do
-      {:ok, %Tesla.Env{body: html}} = Pleroma.Web.RichMedia.Helpers.rich_media_get(url)
-
+    with {:ok, %Tesla.Env{body: html}} <- Pleroma.Web.RichMedia.Helpers.rich_media_get(url),
+         {:ok, html} <- Floki.parse_document(html) do
       html
-      |> parse_html()
       |> maybe_parse()
       |> Map.put("url", url)
       |> clean_parsed_data()
       |> check_parsed_data()
-    rescue
-      e ->
-        {:error, "Parsing error: #{inspect(e)} #{inspect(__STACKTRACE__)}"}
     end
   end
 
-  defp parse_html(html), do: Floki.parse_document!(html)
-
   defp maybe_parse(html) do
     Enum.reduce_while(parsers(), %{}, fn parser, acc ->
       case parser.parse(html, acc) do

@@ -10,20 +10,15 @@ def ttl(data, _url) do
       |> parse_query_params()
       |> format_query_params()
       |> get_expiration_timestamp()
     else
       {:error, "Not aws signed url #{inspect(image)}"}
     end
   end
 
-  defp is_aws_signed_url(""), do: nil
-  defp is_aws_signed_url(nil), do: nil
-
-  defp is_aws_signed_url(image) when is_binary(image) do
+  defp is_aws_signed_url(image) when is_binary(image) and image != "" do
     %URI{host: host, query: query} = URI.parse(image)
 
-    if String.contains?(host, "amazonaws.com") and String.contains?(query, "X-Amz-Expires") do
-      image
-    else
-      nil
-    end
+    String.contains?(host, "amazonaws.com") and String.contains?(query, "X-Amz-Expires")
   end
 
   defp is_aws_signed_url(_), do: nil
@@ -46,6 +41,6 @@ defp get_expiration_timestamp(params) when is_map(params) do
     |> Map.get("X-Amz-Date")
     |> Timex.parse("{ISO:Basic:Z}")
 
-    Timex.to_unix(date) + String.to_integer(Map.get(params, "X-Amz-Expires"))
+    {:ok, Timex.to_unix(date) + String.to_integer(Map.get(params, "X-Amz-Expires"))}
   end
 end

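Note: wrapping the result in `{:ok, _}` lets `ttl/2` compose with the new `set_ttl_based_on_image/2` contract above. A worked example of the timestamp arithmetic (signing time and expiry window hypothetical):

```elixir
# A URL signed at 2020-08-28T16:00:00Z with X-Amz-Expires=600 expires 600 s later.
{:ok, date} = Timex.parse("20200828T160000Z", "{ISO:Basic:Z}")

Timex.to_unix(date) + String.to_integer("600")
#=> 1_598_631_000 (that is, 1_598_630_400 + 600)
```
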
@@ -72,7 +72,7 @@ defp maybe_notify_admins(%User{} = account) do
 
   def password_reset(nickname_or_email) do
     with true <- is_binary(nickname_or_email),
-         %User{local: true, email: email} = user when is_binary(email) <-
+         %User{local: true, email: email, deactivated: false} = user when is_binary(email) <-
           User.get_by_nickname_or_email(nickname_or_email),
          {:ok, token_record} <- Pleroma.PasswordResetToken.create_token(user) do
       user
@@ -81,17 +81,8 @@ def password_reset(nickname_or_email) do
 
       {:ok, :enqueued}
     else
-      false ->
-        {:error, "bad user identifier"}
-
-      %User{local: true, email: nil} ->
+      _ ->
         {:ok, :noop}
-
-      %User{local: false} ->
-        {:error, "remote user"}
-
-      nil ->
-        {:error, "unknown user"}
     end
   end
 

@@ -136,12 +136,12 @@ def get_template_from_xml(body) do
 
   def find_lrdd_template(domain) do
     with {:ok, %{status: status, body: body}} when status in 200..299 <-
-           HTTP.get("http://#{domain}/.well-known/host-meta", []) do
+           HTTP.get("http://#{domain}/.well-known/host-meta") do
       get_template_from_xml(body)
     else
       _ ->
         with {:ok, %{body: body, status: status}} when status in 200..299 <-
-               HTTP.get("https://#{domain}/.well-known/host-meta", []) do
+               HTTP.get("https://#{domain}/.well-known/host-meta") do
           get_template_from_xml(body)
         else
           e -> {:error, "Can't find LRDD template: #{inspect(e)}"}

@@ -1350,11 +1350,11 @@ def get("https://relay.mastodon.host/actor", _, _, _) do
     {:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/relay/relay.json")}}
   end
 
-  def get("http://localhost:4001/", _, "", Accept: "text/html") do
+  def get("http://localhost:4001/", _, "", [{"accept", "text/html"}]) do
     {:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/tesla_mock/7369654.html")}}
   end
 
-  def get("https://osada.macgirvin.com/", _, "", Accept: "text/html") do
+  def get("https://osada.macgirvin.com/", _, "", [{"accept", "text/html"}]) do
     {:ok,
      %Tesla.Env{
        status: 200,

@@ -109,22 +109,22 @@ test "finds users, boosting ranks of friends and followers" do
       Enum.map(User.search("doe", resolve: false, for_user: u1), & &1.id) == []
   end
 
-  test "finds followers of user by partial name" do
-    u1 = insert(:user)
-    u2 = insert(:user, %{name: "Jimi"})
-    follower_jimi = insert(:user, %{name: "Jimi Hendrix"})
-    follower_lizz = insert(:user, %{name: "Lizz Wright"})
-    friend = insert(:user, %{name: "Jimi"})
+  test "finds followings of user by partial name" do
+    lizz = insert(:user, %{name: "Lizz"})
+    jimi = insert(:user, %{name: "Jimi"})
+    following_lizz = insert(:user, %{name: "Jimi Hendrix"})
+    following_jimi = insert(:user, %{name: "Lizz Wright"})
+    follower_lizz = insert(:user, %{name: "Jimi"})
 
-    {:ok, follower_jimi} = User.follow(follower_jimi, u1)
-    {:ok, _follower_lizz} = User.follow(follower_lizz, u2)
-    {:ok, u1} = User.follow(u1, friend)
+    {:ok, lizz} = User.follow(lizz, following_lizz)
+    {:ok, _jimi} = User.follow(jimi, following_jimi)
+    {:ok, _follower_lizz} = User.follow(follower_lizz, lizz)
 
-    assert Enum.map(User.search("jimi", following: true, for_user: u1), & &1.id) == [
-             follower_jimi.id
+    assert Enum.map(User.search("jimi", following: true, for_user: lizz), & &1.id) == [
+             following_lizz.id
           ]
 
-    assert User.search("lizz", following: true, for_user: u1) == []
+    assert User.search("lizz", following: true, for_user: lizz) == []
   end
 
   test "find local and remote users for authenticated users" do

@@ -122,17 +122,27 @@ test "it doesn't fail when a user has no email", %{conn: conn} do
     {:ok, user: user}
   end
 
-  test "it returns 404 when user is not found", %{conn: conn, user: user} do
+  test "it returns 204 when user is not found", %{conn: conn, user: user} do
     conn = post(conn, "/auth/password?email=nonexisting_#{user.email}")
-    assert conn.status == 404
-    assert conn.resp_body == ""
+
+    assert conn
+           |> json_response(:no_content)
   end
 
-  test "it returns 400 when user is not local", %{conn: conn, user: user} do
+  test "it returns 204 when user is not local", %{conn: conn, user: user} do
     {:ok, user} = Repo.update(Ecto.Changeset.change(user, local: false))
     conn = post(conn, "/auth/password?email=#{user.email}")
-    assert conn.status == 400
-    assert conn.resp_body == ""
+
+    assert conn
+           |> json_response(:no_content)
+  end
+
+  test "it returns 204 when user is deactivated", %{conn: conn, user: user} do
+    {:ok, user} = Repo.update(Ecto.Changeset.change(user, deactivated: true, local: true))
+    conn = post(conn, "/auth/password?email=#{user.email}")
+
+    assert conn
+           |> json_response(:no_content)
   end
 end

@@ -67,7 +67,7 @@ test "adding users to a list" do
     assert following == [other_user.follower_address]
   end
 
-  test "removing users from a list" do
+  test "removing users from a list, body params" do
     %{user: user, conn: conn} = oauth_access(["write:lists"])
     other_user = insert(:user)
     third_user = insert(:user)
@@ -85,6 +85,24 @@ test "removing users from a list" do
     assert following == [third_user.follower_address]
   end
 
+  test "removing users from a list, query params" do
+    %{user: user, conn: conn} = oauth_access(["write:lists"])
+    other_user = insert(:user)
+    third_user = insert(:user)
+    {:ok, list} = Pleroma.List.create("name", user)
+    {:ok, list} = Pleroma.List.follow(list, other_user)
+    {:ok, list} = Pleroma.List.follow(list, third_user)
+
+    assert %{} ==
+             conn
+             |> put_req_header("content-type", "application/json")
+             |> delete("/api/v1/lists/#{list.id}/accounts?account_ids[]=#{other_user.id}")
+             |> json_response_and_validate_schema(:ok)
+
+    %Pleroma.List{following: following} = Pleroma.List.get(list.id, user)
+    assert following == [third_user.follower_address]
+  end
+
   test "listing users in a list" do
     %{user: user, conn: conn} = oauth_access(["read:lists"])
     other_user = insert(:user)

@@ -21,7 +21,7 @@ test "s3 signed url is parsed correct for expiration time" do
     expire_time =
       Timex.parse!(timestamp, "{ISO:Basic:Z}") |> Timex.to_unix() |> Kernel.+(valid_till)
 
-    assert expire_time == Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl.ttl(metadata, url)
+    assert {:ok, expire_time} == Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl.ttl(metadata, url)
   end
 
   test "s3 signed url is parsed and correct ttl is set for rich media" do
@@ -55,7 +55,7 @@ test "s3 signed url is parsed and correct ttl is set for rich media" do
 
     Cachex.put(:rich_media_cache, url, metadata)
 
-    Pleroma.Web.RichMedia.Parser.set_ttl_based_on_image({:ok, metadata}, url)
+    Pleroma.Web.RichMedia.Parser.set_ttl_based_on_image(metadata, url)
 
     {:ok, cache_ttl} = Cachex.ttl(:rich_media_cache, url)

@@ -5,6 +5,8 @@
 defmodule Pleroma.Web.RichMedia.ParserTest do
   use ExUnit.Case, async: true
 
+  alias Pleroma.Web.RichMedia.Parser
+
   setup do
     Tesla.Mock.mock(fn
       %{
@@ -48,23 +50,29 @@ defmodule Pleroma.Web.RichMedia.ParserTest do
 
       %{method: :get, url: "http://example.com/empty"} ->
         %Tesla.Env{status: 200, body: "hello"}
 
       %{method: :get, url: "http://example.com/malformed"} ->
         %Tesla.Env{status: 200, body: File.read!("test/fixtures/rich_media/malformed-data.html")}
+
+      %{method: :get, url: "http://example.com/error"} ->
+        {:error, :overload}
     end)
 
     :ok
   end
 
   test "returns error when no metadata present" do
-    assert {:error, _} = Pleroma.Web.RichMedia.Parser.parse("http://example.com/empty")
+    assert {:error, _} = Parser.parse("http://example.com/empty")
   end
 
   test "doesn't just add a title" do
-    assert Pleroma.Web.RichMedia.Parser.parse("http://example.com/non-ogp") ==
+    assert Parser.parse("http://example.com/non-ogp") ==
              {:error,
              "Found metadata was invalid or incomplete: %{\"url\" => \"http://example.com/non-ogp\"}"}
   end
 
   test "parses ogp" do
-    assert Pleroma.Web.RichMedia.Parser.parse("http://example.com/ogp") ==
+    assert Parser.parse("http://example.com/ogp") ==
              {:ok,
               %{
                 "image" => "http://ia.media-imdb.com/images/rock.jpg",
@@ -77,7 +85,7 @@ test "parses ogp" do
   end
 
   test "falls back to <title> when ogp:title is missing" do
-    assert Pleroma.Web.RichMedia.Parser.parse("http://example.com/ogp-missing-title") ==
+    assert Parser.parse("http://example.com/ogp-missing-title") ==
             {:ok,
               %{
                 "image" => "http://ia.media-imdb.com/images/rock.jpg",
@@ -90,7 +98,7 @@ test "falls back to <title> when ogp:title is missing" do
   end
 
   test "parses twitter card" do
-    assert Pleroma.Web.RichMedia.Parser.parse("http://example.com/twitter-card") ==
+    assert Parser.parse("http://example.com/twitter-card") ==
             {:ok,
               %{
                 "card" => "summary",
@@ -103,7 +111,7 @@ test "parses twitter card" do
   end
 
   test "parses OEmbed" do
-    assert Pleroma.Web.RichMedia.Parser.parse("http://example.com/oembed") ==
+    assert Parser.parse("http://example.com/oembed") ==
             {:ok,
               %{
                 "author_name" => "bees",
@@ -132,6 +140,10 @@ test "parses OEmbed" do
   end
 
   test "rejects invalid OGP data" do
-    assert {:error, _} = Pleroma.Web.RichMedia.Parser.parse("http://example.com/malformed")
+    assert {:error, _} = Parser.parse("http://example.com/malformed")
   end
+
+  test "returns error if getting page was not successful" do
+    assert {:error, :overload} = Parser.parse("http://example.com/error")
+  end
 end