Merge branch 'develop' into media-preview-proxy

Mark Felder 2020-09-03 10:34:06 -05:00
commit 85446cc30c
60 changed files with 557 additions and 191 deletions

View File

@ -3,6 +3,17 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
## unreleased-patch - ???
### Added
- Rich media failure tracking (along with `:failure_backoff` option)
### Fixed
- Mastodon API: Search parameter `following` now correctly returns the accounts the user follows rather than their followers
- Mastodon API: Timelines hanging for (`number of posts with links * rich media timeout`) in the worst case; now reduced to just the rich media timeout.
- Password resets no longer processed for deactivated accounts
## [2.1.0] - 2020-08-28
### Changed

View File

@ -412,6 +412,7 @@ config :pleroma, :rich_media,
Pleroma.Web.RichMedia.Parsers.TwitterCard,
Pleroma.Web.RichMedia.Parsers.OEmbed
],
failure_backoff: 60_000,
ttl_setters: [Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl]
config :pleroma, :media_proxy,
@ -752,19 +753,23 @@ config :pleroma, :connections_pool,
config :pleroma, :pools,
federation: [
size: 50,
max_waiting: 10
max_waiting: 10,
timeout: 10_000
],
media: [
size: 50,
max_waiting: 10
max_waiting: 10,
timeout: 10_000
],
upload: [
size: 25,
max_waiting: 5
max_waiting: 5,
timeout: 15_000
],
default: [
size: 10,
max_waiting: 2
max_waiting: 2,
timeout: 5_000
]
config :pleroma, :hackney_pools,

View File

@ -2441,6 +2441,13 @@ config :pleroma, :config_description, [
suggestions: [
Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl
]
},
%{
key: :failure_backoff,
type: :integer,
description:
"Amount of milliseconds after request failure, during which the request will not be retried.",
suggestions: [60_000]
}
]
},

View File

@ -361,6 +361,7 @@ config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Http,
* `ignore_hosts`: list of hosts which will be ignored by the metadata parser. For example `["accounts.google.com", "xss.website"]`. Defaults to `[]`.
* `ignore_tld`: list of TLDs (top-level domains) which will be ignored when parsing metadata. Defaults to `["local", "localdomain", "lan"]`.
* `parsers`: list of Rich Media parsers.
* `failure_backoff`: Number of milliseconds after a request failure during which the request will not be retried.
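For reference, a sketch of how these options fit together in a config file; the `parsers`, `failure_backoff`, and `ttl_setters` values mirror the defaults shown in the `config/config.exs` hunk above, while `ignore_hosts` reuses the example hosts from its description:

```elixir
config :pleroma, :rich_media,
  ignore_hosts: ["accounts.google.com", "xss.website"],
  ignore_tld: ["local", "localdomain", "lan"],
  parsers: [
    Pleroma.Web.RichMedia.Parsers.TwitterCard,
    Pleroma.Web.RichMedia.Parsers.OEmbed
  ],
  # do not retry a failed URL for one minute
  failure_backoff: 60_000,
  ttl_setters: [Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl]
```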
## HTTP server

View File

@ -69,7 +69,7 @@ defmodule Mix.Tasks.Pleroma.Frontend do
fe_label = "#{frontend} (#{ref})"
tmp_dir = Path.join(dest, "tmp")
tmp_dir = Path.join([instance_static_dir, "frontends", "tmp"])
with {_, :ok} <-
{:download_or_unzip, download_or_unzip(frontend_info, tmp_dir, options[:file])},
@ -124,7 +124,9 @@ defmodule Mix.Tasks.Pleroma.Frontend do
url = String.replace(frontend_info["build_url"], "${ref}", frontend_info["ref"])
with {:ok, %{status: 200, body: zip_body}} <-
Pleroma.HTTP.get(url, [], timeout: 120_000, recv_timeout: 120_000) do
Pleroma.HTTP.get(url, [],
adapter: [pool: :media, timeout: 120_000, recv_timeout: 120_000]
) do
unzip(zip_body, dest)
else
e -> {:error, e}
@ -133,6 +135,7 @@ defmodule Mix.Tasks.Pleroma.Frontend do
defp install_frontend(frontend_info, source, dest) do
from = frontend_info["build_dir"] || "dist"
File.rm_rf!(dest)
File.mkdir_p!(dest)
File.cp_r!(Path.join([source, from]), dest)
:ok

View File

@ -22,13 +22,18 @@ defmodule Pleroma.Application do
def repository, do: @repository
def user_agent do
case Config.get([:http, :user_agent], :default) do
:default ->
info = "#{Pleroma.Web.base_url()} <#{Config.get([:instance, :email], "")}>"
named_version() <> "; " <> info
if Process.whereis(Pleroma.Web.Endpoint) do
case Config.get([:http, :user_agent], :default) do
:default ->
info = "#{Pleroma.Web.base_url()} <#{Config.get([:instance, :email], "")}>"
named_version() <> "; " <> info
custom ->
custom
custom ->
custom
end
else
# fallback, if endpoint is not started yet
"Pleroma Data Loader"
end
end
@ -39,6 +44,9 @@ defmodule Pleroma.Application do
# every time the application is restarted, so we disable module
# conflicts at runtime
Code.compiler_options(ignore_module_conflict: true)
# Disable warnings_as_errors at runtime, as it breaks Phoenix live reload
# due to protocol consolidation warnings
Code.compiler_options(warnings_as_errors: false)
Pleroma.Telemetry.Logger.attach()
Config.Holder.save_default()
Pleroma.HTML.compile_scrubbers()

View File

@ -0,0 +1,34 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.Emoji do
use Ecto.Type
def type, do: :map
def cast(data) when is_map(data) do
has_invalid_emoji? =
Enum.find(data, fn
{name, uri} when is_binary(name) and is_binary(uri) ->
# based on ObjectValidators.Uri.cast()
case URI.parse(uri) do
%URI{host: nil} -> true
%URI{host: ""} -> true
%URI{scheme: scheme} when scheme in ["https", "http"] -> false
_ -> true
end
{_name, _uri} ->
true
end)
if has_invalid_emoji?, do: :error, else: {:ok, data}
end
def cast(_data), do: :error
def dump(data), do: {:ok, data}
def load(data), do: {:ok, data}
end

View File

@ -83,17 +83,25 @@ defmodule Pleroma.Gun.ConnectionPool.Worker do
end)
{ref, state} = pop_in(state.client_monitors[client_pid])
Process.demonitor(ref)
timer =
if used_by == [] do
max_idle = Pleroma.Config.get([:connections_pool, :max_idle_time], 30_000)
Process.send_after(self(), :idle_close, max_idle)
# A DOWN message can be received right after the `remove_client` call and cause the worker to terminate
state =
if is_nil(ref) do
state
else
nil
Process.demonitor(ref)
timer =
if used_by == [] do
max_idle = Pleroma.Config.get([:connections_pool, :max_idle_time], 30_000)
Process.send_after(self(), :idle_close, max_idle)
else
nil
end
%{state | timer: timer}
end
{:reply, :ok, %{state | timer: timer}, :hibernate}
{:reply, :ok, state, :hibernate}
end
@impl true
@ -103,16 +111,21 @@ defmodule Pleroma.Gun.ConnectionPool.Worker do
{:stop, :normal, state}
end
@impl true
def handle_info({:gun_up, _pid, _protocol}, state) do
{:noreply, state, :hibernate}
end
# Gracefully shut down if the connection was closed without any streams left
@impl true
def handle_info({:gun_down, _pid, _protocol, _reason, []}, state) do
{:stop, :normal, state}
end
# Otherwise, shutdown with an error
# Otherwise, wait for retry
@impl true
def handle_info({:gun_down, _pid, _protocol, _reason, _killed_streams} = down_message, state) do
{:stop, {:error, down_message}, state}
def handle_info({:gun_down, _pid, _protocol, _reason, _killed_streams}, state) do
{:noreply, state, :hibernate}
end
@impl true

View File

@ -109,8 +109,9 @@ defmodule Pleroma.HTML do
result =
content
|> Floki.parse_fragment!()
|> Floki.filter_out("a.mention,a.hashtag,a.attachment,a[rel~=\"tag\"]")
|> Floki.attribute("a", "href")
|> Floki.find("a:not(.mention,.hashtag,.attachment,[rel~=\"tag\"])")
|> Enum.take(1)
|> Floki.attribute("href")
|> Enum.at(0)
{:commit, {:ok, result}}

View File

@ -11,7 +11,6 @@ defmodule Pleroma.HTTP.AdapterHelper do
@type proxy_type() :: :socks4 | :socks5
@type host() :: charlist() | :inet.ip_address()
alias Pleroma.Config
alias Pleroma.HTTP.AdapterHelper
require Logger
@ -44,27 +43,13 @@ defmodule Pleroma.HTTP.AdapterHelper do
@spec options(URI.t(), keyword()) :: keyword()
def options(%URI{} = uri, opts \\ []) do
@defaults
|> put_timeout()
|> Keyword.merge(opts)
|> adapter_helper().options(uri)
end
# For Hackney, this is the time a connection can stay idle in the pool.
# For Gun, this is the timeout to receive a message from Gun.
defp put_timeout(opts) do
{config_key, default} =
if adapter() == Tesla.Adapter.Gun do
{:pools, Config.get([:pools, :default, :timeout], 5_000)}
else
{:hackney_pools, 10_000}
end
timeout = Config.get([config_key, opts[:pool], :timeout], default)
Keyword.merge(opts, timeout: timeout)
end
@spec get_conn(URI.t(), keyword()) :: {:ok, keyword()} | {:error, atom()}
def get_conn(uri, opts), do: adapter_helper().get_conn(uri, opts)
defp adapter, do: Application.get_env(:tesla, :adapter)
defp adapter_helper do

View File

@ -5,6 +5,7 @@
defmodule Pleroma.HTTP.AdapterHelper.Gun do
@behaviour Pleroma.HTTP.AdapterHelper
alias Pleroma.Config
alias Pleroma.Gun.ConnectionPool
alias Pleroma.HTTP.AdapterHelper
@ -14,31 +15,46 @@ defmodule Pleroma.HTTP.AdapterHelper.Gun do
connect_timeout: 5_000,
domain_lookup_timeout: 5_000,
tls_handshake_timeout: 5_000,
retry: 0,
retry: 1,
retry_timeout: 1000,
await_up_timeout: 5_000
]
@type pool() :: :federation | :upload | :media | :default
@spec options(keyword(), URI.t()) :: keyword()
def options(incoming_opts \\ [], %URI{} = uri) do
proxy =
Pleroma.Config.get([:http, :proxy_url])
[:http, :proxy_url]
|> Config.get()
|> AdapterHelper.format_proxy()
config_opts = Pleroma.Config.get([:http, :adapter], [])
config_opts = Config.get([:http, :adapter], [])
@defaults
|> Keyword.merge(config_opts)
|> add_scheme_opts(uri)
|> AdapterHelper.maybe_add_proxy(proxy)
|> Keyword.merge(incoming_opts)
|> put_timeout()
end
defp add_scheme_opts(opts, %{scheme: "http"}), do: opts
defp add_scheme_opts(opts, %{scheme: "https"}) do
opts
|> Keyword.put(:certificates_verification, true)
Keyword.put(opts, :certificates_verification, true)
end
defp put_timeout(opts) do
# this is the timeout to receive a message from Gun
Keyword.put_new(opts, :timeout, pool_timeout(opts[:pool]))
end
@spec pool_timeout(pool()) :: non_neg_integer()
def pool_timeout(pool) do
default = Config.get([:pools, :default, :timeout], 5_000)
Config.get([:pools, pool, :timeout], default)
end
@spec get_conn(URI.t(), keyword()) :: {:ok, keyword()} | {:error, atom()}
@ -51,11 +67,11 @@ defmodule Pleroma.HTTP.AdapterHelper.Gun do
@prefix Pleroma.Gun.ConnectionPool
def limiter_setup do
wait = Pleroma.Config.get([:connections_pool, :connection_acquisition_wait])
retries = Pleroma.Config.get([:connections_pool, :connection_acquisition_retries])
wait = Config.get([:connections_pool, :connection_acquisition_wait])
retries = Config.get([:connections_pool, :connection_acquisition_retries])
:pools
|> Pleroma.Config.get([])
|> Config.get([])
|> Enum.each(fn {name, opts} ->
max_running = Keyword.get(opts, :size, 50)
max_waiting = Keyword.get(opts, :max_waiting, 10)
@ -69,7 +85,6 @@ defmodule Pleroma.HTTP.AdapterHelper.Gun do
case result do
:ok -> :ok
{:error, :existing} -> :ok
e -> raise e
end
end)

View File

@ -11,6 +11,8 @@ defmodule Pleroma.HTTP.ExAws do
@impl true
def request(method, url, body \\ "", headers \\ [], http_opts \\ []) do
http_opts = Keyword.put_new(http_opts, :adapter, pool: :upload)
case HTTP.request(method, url, body, headers, http_opts) do
{:ok, env} ->
{:ok, %{status_code: env.status, headers: env.headers, body: env.body}}

View File

@ -11,6 +11,8 @@ defmodule Pleroma.HTTP.Tzdata do
@impl true
def get(url, headers, options) do
options = Keyword.put_new(options, :adapter, pool: :default)
with {:ok, %Tesla.Env{} = env} <- HTTP.get(url, headers, options) do
{:ok, {env.status, env.headers, env.body}}
end
@ -18,6 +20,8 @@ defmodule Pleroma.HTTP.Tzdata do
@impl true
def head(url, headers, options) do
options = Keyword.put_new(options, :adapter, pool: :default)
with {:ok, %Tesla.Env{} = env} <- HTTP.head(url, headers, options) do
{:ok, {env.status, env.headers}}
end

View File

@ -150,7 +150,9 @@ defmodule Pleroma.Instances.Instance do
defp scrape_favicon(%URI{} = instance_uri) do
try do
with {:ok, %Tesla.Env{body: html}} <-
Pleroma.HTTP.get(to_string(instance_uri), [{:Accept, "text/html"}]),
Pleroma.HTTP.get(to_string(instance_uri), [{"accept", "text/html"}],
adapter: [pool: :media]
),
favicon_rel <-
html
|> Floki.parse_document!()

View File

@ -648,4 +648,16 @@ defmodule Pleroma.Notification do
)
|> Repo.one()
end
@spec mark_context_as_read(User.t(), String.t()) :: {integer(), nil | [term()]}
def mark_context_as_read(%User{id: id}, context) do
from(
n in Notification,
join: a in assoc(n, :activity),
where: n.user_id == ^id,
where: n.seen == false,
where: fragment("?->>'context'", a.data) == ^context
)
|> Repo.update_all(set: [seen: true])
end
end

View File

@ -36,8 +36,7 @@ defmodule Pleroma.Object.Fetcher do
defp reinject_object(%Object{data: %{"type" => "Question"}} = object, new_data) do
Logger.debug("Reinjecting object #{new_data["id"]}")
with new_data <- Transmogrifier.fix_object(new_data),
data <- maybe_reinject_internal_fields(object, new_data),
with data <- maybe_reinject_internal_fields(object, new_data),
{:ok, data, _} <- ObjectValidator.validate(data, %{}),
changeset <- Object.change(object, %{data: data}),
changeset <- touch_changeset(changeset),
@ -164,12 +163,12 @@ defmodule Pleroma.Object.Fetcher do
date: date
})
[{"signature", signature}]
{"signature", signature}
end
defp sign_fetch(headers, id, date) do
if Pleroma.Config.get([:activitypub, :sign_object_fetches]) do
headers ++ make_signature(id, date)
[make_signature(id, date) | headers]
else
headers
end
@ -177,7 +176,7 @@ defmodule Pleroma.Object.Fetcher do
defp maybe_date_fetch(headers, date) do
if Pleroma.Config.get([:activitypub, :sign_object_fetches]) do
headers ++ [{"date", date}]
[{"date", date} | headers]
else
headers
end

View File

@ -46,12 +46,23 @@ defmodule Pleroma.Uploaders.S3 do
op =
if streaming do
upload.tempfile
|> ExAws.S3.Upload.stream_file()
|> ExAws.S3.upload(bucket, s3_name, [
{:acl, :public_read},
{:content_type, upload.content_type}
])
op =
upload.tempfile
|> ExAws.S3.Upload.stream_file()
|> ExAws.S3.upload(bucket, s3_name, [
{:acl, :public_read},
{:content_type, upload.content_type}
])
if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Gun do
# Set the S3 upload timeout to respect the :upload pool timeout.
# The timeout should be slightly larger so S3 can retry the upload on failure.
timeout = Pleroma.HTTP.AdapterHelper.Gun.pool_timeout(:upload) + 1_000
opts = Keyword.put(op.opts, :timeout, timeout)
Map.put(op, :opts, opts)
else
op
end
else
{:ok, file_data} = File.read(upload.tempfile)

View File

@ -83,7 +83,7 @@ defmodule Pleroma.User do
]
schema "users" do
field(:bio, :string)
field(:bio, :string, default: "")
field(:raw_bio, :string)
field(:email, :string)
field(:name, :string)
@ -1587,7 +1587,7 @@ defmodule Pleroma.User do
# "Right to be forgotten"
# https://gdpr.eu/right-to-be-forgotten/
change(user, %{
bio: nil,
bio: "",
raw_bio: nil,
email: nil,
name: nil,

View File

@ -116,7 +116,7 @@ defmodule Pleroma.User.Search do
end
defp base_query(_user, false), do: User
defp base_query(user, true), do: User.get_followers_query(user)
defp base_query(user, true), do: User.get_friends_query(user)
defp filter_invisible_users(query) do
from(q in query, where: q.invisible == false)

View File

@ -1224,7 +1224,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
name: data["name"],
follower_address: data["followers"],
following_address: data["following"],
bio: data["summary"],
bio: data["summary"] || "",
actor_type: actor_type,
also_known_as: Map.get(data, "alsoKnownAs", []),
public_key: public_key,

View File

@ -9,6 +9,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AudioValidator do
alias Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidator
alias Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes
alias Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations
alias Pleroma.Web.ActivityPub.Transmogrifier
import Ecto.Changeset
@ -33,8 +34,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AudioValidator do
field(:attributedTo, ObjectValidators.ObjectID)
field(:summary, :string)
field(:published, ObjectValidators.DateTime)
# TODO: Write type
field(:emoji, :map, default: %{})
field(:emoji, ObjectValidators.Emoji, default: %{})
field(:sensitive, :boolean, default: false)
embeds_many(:attachment, AttachmentValidator)
field(:replies_count, :integer, default: 0)
@ -83,6 +83,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AudioValidator do
data
|> CommonFixes.fix_defaults()
|> CommonFixes.fix_attribution()
|> Transmogrifier.fix_emoji()
|> fix_url()
end

View File

@ -22,7 +22,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.ChatMessageValidator do
field(:content, ObjectValidators.SafeText)
field(:actor, ObjectValidators.ObjectID)
field(:published, ObjectValidators.DateTime)
field(:emoji, :map, default: %{})
field(:emoji, ObjectValidators.Emoji, default: %{})
embeds_one(:attachment, AttachmentValidator)
end

View File

@ -11,8 +11,8 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes do
Utils.create_context(data["context"] || data["conversation"])
data
|> Map.put_new("context", context)
|> Map.put_new("context_id", context_id)
|> Map.put("context", context)
|> Map.put("context_id", context_id)
end
def fix_attribution(data) do

View File

@ -9,6 +9,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.EventValidator do
alias Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidator
alias Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes
alias Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations
alias Pleroma.Web.ActivityPub.Transmogrifier
import Ecto.Changeset
@ -39,8 +40,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.EventValidator do
field(:attributedTo, ObjectValidators.ObjectID)
field(:published, ObjectValidators.DateTime)
# TODO: Write type
field(:emoji, :map, default: %{})
field(:emoji, ObjectValidators.Emoji, default: %{})
field(:sensitive, :boolean, default: false)
embeds_many(:attachment, AttachmentValidator)
field(:replies_count, :integer, default: 0)
@ -74,6 +74,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.EventValidator do
data
|> CommonFixes.fix_defaults()
|> CommonFixes.fix_attribution()
|> Transmogrifier.fix_emoji()
end
def changeset(struct, data) do

View File

@ -6,6 +6,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.NoteValidator do
use Ecto.Schema
alias Pleroma.EctoType.ActivityPub.ObjectValidators
alias Pleroma.Web.ActivityPub.Transmogrifier
import Ecto.Changeset
@ -32,8 +33,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.NoteValidator do
field(:actor, ObjectValidators.ObjectID)
field(:attributedTo, ObjectValidators.ObjectID)
field(:published, ObjectValidators.DateTime)
# TODO: Write type
field(:emoji, :map, default: %{})
field(:emoji, ObjectValidators.Emoji, default: %{})
field(:sensitive, :boolean, default: false)
# TODO: Write type
field(:attachment, {:array, :map}, default: [])
@ -53,7 +53,14 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.NoteValidator do
|> validate_data()
end
defp fix(data) do
data
|> Transmogrifier.fix_emoji()
end
def cast_data(data) do
data = fix(data)
%__MODULE__{}
|> cast(data, __schema__(:fields))
end

View File

@ -10,6 +10,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.QuestionValidator do
alias Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes
alias Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations
alias Pleroma.Web.ActivityPub.ObjectValidators.QuestionOptionsValidator
alias Pleroma.Web.ActivityPub.Transmogrifier
import Ecto.Changeset
@ -35,8 +36,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.QuestionValidator do
field(:attributedTo, ObjectValidators.ObjectID)
field(:summary, :string)
field(:published, ObjectValidators.DateTime)
# TODO: Write type
field(:emoji, :map, default: %{})
field(:emoji, ObjectValidators.Emoji, default: %{})
field(:sensitive, :boolean, default: false)
embeds_many(:attachment, AttachmentValidator)
field(:replies_count, :integer, default: 0)
@ -85,6 +85,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.QuestionValidator do
data
|> CommonFixes.fix_defaults()
|> CommonFixes.fix_attribution()
|> Transmogrifier.fix_emoji()
|> fix_closed()
end

View File

@ -318,9 +318,6 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
Map.put(mapping, name, data["icon"]["url"])
end)
# we merge mastodon and pleroma emoji into a single mapping, to allow for both wire formats
emoji = Map.merge(object["emoji"] || %{}, emoji)
Map.put(object, "emoji", emoji)
end

View File

@ -114,7 +114,7 @@ defmodule Pleroma.Web.ApiSpec.ListOperation do
description: "Add accounts to the given list.",
operationId: "ListController.add_to_list",
parameters: [id_param()],
requestBody: add_remove_accounts_request(),
requestBody: add_remove_accounts_request(true),
security: [%{"oAuth" => ["write:lists"]}],
responses: %{
200 => Operation.response("Empty object", "application/json", %Schema{type: :object})
@ -127,8 +127,16 @@ defmodule Pleroma.Web.ApiSpec.ListOperation do
tags: ["Lists"],
summary: "Remove accounts from list",
operationId: "ListController.remove_from_list",
parameters: [id_param()],
requestBody: add_remove_accounts_request(),
parameters: [
id_param(),
Operation.parameter(
:account_ids,
:query,
%Schema{type: :array, items: %Schema{type: :string}},
"Array of account IDs"
)
],
requestBody: add_remove_accounts_request(false),
security: [%{"oAuth" => ["write:lists"]}],
responses: %{
200 => Operation.response("Empty object", "application/json", %Schema{type: :object})
@ -171,7 +179,7 @@ defmodule Pleroma.Web.ApiSpec.ListOperation do
)
end
defp add_remove_accounts_request do
defp add_remove_accounts_request(required) when is_boolean(required) do
request_body(
"Parameters",
%Schema{
@ -180,9 +188,9 @@ defmodule Pleroma.Web.ApiSpec.ListOperation do
properties: %{
account_ids: %Schema{type: :array, description: "Array of account IDs", items: FlakeID}
},
required: [:account_ids]
required: required && [:account_ids]
},
required: true
required: required
)
end
end

View File

@ -68,7 +68,7 @@ defmodule Pleroma.Web.Auth.PleromaAuthenticator do
nickname = value([registration_attrs["nickname"], Registration.nickname(registration)])
email = value([registration_attrs["email"], Registration.email(registration)])
name = value([registration_attrs["name"], Registration.name(registration)]) || nickname
bio = value([registration_attrs["bio"], Registration.description(registration)])
bio = value([registration_attrs["bio"], Registration.description(registration)]) || ""
random_password = :crypto.strong_rand_bytes(64) |> Base.encode64()

View File

@ -452,7 +452,8 @@ defmodule Pleroma.Web.CommonAPI do
end
def add_mute(user, activity) do
with {:ok, _} <- ThreadMute.add_mute(user.id, activity.data["context"]) do
with {:ok, _} <- ThreadMute.add_mute(user.id, activity.data["context"]),
_ <- Pleroma.Notification.mark_context_as_read(user, activity.data["context"]) do
{:ok, activity}
else
{:error, _} -> {:error, dgettext("errors", "conversation is already muted")}

View File

@ -59,17 +59,11 @@ defmodule Pleroma.Web.MastodonAPI.AuthController do
def password_reset(conn, params) do
nickname_or_email = params["email"] || params["nickname"]
with {:ok, _} <- TwitterAPI.password_reset(nickname_or_email) do
conn
|> put_status(:no_content)
|> json("")
else
{:error, "unknown user"} ->
send_resp(conn, :not_found, "")
TwitterAPI.password_reset(nickname_or_email)
{:error, _} ->
send_resp(conn, :bad_request, "")
end
conn
|> put_status(:no_content)
|> json("")
end
defp local_mastodon_root_path(conn) do

View File

@ -74,7 +74,7 @@ defmodule Pleroma.Web.MastodonAPI.ListController do
# DELETE /api/v1/lists/:id/accounts
def remove_from_list(
%{assigns: %{list: list}, body_params: %{account_ids: account_ids}} = conn,
%{assigns: %{list: list}, params: %{account_ids: account_ids}} = conn,
_
) do
Enum.each(account_ids, fn account_id ->
@ -86,6 +86,10 @@ defmodule Pleroma.Web.MastodonAPI.ListController do
json(conn, %{})
end
def remove_from_list(%{body_params: params} = conn, _) do
remove_from_list(%{conn | params: params}, %{})
end
defp list_by_id_and_user(%{assigns: %{user: user}, params: %{id: id}} = conn, _) do
case Pleroma.List.get(id, user) do
%Pleroma.List{} = list -> assign(conn, :list, list)

View File

@ -247,7 +247,7 @@ defmodule Pleroma.Web.MastodonAPI.AccountView do
followers_count: followers_count,
following_count: following_count,
statuses_count: user.note_count,
note: user.bio || "",
note: user.bio,
url: user.uri || user.ap_id,
avatar: avatar,
avatar_static: avatar_static,

View File

@ -23,6 +23,17 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do
import Pleroma.Web.ActivityPub.Visibility, only: [get_visibility: 1, visible_for_user?: 2]
# This is a naive way to do this, just spawning a process per activity
# to fetch the preview. However it should be fine considering
# pagination is restricted to 40 activities at a time
defp fetch_rich_media_for_activities(activities) do
Enum.each(activities, fn activity ->
spawn(fn ->
Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity)
end)
end)
end
# TODO: Add cached version.
defp get_replied_to_activities([]), do: %{}
@ -80,6 +91,11 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do
# To do: check AdminAPIControllerTest on the reasons behind nil activities in the list
activities = Enum.filter(opts.activities, & &1)
# Start fetching rich media before doing anything else, so that later calls to get the cards
# only block for timeout in the worst case, as opposed to
# length(activities_with_links) * timeout
fetch_rich_media_for_activities(activities)
replied_to_activities = get_replied_to_activities(activities)
parent_activities =

View File

@ -61,7 +61,7 @@ defmodule Pleroma.Web.Metadata.Providers.OpenGraph do
@impl Provider
def build_tags(%{user: user}) do
with truncated_bio = Utils.scrub_html_and_truncate(user.bio || "") do
with truncated_bio = Utils.scrub_html_and_truncate(user.bio) do
[
{:meta,
[

View File

@ -40,7 +40,7 @@ defmodule Pleroma.Web.Metadata.Providers.TwitterCard do
@impl Provider
def build_tags(%{user: user}) do
with truncated_bio = Utils.scrub_html_and_truncate(user.bio || "") do
with truncated_bio = Utils.scrub_html_and_truncate(user.bio) do
[
title_tag(user),
{:meta, [property: "twitter:description", content: truncated_bio], []},

View File

@ -149,9 +149,7 @@ defmodule Pleroma.Web.PleromaAPI.ChatController do
from(c in Chat,
where: c.user_id == ^user_id,
where: c.recipient not in ^blocked_ap_ids,
order_by: [desc: c.updated_at],
inner_join: u in User,
on: u.ap_id == c.recipient
order_by: [desc: c.updated_at]
)
|> Repo.all()

View File

@ -96,6 +96,6 @@ defmodule Pleroma.Web.RichMedia.Helpers do
@rich_media_options
end
Pleroma.HTTP.get(url, headers, options)
Pleroma.HTTP.get(url, headers, adapter: options)
end
end

View File

@ -3,6 +3,8 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.RichMedia.Parser do
require Logger
defp parsers do
Pleroma.Config.get([:rich_media, :parsers])
end
@ -10,17 +12,29 @@ defmodule Pleroma.Web.RichMedia.Parser do
def parse(nil), do: {:error, "No URL provided"}
if Pleroma.Config.get(:env) == :test do
@spec parse(String.t()) :: {:ok, map()} | {:error, any()}
def parse(url), do: parse_url(url)
else
@spec parse(String.t()) :: {:ok, map()} | {:error, any()}
def parse(url) do
try do
Cachex.fetch!(:rich_media_cache, url, fn _ ->
{:commit, parse_url(url)}
end)
|> set_ttl_based_on_image(url)
rescue
e ->
{:error, "Cachex error: #{inspect(e)}"}
with {:ok, data} <- get_cached_or_parse(url),
{:ok, _} <- set_ttl_based_on_image(data, url) do
{:ok, data}
else
error ->
Logger.error(fn -> "Rich media error: #{inspect(error)}" end)
end
end
defp get_cached_or_parse(url) do
case Cachex.fetch!(:rich_media_cache, url, fn _ -> {:commit, parse_url(url)} end) do
{:ok, _data} = res ->
res
{:error, _} = e ->
ttl = Pleroma.Config.get([:rich_media, :failure_backoff], 60_000)
Cachex.expire(:rich_media_cache, url, ttl)
e
end
end
end
@ -47,19 +61,26 @@ defmodule Pleroma.Web.RichMedia.Parser do
config :pleroma, :rich_media,
ttl_setters: [MyModule]
"""
def set_ttl_based_on_image({:ok, data}, url) do
with {:ok, nil} <- Cachex.ttl(:rich_media_cache, url),
ttl when is_number(ttl) <- get_ttl_from_image(data, url) do
Cachex.expire_at(:rich_media_cache, url, ttl * 1000)
{:ok, data}
else
@spec set_ttl_based_on_image(map(), String.t()) ::
{:ok, Integer.t() | :noop} | {:error, :no_key}
def set_ttl_based_on_image(data, url) do
case get_ttl_from_image(data, url) do
{:ok, ttl} when is_number(ttl) ->
ttl = ttl * 1000
case Cachex.expire_at(:rich_media_cache, url, ttl) do
{:ok, true} -> {:ok, ttl}
{:ok, false} -> {:error, :no_key}
end
_ ->
{:ok, data}
{:ok, :noop}
end
end
defp get_ttl_from_image(data, url) do
Pleroma.Config.get([:rich_media, :ttl_setters])
[:rich_media, :ttl_setters]
|> Pleroma.Config.get()
|> Enum.reduce({:ok, nil}, fn
module, {:ok, _ttl} ->
module.ttl(data, url)
@ -70,23 +91,16 @@ defmodule Pleroma.Web.RichMedia.Parser do
end
defp parse_url(url) do
try do
{:ok, %Tesla.Env{body: html}} = Pleroma.Web.RichMedia.Helpers.rich_media_get(url)
with {:ok, %Tesla.Env{body: html}} <- Pleroma.Web.RichMedia.Helpers.rich_media_get(url),
{:ok, html} <- Floki.parse_document(html) do
html
|> parse_html()
|> maybe_parse()
|> Map.put("url", url)
|> clean_parsed_data()
|> check_parsed_data()
rescue
e ->
{:error, "Parsing error: #{inspect(e)} #{inspect(__STACKTRACE__)}"}
end
end
defp parse_html(html), do: Floki.parse_document!(html)
defp maybe_parse(html) do
Enum.reduce_while(parsers(), %{}, fn parser, acc ->
case parser.parse(html, acc) do

View File

@ -10,20 +10,15 @@ defmodule Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl do
|> parse_query_params()
|> format_query_params()
|> get_expiration_timestamp()
else
{:error, "Not aws signed url #{inspect(image)}"}
end
end
defp is_aws_signed_url(""), do: nil
defp is_aws_signed_url(nil), do: nil
defp is_aws_signed_url(image) when is_binary(image) do
defp is_aws_signed_url(image) when is_binary(image) and image != "" do
%URI{host: host, query: query} = URI.parse(image)
if String.contains?(host, "amazonaws.com") and String.contains?(query, "X-Amz-Expires") do
image
else
nil
end
String.contains?(host, "amazonaws.com") and String.contains?(query, "X-Amz-Expires")
end
defp is_aws_signed_url(_), do: nil
@ -46,6 +41,6 @@ defmodule Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl do
|> Map.get("X-Amz-Date")
|> Timex.parse("{ISO:Basic:Z}")
Timex.to_unix(date) + String.to_integer(Map.get(params, "X-Amz-Expires"))
{:ok, Timex.to_unix(date) + String.to_integer(Map.get(params, "X-Amz-Expires"))}
end
end

View File

@ -72,7 +72,7 @@ defmodule Pleroma.Web.TwitterAPI.TwitterAPI do
def password_reset(nickname_or_email) do
with true <- is_binary(nickname_or_email),
%User{local: true, email: email} = user when is_binary(email) <-
%User{local: true, email: email, deactivated: false} = user when is_binary(email) <-
User.get_by_nickname_or_email(nickname_or_email),
{:ok, token_record} <- Pleroma.PasswordResetToken.create_token(user) do
user
@ -81,17 +81,8 @@ defmodule Pleroma.Web.TwitterAPI.TwitterAPI do
{:ok, :enqueued}
else
false ->
{:error, "bad user identifier"}
%User{local: true, email: nil} ->
_ ->
{:ok, :noop}
%User{local: false} ->
{:error, "remote user"}
nil ->
{:error, "unknown user"}
end
end

View File

@ -136,12 +136,12 @@ defmodule Pleroma.Web.WebFinger do
def find_lrdd_template(domain) do
with {:ok, %{status: status, body: body}} when status in 200..299 <-
HTTP.get("http://#{domain}/.well-known/host-meta", []) do
HTTP.get("http://#{domain}/.well-known/host-meta") do
get_template_from_xml(body)
else
_ ->
with {:ok, %{body: body, status: status}} when status in 200..299 <-
HTTP.get("https://#{domain}/.well-known/host-meta", []) do
HTTP.get("https://#{domain}/.well-known/host-meta") do
get_template_from_xml(body)
else
e -> {:error, "Can't find LRDD template: #{inspect(e)}"}

View File

@ -43,8 +43,8 @@
"ex_machina": {:hex, :ex_machina, "2.4.0", "09a34c5d371bfb5f78399029194a8ff67aff340ebe8ba19040181af35315eabb", [:mix], [{:ecto, "~> 2.2 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:ecto_sql, "~> 3.0", [hex: :ecto_sql, repo: "hexpm", optional: true]}], "hexpm", "a20bc9ddc721b33ea913b93666c5d0bdca5cbad7a67540784ae277228832d72c"},
"ex_syslogger": {:hex, :ex_syslogger, "1.5.2", "72b6aa2d47a236e999171f2e1ec18698740f40af0bd02c8c650bf5f1fd1bac79", [:mix], [{:poison, ">= 1.5.0", [hex: :poison, repo: "hexpm", optional: true]}, {:syslog, "~> 1.1.0", [hex: :syslog, repo: "hexpm", optional: false]}], "hexpm", "ab9fab4136dbc62651ec6f16fa4842f10cf02ab4433fa3d0976c01be99398399"},
"excoveralls": {:hex, :excoveralls, "0.13.1", "b9f1697f7c9e0cfe15d1a1d737fb169c398803ffcbc57e672aa007e9fd42864c", [:mix], [{:hackney, "~> 1.16", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "b4bb550e045def1b4d531a37fb766cbbe1307f7628bf8f0414168b3f52021cce"},
"fast_html": {:hex, :fast_html, "2.0.2", "1fabc408b2baa965cf6399a48796326f2721b21b397a3c667bb3bb88fb9559a4", [:make, :mix], [{:elixir_make, "~> 0.4", [hex: :elixir_make, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 0.1.0", [hex: :nimble_pool, repo: "hexpm", optional: false]}], "hexpm", "f077e2c1597a6e2678e6cacc64f456a6c6024eb4240092c46d4212496dc59aba"},
"fast_sanitize": {:hex, :fast_sanitize, "0.2.1", "3302421a988992b6cae08e68f77069e167ff116444183f3302e3c36017a50558", [:mix], [{:fast_html, "~> 2.0", [hex: :fast_html, repo: "hexpm", optional: false]}, {:plug, "~> 1.8", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "bcd2c54e328128515edd1a8fb032fdea7e5581672ba161fc5962d21ecee92502"},
"fast_html": {:hex, :fast_html, "2.0.3", "27289dea6c3a22952191a2d4e07f546c0de8a351484782c2e797dbae06a5dc8a", [:make, :mix], [{:elixir_make, "~> 0.4", [hex: :elixir_make, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 0.1.0", [hex: :nimble_pool, repo: "hexpm", optional: false]}], "hexpm", "17d41fa8afe4e912ffe74e13b87ddb085382cd2b7393636d338495c9a8a7b518"},
"fast_sanitize": {:hex, :fast_sanitize, "0.2.2", "3cbbaebaea6043865dfb5b4ecb0f1af066ad410a51470e353714b10c42007b81", [:mix], [{:fast_html, "~> 2.0", [hex: :fast_html, repo: "hexpm", optional: false]}, {:plug, "~> 1.8", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "69f204db9250afa94a0d559d9110139850f57de2b081719fbafa1e9a89e94466"},
"flake_id": {:hex, :flake_id, "0.1.0", "7716b086d2e405d09b647121a166498a0d93d1a623bead243e1f74216079ccb3", [:mix], [{:base62, "~> 1.2", [hex: :base62, repo: "hexpm", optional: false]}, {:ecto, ">= 2.0.0", [hex: :ecto, repo: "hexpm", optional: true]}], "hexpm", "31fc8090fde1acd267c07c36ea7365b8604055f897d3a53dd967658c691bd827"},
"floki": {:hex, :floki, "0.27.0", "6b29a14283f1e2e8fad824bc930eaa9477c462022075df6bea8f0ad811c13599", [:mix], [{:html_entities, "~> 0.5.0", [hex: :html_entities, repo: "hexpm", optional: false]}], "hexpm", "583b8c13697c37179f1f82443bcc7ad2f76fbc0bf4c186606eebd658f7f2631b"},
"gen_smtp": {:hex, :gen_smtp, "0.15.0", "9f51960c17769b26833b50df0b96123605a8024738b62db747fece14eb2fbfcc", [:rebar3], [], "hexpm", "29bd14a88030980849c7ed2447b8db6d6c9278a28b11a44cafe41b791205440f"},

View File

@ -0,0 +1,22 @@
defmodule Pleroma.Repo.Migrations.ChatConstraints do
use Ecto.Migration
def change do
remove_orphans = """
delete from chats where not exists(select id from users where ap_id = chats.recipient);
"""
execute(remove_orphans)
drop(constraint(:chats, "chats_user_id_fkey"))
alter table(:chats) do
modify(:user_id, references(:users, type: :uuid, on_delete: :delete_all))
modify(
:recipient,
references(:users, column: :ap_id, type: :string, on_delete: :delete_all)
)
end
end
end

View File

@ -0,0 +1,7 @@
defmodule Pleroma.Repo.Migrations.EnsureBioIsString do
use Ecto.Migration
def change do
execute("update users set bio = '' where bio is null", "")
end
end

View File

@ -0,0 +1,10 @@
defmodule Pleroma.Repo.Migrations.BioSetNotNull do
use Ecto.Migration
def change do
execute(
"alter table users alter column bio set not null",
"alter table users alter column bio drop not null"
)
end
end

View File

@ -26,6 +26,28 @@ defmodule Pleroma.ChatTest do
assert chat.id
end
test "deleting the user deletes the chat" do
user = insert(:user)
other_user = insert(:user)
{:ok, chat} = Chat.bump_or_create(user.id, other_user.ap_id)
Repo.delete(user)
refute Chat.get_by_id(chat.id)
end
test "deleting the recipient deletes the chat" do
user = insert(:user)
other_user = insert(:user)
{:ok, chat} = Chat.bump_or_create(user.id, other_user.ap_id)
Repo.delete(other_user)
refute Chat.get_by_id(chat.id)
end
test "it returns and bumps a chat for a user and recipient if it already exists" do
user = insert(:user)
other_user = insert(:user)

View File

@ -1350,11 +1350,11 @@ defmodule HttpRequestMock do
{:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/relay/relay.json")}}
end
def get("http://localhost:4001/", _, "", Accept: "text/html") do
def get("http://localhost:4001/", _, "", [{"accept", "text/html"}]) do
{:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/tesla_mock/7369654.html")}}
end
def get("https://osada.macgirvin.com/", _, "", Accept: "text/html") do
def get("https://osada.macgirvin.com/", _, "", [{"accept", "text/html"}]) do
{:ok,
%Tesla.Env{
status: 200,

View File

@ -48,11 +48,18 @@ defmodule Pleroma.FrontendTest do
}
})
folder = Path.join([@dir, "frontends", "pleroma", "fantasy"])
previously_existing = Path.join([folder, "temp"])
File.mkdir_p!(folder)
File.write!(previously_existing, "yey")
assert File.exists?(previously_existing)
capture_io(fn ->
Frontend.run(["install", "pleroma", "--file", "test/fixtures/tesla_mock/frontend.zip"])
end)
assert File.exists?(Path.join([@dir, "frontends", "pleroma", "fantasy", "test.txt"]))
assert File.exists?(Path.join([folder, "test.txt"]))
refute File.exists?(previously_existing)
end
test "it downloads and unzips unknown frontends" do

View File

@ -109,22 +109,22 @@ defmodule Pleroma.UserSearchTest do
Enum.map(User.search("doe", resolve: false, for_user: u1), & &1.id) == []
end
test "finds followers of user by partial name" do
u1 = insert(:user)
u2 = insert(:user, %{name: "Jimi"})
follower_jimi = insert(:user, %{name: "Jimi Hendrix"})
follower_lizz = insert(:user, %{name: "Lizz Wright"})
friend = insert(:user, %{name: "Jimi"})
test "finds followings of user by partial name" do
lizz = insert(:user, %{name: "Lizz"})
jimi = insert(:user, %{name: "Jimi"})
following_lizz = insert(:user, %{name: "Jimi Hendrix"})
following_jimi = insert(:user, %{name: "Lizz Wright"})
follower_lizz = insert(:user, %{name: "Jimi"})
{:ok, follower_jimi} = User.follow(follower_jimi, u1)
{:ok, _follower_lizz} = User.follow(follower_lizz, u2)
{:ok, u1} = User.follow(u1, friend)
{:ok, lizz} = User.follow(lizz, following_lizz)
{:ok, _jimi} = User.follow(jimi, following_jimi)
{:ok, _follower_lizz} = User.follow(follower_lizz, lizz)
assert Enum.map(User.search("jimi", following: true, for_user: u1), & &1.id) == [
follower_jimi.id
assert Enum.map(User.search("jimi", following: true, for_user: lizz), & &1.id) == [
following_lizz.id
]
assert User.search("lizz", following: true, for_user: u1) == []
assert User.search("lizz", following: true, for_user: lizz) == []
end
test "find local and remote users for authenticated users" do

View File

@ -1466,7 +1466,7 @@ defmodule Pleroma.UserTest do
user = User.get_by_id(user.id)
assert %User{
bio: nil,
bio: "",
raw_bio: nil,
email: nil,
name: nil,

View File

@ -69,6 +69,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.ChatValidationTest do
assert {:ok, object, _meta} = ObjectValidator.validate(valid_chat_message, [])
assert Map.put(valid_chat_message, "attachment", nil) == object
assert match?(%{"firefox" => _}, object["emoji"])
end
test "validates for a basic object with an attachment", %{

View File

@ -106,6 +106,57 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier.QuestionHandlingTest do
assert Enum.sort(object.data["oneOf"]) == Enum.sort(options)
end
test "Mastodon Question activity with custom emojis" do
options = [
%{
"type" => "Note",
"name" => ":blobcat:",
"replies" => %{"totalItems" => 0, "type" => "Collection"}
},
%{
"type" => "Note",
"name" => ":blobfox:",
"replies" => %{"totalItems" => 0, "type" => "Collection"}
}
]
tag = [
%{
"icon" => %{
"type" => "Image",
"url" => "https://blob.cat/emoji/custom/blobcats/blobcat.png"
},
"id" => "https://blob.cat/emoji/custom/blobcats/blobcat.png",
"name" => ":blobcat:",
"type" => "Emoji",
"updated" => "1970-01-01T00:00:00Z"
},
%{
"icon" => %{"type" => "Image", "url" => "https://blob.cat/emoji/blobfox/blobfox.png"},
"id" => "https://blob.cat/emoji/blobfox/blobfox.png",
"name" => ":blobfox:",
"type" => "Emoji",
"updated" => "1970-01-01T00:00:00Z"
}
]
data =
File.read!("test/fixtures/mastodon-question-activity.json")
|> Poison.decode!()
|> Kernel.put_in(["object", "oneOf"], options)
|> Kernel.put_in(["object", "tag"], tag)
{:ok, %Activity{local: false} = activity} = Transmogrifier.handle_incoming(data)
object = Object.normalize(activity, false)
assert object.data["oneOf"] == options
assert object.data["emoji"] == %{
"blobcat" => "https://blob.cat/emoji/custom/blobcats/blobcat.png",
"blobfox" => "https://blob.cat/emoji/blobfox/blobfox.png"
}
end
test "returns an error if received a second time" do
data = File.read!("test/fixtures/mastodon-question-activity.json") |> Poison.decode!()

View File

@ -203,7 +203,7 @@ defmodule Pleroma.Web.AdminAPI.AdminAPIControllerTest do
assert user.note_count == 0
assert user.follower_count == 0
assert user.following_count == 0
assert user.bio == nil
assert user.bio == ""
assert user.name == nil
assert called(Pleroma.Web.Federator.publish(:_))

View File

@ -9,6 +9,7 @@ defmodule Pleroma.Web.CommonAPITest do
alias Pleroma.Conversation.Participation
alias Pleroma.Notification
alias Pleroma.Object
alias Pleroma.Repo
alias Pleroma.User
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.ActivityPub.Transmogrifier
@ -18,6 +19,7 @@ defmodule Pleroma.Web.CommonAPITest do
import Pleroma.Factory
import Mock
import Ecto.Query, only: [from: 2]
require Pleroma.Constants
@ -808,6 +810,69 @@ defmodule Pleroma.Web.CommonAPITest do
[user: user, activity: activity]
end
test "marks notifications as read after mute" do
author = insert(:user)
activity = insert(:note_activity, user: author)
friend1 = insert(:user)
friend2 = insert(:user)
{:ok, reply_activity} =
CommonAPI.post(
friend2,
%{
status: "@#{author.nickname} @#{friend1.nickname} test reply",
in_reply_to_status_id: activity.id
}
)
{:ok, favorite_activity} = CommonAPI.favorite(friend2, activity.id)
{:ok, repeat_activity} = CommonAPI.repeat(activity.id, friend1)
assert Repo.aggregate(
from(n in Notification, where: n.seen == false and n.user_id == ^friend1.id),
:count
) == 1
unread_notifications =
Repo.all(from(n in Notification, where: n.seen == false, where: n.user_id == ^author.id))
assert Enum.any?(unread_notifications, fn n ->
n.type == "favourite" && n.activity_id == favorite_activity.id
end)
assert Enum.any?(unread_notifications, fn n ->
n.type == "reblog" && n.activity_id == repeat_activity.id
end)
assert Enum.any?(unread_notifications, fn n ->
n.type == "mention" && n.activity_id == reply_activity.id
end)
{:ok, _} = CommonAPI.add_mute(author, activity)
assert CommonAPI.thread_muted?(author, activity)
assert Repo.aggregate(
from(n in Notification, where: n.seen == false and n.user_id == ^friend1.id),
:count
) == 1
read_notifications =
Repo.all(from(n in Notification, where: n.seen == true, where: n.user_id == ^author.id))
assert Enum.any?(read_notifications, fn n ->
n.type == "favourite" && n.activity_id == favorite_activity.id
end)
assert Enum.any?(read_notifications, fn n ->
n.type == "reblog" && n.activity_id == repeat_activity.id
end)
assert Enum.any?(read_notifications, fn n ->
n.type == "mention" && n.activity_id == reply_activity.id
end)
end
test "add mute", %{user: user, activity: activity} do
{:ok, _} = CommonAPI.add_mute(user, activity)
assert CommonAPI.thread_muted?(user, activity)

View File

@ -122,17 +122,27 @@ defmodule Pleroma.Web.MastodonAPI.AuthControllerTest do
{:ok, user: user}
end
test "it returns 404 when user is not found", %{conn: conn, user: user} do
test "it returns 204 when user is not found", %{conn: conn, user: user} do
conn = post(conn, "/auth/password?email=nonexisting_#{user.email}")
assert conn.status == 404
assert conn.resp_body == ""
assert conn
|> json_response(:no_content)
end
test "it returns 400 when user is not local", %{conn: conn, user: user} do
test "it returns 204 when user is not local", %{conn: conn, user: user} do
{:ok, user} = Repo.update(Ecto.Changeset.change(user, local: false))
conn = post(conn, "/auth/password?email=#{user.email}")
assert conn.status == 400
assert conn.resp_body == ""
assert conn
|> json_response(:no_content)
end
test "it returns 204 when user is deactivated", %{conn: conn, user: user} do
{:ok, user} = Repo.update(Ecto.Changeset.change(user, deactivated: true, local: true))
conn = post(conn, "/auth/password?email=#{user.email}")
assert conn
|> json_response(:no_content)
end
end

View File

@ -67,7 +67,7 @@ defmodule Pleroma.Web.MastodonAPI.ListControllerTest do
assert following == [other_user.follower_address]
end
test "removing users from a list" do
test "removing users from a list, body params" do
%{user: user, conn: conn} = oauth_access(["write:lists"])
other_user = insert(:user)
third_user = insert(:user)
@ -85,6 +85,24 @@ defmodule Pleroma.Web.MastodonAPI.ListControllerTest do
assert following == [third_user.follower_address]
end
test "removing users from a list, query params" do
%{user: user, conn: conn} = oauth_access(["write:lists"])
other_user = insert(:user)
third_user = insert(:user)
{:ok, list} = Pleroma.List.create("name", user)
{:ok, list} = Pleroma.List.follow(list, other_user)
{:ok, list} = Pleroma.List.follow(list, third_user)
assert %{} ==
conn
|> put_req_header("content-type", "application/json")
|> delete("/api/v1/lists/#{list.id}/accounts?account_ids[]=#{other_user.id}")
|> json_response_and_validate_schema(:ok)
%Pleroma.List{following: following} = Pleroma.List.get(list.id, user)
assert following == [third_user.follower_address]
end
test "listing users in a list" do
%{user: user, conn: conn} = oauth_access(["read:lists"])
other_user = insert(:user)

View File

@ -21,7 +21,7 @@ defmodule Pleroma.Web.RichMedia.TTL.AwsSignedUrlTest do
expire_time =
Timex.parse!(timestamp, "{ISO:Basic:Z}") |> Timex.to_unix() |> Kernel.+(valid_till)
assert expire_time == Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl.ttl(metadata, url)
assert {:ok, expire_time} == Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl.ttl(metadata, url)
end
test "s3 signed url is parsed and correct ttl is set for rich media" do
@ -55,7 +55,7 @@ defmodule Pleroma.Web.RichMedia.TTL.AwsSignedUrlTest do
Cachex.put(:rich_media_cache, url, metadata)
Pleroma.Web.RichMedia.Parser.set_ttl_based_on_image({:ok, metadata}, url)
Pleroma.Web.RichMedia.Parser.set_ttl_based_on_image(metadata, url)
{:ok, cache_ttl} = Cachex.ttl(:rich_media_cache, url)

View File

@ -5,6 +5,8 @@
defmodule Pleroma.Web.RichMedia.ParserTest do
use ExUnit.Case, async: true
alias Pleroma.Web.RichMedia.Parser
setup do
Tesla.Mock.mock(fn
%{
@ -48,23 +50,29 @@ defmodule Pleroma.Web.RichMedia.ParserTest do
%{method: :get, url: "http://example.com/empty"} ->
%Tesla.Env{status: 200, body: "hello"}
%{method: :get, url: "http://example.com/malformed"} ->
%Tesla.Env{status: 200, body: File.read!("test/fixtures/rich_media/malformed-data.html")}
%{method: :get, url: "http://example.com/error"} ->
{:error, :overload}
end)
:ok
end
test "returns error when no metadata present" do
assert {:error, _} = Pleroma.Web.RichMedia.Parser.parse("http://example.com/empty")
assert {:error, _} = Parser.parse("http://example.com/empty")
end
test "doesn't just add a title" do
assert Pleroma.Web.RichMedia.Parser.parse("http://example.com/non-ogp") ==
assert Parser.parse("http://example.com/non-ogp") ==
{:error,
"Found metadata was invalid or incomplete: %{\"url\" => \"http://example.com/non-ogp\"}"}
end
test "parses ogp" do
assert Pleroma.Web.RichMedia.Parser.parse("http://example.com/ogp") ==
assert Parser.parse("http://example.com/ogp") ==
{:ok,
%{
"image" => "http://ia.media-imdb.com/images/rock.jpg",
@ -77,7 +85,7 @@ defmodule Pleroma.Web.RichMedia.ParserTest do
end
test "falls back to <title> when ogp:title is missing" do
assert Pleroma.Web.RichMedia.Parser.parse("http://example.com/ogp-missing-title") ==
assert Parser.parse("http://example.com/ogp-missing-title") ==
{:ok,
%{
"image" => "http://ia.media-imdb.com/images/rock.jpg",
@ -90,7 +98,7 @@ defmodule Pleroma.Web.RichMedia.ParserTest do
end
test "parses twitter card" do
assert Pleroma.Web.RichMedia.Parser.parse("http://example.com/twitter-card") ==
assert Parser.parse("http://example.com/twitter-card") ==
{:ok,
%{
"card" => "summary",
@ -103,7 +111,7 @@ defmodule Pleroma.Web.RichMedia.ParserTest do
end
test "parses OEmbed" do
assert Pleroma.Web.RichMedia.Parser.parse("http://example.com/oembed") ==
assert Parser.parse("http://example.com/oembed") ==
{:ok,
%{
"author_name" => "bees",
@ -132,6 +140,10 @@ defmodule Pleroma.Web.RichMedia.ParserTest do
end
test "rejects invalid OGP data" do
assert {:error, _} = Pleroma.Web.RichMedia.Parser.parse("http://example.com/malformed")
assert {:error, _} = Parser.parse("http://example.com/malformed")
end
test "returns error if getting page was not successful" do
assert {:error, :overload} = Parser.parse("http://example.com/error")
end
end

View File

@ -594,7 +594,7 @@ defmodule Pleroma.Web.TwitterAPI.UtilControllerTest do
user = User.get_by_id(user.id)
assert user.deactivated == true
assert user.name == nil
assert user.bio == nil
assert user.bio == ""
assert user.password_hash == nil
end
end