Merge remote-tracking branch 'oneric/varfixes' into develop
commit c0a99df06a
30 changed files with 74 additions and 42 deletions
@@ -87,5 +87,5 @@ steps:
   - mix ecto.create
   - mix ecto.migrate
   - mkdir -p test/tmp
-  - mix test --preload-modules --exclude erratic --exclude federated --exclude mocked
-  - mix test --preload-modules --only mocked
+  - mix test --preload-modules --exclude erratic --exclude federated --exclude mocked || mix test --failed
+  - mix test --preload-modules --only mocked || mix test --failed
@@ -8,6 +8,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 
 ## BREAKING
 - Minimum PostgreSQL version is raised to 12
+- Swagger UI moved from `/akkoma/swaggerui/` to `/pleroma/swaggerui/`
 
 ## Added
 - Implement [FEP-67ff](https://codeberg.org/fediverse/fep/src/branch/main/fep/67ff/fep-67ff.md) (federation documentation)
@@ -19,6 +20,9 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 - Meilisearch: order of results returned from our REST API now actually matches how Meilisearch ranks results
 - Emoji are now federated as anonymous objects, fixing issues with
   some strict servers e.g. rejecting e.g. remote emoji reactions
+- AP objects with additional JSON-LD profiles beyond ActivityStreams can now be fetched
+- Single-selection polls no longer expose the voter_count; MastoAPI demands it be null
+  and this confused some clients leading to vote distributions >100%
 
 ## Changed
 - Refactored Rich Media to cache the content in the database. Fetching operations that could block status rendering have been eliminated.
@@ -51,7 +51,8 @@
   hostname: System.get_env("DB_HOST") || "localhost",
   pool: Ecto.Adapters.SQL.Sandbox,
   pool_size: 50,
-  queue_target: 5000
+  queue_target: 5000,
+  log: false
 
 config :pleroma, :dangerzone, override_repo_pool_size: true
 
@@ -338,7 +338,7 @@ config :pleroma, :frontends,
 
 * `:primary` - The frontend that will be served at `/`
 * `:admin` - The frontend that will be served at `/pleroma/admin`
-* `:swagger` - Config for developers to act as an API reference to be served at `/akkoma/swaggerui/` (trailing slash _needed_). Disabled by default.
+* `:swagger` - Config for developers to act as an API reference to be served at `/pleroma/swaggerui/` (trailing slash _needed_). Disabled by default.
 * `:mastodon` - The mastodon-fe configuration. This shouldn't need to be changed. This is served at `/web` when installed.
 
 ### :static\_fe
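
(Not part of the diff, for context only.) Enabling the `:swagger` frontend described above means giving that key a frontend map like the other `config :pleroma, :frontends` entries. A minimal sketch, assuming the usual `"name"`/`"ref"` map shape and an `"enabled"` toggle; treat the exact keys as an assumption and check the cheatsheet:

```elixir
# Illustrative sketch only (not from the diff): serve the API reference
# at /pleroma/swaggerui/ by configuring the :swagger frontend entry.
config :pleroma, :frontends,
  swagger: %{
    "name" => "swagger-ui",
    "ref" => "stable",
    "enabled" => true
  }
```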
@@ -60,4 +60,4 @@ config :pleroma, :frontends,
 
 Then run the [pleroma.frontend cli task](../../administration/CLI_tasks/frontend) with the name of `swagger-ui` to install the distribution files.
 
-You will now be able to view documentation at `/akkoma/swaggerui`
+You will now be able to view documentation at `/pleroma/swaggerui`
@@ -6,7 +6,9 @@ probably install frontends.
 These are no longer bundled with the distribution and need an extra
 command to install.
 
-For most installations, the following will suffice:
+You **must** run frontend management tasks as the akkoma user,
+the same way you downloaded the build or cloned the git repo before.
+But otherwise, for most installations, the following will suffice:
 
 === "OTP"
     ```sh
@@ -28,4 +30,3 @@ For most installations, the following will suffice:
     ```
 
 For more customised installations, refer to [Frontend Management](../../configuration/frontend_management)
-
@@ -369,8 +369,12 @@ def get_object(id) do
       {"activity+json", _} ->
         {:ok, final_id, body}
 
-      {"ld+json", %{"profile" => "https://www.w3.org/ns/activitystreams"}} ->
-        {:ok, final_id, body}
+      {"ld+json", %{"profile" => profiles}} ->
+        if "https://www.w3.org/ns/activitystreams" in String.split(profiles) do
+          {:ok, final_id, body}
+        else
+          {:error, {:content_type, content_type}}
+        end
 
       _ ->
         {:error, {:content_type, content_type}}
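
(Not part of the diff, for context only.) The `profile` parameter of an `application/ld+json` Content-Type may carry several space-separated profile URIs, which is why the new clause splits the string before checking for the ActivityStreams profile. A minimal sketch of that check in isolation:

```elixir
# Illustrative only: a header such as
#   content-type: application/ld+json; profile="https://example.org/ns/superduperspec https://www.w3.org/ns/activitystreams"
# yields a space-separated profile string once the parameters are parsed.
profiles = "https://example.org/ns/superduperspec https://www.w3.org/ns/activitystreams"

String.split(profiles)
#=> ["https://example.org/ns/superduperspec", "https://www.w3.org/ns/activitystreams"]

"https://www.w3.org/ns/activitystreams" in String.split(profiles)
#=> true
```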
@@ -18,6 +18,7 @@ defmodule Pleroma.Upload.Filter.Exiftool.StripMetadata do
 
   # Formats not compatible with exiftool at this time
   def filter(%Pleroma.Upload{content_type: "image/webp"}), do: {:ok, :noop}
+  def filter(%Pleroma.Upload{content_type: "image/bmp"}), do: {:ok, :noop}
   def filter(%Pleroma.Upload{content_type: "image/svg+xml"}), do: {:ok, :noop}
 
   def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do
@@ -32,7 +32,9 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Poll do
       },
       voters_count: %Schema{
         type: :integer,
-        description: "How many unique accounts have voted. Number."
+        nullable: true,
+        description:
+          "How many unique accounts have voted for a multi-selection poll. Number, or null if single-selection poll."
       },
       voted: %Schema{
         type: :boolean,
@@ -66,10 +66,10 @@ defmodule Pleroma.Web.Endpoint do
     }
   )
 
-  plug(Plug.Static.IndexHtml, at: "/akkoma/swaggerui")
+  plug(Plug.Static.IndexHtml, at: "/pleroma/swaggerui/")
 
   plug(Pleroma.Web.Plugs.FrontendStatic,
-    at: "/akkoma/swaggerui",
+    at: "/pleroma/swaggerui",
     frontend_type: :swagger,
     gzip: true,
     if: &Pleroma.Web.Swagger.ui_enabled?/0,
@@ -19,7 +19,7 @@ def render("show.json", %{object: object, multiple: multiple, options: options}
       expired: expired,
       multiple: multiple,
       votes_count: votes_count,
-      voters_count: voters_count(object),
+      voters_count: voters_count(multiple, object),
       options: options,
       emojis: Pleroma.Web.MastodonAPI.StatusView.build_emojis(object.data["emoji"])
     }
@@ -68,11 +68,19 @@ defp options_and_votes_count(options) do
     end)
   end
 
-  defp voters_count(%{data: %{"voters" => voters}}) when is_list(voters) do
+  defp voters_count(false, _poll_data) do
+    # Mastodon always sets voter count to "null" unless multiple options were selectable
+    # Some clients may rely on this to detect multiple selection polls and it can mess
+    # up percentages for some clients if we never got a correct remote voter count and
+    # only count local voters here; see https://akkoma.dev/AkkomaGang/akkoma/issues/190
+    nil
+  end
+
+  defp voters_count(_multiple, %{data: %{"voters" => voters}}) when is_list(voters) do
     length(voters)
   end
 
-  defp voters_count(_), do: 0
+  defp voters_count(_, _), do: 0
 
   defp voted_and_own_votes(%{object: object} = params, options) do
     if params[:for] do
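
(Not part of the diff, for illustration only.) A self-contained restatement of the new `voters_count/2` dispatch, just to spell out the three cases: single-selection polls report `nil`, polls with a remote voter list report its length, everything else falls back to 0.

```elixir
defmodule VotersCountSketch do
  # Mirrors the clauses added above, outside the real PollView module.
  def voters_count(false, _poll_data), do: nil

  def voters_count(_multiple, %{data: %{"voters" => voters}}) when is_list(voters),
    do: length(voters)

  def voters_count(_, _), do: 0
end

VotersCountSketch.voters_count(false, %{data: %{"voters" => ["a", "b"]}})
#=> nil (single-selection poll: MastoAPI expects null)

VotersCountSketch.voters_count(true, %{data: %{"voters" => ["a", "b"]}})
#=> 2

VotersCountSketch.voters_count(true, %{data: %{}})
#=> 0
```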
mix.exs
@@ -180,7 +180,7 @@ defp deps do
       {:remote_ip, "~> 1.1.0"},
       {:captcha,
        git: "https://git.pleroma.social/pleroma/elixir-libraries/elixir-captcha.git",
-       ref: "90f6ce7672f70f56708792a98d98bd05176c9176"},
+       ref: "6630c42aaaab124e697b4e513190c89d8b64e410"},
       {:restarter, path: "./restarter"},
       {:majic,
        git: "https://akkoma.dev/AkkomaGang/majic.git",
mix.lock
@@ -7,7 +7,7 @@
   "bunt": {:hex, :bunt, "1.0.0", "081c2c665f086849e6d57900292b3a161727ab40431219529f13c4ddcf3e7a44", [:mix], [], "hexpm", "dc5f86aa08a5f6fa6b8096f0735c4e76d54ae5c9fa2c143e5a1fc7c1cd9bb6b5"},
   "cachex": {:hex, :cachex, "3.6.0", "14a1bfbeee060dd9bec25a5b6f4e4691e3670ebda28c8ba2884b12fe30b36bf8", [:mix], [{:eternal, "~> 1.2", [hex: :eternal, repo: "hexpm", optional: false]}, {:jumper, "~> 1.0", [hex: :jumper, repo: "hexpm", optional: false]}, {:sleeplocks, "~> 1.1", [hex: :sleeplocks, repo: "hexpm", optional: false]}, {:unsafe, "~> 1.0", [hex: :unsafe, repo: "hexpm", optional: false]}], "hexpm", "ebf24e373883bc8e0c8d894a63bbe102ae13d918f790121f5cfe6e485cc8e2e2"},
   "calendar": {:hex, :calendar, "1.0.0", "f52073a708528482ec33d0a171954ca610fe2bd28f1e871f247dc7f1565fa807", [:mix], [{:tzdata, "~> 0.1.201603 or ~> 0.5.20 or ~> 1.0", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm", "990e9581920c82912a5ee50e62ff5ef96da6b15949a2ee4734f935fdef0f0a6f"},
-  "captcha": {:git, "https://git.pleroma.social/pleroma/elixir-libraries/elixir-captcha.git", "90f6ce7672f70f56708792a98d98bd05176c9176", [ref: "90f6ce7672f70f56708792a98d98bd05176c9176"]},
+  "captcha": {:git, "https://git.pleroma.social/pleroma/elixir-libraries/elixir-captcha.git", "6630c42aaaab124e697b4e513190c89d8b64e410", [ref: "6630c42aaaab124e697b4e513190c89d8b64e410"]},
   "castore": {:hex, :castore, "1.0.9", "5cc77474afadf02c7c017823f460a17daa7908e991b0cc917febc90e466a375c", [:mix], [], "hexpm", "5ea956504f1ba6f2b4eb707061d8e17870de2bee95fb59d512872c2ef06925e7"},
   "certifi": {:hex, :certifi, "2.12.0", "2d1cca2ec95f59643862af91f001478c9863c2ac9cb6e2f89780bfd8de987329", [:rebar3], [], "hexpm", "ee68d85df22e554040cdb4be100f33873ac6051387baf6a8f6ce82272340ff1c"},
   "combine": {:hex, :combine, "0.10.0", "eff8224eeb56498a2af13011d142c5e7997a80c8f5b97c499f84c841032e429f", [:mix], [], "hexpm", "1b1dbc1790073076580d0d1d64e42eae2366583e7aecd455d1215b0d16f2451b"},
@@ -51,7 +51,6 @@ test "error if file with custom settings doesn't exist" do
       clear_config(:configurable_from_database, true)
     end
 
-    @tag capture_log: true
     test "config migration refused when deprecated settings are found" do
       clear_config([:media_proxy, :whitelist], ["domain_without_scheme.com"])
       assert config_records() == []
@@ -203,7 +203,6 @@ defp spoofed_object_with_ids(
     :ok
   end
 
-  @tag capture_log: true
   test "it works when fetching the OP actor errors out" do
     # Here we simulate a case where the author of the OP can't be read
     assert {:ok, _} =
@@ -779,7 +778,7 @@ test "should return ok if the content type is application/activity+json" do
       assert {:ok, _, "{}"} = Fetcher.get_object("https://mastodon.social/2")
     end
 
-    test "should return ok if the content type is application/ld+json with a profile" do
+    test "should return ok if the content type is application/ld+json with the ActivityStream profile" do
       Tesla.Mock.mock(fn
         %{
           method: :get,
@@ -799,6 +798,26 @@ test "should return ok if the content type is application/ld+json with a profile
       assert {:ok, _, "{}"} = Fetcher.get_object("https://mastodon.social/2")
     end
 
+    test "should return ok if the content type is application/ld+json with several profiles" do
+      Tesla.Mock.mock(fn
+        %{
+          method: :get,
+          url: "https://mastodon.social/2"
+        } ->
+          %Tesla.Env{
+            status: 200,
+            url: "https://mastodon.social/2",
+            headers: [
+              {"content-type",
+               "application/ld+json; profile=\"https://example.org/ns/superduperspec https://www.w3.org/ns/activitystreams\""}
+            ],
+            body: "{}"
+          }
+      end)
+
+      assert {:ok, _, "{}"} = Fetcher.get_object("https://mastodon.social/2")
+    end
+
     test "should not return ok with other content types" do
       Tesla.Mock.mock(fn
         %{
@@ -6,7 +6,6 @@ defmodule Pleroma.SignatureTest do
   use Pleroma.DataCase, async: false
   @moduletag :mocked
 
-  import ExUnit.CaptureLog
   import Pleroma.Factory
   import Tesla.Mock
   import Mock
@@ -115,6 +115,15 @@ test "verify webp files are skipped" do
       assert Filter.Exiftool.StripMetadata.filter(upload) == {:ok, :noop}
     end
 
+    test "verify bmp files are skipped" do
+      upload = %Pleroma.Upload{
+        name: "sample.bmp",
+        content_type: "image/bmp"
+      }
+
+      assert Filter.Exiftool.StripMetadata.filter(upload) == {:ok, :noop}
+    end
+
     test "verify svg files are skipped" do
       upload = %Pleroma.Upload{
         name: "sample.svg",
@@ -815,7 +815,6 @@ test "gets an existing user by fully qualified nickname, case insensitive" do
       assert user == fetched_user
     end
 
-    @tag capture_log: true
     test "returns nil if no user could be fetched" do
       {:error, fetched_user} = User.get_or_fetch_by_nickname("nonexistant@social.heldscal.la")
       assert fetched_user == "not found nonexistant@social.heldscal.la"
@@ -872,7 +871,6 @@ test "if nicknames clash, the old user gets a prefix with the old id to the nick
       assert orig_user.nickname == "#{orig_user.id}.admin@mastodon.example.org"
     end
 
-    @tag capture_log: true
     test "it returns the old user if stale, but unfetchable" do
       a_week_ago = NaiveDateTime.add(NaiveDateTime.utc_now(), -604_800)
 
@@ -573,7 +573,6 @@ test "it inserts an incoming activity into the database", %{conn: conn} do
       assert Activity.get_by_ap_id(data["id"])
     end
 
-    @tag capture_log: true
     test "it inserts an incoming activity into the database" <>
            "even if we can't fetch the user but have it in our db",
         %{conn: conn} do
@@ -1114,7 +1113,6 @@ test "it clears `unreachable` federation status of the sender", %{conn: conn, da
       assert Instances.reachable?(sender_host)
     end
 
-    @tag capture_log: true
     test "it removes all follower collections but actor's", %{conn: conn} do
       [actor, recipient] = insert_pair(:user)
       actor = with_signing_key(actor)
@@ -1179,7 +1177,6 @@ test "it requires authentication", %{conn: conn} do
       assert json_response(ret_conn, 200)
     end
 
-    @tag capture_log: true
     test "forwarded report", %{conn: conn} do
       admin = insert(:user, is_admin: true)
 
@@ -1260,7 +1257,6 @@ test "forwarded report", %{conn: conn} do
       )
     end
 
-    @tag capture_log: true
     test "forwarded report from mastodon", %{conn: conn} do
       admin = insert(:user, is_admin: true)
       actor = insert(:user, local: false)
@@ -114,7 +114,6 @@ test "returns error when activity not `Create` type" do
       assert Relay.publish(activity) == {:error, "Not implemented"}
     end
 
-    @tag capture_log: true
     test "returns error when activity not public" do
       activity = insert(:direct_note_activity)
       assert Relay.publish(activity) == {:error, false}
@@ -83,7 +83,6 @@ test "it works for incoming announces, fetching the announced object" do
       assert(Activity.get_create_by_object_ap_id(data["object"]))
     end
 
-    @tag capture_log: true
     test "it works for incoming announces with an existing activity" do
       user = insert(:user)
       {:ok, activity} = CommonAPI.post(user, %{status: "hey"})
@@ -136,7 +135,6 @@ test "it works for incoming announces with an inlined activity" do
       assert object.data["content"] == "this is a private toot"
     end
 
-    @tag capture_log: true
     test "it rejects incoming announces with an inlined activity from another origin" do
       Tesla.Mock.mock(fn
         %{method: :get} -> %Tesla.Env{status: 404, body: ""}
@@ -86,7 +86,6 @@ test "it fails for incoming deletes with spoofed origin" do
     assert match?({:error, _}, Transmogrifier.handle_incoming(data))
   end
 
-  @tag capture_log: true
   test "it works for incoming user deletes" do
     %{ap_id: ap_id} = insert(:user, ap_id: "http://mastodon.example.org/users/admin")
 
@@ -57,7 +57,6 @@ test "it ignores an incoming notice if we already have it" do
       assert activity == returned_activity
     end
 
-    @tag capture_log: true
     test "it fetches reply-to activities if we don't have them" do
       data =
         File.read!("test/fixtures/mastodon-post-activity.json")
@@ -537,7 +536,6 @@ test "returns object with inReplyTo when denied incoming reply", %{data: data} d
       assert modified_object["inReplyTo"] == []
     end
 
-    @tag capture_log: true
     test "returns modified object when allowed incoming reply", %{data: data} do
       object_with_reply =
         Map.put(
@@ -561,7 +561,6 @@ test "returns nil when cannot normalize object" do
       end) =~ ":valid_uri_scheme"
     end
 
-    @tag capture_log: true
    test "returns {:ok, %Object{}} for success case" do
      assert {:ok, %Object{}} =
               Transmogrifier.get_obj_helper(
@@ -79,7 +79,6 @@ test "search", %{conn: conn} do
       assert status["id"] == to_string(activity.id)
     end
 
-    @tag capture_log: true
     test "constructs hashtags from search query", %{conn: conn} do
       results =
         conn
@@ -152,7 +152,6 @@ test "filtering", %{conn: conn, user: user} do
   end
 
   describe "public" do
-    @tag capture_log: true
     test "the public timeline", %{conn: conn} do
       user = insert(:user)
 
@@ -810,7 +809,6 @@ test "filtering", %{user: user, conn: conn} do
   describe "hashtag" do
     setup do: oauth_access(["n/a"])
 
-    @tag capture_log: true
     test "hashtag timeline", %{conn: conn} do
       following = insert(:user)
 
@@ -43,7 +43,7 @@ test "renders a poll" do
         %{title: "why are you even asking?", votes_count: 0}
       ],
       votes_count: 0,
-      voters_count: 0
+      voters_count: nil
     }
 
     result = PollView.render("show.json", %{object: object})
@@ -67,7 +67,6 @@ test "performs sending notifications" do
     assert Impl.perform(notif) == {:ok, [:ok, :ok]}
   end
 
-  @tag capture_log: true
   test "returns error if notif does not match " do
     assert Impl.perform(%{}) == {:error, :unknown_type}
   end
@@ -76,7 +75,6 @@ test "successful message sending" do
     assert Impl.push_message(@message, @sub, @api_key, %Subscription{}) == :ok
   end
 
-  @tag capture_log: true
   test "fail message sending" do
     assert Impl.push_message(
              @message,
@@ -190,7 +190,6 @@ test "prevents spoofing" do
     end
   end
 
-  @tag capture_log: true
   test "prevents forgeries" do
     Tesla.Mock.mock(fn
       %{url: "https://bad.com/.well-known/webfinger?resource=acct:meanie@bad.com"} ->
@@ -3,7 +3,11 @@
 # SPDX-License-Identifier: AGPL-3.0-only
 
 os_exclude = if :os.type() == {:unix, :darwin}, do: [skip_on_mac: true], else: []
-ExUnit.start(exclude: [:federated, :erratic] ++ os_exclude)
+
+ExUnit.start(
+  capture_log: true,
+  exclude: [:federated, :erratic] ++ os_exclude
+)
 
 Ecto.Adapters.SQL.Sandbox.mode(Pleroma.Repo, :manual)
 