Merge branch 'develop' into '1865-media-only'

# Conflicts:
#   CHANGELOG.md
lain, 2020-06-20 09:33:18 +00:00
commit 7d542450b1
81 changed files with 1658 additions and 1053 deletions


@@ -1,4 +1,4 @@
-image: elixir:1.8.1
+image: elixir:1.9.4
 
 variables: &global_variables
   POSTGRES_DB: pleroma_test
@@ -170,8 +170,7 @@ stop_review_app:
 amd64:
   stage: release
-  # TODO: Replace with upstream image when 1.9.0 comes out
-  image: rinpatch/elixir:1.9.0-rc.0
+  image: elixir:1.10.3
   only: &release-only
     - stable@pleroma/pleroma
     - develop@pleroma/pleroma
@@ -208,8 +207,7 @@ amd64-musl:
   stage: release
   artifacts: *release-artifacts
   only: *release-only
-  # TODO: Replace with upstream image when 1.9.0 comes out
-  image: rinpatch/elixir:1.9.0-rc.0-alpine
+  image: elixir:1.10.3-alpine
   cache: *release-cache
   variables: *release-variables
   before_script: &before-release-musl
@@ -225,8 +223,7 @@ arm:
   only: *release-only
   tags:
     - arm32
-  # TODO: Replace with upstream image when 1.9.0 comes out
-  image: rinpatch/elixir:1.9.0-rc.0-arm
+  image: elixir:1.10.3
   cache: *release-cache
   variables: *release-variables
   before_script: *before-release
@@ -238,8 +235,7 @@ arm-musl:
   only: *release-only
   tags:
     - arm32
-  # TODO: Replace with upstream image when 1.9.0 comes out
-  image: rinpatch/elixir:1.9.0-rc.0-arm-alpine
+  image: elixir:1.10.3-alpine
   cache: *release-cache
   variables: *release-variables
   before_script: *before-release-musl
@@ -251,8 +247,7 @@ arm64:
   only: *release-only
   tags:
     - arm
-  # TODO: Replace with upstream image when 1.9.0 comes out
-  image: rinpatch/elixir:1.9.0-rc.0-arm64
+  image: elixir:1.10.3
   cache: *release-cache
   variables: *release-variables
   before_script: *before-release
@@ -265,7 +260,7 @@ arm64-musl:
   tags:
     - arm
   # TODO: Replace with upstream image when 1.9.0 comes out
-  image: rinpatch/elixir:1.9.0-rc.0-arm64-alpine
+  image: elixir:1.10.3-alpine
   cache: *release-cache
   variables: *release-variables
   before_script: *before-release-musl


@@ -6,6 +6,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 ## [unreleased]
 ### Changed
+- **Breaking:** Elixir >=1.9 is now required (was >= 1.8)
+- In Conversations, return only direct messages as `last_status`
 - Using the `only_media` filter on timelines will now exclude reblog media
 - MFR policy to set global expiration for all local Create activities
 - OGP rich media parser merged with TwitterCard


@@ -34,6 +34,16 @@ Currently Pleroma is not packaged by any OS/Distros, but if you want to package
 ### Docker
 While we dont provide docker files, other people have written very good ones. Take a look at <https://github.com/angristan/docker-pleroma> or <https://glitch.sh/sn0w/pleroma-docker>.
 
+### Compilation Troubleshooting
+If you ever encounter compilation issues during the updating of Pleroma, you can try these commands and see if they fix things:
+
+- `mix deps.clean --all`
+- `mix local.rebar`
+- `mix local.hex`
+- `rm -r _build`
+
+If you are not developing Pleroma, it is better to use the OTP release, which comes with everything precompiled.
+
 ## Documentation
 - Latest Released revision: <https://docs.pleroma.social>
 - Latest Git revision: <https://docs-develop.pleroma.social>


@@ -407,6 +407,13 @@
   ],
   whitelist: []
 
+config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Http,
+  method: :purge,
+  headers: [],
+  options: []
+
+config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Script, script_path: nil
+
 config :pleroma, :chat, enabled: true
 
 config :phoenix, :format_encoders, json: Jason
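For illustration only, not part of the diff: with the defaults above, an instance could enable the new cache invalidation by pointing the media proxy's `:invalidation` settings (described in the settings descriptions below) at one of the two providers. A hedged sketch:

```elixir
# Hypothetical instance config; key layout follows the suggestions in
# the description entries added by this commit.
config :pleroma, :media_proxy,
  enabled: true,
  invalidation: [
    enabled: true,
    provider: Pleroma.Web.MediaProxy.Invalidation.Script
  ]

config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Script,
  script_path: "./installation/nginx-cache-purge.sh.example"
```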


@@ -1650,6 +1650,31 @@
           "The base URL to access a user-uploaded file. Useful when you want to proxy the media files via another host/CDN fronts.",
         suggestions: ["https://example.com"]
       },
+      %{
+        key: :invalidation,
+        type: :keyword,
+        description: "",
+        suggestions: [
+          enabled: true,
+          provider: Pleroma.Web.MediaProxy.Invalidation.Script
+        ],
+        children: [
+          %{
+            key: :enabled,
+            type: :boolean,
+            description: "Enables invalidate media cache"
+          },
+          %{
+            key: :provider,
+            type: :module,
+            description: "Module which will be used to cache purge.",
+            suggestions: [
+              Pleroma.Web.MediaProxy.Invalidation.Script,
+              Pleroma.Web.MediaProxy.Invalidation.Http
+            ]
+          }
+        ]
+      },
       %{
         key: :proxy_opts,
         type: :keyword,
@@ -1722,6 +1747,45 @@
       }
     ]
   },
+  %{
+    group: :pleroma,
+    key: Pleroma.Web.MediaProxy.Invalidation.Http,
+    type: :group,
+    description: "HTTP invalidate settings",
+    children: [
+      %{
+        key: :method,
+        type: :atom,
+        description: "HTTP method of request. Default: :purge"
+      },
+      %{
+        key: :headers,
+        type: {:list, :tuple},
+        description: "HTTP headers of request.",
+        suggestions: [{"x-refresh", 1}]
+      },
+      %{
+        key: :options,
+        type: :keyword,
+        description: "Request options.",
+        suggestions: [params: %{ts: "xxx"}]
+      }
+    ]
+  },
+  %{
+    group: :pleroma,
+    key: Pleroma.Web.MediaProxy.Invalidation.Script,
+    type: :group,
+    description: "Script invalidate settings",
+    children: [
+      %{
+        key: :script_path,
+        type: :string,
+        description: "Path to shell script. Which will run purge cache.",
+        suggestions: ["./installation/nginx-cache-purge.sh.example"]
+      }
+    ]
+  },
   %{
     group: :pleroma,
     key: :gopher,


@@ -1225,3 +1225,65 @@ Loads json generated from `config/descriptions.exs`.
 - On success: `204`, empty response
 - On failure:
   - 400 Bad Request `"Invalid parameters"` when `status` is missing
+
+## `GET /api/pleroma/admin/media_proxy_caches`
+
+### Get a list of all banned MediaProxy URLs in Cachex
+
+- Authentication: required
+- Params:
+  - *optional* `page`: **integer** page number
+  - *optional* `page_size`: **integer** number of log entries per page (default is `50`)
+- Response:
+
+``` json
+{
+  "urls": [
+    "http://example.com/media/a688346.jpg",
+    "http://example.com/media/fb1f4d.jpg"
+  ]
+}
+```
+
+## `POST /api/pleroma/admin/media_proxy_caches/delete`
+
+### Remove a banned MediaProxy URL from Cachex
+
+- Authentication: required
+- Params:
+  - `urls` (array)
+- Response:
+
+``` json
+{
+  "urls": [
+    "http://example.com/media/a688346.jpg",
+    "http://example.com/media/fb1f4d.jpg"
+  ]
+}
+```
+
+## `POST /api/pleroma/admin/media_proxy_caches/purge`
+
+### Purge a MediaProxy URL
+
+- Authentication: required
+- Params:
+  - `urls` (array)
+  - `ban` (boolean)
+- Response:
+
+``` json
+{
+  "urls": [
+    "http://example.com/media/a688346.jpg",
+    "http://example.com/media/fb1f4d.jpg"
+  ]
+}
+```
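The endpoints above are ordinary JSON-over-HTTP calls. As a usage sketch (not part of this commit), purging and banning a cached URL from Elixir with Tesla, which Pleroma already depends on; `base_url` and `token` are assumed placeholders for the instance URL and an admin OAuth token:

```elixir
# Hedged example; endpoint path and payload come from the docs above,
# base_url and token are placeholders.
client =
  Tesla.client([
    {Tesla.Middleware.BaseUrl, base_url},
    {Tesla.Middleware.Headers, [{"authorization", "Bearer " <> token}]},
    Tesla.Middleware.JSON
  ])

{:ok, response} =
  Tesla.post(client, "/api/pleroma/admin/media_proxy_caches/purge", %{
    urls: ["http://example.com/media/a688346.jpg"],
    ban: true
  })
```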


@@ -268,7 +268,7 @@ This section describe PWA manifest instance-specific values. Currently this opti
 #### Pleroma.Web.MediaProxy.Invalidation.Script
 
-This strategy allow perform external bash script to purge cache.
+This strategy allow perform external shell script to purge cache.
 Urls of attachments pass to script as arguments.
 
 * `script_path`: path to external script.
@@ -284,8 +284,8 @@ config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Script,
 This strategy allow perform custom http request to purge cache.
 
 * `method`: http method. default is `purge`
-* `headers`: http headers. default is empty
-* `options`: request options. default is empty
+* `headers`: http headers.
+* `options`: request options.
 
 Example:
 
 ```elixir


@@ -1,2 +1,2 @@
-elixir_version=1.8.2
-erlang_version=21.3.7
+elixir_version=1.9.4
+erlang_version=22.3.4.1


@@ -13,7 +13,7 @@ CACHE_DIRECTORY="/tmp/pleroma-media-cache"
 ## $3 - (optional) the number of parallel processes to run for grep.
 get_cache_files() {
     local max_parallel=${3-16}
-    find $2 -maxdepth 2 -type d | xargs -P $max_parallel -n 1 grep -E Rl "^KEY:.*$1" | sort -u
+    find $2 -maxdepth 2 -type d | xargs -P $max_parallel -n 1 grep -E -Rl "^KEY:.*$1" | sort -u
 }
 
 ## Removes an item from the given cache zone.
@@ -37,4 +37,4 @@ purge() {
 
 }
 
-purge $1
+purge $@


@@ -72,8 +72,7 @@ defp create(group, settings) do
     group
     |> Pleroma.Config.Loader.filter_group(settings)
     |> Enum.each(fn {key, value} ->
-      key = inspect(key)
-      {:ok, _} = ConfigDB.update_or_create(%{group: inspect(group), key: key, value: value})
+      {:ok, _} = ConfigDB.update_or_create(%{group: group, key: key, value: value})
 
       shell_info("Settings for key #{key} migrated.")
     end)
@@ -131,12 +130,9 @@ defp write_and_delete(config, file, delete?) do
   end
 
   defp write(config, file) do
-    value =
-      config.value
-      |> ConfigDB.from_binary()
-      |> inspect(limit: :infinity)
+    value = inspect(config.value, limit: :infinity)
 
-    IO.write(file, "config #{config.group}, #{config.key}, #{value}\r\n\r\n")
+    IO.write(file, "config #{inspect(config.group)}, #{inspect(config.key)}, #{value}\r\n\r\n")
 
     config
   end


@@ -148,7 +148,8 @@ defp cachex_children do
       build_cachex("idempotency", expiration: idempotency_expiration(), limit: 2500),
       build_cachex("web_resp", limit: 2500),
      build_cachex("emoji_packs", expiration: emoji_packs_expiration(), limit: 10),
-      build_cachex("failed_proxy_url", limit: 2500)
+      build_cachex("failed_proxy_url", limit: 2500),
+      build_cachex("banned_urls", default_ttl: :timer.hours(24 * 30), limit: 5_000)
     ]
   end


@@ -6,7 +6,7 @@ defmodule Pleroma.ConfigDB do
   use Ecto.Schema
 
   import Ecto.Changeset
-  import Ecto.Query
+  import Ecto.Query, only: [select: 3]
   import Pleroma.Web.Gettext
 
   alias __MODULE__
@@ -14,16 +14,6 @@ defmodule Pleroma.ConfigDB do
   @type t :: %__MODULE__{}
 
-  @full_key_update [
-    {:pleroma, :ecto_repos},
-    {:quack, :meta},
-    {:mime, :types},
-    {:cors_plug, [:max_age, :methods, :expose, :headers]},
-    {:auto_linker, :opts},
-    {:swarm, :node_blacklist},
-    {:logger, :backends}
-  ]
-
   @full_subkey_update [
     {:pleroma, :assets, :mascots},
     {:pleroma, :emoji, :groups},
@@ -32,14 +22,10 @@
     {:pleroma, :mrf_keyword, :replace}
   ]
 
-  @regex ~r/^~r(?'delimiter'[\/|"'([{<]{1})(?'pattern'.+)[\/|"')\]}>]{1}(?'modifier'[uismxfU]*)/u
-
-  @delimiters ["/", "|", "\"", "'", {"(", ")"}, {"[", "]"}, {"{", "}"}, {"<", ">"}]
-
   schema "config" do
-    field(:key, :string)
-    field(:group, :string)
-    field(:value, :binary)
+    field(:key, Pleroma.EctoType.Config.Atom)
+    field(:group, Pleroma.EctoType.Config.Atom)
+    field(:value, Pleroma.EctoType.Config.BinaryValue)
     field(:db, {:array, :string}, virtual: true, default: [])
 
     timestamps()
@@ -51,10 +37,6 @@ def get_all_as_keyword do
     |> select([c], {c.group, c.key, c.value})
     |> Repo.all()
     |> Enum.reduce([], fn {group, key, value}, acc ->
-      group = ConfigDB.from_string(group)
-      key = ConfigDB.from_string(key)
-      value = from_binary(value)
-
       Keyword.update(acc, group, [{key, value}], &Keyword.merge(&1, [{key, value}]))
     end)
   end
@@ -64,50 +46,41 @@ def get_by_params(params), do: Repo.get_by(ConfigDB, params)
   @spec changeset(ConfigDB.t(), map()) :: Changeset.t()
   def changeset(config, params \\ %{}) do
-    params = Map.put(params, :value, transform(params[:value]))
-
     config
     |> cast(params, [:key, :group, :value])
     |> validate_required([:key, :group, :value])
     |> unique_constraint(:key, name: :config_group_key_index)
   end
 
-  @spec create(map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()}
-  def create(params) do
+  defp create(params) do
     %ConfigDB{}
     |> changeset(params)
     |> Repo.insert()
   end
 
-  @spec update(ConfigDB.t(), map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()}
-  def update(%ConfigDB{} = config, %{value: value}) do
+  defp update(%ConfigDB{} = config, %{value: value}) do
     config
     |> changeset(%{value: value})
     |> Repo.update()
   end
 
-  @spec get_db_keys(ConfigDB.t()) :: [String.t()]
-  def get_db_keys(%ConfigDB{} = config) do
-    config.value
-    |> ConfigDB.from_binary()
-    |> get_db_keys(config.key)
-  end
-
   @spec get_db_keys(keyword(), any()) :: [String.t()]
   def get_db_keys(value, key) do
-    if Keyword.keyword?(value) do
-      value |> Keyword.keys() |> Enum.map(&convert(&1))
-    else
-      [convert(key)]
-    end
+    keys =
+      if Keyword.keyword?(value) do
+        Keyword.keys(value)
+      else
+        [key]
+      end
+
+    Enum.map(keys, &to_json_types(&1))
   end
 
   @spec merge_group(atom(), atom(), keyword(), keyword()) :: keyword()
   def merge_group(group, key, old_value, new_value) do
-    new_keys = to_map_set(new_value)
+    new_keys = to_mapset(new_value)
 
-    intersect_keys =
-      old_value |> to_map_set() |> MapSet.intersection(new_keys) |> MapSet.to_list()
+    intersect_keys = old_value |> to_mapset() |> MapSet.intersection(new_keys) |> MapSet.to_list()
 
     merged_value = ConfigDB.merge(old_value, new_value)
@@ -120,12 +93,10 @@ def merge_group(group, key, old_value, new_value) do
         []
      end)
    |> List.flatten()
-    |> Enum.reduce(merged_value, fn subkey, acc ->
-      Keyword.put(acc, subkey, new_value[subkey])
-    end)
+    |> Enum.reduce(merged_value, &Keyword.put(&2, &1, new_value[&1]))
   end
 
-  defp to_map_set(keyword) do
+  defp to_mapset(keyword) do
     keyword
     |> Keyword.keys()
     |> MapSet.new()
@@ -159,43 +130,40 @@ defp deep_merge(_key, value1, value2) do
   @spec update_or_create(map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()}
   def update_or_create(params) do
+    params = Map.put(params, :value, to_elixir_types(params[:value]))
     search_opts = Map.take(params, [:group, :key])
 
     with %ConfigDB{} = config <- ConfigDB.get_by_params(search_opts),
-         {:partial_update, true, config} <-
-           {:partial_update, can_be_partially_updated?(config), config},
-         old_value <- from_binary(config.value),
-         transformed_value <- do_transform(params[:value]),
-         {:can_be_merged, true, config} <- {:can_be_merged, is_list(transformed_value), config},
-         new_value <-
-           merge_group(
-             ConfigDB.from_string(config.group),
-             ConfigDB.from_string(config.key),
-             old_value,
-             transformed_value
-           ) do
-      ConfigDB.update(config, %{value: new_value})
+         {_, true, config} <- {:partial_update, can_be_partially_updated?(config), config},
+         {_, true, config} <-
+           {:can_be_merged, is_list(params[:value]) and is_list(config.value), config} do
+      new_value = merge_group(config.group, config.key, config.value, params[:value])
+      update(config, %{value: new_value})
     else
       {reason, false, config} when reason in [:partial_update, :can_be_merged] ->
-        ConfigDB.update(config, params)
+        update(config, params)
 
       nil ->
-        ConfigDB.create(params)
+        create(params)
     end
   end
 
   defp can_be_partially_updated?(%ConfigDB{} = config), do: not only_full_update?(config)
 
-  defp only_full_update?(%ConfigDB{} = config) do
-    config_group = ConfigDB.from_string(config.group)
-    config_key = ConfigDB.from_string(config.key)
-
-    Enum.any?(@full_key_update, fn
-      {group, key} when is_list(key) ->
-        config_group == group and config_key in key
-
-      {group, key} ->
-        config_group == group and config_key == key
+  defp only_full_update?(%ConfigDB{group: group, key: key}) do
+    full_key_update = [
+      {:pleroma, :ecto_repos},
+      {:quack, :meta},
+      {:mime, :types},
+      {:cors_plug, [:max_age, :methods, :expose, :headers]},
+      {:auto_linker, :opts},
+      {:swarm, :node_blacklist},
+      {:logger, :backends}
+    ]
+
+    Enum.any?(full_key_update, fn
+      {s_group, s_key} ->
+        group == s_group and ((is_list(s_key) and key in s_key) or key == s_key)
     end)
   end
@@ -205,11 +173,10 @@ def delete(params) do
     with %ConfigDB{} = config <- ConfigDB.get_by_params(search_opts),
          {config, sub_keys} when is_list(sub_keys) <- {config, params[:subkeys]},
-         old_value <- from_binary(config.value),
-         keys <- Enum.map(sub_keys, &do_transform_string(&1)),
-         {:partial_remove, config, new_value} when new_value != [] <-
-           {:partial_remove, config, Keyword.drop(old_value, keys)} do
-      ConfigDB.update(config, %{value: new_value})
+         keys <- Enum.map(sub_keys, &string_to_elixir_types(&1)),
+         {_, config, new_value} when new_value != [] <-
+           {:partial_remove, config, Keyword.drop(config.value, keys)} do
+      update(config, %{value: new_value})
     else
       {:partial_remove, config, []} ->
         Repo.delete(config)
@@ -225,37 +192,32 @@ def delete(params) do
     end
   end
 
-  @spec from_binary(binary()) :: term()
-  def from_binary(binary), do: :erlang.binary_to_term(binary)
-
-  @spec from_binary_with_convert(binary()) :: any()
-  def from_binary_with_convert(binary) do
-    binary
-    |> from_binary()
-    |> do_convert()
-  end
-
-  @spec from_string(String.t()) :: atom() | no_return()
-  def from_string(string), do: do_transform_string(string)
-
-  @spec convert(any()) :: any()
-  def convert(entity), do: do_convert(entity)
-
-  defp do_convert(entity) when is_list(entity) do
-    for v <- entity, into: [], do: do_convert(v)
-  end
-
-  defp do_convert(%Regex{} = entity), do: inspect(entity)
-
-  defp do_convert(entity) when is_map(entity) do
-    for {k, v} <- entity, into: %{}, do: {do_convert(k), do_convert(v)}
-  end
+  @spec to_json_types(term()) :: map() | list() | boolean() | String.t()
+  def to_json_types(entity) when is_list(entity) do
+    Enum.map(entity, &to_json_types/1)
+  end
+
+  def to_json_types(%Regex{} = entity), do: inspect(entity)
+
+  def to_json_types(entity) when is_map(entity) do
+    Map.new(entity, fn {k, v} -> {to_json_types(k), to_json_types(v)} end)
+  end
+
+  def to_json_types({:args, args}) when is_list(args) do
+    arguments =
+      Enum.map(args, fn
+        arg when is_tuple(arg) -> inspect(arg)
+        arg -> to_json_types(arg)
+      end)
+
+    %{"tuple" => [":args", arguments]}
+  end
 
-  defp do_convert({:proxy_url, {type, :localhost, port}}) do
-    %{"tuple" => [":proxy_url", %{"tuple" => [do_convert(type), "localhost", port]}]}
+  def to_json_types({:proxy_url, {type, :localhost, port}}) do
+    %{"tuple" => [":proxy_url", %{"tuple" => [to_json_types(type), "localhost", port]}]}
   end
 
-  defp do_convert({:proxy_url, {type, host, port}}) when is_tuple(host) do
+  def to_json_types({:proxy_url, {type, host, port}}) when is_tuple(host) do
     ip =
       host
       |> :inet_parse.ntoa()
@@ -264,66 +226,64 @@ defp do_convert({:proxy_url, {type, host, port}}) when is_tuple(host) do
     %{
       "tuple" => [
         ":proxy_url",
-        %{"tuple" => [do_convert(type), ip, port]}
+        %{"tuple" => [to_json_types(type), ip, port]}
       ]
     }
   end
 
-  defp do_convert({:proxy_url, {type, host, port}}) do
+  def to_json_types({:proxy_url, {type, host, port}}) do
     %{
       "tuple" => [
         ":proxy_url",
-        %{"tuple" => [do_convert(type), to_string(host), port]}
+        %{"tuple" => [to_json_types(type), to_string(host), port]}
       ]
     }
   end
 
-  defp do_convert({:partial_chain, entity}), do: %{"tuple" => [":partial_chain", inspect(entity)]}
+  def to_json_types({:partial_chain, entity}),
+    do: %{"tuple" => [":partial_chain", inspect(entity)]}
 
-  defp do_convert(entity) when is_tuple(entity) do
+  def to_json_types(entity) when is_tuple(entity) do
     value =
       entity
       |> Tuple.to_list()
-      |> do_convert()
+      |> to_json_types()
 
     %{"tuple" => value}
   end
 
-  defp do_convert(entity) when is_boolean(entity) or is_number(entity) or is_nil(entity) do
+  def to_json_types(entity) when is_binary(entity), do: entity
+
+  def to_json_types(entity) when is_boolean(entity) or is_number(entity) or is_nil(entity) do
     entity
   end
 
-  defp do_convert(entity)
-       when is_atom(entity) and entity in [:"tlsv1.1", :"tlsv1.2", :"tlsv1.3"] do
+  def to_json_types(entity) when entity in [:"tlsv1.1", :"tlsv1.2", :"tlsv1.3"] do
     ":#{entity}"
   end
 
-  defp do_convert(entity) when is_atom(entity), do: inspect(entity)
-
-  defp do_convert(entity) when is_binary(entity), do: entity
-
-  @spec transform(any()) :: binary() | no_return()
-  def transform(entity) when is_binary(entity) or is_map(entity) or is_list(entity) do
-    entity
-    |> do_transform()
-    |> to_binary()
-  end
-
-  def transform(entity), do: to_binary(entity)
-
-  @spec transform_with_out_binary(any()) :: any()
-  def transform_with_out_binary(entity), do: do_transform(entity)
-
-  @spec to_binary(any()) :: binary()
-  def to_binary(entity), do: :erlang.term_to_binary(entity)
-
-  defp do_transform(%Regex{} = entity), do: entity
-
-  defp do_transform(%{"tuple" => [":proxy_url", %{"tuple" => [type, host, port]}]}) do
-    {:proxy_url, {do_transform_string(type), parse_host(host), port}}
-  end
+  def to_json_types(entity) when is_atom(entity), do: inspect(entity)
+
+  @spec to_elixir_types(boolean() | String.t() | map() | list()) :: term()
+  def to_elixir_types(%{"tuple" => [":args", args]}) when is_list(args) do
+    arguments =
+      Enum.map(args, fn arg ->
+        if String.contains?(arg, ["{", "}"]) do
+          {elem, []} = Code.eval_string(arg)
+          elem
+        else
+          to_elixir_types(arg)
+        end
+      end)
+
+    {:args, arguments}
+  end
+
+  def to_elixir_types(%{"tuple" => [":proxy_url", %{"tuple" => [type, host, port]}]}) do
+    {:proxy_url, {string_to_elixir_types(type), parse_host(host), port}}
+  end
 
-  defp do_transform(%{"tuple" => [":partial_chain", entity]}) do
+  def to_elixir_types(%{"tuple" => [":partial_chain", entity]}) do
     {partial_chain, []} =
       entity
      |> String.replace(~r/[^\w|^{:,[|^,|^[|^\]^}|^\/|^\.|^"]^\s/, "")
@@ -332,25 +292,51 @@ defp do_transform(%{"tuple" => [":partial_chain", entity]}) do
     {:partial_chain, partial_chain}
   end
 
-  defp do_transform(%{"tuple" => entity}) do
-    Enum.reduce(entity, {}, fn val, acc -> Tuple.append(acc, do_transform(val)) end)
+  def to_elixir_types(%{"tuple" => entity}) do
+    Enum.reduce(entity, {}, &Tuple.append(&2, to_elixir_types(&1)))
   end
 
-  defp do_transform(entity) when is_map(entity) do
-    for {k, v} <- entity, into: %{}, do: {do_transform(k), do_transform(v)}
+  def to_elixir_types(entity) when is_map(entity) do
+    Map.new(entity, fn {k, v} -> {to_elixir_types(k), to_elixir_types(v)} end)
   end
 
-  defp do_transform(entity) when is_list(entity) do
-    for v <- entity, into: [], do: do_transform(v)
+  def to_elixir_types(entity) when is_list(entity) do
+    Enum.map(entity, &to_elixir_types/1)
   end
 
-  defp do_transform(entity) when is_binary(entity) do
+  def to_elixir_types(entity) when is_binary(entity) do
     entity
     |> String.trim()
-    |> do_transform_string()
+    |> string_to_elixir_types()
   end
 
-  defp do_transform(entity), do: entity
+  def to_elixir_types(entity), do: entity
+
+  @spec string_to_elixir_types(String.t()) ::
+          atom() | Regex.t() | module() | String.t() | no_return()
+  def string_to_elixir_types("~r" <> _pattern = regex) do
+    pattern =
+      ~r/^~r(?'delimiter'[\/|"'([{<]{1})(?'pattern'.+)[\/|"')\]}>]{1}(?'modifier'[uismxfU]*)/u
+
+    delimiters = ["/", "|", "\"", "'", {"(", ")"}, {"[", "]"}, {"{", "}"}, {"<", ">"}]
+
+    with %{"modifier" => modifier, "pattern" => pattern, "delimiter" => regex_delimiter} <-
+           Regex.named_captures(pattern, regex),
+         {:ok, {leading, closing}} <- find_valid_delimiter(delimiters, pattern, regex_delimiter),
+         {result, _} <- Code.eval_string("~r#{leading}#{pattern}#{closing}#{modifier}") do
+      result
+    end
+  end
+
+  def string_to_elixir_types(":" <> atom), do: String.to_atom(atom)
+
+  def string_to_elixir_types(value) do
+    if module_name?(value) do
+      String.to_existing_atom("Elixir." <> value)
+    else
+      value
+    end
+  end
 
   defp parse_host("localhost"), do: :localhost
@@ -387,27 +373,8 @@ defp find_valid_delimiter([delimiter | others], pattern, regex_delimiter) do
     end
   end
 
-  defp do_transform_string("~r" <> _pattern = regex) do
-    with %{"modifier" => modifier, "pattern" => pattern, "delimiter" => regex_delimiter} <-
-           Regex.named_captures(@regex, regex),
-         {:ok, {leading, closing}} <- find_valid_delimiter(@delimiters, pattern, regex_delimiter),
-         {result, _} <- Code.eval_string("~r#{leading}#{pattern}#{closing}#{modifier}") do
-      result
-    end
-  end
-
-  defp do_transform_string(":" <> atom), do: String.to_atom(atom)
-
-  defp do_transform_string(value) do
-    if is_module_name?(value) do
-      String.to_existing_atom("Elixir." <> value)
-    else
-      value
-    end
-  end
-
-  @spec is_module_name?(String.t()) :: boolean()
-  def is_module_name?(string) do
+  @spec module_name?(String.t()) :: boolean()
+  def module_name?(string) do
     Regex.match?(~r/^(Pleroma|Phoenix|Tesla|Quack|Ueberauth|Swoosh)\./, string) or
       string in ["Oban", "Ueberauth", "ExSyslogger"]
   end
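To make the renamed helpers concrete, a round-trip sketch derived from the clauses above (illustration, not part of the commit):

```elixir
# Elixir term -> JSON-safe form, via the to_json_types/1 clauses above:
Pleroma.ConfigDB.to_json_types({:proxy_url, {:socks5, :localhost, 1234}})
#=> %{"tuple" => [":proxy_url", %{"tuple" => [":socks5", "localhost", 1234]}]}

# String -> Elixir term, via string_to_elixir_types/1:
Pleroma.ConfigDB.string_to_elixir_types(":enabled")
#=> :enabled
Pleroma.ConfigDB.string_to_elixir_types("Pleroma.Web.Endpoint")
#=> Pleroma.Web.Endpoint
```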


@@ -28,10 +28,6 @@ defmodule Pleroma.Config.TransferTask do
     {:pleroma, Pleroma.Captcha, [:seconds_valid]},
     {:pleroma, Pleroma.Upload, [:proxy_remote]},
     {:pleroma, :instance, [:upload_limit]},
-    {:pleroma, :email_notifications, [:digest]},
-    {:pleroma, :oauth2, [:clean_expired_tokens]},
-    {:pleroma, Pleroma.ActivityExpiration, [:enabled]},
-    {:pleroma, Pleroma.ScheduledActivity, [:enabled]},
     {:pleroma, :gopher, [:enabled]}
   ]
@@ -48,7 +44,7 @@ def load_and_update_env(deleted_settings \\ [], restart_pleroma? \\ true) do
     {logger, other} =
       (Repo.all(ConfigDB) ++ deleted_settings)
-      |> Enum.map(&transform_and_merge/1)
+      |> Enum.map(&merge_with_default/1)
       |> Enum.split_with(fn {group, _, _, _} -> group in [:logger, :quack] end)
 
     logger
@@ -92,11 +88,7 @@ defp maybe_set_pleroma_last(apps) do
     end
   end
 
-  defp transform_and_merge(%{group: group, key: key, value: value} = setting) do
-    group = ConfigDB.from_string(group)
-    key = ConfigDB.from_string(key)
-    value = ConfigDB.from_binary(value)
-
+  defp merge_with_default(%{group: group, key: key, value: value} = setting) do
     default = Config.Holder.default_config(group, key)
 
     merged =


@@ -162,10 +162,13 @@ def for_user_with_last_activity_id(user, params \\ %{}) do
     for_user(user, params)
     |> Enum.map(fn participation ->
       activity_id =
-        ActivityPub.fetch_latest_activity_id_for_context(participation.conversation.ap_id, %{
-          user: user,
-          blocking_user: user
-        })
+        ActivityPub.fetch_latest_direct_activity_id_for_context(
+          participation.conversation.ap_id,
+          %{
+            user: user,
+            blocking_user: user
+          }
+        )
 
       %{
         participation


@@ -1,4 +1,8 @@
-defmodule Pleroma.Web.ActivityPub.ObjectValidators.Types.DateTime do
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.DateTime do
   @moduledoc """
   The AP standard defines the date fields in AP as xsd:DateTime. Elixir's
   DateTime can't parse this, but it can parse the related iso8601. This


@@ -1,4 +1,8 @@
-defmodule Pleroma.Web.ActivityPub.ObjectValidators.Types.ObjectID do
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.ObjectID do
   use Ecto.Type
 
   def type, do: :string


@@ -1,7 +1,11 @@
-defmodule Pleroma.Web.ActivityPub.ObjectValidators.Types.Recipients do
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.Recipients do
   use Ecto.Type
 
-  alias Pleroma.Web.ActivityPub.ObjectValidators.Types.ObjectID
+  alias Pleroma.EctoType.ActivityPub.ObjectValidators.ObjectID
 
   def type, do: {:array, ObjectID}


@@ -2,7 +2,7 @@
 # Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
 # SPDX-License-Identifier: AGPL-3.0-only
 
-defmodule Pleroma.Web.ActivityPub.ObjectValidators.Types.SafeText do
+defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.SafeText do
   use Ecto.Type
 
   alias Pleroma.HTML


@@ -1,4 +1,8 @@
-defmodule Pleroma.Web.ActivityPub.ObjectValidators.Types.Uri do
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.Uri do
   use Ecto.Type
 
   def type, do: :string


@@ -0,0 +1,26 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.EctoType.Config.Atom do
+  use Ecto.Type
+
+  def type, do: :atom
+
+  def cast(key) when is_atom(key) do
+    {:ok, key}
+  end
+
+  def cast(key) when is_binary(key) do
+    {:ok, Pleroma.ConfigDB.string_to_elixir_types(key)}
+  end
+
+  def cast(_), do: :error
+
+  def load(key) do
+    {:ok, Pleroma.ConfigDB.string_to_elixir_types(key)}
+  end
+
+  def dump(key) when is_atom(key), do: {:ok, inspect(key)}
+  def dump(_), do: :error
+end
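A behavioural sketch of the new type (my illustration, not from the diff): atoms pass through `cast/1`, strings are interpreted with `ConfigDB.string_to_elixir_types/1`, and `dump/1` stores the `inspect`ed atom:

```elixir
Pleroma.EctoType.Config.Atom.cast(:purge)    #=> {:ok, :purge}
Pleroma.EctoType.Config.Atom.cast(":purge")  #=> {:ok, :purge}
Pleroma.EctoType.Config.Atom.dump(:purge)    #=> {:ok, ":purge"}
Pleroma.EctoType.Config.Atom.load(":purge")  #=> {:ok, :purge}
```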


@@ -0,0 +1,27 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.EctoType.Config.BinaryValue do
+  use Ecto.Type
+
+  def type, do: :term
+
+  def cast(value) when is_binary(value) do
+    if String.valid?(value) do
+      {:ok, value}
+    else
+      {:ok, :erlang.binary_to_term(value)}
+    end
+  end
+
+  def cast(value), do: {:ok, value}
+
+  def load(value) when is_binary(value) do
+    {:ok, :erlang.binary_to_term(value)}
+  end
+
+  def dump(value) do
+    {:ok, :erlang.term_to_binary(value)}
+  end
+end
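A round-trip sketch (illustration only): values are persisted in Erlang External Term Format and decoded again on load:

```elixir
{:ok, bin} = Pleroma.EctoType.Config.BinaryValue.dump([limit: 2500])
Pleroma.EctoType.Config.BinaryValue.load(bin)
#=> {:ok, [limit: 2500]}
```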


@@ -18,7 +18,7 @@ def fill_in_notification_types do
       )
 
     query
-    |> Repo.all()
+    |> Repo.chunk_stream(100)
     |> Enum.each(fn notification ->
       type =
         notification.activity


@@ -64,6 +64,12 @@ def fetch_paginated(query, params, :offset, table_binding) do
   @spec paginate(Ecto.Query.t(), map(), type(), atom() | nil) :: [Ecto.Schema.t()]
   def paginate(query, options, method \\ :keyset, table_binding \\ nil)
 
+  def paginate(list, options, _method, _table_binding) when is_list(list) do
+    offset = options[:offset] || 0
+    limit = options[:limit] || 0
+    Enum.slice(list, offset, limit)
+  end
+
   def paginate(query, options, :keyset, table_binding) do
     query
     |> restrict(:min_id, options, table_binding)
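The new list clause lets already-materialized lists share the pagination interface used for queries. A usage sketch, assuming the enclosing module is `Pleroma.Pagination`:

```elixir
# In-memory pagination; note that a missing :limit defaults to 0 and
# therefore yields an empty list.
Pleroma.Pagination.paginate([:a, :b, :c, :d], %{offset: 1, limit: 2})
#=> [:b, :c]
```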


@@ -10,6 +10,8 @@ defmodule Pleroma.Plugs.UploadedMedia do
   import Pleroma.Web.Gettext
   require Logger
 
+  alias Pleroma.Web.MediaProxy
+
   @behaviour Plug
   # no slashes
   @path "media"
@@ -35,8 +37,7 @@ def call(%{request_path: <<"/", @path, "/", file::binary>>} = conn, opts) do
       %{query_params: %{"name" => name}} = conn ->
         name = String.replace(name, "\"", "\\\"")
 
-        conn
-        |> put_resp_header("content-disposition", "filename=\"#{name}\"")
+        put_resp_header(conn, "content-disposition", "filename=\"#{name}\"")
 
       conn ->
         conn
@@ -47,7 +48,8 @@ def call(%{request_path: <<"/", @path, "/", file::binary>>} = conn, opts) do
     with uploader <- Keyword.fetch!(config, :uploader),
          proxy_remote = Keyword.get(config, :proxy_remote, false),
-         {:ok, get_method} <- uploader.get_file(file) do
+         {:ok, get_method} <- uploader.get_file(file),
+         false <- media_is_banned(conn, get_method) do
       get_media(conn, get_method, proxy_remote, opts)
     else
       _ ->
@@ -59,6 +61,14 @@ def call(%{request_path: <<"/", @path, "/", file::binary>>} = conn, opts) do
 
   def call(conn, _opts), do: conn
 
+  defp media_is_banned(%{request_path: path} = _conn, {:static_dir, _}) do
+    MediaProxy.in_banned_urls(Pleroma.Web.base_url() <> path)
+  end
+
+  defp media_is_banned(_, {:url, url}), do: MediaProxy.in_banned_urls(url)
+
+  defp media_is_banned(_, _), do: false
+
   defp get_media(conn, {:static_dir, directory}, _, opts) do
     static_opts =
       Map.get(opts, :static_plug_opts)
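`media_is_banned/2` defers to `MediaProxy.in_banned_urls/1`, which is backed by the `banned_urls` cache registered in the application supervisor above. A hypothetical sketch of that check, assuming `build_cachex/2` registers the cache under the name `:banned_urls_cache` like Pleroma's other caches; the real MediaProxy module is not shown in this diff:

```elixir
# Hypothetical; Cachex.exists?/2 returns {:ok, true | false}.
def in_banned_urls(url) do
  {:ok, banned?} = Cachex.exists?(:banned_urls_cache, url)
  banned?
end
```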


@@ -8,6 +8,7 @@ defmodule Pleroma.Repo do
     adapter: Ecto.Adapters.Postgres,
     migration_timestamps: [type: :naive_datetime_usec]
 
+  import Ecto.Query
   require Logger
 
   defmodule Instrumenter do
@@ -78,6 +79,33 @@ def check_migrations_applied!() do
       :ok
     end
   end
+
+  def chunk_stream(query, chunk_size) do
+    # We don't actually need start and end functions of resource streaming,
+    # but it seems to be the only way to not fetch records one-by-one and
+    # have individual records be the elements of the stream, instead of
+    # lists of records
+    Stream.resource(
+      fn -> 0 end,
+      fn
+        last_id ->
+          query
+          |> order_by(asc: :id)
+          |> where([r], r.id > ^last_id)
+          |> limit(^chunk_size)
+          |> all()
+          |> case do
+            [] ->
+              {:halt, last_id}
+
+            records ->
+              last_id = List.last(records).id
+              {records, last_id}
+          end
+      end,
+      fn _ -> :ok end
+    )
+  end
 end
 
 defmodule Pleroma.Repo.UnappliedMigrationsError do
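A usage sketch for `chunk_stream/2` (my example, not from the diff): records are fetched `chunk_size` rows at a time, but the stream yields them one by one, so large tables can be traversed with bounded memory:

```elixir
# Assumes the Pleroma.User schema shown elsewhere in this commit.
Pleroma.User
|> Pleroma.Repo.chunk_stream(100)
|> Enum.each(fn user -> IO.inspect(user.nickname) end)
```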


@@ -5,10 +5,10 @@
 defmodule Pleroma.Signature do
   @behaviour HTTPSignatures.Adapter
 
+  alias Pleroma.EctoType.ActivityPub.ObjectValidators
   alias Pleroma.Keys
   alias Pleroma.User
   alias Pleroma.Web.ActivityPub.ActivityPub
-  alias Pleroma.Web.ActivityPub.ObjectValidators.Types
 
   def key_id_to_actor_id(key_id) do
     uri =
@@ -24,7 +24,7 @@ def key_id_to_actor_id(key_id) do
     maybe_ap_id = URI.to_string(uri)
 
-    case Types.ObjectID.cast(maybe_ap_id) do
+    case ObjectValidators.ObjectID.cast(maybe_ap_id) do
       {:ok, ap_id} ->
         {:ok, ap_id}


@@ -14,6 +14,7 @@ defmodule Pleroma.User do
   alias Pleroma.Config
   alias Pleroma.Conversation.Participation
   alias Pleroma.Delivery
+  alias Pleroma.EctoType.ActivityPub.ObjectValidators
   alias Pleroma.Emoji
   alias Pleroma.FollowingRelationship
   alias Pleroma.Formatter
@@ -30,7 +31,6 @@ defmodule Pleroma.User do
   alias Pleroma.Web
   alias Pleroma.Web.ActivityPub.ActivityPub
   alias Pleroma.Web.ActivityPub.Builder
-  alias Pleroma.Web.ActivityPub.ObjectValidators.Types
   alias Pleroma.Web.ActivityPub.Pipeline
   alias Pleroma.Web.ActivityPub.Utils
   alias Pleroma.Web.CommonAPI
@@ -79,6 +79,7 @@ defmodule Pleroma.User do
   schema "users" do
     field(:bio, :string)
+    field(:raw_bio, :string)
     field(:email, :string)
     field(:name, :string)
     field(:nickname, :string)
@@ -115,7 +116,7 @@ defmodule Pleroma.User do
     field(:is_admin, :boolean, default: false)
     field(:show_role, :boolean, default: true)
     field(:settings, :map, default: nil)
-    field(:uri, Types.Uri, default: nil)
+    field(:uri, ObjectValidators.Uri, default: nil)
     field(:hide_followers_count, :boolean, default: false)
     field(:hide_follows_count, :boolean, default: false)
     field(:hide_followers, :boolean, default: false)
@@ -432,6 +433,7 @@ def update_changeset(struct, params \\ %{}) do
       params,
       [
         :bio,
+        :raw_bio,
         :name,
         :emoji,
         :avatar,
@@ -607,7 +609,16 @@ def register_changeset(struct, params \\ %{}, opts \\ []) do
     struct
     |> confirmation_changeset(need_confirmation: need_confirmation?)
-    |> cast(params, [:bio, :email, :name, :nickname, :password, :password_confirmation, :emoji])
+    |> cast(params, [
+      :bio,
+      :raw_bio,
+      :email,
+      :name,
+      :nickname,
+      :password,
+      :password_confirmation,
+      :emoji
+    ])
     |> validate_required([:name, :nickname, :password, :password_confirmation])
     |> validate_confirmation(:password)
     |> unique_constraint(:email)
@@ -747,7 +758,6 @@ def follow(%User{} = follower, %User{} = followed, state \\ :follow_accept) do
       follower
       |> update_following_count()
-      |> set_cache()
     end
   end
@@ -776,7 +786,6 @@ defp do_unfollow(%User{} = follower, %User{} = followed) do
         {:ok, follower} =
           follower
           |> update_following_count()
-          |> set_cache()
 
         {:ok, follower, followed}
@@ -1128,35 +1137,25 @@ defp follow_information_changeset(user, params) do
     ])
   end
 
+  @spec update_follower_count(User.t()) :: {:ok, User.t()}
   def update_follower_count(%User{} = user) do
     if user.local or !Pleroma.Config.get([:instance, :external_user_synchronization]) do
-      follower_count_query =
-        User.Query.build(%{followers: user, deactivated: false})
-        |> select([u], %{count: count(u.id)})
-
-      User
-      |> where(id: ^user.id)
-      |> join(:inner, [u], s in subquery(follower_count_query))
-      |> update([u, s],
-        set: [follower_count: s.count]
-      )
-      |> select([u], u)
-      |> Repo.update_all([])
-      |> case do
-        {1, [user]} -> set_cache(user)
-        _ -> {:error, user}
-      end
+      follower_count = FollowingRelationship.follower_count(user)
+
+      user
+      |> follow_information_changeset(%{follower_count: follower_count})
+      |> update_and_set_cache
     else
       {:ok, maybe_fetch_follow_information(user)}
     end
   end
 
-  @spec update_following_count(User.t()) :: User.t()
+  @spec update_following_count(User.t()) :: {:ok, User.t()}
   def update_following_count(%User{local: false} = user) do
     if Pleroma.Config.get([:instance, :external_user_synchronization]) do
-      maybe_fetch_follow_information(user)
+      {:ok, maybe_fetch_follow_information(user)}
     else
-      user
+      {:ok, user}
     end
   end
@@ -1165,7 +1164,7 @@ def update_following_count(%User{local: true} = user) do
     user
     |> follow_information_changeset(%{following_count: following_count})
-    |> Repo.update!()
+    |> update_and_set_cache()
   end
 
   def set_unread_conversation_count(%User{local: true} = user) do


@@ -210,7 +210,7 @@ def stream_out_participations(%Object{data: %{"context" => context}}, user) do
       conversation = Repo.preload(conversation, :participations)
 
       last_activity_id =
-        fetch_latest_activity_id_for_context(conversation.ap_id, %{
+        fetch_latest_direct_activity_id_for_context(conversation.ap_id, %{
          user: user,
          blocking_user: user
        })
@@ -517,11 +517,12 @@ def fetch_activities_for_context(context, opts \\ %{}) do
     |> Repo.all()
   end
 
-  @spec fetch_latest_activity_id_for_context(String.t(), keyword() | map()) ::
+  @spec fetch_latest_direct_activity_id_for_context(String.t(), keyword() | map()) ::
           FlakeId.Ecto.CompatType.t() | nil
-  def fetch_latest_activity_id_for_context(context, opts \\ %{}) do
+  def fetch_latest_direct_activity_id_for_context(context, opts \\ %{}) do
     context
     |> fetch_activities_for_context_query(Map.merge(%{skip_preload: true}, opts))
+    |> restrict_visibility(%{visibility: "direct"})
     |> limit(1)
     |> select([a], a.id)
     |> Repo.one()


@@ -9,6 +9,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidator do
   the system.
   """
 
+  alias Pleroma.EctoType.ActivityPub.ObjectValidators
   alias Pleroma.Object
   alias Pleroma.User
   alias Pleroma.Web.ActivityPub.ObjectValidators.AnnounceValidator
@@ -17,7 +18,6 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidator do
   alias Pleroma.Web.ActivityPub.ObjectValidators.DeleteValidator
   alias Pleroma.Web.ActivityPub.ObjectValidators.EmojiReactValidator
   alias Pleroma.Web.ActivityPub.ObjectValidators.LikeValidator
-  alias Pleroma.Web.ActivityPub.ObjectValidators.Types
   alias Pleroma.Web.ActivityPub.ObjectValidators.UndoValidator
 
   @spec validate(map(), keyword()) :: {:ok, map(), keyword()} | {:error, any()}
@@ -120,7 +120,7 @@ def stringify_keys(object) when is_list(object) do
   def stringify_keys(object), do: object
 
   def fetch_actor(object) do
-    with {:ok, actor} <- Types.ObjectID.cast(object["actor"]) do
+    with {:ok, actor} <- ObjectValidators.ObjectID.cast(object["actor"]) do
       User.get_or_fetch_by_ap_id(actor)
     end
   end


@@ -5,9 +5,9 @@
 defmodule Pleroma.Web.ActivityPub.ObjectValidators.AnnounceValidator do
   use Ecto.Schema
 
+  alias Pleroma.EctoType.ActivityPub.ObjectValidators
   alias Pleroma.Object
   alias Pleroma.User
-  alias Pleroma.Web.ActivityPub.ObjectValidators.Types
   alias Pleroma.Web.ActivityPub.Utils
   alias Pleroma.Web.ActivityPub.Visibility
 
@@ -19,14 +19,14 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AnnounceValidator do
   @primary_key false
 
   embedded_schema do
-    field(:id, Types.ObjectID, primary_key: true)
+    field(:id, ObjectValidators.ObjectID, primary_key: true)
     field(:type, :string)
-    field(:object, Types.ObjectID)
-    field(:actor, Types.ObjectID)
+    field(:object, ObjectValidators.ObjectID)
+    field(:actor, ObjectValidators.ObjectID)
     field(:context, :string, autogenerate: {Utils, :generate_context_id, []})
-    field(:to, Types.Recipients, default: [])
-    field(:cc, Types.Recipients, default: [])
-    field(:published, Types.DateTime)
+    field(:to, ObjectValidators.Recipients, default: [])
+    field(:cc, ObjectValidators.Recipients, default: [])
+    field(:published, ObjectValidators.DateTime)
   end
 
   def cast_and_validate(data) do


@@ -5,9 +5,9 @@
 defmodule Pleroma.Web.ActivityPub.ObjectValidators.ChatMessageValidator do
   use Ecto.Schema
 
+  alias Pleroma.EctoType.ActivityPub.ObjectValidators
   alias Pleroma.User
   alias Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidator
-  alias Pleroma.Web.ActivityPub.ObjectValidators.Types
 
   import Ecto.Changeset
   import Pleroma.Web.ActivityPub.Transmogrifier, only: [fix_emoji: 1]
@@ -16,12 +16,12 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.ChatMessageValidator do
   @derive Jason.Encoder
 
   embedded_schema do
-    field(:id, Types.ObjectID, primary_key: true)
-    field(:to, Types.Recipients, default: [])
+    field(:id, ObjectValidators.ObjectID, primary_key: true)
+    field(:to, ObjectValidators.Recipients, default: [])
     field(:type, :string)
-    field(:content, Types.SafeText)
-    field(:actor, Types.ObjectID)
-    field(:published, Types.DateTime)
+    field(:content, ObjectValidators.SafeText)
+    field(:actor, ObjectValidators.ObjectID)
+    field(:published, ObjectValidators.DateTime)
     field(:emoji, :map, default: %{})
 
     embeds_one(:attachment, AttachmentValidator)


@@ -7,9 +7,9 @@
 # - doesn't embed, will only get the object id
 defmodule Pleroma.Web.ActivityPub.ObjectValidators.CreateChatMessageValidator do
   use Ecto.Schema
 
+  alias Pleroma.EctoType.ActivityPub.ObjectValidators
   alias Pleroma.Object
-  alias Pleroma.Web.ActivityPub.ObjectValidators.Types
 
   import Ecto.Changeset
   import Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations
@@ -17,11 +17,11 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.CreateChatMessageValidator do
   @primary_key false
 
   embedded_schema do
-    field(:id, Types.ObjectID, primary_key: true)
-    field(:actor, Types.ObjectID)
+    field(:id, ObjectValidators.ObjectID, primary_key: true)
+    field(:actor, ObjectValidators.ObjectID)
     field(:type, :string)
-    field(:to, Types.Recipients, default: [])
-    field(:object, Types.ObjectID)
+    field(:to, ObjectValidators.Recipients, default: [])
+    field(:object, ObjectValidators.ObjectID)
   end
 
   def cast_and_apply(data) do


@@ -5,16 +5,16 @@
 defmodule Pleroma.Web.ActivityPub.ObjectValidators.CreateNoteValidator do
   use Ecto.Schema
 
+  alias Pleroma.EctoType.ActivityPub.ObjectValidators
   alias Pleroma.Web.ActivityPub.ObjectValidators.NoteValidator
-  alias Pleroma.Web.ActivityPub.ObjectValidators.Types
 
   import Ecto.Changeset
 
   @primary_key false
 
   embedded_schema do
-    field(:id, Types.ObjectID, primary_key: true)
-    field(:actor, Types.ObjectID)
+    field(:id, ObjectValidators.ObjectID, primary_key: true)
+    field(:actor, ObjectValidators.ObjectID)
     field(:type, :string)
     field(:to, {:array, :string})
     field(:cc, {:array, :string})


@@ -6,8 +6,8 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.DeleteValidator do
   use Ecto.Schema
 
   alias Pleroma.Activity
+  alias Pleroma.EctoType.ActivityPub.ObjectValidators
   alias Pleroma.User
-  alias Pleroma.Web.ActivityPub.ObjectValidators.Types
 
   import Ecto.Changeset
   import Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations
@@ -15,13 +15,13 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.DeleteValidator do
   @primary_key false
 
   embedded_schema do
-    field(:id, Types.ObjectID, primary_key: true)
+    field(:id, ObjectValidators.ObjectID, primary_key: true)
     field(:type, :string)
-    field(:actor, Types.ObjectID)
-    field(:to, Types.Recipients, default: [])
-    field(:cc, Types.Recipients, default: [])
-    field(:deleted_activity_id, Types.ObjectID)
-    field(:object, Types.ObjectID)
+    field(:actor, ObjectValidators.ObjectID)
+    field(:to, ObjectValidators.Recipients, default: [])
+    field(:cc, ObjectValidators.Recipients, default: [])
+    field(:deleted_activity_id, ObjectValidators.ObjectID)
+    field(:object, ObjectValidators.ObjectID)
   end
 
   def cast_data(data) do


@@ -5,8 +5,8 @@
 defmodule Pleroma.Web.ActivityPub.ObjectValidators.EmojiReactValidator do
   use Ecto.Schema
 
+  alias Pleroma.EctoType.ActivityPub.ObjectValidators
   alias Pleroma.Object
-  alias Pleroma.Web.ActivityPub.ObjectValidators.Types
 
   import Ecto.Changeset
   import Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations
@@ -14,10 +14,10 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.EmojiReactValidator do
   @primary_key false
 
   embedded_schema do
-    field(:id, Types.ObjectID, primary_key: true)
+    field(:id, ObjectValidators.ObjectID, primary_key: true)
     field(:type, :string)
-    field(:object, Types.ObjectID)
-    field(:actor, Types.ObjectID)
+    field(:object, ObjectValidators.ObjectID)
+    field(:actor, ObjectValidators.ObjectID)
     field(:context, :string)
     field(:content, :string)
     field(:to, {:array, :string}, default: [])


@@ -5,8 +5,8 @@
 defmodule Pleroma.Web.ActivityPub.ObjectValidators.LikeValidator do
   use Ecto.Schema
 
+  alias Pleroma.EctoType.ActivityPub.ObjectValidators
   alias Pleroma.Object
-  alias Pleroma.Web.ActivityPub.ObjectValidators.Types
   alias Pleroma.Web.ActivityPub.Utils
 
   import Ecto.Changeset
@@ -15,13 +15,13 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.LikeValidator do
   @primary_key false
 
   embedded_schema do
-    field(:id, Types.ObjectID, primary_key: true)
+    field(:id, ObjectValidators.ObjectID, primary_key: true)
     field(:type, :string)
-    field(:object, Types.ObjectID)
-    field(:actor, Types.ObjectID)
+    field(:object, ObjectValidators.ObjectID)
+    field(:actor, ObjectValidators.ObjectID)
     field(:context, :string)
-    field(:to, Types.Recipients, default: [])
-    field(:cc, Types.Recipients, default: [])
+    field(:to, ObjectValidators.Recipients, default: [])
+    field(:cc, ObjectValidators.Recipients, default: [])
   end
 
   def cast_and_validate(data) do
@@ -67,7 +67,7 @@ def fix_recipients(cng) do
     with {[], []} <- {to, cc},
          %Object{data: %{"actor" => actor}} <- Object.get_cached_by_ap_id(object),
-         {:ok, actor} <- Types.ObjectID.cast(actor) do
+         {:ok, actor} <- ObjectValidators.ObjectID.cast(actor) do
       cng
       |> put_change(:to, [actor])
     else


@@ -5,14 +5,14 @@
 defmodule Pleroma.Web.ActivityPub.ObjectValidators.NoteValidator do
   use Ecto.Schema
 
-  alias Pleroma.Web.ActivityPub.ObjectValidators.Types
+  alias Pleroma.EctoType.ActivityPub.ObjectValidators
 
   import Ecto.Changeset
 
   @primary_key false
 
   embedded_schema do
-    field(:id, Types.ObjectID, primary_key: true)
+    field(:id, ObjectValidators.ObjectID, primary_key: true)
     field(:to, {:array, :string}, default: [])
     field(:cc, {:array, :string}, default: [])
     field(:bto, {:array, :string}, default: [])
@@ -22,10 +22,10 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.NoteValidator do
     field(:type, :string)
     field(:content, :string)
     field(:context, :string)
-    field(:actor, Types.ObjectID)
-    field(:attributedTo, Types.ObjectID)
+    field(:actor, ObjectValidators.ObjectID)
+    field(:attributedTo, ObjectValidators.ObjectID)
     field(:summary, :string)
-    field(:published, Types.DateTime)
+    field(:published, ObjectValidators.DateTime)
     # TODO: Write type
     field(:emoji, :map, default: %{})
     field(:sensitive, :boolean, default: false)
@@ -35,7 +35,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.NoteValidator do
     field(:like_count, :integer, default: 0)
     field(:announcement_count, :integer, default: 0)
     field(:inRepyTo, :string)
-    field(:uri, Types.Uri)
+    field(:uri, ObjectValidators.Uri)
 
     field(:likes, {:array, :string}, default: [])
     field(:announcements, {:array, :string}, default: [])


@@ -6,7 +6,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.UndoValidator do
use Ecto.Schema use Ecto.Schema
alias Pleroma.Activity alias Pleroma.Activity
alias Pleroma.Web.ActivityPub.ObjectValidators.Types alias Pleroma.EctoType.ActivityPub.ObjectValidators
import Ecto.Changeset import Ecto.Changeset
import Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations import Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations
@@ -14,10 +14,10 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.UndoValidator do
@primary_key false @primary_key false
embedded_schema do embedded_schema do
field(:id, Types.ObjectID, primary_key: true) field(:id, ObjectValidators.ObjectID, primary_key: true)
field(:type, :string) field(:type, :string)
field(:object, Types.ObjectID) field(:object, ObjectValidators.ObjectID)
field(:actor, Types.ObjectID) field(:actor, ObjectValidators.ObjectID)
field(:to, {:array, :string}, default: []) field(:to, {:array, :string}, default: [])
field(:cc, {:array, :string}, default: []) field(:cc, {:array, :string}, default: [])
end end


@@ -1,14 +1,18 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.ObjectValidators.UrlObjectValidator do defmodule Pleroma.Web.ActivityPub.ObjectValidators.UrlObjectValidator do
use Ecto.Schema use Ecto.Schema
alias Pleroma.Web.ActivityPub.ObjectValidators.Types alias Pleroma.EctoType.ActivityPub.ObjectValidators
import Ecto.Changeset import Ecto.Changeset
@primary_key false @primary_key false
embedded_schema do embedded_schema do
field(:type, :string) field(:type, :string)
field(:href, Types.Uri) field(:href, ObjectValidators.Uri)
field(:mediaType, :string) field(:mediaType, :string)
end end


@@ -8,6 +8,7 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
""" """
alias Pleroma.Activity alias Pleroma.Activity
alias Pleroma.EarmarkRenderer alias Pleroma.EarmarkRenderer
alias Pleroma.EctoType.ActivityPub.ObjectValidators
alias Pleroma.FollowingRelationship alias Pleroma.FollowingRelationship
alias Pleroma.Maps alias Pleroma.Maps
alias Pleroma.Notification alias Pleroma.Notification
@@ -18,7 +19,6 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
alias Pleroma.Web.ActivityPub.ActivityPub alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.ActivityPub.Builder alias Pleroma.Web.ActivityPub.Builder
alias Pleroma.Web.ActivityPub.ObjectValidator alias Pleroma.Web.ActivityPub.ObjectValidator
alias Pleroma.Web.ActivityPub.ObjectValidators.Types
alias Pleroma.Web.ActivityPub.Pipeline alias Pleroma.Web.ActivityPub.Pipeline
alias Pleroma.Web.ActivityPub.Utils alias Pleroma.Web.ActivityPub.Utils
alias Pleroma.Web.ActivityPub.Visibility alias Pleroma.Web.ActivityPub.Visibility
@@ -725,7 +725,7 @@ def handle_incoming(
else else
{:error, {:validate_object, _}} = e -> {:error, {:validate_object, _}} = e ->
# Check if we have a create activity for this # Check if we have a create activity for this
with {:ok, object_id} <- Types.ObjectID.cast(data["object"]), with {:ok, object_id} <- ObjectValidators.ObjectID.cast(data["object"]),
%Activity{data: %{"actor" => actor}} <- %Activity{data: %{"actor" => actor}} <-
Activity.create_by_object_ap_id(object_id) |> Repo.one(), Activity.create_by_object_ap_id(object_id) |> Repo.one(),
# We have one, insert a tombstone and retry # We have one, insert a tombstone and retry


@@ -33,7 +33,11 @@ def descriptions(conn, _params) do
   def show(conn, %{only_db: true}) do
     with :ok <- configurable_from_database() do
       configs = Pleroma.Repo.all(ConfigDB)
-      render(conn, "index.json", %{configs: configs})
+
+      render(conn, "index.json", %{
+        configs: configs,
+        need_reboot: Restarter.Pleroma.need_reboot?()
+      })
     end
   end
 
@@ -61,17 +65,20 @@ def show(conn, _params) do
             value
           end
 
-          %{
-            group: ConfigDB.convert(group),
-            key: ConfigDB.convert(key),
-            value: ConfigDB.convert(merged_value)
-          }
+          %ConfigDB{
+            group: group,
+            key: key,
+            value: merged_value
+          }
           |> Pleroma.Maps.put_if_present(:db, db)
         end)
       end)
       |> List.flatten()
 
-      json(conn, %{configs: merged, need_reboot: Restarter.Pleroma.need_reboot?()})
+      render(conn, "index.json", %{
+        configs: merged,
+        need_reboot: Restarter.Pleroma.need_reboot?()
+      })
     end
   end
 
@@ -91,24 +98,17 @@ def update(%{body_params: %{configs: configs}} = conn, _) do
     {deleted, updated} =
       results
-      |> Enum.map(fn {:ok, config} ->
-        Map.put(config, :db, ConfigDB.get_db_keys(config))
-      end)
-      |> Enum.split_with(fn config ->
-        Ecto.get_meta(config, :state) == :deleted
-      end)
+      |> Enum.map(fn {:ok, %{key: key, value: value} = config} ->
+        Map.put(config, :db, ConfigDB.get_db_keys(value, key))
+      end)
+      |> Enum.split_with(&(Ecto.get_meta(&1, :state) == :deleted))
 
     Config.TransferTask.load_and_update_env(deleted, false)
 
     if not Restarter.Pleroma.need_reboot?() do
       changed_reboot_settings? =
         (updated ++ deleted)
-        |> Enum.any?(fn config ->
-          group = ConfigDB.from_string(config.group)
-          key = ConfigDB.from_string(config.key)
-          value = ConfigDB.from_binary(config.value)
-
-          Config.TransferTask.pleroma_need_restart?(group, key, value)
-        end)
+        |> Enum.any?(&Config.TransferTask.pleroma_need_restart?(&1.group, &1.key, &1.value))
 
       if changed_reboot_settings?, do: Restarter.Pleroma.need_reboot()
     end

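For context, the merged entries handed to the view are now real %ConfigDB{} structs holding native Elixir terms; JSON-safe conversion moves into the view layer. A hedged sketch of the shape involved (values illustrative):

# Entries given to ConfigView."index.json" are ConfigDB structs;
# ConfigDB.to_json_types/1 converts each field when rendering.
entry = %Pleroma.ConfigDB{
  group: :pleroma,
  key: Pleroma.Captcha,
  value: [enabled: true, seconds_valid: 60]
}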

@@ -0,0 +1,63 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.AdminAPI.MediaProxyCacheController do
use Pleroma.Web, :controller
alias Pleroma.Plugs.OAuthScopesPlug
alias Pleroma.Web.ApiSpec.Admin, as: Spec
alias Pleroma.Web.MediaProxy
plug(Pleroma.Web.ApiSpec.CastAndValidate)
plug(
OAuthScopesPlug,
%{scopes: ["read:media_proxy_caches"], admin: true} when action in [:index]
)
plug(
OAuthScopesPlug,
%{scopes: ["write:media_proxy_caches"], admin: true} when action in [:purge, :delete]
)
action_fallback(Pleroma.Web.AdminAPI.FallbackController)
defdelegate open_api_operation(action), to: Spec.MediaProxyCacheOperation
def index(%{assigns: %{user: _}} = conn, params) do
cursor =
:banned_urls_cache
|> :ets.table([{:traverse, {:select, Cachex.Query.create(true, :key)}}])
|> :qlc.cursor()
urls =
case params.page do
1 ->
:qlc.next_answers(cursor, params.page_size)
_ ->
:qlc.next_answers(cursor, (params.page - 1) * params.page_size)
:qlc.next_answers(cursor, params.page_size)
end
:qlc.delete_cursor(cursor)
render(conn, "index.json", urls: urls)
end
def delete(%{assigns: %{user: _}, body_params: %{urls: urls}} = conn, _) do
MediaProxy.remove_from_banned_urls(urls)
render(conn, "index.json", urls: urls)
end
def purge(%{assigns: %{user: _}, body_params: %{urls: urls, ban: ban}} = conn, _) do
MediaProxy.Invalidation.purge(urls)
if ban do
MediaProxy.put_in_banned_urls(urls)
end
render(conn, "index.json", urls: urls)
end
end

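The index action above pages through the Cachex-backed ETS table with a :qlc cursor: page 1 takes page_size answers, any later page first skips (page - 1) * page_size answers and then takes a page. The same arithmetic over a plain list, as a sketch:

page_slice = fn items, page, page_size ->
  items
  |> Enum.drop((page - 1) * page_size)
  |> Enum.take(page_size)
end

page_slice.(~w(a b c d e), 2, 2)
# => ["c", "d"]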

@@ -5,23 +5,20 @@
 defmodule Pleroma.Web.AdminAPI.ConfigView do
   use Pleroma.Web, :view
 
+  alias Pleroma.ConfigDB
+
   def render("index.json", %{configs: configs} = params) do
-    map = %{
-      configs: render_many(configs, __MODULE__, "show.json", as: :config)
-    }
-
-    if params[:need_reboot] do
-      Map.put(map, :need_reboot, true)
-    else
-      map
-    end
+    %{
+      configs: render_many(configs, __MODULE__, "show.json", as: :config),
+      need_reboot: params[:need_reboot]
+    }
   end
 
   def render("show.json", %{config: config}) do
     map = %{
-      key: config.key,
-      group: config.group,
-      value: Pleroma.ConfigDB.from_binary_with_convert(config.value)
+      key: ConfigDB.to_json_types(config.key),
+      group: ConfigDB.to_json_types(config.group),
+      value: ConfigDB.to_json_types(config.value)
     }
 
     if config.db != [] do


@@ -0,0 +1,11 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.AdminAPI.MediaProxyCacheView do
use Pleroma.Web, :view
def render("index.json", %{urls: urls}) do
%{urls: urls}
end
end


@@ -39,6 +39,12 @@ def pagination_params do
:string, :string,
"Return the newest items newer than this ID" "Return the newest items newer than this ID"
), ),
Operation.parameter(
:offset,
:query,
%Schema{type: :integer, default: 0},
"Return items past this number of items"
),
Operation.parameter( Operation.parameter(
:limit, :limit,
:query, :query,

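The new :offset parameter enables plain offset/limit paging alongside since_id/max_id. Over an in-memory list the semantics reduce to a slice:

items = Enum.to_list(1..100)

Enum.slice(items, 40, 20)
# => [41, 42, ..., 60] — skip the first 40 items, return the next 20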

@@ -0,0 +1,109 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ApiSpec.Admin.MediaProxyCacheOperation do
alias OpenApiSpex.Operation
alias OpenApiSpex.Schema
alias Pleroma.Web.ApiSpec.Schemas.ApiError
import Pleroma.Web.ApiSpec.Helpers
def open_api_operation(action) do
operation = String.to_existing_atom("#{action}_operation")
apply(__MODULE__, operation, [])
end
def index_operation do
%Operation{
tags: ["Admin", "MediaProxyCache"],
summary: "Fetch a paginated list of all banned MediaProxy URLs in Cachex",
operationId: "AdminAPI.MediaProxyCacheController.index",
security: [%{"oAuth" => ["read:media_proxy_caches"]}],
parameters: [
Operation.parameter(
:page,
:query,
%Schema{type: :integer, default: 1},
"Page"
),
Operation.parameter(
:page_size,
:query,
%Schema{type: :integer, default: 50},
"Number of statuses to return"
)
],
responses: %{
200 => success_response()
}
}
end
def delete_operation do
%Operation{
tags: ["Admin", "MediaProxyCache"],
summary: "Remove a banned MediaProxy URL from Cachex",
operationId: "AdminAPI.MediaProxyCacheController.delete",
security: [%{"oAuth" => ["write:media_proxy_caches"]}],
requestBody:
request_body(
"Parameters",
%Schema{
type: :object,
required: [:urls],
properties: %{
urls: %Schema{type: :array, items: %Schema{type: :string, format: :uri}}
}
},
required: true
),
responses: %{
200 => success_response(),
400 => Operation.response("Error", "application/json", ApiError)
}
}
end
def purge_operation do
%Operation{
tags: ["Admin", "MediaProxyCache"],
summary: "Purge and optionally ban a MediaProxy URL",
operationId: "AdminAPI.MediaProxyCacheController.purge",
security: [%{"oAuth" => ["write:media_proxy_caches"]}],
requestBody:
request_body(
"Parameters",
%Schema{
type: :object,
required: [:urls],
properties: %{
urls: %Schema{type: :array, items: %Schema{type: :string, format: :uri}},
ban: %Schema{type: :boolean, default: true}
}
},
required: true
),
responses: %{
200 => success_response(),
400 => Operation.response("Error", "application/json", ApiError)
}
}
end
defp success_response do
Operation.response("Array of banned MediaProxy URLs in Cachex", "application/json", %Schema{
type: :object,
properties: %{
urls: %Schema{
type: :array,
items: %Schema{
type: :string,
format: :uri,
description: "MediaProxy URLs"
}
}
}
})
end
end


@@ -183,7 +183,6 @@ defp notification_type do
"favourite", "favourite",
"reblog", "reblog",
"mention", "mention",
"poll",
"pleroma:emoji_reaction", "pleroma:emoji_reaction",
"pleroma:chat_mention", "pleroma:chat_mention",
"move", "move",


@@ -165,6 +165,7 @@ def update_credentials(%{assigns: %{user: user}, body_params: params} = conn, _p
end) end)
|> Maps.put_if_present(:name, params[:display_name]) |> Maps.put_if_present(:name, params[:display_name])
|> Maps.put_if_present(:bio, params[:note]) |> Maps.put_if_present(:bio, params[:note])
|> Maps.put_if_present(:raw_bio, params[:note])
|> Maps.put_if_present(:avatar, params[:avatar]) |> Maps.put_if_present(:avatar, params[:avatar])
|> Maps.put_if_present(:banner, params[:header]) |> Maps.put_if_present(:banner, params[:header])
|> Maps.put_if_present(:background, params[:pleroma_background_image]) |> Maps.put_if_present(:background, params[:pleroma_background_image])


@@ -107,21 +107,21 @@ defp resource_search(_, "statuses", query, options) do
     )
   end
 
-  defp resource_search(:v2, "hashtags", query, _options) do
+  defp resource_search(:v2, "hashtags", query, options) do
     tags_path = Web.base_url() <> "/tag/"
 
     query
-    |> prepare_tags()
+    |> prepare_tags(options)
     |> Enum.map(fn tag ->
       %{name: tag, url: tags_path <> tag}
     end)
   end
 
-  defp resource_search(:v1, "hashtags", query, _options) do
-    prepare_tags(query)
+  defp resource_search(:v1, "hashtags", query, options) do
+    prepare_tags(query, options)
   end
 
-  defp prepare_tags(query, add_joined_tag \\ true) do
+  defp prepare_tags(query, options) do
     tags =
       query
       |> preprocess_uri_query()
@@ -139,13 +139,20 @@ defp prepare_tags(query, options) do
     tags = Enum.map(tags, fn tag -> String.trim_leading(tag, "#") end)
 
-    if Enum.empty?(explicit_tags) && add_joined_tag do
-      tags
-      |> Kernel.++([joined_tag(tags)])
-      |> Enum.uniq_by(&String.downcase/1)
-    else
-      tags
-    end
+    tags =
+      if Enum.empty?(explicit_tags) && !options[:skip_joined_tag] do
+        add_joined_tag(tags)
+      else
+        tags
+      end
+
+    Pleroma.Pagination.paginate(tags, options)
+  end
+
+  defp add_joined_tag(tags) do
+    tags
+    |> Kernel.++([joined_tag(tags)])
+    |> Enum.uniq_by(&String.downcase/1)
   end
 
   # If `query` is a URI, returns last component of its path, otherwise returns `query`

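Hashtag search is now paginated, and the joined tag can be suppressed via options. The v2 payload assembled above looks roughly like:

tags_path = "https://example.com/tag/"

Enum.map(["pleroma", "fediverse"], fn tag -> %{name: tag, url: tags_path <> tag} end)
# => [%{name: "pleroma", url: "https://example.com/tag/pleroma"},
#     %{name: "fediverse", url: "https://example.com/tag/fediverse"}]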

@@ -224,7 +224,7 @@ defp do_render("show.json", %{user: user} = opts) do
       fields: user.fields,
       bot: bot,
       source: %{
-        note: prepare_user_bio(user),
+        note: user.raw_bio || "",
         sensitive: false,
         fields: user.raw_fields,
         pleroma: %{
@@ -260,17 +260,6 @@ defp do_render("show.json", %{user: user} = opts) do
     |> maybe_put_unread_notification_count(user, opts[:for])
   end
 
-  defp prepare_user_bio(%User{bio: ""}), do: ""
-
-  defp prepare_user_bio(%User{bio: bio}) when is_binary(bio) do
-    bio
-    |> String.replace(~r(<br */?>), "\n")
-    |> Pleroma.HTML.strip_tags()
-    |> HtmlEntities.decode()
-  end
-
-  defp prepare_user_bio(_), do: ""
-
   defp username_from_nickname(string) when is_binary(string) do
     hd(String.split(string, "@"))
   end


@@ -23,10 +23,13 @@ def render("participation.json", %{participation: participation, for: user}) do
     last_activity_id =
       with nil <- participation.last_activity_id do
-        ActivityPub.fetch_latest_activity_id_for_context(participation.conversation.ap_id, %{
-          user: user,
-          blocking_user: user
-        })
+        ActivityPub.fetch_latest_direct_activity_id_for_context(
+          participation.conversation.ap_id,
+          %{
+            user: user,
+            blocking_user: user
+          }
+        )
       end
 
     activity = Activity.get_by_id_with_object(last_activity_id)


@@ -5,22 +5,34 @@
 defmodule Pleroma.Web.MediaProxy.Invalidation do
   @moduledoc false
 
-  @callback purge(list(String.t()), map()) :: {:ok, String.t()} | {:error, String.t()}
+  @callback purge(list(String.t()), Keyword.t()) :: {:ok, list(String.t())} | {:error, String.t()}
 
   alias Pleroma.Config
+  alias Pleroma.Web.MediaProxy
 
-  @spec purge(list(String.t())) :: {:ok, String.t()} | {:error, String.t()}
+  @spec enabled?() :: boolean()
+  def enabled?, do: Config.get([:media_proxy, :invalidation, :enabled])
+
+  @spec purge(list(String.t()) | String.t()) :: {:ok, list(String.t())} | {:error, String.t()}
   def purge(urls) do
-    [:media_proxy, :invalidation, :enabled]
-    |> Config.get()
-    |> do_purge(urls)
+    prepared_urls = prepare_urls(urls)
+
+    if enabled?() do
+      do_purge(prepared_urls)
+    else
+      {:ok, prepared_urls}
+    end
   end
 
-  defp do_purge(true, urls) do
+  defp do_purge(urls) do
     provider = Config.get([:media_proxy, :invalidation, :provider])
     options = Config.get(provider)
     provider.purge(urls, options)
   end
 
-  defp do_purge(_, _), do: :ok
+  def prepare_urls(urls) do
+    urls
+    |> List.wrap()
+    |> Enum.map(&MediaProxy.url/1)
+  end
 end

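purge/1 now normalizes its input before consulting the provider, and reports the proxied URLs whether or not invalidation is enabled, so callers need not branch on configuration. A sketch under those assumptions:

alias Pleroma.Web.MediaProxy.Invalidation

# Accepts a single URL or a list; each is first rewritten via MediaProxy.url/1.
# With invalidation disabled this is a no-op that still returns the prepared URLs:
{:ok, _prepared_urls} = Invalidation.purge("https://remote.example/media/a.png")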

@@ -9,10 +9,10 @@ defmodule Pleroma.Web.MediaProxy.Invalidation.Http do
   require Logger
 
   @impl Pleroma.Web.MediaProxy.Invalidation
-  def purge(urls, opts) do
-    method = Map.get(opts, :method, :purge)
-    headers = Map.get(opts, :headers, [])
-    options = Map.get(opts, :options, [])
+  def purge(urls, opts \\ []) do
+    method = Keyword.get(opts, :method, :purge)
+    headers = Keyword.get(opts, :headers, [])
+    options = Keyword.get(opts, :options, [])
 
     Logger.debug("Running cache purge: #{inspect(urls)}")
 
@@ -22,7 +22,7 @@ def purge(urls, opts) do
       end
     end)
 
-    {:ok, "success"}
+    {:ok, urls}
   end
 
   defp do_purge(method, url, headers, options) do


@@ -10,32 +10,34 @@ defmodule Pleroma.Web.MediaProxy.Invalidation.Script do
   require Logger
 
   @impl Pleroma.Web.MediaProxy.Invalidation
-  def purge(urls, %{script_path: script_path} = _options) do
+  def purge(urls, opts \\ []) do
     args =
       urls
       |> List.wrap()
       |> Enum.uniq()
       |> Enum.join(" ")
 
-    path = Path.expand(script_path)
-
-    Logger.debug("Running cache purge: #{inspect(urls)}, #{path}")
-
-    case do_purge(path, [args]) do
-      {result, exit_status} when exit_status > 0 ->
-        Logger.error("Error while cache purge: #{inspect(result)}")
-        {:error, inspect(result)}
-
-      _ ->
-        {:ok, "success"}
-    end
+    opts
+    |> Keyword.get(:script_path)
+    |> do_purge([args])
+    |> handle_result(urls)
   end
 
-  def purge(_, _), do: {:error, "not found script path"}
-
-  defp do_purge(path, args) do
+  defp do_purge(script_path, args) when is_binary(script_path) do
+    path = Path.expand(script_path)
+    Logger.debug("Running cache purge: #{inspect(args)}, #{inspect(path)}")
     System.cmd(path, args)
   rescue
-    error -> {inspect(error), 1}
+    error -> error
   end
+
+  defp do_purge(_, _), do: {:error, "not found script path"}
+
+  defp handle_result({_result, 0}, urls), do: {:ok, urls}
+  defp handle_result({:error, error}, urls), do: handle_result(error, urls)
+
+  defp handle_result(error, _) do
+    Logger.error("Error while cache purge: #{inspect(error)}")
+    {:error, inspect(error)}
+  end
 end

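Since the script provider now receives a keyword list, :script_path moves into the provider's own config entry, which Invalidation reads with Config.get(provider). An illustrative (not canonical) configuration; the script path is an example:

config :pleroma, :media_proxy,
  enabled: true,
  invalidation: [enabled: true, provider: Pleroma.Web.MediaProxy.Invalidation.Script]

config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Script,
  script_path: "./installation/nginx-cache-purge.sh.example"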

@@ -6,20 +6,53 @@ defmodule Pleroma.Web.MediaProxy do
alias Pleroma.Config alias Pleroma.Config
alias Pleroma.Upload alias Pleroma.Upload
alias Pleroma.Web alias Pleroma.Web
alias Pleroma.Web.MediaProxy.Invalidation
@base64_opts [padding: false] @base64_opts [padding: false]
@spec in_banned_urls(String.t()) :: boolean()
def in_banned_urls(url), do: elem(Cachex.exists?(:banned_urls_cache, url(url)), 1)
def remove_from_banned_urls(urls) when is_list(urls) do
Cachex.execute!(:banned_urls_cache, fn cache ->
Enum.each(Invalidation.prepare_urls(urls), &Cachex.del(cache, &1))
end)
end
def remove_from_banned_urls(url) when is_binary(url) do
Cachex.del(:banned_urls_cache, url(url))
end
def put_in_banned_urls(urls) when is_list(urls) do
Cachex.execute!(:banned_urls_cache, fn cache ->
Enum.each(Invalidation.prepare_urls(urls), &Cachex.put(cache, &1, true))
end)
end
def put_in_banned_urls(url) when is_binary(url) do
Cachex.put(:banned_urls_cache, url(url), true)
end
 def url(url) when is_nil(url) or url == "", do: nil
 def url("/" <> _ = url), do: url
 
 def url(url) do
-    if disabled?() or local?(url) or whitelisted?(url) do
+    if disabled?() or not url_proxiable?(url) do
     url
   else
     encode_url(url)
   end
 end
 
+  @spec url_proxiable?(String.t()) :: boolean()
+  def url_proxiable?(url) do
+    if local?(url) or whitelisted?(url) do
+      false
+    else
+      true
+    end
+  end
+
 defp disabled?, do: !Config.get([:media_proxy, :enabled], false)
 
 defp local?(url), do: String.starts_with?(url, Pleroma.Web.base_url())

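The ban list is keyed by the proxied form of each URL, which is why put_in_banned_urls/1 and in_banned_urls/1 both pass through url/1 (via prepare_urls/1) before touching the cache. Sketch:

alias Pleroma.Web.MediaProxy

MediaProxy.put_in_banned_urls(["https://remote.example/media/a.png"])

MediaProxy.in_banned_urls("https://remote.example/media/a.png")
# => true, because the lookup applies the same url/1 rewriting as the insert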

@@ -14,10 +14,11 @@ def remote(conn, %{"sig" => sig64, "url" => url64} = params) do
     with config <- Pleroma.Config.get([:media_proxy], []),
          true <- Keyword.get(config, :enabled, false),
          {:ok, url} <- MediaProxy.decode_url(sig64, url64),
+         {_, false} <- {:in_banned_urls, MediaProxy.in_banned_urls(url)},
          :ok <- filename_matches(params, conn.request_path, url) do
       ReverseProxy.call(conn, url, Keyword.get(config, :proxy_opts, @default_proxy_opts))
     else
-      false ->
+      error when error in [false, {:in_banned_urls, true}] ->
         send_resp(conn, 404, Plug.Conn.Status.reason_phrase(404))
 
       {:error, :invalid_signature} ->


@@ -209,6 +209,10 @@ defmodule Pleroma.Web.Router do
post("/oauth_app", OAuthAppController, :create) post("/oauth_app", OAuthAppController, :create)
patch("/oauth_app/:id", OAuthAppController, :update) patch("/oauth_app/:id", OAuthAppController, :update)
delete("/oauth_app/:id", OAuthAppController, :delete) delete("/oauth_app/:id", OAuthAppController, :delete)
get("/media_proxy_caches", MediaProxyCacheController, :index)
post("/media_proxy_caches/delete", MediaProxyCacheController, :delete)
post("/media_proxy_caches/purge", MediaProxyCacheController, :purge)
end end
scope "/api/pleroma/emoji", Pleroma.Web.PleromaAPI do scope "/api/pleroma/emoji", Pleroma.Web.PleromaAPI do


@@ -18,13 +18,19 @@ def perform(
       },
       _job
     ) do
-    hrefs =
-      Enum.flat_map(attachments, fn attachment ->
-        Enum.map(attachment["url"], & &1["href"])
-      end)
-
-    names = Enum.map(attachments, & &1["name"])
+    attachments
+    |> Enum.flat_map(fn item -> Enum.map(item["url"], & &1["href"]) end)
+    |> fetch_objects
+    |> prepare_objects(actor, Enum.map(attachments, & &1["name"]))
+    |> filter_objects
+    |> do_clean
 
+    {:ok, :success}
+  end
+
+  def perform(%{"op" => "cleanup_attachments", "object" => _object}, _job), do: {:ok, :skip}
+
+  defp do_clean({object_ids, attachment_urls}) do
     uploader = Pleroma.Config.get([Pleroma.Upload, :uploader])
 
     prefix =
@@ -39,22 +45,35 @@ def perform(
         "/"
       )
 
-    # find all objects for copies of the attachments, name and actor doesn't matter here
-    object_ids_and_hrefs =
-      from(o in Object,
-        where:
-          fragment(
-            "to_jsonb(array(select jsonb_array_elements((?)#>'{url}') ->> 'href' where jsonb_typeof((?)#>'{url}') = 'array'))::jsonb \\?| (?)",
-            o.data,
-            o.data,
-            ^hrefs
-          )
-      )
-      # The query above can be time consumptive on large instances until we
-      # refactor how uploads are stored
-      |> Repo.all(timeout: :infinity)
+    Enum.each(attachment_urls, fn href ->
+      href
+      |> String.trim_leading("#{base_url}/#{prefix}")
+      |> uploader.delete_file()
+    end)
+
+    delete_objects(object_ids)
+  end
+
+  defp delete_objects([_ | _] = object_ids) do
+    Repo.delete_all(from(o in Object, where: o.id in ^object_ids))
+  end
+
+  defp delete_objects(_), do: :ok
 
   # we should delete 1 object for any given attachment, but don't delete
   # files if there are more than 1 object for it
+  defp filter_objects(objects) do
+    Enum.reduce(objects, {[], []}, fn {href, %{id: id, count: count}}, {ids, hrefs} ->
+      with 1 <- count do
+        {ids ++ [id], hrefs ++ [href]}
+      else
+        _ -> {ids ++ [id], hrefs}
+      end
+    end)
+  end
+
+  defp prepare_objects(objects, actor, names) do
+    objects
     |> Enum.reduce(%{}, fn %{
                              id: id,
                              data: %{
@@ -76,31 +95,20 @@ def perform(
         end
       end)
     end)
-    |> Enum.map(fn {href, %{id: id, count: count}} ->
-      # only delete files that have single instance
-      with 1 <- count do
-        href
-        |> String.trim_leading("#{base_url}/#{prefix}")
-        |> uploader.delete_file()
-
-        {id, href}
-      else
-        _ -> {id, nil}
-      end
-    end)
-
-    object_ids = Enum.map(object_ids_and_hrefs, fn {id, _} -> id end)
-
-    from(o in Object, where: o.id in ^object_ids)
-    |> Repo.delete_all()
-
-    object_ids_and_hrefs
-    |> Enum.filter(fn {_, href} -> not is_nil(href) end)
-    |> Enum.map(&elem(&1, 1))
-    |> Pleroma.Web.MediaProxy.Invalidation.purge()
-
-    {:ok, :success}
   end
 
-  def perform(%{"op" => "cleanup_attachments", "object" => _object}, _job), do: {:ok, :skip}
+  defp fetch_objects(hrefs) do
+    from(o in Object,
+      where:
+        fragment(
+          "to_jsonb(array(select jsonb_array_elements((?)#>'{url}') ->> 'href' where jsonb_typeof((?)#>'{url}') = 'array'))::jsonb \\?| (?)",
+          o.data,
+          o.data,
+          ^hrefs
+        )
+    )
+    # The query above can be time consumptive on large instances until we
+    # refactor how uploads are stored
+    |> Repo.all(timeout: :infinity)
+  end
 end

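The worker now reads as a pipeline: fetch_objects/1 issues one query for all candidate hrefs, prepare_objects/3 counts how many objects reference each href, filter_objects/1 keeps only singly referenced files, and do_clean/1 deletes files then rows. The reference-count filter in isolation (ordering illustrative):

objects = %{
  "https://files.example/media/a.png" => %{id: 1, count: 1},
  "https://files.example/media/b.png" => %{id: 2, count: 2}
}

Enum.reduce(objects, {[], []}, fn {href, %{id: id, count: count}}, {ids, hrefs} ->
  if count == 1, do: {ids ++ [id], hrefs ++ [href]}, else: {ids ++ [id], hrefs}
end)
# => {[1, 2], ["https://files.example/media/a.png"]}
# Both object rows are deleted, but only the singly-referenced file is removed.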

@@ -5,7 +5,7 @@ def project do
[ [
app: :pleroma, app: :pleroma,
version: version("2.0.50"), version: version("2.0.50"),
elixir: "~> 1.8", elixir: "~> 1.9",
elixirc_paths: elixirc_paths(Mix.env()), elixirc_paths: elixirc_paths(Mix.env()),
compilers: [:phoenix, :gettext] ++ Mix.compilers(), compilers: [:phoenix, :gettext] ++ Mix.compilers(),
elixirc_options: [warnings_as_errors: warnings_as_errors(Mix.env())], elixirc_options: [warnings_as_errors: warnings_as_errors(Mix.env())],


@@ -0,0 +1,9 @@
defmodule Pleroma.Repo.Migrations.UserRawBio do
use Ecto.Migration
def change do
alter table(:users) do
add_if_not_exists(:raw_bio, :text)
end
end
end


@@ -0,0 +1,25 @@
defmodule Pleroma.Repo.Migrations.PopulateUserRawBio do
use Ecto.Migration
import Ecto.Query
alias Pleroma.User
alias Pleroma.Repo
def change do
{:ok, _} = Application.ensure_all_started(:fast_sanitize)
User.Query.build(%{local: true})
|> select([u], struct(u, [:id, :ap_id, :bio]))
|> Repo.stream()
|> Enum.each(fn %{bio: bio} = user ->
if bio do
raw_bio =
bio
|> String.replace(~r(<br */?>), "\n")
|> Pleroma.HTML.strip_tags()
Ecto.Changeset.cast(user, %{raw_bio: raw_bio}, [:raw_bio])
|> Repo.update()
end
end)
end
end

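raw_bio is derived by undoing the HTML formatting applied to bio; note the migration does not decode HTML entities the way the removed prepare_user_bio/1 did. The transformation on its own:

bio = "Hello<br/>world"

bio
|> String.replace(~r(<br */?>), "\n")
|> Pleroma.HTML.strip_tags()
# => "Hello\nworld"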

@@ -7,40 +7,28 @@ defmodule Pleroma.ConfigDBTest do
import Pleroma.Factory import Pleroma.Factory
alias Pleroma.ConfigDB alias Pleroma.ConfigDB
test "get_by_key/1" do test "get_by_params/1" do
config = insert(:config) config = insert(:config)
insert(:config) insert(:config)
assert config == ConfigDB.get_by_params(%{group: config.group, key: config.key}) assert config == ConfigDB.get_by_params(%{group: config.group, key: config.key})
end end
test "create/1" do
{:ok, config} = ConfigDB.create(%{group: ":pleroma", key: ":some_key", value: "some_value"})
assert config == ConfigDB.get_by_params(%{group: ":pleroma", key: ":some_key"})
end
test "update/1" do
config = insert(:config)
{:ok, updated} = ConfigDB.update(config, %{value: "some_value"})
loaded = ConfigDB.get_by_params(%{group: config.group, key: config.key})
assert loaded == updated
end
test "get_all_as_keyword/0" do test "get_all_as_keyword/0" do
saved = insert(:config) saved = insert(:config)
insert(:config, group: ":quack", key: ":level", value: ConfigDB.to_binary(:info)) insert(:config, group: ":quack", key: ":level", value: :info)
insert(:config, group: ":quack", key: ":meta", value: ConfigDB.to_binary([:none])) insert(:config, group: ":quack", key: ":meta", value: [:none])
insert(:config, insert(:config,
group: ":quack", group: ":quack",
key: ":webhook_url", key: ":webhook_url",
value: ConfigDB.to_binary("https://hooks.slack.com/services/KEY/some_val") value: "https://hooks.slack.com/services/KEY/some_val"
) )
config = ConfigDB.get_all_as_keyword() config = ConfigDB.get_all_as_keyword()
assert config[:pleroma] == [ assert config[:pleroma] == [
{ConfigDB.from_string(saved.key), ConfigDB.from_binary(saved.value)} {saved.key, saved.value}
] ]
assert config[:quack][:level] == :info assert config[:quack][:level] == :info
@@ -51,11 +39,11 @@ test "get_all_as_keyword/0" do
describe "update_or_create/1" do describe "update_or_create/1" do
test "common" do test "common" do
config = insert(:config) config = insert(:config)
key2 = "another_key" key2 = :another_key
params = [ params = [
%{group: "pleroma", key: key2, value: "another_value"}, %{group: :pleroma, key: key2, value: "another_value"},
%{group: config.group, key: config.key, value: "new_value"} %{group: :pleroma, key: config.key, value: [a: 1, b: 2, c: "new_value"]}
] ]
assert Repo.all(ConfigDB) |> length() == 1 assert Repo.all(ConfigDB) |> length() == 1
@@ -65,16 +53,16 @@ test "common" do
assert Repo.all(ConfigDB) |> length() == 2 assert Repo.all(ConfigDB) |> length() == 2
config1 = ConfigDB.get_by_params(%{group: config.group, key: config.key}) config1 = ConfigDB.get_by_params(%{group: config.group, key: config.key})
config2 = ConfigDB.get_by_params(%{group: "pleroma", key: key2}) config2 = ConfigDB.get_by_params(%{group: :pleroma, key: key2})
assert config1.value == ConfigDB.transform("new_value") assert config1.value == [a: 1, b: 2, c: "new_value"]
assert config2.value == ConfigDB.transform("another_value") assert config2.value == "another_value"
end end
test "partial update" do test "partial update" do
config = insert(:config, value: ConfigDB.to_binary(key1: "val1", key2: :val2)) config = insert(:config, value: [key1: "val1", key2: :val2])
{:ok, _config} = {:ok, config} =
ConfigDB.update_or_create(%{ ConfigDB.update_or_create(%{
group: config.group, group: config.group,
key: config.key, key: config.key,
@@ -83,15 +71,14 @@ test "partial update" do
updated = ConfigDB.get_by_params(%{group: config.group, key: config.key}) updated = ConfigDB.get_by_params(%{group: config.group, key: config.key})
value = ConfigDB.from_binary(updated.value) assert config.value == updated.value
assert length(value) == 3 assert updated.value[:key1] == :val1
assert value[:key1] == :val1 assert updated.value[:key2] == :val2
assert value[:key2] == :val2 assert updated.value[:key3] == :val3
assert value[:key3] == :val3
end end
test "deep merge" do test "deep merge" do
config = insert(:config, value: ConfigDB.to_binary(key1: "val1", key2: [k1: :v1, k2: "v2"])) config = insert(:config, value: [key1: "val1", key2: [k1: :v1, k2: "v2"]])
{:ok, config} = {:ok, config} =
ConfigDB.update_or_create(%{ ConfigDB.update_or_create(%{
@@ -103,18 +90,15 @@ test "deep merge" do
updated = ConfigDB.get_by_params(%{group: config.group, key: config.key}) updated = ConfigDB.get_by_params(%{group: config.group, key: config.key})
assert config.value == updated.value assert config.value == updated.value
assert updated.value[:key1] == :val1
value = ConfigDB.from_binary(updated.value) assert updated.value[:key2] == [k1: :v1, k2: :v2, k3: :v3]
assert value[:key1] == :val1 assert updated.value[:key3] == :val3
assert value[:key2] == [k1: :v1, k2: :v2, k3: :v3]
assert value[:key3] == :val3
end end
test "only full update for some keys" do test "only full update for some keys" do
config1 = insert(:config, key: ":ecto_repos", value: ConfigDB.to_binary(repo: Pleroma.Repo)) config1 = insert(:config, key: :ecto_repos, value: [repo: Pleroma.Repo])
config2 = config2 = insert(:config, group: :cors_plug, key: :max_age, value: 18)
insert(:config, group: ":cors_plug", key: ":max_age", value: ConfigDB.to_binary(18))
{:ok, _config} = {:ok, _config} =
ConfigDB.update_or_create(%{ ConfigDB.update_or_create(%{
@@ -133,8 +117,8 @@ test "only full update for some keys" do
updated1 = ConfigDB.get_by_params(%{group: config1.group, key: config1.key}) updated1 = ConfigDB.get_by_params(%{group: config1.group, key: config1.key})
updated2 = ConfigDB.get_by_params(%{group: config2.group, key: config2.key}) updated2 = ConfigDB.get_by_params(%{group: config2.group, key: config2.key})
assert ConfigDB.from_binary(updated1.value) == [another_repo: [Pleroma.Repo]] assert updated1.value == [another_repo: [Pleroma.Repo]]
assert ConfigDB.from_binary(updated2.value) == 777 assert updated2.value == 777
end end
test "full update if value is not keyword" do test "full update if value is not keyword" do
@@ -142,7 +126,7 @@ test "full update if value is not keyword" do
insert(:config, insert(:config,
group: ":tesla", group: ":tesla",
key: ":adapter", key: ":adapter",
value: ConfigDB.to_binary(Tesla.Adapter.Hackney) value: Tesla.Adapter.Hackney
) )
{:ok, _config} = {:ok, _config} =
@@ -154,20 +138,20 @@
updated = ConfigDB.get_by_params(%{group: config.group, key: config.key}) updated = ConfigDB.get_by_params(%{group: config.group, key: config.key})
assert ConfigDB.from_binary(updated.value) == Tesla.Adapter.Httpc assert updated.value == Tesla.Adapter.Httpc
end end
test "only full update for some subkeys" do test "only full update for some subkeys" do
config1 = config1 =
insert(:config, insert(:config,
key: ":emoji", key: ":emoji",
value: ConfigDB.to_binary(groups: [a: 1, b: 2], key: [a: 1]) value: [groups: [a: 1, b: 2], key: [a: 1]]
) )
config2 = config2 =
insert(:config, insert(:config,
key: ":assets", key: ":assets",
value: ConfigDB.to_binary(mascots: [a: 1, b: 2], key: [a: 1]) value: [mascots: [a: 1, b: 2], key: [a: 1]]
) )
{:ok, _config} = {:ok, _config} =
@@ -187,8 +171,8 @@ test "only full update for some subkeys" do
updated1 = ConfigDB.get_by_params(%{group: config1.group, key: config1.key}) updated1 = ConfigDB.get_by_params(%{group: config1.group, key: config1.key})
updated2 = ConfigDB.get_by_params(%{group: config2.group, key: config2.key}) updated2 = ConfigDB.get_by_params(%{group: config2.group, key: config2.key})
assert ConfigDB.from_binary(updated1.value) == [groups: [c: 3, d: 4], key: [a: 1, b: 2]] assert updated1.value == [groups: [c: 3, d: 4], key: [a: 1, b: 2]]
assert ConfigDB.from_binary(updated2.value) == [mascots: [c: 3, d: 4], key: [a: 1, b: 2]] assert updated2.value == [mascots: [c: 3, d: 4], key: [a: 1, b: 2]]
end end
end end
@@ -206,14 +190,14 @@
end end
test "partial subkeys delete" do test "partial subkeys delete" do
config = insert(:config, value: ConfigDB.to_binary(groups: [a: 1, b: 2], key: [a: 1])) config = insert(:config, value: [groups: [a: 1, b: 2], key: [a: 1]])
{:ok, deleted} = {:ok, deleted} =
ConfigDB.delete(%{group: config.group, key: config.key, subkeys: [":groups"]}) ConfigDB.delete(%{group: config.group, key: config.key, subkeys: [":groups"]})
assert Ecto.get_meta(deleted, :state) == :loaded assert Ecto.get_meta(deleted, :state) == :loaded
assert deleted.value == ConfigDB.to_binary(key: [a: 1]) assert deleted.value == [key: [a: 1]]
updated = ConfigDB.get_by_params(%{group: config.group, key: config.key}) updated = ConfigDB.get_by_params(%{group: config.group, key: config.key})
@@ -221,7 +205,7 @@ test "partial subkeys delete" do
end end
test "full delete if remaining value after subkeys deletion is empty list" do test "full delete if remaining value after subkeys deletion is empty list" do
config = insert(:config, value: ConfigDB.to_binary(groups: [a: 1, b: 2])) config = insert(:config, value: [groups: [a: 1, b: 2]])
{:ok, deleted} = {:ok, deleted} =
ConfigDB.delete(%{group: config.group, key: config.key, subkeys: [":groups"]}) ConfigDB.delete(%{group: config.group, key: config.key, subkeys: [":groups"]})
@@ -232,163 +216,116 @@ test "full delete if remaining value after subkeys deletion is empty list" do
end end
end end
describe "transform/1" do describe "to_elixir_types/1" do
test "string" do test "string" do
binary = ConfigDB.transform("value as string") assert ConfigDB.to_elixir_types("value as string") == "value as string"
assert binary == :erlang.term_to_binary("value as string")
assert ConfigDB.from_binary(binary) == "value as string"
end end
test "boolean" do test "boolean" do
binary = ConfigDB.transform(false) assert ConfigDB.to_elixir_types(false) == false
assert binary == :erlang.term_to_binary(false)
assert ConfigDB.from_binary(binary) == false
end end
test "nil" do test "nil" do
binary = ConfigDB.transform(nil) assert ConfigDB.to_elixir_types(nil) == nil
assert binary == :erlang.term_to_binary(nil)
assert ConfigDB.from_binary(binary) == nil
end end
test "integer" do test "integer" do
binary = ConfigDB.transform(150) assert ConfigDB.to_elixir_types(150) == 150
assert binary == :erlang.term_to_binary(150)
assert ConfigDB.from_binary(binary) == 150
end end
test "atom" do test "atom" do
binary = ConfigDB.transform(":atom") assert ConfigDB.to_elixir_types(":atom") == :atom
assert binary == :erlang.term_to_binary(:atom)
assert ConfigDB.from_binary(binary) == :atom
end end
test "ssl options" do test "ssl options" do
binary = ConfigDB.transform([":tlsv1", ":tlsv1.1", ":tlsv1.2"]) assert ConfigDB.to_elixir_types([":tlsv1", ":tlsv1.1", ":tlsv1.2"]) == [
assert binary == :erlang.term_to_binary([:tlsv1, :"tlsv1.1", :"tlsv1.2"]) :tlsv1,
assert ConfigDB.from_binary(binary) == [:tlsv1, :"tlsv1.1", :"tlsv1.2"] :"tlsv1.1",
:"tlsv1.2"
]
end end
test "pleroma module" do test "pleroma module" do
binary = ConfigDB.transform("Pleroma.Bookmark") assert ConfigDB.to_elixir_types("Pleroma.Bookmark") == Pleroma.Bookmark
assert binary == :erlang.term_to_binary(Pleroma.Bookmark)
assert ConfigDB.from_binary(binary) == Pleroma.Bookmark
end end
test "pleroma string" do test "pleroma string" do
binary = ConfigDB.transform("Pleroma") assert ConfigDB.to_elixir_types("Pleroma") == "Pleroma"
assert binary == :erlang.term_to_binary("Pleroma")
assert ConfigDB.from_binary(binary) == "Pleroma"
end end
test "phoenix module" do test "phoenix module" do
binary = ConfigDB.transform("Phoenix.Socket.V1.JSONSerializer") assert ConfigDB.to_elixir_types("Phoenix.Socket.V1.JSONSerializer") ==
assert binary == :erlang.term_to_binary(Phoenix.Socket.V1.JSONSerializer) Phoenix.Socket.V1.JSONSerializer
assert ConfigDB.from_binary(binary) == Phoenix.Socket.V1.JSONSerializer
end end
test "tesla module" do test "tesla module" do
binary = ConfigDB.transform("Tesla.Adapter.Hackney") assert ConfigDB.to_elixir_types("Tesla.Adapter.Hackney") == Tesla.Adapter.Hackney
assert binary == :erlang.term_to_binary(Tesla.Adapter.Hackney)
assert ConfigDB.from_binary(binary) == Tesla.Adapter.Hackney
end end
test "ExSyslogger module" do test "ExSyslogger module" do
binary = ConfigDB.transform("ExSyslogger") assert ConfigDB.to_elixir_types("ExSyslogger") == ExSyslogger
assert binary == :erlang.term_to_binary(ExSyslogger)
assert ConfigDB.from_binary(binary) == ExSyslogger
end end
test "Quack.Logger module" do test "Quack.Logger module" do
binary = ConfigDB.transform("Quack.Logger") assert ConfigDB.to_elixir_types("Quack.Logger") == Quack.Logger
assert binary == :erlang.term_to_binary(Quack.Logger)
assert ConfigDB.from_binary(binary) == Quack.Logger
end end
test "Swoosh.Adapters modules" do test "Swoosh.Adapters modules" do
binary = ConfigDB.transform("Swoosh.Adapters.SMTP") assert ConfigDB.to_elixir_types("Swoosh.Adapters.SMTP") == Swoosh.Adapters.SMTP
assert binary == :erlang.term_to_binary(Swoosh.Adapters.SMTP) assert ConfigDB.to_elixir_types("Swoosh.Adapters.AmazonSES") == Swoosh.Adapters.AmazonSES
assert ConfigDB.from_binary(binary) == Swoosh.Adapters.SMTP
binary = ConfigDB.transform("Swoosh.Adapters.AmazonSES")
assert binary == :erlang.term_to_binary(Swoosh.Adapters.AmazonSES)
assert ConfigDB.from_binary(binary) == Swoosh.Adapters.AmazonSES
end end
test "sigil" do test "sigil" do
binary = ConfigDB.transform("~r[comp[lL][aA][iI][nN]er]") assert ConfigDB.to_elixir_types("~r[comp[lL][aA][iI][nN]er]") == ~r/comp[lL][aA][iI][nN]er/
assert binary == :erlang.term_to_binary(~r/comp[lL][aA][iI][nN]er/)
assert ConfigDB.from_binary(binary) == ~r/comp[lL][aA][iI][nN]er/
end end
test "link sigil" do test "link sigil" do
binary = ConfigDB.transform("~r/https:\/\/example.com/") assert ConfigDB.to_elixir_types("~r/https:\/\/example.com/") == ~r/https:\/\/example.com/
assert binary == :erlang.term_to_binary(~r/https:\/\/example.com/)
assert ConfigDB.from_binary(binary) == ~r/https:\/\/example.com/
end end
test "link sigil with um modifiers" do test "link sigil with um modifiers" do
binary = ConfigDB.transform("~r/https:\/\/example.com/um") assert ConfigDB.to_elixir_types("~r/https:\/\/example.com/um") ==
assert binary == :erlang.term_to_binary(~r/https:\/\/example.com/um) ~r/https:\/\/example.com/um
assert ConfigDB.from_binary(binary) == ~r/https:\/\/example.com/um
end end
test "link sigil with i modifier" do test "link sigil with i modifier" do
binary = ConfigDB.transform("~r/https:\/\/example.com/i") assert ConfigDB.to_elixir_types("~r/https:\/\/example.com/i") == ~r/https:\/\/example.com/i
assert binary == :erlang.term_to_binary(~r/https:\/\/example.com/i)
assert ConfigDB.from_binary(binary) == ~r/https:\/\/example.com/i
end end
test "link sigil with s modifier" do test "link sigil with s modifier" do
binary = ConfigDB.transform("~r/https:\/\/example.com/s") assert ConfigDB.to_elixir_types("~r/https:\/\/example.com/s") == ~r/https:\/\/example.com/s
assert binary == :erlang.term_to_binary(~r/https:\/\/example.com/s)
assert ConfigDB.from_binary(binary) == ~r/https:\/\/example.com/s
end end
test "raise if valid delimiter not found" do test "raise if valid delimiter not found" do
assert_raise ArgumentError, "valid delimiter for Regex expression not found", fn -> assert_raise ArgumentError, "valid delimiter for Regex expression not found", fn ->
ConfigDB.transform("~r/https://[]{}<>\"'()|example.com/s") ConfigDB.to_elixir_types("~r/https://[]{}<>\"'()|example.com/s")
end end
end end
test "2 child tuple" do test "2 child tuple" do
binary = ConfigDB.transform(%{"tuple" => ["v1", ":v2"]}) assert ConfigDB.to_elixir_types(%{"tuple" => ["v1", ":v2"]}) == {"v1", :v2}
assert binary == :erlang.term_to_binary({"v1", :v2})
assert ConfigDB.from_binary(binary) == {"v1", :v2}
end end
test "proxy tuple with localhost" do test "proxy tuple with localhost" do
binary = assert ConfigDB.to_elixir_types(%{
ConfigDB.transform(%{
"tuple" => [":proxy_url", %{"tuple" => [":socks5", "localhost", 1234]}] "tuple" => [":proxy_url", %{"tuple" => [":socks5", "localhost", 1234]}]
}) }) == {:proxy_url, {:socks5, :localhost, 1234}}
assert binary == :erlang.term_to_binary({:proxy_url, {:socks5, :localhost, 1234}})
assert ConfigDB.from_binary(binary) == {:proxy_url, {:socks5, :localhost, 1234}}
end end
test "proxy tuple with domain" do test "proxy tuple with domain" do
binary = assert ConfigDB.to_elixir_types(%{
ConfigDB.transform(%{
"tuple" => [":proxy_url", %{"tuple" => [":socks5", "domain.com", 1234]}] "tuple" => [":proxy_url", %{"tuple" => [":socks5", "domain.com", 1234]}]
}) }) == {:proxy_url, {:socks5, 'domain.com', 1234}}
assert binary == :erlang.term_to_binary({:proxy_url, {:socks5, 'domain.com', 1234}})
assert ConfigDB.from_binary(binary) == {:proxy_url, {:socks5, 'domain.com', 1234}}
end end
test "proxy tuple with ip" do test "proxy tuple with ip" do
binary = assert ConfigDB.to_elixir_types(%{
ConfigDB.transform(%{
"tuple" => [":proxy_url", %{"tuple" => [":socks5", "127.0.0.1", 1234]}] "tuple" => [":proxy_url", %{"tuple" => [":socks5", "127.0.0.1", 1234]}]
}) }) == {:proxy_url, {:socks5, {127, 0, 0, 1}, 1234}}
assert binary == :erlang.term_to_binary({:proxy_url, {:socks5, {127, 0, 0, 1}, 1234}})
assert ConfigDB.from_binary(binary) == {:proxy_url, {:socks5, {127, 0, 0, 1}, 1234}}
end end
test "tuple with n childs" do test "tuple with n childs" do
binary = assert ConfigDB.to_elixir_types(%{
ConfigDB.transform(%{
"tuple" => [ "tuple" => [
"v1", "v1",
":v2", ":v2",
@@ -397,69 +334,41 @@ test "tuple with n childs" do
false, false,
"Phoenix.Socket.V1.JSONSerializer" "Phoenix.Socket.V1.JSONSerializer"
] ]
}) }) == {"v1", :v2, Pleroma.Bookmark, 150, false, Phoenix.Socket.V1.JSONSerializer}
assert binary ==
:erlang.term_to_binary(
{"v1", :v2, Pleroma.Bookmark, 150, false, Phoenix.Socket.V1.JSONSerializer}
)
assert ConfigDB.from_binary(binary) ==
{"v1", :v2, Pleroma.Bookmark, 150, false, Phoenix.Socket.V1.JSONSerializer}
end end
test "map with string key" do test "map with string key" do
binary = ConfigDB.transform(%{"key" => "value"}) assert ConfigDB.to_elixir_types(%{"key" => "value"}) == %{"key" => "value"}
assert binary == :erlang.term_to_binary(%{"key" => "value"})
assert ConfigDB.from_binary(binary) == %{"key" => "value"}
end end
test "map with atom key" do test "map with atom key" do
binary = ConfigDB.transform(%{":key" => "value"}) assert ConfigDB.to_elixir_types(%{":key" => "value"}) == %{key: "value"}
assert binary == :erlang.term_to_binary(%{key: "value"})
assert ConfigDB.from_binary(binary) == %{key: "value"}
end end
test "list of strings" do test "list of strings" do
binary = ConfigDB.transform(["v1", "v2", "v3"]) assert ConfigDB.to_elixir_types(["v1", "v2", "v3"]) == ["v1", "v2", "v3"]
assert binary == :erlang.term_to_binary(["v1", "v2", "v3"])
assert ConfigDB.from_binary(binary) == ["v1", "v2", "v3"]
end end
test "list of modules" do test "list of modules" do
binary = ConfigDB.transform(["Pleroma.Repo", "Pleroma.Activity"]) assert ConfigDB.to_elixir_types(["Pleroma.Repo", "Pleroma.Activity"]) == [
assert binary == :erlang.term_to_binary([Pleroma.Repo, Pleroma.Activity]) Pleroma.Repo,
assert ConfigDB.from_binary(binary) == [Pleroma.Repo, Pleroma.Activity] Pleroma.Activity
]
end end
test "list of atoms" do test "list of atoms" do
binary = ConfigDB.transform([":v1", ":v2", ":v3"]) assert ConfigDB.to_elixir_types([":v1", ":v2", ":v3"]) == [:v1, :v2, :v3]
assert binary == :erlang.term_to_binary([:v1, :v2, :v3])
assert ConfigDB.from_binary(binary) == [:v1, :v2, :v3]
end end
test "list of mixed values" do test "list of mixed values" do
binary = assert ConfigDB.to_elixir_types([
ConfigDB.transform([
"v1", "v1",
":v2", ":v2",
"Pleroma.Repo", "Pleroma.Repo",
"Phoenix.Socket.V1.JSONSerializer", "Phoenix.Socket.V1.JSONSerializer",
15, 15,
false false
]) ]) == [
assert binary ==
:erlang.term_to_binary([
"v1",
:v2,
Pleroma.Repo,
Phoenix.Socket.V1.JSONSerializer,
15,
false
])
assert ConfigDB.from_binary(binary) == [
"v1", "v1",
:v2, :v2,
Pleroma.Repo, Pleroma.Repo,
@@ -470,40 +379,17 @@ test "list of mixed values" do
end end
test "simple keyword" do test "simple keyword" do
binary = ConfigDB.transform([%{"tuple" => [":key", "value"]}]) assert ConfigDB.to_elixir_types([%{"tuple" => [":key", "value"]}]) == [key: "value"]
assert binary == :erlang.term_to_binary([{:key, "value"}])
assert ConfigDB.from_binary(binary) == [{:key, "value"}]
assert ConfigDB.from_binary(binary) == [key: "value"]
end
test "keyword with partial_chain key" do
binary =
ConfigDB.transform([%{"tuple" => [":partial_chain", "&:hackney_connect.partial_chain/1"]}])
assert binary == :erlang.term_to_binary(partial_chain: &:hackney_connect.partial_chain/1)
assert ConfigDB.from_binary(binary) == [partial_chain: &:hackney_connect.partial_chain/1]
end end
test "keyword" do test "keyword" do
binary = assert ConfigDB.to_elixir_types([
ConfigDB.transform([
%{"tuple" => [":types", "Pleroma.PostgresTypes"]}, %{"tuple" => [":types", "Pleroma.PostgresTypes"]},
%{"tuple" => [":telemetry_event", ["Pleroma.Repo.Instrumenter"]]}, %{"tuple" => [":telemetry_event", ["Pleroma.Repo.Instrumenter"]]},
%{"tuple" => [":migration_lock", nil]}, %{"tuple" => [":migration_lock", nil]},
%{"tuple" => [":key1", 150]}, %{"tuple" => [":key1", 150]},
%{"tuple" => [":key2", "string"]} %{"tuple" => [":key2", "string"]}
]) ]) == [
assert binary ==
:erlang.term_to_binary(
types: Pleroma.PostgresTypes,
telemetry_event: [Pleroma.Repo.Instrumenter],
migration_lock: nil,
key1: 150,
key2: "string"
)
assert ConfigDB.from_binary(binary) == [
types: Pleroma.PostgresTypes, types: Pleroma.PostgresTypes,
telemetry_event: [Pleroma.Repo.Instrumenter], telemetry_event: [Pleroma.Repo.Instrumenter],
migration_lock: nil, migration_lock: nil,
@@ -512,9 +398,12 @@ test "keyword" do
] ]
end end
test "trandformed keyword" do
assert ConfigDB.to_elixir_types(a: 1, b: 2, c: "string") == [a: 1, b: 2, c: "string"]
end
test "complex keyword with nested mixed childs" do test "complex keyword with nested mixed childs" do
binary = assert ConfigDB.to_elixir_types([
ConfigDB.transform([
%{"tuple" => [":uploader", "Pleroma.Uploaders.Local"]}, %{"tuple" => [":uploader", "Pleroma.Uploaders.Local"]},
%{"tuple" => [":filters", ["Pleroma.Upload.Filter.Dedupe"]]}, %{"tuple" => [":filters", ["Pleroma.Upload.Filter.Dedupe"]]},
%{"tuple" => [":link_name", true]}, %{"tuple" => [":link_name", true]},
@@ -529,33 +418,16 @@ test "complex keyword with nested mixed childs" do
%{ %{
"tuple" => [ "tuple" => [
":http", ":http",
[%{"tuple" => [":follow_redirect", true]}, %{"tuple" => [":pool", ":upload"]}]
]
}
]
]
}
])
assert binary ==
:erlang.term_to_binary(
uploader: Pleroma.Uploaders.Local,
filters: [Pleroma.Upload.Filter.Dedupe],
link_name: true,
proxy_remote: false,
common_map: %{key: "value"},
proxy_opts: [
redirect_on_failure: false,
max_body_length: 1_048_576,
http: [
follow_redirect: true,
pool: :upload
]
]
)
assert ConfigDB.from_binary(binary) ==
[ [
%{"tuple" => [":follow_redirect", true]},
%{"tuple" => [":pool", ":upload"]}
]
]
}
]
]
}
]) == [
uploader: Pleroma.Uploaders.Local, uploader: Pleroma.Uploaders.Local,
filters: [Pleroma.Upload.Filter.Dedupe], filters: [Pleroma.Upload.Filter.Dedupe],
link_name: true, link_name: true,
@@ -573,25 +445,13 @@ test "complex keyword with nested mixed childs" do
end end
test "common keyword" do test "common keyword" do
binary = assert ConfigDB.to_elixir_types([
ConfigDB.transform([
%{"tuple" => [":level", ":warn"]}, %{"tuple" => [":level", ":warn"]},
%{"tuple" => [":meta", [":all"]]}, %{"tuple" => [":meta", [":all"]]},
%{"tuple" => [":path", ""]}, %{"tuple" => [":path", ""]},
%{"tuple" => [":val", nil]}, %{"tuple" => [":val", nil]},
%{"tuple" => [":webhook_url", "https://hooks.slack.com/services/YOUR-KEY-HERE"]} %{"tuple" => [":webhook_url", "https://hooks.slack.com/services/YOUR-KEY-HERE"]}
]) ]) == [
assert binary ==
:erlang.term_to_binary(
level: :warn,
meta: [:all],
path: "",
val: nil,
webhook_url: "https://hooks.slack.com/services/YOUR-KEY-HERE"
)
assert ConfigDB.from_binary(binary) == [
level: :warn, level: :warn,
meta: [:all], meta: [:all],
path: "", path: "",
@@ -601,27 +461,19 @@ test "common keyword" do
end end
test "complex keyword with sigil" do test "complex keyword with sigil" do
binary = assert ConfigDB.to_elixir_types([
ConfigDB.transform([
%{"tuple" => [":federated_timeline_removal", []]}, %{"tuple" => [":federated_timeline_removal", []]},
%{"tuple" => [":reject", ["~r/comp[lL][aA][iI][nN]er/"]]}, %{"tuple" => [":reject", ["~r/comp[lL][aA][iI][nN]er/"]]},
%{"tuple" => [":replace", []]} %{"tuple" => [":replace", []]}
]) ]) == [
assert binary ==
:erlang.term_to_binary(
federated_timeline_removal: [], federated_timeline_removal: [],
reject: [~r/comp[lL][aA][iI][nN]er/], reject: [~r/comp[lL][aA][iI][nN]er/],
replace: [] replace: []
) ]
assert ConfigDB.from_binary(binary) ==
[federated_timeline_removal: [], reject: [~r/comp[lL][aA][iI][nN]er/], replace: []]
end end
test "complex keyword with tuples with more than 2 values" do test "complex keyword with tuples with more than 2 values" do
binary = assert ConfigDB.to_elixir_types([
ConfigDB.transform([
%{ %{
"tuple" => [ "tuple" => [
":http", ":http",
@@ -675,24 +527,7 @@ test "complex keyword with tuples with more than 2 values" do
] ]
] ]
} }
]) ]) == [
assert binary ==
:erlang.term_to_binary(
http: [
key1: [
_: [
{"/api/v1/streaming", Pleroma.Web.MastodonAPI.WebsocketHandler, []},
{"/websocket", Phoenix.Endpoint.CowboyWebSocket,
{Phoenix.Transports.WebSocket,
{Pleroma.Web.Endpoint, Pleroma.Web.UserSocket, []}}},
{:_, Phoenix.Endpoint.Cowboy2Handler, {Pleroma.Web.Endpoint, []}}
]
]
]
)
assert ConfigDB.from_binary(binary) == [
http: [ http: [
key1: [ key1: [
{:_, {:_,

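The tests above pin down the new conversion API: to_elixir_types/1 replaces transform/1 plus from_binary/1, and to_json_types/1 (used in ConfigView) converts back for the admin API. A round-trip sketch, assuming the symmetry these tests imply:

alias Pleroma.ConfigDB

json_form = %{"tuple" => [":proxy_url", %{"tuple" => [":socks5", "localhost", 1234]}]}

elixir_form = ConfigDB.to_elixir_types(json_form)
# => {:proxy_url, {:socks5, :localhost, 1234}}

ConfigDB.to_json_types(elixir_form)
# => %{"tuple" => [":proxy_url", %{"tuple" => [":socks5", "localhost", 1234]}]}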

@@ -6,9 +6,9 @@ defmodule Pleroma.Config.TransferTaskTest do
use Pleroma.DataCase use Pleroma.DataCase
import ExUnit.CaptureLog import ExUnit.CaptureLog
import Pleroma.Factory
alias Pleroma.Config.TransferTask alias Pleroma.Config.TransferTask
alias Pleroma.ConfigDB
setup do: clear_config(:configurable_from_database, true) setup do: clear_config(:configurable_from_database, true)
@@ -19,31 +19,11 @@ test "transfer config values from db to env" do
refute Application.get_env(:postgrex, :test_key) refute Application.get_env(:postgrex, :test_key)
initial = Application.get_env(:logger, :level) initial = Application.get_env(:logger, :level)
ConfigDB.create(%{ insert(:config, key: :test_key, value: [live: 2, com: 3])
group: ":pleroma", insert(:config, group: :idna, key: :test_key, value: [live: 15, com: 35])
key: ":test_key", insert(:config, group: :quack, key: :test_key, value: [:test_value1, :test_value2])
value: [live: 2, com: 3] insert(:config, group: :postgrex, key: :test_key, value: :value)
}) insert(:config, group: :logger, key: :level, value: :debug)
ConfigDB.create(%{
group: ":idna",
key: ":test_key",
value: [live: 15, com: 35]
})
ConfigDB.create(%{
group: ":quack",
key: ":test_key",
value: [:test_value1, :test_value2]
})
ConfigDB.create(%{
group: ":postgrex",
key: ":test_key",
value: :value
})
ConfigDB.create(%{group: ":logger", key: ":level", value: :debug})
TransferTask.start_link([]) TransferTask.start_link([])
@@ -66,17 +46,8 @@ test "transfer config values for 1 group and some keys" do
level = Application.get_env(:quack, :level) level = Application.get_env(:quack, :level)
meta = Application.get_env(:quack, :meta) meta = Application.get_env(:quack, :meta)
ConfigDB.create(%{ insert(:config, group: :quack, key: :level, value: :info)
group: ":quack", insert(:config, group: :quack, key: :meta, value: [:none])
key: ":level",
value: :info
})
ConfigDB.create(%{
group: ":quack",
key: ":meta",
value: [:none]
})
TransferTask.start_link([]) TransferTask.start_link([])
@@ -95,17 +66,8 @@ test "transfer config values with full subkey update" do
clear_config(:emoji) clear_config(:emoji)
clear_config(:assets) clear_config(:assets)
ConfigDB.create(%{ insert(:config, key: :emoji, value: [groups: [a: 1, b: 2]])
group: ":pleroma", insert(:config, key: :assets, value: [mascots: [a: 1, b: 2]])
key: ":emoji",
value: [groups: [a: 1, b: 2]]
})
ConfigDB.create(%{
group: ":pleroma",
key: ":assets",
value: [mascots: [a: 1, b: 2]]
})
TransferTask.start_link([]) TransferTask.start_link([])
@@ -122,12 +84,7 @@ test "transfer config values with full subkey update" do
test "don't restart if no reboot time settings were changed" do test "don't restart if no reboot time settings were changed" do
clear_config(:emoji) clear_config(:emoji)
insert(:config, key: :emoji, value: [groups: [a: 1, b: 2]])
ConfigDB.create(%{
group: ":pleroma",
key: ":emoji",
value: [groups: [a: 1, b: 2]]
})
refute String.contains?( refute String.contains?(
capture_log(fn -> TransferTask.start_link([]) end), capture_log(fn -> TransferTask.start_link([]) end),
@@ -137,25 +94,13 @@ test "don't restart if no reboot time settings were changed" do
test "on reboot time key" do test "on reboot time key" do
clear_config(:chat) clear_config(:chat)
insert(:config, key: :chat, value: [enabled: false])
ConfigDB.create(%{
group: ":pleroma",
key: ":chat",
value: [enabled: false]
})
assert capture_log(fn -> TransferTask.start_link([]) end) =~ "pleroma restarted" assert capture_log(fn -> TransferTask.start_link([]) end) =~ "pleroma restarted"
end end
test "on reboot time subkey" do test "on reboot time subkey" do
clear_config(Pleroma.Captcha) clear_config(Pleroma.Captcha)
insert(:config, key: Pleroma.Captcha, value: [seconds_valid: 60])
ConfigDB.create(%{
group: ":pleroma",
key: "Pleroma.Captcha",
value: [seconds_valid: 60]
})
assert capture_log(fn -> TransferTask.start_link([]) end) =~ "pleroma restarted" assert capture_log(fn -> TransferTask.start_link([]) end) =~ "pleroma restarted"
end end
@@ -163,17 +108,8 @@ test "don't restart pleroma on reboot time key and subkey if there is false flag
clear_config(:chat) clear_config(:chat)
clear_config(Pleroma.Captcha) clear_config(Pleroma.Captcha)
ConfigDB.create(%{ insert(:config, key: :chat, value: [enabled: false])
group: ":pleroma", insert(:config, key: Pleroma.Captcha, value: [seconds_valid: 60])
key: ":chat",
value: [enabled: false]
})
ConfigDB.create(%{
group: ":pleroma",
key: "Pleroma.Captcha",
value: [seconds_valid: 60]
})
refute String.contains?( refute String.contains?(
capture_log(fn -> TransferTask.load_and_update_env([], false) end), capture_log(fn -> TransferTask.load_and_update_env([], false) end),
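
For orientation: these tests seed rows through the `insert(:config, ...)` factory and expect `TransferTask` to surface them via `Application.get_env/2`. A minimal sketch of that transfer step, assuming the real task also layers restart tracking on top:

    # Sketch only, not the actual implementation: copy each ConfigDB row
    # into the application environment, which is what the assertions above read.
    Pleroma.Repo.all(Pleroma.ConfigDB)
    |> Enum.each(fn %{group: group, key: key, value: value} ->
      Application.put_env(group, key, value)
    end)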
View file
@@ -42,7 +42,8 @@ def user_factory do
       user
       | ap_id: User.ap_id(user),
         follower_address: User.ap_followers(user),
-        following_address: User.ap_following(user)
+        following_address: User.ap_following(user),
+        raw_bio: user.bio
     }
   end

@@ -396,24 +397,17 @@ def registration_factory do
     }
   end

-  def config_factory do
+  def config_factory(attrs \\ %{}) do
     %Pleroma.ConfigDB{
-      key:
-        sequence(:key, fn key ->
-          # Atom dynamic registration hack in tests
-          "some_key_#{key}"
-          |> String.to_atom()
-          |> inspect()
-        end),
-      group: ":pleroma",
+      key: sequence(:key, &String.to_atom("some_key_#{&1}")),
+      group: :pleroma,
       value:
         sequence(
          :value,
-          fn key ->
-            :erlang.term_to_binary(%{another_key: "#{key}somevalue", another: "#{key}somevalue"})
-          end
+          &%{another_key: "#{&1}somevalue", another: "#{&1}somevalue"}
        )
     }
+    |> merge_attributes(attrs)
   end

   def marker_factory do
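
Since `config_factory/1` now funnels attributes through `merge_attributes/2`, tests can override any field inline; that is what the `insert(:config, ...)` calls across this diff rely on. Typical calls, taken from the tests in this commit:

    insert(:config, key: :chat, value: [enabled: false])
    insert(:config, group: :quack, key: :level, value: :info)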
View file
@@ -5,6 +5,8 @@
 defmodule Mix.Tasks.Pleroma.ConfigTest do
   use Pleroma.DataCase

+  import Pleroma.Factory
+
   alias Pleroma.ConfigDB
   alias Pleroma.Repo

@@ -49,24 +51,19 @@ test "filtered settings are migrated to db" do
     refute ConfigDB.get_by_params(%{group: ":pleroma", key: "Pleroma.Repo"})
     refute ConfigDB.get_by_params(%{group: ":postgrex", key: ":json_library"})

-    assert ConfigDB.from_binary(config1.value) == [key: "value", key2: [Repo]]
-    assert ConfigDB.from_binary(config2.value) == [key: "value2", key2: ["Activity"]]
-    assert ConfigDB.from_binary(config3.value) == :info
+    assert config1.value == [key: "value", key2: [Repo]]
+    assert config2.value == [key: "value2", key2: ["Activity"]]
+    assert config3.value == :info
   end

   test "config table is truncated before migration" do
-    ConfigDB.create(%{
-      group: ":pleroma",
-      key: ":first_setting",
-      value: [key: "value", key2: ["Activity"]]
-    })
+    insert(:config, key: :first_setting, value: [key: "value", key2: ["Activity"]])

     assert Repo.aggregate(ConfigDB, :count, :id) == 1

     Mix.Tasks.Pleroma.Config.migrate_to_db("test/fixtures/config/temp.secret.exs")

     config = ConfigDB.get_by_params(%{group: ":pleroma", key: ":first_setting"})
-    assert ConfigDB.from_binary(config.value) == [key: "value", key2: [Repo]]
+    assert config.value == [key: "value", key2: [Repo]]
   end
 end

@@ -82,19 +79,9 @@ test "config table is truncated before migration" do
   end

   test "settings are migrated to file and deleted from db", %{temp_file: temp_file} do
-    ConfigDB.create(%{
-      group: ":pleroma",
-      key: ":setting_first",
-      value: [key: "value", key2: ["Activity"]]
-    })
-    ConfigDB.create(%{
-      group: ":pleroma",
-      key: ":setting_second",
-      value: [key: "value2", key2: [Repo]]
-    })
-    ConfigDB.create(%{group: ":quack", key: ":level", value: :info})
+    insert(:config, key: :setting_first, value: [key: "value", key2: ["Activity"]])
+    insert(:config, key: :setting_second, value: [key: "value2", key2: [Repo]])
+    insert(:config, group: :quack, key: :level, value: :info)

     Mix.Tasks.Pleroma.Config.run(["migrate_from_db", "--env", "temp", "-d"])

@@ -107,9 +94,8 @@ test "settings are migrated to file and deleted from db", %{temp_file: temp_file
   end

   test "load a settings with large values and pass to file", %{temp_file: temp_file} do
-    ConfigDB.create(%{
-      group: ":pleroma",
-      key: ":instance",
+    insert(:config,
+      key: :instance,
       value: [
         name: "Pleroma",
         email: "example@example.com",

@@ -163,7 +149,6 @@ test "load a settings with large values and pass to file", %{temp_file: temp_fil
           extended_nickname_format: true,
           multi_factor_authentication: [
             totp: [
-              # digits 6 or 8
               digits: 6,
               period: 30
             ],

@@ -173,7 +158,7 @@ test "load a settings with large values and pass to file", %{temp_file: temp_fil
           ]
         ]
       ]
-    })
+    )

     Mix.Tasks.Pleroma.Config.run(["migrate_from_db", "--env", "temp", "-d"])
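
A note on the assertion changes above: ConfigDB values are no longer hand-encoded with `:erlang.term_to_binary/1`, so the `ConfigDB.from_binary/1` wrappers drop out and tests compare the stored value to a plain Elixir term:

    # before: assert ConfigDB.from_binary(config.value) == [key: "value", key2: [Repo]]
    # after:
    assert config.value == [key: "value", key2: [Repo]]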
View file
@@ -6,21 +6,17 @@ defmodule Pleroma.Upload.Filter.MogrifyTest do
   use Pleroma.DataCase
   import Mock

-  alias Pleroma.Config
-  alias Pleroma.Upload
   alias Pleroma.Upload.Filter

+  setup do: clear_config([Filter.Mogrify, :args])
+
   test "apply mogrify filter" do
-    Config.put([Filter.Mogrify, :args], [{"tint", "40"}])
+    clear_config(Filter.Mogrify, args: [{"tint", "40"}])

     File.cp!(
       "test/fixtures/image.jpg",
       "test/fixtures/image_tmp.jpg"
     )

-    upload = %Upload{
+    upload = %Pleroma.Upload{
       name: "an… image.jpg",
       content_type: "image/jpg",
       path: Path.absname("test/fixtures/image_tmp.jpg"),
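
The move from `Config.put/2` to `clear_config/2` is about test isolation: the helper sets the value for the test and restores the previous one afterwards. Roughly equivalent behavior, sketched (the real helper lives in Pleroma's test support):

    previous = Pleroma.Config.get(Pleroma.Upload.Filter.Mogrify)
    Pleroma.Config.put(Pleroma.Upload.Filter.Mogrify, args: [{"tint", "40"}])
    on_exit(fn -> Pleroma.Config.put(Pleroma.Upload.Filter.Mogrify, previous) end)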
View file
@@ -1,5 +1,5 @@
 defmodule Pleroma.Web.ActivityPub.ObjectValidators.Types.DateTimeTest do
-  alias Pleroma.Web.ActivityPub.ObjectValidators.Types.DateTime
+  alias Pleroma.EctoType.ActivityPub.ObjectValidators.DateTime
   use Pleroma.DataCase

   test "it validates an xsd:Datetime" do
View file
@@ -3,7 +3,7 @@
 # SPDX-License-Identifier: AGPL-3.0-only

 defmodule Pleroma.Web.ObjectValidators.Types.ObjectIDTest do
-  alias Pleroma.Web.ActivityPub.ObjectValidators.Types.ObjectID
+  alias Pleroma.EctoType.ActivityPub.ObjectValidators.ObjectID
   use Pleroma.DataCase

   @uris [
View file
@@ -1,5 +1,5 @@
 defmodule Pleroma.Web.ObjectValidators.Types.RecipientsTest do
-  alias Pleroma.Web.ActivityPub.ObjectValidators.Types.Recipients
+  alias Pleroma.EctoType.ActivityPub.ObjectValidators.Recipients
   use Pleroma.DataCase

   test "it asserts that all elements of the list are object ids" do
View file
@@ -5,7 +5,7 @@
 defmodule Pleroma.Web.ActivityPub.ObjectValidators.Types.SafeTextTest do
   use Pleroma.DataCase

-  alias Pleroma.Web.ActivityPub.ObjectValidators.Types.SafeText
+  alias Pleroma.EctoType.ActivityPub.ObjectValidators.SafeText

   test "it lets normal text go through" do
     text = "hey how are you"
View file
@@ -57,12 +57,12 @@ test "with settings only in db", %{conn: conn} do
              ]
            } = json_response_and_validate_schema(conn, 200)

-    assert key1 == config1.key
-    assert key2 == config2.key
+    assert key1 == inspect(config1.key)
+    assert key2 == inspect(config2.key)
   end

   test "db is added to settings that are in db", %{conn: conn} do
-    _config = insert(:config, key: ":instance", value: ConfigDB.to_binary(name: "Some name"))
+    _config = insert(:config, key: ":instance", value: [name: "Some name"])

     %{"configs" => configs} =
       conn

@@ -83,7 +83,7 @@ test "merged default setting with db settings", %{conn: conn} do
     config3 =
       insert(:config,
-        value: ConfigDB.to_binary(k1: :v1, k2: :v2)
+        value: [k1: :v1, k2: :v2]
       )

     %{"configs" => configs} =
@@ -93,41 +93,44 @@ test "merged default setting with db settings", %{conn: conn} do
     assert length(configs) > 3

+    saved_configs = [config1, config2, config3]
+    keys = Enum.map(saved_configs, &inspect(&1.key))
+
     received_configs =
       Enum.filter(configs, fn %{"group" => group, "key" => key} ->
-        group == ":pleroma" and key in [config1.key, config2.key, config3.key]
+        group == ":pleroma" and key in keys
       end)

     assert length(received_configs) == 3

     db_keys =
       config3.value
-      |> ConfigDB.from_binary()
       |> Keyword.keys()
-      |> ConfigDB.convert()
+      |> ConfigDB.to_json_types()

+    keys = Enum.map(saved_configs -- [config3], &inspect(&1.key))
+    values = Enum.map(saved_configs, &ConfigDB.to_json_types(&1.value))
+    mapset_keys = MapSet.new(keys ++ db_keys)
+
     Enum.each(received_configs, fn %{"value" => value, "db" => db} ->
-      assert db in [[config1.key], [config2.key], db_keys]
+      db = MapSet.new(db)
+      assert MapSet.subset?(db, mapset_keys)

-      assert value in [
-               ConfigDB.from_binary_with_convert(config1.value),
-               ConfigDB.from_binary_with_convert(config2.value),
-               ConfigDB.from_binary_with_convert(config3.value)
-             ]
+      assert value in values
     end)
   end
test "subkeys with full update right merge", %{conn: conn} do test "subkeys with full update right merge", %{conn: conn} do
config1 =
insert(:config, insert(:config,
key: ":emoji", key: ":emoji",
value: ConfigDB.to_binary(groups: [a: 1, b: 2], key: [a: 1]) value: [groups: [a: 1, b: 2], key: [a: 1]]
) )
config2 =
insert(:config, insert(:config,
key: ":assets", key: ":assets",
value: ConfigDB.to_binary(mascots: [a: 1, b: 2], key: [a: 1]) value: [mascots: [a: 1, b: 2], key: [a: 1]]
) )
%{"configs" => configs} = %{"configs" => configs} =
@@ -137,14 +140,14 @@ test "subkeys with full update right merge", %{conn: conn} do
     vals =
       Enum.filter(configs, fn %{"group" => group, "key" => key} ->
-        group == ":pleroma" and key in [config1.key, config2.key]
+        group == ":pleroma" and key in [":emoji", ":assets"]
       end)

     emoji = Enum.find(vals, fn %{"key" => key} -> key == ":emoji" end)
     assets = Enum.find(vals, fn %{"key" => key} -> key == ":assets" end)

-    emoji_val = ConfigDB.transform_with_out_binary(emoji["value"])
-    assets_val = ConfigDB.transform_with_out_binary(assets["value"])
+    emoji_val = ConfigDB.to_elixir_types(emoji["value"])
+    assets_val = ConfigDB.to_elixir_types(assets["value"])

     assert emoji_val[:groups] == [a: 1, b: 2]
     assert assets_val[:mascots] == [a: 1, b: 2]
@@ -277,7 +280,8 @@ test "create new config setting in db", %{conn: conn} do
                 "value" => %{"tuple" => ["string", "Pleroma.Captcha.NotReal", []]},
                 "db" => [":key5"]
               }
-             ]
+             ],
+             "need_reboot" => false
           }

     assert Application.get_env(:pleroma, :key1) == "value1"

@@ -357,7 +361,8 @@ test "save configs setting without explicit key", %{conn: conn} do
                 "value" => "https://hooks.slack.com/services/KEY",
                 "db" => [":webhook_url"]
               }
-             ]
+             ],
+             "need_reboot" => false
           }

     assert Application.get_env(:quack, :level) == :info
@@ -366,14 +371,14 @@ test "save configs setting without explicit key", %{conn: conn} do
   end

   test "saving config with partial update", %{conn: conn} do
-    config = insert(:config, key: ":key1", value: :erlang.term_to_binary(key1: 1, key2: 2))
+    insert(:config, key: ":key1", value: :erlang.term_to_binary(key1: 1, key2: 2))

     conn =
       conn
       |> put_req_header("content-type", "application/json")
       |> post("/api/pleroma/admin/config", %{
         configs: [
-          %{group: config.group, key: config.key, value: [%{"tuple" => [":key3", 3]}]}
+          %{group: ":pleroma", key: ":key1", value: [%{"tuple" => [":key3", 3]}]}
         ]
       })

@@ -389,7 +394,8 @@ test "saving config with partial update", %{conn: conn} do
               ],
               "db" => [":key1", ":key2", ":key3"]
             }
-           ]
+           ],
+           "need_reboot" => false
         }
   end
@@ -500,8 +506,7 @@ test "update setting which need reboot, don't change reboot flag until reboot",
   end

   test "saving config with nested merge", %{conn: conn} do
-    config =
-      insert(:config, key: ":key1", value: :erlang.term_to_binary(key1: 1, key2: [k1: 1, k2: 2]))
+    config = insert(:config, key: :key1, value: [key1: 1, key2: [k1: 1, k2: 2]])

     conn =
       conn

@@ -509,8 +514,8 @@ test "saving config with nested merge", %{conn: conn} do
       |> post("/api/pleroma/admin/config", %{
         configs: [
           %{
-            group: config.group,
-            key: config.key,
+            group: ":pleroma",
+            key: ":key1",
             value: [
               %{"tuple" => [":key3", 3]},
               %{

@@ -548,7 +553,8 @@ test "saving config with nested merge", %{conn: conn} do
               ],
               "db" => [":key1", ":key3", ":key2"]
             }
-           ]
+           ],
+           "need_reboot" => false
         }
   end
@@ -588,7 +594,8 @@ test "saving special atoms", %{conn: conn} do
               ],
               "db" => [":ssl_options"]
             }
-           ]
+           ],
+           "need_reboot" => false
         }

     assert Application.get_env(:pleroma, :key1) == [

@@ -600,11 +607,10 @@ test "saving full setting if value is in full_key_update list", %{conn: conn} do
     backends = Application.get_env(:logger, :backends)
     on_exit(fn -> Application.put_env(:logger, :backends, backends) end)

-    config =
     insert(:config,
-      group: ":logger",
-      key: ":backends",
-      value: :erlang.term_to_binary([])
+      group: :logger,
+      key: :backends,
+      value: []
     )

     Pleroma.Config.TransferTask.load_and_update_env([], false)

@@ -617,8 +623,8 @@ test "saving full setting if value is in full_key_update list", %{conn: conn} do
       |> post("/api/pleroma/admin/config", %{
         configs: [
           %{
-            group: config.group,
-            key: config.key,
+            group: ":logger",
+            key: ":backends",
             value: [":console"]
           }
         ]
@@ -634,7 +640,8 @@ test "saving full setting if value is in full_key_update list", %{conn: conn} do
               ],
               "db" => [":backends"]
             }
-           ]
+           ],
+           "need_reboot" => false
         }

     assert Application.get_env(:logger, :backends) == [

@@ -643,11 +650,10 @@ test "saving full setting if value is in full_key_update list", %{conn: conn} do
   end

   test "saving full setting if value is not keyword", %{conn: conn} do
-    config =
     insert(:config,
-      group: ":tesla",
-      key: ":adapter",
-      value: :erlang.term_to_binary(Tesla.Adapter.Hackey)
+      group: :tesla,
+      key: :adapter,
+      value: Tesla.Adapter.Hackey
     )

     conn =

@@ -655,7 +661,7 @@ test "saving full setting if value is not keyword", %{conn: conn} do
       |> put_req_header("content-type", "application/json")
       |> post("/api/pleroma/admin/config", %{
         configs: [
-          %{group: config.group, key: config.key, value: "Tesla.Adapter.Httpc"}
+          %{group: ":tesla", key: ":adapter", value: "Tesla.Adapter.Httpc"}
         ]
       })

@@ -667,7 +673,8 @@ test "saving full setting if value is not keyword", %{conn: conn} do
               "value" => "Tesla.Adapter.Httpc",
               "db" => [":adapter"]
             }
-           ]
+           ],
+           "need_reboot" => false
         }
   end
@@ -677,13 +684,13 @@ test "update config setting & delete with fallback to default value", %{
         token: token
       } do
     ueberauth = Application.get_env(:ueberauth, Ueberauth)
-    config1 = insert(:config, key: ":keyaa1")
-    config2 = insert(:config, key: ":keyaa2")
+    insert(:config, key: :keyaa1)
+    insert(:config, key: :keyaa2)

     config3 =
       insert(:config,
-        group: ":ueberauth",
-        key: "Ueberauth"
+        group: :ueberauth,
+        key: Ueberauth
       )

     conn =

@@ -691,8 +698,8 @@ test "update config setting & delete with fallback to default value", %{
       |> put_req_header("content-type", "application/json")
       |> post("/api/pleroma/admin/config", %{
         configs: [
-          %{group: config1.group, key: config1.key, value: "another_value"},
-          %{group: config2.group, key: config2.key, value: "another_value"}
+          %{group: ":pleroma", key: ":keyaa1", value: "another_value"},
+          %{group: ":pleroma", key: ":keyaa2", value: "another_value"}
         ]
       })

@@ -700,22 +707,23 @@ test "update config setting & delete with fallback to default value", %{
             "configs" => [
               %{
                 "group" => ":pleroma",
-                "key" => config1.key,
+                "key" => ":keyaa1",
                 "value" => "another_value",
                 "db" => [":keyaa1"]
               },
               %{
                 "group" => ":pleroma",
-                "key" => config2.key,
+                "key" => ":keyaa2",
                 "value" => "another_value",
                 "db" => [":keyaa2"]
               }
-           ]
+           ],
+           "need_reboot" => false
         }

     assert Application.get_env(:pleroma, :keyaa1) == "another_value"
     assert Application.get_env(:pleroma, :keyaa2) == "another_value"
-    assert Application.get_env(:ueberauth, Ueberauth) == ConfigDB.from_binary(config3.value)
+    assert Application.get_env(:ueberauth, Ueberauth) == config3.value

     conn =
       build_conn()
@@ -724,7 +732,7 @@ test "update config setting & delete with fallback to default value", %{
       |> put_req_header("content-type", "application/json")
       |> post("/api/pleroma/admin/config", %{
         configs: [
-          %{group: config2.group, key: config2.key, delete: true},
+          %{group: ":pleroma", key: ":keyaa2", delete: true},
           %{
             group: ":ueberauth",
             key: "Ueberauth",

@@ -734,7 +742,8 @@ test "update config setting & delete with fallback to default value", %{
       })

     assert json_response_and_validate_schema(conn, 200) == %{
-             "configs" => []
+             "configs" => [],
+             "need_reboot" => false
           }

     assert Application.get_env(:ueberauth, Ueberauth) == ueberauth

@@ -801,7 +810,8 @@ test "common config example", %{conn: conn} do
                 ":name"
               ]
             }
-           ]
+           ],
+           "need_reboot" => false
         }
   end
@@ -935,7 +945,8 @@ test "tuples with more than two values", %{conn: conn} do
               ],
               "db" => [":http"]
             }
-           ]
+           ],
+           "need_reboot" => false
         }
   end

@@ -1000,7 +1011,8 @@ test "settings with nesting map", %{conn: conn} do
               ],
               "db" => [":key2", ":key3"]
             }
-           ]
+           ],
+           "need_reboot" => false
         }
   end

@@ -1027,7 +1039,8 @@ test "value as map", %{conn: conn} do
               "value" => %{"key" => "some_val"},
               "db" => [":key1"]
             }
-           ]
+           ],
+           "need_reboot" => false
         }
   end

@@ -1077,15 +1090,15 @@ test "queues key as atom", %{conn: conn} do
                 ":background"
               ]
             }
-           ]
+           ],
+           "need_reboot" => false
         }
   end
test "delete part of settings by atom subkeys", %{conn: conn} do test "delete part of settings by atom subkeys", %{conn: conn} do
config =
insert(:config, insert(:config,
key: ":keyaa1", key: :keyaa1,
value: :erlang.term_to_binary(subkey1: "val1", subkey2: "val2", subkey3: "val3") value: [subkey1: "val1", subkey2: "val2", subkey3: "val3"]
) )
conn = conn =
@ -1094,8 +1107,8 @@ test "delete part of settings by atom subkeys", %{conn: conn} do
|> post("/api/pleroma/admin/config", %{ |> post("/api/pleroma/admin/config", %{
configs: [ configs: [
%{ %{
group: config.group, group: ":pleroma",
key: config.key, key: ":keyaa1",
subkeys: [":subkey1", ":subkey3"], subkeys: [":subkey1", ":subkey3"],
delete: true delete: true
} }
@ -1110,7 +1123,8 @@ test "delete part of settings by atom subkeys", %{conn: conn} do
"value" => [%{"tuple" => [":subkey2", "val2"]}], "value" => [%{"tuple" => [":subkey2", "val2"]}],
"db" => [":subkey2"] "db" => [":subkey2"]
} }
] ],
"need_reboot" => false
} }
end end
@ -1236,6 +1250,90 @@ test "doesn't set keys not in the whitelist", %{conn: conn} do
assert Application.get_env(:pleroma, Pleroma.Captcha.NotReal) == "value5" assert Application.get_env(:pleroma, Pleroma.Captcha.NotReal) == "value5"
assert Application.get_env(:not_real, :anything) == "value6" assert Application.get_env(:not_real, :anything) == "value6"
end end
test "args for Pleroma.Upload.Filter.Mogrify with custom tuples", %{conn: conn} do
clear_config(Pleroma.Upload.Filter.Mogrify)
assert conn
|> put_req_header("content-type", "application/json")
|> post("/api/pleroma/admin/config", %{
configs: [
%{
group: ":pleroma",
key: "Pleroma.Upload.Filter.Mogrify",
value: [
%{"tuple" => [":args", ["auto-orient", "strip"]]}
]
}
]
})
|> json_response_and_validate_schema(200) == %{
"configs" => [
%{
"group" => ":pleroma",
"key" => "Pleroma.Upload.Filter.Mogrify",
"value" => [
%{"tuple" => [":args", ["auto-orient", "strip"]]}
],
"db" => [":args"]
}
],
"need_reboot" => false
}
assert Config.get(Pleroma.Upload.Filter.Mogrify) == [args: ["auto-orient", "strip"]]
assert conn
|> put_req_header("content-type", "application/json")
|> post("/api/pleroma/admin/config", %{
configs: [
%{
group: ":pleroma",
key: "Pleroma.Upload.Filter.Mogrify",
value: [
%{
"tuple" => [
":args",
[
"auto-orient",
"strip",
"{\"implode\", \"1\"}",
"{\"resize\", \"3840x1080>\"}"
]
]
}
]
}
]
})
|> json_response(200) == %{
"configs" => [
%{
"group" => ":pleroma",
"key" => "Pleroma.Upload.Filter.Mogrify",
"value" => [
%{
"tuple" => [
":args",
[
"auto-orient",
"strip",
"{\"implode\", \"1\"}",
"{\"resize\", \"3840x1080>\"}"
]
]
}
],
"db" => [":args"]
}
],
"need_reboot" => false
}
assert Config.get(Pleroma.Upload.Filter.Mogrify) == [
args: ["auto-orient", "strip", {"implode", "1"}, {"resize", "3840x1080>"}]
]
end
  end

  describe "GET /api/pleroma/admin/config/descriptions" do
View file
@@ -0,0 +1,145 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.AdminAPI.MediaProxyCacheControllerTest do
use Pleroma.Web.ConnCase
import Pleroma.Factory
import Mock
alias Pleroma.Web.MediaProxy
setup do: clear_config([:media_proxy])
setup do
on_exit(fn -> Cachex.clear(:banned_urls_cache) end)
end
setup do
admin = insert(:user, is_admin: true)
token = insert(:oauth_admin_token, user: admin)
conn =
build_conn()
|> assign(:user, admin)
|> assign(:token, token)
Config.put([:media_proxy, :enabled], true)
Config.put([:media_proxy, :invalidation, :enabled], true)
Config.put([:media_proxy, :invalidation, :provider], MediaProxy.Invalidation.Script)
{:ok, %{admin: admin, token: token, conn: conn}}
end
describe "GET /api/pleroma/admin/media_proxy_caches" do
test "shows banned MediaProxy URLs", %{conn: conn} do
MediaProxy.put_in_banned_urls([
"http://localhost:4001/media/a688346.jpg",
"http://localhost:4001/media/fb1f4d.jpg"
])
MediaProxy.put_in_banned_urls("http://localhost:4001/media/gb1f44.jpg")
MediaProxy.put_in_banned_urls("http://localhost:4001/media/tb13f47.jpg")
MediaProxy.put_in_banned_urls("http://localhost:4001/media/wb1f46.jpg")
response =
conn
|> get("/api/pleroma/admin/media_proxy_caches?page_size=2")
|> json_response_and_validate_schema(200)
assert response["urls"] == [
"http://localhost:4001/media/fb1f4d.jpg",
"http://localhost:4001/media/a688346.jpg"
]
response =
conn
|> get("/api/pleroma/admin/media_proxy_caches?page_size=2&page=2")
|> json_response_and_validate_schema(200)
assert response["urls"] == [
"http://localhost:4001/media/gb1f44.jpg",
"http://localhost:4001/media/tb13f47.jpg"
]
response =
conn
|> get("/api/pleroma/admin/media_proxy_caches?page_size=2&page=3")
|> json_response_and_validate_schema(200)
assert response["urls"] == ["http://localhost:4001/media/wb1f46.jpg"]
end
end
describe "POST /api/pleroma/admin/media_proxy_caches/delete" do
test "deleted MediaProxy URLs from banned", %{conn: conn} do
MediaProxy.put_in_banned_urls([
"http://localhost:4001/media/a688346.jpg",
"http://localhost:4001/media/fb1f4d.jpg"
])
response =
conn
|> put_req_header("content-type", "application/json")
|> post("/api/pleroma/admin/media_proxy_caches/delete", %{
urls: ["http://localhost:4001/media/a688346.jpg"]
})
|> json_response_and_validate_schema(200)
assert response["urls"] == ["http://localhost:4001/media/a688346.jpg"]
refute MediaProxy.in_banned_urls("http://localhost:4001/media/a688346.jpg")
assert MediaProxy.in_banned_urls("http://localhost:4001/media/fb1f4d.jpg")
end
end
describe "POST /api/pleroma/admin/media_proxy_caches/purge" do
test "perform invalidates cache of MediaProxy", %{conn: conn} do
urls = [
"http://example.com/media/a688346.jpg",
"http://example.com/media/fb1f4d.jpg"
]
with_mocks [
{MediaProxy.Invalidation.Script, [],
[
purge: fn _, _ -> {"ok", 0} end
]}
] do
response =
conn
|> put_req_header("content-type", "application/json")
|> post("/api/pleroma/admin/media_proxy_caches/purge", %{urls: urls, ban: false})
|> json_response_and_validate_schema(200)
assert response["urls"] == urls
refute MediaProxy.in_banned_urls("http://example.com/media/a688346.jpg")
refute MediaProxy.in_banned_urls("http://example.com/media/fb1f4d.jpg")
end
end
test "perform invalidates cache of MediaProxy and adds url to banned", %{conn: conn} do
urls = [
"http://example.com/media/a688346.jpg",
"http://example.com/media/fb1f4d.jpg"
]
with_mocks [{MediaProxy.Invalidation.Script, [], [purge: fn _, _ -> {"ok", 0} end]}] do
response =
conn
|> put_req_header("content-type", "application/json")
|> post("/api/pleroma/admin/media_proxy_caches/purge", %{
urls: urls,
ban: true
})
|> json_response_and_validate_schema(200)
assert response["urls"] == urls
assert MediaProxy.in_banned_urls("http://example.com/media/a688346.jpg")
assert MediaProxy.in_banned_urls("http://example.com/media/fb1f4d.jpg")
end
end
end
end

View file

@@ -83,10 +83,9 @@ test "sets user settings in a generic way", %{conn: conn} do
   test "updates the user's bio", %{conn: conn} do
     user2 = insert(:user)

-    conn =
-      patch(conn, "/api/v1/accounts/update_credentials", %{
-        "note" => "I drink #cofe with @#{user2.nickname}\n\nsuya.."
-      })
+    raw_bio = "I drink #cofe with @#{user2.nickname}\n\nsuya.."
+
+    conn = patch(conn, "/api/v1/accounts/update_credentials", %{"note" => raw_bio})

     assert user_data = json_response_and_validate_schema(conn, 200)
@@ -94,6 +93,12 @@ test "updates the user's bio", %{conn: conn} do
              ~s(I drink <a class="hashtag" data-tag="cofe" href="http://localhost:4001/tag/cofe">#cofe</a> with <span class="h-card"><a class="u-url mention" data-user="#{
                user2.id
              }" href="#{user2.ap_id}" rel="ugc">@<span>#{user2.nickname}</span></a></span><br/><br/>suya..)

+    assert user_data["source"]["note"] == raw_bio
+
+    user = Repo.get(User, user_data["id"])
+    assert user.raw_bio == raw_bio
   end

   test "updates the user's locking status", %{conn: conn} do
View file
@@ -151,6 +151,22 @@ test "constructs hashtags from search query", %{conn: conn} do
            ]
   end
test "supports pagination of hashtags search results", %{conn: conn} do
results =
conn
|> get(
"/api/v2/search?#{
URI.encode_query(%{q: "#some #text #with #hashtags", limit: 2, offset: 1})
}"
)
|> json_response_and_validate_schema(200)
assert results["hashtags"] == [
%{"name" => "text", "url" => "#{Web.base_url()}/tag/text"},
%{"name" => "with", "url" => "#{Web.base_url()}/tag/with"}
]
end
test "excludes a blocked users from search results", %{conn: conn} do test "excludes a blocked users from search results", %{conn: conn} do
user = insert(:user) user = insert(:user)
user_smith = insert(:user, %{nickname: "Agent", name: "I love 2hu"}) user_smith = insert(:user, %{nickname: "Agent", name: "I love 2hu"})
View file
@@ -1561,7 +1561,7 @@ test "favorites paginate correctly" do
     # Using the header for pagination works correctly
     [next, _] = get_resp_header(result, "link") |> hd() |> String.split(", ")
-    [_, max_id] = Regex.run(~r/max_id=(.*)>;/, next)
+    [_, max_id] = Regex.run(~r/max_id=([^&]+)/, next)

     assert max_id == third_favorite.id
View file
@@ -33,7 +33,8 @@ test "Represent a user account" do
         bio:
           "<script src=\"invalid-html\"></script><span>valid html</span>. a<br>b<br/>c<br >d<br />f '&<>\"",
         inserted_at: ~N[2017-08-15 15:47:06.597036],
-        emoji: %{"karjalanpiirakka" => "/file.png"}
+        emoji: %{"karjalanpiirakka" => "/file.png"},
+        raw_bio: "valid html. a\nb\nc\nd\nf '&<>\""
       })

     expected = %{
View file
@@ -15,8 +15,17 @@ test "represents a Mastodon Conversation entity" do
     user = insert(:user)
     other_user = insert(:user)

+    {:ok, parent} = CommonAPI.post(user, %{status: "parent"})
+
     {:ok, activity} =
-      CommonAPI.post(user, %{status: "hey @#{other_user.nickname}", visibility: "direct"})
+      CommonAPI.post(user, %{
+        status: "hey @#{other_user.nickname}",
+        visibility: "direct",
+        in_reply_to_id: parent.id
+      })
+
+    {:ok, _reply_activity} =
+      CommonAPI.post(user, %{status: "hu", visibility: "public", in_reply_to_id: parent.id})

     [participation] = Participation.for_user_with_last_activity_id(user)
View file
@@ -0,0 +1,64 @@
defmodule Pleroma.Web.MediaProxy.InvalidationTest do
use ExUnit.Case
use Pleroma.Tests.Helpers
alias Pleroma.Config
alias Pleroma.Web.MediaProxy.Invalidation
import ExUnit.CaptureLog
import Mock
import Tesla.Mock
setup do: clear_config([:media_proxy])
setup do
on_exit(fn -> Cachex.clear(:banned_urls_cache) end)
end
describe "Invalidation.Http" do
test "perform request to clear cache" do
Config.put([:media_proxy, :enabled], false)
Config.put([:media_proxy, :invalidation, :enabled], true)
Config.put([:media_proxy, :invalidation, :provider], Invalidation.Http)
Config.put([Invalidation.Http], method: :purge, headers: [{"x-refresh", 1}])
image_url = "http://example.com/media/example.jpg"
Pleroma.Web.MediaProxy.put_in_banned_urls(image_url)
mock(fn
%{
method: :purge,
url: "http://example.com/media/example.jpg",
headers: [{"x-refresh", 1}]
} ->
%Tesla.Env{status: 200}
end)
assert capture_log(fn ->
assert Pleroma.Web.MediaProxy.in_banned_urls(image_url)
assert Invalidation.purge([image_url]) == {:ok, [image_url]}
assert Pleroma.Web.MediaProxy.in_banned_urls(image_url)
end) =~ "Running cache purge: [\"#{image_url}\"]"
end
end
describe "Invalidation.Script" do
test "run script to clear cache" do
Config.put([:media_proxy, :enabled], false)
Config.put([:media_proxy, :invalidation, :enabled], true)
Config.put([:media_proxy, :invalidation, :provider], Invalidation.Script)
Config.put([Invalidation.Script], script_path: "purge-nginx")
image_url = "http://example.com/media/example.jpg"
Pleroma.Web.MediaProxy.put_in_banned_urls(image_url)
with_mocks [{System, [], [cmd: fn _, _ -> {"ok", 0} end]}] do
assert capture_log(fn ->
assert Pleroma.Web.MediaProxy.in_banned_urls(image_url)
assert Invalidation.purge([image_url]) == {:ok, [image_url]}
assert Pleroma.Web.MediaProxy.in_banned_urls(image_url)
end) =~ "Running cache purge: [\"#{image_url}\"]"
end
end
end
end
View file
@@ -5,6 +5,10 @@ defmodule Pleroma.Web.MediaProxy.Invalidation.HttpTest do
   import ExUnit.CaptureLog
   import Tesla.Mock

+  setup do
+    on_exit(fn -> Cachex.clear(:banned_urls_cache) end)
+  end
+
   test "logs hasn't error message when request is valid" do
     mock(fn
       %{method: :purge, url: "http://example.com/media/example.jpg"} ->
@@ -14,8 +18,8 @@ test "logs hasn't error message when request is valid" do
     refute capture_log(fn ->
              assert Invalidation.Http.purge(
                       ["http://example.com/media/example.jpg"],
-                      %{}
-                    ) == {:ok, "success"}
+                      []
+                    ) == {:ok, ["http://example.com/media/example.jpg"]}
            end) =~ "Error while cache purge"
   end
@@ -28,8 +32,8 @@ test "it write error message in logs when request invalid" do
     assert capture_log(fn ->
              assert Invalidation.Http.purge(
                       ["http://example.com/media/example1.jpg"],
-                      %{}
-                    ) == {:ok, "success"}
+                      []
+                    ) == {:ok, ["http://example.com/media/example1.jpg"]}
            end) =~ "Error while cache purge: url - http://example.com/media/example1.jpg"
   end
 end
View file
@@ -4,17 +4,23 @@ defmodule Pleroma.Web.MediaProxy.Invalidation.ScriptTest do
   import ExUnit.CaptureLog

+  setup do
+    on_exit(fn -> Cachex.clear(:banned_urls_cache) end)
+  end
+
   test "it logger error when script not found" do
     assert capture_log(fn ->
              assert Invalidation.Script.purge(
                       ["http://example.com/media/example.jpg"],
-                      %{script_path: "./example"}
-                    ) == {:error, "\"%ErlangError{original: :enoent}\""}
-           end) =~ "Error while cache purge: \"%ErlangError{original: :enoent}\""
+                      script_path: "./example"
+                    ) == {:error, "%ErlangError{original: :enoent}"}
+           end) =~ "Error while cache purge: %ErlangError{original: :enoent}"

+    capture_log(fn ->
       assert Invalidation.Script.purge(
                ["http://example.com/media/example.jpg"],
-               %{}
-             ) == {:error, "not found script path"}
+               []
+             ) == {:error, "\"not found script path\""}
+    end)
   end
 end
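
These option changes (keyword lists instead of maps) track the provider contract that InvalidationTest mocks with System.cmd/2. A sketch of that contract, under the assumption taken from the mocks above that a zero exit status means success; the module name here is illustrative:

    defmodule PurgeSketch do
      # Hypothetical Script-style provider: shell out once per URL;
      # the tests above stub System.cmd/2 to return {"ok", 0}.
      def purge(urls, script_path: path) do
        Enum.each(urls, fn url -> System.cmd(path, [url]) end)
        {:ok, urls}
      end
    end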
View file
@@ -10,6 +10,10 @@ defmodule Pleroma.Web.MediaProxy.MediaProxyControllerTest do
   setup do: clear_config(:media_proxy)
   setup do: clear_config([Pleroma.Web.Endpoint, :secret_key_base])

+  setup do
+    on_exit(fn -> Cachex.clear(:banned_urls_cache) end)
+  end
+
   test "it returns 404 when MediaProxy disabled", %{conn: conn} do
     Config.put([:media_proxy, :enabled], false)
@@ -66,4 +70,16 @@ test "it performs ReverseProxy.call when signature valid", %{conn: conn} do
       assert %Plug.Conn{status: :success} = get(conn, url)
     end
   end
test "it returns 404 when url contains in banned_urls cache", %{conn: conn} do
Config.put([:media_proxy, :enabled], true)
Config.put([Pleroma.Web.Endpoint, :secret_key_base], "00000000000")
url = Pleroma.Web.MediaProxy.encode_url("https://google.fn/test.png")
Pleroma.Web.MediaProxy.put_in_banned_urls("https://google.fn/test.png")
with_mock Pleroma.ReverseProxy,
call: fn _conn, _url, _opts -> %Plug.Conn{status: :success} end do
assert %Plug.Conn{status: 404, resp_body: "Not Found"} = get(conn, url)
end
end
end