Merge branch 'admin-be' into 'develop'

Config API for admin-fe. Closes #1406. See merge request pleroma/pleroma!1976

Commit be27777156. 35 changed files with 3635 additions and 1416 deletions.
@@ -112,7 +112,6 @@
   shortcode_globs: ["/emoji/custom/**/*.png"],
   pack_extensions: [".png", ".gif"],
   groups: [
-    # Put groups that have higher priority than defaults here. Example in `docs/config/custom_emoji.md`
     Custom: ["/emoji/*.png", "/emoji/**/*.png"]
   ],
   default_manifest: "https://git.pleroma.social/pleroma/emoji-index/raw/master/index.json",
@@ -265,7 +264,6 @@
   remote_post_retention_days: 90,
   skip_thread_containment: true,
   limit_to_local_content: :unauthenticated,
-  dynamic_configuration: false,
   user_bio_length: 5000,
   user_name_length: 100,
   max_account_fields: 10,
@@ -620,6 +618,8 @@
 
 config :pleroma, :modules, runtime_dir: "instance/modules"
 
+config :pleroma, configurable_from_database: false
+
 config :swarm, node_blacklist: [~r/myhtml_.*$/]
 # Import environment specific config. This must remain at the bottom
 # of this file so it overrides the configuration defined above.
(The diff of one file is suppressed because it is too large.)
@@ -6,6 +6,8 @@
 config_path = System.get_env("PLEROMA_CONFIG_PATH") || "/etc/pleroma/config.exs"
 
+config :pleroma, release: true, config_path: config_path
+
 if File.exists?(config_path) do
   import_config config_path
 else
@@ -18,3 +20,12 @@
   IO.puts(warning)
 end
+
+exported_config =
+  config_path
+  |> Path.dirname()
+  |> Path.join("prod.exported_from_db.secret.exs")
+
+if File.exists?(exported_config) do
+  import_config exported_config
+end
@@ -3,7 +3,7 @@
 Authentication is required and the user must be an admin.
 
 Configuration options:
 
 * `[:auth, :enforce_oauth_admin_scope_usage]` — OAuth admin scope requirement toggle.
   If `true`, admin actions explicitly demand admin OAuth scope(s) presence in OAuth token (client app must support admin scopes).
   If `false` and token doesn't have admin scope(s), `is_admin` user flag grants access to admin-specific actions.
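For reference, this toggle lives under the `:auth` key of the `:pleroma` config, so enabling the strict behaviour in a config file would look roughly like the sketch below (where exactly you put it depends on your installation):

```elixir
# Illustrative snippet: require admin OAuth scope(s) for admin API actions.
config :pleroma, :auth, enforce_oauth_admin_scope_usage: true
```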
@@ -665,27 +665,16 @@ Note: Available `:permission_group` is currently moderator and admin.
 - 404 Not Found `"Not found"`
 - On success: 200 OK `{}`
 
-## `GET /api/pleroma/admin/config/migrate_to_db`
-
-### Run mix task pleroma.config migrate_to_db
-
-Copy settings on key `:pleroma` to DB.
-
-- Params: none
-- Response:
-
-```json
-{}
-```
-
 ## `GET /api/pleroma/admin/config/migrate_from_db`
 
 ### Run mix task pleroma.config migrate_from_db
 
-Copy all settings from DB to `config/prod.exported_from_db.secret.exs` with deletion from DB.
+Copies all settings from the database to `config/{env}.exported_from_db.secret.exs` and deletes them from the table, where `{env}` is the environment in which Pleroma is running.
 
 - Params: none
 - Response:
+  - On failure:
+    - 400 Bad Request `"To use this endpoint you need to enable configuration from database."`
 
 ```json
 {}
@@ -693,20 +682,24 @@ Copy all settings from DB to `config/prod.exported_from_db.secret.exs`
 
 ## `GET /api/pleroma/admin/config`
 
-### List config settings
+### Get the list of default settings merged with the settings saved in the database
 
-List config settings only works with `:pleroma => :instance => :dynamic_configuration` setting to `true`.
+**Only works when configuration from database is enabled.**
 
-- Params: none
+- Params:
+  - `only_db`: true (*optional*, return only the settings saved in the database)
 - Response:
+  - On failure:
+    - 400 Bad Request `"To use this endpoint you need to enable configuration from database."`
+    - 400 Bad Request `"To use configuration from database migrate your settings to database."`
 
 ```json
 {
   configs: [
     {
-      "group": string,
-      "key": string or string with leading `:` for atoms,
-      "value": string or {} or [] or {"tuple": []}
+      "group": ":pleroma",
+      "key": "Pleroma.Upload",
+      "value": []
     }
   ]
 }
@@ -716,44 +709,107 @@ List config settings
 ### Update config settings
 
-Updating config settings only works with `:pleroma => :instance => :dynamic_configuration` setting to `true`.
-Module name can be passed as string, which starts with `Pleroma`, e.g. `"Pleroma.Upload"`.
-Atom keys and values can be passed with `:` in the beginning, e.g. `":upload"`.
-Tuples can be passed as `{"tuple": ["first_val", Pleroma.Module, []]}`.
-`{"tuple": ["some_string", "Pleroma.Some.Module", []]}` will be converted to `{"some_string", Pleroma.Some.Module, []}`.
-Keywords can be passed as lists with 2 child tuples, e.g.
-`[{"tuple": ["first_val", Pleroma.Module]}, {"tuple": ["second_val", true]}]`.
-
-If value contains list of settings `[subkey: val1, subkey2: val2, subkey3: val3]`, it's possible to remove only subkeys instead of all settings passing `subkeys` parameter. E.g.:
-{"group": "pleroma", "key": "some_key", "delete": "true", "subkeys": [":subkey", ":subkey3"]}.
-
-Compile time settings (need instance reboot):
-- all settings by this keys:
+**Only works when configuration from database is enabled.**
+
+Some modifications are necessary to save the config settings correctly (an Elixir-side sketch of these conversions follows the list):
+
+- strings which start with `Pleroma.`, `Phoenix.`, `Tesla.`, or strings like `Oban` and `Ueberauth`, will be converted to modules;
+```
+"Pleroma.Upload" -> Pleroma.Upload
+"Oban" -> Oban
+```
+- strings starting with `:` will be converted to atoms;
+```
+":pleroma" -> :pleroma
+```
+- objects with a `tuple` key and an array value will be converted to tuples;
+```
+{"tuple": ["string", "Pleroma.Upload", []]} -> {"string", Pleroma.Upload, []}
+```
+- arrays of *tuple objects* will be converted to keyword lists;
+```
+[{"tuple": [":key1", "value"]}, {"tuple": [":key2", "value"]}] -> [key1: "value", key2: "value"]
+```
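As a rough Elixir-side illustration of the conversions listed above (the real logic is in the `transform` helpers that `Pleroma.ConfigDB` gains in this changeset; the values shown are only examples):

```elixir
# Illustrative only; `transform_with_out_binary/1` is the new ConfigDB helper
# that converts JSON-shaped values into Elixir terms.
Pleroma.ConfigDB.transform_with_out_binary(":pleroma")
#=> :pleroma

Pleroma.ConfigDB.transform_with_out_binary(%{"tuple" => [":uploader", "Pleroma.Uploaders.Local"]})
#=> {:uploader, Pleroma.Uploaders.Local}

Pleroma.ConfigDB.transform_with_out_binary([%{"tuple" => [":key1", "value"]}])
#=> [key1: "value"]
```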
 
+Most of the settings are applied at `runtime`, which means you don't need to restart the instance. But some settings are applied at `compile time` and require a reboot of the instance, such as:
+- all settings inside these keys:
   - `:hackney_pools`
   - `:chat`
-  - `Pleroma.Web.Endpoint`
-  - `Pleroma.Repo`
-- part settings:
-  - `Pleroma.Captcha` -> `:seconds_valid`
-  - `Pleroma.Upload` -> `:proxy_remote`
-  - `:instance` -> `:upload_limit`
+- some of the settings inside these keys:
+  - `:seconds_valid` in `Pleroma.Captcha`
+  - `:proxy_remote` in `Pleroma.Upload`
+  - `:upload_limit` in `:instance`
 
 - Params:
-  - `configs` => [
-    - `group` (string)
-    - `key` (string or string with leading `:` for atoms)
-    - `value` (string, [], {} or {"tuple": []})
-    - `delete` = true (optional, if parameter must be deleted)
-    - `subkeys` [(string with leading `:` for atoms)] (optional, works only if `delete=true` parameter is passed, otherwise will be ignored)
-  ]
-
-- Request (example):
+  - `configs` - array of config objects
+  - config object params:
+    - `group` - string (**required**)
+    - `key` - string (**required**)
+    - `value` - string, [], {} or {"tuple": []} (**required**)
+    - `delete` - true (*optional*, if the setting must be deleted)
+    - `subkeys` - array of strings (*optional*, only works when the `delete=true` parameter is passed, otherwise it is ignored)
+
+*When a value has several nested settings, you can delete only some of them by passing the `subkeys` parameter, without deleting all settings under the key.*
+```
+[subkey: val1, subkey2: val2, subkey3: val3] # initial value
+{"group": ":pleroma", "key": "some_key", "delete": true, "subkeys": [":subkey", ":subkey3"]} # JSON passed for deletion
+[subkey2: val2] # value after deletion
+```
+
+*Most settings can be partially updated by merging the old values with the new ones, except settings whose value is a plain list or is not a keyword list.*
+
+Example of a setting without a keyword list in its value:
+```elixir
+config :tesla, :adapter, Tesla.Adapter.Hackney
+```
+
+List of settings which support only full update by key:
+```elixir
+@full_key_update [
+    {:pleroma, :ecto_repos},
+    {:quack, :meta},
+    {:mime, :types},
+    {:cors_plug, [:max_age, :methods, :expose, :headers]},
+    {:auto_linker, :opts},
+    {:swarm, :node_blacklist},
+    {:logger, :backends}
+  ]
+```
+
+List of settings which support only full update by subkey:
+```elixir
+@full_subkey_update [
+    {:pleroma, :assets, :mascots},
+    {:pleroma, :emoji, :groups},
+    {:pleroma, :workers, :retries},
+    {:pleroma, :mrf_subchain, :match_actor},
+    {:pleroma, :mrf_keyword, :replace}
+  ]
+```
+
+*Settings without an explicit key must be sent as separate config objects.*
+```elixir
+config :quack,
+  level: :debug,
+  meta: [:all],
+  ...
+```
+```json
+{
+  configs: [
+    {"group": ":quack", "key": ":level", "value": ":debug"},
+    {"group": ":quack", "key": ":meta", "value": [":all"]},
+    ...
+  ]
+}
+```
+- Request:
 
 ```json
 {
   configs: [
     {
-      "group": "pleroma",
+      "group": ":pleroma",
       "key": "Pleroma.Upload",
       "value": [
         {"tuple": [":uploader", "Pleroma.Uploaders.Local"]},
@@ -763,7 +819,7 @@ Compile time settings (need instance reboot):
         {"tuple": [":proxy_opts", [
           {"tuple": [":redirect_on_failure", false]},
           {"tuple": [":max_body_length", 1048576]},
-          {"tuple": [":http": [
+          {"tuple": [":http", [
             {"tuple": [":follow_redirect", true]},
             {"tuple": [":pool", ":upload"]},
           ]]}
@@ -779,19 +835,53 @@ Compile time settings (need instance reboot):
 ```
 
 - Response:
+  - On failure:
+    - 400 Bad Request `"To use this endpoint you need to enable configuration from database."`
 ```json
 {
   configs: [
     {
-      "group": string,
-      "key": string or string with leading `:` for atoms,
-      "value": string or {} or [] or {"tuple": []}
+      "group": ":pleroma",
+      "key": "Pleroma.Upload",
+      "value": [...]
     }
   ]
 }
 ```
 
+## `GET /api/pleroma/admin/config/descriptions`
+
+### Get JSON with config descriptions
+Loads the JSON generated from `config/description.exs`.
+
+- Params: none
+- Response:
+
+```json
+[{
+  "group": ":pleroma", // string
+  "key": "ModuleName", // string
+  "type": "group", // string or list with possible values
+  "description": "Upload general settings", // string
+  "children": [
+    {
+      "key": ":uploader", // string or module name `Pleroma.Upload`
+      "type": "module",
+      "description": "Module which will be used for uploads",
+      "suggestions": ["module1", "module2"]
+    },
+    {
+      "key": ":filters",
+      "type": ["list", "module"],
+      "description": "List of filter modules for uploads",
+      "suggestions": [
+        "module1", "module2", "module3"
+      ]
+    }
+  ]
+}]
+```
 
 ## `GET /api/pleroma/admin/moderation_log`
 
 ### Get moderation log
 
docs/admin/config.md (new file, 79 lines):

# Configuring instance
You can configure your instance from the admin interface. You need an account with admin rights and a small change in the config file, which allows settings to be configured from the database:

```elixir
config :pleroma, configurable_from_database: true
```

## How it works
Settings are stored in the database and are applied at `runtime` after each change. Most of the settings take effect immediately, except for a few that need an instance reboot. Those settings are needed at `compile time`, which is why the settings are also duplicated to a file.

The file with the duplicated settings is located in `config/{env}.exported_from_db.exs` if Pleroma is run from source. For the prod environment it will be `config/prod.exported_from_db.exs`.

For releases: `/etc/pleroma/prod.exported_from_db.secret.exs` or `PLEROMA_CONFIG_PATH/prod.exported_from_db.exs`.
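To check from an attached IEx shell whether configuration from the database is currently enabled, a minimal sketch using the same `Pleroma.Config.get/1` helper that this changeset relies on:

```elixir
# true means settings are being read from the database
Pleroma.Config.get([:configurable_from_database])
```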
## How to set it up
You need to migrate your existing settings to the database. This task will migrate only the settings that were added by the user.
For example, if you added settings to the `prod.secret.exs` file, only those settings will be migrated to the database. For a release it will be `/etc/pleroma/config.exs` or `PLEROMA_CONFIG_PATH`.
You can do this with a mix task (all config files will remain untouched):

```sh tab="OTP"
./bin/pleroma_ctl config migrate_to_db
```

```sh tab="From Source"
mix pleroma.config migrate_to_db
```

Now you can change settings in the admin interface. After each save, the settings from the database are duplicated to the `config/{env}.exported_from_db.exs` file.

<span style="color:red">**ATTENTION**</span>

**<span style="color:red">Be careful while changing the settings. An inaccurate configuration change can break federation or prevent the instance from loading.</span>**

*Compile time settings, which require an instance reboot and can break instance loading:*
- all settings inside these keys:
  - `:hackney_pools`
  - `:chat`
- some of the settings inside these keys:
  - `:seconds_valid` in `Pleroma.Captcha`
  - `:proxy_remote` in `Pleroma.Upload`
  - `:upload_limit` in `:instance`

## How to dump settings from database to file

*Adding the `-d` flag will delete the migrated settings from the database table.*

```sh tab="OTP"
./bin/pleroma_ctl config migrate_from_db [-d]
```

```sh tab="From Source"
mix pleroma.config migrate_from_db [-d]
```

## How to completely remove it

1. Truncate or delete all values from the `config` table
```sql
TRUNCATE TABLE config;
```
2. Delete `config/{env}.exported_from_db.exs`.

For the `prod` env:
```bash
cd /opt/pleroma
cp config/prod.exported_from_db.exs config/exported_from_db.back
rm -rf config/prod.exported_from_db.exs
```
*If you don't want to back up the settings, you can skip the step with the `cp` command.*

3. Set `configurable_from_database` to `false`.
```elixir
config :pleroma, configurable_from_database: false
```
4. Restart the Pleroma instance
```bash
sudo service pleroma restart
```
@@ -18,11 +18,11 @@ mix pleroma.config migrate_to_db
 
 ## Transfer config from DB to `config/env.exported_from_db.secret.exs`
 
+To delete the transferred settings from the database, the optional flag `-d` can be used. `<env>` is `prod` by default.
+
 ```sh tab="OTP"
-./bin/pleroma_ctl config migrate_from_db <env>
+./bin/pleroma_ctl config migrate_from_db [--env=<env>] [-d]
 ```
 
 ```sh tab="From Source"
-mix pleroma.config migrate_from_db <env>
+mix pleroma.config migrate_from_db [--env=<env>] [-d]
 ```
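For orientation, the exported file produced by this task is a plain Elixir config file; on current Elixir versions it starts with `import Config` and contains one `config ...` entry per stored key. A sketch of what it might contain (the actual entries depend on what is stored in your database):

```elixir
# config/prod.exported_from_db.secret.exs (illustrative contents only)
import Config

config :pleroma, Pleroma.Upload,
  uploader: Pleroma.Uploaders.Local,
  filters: []
```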
@@ -70,11 +70,6 @@ You shouldn't edit the base config directly to avoid breakages and merge conflicts
 * `account_field_value_length`: An account field value maximum length (default: `2048`).
 * `external_user_synchronization`: Enabling following/followers counters synchronization for external users.
 
-!!! danger
-    This is a Work In Progress, not usable just yet
-
-* `dynamic_configuration`: Allow transferring configuration to DB with the subsequent customization from Admin api.
-
 ## Federation
 ### MRF policies
 
@@ -355,7 +350,7 @@ Available caches:
 
 * `proxy_url`: an upstream proxy to fetch posts and/or media with, (default: `nil`)
 * `send_user_agent`: should we include a user agent with HTTP requests? (default: `true`)
 * `user_agent`: what user agent should we use? (default: `:default`), must be string or `:default`
 * `adapter`: array of hackney options
 
@@ -841,3 +836,7 @@ config :auto_linker,
 ## Custom Runtime Modules (`:modules`)
 
 * `runtime_dir`: A path to custom Elixir modules (such as MRF policies).
 
+
+## :configurable_from_database
+
+Enable/disable configuration from database.
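Enabling it is a one-line change in the config file, as also shown in `docs/admin/config.md`:

```elixir
config :pleroma, configurable_from_database: true
```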
@@ -4,71 +4,144 @@
 
 defmodule Mix.Tasks.Pleroma.Config do
   use Mix.Task
 
   import Mix.Pleroma
 
+  alias Pleroma.ConfigDB
   alias Pleroma.Repo
-  alias Pleroma.Web.AdminAPI.Config
 
   @shortdoc "Manages the location of the config"
   @moduledoc File.read!("docs/administration/CLI_tasks/config.md")
 
   def run(["migrate_to_db"]) do
     start_pleroma()
-
-    if Pleroma.Config.get([:instance, :dynamic_configuration]) do
-      Application.get_all_env(:pleroma)
-      |> Enum.reject(fn {k, _v} -> k in [Pleroma.Repo, :env] end)
-      |> Enum.each(fn {k, v} ->
-        key = to_string(k) |> String.replace("Elixir.", "")
-
-        key =
-          if String.starts_with?(key, "Pleroma.") do
-            key
-          else
-            ":" <> key
-          end
-
-        {:ok, _} = Config.update_or_create(%{group: "pleroma", key: key, value: v})
-        Mix.shell().info("#{key} is migrated.")
-      end)
-
-      Mix.shell().info("Settings migrated.")
-    else
-      Mix.shell().info(
-        "Migration is not allowed by config. You can change this behavior in instance settings."
-      )
-    end
+    migrate_to_db()
   end
 
-  def run(["migrate_from_db", env, delete?]) do
+  def run(["migrate_from_db" | options]) do
     start_pleroma()
 
-    delete? = if delete? == "true", do: true, else: false
-
-    if Pleroma.Config.get([:instance, :dynamic_configuration]) do
-      config_path = "config/#{env}.exported_from_db.secret.exs"
-
-      {:ok, file} = File.open(config_path, [:write, :utf8])
-      IO.write(file, "use Mix.Config\r\n")
-
-      Repo.all(Config)
-      |> Enum.each(fn config ->
-        IO.write(
-          file,
-          "config :#{config.group}, #{config.key}, #{
-            inspect(Config.from_binary(config.value), limit: :infinity)
-          }\r\n\r\n"
-        )
-
-        if delete? do
-          {:ok, _} = Repo.delete(config)
-          Mix.shell().info("#{config.key} deleted from DB.")
-        end
-      end)
-
-      File.close(file)
-      System.cmd("mix", ["format", config_path])
-    else
-      Mix.shell().info(
-        "Migration is not allowed by config. You can change this behavior in instance settings."
+    {opts, _} =
+      OptionParser.parse!(options,
+        strict: [env: :string, delete: :boolean],
+        aliases: [d: :delete]
       )
-    end
+
+    migrate_from_db(opts)
+  end
+
+  @spec migrate_to_db(Path.t() | nil) :: any()
+  def migrate_to_db(file_path \\ nil) do
+    if Pleroma.Config.get([:configurable_from_database]) do
+      config_file =
+        if file_path do
+          file_path
+        else
+          if Pleroma.Config.get(:release) do
+            Pleroma.Config.get(:config_path)
+          else
+            "config/#{Pleroma.Config.get(:env)}.secret.exs"
+          end
+        end
+
+      do_migrate_to_db(config_file)
+    else
+      migration_error()
+    end
+  end
+
+  defp do_migrate_to_db(config_file) do
+    if File.exists?(config_file) do
+      custom_config =
+        config_file
+        |> read_file()
+        |> elem(0)
+
+      custom_config
+      |> Keyword.keys()
+      |> Enum.each(&create(&1, custom_config))
+    else
+      shell_info("To migrate settings, you must define custom settings in #{config_file}.")
+    end
+  end
+
+  defp create(group, settings) do
+    group
+    |> Pleroma.Config.Loader.filter_group(settings)
+    |> Enum.each(fn {key, value} ->
+      key = inspect(key)
+      {:ok, _} = ConfigDB.update_or_create(%{group: inspect(group), key: key, value: value})
+
+      shell_info("Settings for key #{key} migrated.")
+    end)
+
+    shell_info("Settings for group :#{group} migrated.")
+  end
+
+  defp migrate_from_db(opts) do
+    if Pleroma.Config.get([:configurable_from_database]) do
+      env = opts[:env] || "prod"
+
+      config_path =
+        if Pleroma.Config.get(:release) do
+          :config_path
+          |> Pleroma.Config.get()
+          |> Path.dirname()
+        else
+          "config"
+        end
+        |> Path.join("#{env}.exported_from_db.secret.exs")
+
+      file = File.open!(config_path, [:write, :utf8])
+
+      IO.write(file, config_header())
+
+      ConfigDB
+      |> Repo.all()
+      |> Enum.each(&write_and_delete(&1, file, opts[:delete]))
+
+      :ok = File.close(file)
+      System.cmd("mix", ["format", config_path])
+    else
+      migration_error()
+    end
+  end
+
+  defp migration_error do
+    shell_error(
+      "Migration is not allowed in config. You can change this behavior by setting `configurable_from_database` to true."
+    )
+  end
+
+  if Code.ensure_loaded?(Config.Reader) do
+    defp config_header, do: "import Config\r\n\r\n"
+    defp read_file(config_file), do: Config.Reader.read_imports!(config_file)
+  else
+    defp config_header, do: "use Mix.Config\r\n\r\n"
+    defp read_file(config_file), do: Mix.Config.eval!(config_file)
+  end
+
+  defp write_and_delete(config, file, delete?) do
+    config
+    |> write(file)
+    |> delete(delete?)
+  end
+
+  defp write(config, file) do
+    value =
+      config.value
+      |> ConfigDB.from_binary()
+      |> inspect(limit: :infinity)
+
+    IO.write(file, "config #{config.group}, #{config.key}, #{value}\r\n\r\n")
+
+    config
+  end
+
+  defp delete(config, true) do
+    {:ok, _} = Repo.delete(config)
+    shell_info("#{config.key} deleted from DB.")
+  end
+
+  defp delete(_config, _), do: :ok
 end
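For clarity, this is how the new `migrate_from_db` clause's `OptionParser.parse!/2` call interprets its arguments (standard `OptionParser` behaviour, shown with example input):

```elixir
OptionParser.parse!(["--env=dev", "-d"],
  strict: [env: :string, delete: :boolean],
  aliases: [d: :delete]
)
#=> {[env: "dev", delete: true], []}
```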
@@ -28,7 +28,7 @@ def run(_) do
   defp do_run(implementation) do
     start_pleroma()
 
-    with {descriptions, _paths} <- Mix.Config.eval!("config/description.exs"),
+    with descriptions <- Pleroma.Config.Loader.load("config/description.exs"),
          {:ok, file_path} <-
            Pleroma.Docs.Generator.process(
              implementation,
lib/pleroma/config/config_db.ex (new file, 422 lines):

@@ -0,0 +1,422 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.ConfigDB do
|
||||||
|
use Ecto.Schema
|
||||||
|
|
||||||
|
import Ecto.Changeset
|
||||||
|
import Ecto.Query
|
||||||
|
import Pleroma.Web.Gettext
|
||||||
|
|
||||||
|
alias __MODULE__
|
||||||
|
alias Pleroma.Repo
|
||||||
|
|
||||||
|
@type t :: %__MODULE__{}
|
||||||
|
|
||||||
|
@full_key_update [
|
||||||
|
{:pleroma, :ecto_repos},
|
||||||
|
{:quack, :meta},
|
||||||
|
{:mime, :types},
|
||||||
|
{:cors_plug, [:max_age, :methods, :expose, :headers]},
|
||||||
|
{:auto_linker, :opts},
|
||||||
|
{:swarm, :node_blacklist},
|
||||||
|
{:logger, :backends}
|
||||||
|
]
|
||||||
|
|
||||||
|
@full_subkey_update [
|
||||||
|
{:pleroma, :assets, :mascots},
|
||||||
|
{:pleroma, :emoji, :groups},
|
||||||
|
{:pleroma, :workers, :retries},
|
||||||
|
{:pleroma, :mrf_subchain, :match_actor},
|
||||||
|
{:pleroma, :mrf_keyword, :replace}
|
||||||
|
]
|
||||||
|
|
||||||
|
@regex ~r/^~r(?'delimiter'[\/|"'([{<]{1})(?'pattern'.+)[\/|"')\]}>]{1}(?'modifier'[uismxfU]*)/u
|
||||||
|
|
||||||
|
@delimiters ["/", "|", "\"", "'", {"(", ")"}, {"[", "]"}, {"{", "}"}, {"<", ">"}]
|
||||||
|
|
||||||
|
schema "config" do
|
||||||
|
field(:key, :string)
|
||||||
|
field(:group, :string)
|
||||||
|
field(:value, :binary)
|
||||||
|
field(:db, {:array, :string}, virtual: true, default: [])
|
||||||
|
|
||||||
|
timestamps()
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec get_all_as_keyword() :: keyword()
|
||||||
|
def get_all_as_keyword do
|
||||||
|
ConfigDB
|
||||||
|
|> select([c], {c.group, c.key, c.value})
|
||||||
|
|> Repo.all()
|
||||||
|
|> Enum.reduce([], fn {group, key, value}, acc ->
|
||||||
|
group = ConfigDB.from_string(group)
|
||||||
|
key = ConfigDB.from_string(key)
|
||||||
|
value = from_binary(value)
|
||||||
|
|
||||||
|
Keyword.update(acc, group, [{key, value}], &Keyword.merge(&1, [{key, value}]))
|
||||||
|
end)
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec get_by_params(map()) :: ConfigDB.t() | nil
|
||||||
|
def get_by_params(params), do: Repo.get_by(ConfigDB, params)
|
||||||
|
|
||||||
|
@spec changeset(ConfigDB.t(), map()) :: Changeset.t()
|
||||||
|
def changeset(config, params \\ %{}) do
|
||||||
|
params = Map.put(params, :value, transform(params[:value]))
|
||||||
|
|
||||||
|
config
|
||||||
|
|> cast(params, [:key, :group, :value])
|
||||||
|
|> validate_required([:key, :group, :value])
|
||||||
|
|> unique_constraint(:key, name: :config_group_key_index)
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec create(map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()}
|
||||||
|
def create(params) do
|
||||||
|
%ConfigDB{}
|
||||||
|
|> changeset(params)
|
||||||
|
|> Repo.insert()
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec update(ConfigDB.t(), map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()}
|
||||||
|
def update(%ConfigDB{} = config, %{value: value}) do
|
||||||
|
config
|
||||||
|
|> changeset(%{value: value})
|
||||||
|
|> Repo.update()
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec get_db_keys(ConfigDB.t()) :: [String.t()]
|
||||||
|
def get_db_keys(%ConfigDB{} = config) do
|
||||||
|
config.value
|
||||||
|
|> ConfigDB.from_binary()
|
||||||
|
|> get_db_keys(config.key)
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec get_db_keys(keyword(), any()) :: [String.t()]
|
||||||
|
def get_db_keys(value, key) do
|
||||||
|
if Keyword.keyword?(value) do
|
||||||
|
value |> Keyword.keys() |> Enum.map(&convert(&1))
|
||||||
|
else
|
||||||
|
[convert(key)]
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec merge_group(atom(), atom(), keyword(), keyword()) :: keyword()
|
||||||
|
def merge_group(group, key, old_value, new_value) do
|
||||||
|
new_keys = to_map_set(new_value)
|
||||||
|
|
||||||
|
intersect_keys =
|
||||||
|
old_value |> to_map_set() |> MapSet.intersection(new_keys) |> MapSet.to_list()
|
||||||
|
|
||||||
|
merged_value = ConfigDB.merge(old_value, new_value)
|
||||||
|
|
||||||
|
@full_subkey_update
|
||||||
|
|> Enum.map(fn
|
||||||
|
{g, k, subkey} when g == group and k == key ->
|
||||||
|
if subkey in intersect_keys, do: subkey, else: []
|
||||||
|
|
||||||
|
_ ->
|
||||||
|
[]
|
||||||
|
end)
|
||||||
|
|> List.flatten()
|
||||||
|
|> Enum.reduce(merged_value, fn subkey, acc ->
|
||||||
|
Keyword.put(acc, subkey, new_value[subkey])
|
||||||
|
end)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp to_map_set(keyword) do
|
||||||
|
keyword
|
||||||
|
|> Keyword.keys()
|
||||||
|
|> MapSet.new()
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec sub_key_full_update?(atom(), atom(), [Keyword.key()]) :: boolean()
|
||||||
|
def sub_key_full_update?(group, key, subkeys) do
|
||||||
|
Enum.any?(@full_subkey_update, fn {g, k, subkey} ->
|
||||||
|
g == group and k == key and subkey in subkeys
|
||||||
|
end)
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec merge(keyword(), keyword()) :: keyword()
|
||||||
|
def merge(config1, config2) when is_list(config1) and is_list(config2) do
|
||||||
|
Keyword.merge(config1, config2, fn _, app1, app2 ->
|
||||||
|
if Keyword.keyword?(app1) and Keyword.keyword?(app2) do
|
||||||
|
Keyword.merge(app1, app2, &deep_merge/3)
|
||||||
|
else
|
||||||
|
app2
|
||||||
|
end
|
||||||
|
end)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp deep_merge(_key, value1, value2) do
|
||||||
|
if Keyword.keyword?(value1) and Keyword.keyword?(value2) do
|
||||||
|
Keyword.merge(value1, value2, &deep_merge/3)
|
||||||
|
else
|
||||||
|
value2
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec update_or_create(map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()}
|
||||||
|
def update_or_create(params) do
|
||||||
|
search_opts = Map.take(params, [:group, :key])
|
||||||
|
|
||||||
|
with %ConfigDB{} = config <- ConfigDB.get_by_params(search_opts),
|
||||||
|
{:partial_update, true, config} <-
|
||||||
|
{:partial_update, can_be_partially_updated?(config), config},
|
||||||
|
old_value <- from_binary(config.value),
|
||||||
|
transformed_value <- do_transform(params[:value]),
|
||||||
|
{:can_be_merged, true, config} <- {:can_be_merged, is_list(transformed_value), config},
|
||||||
|
new_value <-
|
||||||
|
merge_group(
|
||||||
|
ConfigDB.from_string(config.group),
|
||||||
|
ConfigDB.from_string(config.key),
|
||||||
|
old_value,
|
||||||
|
transformed_value
|
||||||
|
) do
|
||||||
|
ConfigDB.update(config, %{value: new_value})
|
||||||
|
else
|
||||||
|
{reason, false, config} when reason in [:partial_update, :can_be_merged] ->
|
||||||
|
ConfigDB.update(config, params)
|
||||||
|
|
||||||
|
nil ->
|
||||||
|
ConfigDB.create(params)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp can_be_partially_updated?(%ConfigDB{} = config), do: not only_full_update?(config)
|
||||||
|
|
||||||
|
defp only_full_update?(%ConfigDB{} = config) do
|
||||||
|
config_group = ConfigDB.from_string(config.group)
|
||||||
|
config_key = ConfigDB.from_string(config.key)
|
||||||
|
|
||||||
|
Enum.any?(@full_key_update, fn
|
||||||
|
{group, key} when is_list(key) ->
|
||||||
|
config_group == group and config_key in key
|
||||||
|
|
||||||
|
{group, key} ->
|
||||||
|
config_group == group and config_key == key
|
||||||
|
end)
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec delete(map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()}
|
||||||
|
def delete(params) do
|
||||||
|
search_opts = Map.delete(params, :subkeys)
|
||||||
|
|
||||||
|
with %ConfigDB{} = config <- ConfigDB.get_by_params(search_opts),
|
||||||
|
{config, sub_keys} when is_list(sub_keys) <- {config, params[:subkeys]},
|
||||||
|
old_value <- from_binary(config.value),
|
||||||
|
keys <- Enum.map(sub_keys, &do_transform_string(&1)),
|
||||||
|
{:partial_remove, config, new_value} when new_value != [] <-
|
||||||
|
{:partial_remove, config, Keyword.drop(old_value, keys)} do
|
||||||
|
ConfigDB.update(config, %{value: new_value})
|
||||||
|
else
|
||||||
|
{:partial_remove, config, []} ->
|
||||||
|
Repo.delete(config)
|
||||||
|
|
||||||
|
{config, nil} ->
|
||||||
|
Repo.delete(config)
|
||||||
|
|
||||||
|
nil ->
|
||||||
|
err =
|
||||||
|
dgettext("errors", "Config with params %{params} not found", params: inspect(params))
|
||||||
|
|
||||||
|
{:error, err}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec from_binary(binary()) :: term()
|
||||||
|
def from_binary(binary), do: :erlang.binary_to_term(binary)
|
||||||
|
|
||||||
|
@spec from_binary_with_convert(binary()) :: any()
|
||||||
|
def from_binary_with_convert(binary) do
|
||||||
|
binary
|
||||||
|
|> from_binary()
|
||||||
|
|> do_convert()
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec from_string(String.t()) :: atom() | no_return()
|
||||||
|
def from_string(":" <> entity), do: String.to_existing_atom(entity)
|
||||||
|
|
||||||
|
def from_string(entity) when is_binary(entity) do
|
||||||
|
if is_module_name?(entity) do
|
||||||
|
String.to_existing_atom("Elixir.#{entity}")
|
||||||
|
else
|
||||||
|
entity
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec convert(any()) :: any()
|
||||||
|
def convert(entity), do: do_convert(entity)
|
||||||
|
|
||||||
|
defp do_convert(entity) when is_list(entity) do
|
||||||
|
for v <- entity, into: [], do: do_convert(v)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_convert(%Regex{} = entity), do: inspect(entity)
|
||||||
|
|
||||||
|
defp do_convert(entity) when is_map(entity) do
|
||||||
|
for {k, v} <- entity, into: %{}, do: {do_convert(k), do_convert(v)}
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_convert({:proxy_url, {type, :localhost, port}}) do
|
||||||
|
%{"tuple" => [":proxy_url", %{"tuple" => [do_convert(type), "localhost", port]}]}
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_convert({:proxy_url, {type, host, port}}) when is_tuple(host) do
|
||||||
|
ip =
|
||||||
|
host
|
||||||
|
|> :inet_parse.ntoa()
|
||||||
|
|> to_string()
|
||||||
|
|
||||||
|
%{
|
||||||
|
"tuple" => [
|
||||||
|
":proxy_url",
|
||||||
|
%{"tuple" => [do_convert(type), ip, port]}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_convert({:proxy_url, {type, host, port}}) do
|
||||||
|
%{
|
||||||
|
"tuple" => [
|
||||||
|
":proxy_url",
|
||||||
|
%{"tuple" => [do_convert(type), to_string(host), port]}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_convert({:partial_chain, entity}), do: %{"tuple" => [":partial_chain", inspect(entity)]}
|
||||||
|
|
||||||
|
defp do_convert(entity) when is_tuple(entity) do
|
||||||
|
value =
|
||||||
|
entity
|
||||||
|
|> Tuple.to_list()
|
||||||
|
|> do_convert()
|
||||||
|
|
||||||
|
%{"tuple" => value}
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_convert(entity) when is_boolean(entity) or is_number(entity) or is_nil(entity) do
|
||||||
|
entity
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_convert(entity)
|
||||||
|
when is_atom(entity) and entity in [:"tlsv1.1", :"tlsv1.2", :"tlsv1.3"] do
|
||||||
|
":#{entity}"
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_convert(entity) when is_atom(entity), do: inspect(entity)
|
||||||
|
|
||||||
|
defp do_convert(entity) when is_binary(entity), do: entity
|
||||||
|
|
||||||
|
@spec transform(any()) :: binary() | no_return()
|
||||||
|
def transform(entity) when is_binary(entity) or is_map(entity) or is_list(entity) do
|
||||||
|
entity
|
||||||
|
|> do_transform()
|
||||||
|
|> to_binary()
|
||||||
|
end
|
||||||
|
|
||||||
|
def transform(entity), do: to_binary(entity)
|
||||||
|
|
||||||
|
@spec transform_with_out_binary(any()) :: any()
|
||||||
|
def transform_with_out_binary(entity), do: do_transform(entity)
|
||||||
|
|
||||||
|
@spec to_binary(any()) :: binary()
|
||||||
|
def to_binary(entity), do: :erlang.term_to_binary(entity)
|
||||||
|
|
||||||
|
defp do_transform(%Regex{} = entity), do: entity
|
||||||
|
|
||||||
|
defp do_transform(%{"tuple" => [":proxy_url", %{"tuple" => [type, host, port]}]}) do
|
||||||
|
{:proxy_url, {do_transform_string(type), parse_host(host), port}}
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_transform(%{"tuple" => [":partial_chain", entity]}) do
|
||||||
|
{partial_chain, []} =
|
||||||
|
entity
|
||||||
|
|> String.replace(~r/[^\w|^{:,[|^,|^[|^\]^}|^\/|^\.|^"]^\s/, "")
|
||||||
|
|> Code.eval_string()
|
||||||
|
|
||||||
|
{:partial_chain, partial_chain}
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_transform(%{"tuple" => entity}) do
|
||||||
|
Enum.reduce(entity, {}, fn val, acc -> Tuple.append(acc, do_transform(val)) end)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_transform(entity) when is_map(entity) do
|
||||||
|
for {k, v} <- entity, into: %{}, do: {do_transform(k), do_transform(v)}
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_transform(entity) when is_list(entity) do
|
||||||
|
for v <- entity, into: [], do: do_transform(v)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_transform(entity) when is_binary(entity) do
|
||||||
|
entity
|
||||||
|
|> String.trim()
|
||||||
|
|> do_transform_string()
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_transform(entity), do: entity
|
||||||
|
|
||||||
|
defp parse_host("localhost"), do: :localhost
|
||||||
|
|
||||||
|
defp parse_host(host) do
|
||||||
|
charlist = to_charlist(host)
|
||||||
|
|
||||||
|
case :inet.parse_address(charlist) do
|
||||||
|
{:error, :einval} ->
|
||||||
|
charlist
|
||||||
|
|
||||||
|
{:ok, ip} ->
|
||||||
|
ip
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp find_valid_delimiter([], _string, _) do
|
||||||
|
raise(ArgumentError, message: "valid delimiter for Regex expression not found")
|
||||||
|
end
|
||||||
|
|
||||||
|
defp find_valid_delimiter([{leading, closing} = delimiter | others], pattern, regex_delimiter)
|
||||||
|
when is_tuple(delimiter) do
|
||||||
|
if String.contains?(pattern, closing) do
|
||||||
|
find_valid_delimiter(others, pattern, regex_delimiter)
|
||||||
|
else
|
||||||
|
{:ok, {leading, closing}}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp find_valid_delimiter([delimiter | others], pattern, regex_delimiter) do
|
||||||
|
if String.contains?(pattern, delimiter) do
|
||||||
|
find_valid_delimiter(others, pattern, regex_delimiter)
|
||||||
|
else
|
||||||
|
{:ok, {delimiter, delimiter}}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_transform_string("~r" <> _pattern = regex) do
|
||||||
|
with %{"modifier" => modifier, "pattern" => pattern, "delimiter" => regex_delimiter} <-
|
||||||
|
Regex.named_captures(@regex, regex),
|
||||||
|
{:ok, {leading, closing}} <- find_valid_delimiter(@delimiters, pattern, regex_delimiter),
|
||||||
|
{result, _} <- Code.eval_string("~r#{leading}#{pattern}#{closing}#{modifier}") do
|
||||||
|
result
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_transform_string(":" <> atom), do: String.to_atom(atom)
|
||||||
|
|
||||||
|
defp do_transform_string(value) do
|
||||||
|
if is_module_name?(value) do
|
||||||
|
String.to_existing_atom("Elixir." <> value)
|
||||||
|
else
|
||||||
|
value
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec is_module_name?(String.t()) :: boolean()
|
||||||
|
def is_module_name?(string) do
|
||||||
|
Regex.match?(~r/^(Pleroma|Phoenix|Tesla|Quack|Ueberauth)\./, string) or
|
||||||
|
string in ["Oban", "Ueberauth", "ExSyslogger"]
|
||||||
|
end
|
||||||
|
end
|
16
lib/pleroma/config/holder.ex
Normal file
16
lib/pleroma/config/holder.ex
Normal file
|
@ -0,0 +1,16 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.Config.Holder do
|
||||||
|
@config Pleroma.Config.Loader.load_and_merge()
|
||||||
|
|
||||||
|
@spec config() :: keyword()
|
||||||
|
def config, do: @config
|
||||||
|
|
||||||
|
@spec config(atom()) :: any()
|
||||||
|
def config(group), do: @config[group]
|
||||||
|
|
||||||
|
@spec config(atom(), atom()) :: any()
|
||||||
|
def config(group, key), do: @config[group][key]
|
||||||
|
end
|
59
lib/pleroma/config/loader.ex
Normal file
59
lib/pleroma/config/loader.ex
Normal file
|
@ -0,0 +1,59 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.Config.Loader do
|
||||||
|
@paths ["config/config.exs", "config/#{Mix.env()}.exs"]
|
||||||
|
|
||||||
|
@reject_keys [
|
||||||
|
Pleroma.Repo,
|
||||||
|
Pleroma.Web.Endpoint,
|
||||||
|
:env,
|
||||||
|
:configurable_from_database,
|
||||||
|
:database,
|
||||||
|
:swarm
|
||||||
|
]
|
||||||
|
|
||||||
|
if Code.ensure_loaded?(Config.Reader) do
|
||||||
|
@spec load(Path.t()) :: keyword()
|
||||||
|
def load(path), do: Config.Reader.read!(path)
|
||||||
|
|
||||||
|
defp do_merge(conf1, conf2), do: Config.Reader.merge(conf1, conf2)
|
||||||
|
else
|
||||||
|
# support for Elixir less than 1.9
|
||||||
|
@spec load(Path.t()) :: keyword()
|
||||||
|
def load(path) do
|
||||||
|
path
|
||||||
|
|> Mix.Config.eval!()
|
||||||
|
|> elem(0)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_merge(conf1, conf2), do: Mix.Config.merge(conf1, conf2)
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec load_and_merge() :: keyword()
|
||||||
|
def load_and_merge do
|
||||||
|
all_paths =
|
||||||
|
if Pleroma.Config.get(:release),
|
||||||
|
do: @paths ++ ["config/releases.exs"],
|
||||||
|
else: @paths
|
||||||
|
|
||||||
|
all_paths
|
||||||
|
|> Enum.map(&load(&1))
|
||||||
|
|> Enum.reduce([], &do_merge(&2, &1))
|
||||||
|
|> filter()
|
||||||
|
end
|
||||||
|
|
||||||
|
defp filter(configs) do
|
||||||
|
configs
|
||||||
|
|> Keyword.keys()
|
||||||
|
|> Enum.reduce([], &Keyword.put(&2, &1, filter_group(&1, configs)))
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec filter_group(atom(), keyword()) :: keyword()
|
||||||
|
def filter_group(group, configs) do
|
||||||
|
Enum.reject(configs[group], fn {key, _v} ->
|
||||||
|
key in @reject_keys or (group == :phoenix and key == :serve_endpoints)
|
||||||
|
end)
|
||||||
|
end
|
||||||
|
end
|
|
@ -4,56 +4,111 @@
|
||||||
|
|
||||||
defmodule Pleroma.Config.TransferTask do
|
defmodule Pleroma.Config.TransferTask do
|
||||||
use Task
|
use Task
|
||||||
alias Pleroma.Web.AdminAPI.Config
|
|
||||||
|
alias Pleroma.ConfigDB
|
||||||
|
alias Pleroma.Repo
|
||||||
|
|
||||||
|
require Logger
|
||||||
|
|
||||||
def start_link(_) do
|
def start_link(_) do
|
||||||
load_and_update_env()
|
load_and_update_env()
|
||||||
if Pleroma.Config.get(:env) == :test, do: Ecto.Adapters.SQL.Sandbox.checkin(Pleroma.Repo)
|
if Pleroma.Config.get(:env) == :test, do: Ecto.Adapters.SQL.Sandbox.checkin(Repo)
|
||||||
:ignore
|
:ignore
|
||||||
end
|
end
|
||||||
|
|
||||||
def load_and_update_env do
|
@spec load_and_update_env([ConfigDB.t()]) :: :ok | false
|
||||||
if Pleroma.Config.get([:instance, :dynamic_configuration]) and
|
def load_and_update_env(deleted \\ []) do
|
||||||
Ecto.Adapters.SQL.table_exists?(Pleroma.Repo, "config") do
|
with true <- Pleroma.Config.get(:configurable_from_database),
|
||||||
for_restart =
|
true <- Ecto.Adapters.SQL.table_exists?(Repo, "config"),
|
||||||
Pleroma.Repo.all(Config)
|
started_applications <- Application.started_applications() do
|
||||||
|> Enum.map(&update_env(&1))
|
|
||||||
|
|
||||||
# We need to restart applications for loaded settings take effect
|
# We need to restart applications for loaded settings take effect
|
||||||
for_restart
|
in_db = Repo.all(ConfigDB)
|
||||||
|> Enum.reject(&(&1 in [:pleroma, :ok]))
|
|
||||||
|> Enum.each(fn app ->
|
with_deleted = in_db ++ deleted
|
||||||
Application.stop(app)
|
|
||||||
:ok = Application.start(app)
|
with_deleted
|
||||||
end)
|
|> Enum.map(&merge_and_update(&1))
|
||||||
|
|> Enum.uniq()
|
||||||
|
# TODO: some problem with prometheus after restart!
|
||||||
|
|> Enum.reject(&(&1 in [:pleroma, nil, :prometheus]))
|
||||||
|
|> Enum.each(&restart(started_applications, &1))
|
||||||
|
|
||||||
|
:ok
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
defp update_env(setting) do
|
defp merge_and_update(setting) do
|
||||||
try do
|
try do
|
||||||
key =
|
key = ConfigDB.from_string(setting.key)
|
||||||
if String.starts_with?(setting.key, "Pleroma.") do
|
group = ConfigDB.from_string(setting.group)
|
||||||
"Elixir." <> setting.key
|
|
||||||
|
default = Pleroma.Config.Holder.config(group, key)
|
||||||
|
merged_value = merge_value(setting, default, group, key)
|
||||||
|
|
||||||
|
:ok = update_env(group, key, merged_value)
|
||||||
|
|
||||||
|
if group != :logger do
|
||||||
|
group
|
||||||
|
else
|
||||||
|
# change logger configuration in runtime, without restart
|
||||||
|
if Keyword.keyword?(merged_value) and
|
||||||
|
key not in [:compile_time_application, :backends, :compile_time_purge_matching] do
|
||||||
|
Logger.configure_backend(key, merged_value)
|
||||||
else
|
else
|
||||||
String.trim_leading(setting.key, ":")
|
Logger.configure([{key, merged_value}])
|
||||||
end
|
end
|
||||||
|
|
||||||
group = String.to_existing_atom(setting.group)
|
nil
|
||||||
|
end
|
||||||
Application.put_env(
|
|
||||||
group,
|
|
||||||
String.to_existing_atom(key),
|
|
||||||
Config.from_binary(setting.value)
|
|
||||||
)
|
|
||||||
|
|
||||||
group
|
|
||||||
rescue
|
rescue
|
||||||
e ->
|
error ->
|
||||||
require Logger
|
error_msg =
|
||||||
|
"updating env causes error, group: " <>
|
||||||
|
inspect(setting.group) <>
|
||||||
|
" key: " <>
|
||||||
|
inspect(setting.key) <>
|
||||||
|
" value: " <>
|
||||||
|
inspect(ConfigDB.from_binary(setting.value)) <> " error: " <> inspect(error)
|
||||||
|
|
||||||
Logger.warn(
|
Logger.warn(error_msg)
|
||||||
"updating env causes error, key: #{inspect(setting.key)}, error: #{inspect(e)}"
|
|
||||||
)
|
nil
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
defp merge_value(%{__meta__: %{state: :deleted}}, default, _group, _key), do: default
|
||||||
|
|
||||||
|
defp merge_value(setting, default, group, key) do
|
||||||
|
value = ConfigDB.from_binary(setting.value)
|
||||||
|
|
||||||
|
if can_be_merged?(default, value) do
|
||||||
|
ConfigDB.merge_group(group, key, default, value)
|
||||||
|
else
|
||||||
|
value
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp update_env(group, key, nil), do: Application.delete_env(group, key)
|
||||||
|
defp update_env(group, key, value), do: Application.put_env(group, key, value)
|
||||||
|
|
||||||
|
defp restart(started_applications, app) do
|
||||||
|
with {^app, _, _} <- List.keyfind(started_applications, app, 0),
|
||||||
|
:ok <- Application.stop(app) do
|
||||||
|
:ok = Application.start(app)
|
||||||
|
else
|
||||||
|
nil ->
|
||||||
|
Logger.warn("#{app} is not started.")
|
||||||
|
|
||||||
|
error ->
|
||||||
|
error
|
||||||
|
|> inspect()
|
||||||
|
|> Logger.warn()
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp can_be_merged?(val1, val2) when is_list(val1) and is_list(val2) do
|
||||||
|
Keyword.keyword?(val1) and Keyword.keyword?(val2)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp can_be_merged?(_val1, _val2), do: false
|
||||||
end
|
end
|
||||||
|
|
|
@ -6,68 +6,116 @@ def process(implementation, descriptions) do
|
||||||
implementation.process(descriptions)
|
implementation.process(descriptions)
|
||||||
end
|
end
|
||||||
|
|
||||||
@spec uploaders_list() :: [module()]
|
@spec list_modules_in_dir(String.t(), String.t()) :: [module()]
|
||||||
def uploaders_list do
|
def list_modules_in_dir(dir, start) do
|
||||||
{:ok, modules} = :application.get_key(:pleroma, :modules)
|
with {:ok, files} <- File.ls(dir) do
|
||||||
|
files
|
||||||
Enum.filter(modules, fn module ->
|
|> Enum.filter(&String.ends_with?(&1, ".ex"))
|
||||||
name_as_list = Module.split(module)
|
|> Enum.map(fn filename ->
|
||||||
|
module = filename |> String.trim_trailing(".ex") |> Macro.camelize()
|
||||||
List.starts_with?(name_as_list, ["Pleroma", "Uploaders"]) and
|
String.to_existing_atom(start <> module)
|
||||||
List.last(name_as_list) != "Uploader"
|
end)
|
||||||
end)
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
@spec filters_list() :: [module()]
|
@doc """
|
||||||
def filters_list do
|
Converts:
|
||||||
{:ok, modules} = :application.get_key(:pleroma, :modules)
|
- atoms to strings with leading `:`
|
(Pleroma.Docs.Generator, continued)

-    Enum.filter(modules, fn module ->
-      name_as_list = Module.split(module)
-
-      List.starts_with?(name_as_list, ["Pleroma", "Upload", "Filter"])
-    end)
-  end
-
-  @spec mrf_list() :: [module()]
-  def mrf_list do
-    {:ok, modules} = :application.get_key(:pleroma, :modules)
-
-    Enum.filter(modules, fn module ->
-      name_as_list = Module.split(module)
-
-      List.starts_with?(name_as_list, ["Pleroma", "Web", "ActivityPub", "MRF"]) and
-        length(name_as_list) > 4
-    end)
-  end
-
-  @spec richmedia_parsers() :: [module()]
-  def richmedia_parsers do
-    {:ok, modules} = :application.get_key(:pleroma, :modules)
-
-    Enum.filter(modules, fn module ->
-      name_as_list = Module.split(module)
-
-      List.starts_with?(name_as_list, ["Pleroma", "Web", "RichMedia", "Parsers"]) and
-        length(name_as_list) == 5
-    end)
-  end
+  - module names to strings, without leading `Elixir.`
+  - add humanized labels to `keys` if label is not defined, e.g. `:instance` -> `Instance`
+  """
+  @spec convert_to_strings([map()]) :: [map()]
+  def convert_to_strings(descriptions) do
+    Enum.map(descriptions, &format_entity(&1))
+  end
+
+  defp format_entity(entity) do
+    entity
+    |> format_key()
+    |> Map.put(:group, atom_to_string(entity[:group]))
+    |> format_children()
+  end
+
+  defp format_key(%{key: key} = entity) do
+    entity
+    |> Map.put(:key, atom_to_string(key))
+    |> Map.put(:label, entity[:label] || humanize(key))
+  end
+
+  defp format_key(%{group: group} = entity) do
+    Map.put(entity, :label, entity[:label] || humanize(group))
+  end
+
+  defp format_key(entity), do: entity
+
+  defp format_children(%{children: children} = entity) do
+    Map.put(entity, :children, Enum.map(children, &format_child(&1)))
+  end
+
+  defp format_children(entity), do: entity
+
+  defp format_child(%{suggestions: suggestions} = entity) do
+    entity
+    |> Map.put(:suggestions, format_suggestions(suggestions))
+    |> format_key()
+    |> format_group()
+    |> format_children()
+  end
+
+  defp format_child(entity) do
+    entity
+    |> format_key()
+    |> format_group()
+    |> format_children()
+  end
+
+  defp format_group(%{group: group} = entity) do
+    Map.put(entity, :group, format_suggestion(group))
+  end
+
+  defp format_group(entity), do: entity
+
+  defp atom_to_string(entity) when is_binary(entity), do: entity
+
+  defp atom_to_string(entity) when is_atom(entity), do: inspect(entity)
+
+  defp humanize(entity) do
+    string = inspect(entity)
+
+    if String.starts_with?(string, ":"),
+      do: Phoenix.Naming.humanize(entity),
+      else: string
+  end
+
+  defp format_suggestions([]), do: []
+
+  defp format_suggestions([suggestion | tail]) do
+    [format_suggestion(suggestion) | format_suggestions(tail)]
+  end
+
+  defp format_suggestion(entity) when is_atom(entity) do
+    atom_to_string(entity)
+  end
+
+  defp format_suggestion([head | tail] = entity) when is_list(entity) do
+    [format_suggestion(head) | format_suggestions(tail)]
+  end
+
+  defp format_suggestion(entity) when is_tuple(entity) do
+    format_suggestions(Tuple.to_list(entity)) |> List.to_tuple()
+  end
+
+  defp format_suggestion(entity), do: entity
 end

 defimpl Jason.Encoder, for: Tuple do
-  def encode(tuple, opts) do
-    Jason.Encode.list(Tuple.to_list(tuple), opts)
-  end
+  def encode(tuple, opts), do: Jason.Encode.list(Tuple.to_list(tuple), opts)
 end

 defimpl Jason.Encoder, for: [Regex, Function] do
-  def encode(term, opts) do
-    Jason.Encode.string(inspect(term), opts)
-  end
+  def encode(term, opts), do: Jason.Encode.string(inspect(term), opts)
 end

 defimpl String.Chars, for: Regex do
-  def to_string(term) do
-    inspect(term)
-  end
+  def to_string(term), do: inspect(term)
 end
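For orientation, a minimal sketch of what the new convert_to_strings/1 produces for a hand-written description entry (the input below is illustrative, not taken from config/description.exs):

Pleroma.Docs.Generator.convert_to_strings([
  %{
    group: :pleroma,
    key: :instance,
    type: :group,
    children: [%{key: :name, type: :string, suggestions: ["Pleroma"]}]
  }
])
# => [
#      %{
#        group: ":pleroma",
#        key: ":instance",
#        label: "Instance",
#        type: :group,
#        children: [%{key: ":name", label: "Name", type: :string, suggestions: ["Pleroma"]}]
#      }
#    ]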
@@ -3,18 +3,22 @@ defmodule Pleroma.Docs.JSON do
   @spec process(keyword()) :: {:ok, String.t()}
   def process(descriptions) do
-    config_path = "docs/generate_config.json"
-
-    with {:ok, file} <- File.open(config_path, [:write, :utf8]),
-         json <- generate_json(descriptions),
+    with path <- "docs/generated_config.json",
+         {:ok, file} <- File.open(path, [:write, :utf8]),
+         formatted_descriptions <-
+           Pleroma.Docs.Generator.convert_to_strings(descriptions),
+         json <- Jason.encode!(formatted_descriptions),
          :ok <- IO.write(file, json),
          :ok <- File.close(file) do
-      {:ok, config_path}
+      {:ok, path}
     end
   end

-  @spec generate_json([keyword()]) :: String.t()
-  def generate_json(descriptions) do
-    Jason.encode!(descriptions)
+  def compile do
+    with config <- Pleroma.Config.Loader.load("config/description.exs") do
+      config[:pleroma][:config_description]
+      |> Pleroma.Docs.Generator.convert_to_strings()
+      |> Jason.encode!()
+    end
   end
 end
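Usage sketch for the two entry points (both assume config/description.exs is present in the working directory): compile/0 returns the descriptions as a JSON string and is evaluated at compile time by the admin controller below, while process/1 writes the same JSON to docs/generated_config.json and returns the path.

descriptions =
  Pleroma.Config.Loader.load("config/description.exs")[:pleroma][:config_description]

json = Pleroma.Docs.JSON.compile()
# json is a binary, kept in @descriptions_json in the controller

{:ok, "docs/generated_config.json"} = Pleroma.Docs.JSON.process(descriptions)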
@@ -4,7 +4,11 @@
 defmodule Pleroma.Web.AdminAPI.AdminAPIController do
   use Pleroma.Web, :controller

+  import Pleroma.Web.ControllerHelper, only: [json_response: 3]
+
   alias Pleroma.Activity
+  alias Pleroma.ConfigDB
   alias Pleroma.ModerationLog
   alias Pleroma.Plugs.OAuthScopesPlug
   alias Pleroma.ReportNote
@@ -14,7 +18,6 @@ defmodule Pleroma.Web.AdminAPI.AdminAPIController do
   alias Pleroma.Web.ActivityPub.Relay
   alias Pleroma.Web.ActivityPub.Utils
   alias Pleroma.Web.AdminAPI.AccountView
-  alias Pleroma.Web.AdminAPI.Config
   alias Pleroma.Web.AdminAPI.ConfigView
   alias Pleroma.Web.AdminAPI.ModerationLogView
   alias Pleroma.Web.AdminAPI.Report
@@ -25,10 +28,11 @@ defmodule Pleroma.Web.AdminAPI.AdminAPIController do
   alias Pleroma.Web.MastodonAPI.StatusView
   alias Pleroma.Web.Router

-  import Pleroma.Web.ControllerHelper, only: [json_response: 3]
-
   require Logger

+  @descriptions_json Pleroma.Docs.JSON.compile()
+  @users_page_size 50
+
   plug(
     OAuthScopesPlug,
     %{scopes: ["read:accounts"], admin: true}
@@ -93,7 +97,7 @@ defmodule Pleroma.Web.AdminAPI.AdminAPIController do
   plug(
     OAuthScopesPlug,
     %{scopes: ["read"], admin: true}
-    when action in [:config_show, :migrate_to_db, :migrate_from_db, :list_log]
+    when action in [:config_show, :migrate_from_db, :list_log]
   )

   plug(
@@ -102,8 +106,6 @@ defmodule Pleroma.Web.AdminAPI.AdminAPIController do
     when action == :config_update
   )

-  @users_page_size 50
-
   action_fallback(:errors)

   def user_delete(%{assigns: %{user: admin}} = conn, %{"nickname" => nickname}) do
@@ -785,49 +787,132 @@ def list_log(conn, params) do
     |> render("index.json", %{log: log})
   end

-  def migrate_to_db(conn, _params) do
-    Mix.Tasks.Pleroma.Config.run(["migrate_to_db"])
-    json(conn, %{})
+  def config_descriptions(conn, _params) do
+    conn
+    |> Plug.Conn.put_resp_content_type("application/json")
+    |> Plug.Conn.send_resp(200, @descriptions_json)
   end

   def migrate_from_db(conn, _params) do
-    Mix.Tasks.Pleroma.Config.run(["migrate_from_db", Pleroma.Config.get(:env), "true"])
-    json(conn, %{})
+    with :ok <- configurable_from_database(conn) do
+      Mix.Tasks.Pleroma.Config.run([
+        "migrate_from_db",
+        "--env",
+        to_string(Pleroma.Config.get(:env)),
+        "-d"
+      ])
+
+      json(conn, %{})
+    end
+  end
+
+  def config_show(conn, %{"only_db" => true}) do
+    with :ok <- configurable_from_database(conn) do
+      configs = Pleroma.Repo.all(ConfigDB)
+
+      if configs == [] do
+        errors(
+          conn,
+          {:error, "To use configuration from database migrate your settings to database."}
+        )
+      else
+        conn
+        |> put_view(ConfigView)
+        |> render("index.json", %{configs: configs})
+      end
+    end
   end

   def config_show(conn, _params) do
-    configs = Pleroma.Repo.all(Config)
+    with :ok <- configurable_from_database(conn) do
+      configs = ConfigDB.get_all_as_keyword()

-    conn
-    |> put_view(ConfigView)
-    |> render("index.json", %{configs: configs})
+      if configs == [] do
+        errors(
+          conn,
+          {:error, "To use configuration from database migrate your settings to database."}
+        )
+      else
+        merged =
+          Pleroma.Config.Holder.config()
+          |> ConfigDB.merge(configs)
+          |> Enum.map(fn {group, values} ->
+            Enum.map(values, fn {key, value} ->
+              db =
+                if configs[group][key] do
+                  ConfigDB.get_db_keys(configs[group][key], key)
+                end
+
+              db_value = configs[group][key]
+
+              merged_value =
+                if !is_nil(db_value) and Keyword.keyword?(db_value) and
+                     ConfigDB.sub_key_full_update?(group, key, Keyword.keys(db_value)) do
+                  ConfigDB.merge_group(group, key, value, db_value)
+                else
+                  value
+                end
+
+              setting = %{
+                group: ConfigDB.convert(group),
+                key: ConfigDB.convert(key),
+                value: ConfigDB.convert(merged_value)
+              }
+
+              if db, do: Map.put(setting, :db, db), else: setting
+            end)
+          end)
+          |> List.flatten()
+
+        json(conn, %{configs: merged})
+      end
+    end
   end

   def config_update(conn, %{"configs" => configs}) do
-    updated =
-      if Pleroma.Config.get([:instance, :dynamic_configuration]) do
-        updated =
-          Enum.map(configs, fn
-            %{"group" => group, "key" => key, "delete" => "true"} = params ->
-              {:ok, config} = Config.delete(%{group: group, key: key, subkeys: params["subkeys"]})
-              config
-
-            %{"group" => group, "key" => key, "value" => value} ->
-              {:ok, config} = Config.update_or_create(%{group: group, key: key, value: value})
-              config
-          end)
-          |> Enum.reject(&is_nil(&1))
-
-        Pleroma.Config.TransferTask.load_and_update_env()
-        Mix.Tasks.Pleroma.Config.run(["migrate_from_db", Pleroma.Config.get(:env), "false"])
-        updated
-      else
-        []
-      end
-
-    conn
-    |> put_view(ConfigView)
-    |> render("index.json", %{configs: updated})
+    with :ok <- configurable_from_database(conn) do
+      {_errors, results} =
+        Enum.map(configs, fn
+          %{"group" => group, "key" => key, "delete" => true} = params ->
+            ConfigDB.delete(%{group: group, key: key, subkeys: params["subkeys"]})
+
+          %{"group" => group, "key" => key, "value" => value} ->
+            ConfigDB.update_or_create(%{group: group, key: key, value: value})
+        end)
+        |> Enum.split_with(fn result -> elem(result, 0) == :error end)
+
+      {deleted, updated} =
+        results
+        |> Enum.map(fn {:ok, config} ->
+          Map.put(config, :db, ConfigDB.get_db_keys(config))
+        end)
+        |> Enum.split_with(fn config ->
+          Ecto.get_meta(config, :state) == :deleted
+        end)
+
+      Pleroma.Config.TransferTask.load_and_update_env(deleted)
+
+      Mix.Tasks.Pleroma.Config.run([
+        "migrate_from_db",
+        "--env",
+        to_string(Pleroma.Config.get(:env))
+      ])
+
+      conn
+      |> put_view(ConfigView)
+      |> render("index.json", %{configs: updated})
+    end
+  end
+
+  defp configurable_from_database(conn) do
+    if Pleroma.Config.get(:configurable_from_database) do
+      :ok
+    else
+      errors(
+        conn,
+        {:error, "To use this endpoint you need to enable configuration from database."}
+      )
+    end
   end

   def reload_emoji(conn, _params) do
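A rough usage sketch of the endpoints above, written in Phoenix.ConnTest style (paths follow the router change further down; the /api/pleroma/admin prefix, the admin user and the exact payload are assumptions for illustration). All of them require `config :pleroma, configurable_from_database: true`, otherwise configurable_from_database/1 renders the "enable configuration from database" error.

# illustrative only: `admin` is an admin user, build_conn/0 comes from Phoenix.ConnTest
conn = build_conn() |> assign(:user, admin)

conn |> get("/api/pleroma/admin/config/descriptions") |> json_response(200)
conn |> get("/api/pleroma/admin/config", %{"only_db" => true}) |> json_response(200)

conn
|> post("/api/pleroma/admin/config", %{
  "configs" => [
    %{"group" => ":pleroma", "key" => ":instance", "value" => [%{"tuple" => [":name", "My instance"]}]}
  ]
})
|> json_response(200)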
|
@ -1,182 +0,0 @@
|
||||||
# Pleroma: A lightweight social networking server
|
|
||||||
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
|
|
||||||
# SPDX-License-Identifier: AGPL-3.0-only
|
|
||||||
|
|
||||||
defmodule Pleroma.Web.AdminAPI.Config do
|
|
||||||
use Ecto.Schema
|
|
||||||
import Ecto.Changeset
|
|
||||||
import Pleroma.Web.Gettext
|
|
||||||
alias __MODULE__
|
|
||||||
alias Pleroma.Repo
|
|
||||||
|
|
||||||
@type t :: %__MODULE__{}
|
|
||||||
|
|
||||||
schema "config" do
|
|
||||||
field(:key, :string)
|
|
||||||
field(:group, :string)
|
|
||||||
field(:value, :binary)
|
|
||||||
|
|
||||||
timestamps()
|
|
||||||
end
|
|
||||||
|
|
||||||
@spec get_by_params(map()) :: Config.t() | nil
|
|
||||||
def get_by_params(params), do: Repo.get_by(Config, params)
|
|
||||||
|
|
||||||
@spec changeset(Config.t(), map()) :: Changeset.t()
|
|
||||||
def changeset(config, params \\ %{}) do
|
|
||||||
config
|
|
||||||
|> cast(params, [:key, :group, :value])
|
|
||||||
|> validate_required([:key, :group, :value])
|
|
||||||
|> unique_constraint(:key, name: :config_group_key_index)
|
|
||||||
end
|
|
||||||
|
|
||||||
@spec create(map()) :: {:ok, Config.t()} | {:error, Changeset.t()}
|
|
||||||
def create(params) do
|
|
||||||
%Config{}
|
|
||||||
|> changeset(Map.put(params, :value, transform(params[:value])))
|
|
||||||
|> Repo.insert()
|
|
||||||
end
|
|
||||||
|
|
||||||
@spec update(Config.t(), map()) :: {:ok, Config} | {:error, Changeset.t()}
|
|
||||||
def update(%Config{} = config, %{value: value}) do
|
|
||||||
config
|
|
||||||
|> change(value: transform(value))
|
|
||||||
|> Repo.update()
|
|
||||||
end
|
|
||||||
|
|
||||||
@spec update_or_create(map()) :: {:ok, Config.t()} | {:error, Changeset.t()}
|
|
||||||
def update_or_create(params) do
|
|
||||||
with %Config{} = config <- Config.get_by_params(Map.take(params, [:group, :key])) do
|
|
||||||
Config.update(config, params)
|
|
||||||
else
|
|
||||||
nil -> Config.create(params)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
@spec delete(map()) :: {:ok, Config.t()} | {:error, Changeset.t()}
|
|
||||||
def delete(params) do
|
|
||||||
with %Config{} = config <- Config.get_by_params(Map.delete(params, :subkeys)) do
|
|
||||||
if params[:subkeys] do
|
|
||||||
updated_value =
|
|
||||||
Keyword.drop(
|
|
||||||
:erlang.binary_to_term(config.value),
|
|
||||||
Enum.map(params[:subkeys], &do_transform_string(&1))
|
|
||||||
)
|
|
||||||
|
|
||||||
Config.update(config, %{value: updated_value})
|
|
||||||
else
|
|
||||||
Repo.delete(config)
|
|
||||||
{:ok, nil}
|
|
||||||
end
|
|
||||||
else
|
|
||||||
nil ->
|
|
||||||
err =
|
|
||||||
dgettext("errors", "Config with params %{params} not found", params: inspect(params))
|
|
||||||
|
|
||||||
{:error, err}
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
@spec from_binary(binary()) :: term()
|
|
||||||
def from_binary(binary), do: :erlang.binary_to_term(binary)
|
|
||||||
|
|
||||||
@spec from_binary_with_convert(binary()) :: any()
|
|
||||||
def from_binary_with_convert(binary) do
|
|
||||||
from_binary(binary)
|
|
||||||
|> do_convert()
|
|
||||||
end
|
|
||||||
|
|
||||||
defp do_convert(entity) when is_list(entity) do
|
|
||||||
for v <- entity, into: [], do: do_convert(v)
|
|
||||||
end
|
|
||||||
|
|
||||||
defp do_convert(%Regex{} = entity), do: inspect(entity)
|
|
||||||
|
|
||||||
defp do_convert(entity) when is_map(entity) do
|
|
||||||
for {k, v} <- entity, into: %{}, do: {do_convert(k), do_convert(v)}
|
|
||||||
end
|
|
||||||
|
|
||||||
defp do_convert({:dispatch, [entity]}), do: %{"tuple" => [":dispatch", [inspect(entity)]]}
|
|
||||||
defp do_convert({:partial_chain, entity}), do: %{"tuple" => [":partial_chain", inspect(entity)]}
|
|
||||||
|
|
||||||
defp do_convert(entity) when is_tuple(entity),
|
|
||||||
do: %{"tuple" => do_convert(Tuple.to_list(entity))}
|
|
||||||
|
|
||||||
defp do_convert(entity) when is_boolean(entity) or is_number(entity) or is_nil(entity),
|
|
||||||
do: entity
|
|
||||||
|
|
||||||
defp do_convert(entity) when is_atom(entity) do
|
|
||||||
string = to_string(entity)
|
|
||||||
|
|
||||||
if String.starts_with?(string, "Elixir."),
|
|
||||||
do: do_convert(string),
|
|
||||||
else: ":" <> string
|
|
||||||
end
|
|
||||||
|
|
||||||
defp do_convert("Elixir." <> module_name), do: module_name
|
|
||||||
|
|
||||||
defp do_convert(entity) when is_binary(entity), do: entity
|
|
||||||
|
|
||||||
@spec transform(any()) :: binary()
|
|
||||||
def transform(entity) when is_binary(entity) or is_map(entity) or is_list(entity) do
|
|
||||||
:erlang.term_to_binary(do_transform(entity))
|
|
||||||
end
|
|
||||||
|
|
||||||
def transform(entity), do: :erlang.term_to_binary(entity)
|
|
||||||
|
|
||||||
defp do_transform(%Regex{} = entity), do: entity
|
|
||||||
|
|
||||||
defp do_transform(%{"tuple" => [":dispatch", [entity]]}) do
|
|
||||||
{dispatch_settings, []} = do_eval(entity)
|
|
||||||
{:dispatch, [dispatch_settings]}
|
|
||||||
end
|
|
||||||
|
|
||||||
defp do_transform(%{"tuple" => [":partial_chain", entity]}) do
|
|
||||||
{partial_chain, []} = do_eval(entity)
|
|
||||||
{:partial_chain, partial_chain}
|
|
||||||
end
|
|
||||||
|
|
||||||
defp do_transform(%{"tuple" => entity}) do
|
|
||||||
Enum.reduce(entity, {}, fn val, acc -> Tuple.append(acc, do_transform(val)) end)
|
|
||||||
end
|
|
||||||
|
|
||||||
defp do_transform(entity) when is_map(entity) do
|
|
||||||
for {k, v} <- entity, into: %{}, do: {do_transform(k), do_transform(v)}
|
|
||||||
end
|
|
||||||
|
|
||||||
defp do_transform(entity) when is_list(entity) do
|
|
||||||
for v <- entity, into: [], do: do_transform(v)
|
|
||||||
end
|
|
||||||
|
|
||||||
defp do_transform(entity) when is_binary(entity) do
|
|
||||||
String.trim(entity)
|
|
||||||
|> do_transform_string()
|
|
||||||
end
|
|
||||||
|
|
||||||
defp do_transform(entity), do: entity
|
|
||||||
|
|
||||||
defp do_transform_string("~r/" <> pattern) do
|
|
||||||
modificator = String.split(pattern, "/") |> List.last()
|
|
||||||
pattern = String.trim_trailing(pattern, "/" <> modificator)
|
|
||||||
|
|
||||||
case modificator do
|
|
||||||
"" -> ~r/#{pattern}/
|
|
||||||
"i" -> ~r/#{pattern}/i
|
|
||||||
"u" -> ~r/#{pattern}/u
|
|
||||||
"s" -> ~r/#{pattern}/s
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
defp do_transform_string(":" <> atom), do: String.to_atom(atom)
|
|
||||||
|
|
||||||
defp do_transform_string(value) do
|
|
||||||
if String.starts_with?(value, "Pleroma") or String.starts_with?(value, "Phoenix"),
|
|
||||||
do: String.to_existing_atom("Elixir." <> value),
|
|
||||||
else: value
|
|
||||||
end
|
|
||||||
|
|
||||||
defp do_eval(entity) do
|
|
||||||
cleaned_string = String.replace(entity, ~r/[^\w|^{:,[|^,|^[|^\]^}|^\/|^\.|^"]^\s/, "")
|
|
||||||
Code.eval_string(cleaned_string, [], requires: [], macros: [])
|
|
||||||
end
|
|
||||||
end
|
|
|
@@ -12,10 +12,16 @@ def render("index.json", %{configs: configs}) do
   end

   def render("show.json", %{config: config}) do
-    %{
+    map = %{
       key: config.key,
       group: config.group,
-      value: Pleroma.Web.AdminAPI.Config.from_binary_with_convert(config.value)
+      value: Pleroma.ConfigDB.from_binary_with_convert(config.value)
     }
+
+    if config.db != [] do
+      Map.put(map, :db, config.db)
+    else
+      map
+    end
   end
 end
@@ -195,7 +195,7 @@ defmodule Pleroma.Web.Router do
       get("/config", AdminAPIController, :config_show)
       post("/config", AdminAPIController, :config_update)
-      get("/config/migrate_to_db", AdminAPIController, :migrate_to_db)
+      get("/config/descriptions", AdminAPIController, :config_descriptions)
       get("/config/migrate_from_db", AdminAPIController, :migrate_from_db)

       get("/moderation_log", AdminAPIController, :list_log)
@@ -20,8 +20,7 @@ config :pleroma, :instance,
   email: "<%= email %>",
   notify_email: "<%= notify_email %>",
   limit: 5000,
-  registrations_open: true,
-  dynamic_configuration: <%= db_configurable? %>
+  registrations_open: true

 config :pleroma, :media_proxy,
   enabled: false,
@@ -70,3 +69,5 @@ config :pleroma, Pleroma.Uploaders.Local, uploads: "<%= uploads_dir %>"
 # host: "s3.wasabisys.com"

 config :joken, default_signer: "<%= jwt_secret %>"
+
+config :pleroma, configurable_from_database: <%= db_configurable? %>
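On an already-installed instance the same switch is a single line in the secret config; when the installer answers the "configurable from database" prompt with yes, the rendered line reads (sketch):

config :pleroma, configurable_from_database: true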
704  test/config/config_db_test.exs  (new file)
@@ -0,0 +1,704 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.ConfigDBTest do
|
||||||
|
use Pleroma.DataCase, async: true
|
||||||
|
import Pleroma.Factory
|
||||||
|
alias Pleroma.ConfigDB
|
||||||
|
|
||||||
|
test "get_by_key/1" do
|
||||||
|
config = insert(:config)
|
||||||
|
insert(:config)
|
||||||
|
|
||||||
|
assert config == ConfigDB.get_by_params(%{group: config.group, key: config.key})
|
||||||
|
end
|
||||||
|
|
||||||
|
test "create/1" do
|
||||||
|
{:ok, config} = ConfigDB.create(%{group: ":pleroma", key: ":some_key", value: "some_value"})
|
||||||
|
assert config == ConfigDB.get_by_params(%{group: ":pleroma", key: ":some_key"})
|
||||||
|
end
|
||||||
|
|
||||||
|
test "update/1" do
|
||||||
|
config = insert(:config)
|
||||||
|
{:ok, updated} = ConfigDB.update(config, %{value: "some_value"})
|
||||||
|
loaded = ConfigDB.get_by_params(%{group: config.group, key: config.key})
|
||||||
|
assert loaded == updated
|
||||||
|
end
|
||||||
|
|
||||||
|
test "get_all_as_keyword/0" do
|
||||||
|
saved = insert(:config)
|
||||||
|
insert(:config, group: ":quack", key: ":level", value: ConfigDB.to_binary(:info))
|
||||||
|
insert(:config, group: ":quack", key: ":meta", value: ConfigDB.to_binary([:none]))
|
||||||
|
|
||||||
|
insert(:config,
|
||||||
|
group: ":quack",
|
||||||
|
key: ":webhook_url",
|
||||||
|
value: ConfigDB.to_binary("https://hooks.slack.com/services/KEY/some_val")
|
||||||
|
)
|
||||||
|
|
||||||
|
config = ConfigDB.get_all_as_keyword()
|
||||||
|
|
||||||
|
assert config[:pleroma] == [
|
||||||
|
{ConfigDB.from_string(saved.key), ConfigDB.from_binary(saved.value)}
|
||||||
|
]
|
||||||
|
|
||||||
|
assert config[:quack] == [
|
||||||
|
level: :info,
|
||||||
|
meta: [:none],
|
||||||
|
webhook_url: "https://hooks.slack.com/services/KEY/some_val"
|
||||||
|
]
|
||||||
|
end
|
||||||
|
|
||||||
|
describe "update_or_create/1" do
|
||||||
|
test "common" do
|
||||||
|
config = insert(:config)
|
||||||
|
key2 = "another_key"
|
||||||
|
|
||||||
|
params = [
|
||||||
|
%{group: "pleroma", key: key2, value: "another_value"},
|
||||||
|
%{group: config.group, key: config.key, value: "new_value"}
|
||||||
|
]
|
||||||
|
|
||||||
|
assert Repo.all(ConfigDB) |> length() == 1
|
||||||
|
|
||||||
|
Enum.each(params, &ConfigDB.update_or_create(&1))
|
||||||
|
|
||||||
|
assert Repo.all(ConfigDB) |> length() == 2
|
||||||
|
|
||||||
|
config1 = ConfigDB.get_by_params(%{group: config.group, key: config.key})
|
||||||
|
config2 = ConfigDB.get_by_params(%{group: "pleroma", key: key2})
|
||||||
|
|
||||||
|
assert config1.value == ConfigDB.transform("new_value")
|
||||||
|
assert config2.value == ConfigDB.transform("another_value")
|
||||||
|
end
|
||||||
|
|
||||||
|
test "partial update" do
|
||||||
|
config = insert(:config, value: ConfigDB.to_binary(key1: "val1", key2: :val2))
|
||||||
|
|
||||||
|
{:ok, _config} =
|
||||||
|
ConfigDB.update_or_create(%{
|
||||||
|
group: config.group,
|
||||||
|
key: config.key,
|
||||||
|
value: [key1: :val1, key3: :val3]
|
||||||
|
})
|
||||||
|
|
||||||
|
updated = ConfigDB.get_by_params(%{group: config.group, key: config.key})
|
||||||
|
|
||||||
|
value = ConfigDB.from_binary(updated.value)
|
||||||
|
assert length(value) == 3
|
||||||
|
assert value[:key1] == :val1
|
||||||
|
assert value[:key2] == :val2
|
||||||
|
assert value[:key3] == :val3
|
||||||
|
end
|
||||||
|
|
||||||
|
test "deep merge" do
|
||||||
|
config = insert(:config, value: ConfigDB.to_binary(key1: "val1", key2: [k1: :v1, k2: "v2"]))
|
||||||
|
|
||||||
|
{:ok, config} =
|
||||||
|
ConfigDB.update_or_create(%{
|
||||||
|
group: config.group,
|
||||||
|
key: config.key,
|
||||||
|
value: [key1: :val1, key2: [k2: :v2, k3: :v3], key3: :val3]
|
||||||
|
})
|
||||||
|
|
||||||
|
updated = ConfigDB.get_by_params(%{group: config.group, key: config.key})
|
||||||
|
|
||||||
|
assert config.value == updated.value
|
||||||
|
|
||||||
|
value = ConfigDB.from_binary(updated.value)
|
||||||
|
assert value[:key1] == :val1
|
||||||
|
assert value[:key2] == [k1: :v1, k2: :v2, k3: :v3]
|
||||||
|
assert value[:key3] == :val3
|
||||||
|
end
|
||||||
|
|
||||||
|
test "only full update for some keys" do
|
||||||
|
config1 = insert(:config, key: ":ecto_repos", value: ConfigDB.to_binary(repo: Pleroma.Repo))
|
||||||
|
|
||||||
|
config2 =
|
||||||
|
insert(:config, group: ":cors_plug", key: ":max_age", value: ConfigDB.to_binary(18))
|
||||||
|
|
||||||
|
{:ok, _config} =
|
||||||
|
ConfigDB.update_or_create(%{
|
||||||
|
group: config1.group,
|
||||||
|
key: config1.key,
|
||||||
|
value: [another_repo: [Pleroma.Repo]]
|
||||||
|
})
|
||||||
|
|
||||||
|
{:ok, _config} =
|
||||||
|
ConfigDB.update_or_create(%{
|
||||||
|
group: config2.group,
|
||||||
|
key: config2.key,
|
||||||
|
value: 777
|
||||||
|
})
|
||||||
|
|
||||||
|
updated1 = ConfigDB.get_by_params(%{group: config1.group, key: config1.key})
|
||||||
|
updated2 = ConfigDB.get_by_params(%{group: config2.group, key: config2.key})
|
||||||
|
|
||||||
|
assert ConfigDB.from_binary(updated1.value) == [another_repo: [Pleroma.Repo]]
|
||||||
|
assert ConfigDB.from_binary(updated2.value) == 777
|
||||||
|
end
|
||||||
|
|
||||||
|
test "full update if value is not keyword" do
|
||||||
|
config =
|
||||||
|
insert(:config,
|
||||||
|
group: ":tesla",
|
||||||
|
key: ":adapter",
|
||||||
|
value: ConfigDB.to_binary(Tesla.Adapter.Hackney)
|
||||||
|
)
|
||||||
|
|
||||||
|
{:ok, _config} =
|
||||||
|
ConfigDB.update_or_create(%{
|
||||||
|
group: config.group,
|
||||||
|
key: config.key,
|
||||||
|
value: Tesla.Adapter.Httpc
|
||||||
|
})
|
||||||
|
|
||||||
|
updated = ConfigDB.get_by_params(%{group: config.group, key: config.key})
|
||||||
|
|
||||||
|
assert ConfigDB.from_binary(updated.value) == Tesla.Adapter.Httpc
|
||||||
|
end
|
||||||
|
|
||||||
|
test "only full update for some subkeys" do
|
||||||
|
config1 =
|
||||||
|
insert(:config,
|
||||||
|
key: ":emoji",
|
||||||
|
value: ConfigDB.to_binary(groups: [a: 1, b: 2], key: [a: 1])
|
||||||
|
)
|
||||||
|
|
||||||
|
config2 =
|
||||||
|
insert(:config,
|
||||||
|
key: ":assets",
|
||||||
|
value: ConfigDB.to_binary(mascots: [a: 1, b: 2], key: [a: 1])
|
||||||
|
)
|
||||||
|
|
||||||
|
{:ok, _config} =
|
||||||
|
ConfigDB.update_or_create(%{
|
||||||
|
group: config1.group,
|
||||||
|
key: config1.key,
|
||||||
|
value: [groups: [c: 3, d: 4], key: [b: 2]]
|
||||||
|
})
|
||||||
|
|
||||||
|
{:ok, _config} =
|
||||||
|
ConfigDB.update_or_create(%{
|
||||||
|
group: config2.group,
|
||||||
|
key: config2.key,
|
||||||
|
value: [mascots: [c: 3, d: 4], key: [b: 2]]
|
||||||
|
})
|
||||||
|
|
||||||
|
updated1 = ConfigDB.get_by_params(%{group: config1.group, key: config1.key})
|
||||||
|
updated2 = ConfigDB.get_by_params(%{group: config2.group, key: config2.key})
|
||||||
|
|
||||||
|
assert ConfigDB.from_binary(updated1.value) == [groups: [c: 3, d: 4], key: [a: 1, b: 2]]
|
||||||
|
assert ConfigDB.from_binary(updated2.value) == [mascots: [c: 3, d: 4], key: [a: 1, b: 2]]
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
describe "delete/1" do
|
||||||
|
test "error on deleting non existing setting" do
|
||||||
|
{:error, error} = ConfigDB.delete(%{group: ":pleroma", key: ":key"})
|
||||||
|
assert error =~ "Config with params %{group: \":pleroma\", key: \":key\"} not found"
|
||||||
|
end
|
||||||
|
|
||||||
|
test "full delete" do
|
||||||
|
config = insert(:config)
|
||||||
|
{:ok, deleted} = ConfigDB.delete(%{group: config.group, key: config.key})
|
||||||
|
assert Ecto.get_meta(deleted, :state) == :deleted
|
||||||
|
refute ConfigDB.get_by_params(%{group: config.group, key: config.key})
|
||||||
|
end
|
||||||
|
|
||||||
|
test "partial subkeys delete" do
|
||||||
|
config = insert(:config, value: ConfigDB.to_binary(groups: [a: 1, b: 2], key: [a: 1]))
|
||||||
|
|
||||||
|
{:ok, deleted} =
|
||||||
|
ConfigDB.delete(%{group: config.group, key: config.key, subkeys: [":groups"]})
|
||||||
|
|
||||||
|
assert Ecto.get_meta(deleted, :state) == :loaded
|
||||||
|
|
||||||
|
assert deleted.value == ConfigDB.to_binary(key: [a: 1])
|
||||||
|
|
||||||
|
updated = ConfigDB.get_by_params(%{group: config.group, key: config.key})
|
||||||
|
|
||||||
|
assert updated.value == deleted.value
|
||||||
|
end
|
||||||
|
|
||||||
|
test "full delete if remaining value after subkeys deletion is empty list" do
|
||||||
|
config = insert(:config, value: ConfigDB.to_binary(groups: [a: 1, b: 2]))
|
||||||
|
|
||||||
|
{:ok, deleted} =
|
||||||
|
ConfigDB.delete(%{group: config.group, key: config.key, subkeys: [":groups"]})
|
||||||
|
|
||||||
|
assert Ecto.get_meta(deleted, :state) == :deleted
|
||||||
|
|
||||||
|
refute ConfigDB.get_by_params(%{group: config.group, key: config.key})
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
describe "transform/1" do
|
||||||
|
test "string" do
|
||||||
|
binary = ConfigDB.transform("value as string")
|
||||||
|
assert binary == :erlang.term_to_binary("value as string")
|
||||||
|
assert ConfigDB.from_binary(binary) == "value as string"
|
||||||
|
end
|
||||||
|
|
||||||
|
test "boolean" do
|
||||||
|
binary = ConfigDB.transform(false)
|
||||||
|
assert binary == :erlang.term_to_binary(false)
|
||||||
|
assert ConfigDB.from_binary(binary) == false
|
||||||
|
end
|
||||||
|
|
||||||
|
test "nil" do
|
||||||
|
binary = ConfigDB.transform(nil)
|
||||||
|
assert binary == :erlang.term_to_binary(nil)
|
||||||
|
assert ConfigDB.from_binary(binary) == nil
|
||||||
|
end
|
||||||
|
|
||||||
|
test "integer" do
|
||||||
|
binary = ConfigDB.transform(150)
|
||||||
|
assert binary == :erlang.term_to_binary(150)
|
||||||
|
assert ConfigDB.from_binary(binary) == 150
|
||||||
|
end
|
||||||
|
|
||||||
|
test "atom" do
|
||||||
|
binary = ConfigDB.transform(":atom")
|
||||||
|
assert binary == :erlang.term_to_binary(:atom)
|
||||||
|
assert ConfigDB.from_binary(binary) == :atom
|
||||||
|
end
|
||||||
|
|
||||||
|
test "ssl options" do
|
||||||
|
binary = ConfigDB.transform([":tlsv1", ":tlsv1.1", ":tlsv1.2"])
|
||||||
|
assert binary == :erlang.term_to_binary([:tlsv1, :"tlsv1.1", :"tlsv1.2"])
|
||||||
|
assert ConfigDB.from_binary(binary) == [:tlsv1, :"tlsv1.1", :"tlsv1.2"]
|
||||||
|
end
|
||||||
|
|
||||||
|
test "pleroma module" do
|
||||||
|
binary = ConfigDB.transform("Pleroma.Bookmark")
|
||||||
|
assert binary == :erlang.term_to_binary(Pleroma.Bookmark)
|
||||||
|
assert ConfigDB.from_binary(binary) == Pleroma.Bookmark
|
||||||
|
end
|
||||||
|
|
||||||
|
test "pleroma string" do
|
||||||
|
binary = ConfigDB.transform("Pleroma")
|
||||||
|
assert binary == :erlang.term_to_binary("Pleroma")
|
||||||
|
assert ConfigDB.from_binary(binary) == "Pleroma"
|
||||||
|
end
|
||||||
|
|
||||||
|
test "phoenix module" do
|
||||||
|
binary = ConfigDB.transform("Phoenix.Socket.V1.JSONSerializer")
|
||||||
|
assert binary == :erlang.term_to_binary(Phoenix.Socket.V1.JSONSerializer)
|
||||||
|
assert ConfigDB.from_binary(binary) == Phoenix.Socket.V1.JSONSerializer
|
||||||
|
end
|
||||||
|
|
||||||
|
test "tesla module" do
|
||||||
|
binary = ConfigDB.transform("Tesla.Adapter.Hackney")
|
||||||
|
assert binary == :erlang.term_to_binary(Tesla.Adapter.Hackney)
|
||||||
|
assert ConfigDB.from_binary(binary) == Tesla.Adapter.Hackney
|
||||||
|
end
|
||||||
|
|
||||||
|
test "ExSyslogger module" do
|
||||||
|
binary = ConfigDB.transform("ExSyslogger")
|
||||||
|
assert binary == :erlang.term_to_binary(ExSyslogger)
|
||||||
|
assert ConfigDB.from_binary(binary) == ExSyslogger
|
||||||
|
end
|
||||||
|
|
||||||
|
test "Quack.Logger module" do
|
||||||
|
binary = ConfigDB.transform("Quack.Logger")
|
||||||
|
assert binary == :erlang.term_to_binary(Quack.Logger)
|
||||||
|
assert ConfigDB.from_binary(binary) == Quack.Logger
|
||||||
|
end
|
||||||
|
|
||||||
|
test "sigil" do
|
||||||
|
binary = ConfigDB.transform("~r[comp[lL][aA][iI][nN]er]")
|
||||||
|
assert binary == :erlang.term_to_binary(~r/comp[lL][aA][iI][nN]er/)
|
||||||
|
assert ConfigDB.from_binary(binary) == ~r/comp[lL][aA][iI][nN]er/
|
||||||
|
end
|
||||||
|
|
||||||
|
test "link sigil" do
|
||||||
|
binary = ConfigDB.transform("~r/https:\/\/example.com/")
|
||||||
|
assert binary == :erlang.term_to_binary(~r/https:\/\/example.com/)
|
||||||
|
assert ConfigDB.from_binary(binary) == ~r/https:\/\/example.com/
|
||||||
|
end
|
||||||
|
|
||||||
|
test "link sigil with um modifiers" do
|
||||||
|
binary = ConfigDB.transform("~r/https:\/\/example.com/um")
|
||||||
|
assert binary == :erlang.term_to_binary(~r/https:\/\/example.com/um)
|
||||||
|
assert ConfigDB.from_binary(binary) == ~r/https:\/\/example.com/um
|
||||||
|
end
|
||||||
|
|
||||||
|
test "link sigil with i modifier" do
|
||||||
|
binary = ConfigDB.transform("~r/https:\/\/example.com/i")
|
||||||
|
assert binary == :erlang.term_to_binary(~r/https:\/\/example.com/i)
|
||||||
|
assert ConfigDB.from_binary(binary) == ~r/https:\/\/example.com/i
|
||||||
|
end
|
||||||
|
|
||||||
|
test "link sigil with s modifier" do
|
||||||
|
binary = ConfigDB.transform("~r/https:\/\/example.com/s")
|
||||||
|
assert binary == :erlang.term_to_binary(~r/https:\/\/example.com/s)
|
||||||
|
assert ConfigDB.from_binary(binary) == ~r/https:\/\/example.com/s
|
||||||
|
end
|
||||||
|
|
||||||
|
test "raise if valid delimiter not found" do
|
||||||
|
assert_raise ArgumentError, "valid delimiter for Regex expression not found", fn ->
|
||||||
|
ConfigDB.transform("~r/https://[]{}<>\"'()|example.com/s")
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
test "2 child tuple" do
|
||||||
|
binary = ConfigDB.transform(%{"tuple" => ["v1", ":v2"]})
|
||||||
|
assert binary == :erlang.term_to_binary({"v1", :v2})
|
||||||
|
assert ConfigDB.from_binary(binary) == {"v1", :v2}
|
||||||
|
end
|
||||||
|
|
||||||
|
test "proxy tuple with localhost" do
|
||||||
|
binary =
|
||||||
|
ConfigDB.transform(%{
|
||||||
|
"tuple" => [":proxy_url", %{"tuple" => [":socks5", "localhost", 1234]}]
|
||||||
|
})
|
||||||
|
|
||||||
|
assert binary == :erlang.term_to_binary({:proxy_url, {:socks5, :localhost, 1234}})
|
||||||
|
assert ConfigDB.from_binary(binary) == {:proxy_url, {:socks5, :localhost, 1234}}
|
||||||
|
end
|
||||||
|
|
||||||
|
test "proxy tuple with domain" do
|
||||||
|
binary =
|
||||||
|
ConfigDB.transform(%{
|
||||||
|
"tuple" => [":proxy_url", %{"tuple" => [":socks5", "domain.com", 1234]}]
|
||||||
|
})
|
||||||
|
|
||||||
|
assert binary == :erlang.term_to_binary({:proxy_url, {:socks5, 'domain.com', 1234}})
|
||||||
|
assert ConfigDB.from_binary(binary) == {:proxy_url, {:socks5, 'domain.com', 1234}}
|
||||||
|
end
|
||||||
|
|
||||||
|
test "proxy tuple with ip" do
|
||||||
|
binary =
|
||||||
|
ConfigDB.transform(%{
|
||||||
|
"tuple" => [":proxy_url", %{"tuple" => [":socks5", "127.0.0.1", 1234]}]
|
||||||
|
})
|
||||||
|
|
||||||
|
assert binary == :erlang.term_to_binary({:proxy_url, {:socks5, {127, 0, 0, 1}, 1234}})
|
||||||
|
assert ConfigDB.from_binary(binary) == {:proxy_url, {:socks5, {127, 0, 0, 1}, 1234}}
|
||||||
|
end
|
||||||
|
|
||||||
|
test "tuple with n childs" do
|
||||||
|
binary =
|
||||||
|
ConfigDB.transform(%{
|
||||||
|
"tuple" => [
|
||||||
|
"v1",
|
||||||
|
":v2",
|
||||||
|
"Pleroma.Bookmark",
|
||||||
|
150,
|
||||||
|
false,
|
||||||
|
"Phoenix.Socket.V1.JSONSerializer"
|
||||||
|
]
|
||||||
|
})
|
||||||
|
|
||||||
|
assert binary ==
|
||||||
|
:erlang.term_to_binary(
|
||||||
|
{"v1", :v2, Pleroma.Bookmark, 150, false, Phoenix.Socket.V1.JSONSerializer}
|
||||||
|
)
|
||||||
|
|
||||||
|
assert ConfigDB.from_binary(binary) ==
|
||||||
|
{"v1", :v2, Pleroma.Bookmark, 150, false, Phoenix.Socket.V1.JSONSerializer}
|
||||||
|
end
|
||||||
|
|
||||||
|
test "map with string key" do
|
||||||
|
binary = ConfigDB.transform(%{"key" => "value"})
|
||||||
|
assert binary == :erlang.term_to_binary(%{"key" => "value"})
|
||||||
|
assert ConfigDB.from_binary(binary) == %{"key" => "value"}
|
||||||
|
end
|
||||||
|
|
||||||
|
test "map with atom key" do
|
||||||
|
binary = ConfigDB.transform(%{":key" => "value"})
|
||||||
|
assert binary == :erlang.term_to_binary(%{key: "value"})
|
||||||
|
assert ConfigDB.from_binary(binary) == %{key: "value"}
|
||||||
|
end
|
||||||
|
|
||||||
|
test "list of strings" do
|
||||||
|
binary = ConfigDB.transform(["v1", "v2", "v3"])
|
||||||
|
assert binary == :erlang.term_to_binary(["v1", "v2", "v3"])
|
||||||
|
assert ConfigDB.from_binary(binary) == ["v1", "v2", "v3"]
|
||||||
|
end
|
||||||
|
|
||||||
|
test "list of modules" do
|
||||||
|
binary = ConfigDB.transform(["Pleroma.Repo", "Pleroma.Activity"])
|
||||||
|
assert binary == :erlang.term_to_binary([Pleroma.Repo, Pleroma.Activity])
|
||||||
|
assert ConfigDB.from_binary(binary) == [Pleroma.Repo, Pleroma.Activity]
|
||||||
|
end
|
||||||
|
|
||||||
|
test "list of atoms" do
|
||||||
|
binary = ConfigDB.transform([":v1", ":v2", ":v3"])
|
||||||
|
assert binary == :erlang.term_to_binary([:v1, :v2, :v3])
|
||||||
|
assert ConfigDB.from_binary(binary) == [:v1, :v2, :v3]
|
||||||
|
end
|
||||||
|
|
||||||
|
test "list of mixed values" do
|
||||||
|
binary =
|
||||||
|
ConfigDB.transform([
|
||||||
|
"v1",
|
||||||
|
":v2",
|
||||||
|
"Pleroma.Repo",
|
||||||
|
"Phoenix.Socket.V1.JSONSerializer",
|
||||||
|
15,
|
||||||
|
false
|
||||||
|
])
|
||||||
|
|
||||||
|
assert binary ==
|
||||||
|
:erlang.term_to_binary([
|
||||||
|
"v1",
|
||||||
|
:v2,
|
||||||
|
Pleroma.Repo,
|
||||||
|
Phoenix.Socket.V1.JSONSerializer,
|
||||||
|
15,
|
||||||
|
false
|
||||||
|
])
|
||||||
|
|
||||||
|
assert ConfigDB.from_binary(binary) == [
|
||||||
|
"v1",
|
||||||
|
:v2,
|
||||||
|
Pleroma.Repo,
|
||||||
|
Phoenix.Socket.V1.JSONSerializer,
|
||||||
|
15,
|
||||||
|
false
|
||||||
|
]
|
||||||
|
end
|
||||||
|
|
||||||
|
test "simple keyword" do
|
||||||
|
binary = ConfigDB.transform([%{"tuple" => [":key", "value"]}])
|
||||||
|
assert binary == :erlang.term_to_binary([{:key, "value"}])
|
||||||
|
assert ConfigDB.from_binary(binary) == [{:key, "value"}]
|
||||||
|
assert ConfigDB.from_binary(binary) == [key: "value"]
|
||||||
|
end
|
||||||
|
|
||||||
|
test "keyword with partial_chain key" do
|
||||||
|
binary =
|
||||||
|
ConfigDB.transform([%{"tuple" => [":partial_chain", "&:hackney_connect.partial_chain/1"]}])
|
||||||
|
|
||||||
|
assert binary == :erlang.term_to_binary(partial_chain: &:hackney_connect.partial_chain/1)
|
||||||
|
assert ConfigDB.from_binary(binary) == [partial_chain: &:hackney_connect.partial_chain/1]
|
||||||
|
end
|
||||||
|
|
||||||
|
test "keyword" do
|
||||||
|
binary =
|
||||||
|
ConfigDB.transform([
|
||||||
|
%{"tuple" => [":types", "Pleroma.PostgresTypes"]},
|
||||||
|
%{"tuple" => [":telemetry_event", ["Pleroma.Repo.Instrumenter"]]},
|
||||||
|
%{"tuple" => [":migration_lock", nil]},
|
||||||
|
%{"tuple" => [":key1", 150]},
|
||||||
|
%{"tuple" => [":key2", "string"]}
|
||||||
|
])
|
||||||
|
|
||||||
|
assert binary ==
|
||||||
|
:erlang.term_to_binary(
|
||||||
|
types: Pleroma.PostgresTypes,
|
||||||
|
telemetry_event: [Pleroma.Repo.Instrumenter],
|
||||||
|
migration_lock: nil,
|
||||||
|
key1: 150,
|
||||||
|
key2: "string"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert ConfigDB.from_binary(binary) == [
|
||||||
|
types: Pleroma.PostgresTypes,
|
||||||
|
telemetry_event: [Pleroma.Repo.Instrumenter],
|
||||||
|
migration_lock: nil,
|
||||||
|
key1: 150,
|
||||||
|
key2: "string"
|
||||||
|
]
|
||||||
|
end
|
||||||
|
|
||||||
|
test "complex keyword with nested mixed childs" do
|
||||||
|
binary =
|
||||||
|
ConfigDB.transform([
|
||||||
|
%{"tuple" => [":uploader", "Pleroma.Uploaders.Local"]},
|
||||||
|
%{"tuple" => [":filters", ["Pleroma.Upload.Filter.Dedupe"]]},
|
||||||
|
%{"tuple" => [":link_name", true]},
|
||||||
|
%{"tuple" => [":proxy_remote", false]},
|
||||||
|
%{"tuple" => [":common_map", %{":key" => "value"}]},
|
||||||
|
%{
|
||||||
|
"tuple" => [
|
||||||
|
":proxy_opts",
|
||||||
|
[
|
||||||
|
%{"tuple" => [":redirect_on_failure", false]},
|
||||||
|
%{"tuple" => [":max_body_length", 1_048_576]},
|
||||||
|
%{
|
||||||
|
"tuple" => [
|
||||||
|
":http",
|
||||||
|
[%{"tuple" => [":follow_redirect", true]}, %{"tuple" => [":pool", ":upload"]}]
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
]
|
||||||
|
}
|
||||||
|
])
|
||||||
|
|
||||||
|
assert binary ==
|
||||||
|
:erlang.term_to_binary(
|
||||||
|
uploader: Pleroma.Uploaders.Local,
|
||||||
|
filters: [Pleroma.Upload.Filter.Dedupe],
|
||||||
|
link_name: true,
|
||||||
|
proxy_remote: false,
|
||||||
|
common_map: %{key: "value"},
|
||||||
|
proxy_opts: [
|
||||||
|
redirect_on_failure: false,
|
||||||
|
max_body_length: 1_048_576,
|
||||||
|
http: [
|
||||||
|
follow_redirect: true,
|
||||||
|
pool: :upload
|
||||||
|
]
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
assert ConfigDB.from_binary(binary) ==
|
||||||
|
[
|
||||||
|
uploader: Pleroma.Uploaders.Local,
|
||||||
|
filters: [Pleroma.Upload.Filter.Dedupe],
|
||||||
|
link_name: true,
|
||||||
|
proxy_remote: false,
|
||||||
|
common_map: %{key: "value"},
|
||||||
|
proxy_opts: [
|
||||||
|
redirect_on_failure: false,
|
||||||
|
max_body_length: 1_048_576,
|
||||||
|
http: [
|
||||||
|
follow_redirect: true,
|
||||||
|
pool: :upload
|
||||||
|
]
|
||||||
|
]
|
||||||
|
]
|
||||||
|
end
|
||||||
|
|
||||||
|
test "common keyword" do
|
||||||
|
binary =
|
||||||
|
ConfigDB.transform([
|
||||||
|
%{"tuple" => [":level", ":warn"]},
|
||||||
|
%{"tuple" => [":meta", [":all"]]},
|
||||||
|
%{"tuple" => [":path", ""]},
|
||||||
|
%{"tuple" => [":val", nil]},
|
||||||
|
%{"tuple" => [":webhook_url", "https://hooks.slack.com/services/YOUR-KEY-HERE"]}
|
||||||
|
])
|
||||||
|
|
||||||
|
assert binary ==
|
||||||
|
:erlang.term_to_binary(
|
||||||
|
level: :warn,
|
||||||
|
meta: [:all],
|
||||||
|
path: "",
|
||||||
|
val: nil,
|
||||||
|
webhook_url: "https://hooks.slack.com/services/YOUR-KEY-HERE"
|
||||||
|
)
|
||||||
|
|
||||||
|
assert ConfigDB.from_binary(binary) == [
|
||||||
|
level: :warn,
|
||||||
|
meta: [:all],
|
||||||
|
path: "",
|
||||||
|
val: nil,
|
||||||
|
webhook_url: "https://hooks.slack.com/services/YOUR-KEY-HERE"
|
||||||
|
]
|
||||||
|
end
|
||||||
|
|
||||||
|
test "complex keyword with sigil" do
|
||||||
|
binary =
|
||||||
|
ConfigDB.transform([
|
||||||
|
%{"tuple" => [":federated_timeline_removal", []]},
|
||||||
|
%{"tuple" => [":reject", ["~r/comp[lL][aA][iI][nN]er/"]]},
|
||||||
|
%{"tuple" => [":replace", []]}
|
||||||
|
])
|
||||||
|
|
||||||
|
assert binary ==
|
||||||
|
:erlang.term_to_binary(
|
||||||
|
federated_timeline_removal: [],
|
||||||
|
reject: [~r/comp[lL][aA][iI][nN]er/],
|
||||||
|
replace: []
|
||||||
|
)
|
||||||
|
|
||||||
|
assert ConfigDB.from_binary(binary) ==
|
||||||
|
[federated_timeline_removal: [], reject: [~r/comp[lL][aA][iI][nN]er/], replace: []]
|
||||||
|
end
|
||||||
|
|
||||||
|
test "complex keyword with tuples with more than 2 values" do
|
||||||
|
binary =
|
||||||
|
ConfigDB.transform([
|
||||||
|
%{
|
||||||
|
"tuple" => [
|
||||||
|
":http",
|
||||||
|
[
|
||||||
|
%{
|
||||||
|
"tuple" => [
|
||||||
|
":key1",
|
||||||
|
[
|
||||||
|
%{
|
||||||
|
"tuple" => [
|
||||||
|
":_",
|
||||||
|
[
|
||||||
|
%{
|
||||||
|
"tuple" => [
|
||||||
|
"/api/v1/streaming",
|
||||||
|
"Pleroma.Web.MastodonAPI.WebsocketHandler",
|
||||||
|
[]
|
||||||
|
]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
"tuple" => [
|
||||||
|
"/websocket",
|
||||||
|
"Phoenix.Endpoint.CowboyWebSocket",
|
||||||
|
%{
|
||||||
|
"tuple" => [
|
||||||
|
"Phoenix.Transports.WebSocket",
|
||||||
|
%{
|
||||||
|
"tuple" => [
|
||||||
|
"Pleroma.Web.Endpoint",
|
||||||
|
"Pleroma.Web.UserSocket",
|
||||||
|
[]
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
"tuple" => [
|
||||||
|
":_",
|
||||||
|
"Phoenix.Endpoint.Cowboy2Handler",
|
||||||
|
%{"tuple" => ["Pleroma.Web.Endpoint", []]}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
]
|
||||||
|
}
|
||||||
|
])
|
||||||
|
|
||||||
|
assert binary ==
|
||||||
|
:erlang.term_to_binary(
|
||||||
|
http: [
|
||||||
|
key1: [
|
||||||
|
_: [
|
||||||
|
{"/api/v1/streaming", Pleroma.Web.MastodonAPI.WebsocketHandler, []},
|
||||||
|
{"/websocket", Phoenix.Endpoint.CowboyWebSocket,
|
||||||
|
{Phoenix.Transports.WebSocket,
|
||||||
|
{Pleroma.Web.Endpoint, Pleroma.Web.UserSocket, []}}},
|
||||||
|
{:_, Phoenix.Endpoint.Cowboy2Handler, {Pleroma.Web.Endpoint, []}}
|
||||||
|
]
|
||||||
|
]
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
assert ConfigDB.from_binary(binary) == [
|
||||||
|
http: [
|
||||||
|
key1: [
|
||||||
|
{:_,
|
||||||
|
[
|
||||||
|
{"/api/v1/streaming", Pleroma.Web.MastodonAPI.WebsocketHandler, []},
|
||||||
|
{"/websocket", Phoenix.Endpoint.CowboyWebSocket,
|
||||||
|
{Phoenix.Transports.WebSocket,
|
||||||
|
{Pleroma.Web.Endpoint, Pleroma.Web.UserSocket, []}}},
|
||||||
|
{:_, Phoenix.Endpoint.Cowboy2Handler, {Pleroma.Web.Endpoint, []}}
|
||||||
|
]}
|
||||||
|
]
|
||||||
|
]
|
||||||
|
]
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
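The tests above pin down the JSON transport format admin-fe sends: strings with a leading ":" become atoms, module-looking strings become modules, and %{"tuple" => [...]} becomes a tuple before the term is stored as a binary. A condensed sketch of the round trip:

binary =
  Pleroma.ConfigDB.transform(%{
    "tuple" => [":proxy_url", %{"tuple" => [":socks5", "localhost", 1234]}]
  })

Pleroma.ConfigDB.from_binary(binary)
# => {:proxy_url, {:socks5, :localhost, 1234}}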
34  test/config/holder_test.exs  (new file)
@@ -0,0 +1,34 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Config.HolderTest do
  use ExUnit.Case, async: true

  alias Pleroma.Config.Holder

  test "config/0" do
    config = Holder.config()
    assert config[:pleroma][Pleroma.Uploaders.Local][:uploads] == "test/uploads"
    assert config[:tesla][:adapter] == Tesla.Mock

    refute config[:pleroma][Pleroma.Repo]
    refute config[:pleroma][Pleroma.Web.Endpoint]
    refute config[:pleroma][:env]
    refute config[:pleroma][:configurable_from_database]
    refute config[:pleroma][:database]
    refute config[:phoenix][:serve_endpoints]
  end

  test "config/1" do
    pleroma_config = Holder.config(:pleroma)
    assert pleroma_config[Pleroma.Uploaders.Local][:uploads] == "test/uploads"
    tesla_config = Holder.config(:tesla)
    assert tesla_config[:adapter] == Tesla.Mock
  end

  test "config/2" do
    assert Holder.config(:pleroma, Pleroma.Uploaders.Local) == [uploads: "test/uploads"]
    assert Holder.config(:tesla, :adapter) == Tesla.Mock
  end
end

44  test/config/loader_test.exs  (new file)
@@ -0,0 +1,44 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Config.LoaderTest do
  use ExUnit.Case, async: true

  alias Pleroma.Config.Loader

  test "load/1" do
    config = Loader.load("test/fixtures/config/temp.secret.exs")
    assert config[:pleroma][:first_setting][:key] == "value"
    assert config[:pleroma][:first_setting][:key2] == [Pleroma.Repo]
    assert config[:quack][:level] == :info
  end

  test "load_and_merge/0" do
    config = Loader.load_and_merge()

    refute config[:pleroma][Pleroma.Repo]
    refute config[:pleroma][Pleroma.Web.Endpoint]
    refute config[:pleroma][:env]
    refute config[:pleroma][:configurable_from_database]
    refute config[:pleroma][:database]
    refute config[:phoenix][:serve_endpoints]

    assert config[:pleroma][:ecto_repos] == [Pleroma.Repo]
    assert config[:pleroma][Pleroma.Uploaders.Local][:uploads] == "test/uploads"
    assert config[:tesla][:adapter] == Tesla.Mock
  end

  test "filter_group/2" do
    assert Loader.filter_group(:pleroma,
             pleroma: [
               {Pleroma.Repo, [a: 1, b: 2]},
               {Pleroma.Upload, [a: 1, b: 2]},
               {Pleroma.Web.Endpoint, []},
               env: :test,
               configurable_from_database: true,
               database: []
             ]
           ) == [{Pleroma.Upload, [a: 1, b: 2]}]
  end
end
@@ -5,47 +5,117 @@
 defmodule Pleroma.Config.TransferTaskTest do
   use Pleroma.DataCase

-  clear_config([:instance, :dynamic_configuration]) do
-    Pleroma.Config.put([:instance, :dynamic_configuration], true)
+  alias Pleroma.Config.TransferTask
+  alias Pleroma.ConfigDB
+
+  clear_config(:configurable_from_database) do
+    Pleroma.Config.put(:configurable_from_database, true)
   end

   test "transfer config values from db to env" do
     refute Application.get_env(:pleroma, :test_key)
     refute Application.get_env(:idna, :test_key)
+    refute Application.get_env(:quack, :test_key)

-    Pleroma.Web.AdminAPI.Config.create(%{
-      group: "pleroma",
-      key: "test_key",
+    ConfigDB.create(%{
+      group: ":pleroma",
+      key: ":test_key",
       value: [live: 2, com: 3]
     })

-    Pleroma.Web.AdminAPI.Config.create(%{
-      group: "idna",
-      key: "test_key",
+    ConfigDB.create(%{
+      group: ":idna",
+      key: ":test_key",
       value: [live: 15, com: 35]
     })

-    Pleroma.Config.TransferTask.start_link([])
+    ConfigDB.create(%{
+      group: ":quack",
+      key: ":test_key",
+      value: [:test_value1, :test_value2]
+    })
+
+    TransferTask.start_link([])

     assert Application.get_env(:pleroma, :test_key) == [live: 2, com: 3]
     assert Application.get_env(:idna, :test_key) == [live: 15, com: 35]
+    assert Application.get_env(:quack, :test_key) == [:test_value1, :test_value2]

     on_exit(fn ->
       Application.delete_env(:pleroma, :test_key)
       Application.delete_env(:idna, :test_key)
+      Application.delete_env(:quack, :test_key)
+    end)
+  end
+
+  test "transfer config values for 1 group and some keys" do
+    level = Application.get_env(:quack, :level)
+    meta = Application.get_env(:quack, :meta)
+
+    ConfigDB.create(%{
+      group: ":quack",
+      key: ":level",
+      value: :info
+    })
+
+    ConfigDB.create(%{
+      group: ":quack",
+      key: ":meta",
+      value: [:none]
+    })
+
+    TransferTask.start_link([])
+
+    assert Application.get_env(:quack, :level) == :info
+    assert Application.get_env(:quack, :meta) == [:none]
+    default = Pleroma.Config.Holder.config(:quack, :webhook_url)
+    assert Application.get_env(:quack, :webhook_url) == default
+
+    on_exit(fn ->
+      Application.put_env(:quack, :level, level)
+      Application.put_env(:quack, :meta, meta)
+    end)
+  end
+
+  test "transfer config values with full subkey update" do
+    emoji = Application.get_env(:pleroma, :emoji)
+    assets = Application.get_env(:pleroma, :assets)
+
+    ConfigDB.create(%{
+      group: ":pleroma",
+      key: ":emoji",
+      value: [groups: [a: 1, b: 2]]
+    })
+
+    ConfigDB.create(%{
+      group: ":pleroma",
+      key: ":assets",
+      value: [mascots: [a: 1, b: 2]]
+    })
+
+    TransferTask.start_link([])
+
+    emoji_env = Application.get_env(:pleroma, :emoji)
+    assert emoji_env[:groups] == [a: 1, b: 2]
+    assets_env = Application.get_env(:pleroma, :assets)
+    assert assets_env[:mascots] == [a: 1, b: 2]
+
+    on_exit(fn ->
+      Application.put_env(:pleroma, :emoji, emoji)
+      Application.put_env(:pleroma, :assets, assets)
     end)
   end

   test "non existing atom" do
-    Pleroma.Web.AdminAPI.Config.create(%{
-      group: "pleroma",
-      key: "undefined_atom_key",
+    ConfigDB.create(%{
+      group: ":pleroma",
+      key: ":undefined_atom_key",
       value: [live: 2, com: 3]
     })

     assert ExUnit.CaptureLog.capture_log(fn ->
-             Pleroma.Config.TransferTask.start_link([])
+             TransferTask.start_link([])
            end) =~
-             "updating env causes error, key: \"undefined_atom_key\", error: %ArgumentError{message: \"argument error\"}"
+             "updating env causes error, group: \":pleroma\" key: \":undefined_atom_key\" value: [live: 2, com: 3] error: %ArgumentError{message: \"argument error\"}"
   end
 end
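At a high level, the task exercised above walks every ConfigDB row and pushes it into the application environment; a simplified sketch of that loop (not the actual module, which also merges with defaults and restores defaults for deleted rows):

for config <- Pleroma.Repo.all(Pleroma.ConfigDB) do
  group = Pleroma.ConfigDB.from_string(config.group)
  key = Pleroma.ConfigDB.from_string(config.key)
  value = Pleroma.ConfigDB.from_binary(config.value)

  # merging with the defaults from Pleroma.Config.Holder is omitted here
  Application.put_env(group, key, value)
end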
230  test/docs/generator_test.exs  (new file)
@@ -0,0 +1,230 @@
|
||||||
|
defmodule Pleroma.Docs.GeneratorTest do
|
||||||
|
use ExUnit.Case, async: true
|
||||||
|
alias Pleroma.Docs.Generator
|
||||||
|
|
||||||
|
@descriptions [
|
||||||
|
%{
|
||||||
|
group: :pleroma,
|
||||||
|
key: Pleroma.Upload,
|
||||||
|
type: :group,
|
||||||
|
description: "",
|
||||||
|
children: [
|
||||||
|
%{
|
||||||
|
key: :uploader,
|
||||||
|
type: :module,
|
||||||
|
description: "",
|
||||||
|
suggestions:
|
||||||
|
Generator.list_modules_in_dir(
|
||||||
|
"lib/pleroma/upload/filter",
|
||||||
|
"Elixir.Pleroma.Upload.Filter."
|
||||||
|
)
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: :filters,
|
||||||
|
type: {:list, :module},
|
||||||
|
description: "",
|
||||||
|
suggestions:
|
||||||
|
Generator.list_modules_in_dir(
|
||||||
|
"lib/pleroma/web/activity_pub/mrf",
|
||||||
|
"Elixir.Pleroma.Web.ActivityPub.MRF."
|
||||||
|
)
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: Pleroma.Upload,
|
||||||
|
type: :string,
|
||||||
|
description: "",
|
||||||
|
suggestions: [""]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: :some_key,
|
||||||
|
type: :keyword,
|
||||||
|
description: "",
|
||||||
|
suggestions: [],
|
||||||
|
children: [
|
||||||
|
%{
|
||||||
|
key: :another_key,
|
||||||
|
type: :integer,
|
||||||
|
description: "",
|
||||||
|
suggestions: [5]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: :another_key_with_label,
|
||||||
|
label: "Another label",
|
||||||
|
type: :integer,
|
||||||
|
description: "",
|
||||||
|
suggestions: [7]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: :key1,
|
||||||
|
type: :atom,
|
||||||
|
description: "",
|
||||||
|
suggestions: [
|
||||||
|
:atom,
|
||||||
|
Pleroma.Upload,
|
||||||
|
{:tuple, "string", 8080},
|
||||||
|
[:atom, Pleroma.Upload, {:atom, Pleroma.Upload}]
|
||||||
|
]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: Pleroma.Upload,
|
||||||
|
label: "Special Label",
|
||||||
|
type: :string,
|
||||||
|
description: "",
|
||||||
|
suggestions: [""]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
group: {:subgroup, Swoosh.Adapters.SMTP},
|
||||||
|
key: :auth,
|
||||||
|
type: :atom,
|
||||||
|
description: "`Swoosh.Adapters.SMTP` adapter specific setting",
|
||||||
|
suggestions: [:always, :never, :if_available]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: "application/xml",
|
||||||
|
type: {:list, :string},
|
||||||
|
suggestions: ["xml"]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: :versions,
|
||||||
|
type: {:list, :atom},
|
||||||
|
description: "List of TLS version to use",
|
||||||
|
suggestions: [:tlsv1, ":tlsv1.1", ":tlsv1.2"]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
group: :tesla,
|
||||||
|
key: :adapter,
|
||||||
|
type: :group,
|
||||||
|
description: ""
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
group: :cors_plug,
|
||||||
|
type: :group,
|
||||||
|
children: [%{key: :key1, type: :string, suggestions: [""]}]
|
||||||
|
},
|
||||||
|
%{group: "Some string group", key: "Some string key", type: :group}
|
||||||
|
]
|
||||||
|
|
||||||
|
describe "convert_to_strings/1" do
|
||||||
|
test "group, key, label" do
|
||||||
|
[desc1, desc2 | _] = Generator.convert_to_strings(@descriptions)
|
||||||
|
|
||||||
|
assert desc1[:group] == ":pleroma"
|
||||||
|
assert desc1[:key] == "Pleroma.Upload"
|
||||||
|
assert desc1[:label] == "Pleroma.Upload"
|
||||||
|
|
||||||
|
assert desc2[:group] == ":tesla"
|
||||||
|
assert desc2[:key] == ":adapter"
|
||||||
|
assert desc2[:label] == "Adapter"
|
||||||
|
end
|
||||||
|
|
||||||
|
test "group without key" do
|
||||||
|
descriptions = Generator.convert_to_strings(@descriptions)
|
||||||
|
desc = Enum.at(descriptions, 2)
|
||||||
|
|
||||||
|
assert desc[:group] == ":cors_plug"
|
||||||
|
refute desc[:key]
|
||||||
|
assert desc[:label] == "Cors plug"
|
||||||
|
end
|
||||||
|
|
||||||
|
test "children key, label, type" do
|
||||||
|
[%{children: [child1, child2, child3, child4 | _]} | _] =
|
||||||
|
Generator.convert_to_strings(@descriptions)
|
||||||
|
|
||||||
|
assert child1[:key] == ":uploader"
|
||||||
|
assert child1[:label] == "Uploader"
|
||||||
|
assert child1[:type] == :module
|
||||||
|
|
||||||
|
assert child2[:key] == ":filters"
|
||||||
|
assert child2[:label] == "Filters"
|
||||||
|
assert child2[:type] == {:list, :module}
|
||||||
|
|
||||||
|
assert child3[:key] == "Pleroma.Upload"
|
||||||
|
assert child3[:label] == "Pleroma.Upload"
|
||||||
|
assert child3[:type] == :string
|
||||||
|
|
||||||
|
assert child4[:key] == ":some_key"
|
||||||
|
assert child4[:label] == "Some key"
|
||||||
|
assert child4[:type] == :keyword
|
||||||
|
end
|
||||||
|
|
||||||
|
test "child with predefined label" do
|
||||||
|
[%{children: children} | _] = Generator.convert_to_strings(@descriptions)
|
||||||
|
child = Enum.at(children, 5)
|
||||||
|
assert child[:key] == "Pleroma.Upload"
|
||||||
|
assert child[:label] == "Special Label"
|
||||||
|
end
|
||||||
|
|
||||||
|
test "subchild" do
|
||||||
|
[%{children: children} | _] = Generator.convert_to_strings(@descriptions)
|
||||||
|
child = Enum.at(children, 3)
|
||||||
|
%{children: [subchild | _]} = child
|
||||||
|
|
||||||
|
assert subchild[:key] == ":another_key"
|
||||||
|
assert subchild[:label] == "Another key"
|
||||||
|
assert subchild[:type] == :integer
|
||||||
|
end
|
||||||
|
|
||||||
|
test "subchild with predefined label" do
|
||||||
|
[%{children: children} | _] = Generator.convert_to_strings(@descriptions)
|
||||||
|
child = Enum.at(children, 3)
|
||||||
|
%{children: subchildren} = child
|
||||||
|
subchild = Enum.at(subchildren, 1)
|
||||||
|
|
||||||
|
assert subchild[:key] == ":another_key_with_label"
|
||||||
|
assert subchild[:label] == "Another label"
|
||||||
|
end
|
||||||
|
|
||||||
|
test "module suggestions" do
|
||||||
|
[%{children: [%{suggestions: suggestions} | _]} | _] =
|
||||||
|
Generator.convert_to_strings(@descriptions)
|
||||||
|
|
||||||
|
Enum.each(suggestions, fn suggestion ->
|
||||||
|
assert String.starts_with?(suggestion, "Pleroma.")
|
||||||
|
end)
|
||||||
|
end
|
||||||
|
|
||||||
|
test "atoms in suggestions with leading `:`" do
|
||||||
|
[%{children: children} | _] = Generator.convert_to_strings(@descriptions)
|
||||||
|
%{suggestions: suggestions} = Enum.at(children, 4)
|
||||||
|
assert Enum.at(suggestions, 0) == ":atom"
|
||||||
|
assert Enum.at(suggestions, 1) == "Pleroma.Upload"
|
||||||
|
assert Enum.at(suggestions, 2) == {":tuple", "string", 8080}
|
||||||
|
assert Enum.at(suggestions, 3) == [":atom", "Pleroma.Upload", {":atom", "Pleroma.Upload"}]
|
||||||
|
|
||||||
|
%{suggestions: suggestions} = Enum.at(children, 6)
|
||||||
|
assert Enum.at(suggestions, 0) == ":always"
|
||||||
|
assert Enum.at(suggestions, 1) == ":never"
|
||||||
|
assert Enum.at(suggestions, 2) == ":if_available"
|
||||||
|
end
|
||||||
|
|
||||||
|
test "group, key as string in main desc" do
|
||||||
|
descriptions = Generator.convert_to_strings(@descriptions)
|
||||||
|
desc = Enum.at(descriptions, 3)
|
||||||
|
assert desc[:group] == "Some string group"
|
||||||
|
assert desc[:key] == "Some string key"
|
||||||
|
end
|
||||||
|
|
||||||
|
test "key as string subchild" do
|
||||||
|
[%{children: children} | _] = Generator.convert_to_strings(@descriptions)
|
||||||
|
child = Enum.at(children, 7)
|
||||||
|
assert child[:key] == "application/xml"
|
||||||
|
end
|
||||||
|
|
||||||
|
test "suggestion for tls versions" do
|
||||||
|
[%{children: children} | _] = Generator.convert_to_strings(@descriptions)
|
||||||
|
child = Enum.at(children, 8)
|
||||||
|
assert child[:suggestions] == [":tlsv1", ":tlsv1.1", ":tlsv1.2"]
|
||||||
|
end
|
||||||
|
|
||||||
|
test "subgroup with module name" do
|
||||||
|
[%{children: children} | _] = Generator.convert_to_strings(@descriptions)
|
||||||
|
|
||||||
|
%{group: subgroup} = Enum.at(children, 6)
|
||||||
|
assert subgroup == {":subgroup", "Swoosh.Adapters.SMTP"}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
9
test/fixtures/config/temp.secret.exs
vendored
Normal file
9
test/fixtures/config/temp.secret.exs
vendored
Normal file
|
@ -0,0 +1,9 @@
|
||||||
|
use Mix.Config
|
||||||
|
|
||||||
|
config :pleroma, :first_setting, key: "value", key2: [Pleroma.Repo]
|
||||||
|
|
||||||
|
config :pleroma, :second_setting, key: "value2", key2: ["Activity"]
|
||||||
|
|
||||||
|
config :quack, level: :info
|
||||||
|
|
||||||
|
config :pleroma, Pleroma.Repo, pool: Ecto.Adapters.SQL.Sandbox
|
|
@ -394,9 +394,15 @@ def registration_factory do
|
||||||
end
|
end
|
||||||
|
|
||||||
def config_factory do
|
def config_factory do
|
||||||
%Pleroma.Web.AdminAPI.Config{
|
%Pleroma.ConfigDB{
|
||||||
key: sequence(:key, &"some_key_#{&1}"),
|
key:
|
||||||
group: "pleroma",
|
sequence(:key, fn key ->
|
||||||
|
# Atom dynamic registration hack in tests
|
||||||
|
"some_key_#{key}"
|
||||||
|
|> String.to_atom()
|
||||||
|
|> inspect()
|
||||||
|
end),
|
||||||
|
group: ":pleroma",
|
||||||
value:
|
value:
|
||||||
sequence(
|
sequence(
|
||||||
:value,
|
:value,
|
||||||
|
|
|
@ -6,6 +6,7 @@ defmodule Pleroma.Tests.Helpers do
|
||||||
@moduledoc """
|
@moduledoc """
|
||||||
Helpers for use in tests.
|
Helpers for use in tests.
|
||||||
"""
|
"""
|
||||||
|
alias Pleroma.Config
|
||||||
|
|
||||||
defmacro clear_config(config_path) do
|
defmacro clear_config(config_path) do
|
||||||
quote do
|
quote do
|
||||||
|
@ -17,9 +18,9 @@ defmacro clear_config(config_path) do
|
||||||
defmacro clear_config(config_path, do: yield) do
|
defmacro clear_config(config_path, do: yield) do
|
||||||
quote do
|
quote do
|
||||||
setup do
|
setup do
|
||||||
initial_setting = Pleroma.Config.get(unquote(config_path))
|
initial_setting = Config.get(unquote(config_path))
|
||||||
unquote(yield)
|
unquote(yield)
|
||||||
on_exit(fn -> Pleroma.Config.put(unquote(config_path), initial_setting) end)
|
on_exit(fn -> Config.put(unquote(config_path), initial_setting) end)
|
||||||
:ok
|
:ok
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
@ -35,9 +36,9 @@ defmacro clear_config_all(config_path) do
|
||||||
defmacro clear_config_all(config_path, do: yield) do
|
defmacro clear_config_all(config_path, do: yield) do
|
||||||
quote do
|
quote do
|
||||||
setup_all do
|
setup_all do
|
||||||
initial_setting = Pleroma.Config.get(unquote(config_path))
|
initial_setting = Config.get(unquote(config_path))
|
||||||
unquote(yield)
|
unquote(yield)
|
||||||
on_exit(fn -> Pleroma.Config.put(unquote(config_path), initial_setting) end)
|
on_exit(fn -> Config.put(unquote(config_path), initial_setting) end)
|
||||||
:ok
|
:ok
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
@ -94,10 +95,10 @@ def stringify_keys(key), do: key
|
||||||
|
|
||||||
defmacro guards_config(config_path) do
|
defmacro guards_config(config_path) do
|
||||||
quote do
|
quote do
|
||||||
initial_setting = Pleroma.Config.get(config_path)
|
initial_setting = Config.get(config_path)
|
||||||
|
|
||||||
Pleroma.Config.put(config_path, true)
|
Config.put(config_path, true)
|
||||||
on_exit(fn -> Pleroma.Config.put(config_path, initial_setting) end)
|
on_exit(fn -> Config.put(config_path, initial_setting) end)
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -4,143 +4,173 @@
|
||||||
|
|
||||||
defmodule Mix.Tasks.Pleroma.ConfigTest do
|
defmodule Mix.Tasks.Pleroma.ConfigTest do
|
||||||
use Pleroma.DataCase
|
use Pleroma.DataCase
|
||||||
|
|
||||||
|
alias Pleroma.ConfigDB
|
||||||
alias Pleroma.Repo
|
alias Pleroma.Repo
|
||||||
alias Pleroma.Web.AdminAPI.Config
|
|
||||||
|
|
||||||
setup_all do
|
setup_all do
|
||||||
Mix.shell(Mix.Shell.Process)
|
Mix.shell(Mix.Shell.Process)
|
||||||
temp_file = "config/temp.exported_from_db.secret.exs"
|
|
||||||
|
|
||||||
on_exit(fn ->
|
on_exit(fn ->
|
||||||
Mix.shell(Mix.Shell.IO)
|
Mix.shell(Mix.Shell.IO)
|
||||||
Application.delete_env(:pleroma, :first_setting)
|
Application.delete_env(:pleroma, :first_setting)
|
||||||
Application.delete_env(:pleroma, :second_setting)
|
Application.delete_env(:pleroma, :second_setting)
|
||||||
:ok = File.rm(temp_file)
|
|
||||||
end)
|
end)
|
||||||
|
|
||||||
{:ok, temp_file: temp_file}
|
:ok
|
||||||
end
|
end
|
||||||
|
|
||||||
clear_config_all([:instance, :dynamic_configuration]) do
|
clear_config_all(:configurable_from_database) do
|
||||||
Pleroma.Config.put([:instance, :dynamic_configuration], true)
|
Pleroma.Config.put(:configurable_from_database, true)
|
||||||
|
end
|
||||||
|
|
||||||
|
test "error if file with custom settings doesn't exist" do
|
||||||
|
Mix.Tasks.Pleroma.Config.run(["migrate_to_db"])
|
||||||
|
|
||||||
|
assert_receive {:mix_shell, :info,
|
||||||
|
[
|
||||||
|
"To migrate settings, you must define custom settings in config/test.secret.exs."
|
||||||
|
]},
|
||||||
|
15
|
||||||
end
|
end
|
||||||
|
|
||||||
test "settings are migrated to db" do
|
test "settings are migrated to db" do
|
||||||
assert Repo.all(Config) == []
|
initial = Application.get_env(:quack, :level)
|
||||||
|
on_exit(fn -> Application.put_env(:quack, :level, initial) end)
|
||||||
|
assert Repo.all(ConfigDB) == []
|
||||||
|
|
||||||
Application.put_env(:pleroma, :first_setting, key: "value", key2: [Pleroma.Repo])
|
Mix.Tasks.Pleroma.Config.migrate_to_db("test/fixtures/config/temp.secret.exs")
|
||||||
Application.put_env(:pleroma, :second_setting, key: "value2", key2: [Pleroma.Activity])
|
|
||||||
|
|
||||||
Mix.Tasks.Pleroma.Config.run(["migrate_to_db"])
|
config1 = ConfigDB.get_by_params(%{group: ":pleroma", key: ":first_setting"})
|
||||||
|
config2 = ConfigDB.get_by_params(%{group: ":pleroma", key: ":second_setting"})
|
||||||
|
config3 = ConfigDB.get_by_params(%{group: ":quack", key: ":level"})
|
||||||
|
refute ConfigDB.get_by_params(%{group: ":pleroma", key: "Pleroma.Repo"})
|
||||||
|
|
||||||
first_db = Config.get_by_params(%{group: "pleroma", key: ":first_setting"})
|
assert ConfigDB.from_binary(config1.value) == [key: "value", key2: [Repo]]
|
||||||
second_db = Config.get_by_params(%{group: "pleroma", key: ":second_setting"})
|
assert ConfigDB.from_binary(config2.value) == [key: "value2", key2: ["Activity"]]
|
||||||
refute Config.get_by_params(%{group: "pleroma", key: "Pleroma.Repo"})
|
assert ConfigDB.from_binary(config3.value) == :info
|
||||||
|
|
||||||
assert Config.from_binary(first_db.value) == [key: "value", key2: [Pleroma.Repo]]
|
|
||||||
assert Config.from_binary(second_db.value) == [key: "value2", key2: [Pleroma.Activity]]
|
|
||||||
end
|
end
|
||||||
|
|
||||||
test "settings are migrated to file and deleted from db", %{temp_file: temp_file} do
|
describe "with deletion temp file" do
|
||||||
Config.create(%{
|
setup do
|
||||||
group: "pleroma",
|
temp_file = "config/temp.exported_from_db.secret.exs"
|
||||||
key: ":setting_first",
|
|
||||||
value: [key: "value", key2: [Pleroma.Activity]]
|
|
||||||
})
|
|
||||||
|
|
||||||
Config.create(%{
|
on_exit(fn ->
|
||||||
group: "pleroma",
|
:ok = File.rm(temp_file)
|
||||||
key: ":setting_second",
|
end)
|
||||||
value: [key: "valu2", key2: [Pleroma.Repo]]
|
|
||||||
})
|
|
||||||
|
|
||||||
Mix.Tasks.Pleroma.Config.run(["migrate_from_db", "temp", "true"])
|
{:ok, temp_file: temp_file}
|
||||||
|
end
|
||||||
|
|
||||||
assert Repo.all(Config) == []
|
test "settings are migrated to file and deleted from db", %{temp_file: temp_file} do
|
||||||
assert File.exists?(temp_file)
|
ConfigDB.create(%{
|
||||||
{:ok, file} = File.read(temp_file)
|
group: ":pleroma",
|
||||||
|
key: ":setting_first",
|
||||||
|
value: [key: "value", key2: ["Activity"]]
|
||||||
|
})
|
||||||
|
|
||||||
assert file =~ "config :pleroma, :setting_first,"
|
ConfigDB.create(%{
|
||||||
assert file =~ "config :pleroma, :setting_second,"
|
group: ":pleroma",
|
||||||
end
|
key: ":setting_second",
|
||||||
|
value: [key: "value2", key2: [Repo]]
|
||||||
|
})
|
||||||
|
|
||||||
test "load a settings with large values and pass to file", %{temp_file: temp_file} do
|
ConfigDB.create(%{group: ":quack", key: ":level", value: :info})
|
||||||
Config.create(%{
|
|
||||||
group: "pleroma",
|
Mix.Tasks.Pleroma.Config.run(["migrate_from_db", "--env", "temp", "-d"])
|
||||||
key: ":instance",
|
|
||||||
value: [
|
assert Repo.all(ConfigDB) == []
|
||||||
name: "Pleroma",
|
|
||||||
email: "example@example.com",
|
file = File.read!(temp_file)
|
||||||
notify_email: "noreply@example.com",
|
assert file =~ "config :pleroma, :setting_first,"
|
||||||
description: "A Pleroma instance, an alternative fediverse server",
|
assert file =~ "config :pleroma, :setting_second,"
|
||||||
limit: 5_000,
|
assert file =~ "config :quack, :level, :info"
|
||||||
chat_limit: 5_000,
|
end
|
||||||
remote_limit: 100_000,
|
|
||||||
upload_limit: 16_000_000,
|
test "load a settings with large values and pass to file", %{temp_file: temp_file} do
|
||||||
avatar_upload_limit: 2_000_000,
|
ConfigDB.create(%{
|
||||||
background_upload_limit: 4_000_000,
|
group: ":pleroma",
|
||||||
banner_upload_limit: 4_000_000,
|
key: ":instance",
|
||||||
poll_limits: %{
|
value: [
|
||||||
max_options: 20,
|
name: "Pleroma",
|
||||||
max_option_chars: 200,
|
email: "example@example.com",
|
||||||
min_expiration: 0,
|
notify_email: "noreply@example.com",
|
||||||
max_expiration: 365 * 24 * 60 * 60
|
description: "A Pleroma instance, an alternative fediverse server",
|
||||||
},
|
limit: 5_000,
|
||||||
registrations_open: true,
|
chat_limit: 5_000,
|
||||||
federating: true,
|
remote_limit: 100_000,
|
||||||
federation_incoming_replies_max_depth: 100,
|
upload_limit: 16_000_000,
|
||||||
federation_reachability_timeout_days: 7,
|
avatar_upload_limit: 2_000_000,
|
||||||
federation_publisher_modules: [Pleroma.Web.ActivityPub.Publisher],
|
background_upload_limit: 4_000_000,
|
||||||
allow_relay: true,
|
banner_upload_limit: 4_000_000,
|
||||||
rewrite_policy: Pleroma.Web.ActivityPub.MRF.NoOpPolicy,
|
poll_limits: %{
|
||||||
public: true,
|
max_options: 20,
|
||||||
quarantined_instances: [],
|
max_option_chars: 200,
|
||||||
managed_config: true,
|
min_expiration: 0,
|
||||||
static_dir: "instance/static/",
|
max_expiration: 365 * 24 * 60 * 60
|
||||||
allowed_post_formats: ["text/plain", "text/html", "text/markdown", "text/bbcode"],
|
},
|
||||||
mrf_transparency: true,
|
registrations_open: true,
|
||||||
mrf_transparency_exclusions: [],
|
federating: true,
|
||||||
autofollowed_nicknames: [],
|
federation_incoming_replies_max_depth: 100,
|
||||||
max_pinned_statuses: 1,
|
federation_reachability_timeout_days: 7,
|
||||||
no_attachment_links: true,
|
federation_publisher_modules: [Pleroma.Web.ActivityPub.Publisher],
|
||||||
welcome_user_nickname: nil,
|
allow_relay: true,
|
||||||
welcome_message: nil,
|
rewrite_policy: Pleroma.Web.ActivityPub.MRF.NoOpPolicy,
|
||||||
max_report_comment_size: 1000,
|
public: true,
|
||||||
safe_dm_mentions: false,
|
quarantined_instances: [],
|
||||||
healthcheck: false,
|
managed_config: true,
|
||||||
remote_post_retention_days: 90,
|
static_dir: "instance/static/",
|
||||||
skip_thread_containment: true,
|
allowed_post_formats: ["text/plain", "text/html", "text/markdown", "text/bbcode"],
|
||||||
limit_to_local_content: :unauthenticated,
|
mrf_transparency: true,
|
||||||
dynamic_configuration: false,
|
mrf_transparency_exclusions: [],
|
||||||
user_bio_length: 5000,
|
autofollowed_nicknames: [],
|
||||||
user_name_length: 100,
|
max_pinned_statuses: 1,
|
||||||
max_account_fields: 10,
|
no_attachment_links: true,
|
||||||
max_remote_account_fields: 20,
|
welcome_user_nickname: nil,
|
||||||
account_field_name_length: 512,
|
welcome_message: nil,
|
||||||
account_field_value_length: 2048,
|
max_report_comment_size: 1000,
|
||||||
external_user_synchronization: true,
|
safe_dm_mentions: false,
|
||||||
extended_nickname_format: true,
|
healthcheck: false,
|
||||||
multi_factor_authentication: [
|
remote_post_retention_days: 90,
|
||||||
totp: [
|
skip_thread_containment: true,
|
||||||
# digits 6 or 8
|
limit_to_local_content: :unauthenticated,
|
||||||
digits: 6,
|
user_bio_length: 5000,
|
||||||
period: 30
|
user_name_length: 100,
|
||||||
],
|
max_account_fields: 10,
|
||||||
backup_codes: [
|
max_remote_account_fields: 20,
|
||||||
number: 2,
|
account_field_name_length: 512,
|
||||||
length: 6
|
account_field_value_length: 2048,
|
||||||
|
external_user_synchronization: true,
|
||||||
|
extended_nickname_format: true,
|
||||||
|
multi_factor_authentication: [
|
||||||
|
totp: [
|
||||||
|
# digits 6 or 8
|
||||||
|
digits: 6,
|
||||||
|
period: 30
|
||||||
|
],
|
||||||
|
backup_codes: [
|
||||||
|
number: 2,
|
||||||
|
length: 6
|
||||||
|
]
|
||||||
]
|
]
|
||||||
]
|
]
|
||||||
]
|
})
|
||||||
})
|
|
||||||
|
|
||||||
Mix.Tasks.Pleroma.Config.run(["migrate_from_db", "temp", "true"])
|
Mix.Tasks.Pleroma.Config.run(["migrate_from_db", "--env", "temp", "-d"])
|
||||||
|
|
||||||
assert Repo.all(Config) == []
|
assert Repo.all(ConfigDB) == []
|
||||||
assert File.exists?(temp_file)
|
assert File.exists?(temp_file)
|
||||||
{:ok, file} = File.read(temp_file)
|
{:ok, file} = File.read(temp_file)
|
||||||
|
|
||||||
assert file ==
|
header =
|
||||||
"use Mix.Config\n\nconfig :pleroma, :instance,\n name: \"Pleroma\",\n email: \"example@example.com\",\n notify_email: \"noreply@example.com\",\n description: \"A Pleroma instance, an alternative fediverse server\",\n limit: 5000,\n chat_limit: 5000,\n remote_limit: 100_000,\n upload_limit: 16_000_000,\n avatar_upload_limit: 2_000_000,\n background_upload_limit: 4_000_000,\n banner_upload_limit: 4_000_000,\n poll_limits: %{\n max_expiration: 31_536_000,\n max_option_chars: 200,\n max_options: 20,\n min_expiration: 0\n },\n registrations_open: true,\n federating: true,\n federation_incoming_replies_max_depth: 100,\n federation_reachability_timeout_days: 7,\n federation_publisher_modules: [Pleroma.Web.ActivityPub.Publisher],\n allow_relay: true,\n rewrite_policy: Pleroma.Web.ActivityPub.MRF.NoOpPolicy,\n public: true,\n quarantined_instances: [],\n managed_config: true,\n static_dir: \"instance/static/\",\n allowed_post_formats: [\"text/plain\", \"text/html\", \"text/markdown\", \"text/bbcode\"],\n mrf_transparency: true,\n mrf_transparency_exclusions: [],\n autofollowed_nicknames: [],\n max_pinned_statuses: 1,\n no_attachment_links: true,\n welcome_user_nickname: nil,\n welcome_message: nil,\n max_report_comment_size: 1000,\n safe_dm_mentions: false,\n healthcheck: false,\n remote_post_retention_days: 90,\n skip_thread_containment: true,\n limit_to_local_content: :unauthenticated,\n dynamic_configuration: false,\n user_bio_length: 5000,\n user_name_length: 100,\n max_account_fields: 10,\n max_remote_account_fields: 20,\n account_field_name_length: 512,\n account_field_value_length: 2048,\n external_user_synchronization: true,\n extended_nickname_format: true,\n multi_factor_authentication: [\n totp: [digits: 6, period: 30],\n backup_codes: [number: 2, length: 6]\n ]\n"
|
if Code.ensure_loaded?(Config.Reader) do
|
||||||
|
"import Config"
|
||||||
|
else
|
||||||
|
"use Mix.Config"
|
||||||
|
end
|
||||||
|
|
||||||
|
assert file ==
|
||||||
|
"#{header}\n\nconfig :pleroma, :instance,\n name: \"Pleroma\",\n email: \"example@example.com\",\n notify_email: \"noreply@example.com\",\n description: \"A Pleroma instance, an alternative fediverse server\",\n limit: 5000,\n chat_limit: 5000,\n remote_limit: 100_000,\n upload_limit: 16_000_000,\n avatar_upload_limit: 2_000_000,\n background_upload_limit: 4_000_000,\n banner_upload_limit: 4_000_000,\n poll_limits: %{\n max_expiration: 31_536_000,\n max_option_chars: 200,\n max_options: 20,\n min_expiration: 0\n },\n registrations_open: true,\n federating: true,\n federation_incoming_replies_max_depth: 100,\n federation_reachability_timeout_days: 7,\n federation_publisher_modules: [Pleroma.Web.ActivityPub.Publisher],\n allow_relay: true,\n rewrite_policy: Pleroma.Web.ActivityPub.MRF.NoOpPolicy,\n public: true,\n quarantined_instances: [],\n managed_config: true,\n static_dir: \"instance/static/\",\n allowed_post_formats: [\"text/plain\", \"text/html\", \"text/markdown\", \"text/bbcode\"],\n mrf_transparency: true,\n mrf_transparency_exclusions: [],\n autofollowed_nicknames: [],\n max_pinned_statuses: 1,\n no_attachment_links: true,\n welcome_user_nickname: nil,\n welcome_message: nil,\n max_report_comment_size: 1000,\n safe_dm_mentions: false,\n healthcheck: false,\n remote_post_retention_days: 90,\n skip_thread_containment: true,\n limit_to_local_content: :unauthenticated,\n user_bio_length: 5000,\n user_name_length: 100,\n max_account_fields: 10,\n max_remote_account_fields: 20,\n account_field_name_length: 512,\n account_field_value_length: 2048,\n external_user_synchronization: true,\n extended_nickname_format: true,\n multi_factor_authentication: [\n totp: [digits: 6, period: 30],\n backup_codes: [number: 2, length: 6]\n ]\n"
|
||||||
|
end
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -78,7 +78,7 @@ test "running gen" do
|
||||||
assert generated_config =~ "database: \"dbname\""
|
assert generated_config =~ "database: \"dbname\""
|
||||||
assert generated_config =~ "username: \"dbuser\""
|
assert generated_config =~ "username: \"dbuser\""
|
||||||
assert generated_config =~ "password: \"dbpass\""
|
assert generated_config =~ "password: \"dbpass\""
|
||||||
assert generated_config =~ "dynamic_configuration: true"
|
assert generated_config =~ "configurable_from_database: true"
|
||||||
assert generated_config =~ "http: [ip: {127, 0, 0, 1}, port: 4000]"
|
assert generated_config =~ "http: [ip: {127, 0, 0, 1}, port: 4000]"
|
||||||
assert File.read!(tmp_path() <> "setup.psql") == generated_setup_psql()
|
assert File.read!(tmp_path() <> "setup.psql") == generated_setup_psql()
|
||||||
end
|
end
|
||||||
|
|
File diff suppressed because it is too large
Load diff
|
@ -1,497 +0,0 @@
|
||||||
# Pleroma: A lightweight social networking server
|
|
||||||
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
|
|
||||||
# SPDX-License-Identifier: AGPL-3.0-only
|
|
||||||
|
|
||||||
defmodule Pleroma.Web.AdminAPI.ConfigTest do
|
|
||||||
use Pleroma.DataCase, async: true
|
|
||||||
import Pleroma.Factory
|
|
||||||
alias Pleroma.Web.AdminAPI.Config
|
|
||||||
|
|
||||||
test "get_by_key/1" do
|
|
||||||
config = insert(:config)
|
|
||||||
insert(:config)
|
|
||||||
|
|
||||||
assert config == Config.get_by_params(%{group: config.group, key: config.key})
|
|
||||||
end
|
|
||||||
|
|
||||||
test "create/1" do
|
|
||||||
{:ok, config} = Config.create(%{group: "pleroma", key: "some_key", value: "some_value"})
|
|
||||||
assert config == Config.get_by_params(%{group: "pleroma", key: "some_key"})
|
|
||||||
end
|
|
||||||
|
|
||||||
test "update/1" do
|
|
||||||
config = insert(:config)
|
|
||||||
{:ok, updated} = Config.update(config, %{value: "some_value"})
|
|
||||||
loaded = Config.get_by_params(%{group: config.group, key: config.key})
|
|
||||||
assert loaded == updated
|
|
||||||
end
|
|
||||||
|
|
||||||
test "update_or_create/1" do
|
|
||||||
config = insert(:config)
|
|
||||||
key2 = "another_key"
|
|
||||||
|
|
||||||
params = [
|
|
||||||
%{group: "pleroma", key: key2, value: "another_value"},
|
|
||||||
%{group: config.group, key: config.key, value: "new_value"}
|
|
||||||
]
|
|
||||||
|
|
||||||
assert Repo.all(Config) |> length() == 1
|
|
||||||
|
|
||||||
Enum.each(params, &Config.update_or_create(&1))
|
|
||||||
|
|
||||||
assert Repo.all(Config) |> length() == 2
|
|
||||||
|
|
||||||
config1 = Config.get_by_params(%{group: config.group, key: config.key})
|
|
||||||
config2 = Config.get_by_params(%{group: "pleroma", key: key2})
|
|
||||||
|
|
||||||
assert config1.value == Config.transform("new_value")
|
|
||||||
assert config2.value == Config.transform("another_value")
|
|
||||||
end
|
|
||||||
|
|
||||||
test "delete/1" do
|
|
||||||
config = insert(:config)
|
|
||||||
{:ok, _} = Config.delete(%{key: config.key, group: config.group})
|
|
||||||
refute Config.get_by_params(%{key: config.key, group: config.group})
|
|
||||||
end
|
|
||||||
|
|
||||||
describe "transform/1" do
|
|
||||||
test "string" do
|
|
||||||
binary = Config.transform("value as string")
|
|
||||||
assert binary == :erlang.term_to_binary("value as string")
|
|
||||||
assert Config.from_binary(binary) == "value as string"
|
|
||||||
end
|
|
||||||
|
|
||||||
test "boolean" do
|
|
||||||
binary = Config.transform(false)
|
|
||||||
assert binary == :erlang.term_to_binary(false)
|
|
||||||
assert Config.from_binary(binary) == false
|
|
||||||
end
|
|
||||||
|
|
||||||
test "nil" do
|
|
||||||
binary = Config.transform(nil)
|
|
||||||
assert binary == :erlang.term_to_binary(nil)
|
|
||||||
assert Config.from_binary(binary) == nil
|
|
||||||
end
|
|
||||||
|
|
||||||
test "integer" do
|
|
||||||
binary = Config.transform(150)
|
|
||||||
assert binary == :erlang.term_to_binary(150)
|
|
||||||
assert Config.from_binary(binary) == 150
|
|
||||||
end
|
|
||||||
|
|
||||||
test "atom" do
|
|
||||||
binary = Config.transform(":atom")
|
|
||||||
assert binary == :erlang.term_to_binary(:atom)
|
|
||||||
assert Config.from_binary(binary) == :atom
|
|
||||||
end
|
|
||||||
|
|
||||||
test "pleroma module" do
|
|
||||||
binary = Config.transform("Pleroma.Bookmark")
|
|
||||||
assert binary == :erlang.term_to_binary(Pleroma.Bookmark)
|
|
||||||
assert Config.from_binary(binary) == Pleroma.Bookmark
|
|
||||||
end
|
|
||||||
|
|
||||||
test "phoenix module" do
|
|
||||||
binary = Config.transform("Phoenix.Socket.V1.JSONSerializer")
|
|
||||||
assert binary == :erlang.term_to_binary(Phoenix.Socket.V1.JSONSerializer)
|
|
||||||
assert Config.from_binary(binary) == Phoenix.Socket.V1.JSONSerializer
|
|
||||||
end
|
|
||||||
|
|
||||||
test "sigil" do
|
|
||||||
binary = Config.transform("~r/comp[lL][aA][iI][nN]er/")
|
|
||||||
assert binary == :erlang.term_to_binary(~r/comp[lL][aA][iI][nN]er/)
|
|
||||||
assert Config.from_binary(binary) == ~r/comp[lL][aA][iI][nN]er/
|
|
||||||
end
|
|
||||||
|
|
||||||
test "link sigil" do
|
|
||||||
binary = Config.transform("~r/https:\/\/example.com/")
|
|
||||||
assert binary == :erlang.term_to_binary(~r/https:\/\/example.com/)
|
|
||||||
assert Config.from_binary(binary) == ~r/https:\/\/example.com/
|
|
||||||
end
|
|
||||||
|
|
||||||
test "link sigil with u modifier" do
|
|
||||||
binary = Config.transform("~r/https:\/\/example.com/u")
|
|
||||||
assert binary == :erlang.term_to_binary(~r/https:\/\/example.com/u)
|
|
||||||
assert Config.from_binary(binary) == ~r/https:\/\/example.com/u
|
|
||||||
end
|
|
||||||
|
|
||||||
test "link sigil with i modifier" do
|
|
||||||
binary = Config.transform("~r/https:\/\/example.com/i")
|
|
||||||
assert binary == :erlang.term_to_binary(~r/https:\/\/example.com/i)
|
|
||||||
assert Config.from_binary(binary) == ~r/https:\/\/example.com/i
|
|
||||||
end
|
|
||||||
|
|
||||||
test "link sigil with s modifier" do
|
|
||||||
binary = Config.transform("~r/https:\/\/example.com/s")
|
|
||||||
assert binary == :erlang.term_to_binary(~r/https:\/\/example.com/s)
|
|
||||||
assert Config.from_binary(binary) == ~r/https:\/\/example.com/s
|
|
||||||
end
|
|
||||||
|
|
||||||
test "2 child tuple" do
|
|
||||||
binary = Config.transform(%{"tuple" => ["v1", ":v2"]})
|
|
||||||
assert binary == :erlang.term_to_binary({"v1", :v2})
|
|
||||||
assert Config.from_binary(binary) == {"v1", :v2}
|
|
||||||
end
|
|
||||||
|
|
||||||
test "tuple with n childs" do
|
|
||||||
binary =
|
|
||||||
Config.transform(%{
|
|
||||||
"tuple" => [
|
|
||||||
"v1",
|
|
||||||
":v2",
|
|
||||||
"Pleroma.Bookmark",
|
|
||||||
150,
|
|
||||||
false,
|
|
||||||
"Phoenix.Socket.V1.JSONSerializer"
|
|
||||||
]
|
|
||||||
})
|
|
||||||
|
|
||||||
assert binary ==
|
|
||||||
:erlang.term_to_binary(
|
|
||||||
{"v1", :v2, Pleroma.Bookmark, 150, false, Phoenix.Socket.V1.JSONSerializer}
|
|
||||||
)
|
|
||||||
|
|
||||||
assert Config.from_binary(binary) ==
|
|
||||||
{"v1", :v2, Pleroma.Bookmark, 150, false, Phoenix.Socket.V1.JSONSerializer}
|
|
||||||
end
|
|
||||||
|
|
||||||
test "tuple with dispatch key" do
|
|
||||||
binary = Config.transform(%{"tuple" => [":dispatch", ["{:_,
|
|
||||||
[
|
|
||||||
{\"/api/v1/streaming\", Pleroma.Web.MastodonAPI.WebsocketHandler, []},
|
|
||||||
{\"/websocket\", Phoenix.Endpoint.CowboyWebSocket,
|
|
||||||
{Phoenix.Transports.WebSocket,
|
|
||||||
{Pleroma.Web.Endpoint, Pleroma.Web.UserSocket, [path: \"/websocket\"]}}},
|
|
||||||
{:_, Phoenix.Endpoint.Cowboy2Handler, {Pleroma.Web.Endpoint, []}}
|
|
||||||
]}"]]})
|
|
||||||
|
|
||||||
assert binary ==
|
|
||||||
:erlang.term_to_binary(
|
|
||||||
{:dispatch,
|
|
||||||
[
|
|
||||||
{:_,
|
|
||||||
[
|
|
||||||
{"/api/v1/streaming", Pleroma.Web.MastodonAPI.WebsocketHandler, []},
|
|
||||||
{"/websocket", Phoenix.Endpoint.CowboyWebSocket,
|
|
||||||
{Phoenix.Transports.WebSocket,
|
|
||||||
{Pleroma.Web.Endpoint, Pleroma.Web.UserSocket, [path: "/websocket"]}}},
|
|
||||||
{:_, Phoenix.Endpoint.Cowboy2Handler, {Pleroma.Web.Endpoint, []}}
|
|
||||||
]}
|
|
||||||
]}
|
|
||||||
)
|
|
||||||
|
|
||||||
assert Config.from_binary(binary) ==
|
|
||||||
{:dispatch,
|
|
||||||
[
|
|
||||||
{:_,
|
|
||||||
[
|
|
||||||
{"/api/v1/streaming", Pleroma.Web.MastodonAPI.WebsocketHandler, []},
|
|
||||||
{"/websocket", Phoenix.Endpoint.CowboyWebSocket,
|
|
||||||
{Phoenix.Transports.WebSocket,
|
|
||||||
{Pleroma.Web.Endpoint, Pleroma.Web.UserSocket, [path: "/websocket"]}}},
|
|
||||||
{:_, Phoenix.Endpoint.Cowboy2Handler, {Pleroma.Web.Endpoint, []}}
|
|
||||||
]}
|
|
||||||
]}
|
|
||||||
end
|
|
||||||
|
|
||||||
test "map with string key" do
|
|
||||||
binary = Config.transform(%{"key" => "value"})
|
|
||||||
assert binary == :erlang.term_to_binary(%{"key" => "value"})
|
|
||||||
assert Config.from_binary(binary) == %{"key" => "value"}
|
|
||||||
end
|
|
||||||
|
|
||||||
test "map with atom key" do
|
|
||||||
binary = Config.transform(%{":key" => "value"})
|
|
||||||
assert binary == :erlang.term_to_binary(%{key: "value"})
|
|
||||||
assert Config.from_binary(binary) == %{key: "value"}
|
|
||||||
end
|
|
||||||
|
|
||||||
test "list of strings" do
|
|
||||||
binary = Config.transform(["v1", "v2", "v3"])
|
|
||||||
assert binary == :erlang.term_to_binary(["v1", "v2", "v3"])
|
|
||||||
assert Config.from_binary(binary) == ["v1", "v2", "v3"]
|
|
||||||
end
|
|
||||||
|
|
||||||
test "list of modules" do
|
|
||||||
binary = Config.transform(["Pleroma.Repo", "Pleroma.Activity"])
|
|
||||||
assert binary == :erlang.term_to_binary([Pleroma.Repo, Pleroma.Activity])
|
|
||||||
assert Config.from_binary(binary) == [Pleroma.Repo, Pleroma.Activity]
|
|
||||||
end
|
|
||||||
|
|
||||||
test "list of atoms" do
|
|
||||||
binary = Config.transform([":v1", ":v2", ":v3"])
|
|
||||||
assert binary == :erlang.term_to_binary([:v1, :v2, :v3])
|
|
||||||
assert Config.from_binary(binary) == [:v1, :v2, :v3]
|
|
||||||
end
|
|
||||||
|
|
||||||
test "list of mixed values" do
|
|
||||||
binary =
|
|
||||||
Config.transform([
|
|
||||||
"v1",
|
|
||||||
":v2",
|
|
||||||
"Pleroma.Repo",
|
|
||||||
"Phoenix.Socket.V1.JSONSerializer",
|
|
||||||
15,
|
|
||||||
false
|
|
||||||
])
|
|
||||||
|
|
||||||
assert binary ==
|
|
||||||
:erlang.term_to_binary([
|
|
||||||
"v1",
|
|
||||||
:v2,
|
|
||||||
Pleroma.Repo,
|
|
||||||
Phoenix.Socket.V1.JSONSerializer,
|
|
||||||
15,
|
|
||||||
false
|
|
||||||
])
|
|
||||||
|
|
||||||
assert Config.from_binary(binary) == [
|
|
||||||
"v1",
|
|
||||||
:v2,
|
|
||||||
Pleroma.Repo,
|
|
||||||
Phoenix.Socket.V1.JSONSerializer,
|
|
||||||
15,
|
|
||||||
false
|
|
||||||
]
|
|
||||||
end
|
|
||||||
|
|
||||||
test "simple keyword" do
|
|
||||||
binary = Config.transform([%{"tuple" => [":key", "value"]}])
|
|
||||||
assert binary == :erlang.term_to_binary([{:key, "value"}])
|
|
||||||
assert Config.from_binary(binary) == [{:key, "value"}]
|
|
||||||
assert Config.from_binary(binary) == [key: "value"]
|
|
||||||
end
|
|
||||||
|
|
||||||
test "keyword with partial_chain key" do
|
|
||||||
binary =
|
|
||||||
Config.transform([%{"tuple" => [":partial_chain", "&:hackney_connect.partial_chain/1"]}])
|
|
||||||
|
|
||||||
assert binary == :erlang.term_to_binary(partial_chain: &:hackney_connect.partial_chain/1)
|
|
||||||
assert Config.from_binary(binary) == [partial_chain: &:hackney_connect.partial_chain/1]
|
|
||||||
end
|
|
||||||
|
|
||||||
test "keyword" do
|
|
||||||
binary =
|
|
||||||
Config.transform([
|
|
||||||
%{"tuple" => [":types", "Pleroma.PostgresTypes"]},
|
|
||||||
%{"tuple" => [":telemetry_event", ["Pleroma.Repo.Instrumenter"]]},
|
|
||||||
%{"tuple" => [":migration_lock", nil]},
|
|
||||||
%{"tuple" => [":key1", 150]},
|
|
||||||
%{"tuple" => [":key2", "string"]}
|
|
||||||
])
|
|
||||||
|
|
||||||
assert binary ==
|
|
||||||
:erlang.term_to_binary(
|
|
||||||
types: Pleroma.PostgresTypes,
|
|
||||||
telemetry_event: [Pleroma.Repo.Instrumenter],
|
|
||||||
migration_lock: nil,
|
|
||||||
key1: 150,
|
|
||||||
key2: "string"
|
|
||||||
)
|
|
||||||
|
|
||||||
assert Config.from_binary(binary) == [
|
|
||||||
types: Pleroma.PostgresTypes,
|
|
||||||
telemetry_event: [Pleroma.Repo.Instrumenter],
|
|
||||||
migration_lock: nil,
|
|
||||||
key1: 150,
|
|
||||||
key2: "string"
|
|
||||||
]
|
|
||||||
end
|
|
||||||
|
|
||||||
test "complex keyword with nested mixed childs" do
|
|
||||||
binary =
|
|
||||||
Config.transform([
|
|
||||||
%{"tuple" => [":uploader", "Pleroma.Uploaders.Local"]},
|
|
||||||
%{"tuple" => [":filters", ["Pleroma.Upload.Filter.Dedupe"]]},
|
|
||||||
%{"tuple" => [":link_name", true]},
|
|
||||||
%{"tuple" => [":proxy_remote", false]},
|
|
||||||
%{"tuple" => [":common_map", %{":key" => "value"}]},
|
|
||||||
%{
|
|
||||||
"tuple" => [
|
|
||||||
":proxy_opts",
|
|
||||||
[
|
|
||||||
%{"tuple" => [":redirect_on_failure", false]},
|
|
||||||
%{"tuple" => [":max_body_length", 1_048_576]},
|
|
||||||
%{
|
|
||||||
"tuple" => [
|
|
||||||
":http",
|
|
||||||
[%{"tuple" => [":follow_redirect", true]}, %{"tuple" => [":pool", ":upload"]}]
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
]
|
|
||||||
}
|
|
||||||
])
|
|
||||||
|
|
||||||
assert binary ==
|
|
||||||
:erlang.term_to_binary(
|
|
||||||
uploader: Pleroma.Uploaders.Local,
|
|
||||||
filters: [Pleroma.Upload.Filter.Dedupe],
|
|
||||||
link_name: true,
|
|
||||||
proxy_remote: false,
|
|
||||||
common_map: %{key: "value"},
|
|
||||||
proxy_opts: [
|
|
||||||
redirect_on_failure: false,
|
|
||||||
max_body_length: 1_048_576,
|
|
||||||
http: [
|
|
||||||
follow_redirect: true,
|
|
||||||
pool: :upload
|
|
||||||
]
|
|
||||||
]
|
|
||||||
)
|
|
||||||
|
|
||||||
assert Config.from_binary(binary) ==
|
|
||||||
[
|
|
||||||
uploader: Pleroma.Uploaders.Local,
|
|
||||||
filters: [Pleroma.Upload.Filter.Dedupe],
|
|
||||||
link_name: true,
|
|
||||||
proxy_remote: false,
|
|
||||||
common_map: %{key: "value"},
|
|
||||||
proxy_opts: [
|
|
||||||
redirect_on_failure: false,
|
|
||||||
max_body_length: 1_048_576,
|
|
||||||
http: [
|
|
||||||
follow_redirect: true,
|
|
||||||
pool: :upload
|
|
||||||
]
|
|
||||||
]
|
|
||||||
]
|
|
||||||
end
|
|
||||||
|
|
||||||
test "common keyword" do
|
|
||||||
binary =
|
|
||||||
Config.transform([
|
|
||||||
%{"tuple" => [":level", ":warn"]},
|
|
||||||
%{"tuple" => [":meta", [":all"]]},
|
|
||||||
%{"tuple" => [":path", ""]},
|
|
||||||
%{"tuple" => [":val", nil]},
|
|
||||||
%{"tuple" => [":webhook_url", "https://hooks.slack.com/services/YOUR-KEY-HERE"]}
|
|
||||||
])
|
|
||||||
|
|
||||||
assert binary ==
|
|
||||||
:erlang.term_to_binary(
|
|
||||||
level: :warn,
|
|
||||||
meta: [:all],
|
|
||||||
path: "",
|
|
||||||
val: nil,
|
|
||||||
webhook_url: "https://hooks.slack.com/services/YOUR-KEY-HERE"
|
|
||||||
)
|
|
||||||
|
|
||||||
assert Config.from_binary(binary) == [
|
|
||||||
level: :warn,
|
|
||||||
meta: [:all],
|
|
||||||
path: "",
|
|
||||||
val: nil,
|
|
||||||
webhook_url: "https://hooks.slack.com/services/YOUR-KEY-HERE"
|
|
||||||
]
|
|
||||||
end
|
|
||||||
|
|
||||||
test "complex keyword with sigil" do
|
|
||||||
binary =
|
|
||||||
Config.transform([
|
|
||||||
%{"tuple" => [":federated_timeline_removal", []]},
|
|
||||||
%{"tuple" => [":reject", ["~r/comp[lL][aA][iI][nN]er/"]]},
|
|
||||||
%{"tuple" => [":replace", []]}
|
|
||||||
])
|
|
||||||
|
|
||||||
assert binary ==
|
|
||||||
:erlang.term_to_binary(
|
|
||||||
federated_timeline_removal: [],
|
|
||||||
reject: [~r/comp[lL][aA][iI][nN]er/],
|
|
||||||
replace: []
|
|
||||||
)
|
|
||||||
|
|
||||||
assert Config.from_binary(binary) ==
|
|
||||||
[federated_timeline_removal: [], reject: [~r/comp[lL][aA][iI][nN]er/], replace: []]
|
|
||||||
end
|
|
||||||
|
|
||||||
test "complex keyword with tuples with more than 2 values" do
|
|
||||||
binary =
|
|
||||||
Config.transform([
|
|
||||||
%{
|
|
||||||
"tuple" => [
|
|
||||||
":http",
|
|
||||||
[
|
|
||||||
%{
|
|
||||||
"tuple" => [
|
|
||||||
":key1",
|
|
||||||
[
|
|
||||||
%{
|
|
||||||
"tuple" => [
|
|
||||||
":_",
|
|
||||||
[
|
|
||||||
%{
|
|
||||||
"tuple" => [
|
|
||||||
"/api/v1/streaming",
|
|
||||||
"Pleroma.Web.MastodonAPI.WebsocketHandler",
|
|
||||||
[]
|
|
||||||
]
|
|
||||||
},
|
|
||||||
%{
|
|
||||||
"tuple" => [
|
|
||||||
"/websocket",
|
|
||||||
"Phoenix.Endpoint.CowboyWebSocket",
|
|
||||||
%{
|
|
||||||
"tuple" => [
|
|
||||||
"Phoenix.Transports.WebSocket",
|
|
||||||
%{
|
|
||||||
"tuple" => [
|
|
||||||
"Pleroma.Web.Endpoint",
|
|
||||||
"Pleroma.Web.UserSocket",
|
|
||||||
[]
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
%{
|
|
||||||
"tuple" => [
|
|
||||||
":_",
|
|
||||||
"Phoenix.Endpoint.Cowboy2Handler",
|
|
||||||
%{"tuple" => ["Pleroma.Web.Endpoint", []]}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
]
|
|
||||||
}
|
|
||||||
])
|
|
||||||
|
|
||||||
assert binary ==
|
|
||||||
:erlang.term_to_binary(
|
|
||||||
http: [
|
|
||||||
key1: [
|
|
||||||
_: [
|
|
||||||
{"/api/v1/streaming", Pleroma.Web.MastodonAPI.WebsocketHandler, []},
|
|
||||||
{"/websocket", Phoenix.Endpoint.CowboyWebSocket,
|
|
||||||
{Phoenix.Transports.WebSocket,
|
|
||||||
{Pleroma.Web.Endpoint, Pleroma.Web.UserSocket, []}}},
|
|
||||||
{:_, Phoenix.Endpoint.Cowboy2Handler, {Pleroma.Web.Endpoint, []}}
|
|
||||||
]
|
|
||||||
]
|
|
||||||
]
|
|
||||||
)
|
|
||||||
|
|
||||||
assert Config.from_binary(binary) == [
|
|
||||||
http: [
|
|
||||||
key1: [
|
|
||||||
{:_,
|
|
||||||
[
|
|
||||||
{"/api/v1/streaming", Pleroma.Web.MastodonAPI.WebsocketHandler, []},
|
|
||||||
{"/websocket", Phoenix.Endpoint.CowboyWebSocket,
|
|
||||||
{Phoenix.Transports.WebSocket,
|
|
||||||
{Pleroma.Web.Endpoint, Pleroma.Web.UserSocket, []}}},
|
|
||||||
{:_, Phoenix.Endpoint.Cowboy2Handler, {Pleroma.Web.Endpoint, []}}
|
|
||||||
]}
|
|
||||||
]
|
|
||||||
]
|
|
||||||
]
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
Loading…
Reference in a new issue