Merge branch 'develop' of git.pleroma.social:pleroma/pleroma into update-validator

commit 7e488cd4a7
Author: lain
Date: 2020-06-19 14:56:50 +02:00
53 changed files with 851 additions and 901 deletions

View file

@@ -1,4 +1,4 @@
-image: elixir:1.8.1
+image: elixir:1.9.4

 variables: &global_variables
   POSTGRES_DB: pleroma_test
@@ -170,8 +170,7 @@ stop_review_app:
 amd64:
   stage: release
-  # TODO: Replace with upstream image when 1.9.0 comes out
-  image: rinpatch/elixir:1.9.0-rc.0
+  image: elixir:1.10.3
   only: &release-only
     - stable@pleroma/pleroma
     - develop@pleroma/pleroma
@@ -208,8 +207,7 @@ amd64-musl:
   stage: release
   artifacts: *release-artifacts
   only: *release-only
-  # TODO: Replace with upstream image when 1.9.0 comes out
-  image: rinpatch/elixir:1.9.0-rc.0-alpine
+  image: elixir:1.10.3-alpine
   cache: *release-cache
   variables: *release-variables
   before_script: &before-release-musl
@@ -225,8 +223,7 @@ arm:
   only: *release-only
   tags:
     - arm32
-  # TODO: Replace with upstream image when 1.9.0 comes out
-  image: rinpatch/elixir:1.9.0-rc.0-arm
+  image: elixir:1.10.3
   cache: *release-cache
   variables: *release-variables
   before_script: *before-release
@@ -238,8 +235,7 @@ arm-musl:
   only: *release-only
   tags:
     - arm32
-  # TODO: Replace with upstream image when 1.9.0 comes out
-  image: rinpatch/elixir:1.9.0-rc.0-arm-alpine
+  image: elixir:1.10.3-alpine
   cache: *release-cache
   variables: *release-variables
   before_script: *before-release-musl
@@ -251,8 +247,7 @@ arm64:
   only: *release-only
   tags:
     - arm
-  # TODO: Replace with upstream image when 1.9.0 comes out
-  image: rinpatch/elixir:1.9.0-rc.0-arm64
+  image: elixir:1.10.3
   cache: *release-cache
   variables: *release-variables
   before_script: *before-release
@@ -265,7 +260,7 @@ arm64-musl:
   tags:
     - arm
   # TODO: Replace with upstream image when 1.9.0 comes out
-  image: rinpatch/elixir:1.9.0-rc.0-arm64-alpine
+  image: elixir:1.10.3-alpine
   cache: *release-cache
   variables: *release-variables
   before_script: *before-release-musl

View file

@@ -6,6 +6,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 ## [unreleased]
 ### Changed
+- **Breaking:** Elixir >=1.9 is now required (was >= 1.8)
+- In Conversations, return only direct messages as `last_status`
 - MFR policy to set global expiration for all local Create activities
 - OGP rich media parser merged with TwitterCard
 <details>

View file

@@ -1,2 +1,2 @@
-elixir_version=1.8.2
-erlang_version=21.3.7
+elixir_version=1.9.4
+erlang_version=22.3.4.1

View file

@@ -72,8 +72,7 @@ defp create(group, settings) do
     group
     |> Pleroma.Config.Loader.filter_group(settings)
     |> Enum.each(fn {key, value} ->
-      key = inspect(key)
-      {:ok, _} = ConfigDB.update_or_create(%{group: inspect(group), key: key, value: value})
+      {:ok, _} = ConfigDB.update_or_create(%{group: group, key: key, value: value})

       shell_info("Settings for key #{key} migrated.")
     end)
@@ -131,12 +130,9 @@ defp write_and_delete(config, file, delete?) do
   end

   defp write(config, file) do
-    value =
-      config.value
-      |> ConfigDB.from_binary()
-      |> inspect(limit: :infinity)
+    value = inspect(config.value, limit: :infinity)

-    IO.write(file, "config #{config.group}, #{config.key}, #{value}\r\n\r\n")
+    IO.write(file, "config #{inspect(config.group)}, #{inspect(config.key)}, #{value}\r\n\r\n")

     config
   end
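For orientation, a hedged illustration of what the new write/2 emits (the values here are made up, not part of this commit): a ConfigDB row with group :pleroma, key :instance and value [name: "My instance"] is now written to the generated file as

    config :pleroma, :instance, [name: "My instance"]

since group and key are stored as native atoms and inspect-ed at write time, instead of being decoded from pre-stringified database values.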

View file

@@ -6,7 +6,7 @@ defmodule Pleroma.ConfigDB do
   use Ecto.Schema

   import Ecto.Changeset
-  import Ecto.Query
+  import Ecto.Query, only: [select: 3]
   import Pleroma.Web.Gettext

   alias __MODULE__
@@ -14,16 +14,6 @@ defmodule Pleroma.ConfigDB do
   @type t :: %__MODULE__{}

-  @full_key_update [
-    {:pleroma, :ecto_repos},
-    {:quack, :meta},
-    {:mime, :types},
-    {:cors_plug, [:max_age, :methods, :expose, :headers]},
-    {:auto_linker, :opts},
-    {:swarm, :node_blacklist},
-    {:logger, :backends}
-  ]
-
   @full_subkey_update [
     {:pleroma, :assets, :mascots},
     {:pleroma, :emoji, :groups},
@@ -32,14 +22,10 @@ defmodule Pleroma.ConfigDB do
     {:pleroma, :mrf_keyword, :replace}
   ]

-  @regex ~r/^~r(?'delimiter'[\/|"'([{<]{1})(?'pattern'.+)[\/|"')\]}>]{1}(?'modifier'[uismxfU]*)/u
-
-  @delimiters ["/", "|", "\"", "'", {"(", ")"}, {"[", "]"}, {"{", "}"}, {"<", ">"}]
-
   schema "config" do
-    field(:key, :string)
-    field(:group, :string)
-    field(:value, :binary)
+    field(:key, Pleroma.EctoType.Config.Atom)
+    field(:group, Pleroma.EctoType.Config.Atom)
+    field(:value, Pleroma.EctoType.Config.BinaryValue)
     field(:db, {:array, :string}, virtual: true, default: [])

     timestamps()
@@ -51,10 +37,6 @@ def get_all_as_keyword do
     |> select([c], {c.group, c.key, c.value})
     |> Repo.all()
     |> Enum.reduce([], fn {group, key, value}, acc ->
-      group = ConfigDB.from_string(group)
-      key = ConfigDB.from_string(key)
-      value = from_binary(value)
-
       Keyword.update(acc, group, [{key, value}], &Keyword.merge(&1, [{key, value}]))
     end)
   end
@@ -64,50 +46,41 @@ def get_by_params(params), do: Repo.get_by(ConfigDB, params)
   @spec changeset(ConfigDB.t(), map()) :: Changeset.t()
   def changeset(config, params \\ %{}) do
-    params = Map.put(params, :value, transform(params[:value]))
-
     config
     |> cast(params, [:key, :group, :value])
     |> validate_required([:key, :group, :value])
     |> unique_constraint(:key, name: :config_group_key_index)
   end

-  @spec create(map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()}
-  def create(params) do
+  defp create(params) do
     %ConfigDB{}
     |> changeset(params)
     |> Repo.insert()
   end

-  @spec update(ConfigDB.t(), map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()}
-  def update(%ConfigDB{} = config, %{value: value}) do
+  defp update(%ConfigDB{} = config, %{value: value}) do
     config
     |> changeset(%{value: value})
     |> Repo.update()
   end

-  @spec get_db_keys(ConfigDB.t()) :: [String.t()]
-  def get_db_keys(%ConfigDB{} = config) do
-    config.value
-    |> ConfigDB.from_binary()
-    |> get_db_keys(config.key)
-  end
-
   @spec get_db_keys(keyword(), any()) :: [String.t()]
   def get_db_keys(value, key) do
-    if Keyword.keyword?(value) do
-      value |> Keyword.keys() |> Enum.map(&convert(&1))
-    else
-      [convert(key)]
-    end
+    keys =
+      if Keyword.keyword?(value) do
+        Keyword.keys(value)
+      else
+        [key]
+      end
+
+    Enum.map(keys, &to_json_types(&1))
   end

   @spec merge_group(atom(), atom(), keyword(), keyword()) :: keyword()
   def merge_group(group, key, old_value, new_value) do
-    new_keys = to_map_set(new_value)
+    new_keys = to_mapset(new_value)

-    intersect_keys =
-      old_value |> to_map_set() |> MapSet.intersection(new_keys) |> MapSet.to_list()
+    intersect_keys = old_value |> to_mapset() |> MapSet.intersection(new_keys) |> MapSet.to_list()

     merged_value = ConfigDB.merge(old_value, new_value)
@@ -120,12 +93,10 @@ def merge_group(group, key, old_value, new_value) do
       []
     end)
     |> List.flatten()
-    |> Enum.reduce(merged_value, fn subkey, acc ->
-      Keyword.put(acc, subkey, new_value[subkey])
-    end)
+    |> Enum.reduce(merged_value, &Keyword.put(&2, &1, new_value[&1]))
   end

-  defp to_map_set(keyword) do
+  defp to_mapset(keyword) do
     keyword
     |> Keyword.keys()
     |> MapSet.new()
@@ -159,43 +130,40 @@ defp deep_merge(_key, value1, value2) do
   @spec update_or_create(map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()}
   def update_or_create(params) do
+    params = Map.put(params, :value, to_elixir_types(params[:value]))
     search_opts = Map.take(params, [:group, :key])

     with %ConfigDB{} = config <- ConfigDB.get_by_params(search_opts),
-         {:partial_update, true, config} <-
-           {:partial_update, can_be_partially_updated?(config), config},
-         old_value <- from_binary(config.value),
-         transformed_value <- do_transform(params[:value]),
-         {:can_be_merged, true, config} <- {:can_be_merged, is_list(transformed_value), config},
-         new_value <-
-           merge_group(
-             ConfigDB.from_string(config.group),
-             ConfigDB.from_string(config.key),
-             old_value,
-             transformed_value
-           ) do
-      ConfigDB.update(config, %{value: new_value})
+         {_, true, config} <- {:partial_update, can_be_partially_updated?(config), config},
+         {_, true, config} <-
+           {:can_be_merged, is_list(params[:value]) and is_list(config.value), config} do
+      new_value = merge_group(config.group, config.key, config.value, params[:value])
+      update(config, %{value: new_value})
     else
       {reason, false, config} when reason in [:partial_update, :can_be_merged] ->
-        ConfigDB.update(config, params)
+        update(config, params)

       nil ->
-        ConfigDB.create(params)
+        create(params)
     end
   end

   defp can_be_partially_updated?(%ConfigDB{} = config), do: not only_full_update?(config)

-  defp only_full_update?(%ConfigDB{} = config) do
-    config_group = ConfigDB.from_string(config.group)
-    config_key = ConfigDB.from_string(config.key)
+  defp only_full_update?(%ConfigDB{group: group, key: key}) do
+    full_key_update = [
+      {:pleroma, :ecto_repos},
+      {:quack, :meta},
+      {:mime, :types},
+      {:cors_plug, [:max_age, :methods, :expose, :headers]},
+      {:auto_linker, :opts},
+      {:swarm, :node_blacklist},
+      {:logger, :backends}
+    ]

-    Enum.any?(@full_key_update, fn
-      {group, key} when is_list(key) ->
-        config_group == group and config_key in key
-
-      {group, key} ->
-        config_group == group and config_key == key
+    Enum.any?(full_key_update, fn
+      {s_group, s_key} ->
+        group == s_group and ((is_list(s_key) and key in s_key) or key == s_key)
     end)
   end
@@ -205,11 +173,10 @@ def delete(params) do
     with %ConfigDB{} = config <- ConfigDB.get_by_params(search_opts),
          {config, sub_keys} when is_list(sub_keys) <- {config, params[:subkeys]},
-         old_value <- from_binary(config.value),
-         keys <- Enum.map(sub_keys, &do_transform_string(&1)),
-         {:partial_remove, config, new_value} when new_value != [] <-
-           {:partial_remove, config, Keyword.drop(old_value, keys)} do
-      ConfigDB.update(config, %{value: new_value})
+         keys <- Enum.map(sub_keys, &string_to_elixir_types(&1)),
+         {_, config, new_value} when new_value != [] <-
+           {:partial_remove, config, Keyword.drop(config.value, keys)} do
+      update(config, %{value: new_value})
     else
       {:partial_remove, config, []} ->
         Repo.delete(config)
@@ -225,37 +192,32 @@ def delete(params) do
     end
   end

-  @spec from_binary(binary()) :: term()
-  def from_binary(binary), do: :erlang.binary_to_term(binary)
-
-  @spec from_binary_with_convert(binary()) :: any()
-  def from_binary_with_convert(binary) do
-    binary
-    |> from_binary()
-    |> do_convert()
-  end
-
-  @spec from_string(String.t()) :: atom() | no_return()
-  def from_string(string), do: do_transform_string(string)
-
-  @spec convert(any()) :: any()
-  def convert(entity), do: do_convert(entity)
-
-  defp do_convert(entity) when is_list(entity) do
-    for v <- entity, into: [], do: do_convert(v)
-  end
-
-  defp do_convert(%Regex{} = entity), do: inspect(entity)
-
-  defp do_convert(entity) when is_map(entity) do
-    for {k, v} <- entity, into: %{}, do: {do_convert(k), do_convert(v)}
-  end
-
-  defp do_convert({:proxy_url, {type, :localhost, port}}) do
-    %{"tuple" => [":proxy_url", %{"tuple" => [do_convert(type), "localhost", port]}]}
+  @spec to_json_types(term()) :: map() | list() | boolean() | String.t()
+  def to_json_types(entity) when is_list(entity) do
+    Enum.map(entity, &to_json_types/1)
+  end
+
+  def to_json_types(%Regex{} = entity), do: inspect(entity)
+
+  def to_json_types(entity) when is_map(entity) do
+    Map.new(entity, fn {k, v} -> {to_json_types(k), to_json_types(v)} end)
+  end
+
+  def to_json_types({:args, args}) when is_list(args) do
+    arguments =
+      Enum.map(args, fn
+        arg when is_tuple(arg) -> inspect(arg)
+        arg -> to_json_types(arg)
+      end)
+
+    %{"tuple" => [":args", arguments]}
+  end
+
+  def to_json_types({:proxy_url, {type, :localhost, port}}) do
+    %{"tuple" => [":proxy_url", %{"tuple" => [to_json_types(type), "localhost", port]}]}
   end

-  defp do_convert({:proxy_url, {type, host, port}}) when is_tuple(host) do
+  def to_json_types({:proxy_url, {type, host, port}}) when is_tuple(host) do
     ip =
       host
       |> :inet_parse.ntoa()
@@ -264,66 +226,64 @@ defp do_convert({:proxy_url, {type, host, port}}) when is_tuple(host) do
     %{
       "tuple" => [
         ":proxy_url",
-        %{"tuple" => [do_convert(type), ip, port]}
+        %{"tuple" => [to_json_types(type), ip, port]}
       ]
     }
   end

-  defp do_convert({:proxy_url, {type, host, port}}) do
+  def to_json_types({:proxy_url, {type, host, port}}) do
     %{
       "tuple" => [
         ":proxy_url",
-        %{"tuple" => [do_convert(type), to_string(host), port]}
+        %{"tuple" => [to_json_types(type), to_string(host), port]}
       ]
     }
   end

-  defp do_convert({:partial_chain, entity}), do: %{"tuple" => [":partial_chain", inspect(entity)]}
+  def to_json_types({:partial_chain, entity}),
+    do: %{"tuple" => [":partial_chain", inspect(entity)]}

-  defp do_convert(entity) when is_tuple(entity) do
+  def to_json_types(entity) when is_tuple(entity) do
     value =
       entity
       |> Tuple.to_list()
-      |> do_convert()
+      |> to_json_types()

     %{"tuple" => value}
   end

-  defp do_convert(entity) when is_boolean(entity) or is_number(entity) or is_nil(entity) do
+  def to_json_types(entity) when is_binary(entity), do: entity
+
+  def to_json_types(entity) when is_boolean(entity) or is_number(entity) or is_nil(entity) do
     entity
   end

-  defp do_convert(entity)
-       when is_atom(entity) and entity in [:"tlsv1.1", :"tlsv1.2", :"tlsv1.3"] do
+  def to_json_types(entity) when entity in [:"tlsv1.1", :"tlsv1.2", :"tlsv1.3"] do
     ":#{entity}"
   end

-  defp do_convert(entity) when is_atom(entity), do: inspect(entity)
-
-  defp do_convert(entity) when is_binary(entity), do: entity
-
-  @spec transform(any()) :: binary() | no_return()
-  def transform(entity) when is_binary(entity) or is_map(entity) or is_list(entity) do
-    entity
-    |> do_transform()
-    |> to_binary()
-  end
-
-  def transform(entity), do: to_binary(entity)
-
-  @spec transform_with_out_binary(any()) :: any()
-  def transform_with_out_binary(entity), do: do_transform(entity)
-
-  @spec to_binary(any()) :: binary()
-  def to_binary(entity), do: :erlang.term_to_binary(entity)
-
-  defp do_transform(%Regex{} = entity), do: entity
-
-  defp do_transform(%{"tuple" => [":proxy_url", %{"tuple" => [type, host, port]}]}) do
-    {:proxy_url, {do_transform_string(type), parse_host(host), port}}
+  def to_json_types(entity) when is_atom(entity), do: inspect(entity)
+
+  @spec to_elixir_types(boolean() | String.t() | map() | list()) :: term()
+  def to_elixir_types(%{"tuple" => [":args", args]}) when is_list(args) do
+    arguments =
+      Enum.map(args, fn arg ->
+        if String.contains?(arg, ["{", "}"]) do
+          {elem, []} = Code.eval_string(arg)
+          elem
+        else
+          to_elixir_types(arg)
+        end
+      end)
+
+    {:args, arguments}
+  end
+
+  def to_elixir_types(%{"tuple" => [":proxy_url", %{"tuple" => [type, host, port]}]}) do
+    {:proxy_url, {string_to_elixir_types(type), parse_host(host), port}}
   end

-  defp do_transform(%{"tuple" => [":partial_chain", entity]}) do
+  def to_elixir_types(%{"tuple" => [":partial_chain", entity]}) do
     {partial_chain, []} =
       entity
       |> String.replace(~r/[^\w|^{:,[|^,|^[|^\]^}|^\/|^\.|^"]^\s/, "")
@@ -332,25 +292,51 @@ defp do_transform(%{"tuple" => [":partial_chain", entity]}) do
     {:partial_chain, partial_chain}
   end

-  defp do_transform(%{"tuple" => entity}) do
-    Enum.reduce(entity, {}, fn val, acc -> Tuple.append(acc, do_transform(val)) end)
+  def to_elixir_types(%{"tuple" => entity}) do
+    Enum.reduce(entity, {}, &Tuple.append(&2, to_elixir_types(&1)))
   end

-  defp do_transform(entity) when is_map(entity) do
-    for {k, v} <- entity, into: %{}, do: {do_transform(k), do_transform(v)}
+  def to_elixir_types(entity) when is_map(entity) do
+    Map.new(entity, fn {k, v} -> {to_elixir_types(k), to_elixir_types(v)} end)
   end

-  defp do_transform(entity) when is_list(entity) do
-    for v <- entity, into: [], do: do_transform(v)
+  def to_elixir_types(entity) when is_list(entity) do
+    Enum.map(entity, &to_elixir_types/1)
   end

-  defp do_transform(entity) when is_binary(entity) do
+  def to_elixir_types(entity) when is_binary(entity) do
     entity
     |> String.trim()
-    |> do_transform_string()
+    |> string_to_elixir_types()
   end

-  defp do_transform(entity), do: entity
+  def to_elixir_types(entity), do: entity
+
+  @spec string_to_elixir_types(String.t()) ::
+          atom() | Regex.t() | module() | String.t() | no_return()
+  def string_to_elixir_types("~r" <> _pattern = regex) do
+    pattern =
+      ~r/^~r(?'delimiter'[\/|"'([{<]{1})(?'pattern'.+)[\/|"')\]}>]{1}(?'modifier'[uismxfU]*)/u
+
+    delimiters = ["/", "|", "\"", "'", {"(", ")"}, {"[", "]"}, {"{", "}"}, {"<", ">"}]
+
+    with %{"modifier" => modifier, "pattern" => pattern, "delimiter" => regex_delimiter} <-
+           Regex.named_captures(pattern, regex),
+         {:ok, {leading, closing}} <- find_valid_delimiter(delimiters, pattern, regex_delimiter),
+         {result, _} <- Code.eval_string("~r#{leading}#{pattern}#{closing}#{modifier}") do
+      result
+    end
+  end
+
+  def string_to_elixir_types(":" <> atom), do: String.to_atom(atom)
+
+  def string_to_elixir_types(value) do
+    if module_name?(value) do
+      String.to_existing_atom("Elixir." <> value)
+    else
+      value
+    end
+  end

   defp parse_host("localhost"), do: :localhost
@@ -387,27 +373,8 @@ defp find_valid_delimiter([delimiter | others], pattern, regex_delimiter) do
     end
   end

-  defp do_transform_string("~r" <> _pattern = regex) do
-    with %{"modifier" => modifier, "pattern" => pattern, "delimiter" => regex_delimiter} <-
-           Regex.named_captures(@regex, regex),
-         {:ok, {leading, closing}} <- find_valid_delimiter(@delimiters, pattern, regex_delimiter),
-         {result, _} <- Code.eval_string("~r#{leading}#{pattern}#{closing}#{modifier}") do
-      result
-    end
-  end
-
-  defp do_transform_string(":" <> atom), do: String.to_atom(atom)
-
-  defp do_transform_string(value) do
-    if is_module_name?(value) do
-      String.to_existing_atom("Elixir." <> value)
-    else
-      value
-    end
-  end
-
-  @spec is_module_name?(String.t()) :: boolean()
-  def is_module_name?(string) do
+  @spec module_name?(String.t()) :: boolean()
+  def module_name?(string) do
     Regex.match?(~r/^(Pleroma|Phoenix|Tesla|Quack|Ueberauth|Swoosh)\./, string) or
       string in ["Oban", "Ueberauth", "ExSyslogger"]
   end
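A brief round-trip sketch of the new conversion helpers (illustrative values, derived from the clauses above): admin-API payloads arrive as JSON-friendly structures and are turned into native Elixir terms before being stored, then back again when rendered.

    # JSON-side representation -> Elixir term
    Pleroma.ConfigDB.to_elixir_types(%{
      "tuple" => [":proxy_url", %{"tuple" => [":socks5", "localhost", 9050]}]
    })
    # => {:proxy_url, {:socks5, :localhost, 9050}}

    # Elixir term -> JSON-side representation
    Pleroma.ConfigDB.to_json_types({:proxy_url, {:socks5, :localhost, 9050}})
    # => %{"tuple" => [":proxy_url", %{"tuple" => [":socks5", "localhost", 9050]}]}

    # String fragments are revived with string_to_elixir_types/1
    Pleroma.ConfigDB.string_to_elixir_types(":enabled")       # => :enabled
    Pleroma.ConfigDB.string_to_elixir_types("Pleroma.Upload") # => Pleroma.Upload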

View file

@@ -28,10 +28,6 @@ defmodule Pleroma.Config.TransferTask do
     {:pleroma, Pleroma.Captcha, [:seconds_valid]},
     {:pleroma, Pleroma.Upload, [:proxy_remote]},
     {:pleroma, :instance, [:upload_limit]},
-    {:pleroma, :email_notifications, [:digest]},
-    {:pleroma, :oauth2, [:clean_expired_tokens]},
-    {:pleroma, Pleroma.ActivityExpiration, [:enabled]},
-    {:pleroma, Pleroma.ScheduledActivity, [:enabled]},
     {:pleroma, :gopher, [:enabled]}
   ]
@@ -48,7 +44,7 @@ def load_and_update_env(deleted_settings \\ [], restart_pleroma? \\ true) do
     {logger, other} =
       (Repo.all(ConfigDB) ++ deleted_settings)
-      |> Enum.map(&transform_and_merge/1)
+      |> Enum.map(&merge_with_default/1)
       |> Enum.split_with(fn {group, _, _, _} -> group in [:logger, :quack] end)

     logger
@@ -92,11 +88,7 @@ defp maybe_set_pleroma_last(apps) do
     end
   end

-  defp transform_and_merge(%{group: group, key: key, value: value} = setting) do
-    group = ConfigDB.from_string(group)
-    key = ConfigDB.from_string(key)
-    value = ConfigDB.from_binary(value)
-
+  defp merge_with_default(%{group: group, key: key, value: value} = setting) do
     default = Config.Holder.default_config(group, key)

     merged =

View file

@@ -162,10 +162,13 @@ def for_user_with_last_activity_id(user, params \\ %{}) do
     for_user(user, params)
     |> Enum.map(fn participation ->
       activity_id =
-        ActivityPub.fetch_latest_activity_id_for_context(participation.conversation.ap_id, %{
-          user: user,
-          blocking_user: user
-        })
+        ActivityPub.fetch_latest_direct_activity_id_for_context(
+          participation.conversation.ap_id,
+          %{
+            user: user,
+            blocking_user: user
+          }
+        )

       %{
         participation

View file

@@ -1,4 +1,8 @@
-defmodule Pleroma.Web.ActivityPub.ObjectValidators.Types.DateTime do
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.DateTime do
   @moduledoc """
   The AP standard defines the date fields in AP as xsd:DateTime. Elixir's
   DateTime can't parse this, but it can parse the related iso8601. This

View file

@@ -1,4 +1,8 @@
-defmodule Pleroma.Web.ActivityPub.ObjectValidators.Types.ObjectID do
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.ObjectID do
   use Ecto.Type

   def type, do: :string

View file

@@ -1,7 +1,11 @@
-defmodule Pleroma.Web.ActivityPub.ObjectValidators.Types.Recipients do
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.Recipients do
   use Ecto.Type

-  alias Pleroma.Web.ActivityPub.ObjectValidators.Types.ObjectID
+  alias Pleroma.EctoType.ActivityPub.ObjectValidators.ObjectID

   def type, do: {:array, ObjectID}

View file

@@ -2,7 +2,7 @@
 # Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
 # SPDX-License-Identifier: AGPL-3.0-only

-defmodule Pleroma.Web.ActivityPub.ObjectValidators.Types.SafeText do
+defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.SafeText do
   use Ecto.Type

   alias Pleroma.HTML

View file

@@ -1,4 +1,8 @@
-defmodule Pleroma.Web.ActivityPub.ObjectValidators.Types.Uri do
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.EctoType.ActivityPub.ObjectValidators.Uri do
   use Ecto.Type

   def type, do: :string

View file

@@ -0,0 +1,26 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.EctoType.Config.Atom do
use Ecto.Type
def type, do: :atom
def cast(key) when is_atom(key) do
{:ok, key}
end
def cast(key) when is_binary(key) do
{:ok, Pleroma.ConfigDB.string_to_elixir_types(key)}
end
def cast(_), do: :error
def load(key) do
{:ok, Pleroma.ConfigDB.string_to_elixir_types(key)}
end
def dump(key) when is_atom(key), do: {:ok, inspect(key)}
def dump(_), do: :error
end
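A hedged usage sketch for the new type (calls assumed from the definitions above): cast/1 accepts both atoms and their string forms, dump/1 stores the inspect-ed atom, and load/1 revives it.

    Pleroma.EctoType.Config.Atom.cast(":pleroma")        # => {:ok, :pleroma}
    Pleroma.EctoType.Config.Atom.cast("Pleroma.Upload")  # => {:ok, Pleroma.Upload}
    Pleroma.EctoType.Config.Atom.cast(:instance)         # => {:ok, :instance}
    Pleroma.EctoType.Config.Atom.dump(:pleroma)          # => {:ok, ":pleroma"}
    Pleroma.EctoType.Config.Atom.load(":pleroma")        # => {:ok, :pleroma}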

View file

@@ -0,0 +1,27 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.EctoType.Config.BinaryValue do
use Ecto.Type
def type, do: :term
def cast(value) when is_binary(value) do
if String.valid?(value) do
{:ok, value}
else
{:ok, :erlang.binary_to_term(value)}
end
end
def cast(value), do: {:ok, value}
def load(value) when is_binary(value) do
{:ok, :erlang.binary_to_term(value)}
end
def dump(value) do
{:ok, :erlang.term_to_binary(value)}
end
end
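A similar sketch for BinaryValue (assumed behaviour, derived from the functions above): terms are serialized with :erlang.term_to_binary/1 on the way to the database and deserialized on load, while plain UTF-8 strings pass through cast unchanged.

    {:ok, bin} = Pleroma.EctoType.Config.BinaryValue.dump(limit: 5)
    Pleroma.EctoType.Config.BinaryValue.load(bin)
    # => {:ok, [limit: 5]}
    Pleroma.EctoType.Config.BinaryValue.cast("https://example.com")
    # => {:ok, "https://example.com"}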

View file

@@ -18,7 +18,7 @@ def fill_in_notification_types do
     )

     query
-    |> Repo.all()
+    |> Repo.chunk_stream(100)
    |> Enum.each(fn notification ->
       type =
         notification.activity

View file

@@ -8,6 +8,7 @@ defmodule Pleroma.Repo do
     adapter: Ecto.Adapters.Postgres,
     migration_timestamps: [type: :naive_datetime_usec]

+  import Ecto.Query
   require Logger

   defmodule Instrumenter do
@@ -78,6 +79,33 @@ def check_migrations_applied!() do
       :ok
     end
   end
+
+  def chunk_stream(query, chunk_size) do
+    # We don't actually need start and end functions of resource streaming,
+    # but it seems to be the only way to not fetch records one-by-one and
+    # have individual records be the elements of the stream, instead of
+    # lists of records
+    Stream.resource(
+      fn -> 0 end,
+      fn
+        last_id ->
+          query
+          |> order_by(asc: :id)
+          |> where([r], r.id > ^last_id)
+          |> limit(^chunk_size)
+          |> all()
+          |> case do
+            [] ->
+              {:halt, last_id}
+
+            records ->
+              last_id = List.last(records).id
+              {records, last_id}
+          end
+      end,
+      fn _ -> :ok end
+    )
+  end
 end

 defmodule Pleroma.Repo.UnappliedMigrationsError do
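A hedged usage sketch of the new helper (the query and fields here are assumptions for illustration; the pattern mirrors the notification backfill above): chunk_stream/2 walks a table in id order, chunk_size records per query, and yields individual records rather than lists.

    import Ecto.Query

    Pleroma.User
    |> where([u], u.local == true)
    |> Pleroma.Repo.chunk_stream(100)
    |> Enum.each(fn user -> IO.inspect(user.nickname) end)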

View file

@@ -5,10 +5,10 @@
 defmodule Pleroma.Signature do
   @behaviour HTTPSignatures.Adapter

+  alias Pleroma.EctoType.ActivityPub.ObjectValidators
   alias Pleroma.Keys
   alias Pleroma.User
   alias Pleroma.Web.ActivityPub.ActivityPub
-  alias Pleroma.Web.ActivityPub.ObjectValidators.Types

   def key_id_to_actor_id(key_id) do
     uri =
@@ -24,7 +24,7 @@ def key_id_to_actor_id(key_id) do
     maybe_ap_id = URI.to_string(uri)

-    case Types.ObjectID.cast(maybe_ap_id) do
+    case ObjectValidators.ObjectID.cast(maybe_ap_id) do
       {:ok, ap_id} ->
         {:ok, ap_id}

View file

@@ -14,6 +14,7 @@ defmodule Pleroma.User do
   alias Pleroma.Config
   alias Pleroma.Conversation.Participation
   alias Pleroma.Delivery
+  alias Pleroma.EctoType.ActivityPub.ObjectValidators
   alias Pleroma.Emoji
   alias Pleroma.FollowingRelationship
   alias Pleroma.Formatter
@@ -30,7 +31,6 @@ defmodule Pleroma.User do
   alias Pleroma.Web
   alias Pleroma.Web.ActivityPub.ActivityPub
   alias Pleroma.Web.ActivityPub.Builder
-  alias Pleroma.Web.ActivityPub.ObjectValidators.Types
   alias Pleroma.Web.ActivityPub.Pipeline
   alias Pleroma.Web.ActivityPub.Utils
   alias Pleroma.Web.CommonAPI
@@ -79,6 +79,7 @@ defmodule Pleroma.User do
   schema "users" do
     field(:bio, :string)
+    field(:raw_bio, :string)
     field(:email, :string)
     field(:name, :string)
     field(:nickname, :string)
@@ -115,7 +116,7 @@ defmodule Pleroma.User do
     field(:is_admin, :boolean, default: false)
     field(:show_role, :boolean, default: true)
     field(:settings, :map, default: nil)
-    field(:uri, Types.Uri, default: nil)
+    field(:uri, ObjectValidators.Uri, default: nil)
     field(:hide_followers_count, :boolean, default: false)
     field(:hide_follows_count, :boolean, default: false)
     field(:hide_followers, :boolean, default: false)
@@ -432,6 +433,7 @@ def update_changeset(struct, params \\ %{}) do
       params,
       [
         :bio,
+        :raw_bio,
         :name,
         :emoji,
         :avatar,
@@ -607,7 +609,16 @@ def register_changeset(struct, params \\ %{}, opts \\ []) do
     struct
     |> confirmation_changeset(need_confirmation: need_confirmation?)
-    |> cast(params, [:bio, :email, :name, :nickname, :password, :password_confirmation, :emoji])
+    |> cast(params, [
+      :bio,
+      :raw_bio,
+      :email,
+      :name,
+      :nickname,
+      :password,
+      :password_confirmation,
+      :emoji
+    ])
     |> validate_required([:name, :nickname, :password, :password_confirmation])
     |> validate_confirmation(:password)
     |> unique_constraint(:email)

View file

@@ -210,7 +210,7 @@ def stream_out_participations(%Object{data: %{"context" => context}}, user) do
     conversation = Repo.preload(conversation, :participations)

     last_activity_id =
-      fetch_latest_activity_id_for_context(conversation.ap_id, %{
+      fetch_latest_direct_activity_id_for_context(conversation.ap_id, %{
         user: user,
         blocking_user: user
       })
@@ -517,11 +517,12 @@ def fetch_activities_for_context(context, opts \\ %{}) do
     |> Repo.all()
   end

-  @spec fetch_latest_activity_id_for_context(String.t(), keyword() | map()) ::
+  @spec fetch_latest_direct_activity_id_for_context(String.t(), keyword() | map()) ::
           FlakeId.Ecto.CompatType.t() | nil
-  def fetch_latest_activity_id_for_context(context, opts \\ %{}) do
+  def fetch_latest_direct_activity_id_for_context(context, opts \\ %{}) do
     context
     |> fetch_activities_for_context_query(Map.merge(%{skip_preload: true}, opts))
+    |> restrict_visibility(%{visibility: "direct"})
     |> limit(1)
     |> select([a], a.id)
     |> Repo.one()

View file

@@ -9,6 +9,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidator do
   the system.
   """

+  alias Pleroma.EctoType.ActivityPub.ObjectValidators
   alias Pleroma.Object
   alias Pleroma.User
   alias Pleroma.Web.ActivityPub.ObjectValidators.AnnounceValidator
@@ -17,7 +18,6 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidator do
   alias Pleroma.Web.ActivityPub.ObjectValidators.DeleteValidator
   alias Pleroma.Web.ActivityPub.ObjectValidators.EmojiReactValidator
   alias Pleroma.Web.ActivityPub.ObjectValidators.LikeValidator
-  alias Pleroma.Web.ActivityPub.ObjectValidators.Types
   alias Pleroma.Web.ActivityPub.ObjectValidators.UndoValidator

   @spec validate(map(), keyword()) :: {:ok, map(), keyword()} | {:error, any()}
@@ -120,7 +120,7 @@ def stringify_keys(object) when is_list(object) do
   def stringify_keys(object), do: object

   def fetch_actor(object) do
-    with {:ok, actor} <- Types.ObjectID.cast(object["actor"]) do
+    with {:ok, actor} <- ObjectValidators.ObjectID.cast(object["actor"]) do
       User.get_or_fetch_by_ap_id(actor)
     end
   end

View file

@@ -5,9 +5,9 @@
 defmodule Pleroma.Web.ActivityPub.ObjectValidators.AnnounceValidator do
   use Ecto.Schema

+  alias Pleroma.EctoType.ActivityPub.ObjectValidators
   alias Pleroma.Object
   alias Pleroma.User
-  alias Pleroma.Web.ActivityPub.ObjectValidators.Types
   alias Pleroma.Web.ActivityPub.Utils
   alias Pleroma.Web.ActivityPub.Visibility
@@ -19,14 +19,14 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AnnounceValidator do
   @primary_key false

   embedded_schema do
-    field(:id, Types.ObjectID, primary_key: true)
+    field(:id, ObjectValidators.ObjectID, primary_key: true)
     field(:type, :string)
-    field(:object, Types.ObjectID)
-    field(:actor, Types.ObjectID)
+    field(:object, ObjectValidators.ObjectID)
+    field(:actor, ObjectValidators.ObjectID)
     field(:context, :string, autogenerate: {Utils, :generate_context_id, []})
-    field(:to, Types.Recipients, default: [])
-    field(:cc, Types.Recipients, default: [])
-    field(:published, Types.DateTime)
+    field(:to, ObjectValidators.Recipients, default: [])
+    field(:cc, ObjectValidators.Recipients, default: [])
+    field(:published, ObjectValidators.DateTime)
   end

   def cast_and_validate(data) do

View file

@@ -5,9 +5,9 @@
 defmodule Pleroma.Web.ActivityPub.ObjectValidators.ChatMessageValidator do
   use Ecto.Schema

+  alias Pleroma.EctoType.ActivityPub.ObjectValidators
   alias Pleroma.User
   alias Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidator
-  alias Pleroma.Web.ActivityPub.ObjectValidators.Types

   import Ecto.Changeset
   import Pleroma.Web.ActivityPub.Transmogrifier, only: [fix_emoji: 1]
@@ -16,12 +16,12 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.ChatMessageValidator do
   @derive Jason.Encoder

   embedded_schema do
-    field(:id, Types.ObjectID, primary_key: true)
-    field(:to, Types.Recipients, default: [])
+    field(:id, ObjectValidators.ObjectID, primary_key: true)
+    field(:to, ObjectValidators.Recipients, default: [])
     field(:type, :string)
-    field(:content, Types.SafeText)
-    field(:actor, Types.ObjectID)
-    field(:published, Types.DateTime)
+    field(:content, ObjectValidators.SafeText)
+    field(:actor, ObjectValidators.ObjectID)
+    field(:published, ObjectValidators.DateTime)
     field(:emoji, :map, default: %{})

     embeds_one(:attachment, AttachmentValidator)

View file

@@ -7,9 +7,9 @@
 # - doesn't embed, will only get the object id
 defmodule Pleroma.Web.ActivityPub.ObjectValidators.CreateChatMessageValidator do
   use Ecto.Schema

+  alias Pleroma.EctoType.ActivityPub.ObjectValidators
   alias Pleroma.Object
-  alias Pleroma.Web.ActivityPub.ObjectValidators.Types

   import Ecto.Changeset
   import Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations
@@ -17,11 +17,11 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.CreateChatMessageValidator do
   @primary_key false

   embedded_schema do
-    field(:id, Types.ObjectID, primary_key: true)
-    field(:actor, Types.ObjectID)
+    field(:id, ObjectValidators.ObjectID, primary_key: true)
+    field(:actor, ObjectValidators.ObjectID)
     field(:type, :string)
-    field(:to, Types.Recipients, default: [])
-    field(:object, Types.ObjectID)
+    field(:to, ObjectValidators.Recipients, default: [])
+    field(:object, ObjectValidators.ObjectID)
   end

   def cast_and_apply(data) do

View file

@@ -5,16 +5,16 @@
 defmodule Pleroma.Web.ActivityPub.ObjectValidators.CreateNoteValidator do
   use Ecto.Schema

+  alias Pleroma.EctoType.ActivityPub.ObjectValidators
   alias Pleroma.Web.ActivityPub.ObjectValidators.NoteValidator
-  alias Pleroma.Web.ActivityPub.ObjectValidators.Types

   import Ecto.Changeset

   @primary_key false

   embedded_schema do
-    field(:id, Types.ObjectID, primary_key: true)
-    field(:actor, Types.ObjectID)
+    field(:id, ObjectValidators.ObjectID, primary_key: true)
+    field(:actor, ObjectValidators.ObjectID)
     field(:type, :string)
     field(:to, {:array, :string})
     field(:cc, {:array, :string})

View file

@@ -6,8 +6,8 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.DeleteValidator do
   use Ecto.Schema

   alias Pleroma.Activity
+  alias Pleroma.EctoType.ActivityPub.ObjectValidators
   alias Pleroma.User
-  alias Pleroma.Web.ActivityPub.ObjectValidators.Types

   import Ecto.Changeset
   import Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations
@@ -15,13 +15,13 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.DeleteValidator do
   @primary_key false

   embedded_schema do
-    field(:id, Types.ObjectID, primary_key: true)
+    field(:id, ObjectValidators.ObjectID, primary_key: true)
     field(:type, :string)
-    field(:actor, Types.ObjectID)
-    field(:to, Types.Recipients, default: [])
-    field(:cc, Types.Recipients, default: [])
-    field(:deleted_activity_id, Types.ObjectID)
-    field(:object, Types.ObjectID)
+    field(:actor, ObjectValidators.ObjectID)
+    field(:to, ObjectValidators.Recipients, default: [])
+    field(:cc, ObjectValidators.Recipients, default: [])
+    field(:deleted_activity_id, ObjectValidators.ObjectID)
+    field(:object, ObjectValidators.ObjectID)
   end

   def cast_data(data) do

View file

@@ -5,8 +5,8 @@
 defmodule Pleroma.Web.ActivityPub.ObjectValidators.EmojiReactValidator do
   use Ecto.Schema

+  alias Pleroma.EctoType.ActivityPub.ObjectValidators
   alias Pleroma.Object
-  alias Pleroma.Web.ActivityPub.ObjectValidators.Types

   import Ecto.Changeset
   import Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations
@@ -14,10 +14,10 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.EmojiReactValidator do
   @primary_key false

   embedded_schema do
-    field(:id, Types.ObjectID, primary_key: true)
+    field(:id, ObjectValidators.ObjectID, primary_key: true)
     field(:type, :string)
-    field(:object, Types.ObjectID)
-    field(:actor, Types.ObjectID)
+    field(:object, ObjectValidators.ObjectID)
+    field(:actor, ObjectValidators.ObjectID)
     field(:context, :string)
     field(:content, :string)
     field(:to, {:array, :string}, default: [])

View file

@@ -5,8 +5,8 @@
 defmodule Pleroma.Web.ActivityPub.ObjectValidators.LikeValidator do
   use Ecto.Schema

+  alias Pleroma.EctoType.ActivityPub.ObjectValidators
   alias Pleroma.Object
-  alias Pleroma.Web.ActivityPub.ObjectValidators.Types
   alias Pleroma.Web.ActivityPub.Utils

   import Ecto.Changeset
@@ -15,13 +15,13 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.LikeValidator do
   @primary_key false

   embedded_schema do
-    field(:id, Types.ObjectID, primary_key: true)
+    field(:id, ObjectValidators.ObjectID, primary_key: true)
     field(:type, :string)
-    field(:object, Types.ObjectID)
-    field(:actor, Types.ObjectID)
+    field(:object, ObjectValidators.ObjectID)
+    field(:actor, ObjectValidators.ObjectID)
     field(:context, :string)
-    field(:to, Types.Recipients, default: [])
-    field(:cc, Types.Recipients, default: [])
+    field(:to, ObjectValidators.Recipients, default: [])
+    field(:cc, ObjectValidators.Recipients, default: [])
   end

   def cast_and_validate(data) do
@@ -67,7 +67,7 @@ def fix_recipients(cng) do
     with {[], []} <- {to, cc},
          %Object{data: %{"actor" => actor}} <- Object.get_cached_by_ap_id(object),
-         {:ok, actor} <- Types.ObjectID.cast(actor) do
+         {:ok, actor} <- ObjectValidators.ObjectID.cast(actor) do
       cng
       |> put_change(:to, [actor])
     else

View file

@@ -5,14 +5,14 @@
 defmodule Pleroma.Web.ActivityPub.ObjectValidators.NoteValidator do
   use Ecto.Schema

-  alias Pleroma.Web.ActivityPub.ObjectValidators.Types
+  alias Pleroma.EctoType.ActivityPub.ObjectValidators

   import Ecto.Changeset

   @primary_key false

   embedded_schema do
-    field(:id, Types.ObjectID, primary_key: true)
+    field(:id, ObjectValidators.ObjectID, primary_key: true)
     field(:to, {:array, :string}, default: [])
     field(:cc, {:array, :string}, default: [])
     field(:bto, {:array, :string}, default: [])
@@ -22,10 +22,10 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.NoteValidator do
     field(:type, :string)
     field(:content, :string)
     field(:context, :string)
-    field(:actor, Types.ObjectID)
-    field(:attributedTo, Types.ObjectID)
+    field(:actor, ObjectValidators.ObjectID)
+    field(:attributedTo, ObjectValidators.ObjectID)
     field(:summary, :string)
-    field(:published, Types.DateTime)
+    field(:published, ObjectValidators.DateTime)
     # TODO: Write type
     field(:emoji, :map, default: %{})
     field(:sensitive, :boolean, default: false)
@@ -35,7 +35,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.NoteValidator do
     field(:like_count, :integer, default: 0)
     field(:announcement_count, :integer, default: 0)
     field(:inRepyTo, :string)
-    field(:uri, Types.Uri)
+    field(:uri, ObjectValidators.Uri)
     field(:likes, {:array, :string}, default: [])
     field(:announcements, {:array, :string}, default: [])

View file

@@ -6,7 +6,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.UndoValidator do
   use Ecto.Schema

   alias Pleroma.Activity
-  alias Pleroma.Web.ActivityPub.ObjectValidators.Types
+  alias Pleroma.EctoType.ActivityPub.ObjectValidators

   import Ecto.Changeset
   import Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations
@@ -14,10 +14,10 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.UndoValidator do
   @primary_key false

   embedded_schema do
-    field(:id, Types.ObjectID, primary_key: true)
+    field(:id, ObjectValidators.ObjectID, primary_key: true)
     field(:type, :string)
-    field(:object, Types.ObjectID)
-    field(:actor, Types.ObjectID)
+    field(:object, ObjectValidators.ObjectID)
+    field(:actor, ObjectValidators.ObjectID)
     field(:to, {:array, :string}, default: [])
     field(:cc, {:array, :string}, default: [])
   end

View file

@@ -1,14 +1,18 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
 defmodule Pleroma.Web.ActivityPub.ObjectValidators.UrlObjectValidator do
   use Ecto.Schema

-  alias Pleroma.Web.ActivityPub.ObjectValidators.Types
+  alias Pleroma.EctoType.ActivityPub.ObjectValidators

   import Ecto.Changeset

   @primary_key false

   embedded_schema do
     field(:type, :string)
-    field(:href, Types.Uri)
+    field(:href, ObjectValidators.Uri)
     field(:mediaType, :string)
   end

View file

@@ -8,6 +8,7 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
   """
   alias Pleroma.Activity
   alias Pleroma.EarmarkRenderer
+  alias Pleroma.EctoType.ActivityPub.ObjectValidators
   alias Pleroma.FollowingRelationship
   alias Pleroma.Maps
   alias Pleroma.Notification
@@ -18,7 +19,6 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
   alias Pleroma.Web.ActivityPub.ActivityPub
   alias Pleroma.Web.ActivityPub.Builder
   alias Pleroma.Web.ActivityPub.ObjectValidator
-  alias Pleroma.Web.ActivityPub.ObjectValidators.Types
   alias Pleroma.Web.ActivityPub.Pipeline
   alias Pleroma.Web.ActivityPub.Utils
   alias Pleroma.Web.ActivityPub.Visibility
@@ -725,7 +725,7 @@ def handle_incoming(
     else
       {:error, {:validate_object, _}} = e ->
         # Check if we have a create activity for this
-        with {:ok, object_id} <- Types.ObjectID.cast(data["object"]),
+        with {:ok, object_id} <- ObjectValidators.ObjectID.cast(data["object"]),
              %Activity{data: %{"actor" => actor}} <-
               Activity.create_by_object_ap_id(object_id) |> Repo.one(),
              # We have one, insert a tombstone and retry

View file

@@ -33,7 +33,11 @@ def descriptions(conn, _params) do
   def show(conn, %{only_db: true}) do
     with :ok <- configurable_from_database() do
       configs = Pleroma.Repo.all(ConfigDB)
-      render(conn, "index.json", %{configs: configs})
+
+      render(conn, "index.json", %{
+        configs: configs,
+        need_reboot: Restarter.Pleroma.need_reboot?()
+      })
     end
   end
@@ -61,17 +65,20 @@ def show(conn, _params) do
               value
             end

-          %{
-            group: ConfigDB.convert(group),
-            key: ConfigDB.convert(key),
-            value: ConfigDB.convert(merged_value)
+          %ConfigDB{
+            group: group,
+            key: key,
+            value: merged_value
           }
           |> Pleroma.Maps.put_if_present(:db, db)
         end)
       end)
       |> List.flatten()

-      json(conn, %{configs: merged, need_reboot: Restarter.Pleroma.need_reboot?()})
+      render(conn, "index.json", %{
+        configs: merged,
+        need_reboot: Restarter.Pleroma.need_reboot?()
+      })
     end
   end
@@ -91,24 +98,17 @@ def update(%{body_params: %{configs: configs}} = conn, _) do
      {deleted, updated} =
        results
-        |> Enum.map(fn {:ok, config} ->
-          Map.put(config, :db, ConfigDB.get_db_keys(config))
-        end)
-        |> Enum.split_with(fn config ->
-          Ecto.get_meta(config, :state) == :deleted
+        |> Enum.map(fn {:ok, %{key: key, value: value} = config} ->
+          Map.put(config, :db, ConfigDB.get_db_keys(value, key))
         end)
+        |> Enum.split_with(&(Ecto.get_meta(&1, :state) == :deleted))

       Config.TransferTask.load_and_update_env(deleted, false)

       if not Restarter.Pleroma.need_reboot?() do
         changed_reboot_settings? =
           (updated ++ deleted)
-          |> Enum.any?(fn config ->
-            group = ConfigDB.from_string(config.group)
-            key = ConfigDB.from_string(config.key)
-            value = ConfigDB.from_binary(config.value)
-            Config.TransferTask.pleroma_need_restart?(group, key, value)
-          end)
+          |> Enum.any?(&Config.TransferTask.pleroma_need_restart?(&1.group, &1.key, &1.value))

         if changed_reboot_settings?, do: Restarter.Pleroma.need_reboot()
       end

View file

@@ -5,23 +5,20 @@
 defmodule Pleroma.Web.AdminAPI.ConfigView do
   use Pleroma.Web, :view

+  alias Pleroma.ConfigDB
+
   def render("index.json", %{configs: configs} = params) do
-    map = %{
-      configs: render_many(configs, __MODULE__, "show.json", as: :config)
-    }
-
-    if params[:need_reboot] do
-      Map.put(map, :need_reboot, true)
-    else
-      map
-    end
+    %{
+      configs: render_many(configs, __MODULE__, "show.json", as: :config),
+      need_reboot: params[:need_reboot]
+    }
   end

   def render("show.json", %{config: config}) do
     map = %{
-      key: config.key,
-      group: config.group,
-      value: Pleroma.ConfigDB.from_binary_with_convert(config.value)
+      key: ConfigDB.to_json_types(config.key),
+      group: ConfigDB.to_json_types(config.group),
+      value: ConfigDB.to_json_types(config.value)
     }

     if config.db != [] do

View file

@@ -183,7 +183,6 @@ defp notification_type do
       "favourite",
       "reblog",
       "mention",
-      "poll",
       "pleroma:emoji_reaction",
       "pleroma:chat_mention",
       "move",

View file

@@ -165,6 +165,7 @@ def update_credentials(%{assigns: %{user: user}, body_params: params} = conn, _p
       end)
       |> Maps.put_if_present(:name, params[:display_name])
       |> Maps.put_if_present(:bio, params[:note])
+      |> Maps.put_if_present(:raw_bio, params[:note])
       |> Maps.put_if_present(:avatar, params[:avatar])
       |> Maps.put_if_present(:banner, params[:header])
       |> Maps.put_if_present(:background, params[:pleroma_background_image])

View file

@@ -224,7 +224,7 @@ defp do_render("show.json", %{user: user} = opts) do
       fields: user.fields,
       bot: bot,
       source: %{
-        note: prepare_user_bio(user),
+        note: user.raw_bio || "",
         sensitive: false,
         fields: user.raw_fields,
         pleroma: %{
@@ -260,17 +260,6 @@ defp do_render("show.json", %{user: user} = opts) do
     |> maybe_put_unread_notification_count(user, opts[:for])
   end

-  defp prepare_user_bio(%User{bio: ""}), do: ""
-
-  defp prepare_user_bio(%User{bio: bio}) when is_binary(bio) do
-    bio
-    |> String.replace(~r(<br */?>), "\n")
-    |> Pleroma.HTML.strip_tags()
-    |> HtmlEntities.decode()
-  end
-
-  defp prepare_user_bio(_), do: ""
-
   defp username_from_nickname(string) when is_binary(string) do
     hd(String.split(string, "@"))
   end

View file

@@ -23,10 +23,13 @@ def render("participation.json", %{participation: participation, for: user}) do
     last_activity_id =
       with nil <- participation.last_activity_id do
-        ActivityPub.fetch_latest_activity_id_for_context(participation.conversation.ap_id, %{
-          user: user,
-          blocking_user: user
-        })
+        ActivityPub.fetch_latest_direct_activity_id_for_context(
+          participation.conversation.ap_id,
+          %{
+            user: user,
+            blocking_user: user
+          }
+        )
       end

     activity = Activity.get_by_id_with_object(last_activity_id)

View file

@@ -5,7 +5,7 @@ def project do
     [
       app: :pleroma,
       version: version("2.0.50"),
-      elixir: "~> 1.8",
+      elixir: "~> 1.9",
       elixirc_paths: elixirc_paths(Mix.env()),
       compilers: [:phoenix, :gettext] ++ Mix.compilers(),
       elixirc_options: [warnings_as_errors: warnings_as_errors(Mix.env())],

View file

@@ -0,0 +1,9 @@
defmodule Pleroma.Repo.Migrations.UserRawBio do
use Ecto.Migration
def change do
alter table(:users) do
add_if_not_exists(:raw_bio, :text)
end
end
end

View file

@@ -0,0 +1,25 @@
defmodule Pleroma.Repo.Migrations.PopulateUserRawBio do
use Ecto.Migration
import Ecto.Query
alias Pleroma.User
alias Pleroma.Repo
def change do
{:ok, _} = Application.ensure_all_started(:fast_sanitize)
User.Query.build(%{local: true})
|> select([u], struct(u, [:id, :ap_id, :bio]))
|> Repo.stream()
|> Enum.each(fn %{bio: bio} = user ->
if bio do
raw_bio =
bio
|> String.replace(~r(<br */?>), "\n")
|> Pleroma.HTML.strip_tags()
Ecto.Changeset.cast(user, %{raw_bio: raw_bio}, [:raw_bio])
|> Repo.update()
end
end)
end
end

View file

@@ -7,40 +7,28 @@ defmodule Pleroma.ConfigDBTest do
   import Pleroma.Factory
   alias Pleroma.ConfigDB

-  test "get_by_key/1" do
+  test "get_by_params/1" do
     config = insert(:config)
     insert(:config)

     assert config == ConfigDB.get_by_params(%{group: config.group, key: config.key})
   end

-  test "create/1" do
-    {:ok, config} = ConfigDB.create(%{group: ":pleroma", key: ":some_key", value: "some_value"})
-    assert config == ConfigDB.get_by_params(%{group: ":pleroma", key: ":some_key"})
-  end
-
-  test "update/1" do
-    config = insert(:config)
-    {:ok, updated} = ConfigDB.update(config, %{value: "some_value"})
-    loaded = ConfigDB.get_by_params(%{group: config.group, key: config.key})
-    assert loaded == updated
-  end
-
   test "get_all_as_keyword/0" do
     saved = insert(:config)
-    insert(:config, group: ":quack", key: ":level", value: ConfigDB.to_binary(:info))
-    insert(:config, group: ":quack", key: ":meta", value: ConfigDB.to_binary([:none]))
+    insert(:config, group: ":quack", key: ":level", value: :info)
+    insert(:config, group: ":quack", key: ":meta", value: [:none])

     insert(:config,
       group: ":quack",
       key: ":webhook_url",
-      value: ConfigDB.to_binary("https://hooks.slack.com/services/KEY/some_val")
+      value: "https://hooks.slack.com/services/KEY/some_val"
     )

     config = ConfigDB.get_all_as_keyword()

     assert config[:pleroma] == [
-             {ConfigDB.from_string(saved.key), ConfigDB.from_binary(saved.value)}
+             {saved.key, saved.value}
           ]

     assert config[:quack][:level] == :info
@@ -51,11 +39,11 @@ test "get_all_as_keyword/0" do
   describe "update_or_create/1" do
     test "common" do
       config = insert(:config)
-      key2 = "another_key"
+      key2 = :another_key

       params = [
-        %{group: "pleroma", key: key2, value: "another_value"},
-        %{group: config.group, key: config.key, value: "new_value"}
+        %{group: :pleroma, key: key2, value: "another_value"},
+        %{group: :pleroma, key: config.key, value: [a: 1, b: 2, c: "new_value"]}
       ]

       assert Repo.all(ConfigDB) |> length() == 1
@@ -65,16 +53,16 @@ test "common" do
       assert Repo.all(ConfigDB) |> length() == 2

       config1 = ConfigDB.get_by_params(%{group: config.group, key: config.key})
-      config2 = ConfigDB.get_by_params(%{group: "pleroma", key: key2})
+      config2 = ConfigDB.get_by_params(%{group: :pleroma, key: key2})

-      assert config1.value == ConfigDB.transform("new_value")
-      assert config2.value == ConfigDB.transform("another_value")
+      assert config1.value == [a: 1, b: 2, c: "new_value"]
+      assert config2.value == "another_value"
     end

     test "partial update" do
-      config = insert(:config, value: ConfigDB.to_binary(key1: "val1", key2: :val2))
+      config = insert(:config, value: [key1: "val1", key2: :val2])

-      {:ok, _config} =
+      {:ok, config} =
         ConfigDB.update_or_create(%{
           group: config.group,
           key: config.key,
@@ -83,15 +71,14 @@ test "partial update" do
       updated = ConfigDB.get_by_params(%{group: config.group, key: config.key})

-      value = ConfigDB.from_binary(updated.value)
-      assert length(value) == 3
-      assert value[:key1] == :val1
-      assert value[:key2] == :val2
-      assert value[:key3] == :val3
+      assert config.value == updated.value
+      assert updated.value[:key1] == :val1
+      assert updated.value[:key2] == :val2
+      assert updated.value[:key3] == :val3
     end

     test "deep merge" do
-      config = insert(:config, value: ConfigDB.to_binary(key1: "val1", key2: [k1: :v1, k2: "v2"]))
+      config = insert(:config, value: [key1: "val1", key2: [k1: :v1, k2: "v2"]])

       {:ok, config} =
         ConfigDB.update_or_create(%{
@@ -103,18 +90,15 @@ test "deep merge" do
       updated = ConfigDB.get_by_params(%{group: config.group, key: config.key})

       assert config.value == updated.value
-
-      value = ConfigDB.from_binary(updated.value)
-      assert value[:key1] == :val1
-      assert value[:key2] == [k1: :v1, k2: :v2, k3: :v3]
-      assert value[:key3] == :val3
+      assert updated.value[:key1] == :val1
+      assert updated.value[:key2] == [k1: :v1, k2: :v2, k3: :v3]
+      assert updated.value[:key3] == :val3
     end

     test "only full update for some keys" do
-      config1 = insert(:config, key: ":ecto_repos", value: ConfigDB.to_binary(repo: Pleroma.Repo))
+      config1 = insert(:config, key: :ecto_repos, value: [repo: Pleroma.Repo])
config2 = config2 = insert(:config, group: :cors_plug, key: :max_age, value: 18)
insert(:config, group: ":cors_plug", key: ":max_age", value: ConfigDB.to_binary(18))
{:ok, _config} = {:ok, _config} =
ConfigDB.update_or_create(%{ ConfigDB.update_or_create(%{
@ -133,8 +117,8 @@ test "only full update for some keys" do
updated1 = ConfigDB.get_by_params(%{group: config1.group, key: config1.key}) updated1 = ConfigDB.get_by_params(%{group: config1.group, key: config1.key})
updated2 = ConfigDB.get_by_params(%{group: config2.group, key: config2.key}) updated2 = ConfigDB.get_by_params(%{group: config2.group, key: config2.key})
assert ConfigDB.from_binary(updated1.value) == [another_repo: [Pleroma.Repo]] assert updated1.value == [another_repo: [Pleroma.Repo]]
assert ConfigDB.from_binary(updated2.value) == 777 assert updated2.value == 777
end end
test "full update if value is not keyword" do test "full update if value is not keyword" do
@ -142,7 +126,7 @@ test "full update if value is not keyword" do
insert(:config, insert(:config,
group: ":tesla", group: ":tesla",
key: ":adapter", key: ":adapter",
value: ConfigDB.to_binary(Tesla.Adapter.Hackney) value: Tesla.Adapter.Hackney
) )
{:ok, _config} = {:ok, _config} =
@ -154,20 +138,20 @@ test "full update if value is not keyword" do
updated = ConfigDB.get_by_params(%{group: config.group, key: config.key}) updated = ConfigDB.get_by_params(%{group: config.group, key: config.key})
assert ConfigDB.from_binary(updated.value) == Tesla.Adapter.Httpc assert updated.value == Tesla.Adapter.Httpc
end end
test "only full update for some subkeys" do test "only full update for some subkeys" do
config1 = config1 =
insert(:config, insert(:config,
key: ":emoji", key: ":emoji",
value: ConfigDB.to_binary(groups: [a: 1, b: 2], key: [a: 1]) value: [groups: [a: 1, b: 2], key: [a: 1]]
) )
config2 = config2 =
insert(:config, insert(:config,
key: ":assets", key: ":assets",
value: ConfigDB.to_binary(mascots: [a: 1, b: 2], key: [a: 1]) value: [mascots: [a: 1, b: 2], key: [a: 1]]
) )
{:ok, _config} = {:ok, _config} =
@ -187,8 +171,8 @@ test "only full update for some subkeys" do
updated1 = ConfigDB.get_by_params(%{group: config1.group, key: config1.key}) updated1 = ConfigDB.get_by_params(%{group: config1.group, key: config1.key})
updated2 = ConfigDB.get_by_params(%{group: config2.group, key: config2.key}) updated2 = ConfigDB.get_by_params(%{group: config2.group, key: config2.key})
assert ConfigDB.from_binary(updated1.value) == [groups: [c: 3, d: 4], key: [a: 1, b: 2]] assert updated1.value == [groups: [c: 3, d: 4], key: [a: 1, b: 2]]
assert ConfigDB.from_binary(updated2.value) == [mascots: [c: 3, d: 4], key: [a: 1, b: 2]] assert updated2.value == [mascots: [c: 3, d: 4], key: [a: 1, b: 2]]
end end
end end
@ -206,14 +190,14 @@ test "full delete" do
end end
test "partial subkeys delete" do test "partial subkeys delete" do
config = insert(:config, value: ConfigDB.to_binary(groups: [a: 1, b: 2], key: [a: 1])) config = insert(:config, value: [groups: [a: 1, b: 2], key: [a: 1]])
{:ok, deleted} = {:ok, deleted} =
ConfigDB.delete(%{group: config.group, key: config.key, subkeys: [":groups"]}) ConfigDB.delete(%{group: config.group, key: config.key, subkeys: [":groups"]})
assert Ecto.get_meta(deleted, :state) == :loaded assert Ecto.get_meta(deleted, :state) == :loaded
assert deleted.value == ConfigDB.to_binary(key: [a: 1]) assert deleted.value == [key: [a: 1]]
updated = ConfigDB.get_by_params(%{group: config.group, key: config.key}) updated = ConfigDB.get_by_params(%{group: config.group, key: config.key})
@ -221,7 +205,7 @@ test "partial subkeys delete" do
end end
test "full delete if remaining value after subkeys deletion is empty list" do test "full delete if remaining value after subkeys deletion is empty list" do
config = insert(:config, value: ConfigDB.to_binary(groups: [a: 1, b: 2])) config = insert(:config, value: [groups: [a: 1, b: 2]])
{:ok, deleted} = {:ok, deleted} =
ConfigDB.delete(%{group: config.group, key: config.key, subkeys: [":groups"]}) ConfigDB.delete(%{group: config.group, key: config.key, subkeys: [":groups"]})
@ -232,163 +216,116 @@ test "full delete if remaining value after subkeys deletion is empty list" do
end end
end end
describe "transform/1" do describe "to_elixir_types/1" do
test "string" do test "string" do
binary = ConfigDB.transform("value as string") assert ConfigDB.to_elixir_types("value as string") == "value as string"
assert binary == :erlang.term_to_binary("value as string")
assert ConfigDB.from_binary(binary) == "value as string"
end end
test "boolean" do test "boolean" do
binary = ConfigDB.transform(false) assert ConfigDB.to_elixir_types(false) == false
assert binary == :erlang.term_to_binary(false)
assert ConfigDB.from_binary(binary) == false
end end
test "nil" do test "nil" do
binary = ConfigDB.transform(nil) assert ConfigDB.to_elixir_types(nil) == nil
assert binary == :erlang.term_to_binary(nil)
assert ConfigDB.from_binary(binary) == nil
end end
test "integer" do test "integer" do
binary = ConfigDB.transform(150) assert ConfigDB.to_elixir_types(150) == 150
assert binary == :erlang.term_to_binary(150)
assert ConfigDB.from_binary(binary) == 150
end end
test "atom" do test "atom" do
binary = ConfigDB.transform(":atom") assert ConfigDB.to_elixir_types(":atom") == :atom
assert binary == :erlang.term_to_binary(:atom)
assert ConfigDB.from_binary(binary) == :atom
end end
test "ssl options" do test "ssl options" do
binary = ConfigDB.transform([":tlsv1", ":tlsv1.1", ":tlsv1.2"]) assert ConfigDB.to_elixir_types([":tlsv1", ":tlsv1.1", ":tlsv1.2"]) == [
assert binary == :erlang.term_to_binary([:tlsv1, :"tlsv1.1", :"tlsv1.2"]) :tlsv1,
assert ConfigDB.from_binary(binary) == [:tlsv1, :"tlsv1.1", :"tlsv1.2"] :"tlsv1.1",
:"tlsv1.2"
]
end end
test "pleroma module" do test "pleroma module" do
binary = ConfigDB.transform("Pleroma.Bookmark") assert ConfigDB.to_elixir_types("Pleroma.Bookmark") == Pleroma.Bookmark
assert binary == :erlang.term_to_binary(Pleroma.Bookmark)
assert ConfigDB.from_binary(binary) == Pleroma.Bookmark
end end
test "pleroma string" do test "pleroma string" do
binary = ConfigDB.transform("Pleroma") assert ConfigDB.to_elixir_types("Pleroma") == "Pleroma"
assert binary == :erlang.term_to_binary("Pleroma")
assert ConfigDB.from_binary(binary) == "Pleroma"
end end
test "phoenix module" do test "phoenix module" do
binary = ConfigDB.transform("Phoenix.Socket.V1.JSONSerializer") assert ConfigDB.to_elixir_types("Phoenix.Socket.V1.JSONSerializer") ==
assert binary == :erlang.term_to_binary(Phoenix.Socket.V1.JSONSerializer) Phoenix.Socket.V1.JSONSerializer
assert ConfigDB.from_binary(binary) == Phoenix.Socket.V1.JSONSerializer
end end
test "tesla module" do test "tesla module" do
binary = ConfigDB.transform("Tesla.Adapter.Hackney") assert ConfigDB.to_elixir_types("Tesla.Adapter.Hackney") == Tesla.Adapter.Hackney
assert binary == :erlang.term_to_binary(Tesla.Adapter.Hackney)
assert ConfigDB.from_binary(binary) == Tesla.Adapter.Hackney
end end
test "ExSyslogger module" do test "ExSyslogger module" do
binary = ConfigDB.transform("ExSyslogger") assert ConfigDB.to_elixir_types("ExSyslogger") == ExSyslogger
assert binary == :erlang.term_to_binary(ExSyslogger)
assert ConfigDB.from_binary(binary) == ExSyslogger
end end
test "Quack.Logger module" do test "Quack.Logger module" do
binary = ConfigDB.transform("Quack.Logger") assert ConfigDB.to_elixir_types("Quack.Logger") == Quack.Logger
assert binary == :erlang.term_to_binary(Quack.Logger)
assert ConfigDB.from_binary(binary) == Quack.Logger
end end
test "Swoosh.Adapters modules" do test "Swoosh.Adapters modules" do
binary = ConfigDB.transform("Swoosh.Adapters.SMTP") assert ConfigDB.to_elixir_types("Swoosh.Adapters.SMTP") == Swoosh.Adapters.SMTP
assert binary == :erlang.term_to_binary(Swoosh.Adapters.SMTP) assert ConfigDB.to_elixir_types("Swoosh.Adapters.AmazonSES") == Swoosh.Adapters.AmazonSES
assert ConfigDB.from_binary(binary) == Swoosh.Adapters.SMTP
binary = ConfigDB.transform("Swoosh.Adapters.AmazonSES")
assert binary == :erlang.term_to_binary(Swoosh.Adapters.AmazonSES)
assert ConfigDB.from_binary(binary) == Swoosh.Adapters.AmazonSES
end end
test "sigil" do test "sigil" do
binary = ConfigDB.transform("~r[comp[lL][aA][iI][nN]er]") assert ConfigDB.to_elixir_types("~r[comp[lL][aA][iI][nN]er]") == ~r/comp[lL][aA][iI][nN]er/
assert binary == :erlang.term_to_binary(~r/comp[lL][aA][iI][nN]er/)
assert ConfigDB.from_binary(binary) == ~r/comp[lL][aA][iI][nN]er/
end end
test "link sigil" do test "link sigil" do
binary = ConfigDB.transform("~r/https:\/\/example.com/") assert ConfigDB.to_elixir_types("~r/https:\/\/example.com/") == ~r/https:\/\/example.com/
assert binary == :erlang.term_to_binary(~r/https:\/\/example.com/)
assert ConfigDB.from_binary(binary) == ~r/https:\/\/example.com/
end end
test "link sigil with um modifiers" do test "link sigil with um modifiers" do
binary = ConfigDB.transform("~r/https:\/\/example.com/um") assert ConfigDB.to_elixir_types("~r/https:\/\/example.com/um") ==
assert binary == :erlang.term_to_binary(~r/https:\/\/example.com/um) ~r/https:\/\/example.com/um
assert ConfigDB.from_binary(binary) == ~r/https:\/\/example.com/um
end end
test "link sigil with i modifier" do test "link sigil with i modifier" do
binary = ConfigDB.transform("~r/https:\/\/example.com/i") assert ConfigDB.to_elixir_types("~r/https:\/\/example.com/i") == ~r/https:\/\/example.com/i
assert binary == :erlang.term_to_binary(~r/https:\/\/example.com/i)
assert ConfigDB.from_binary(binary) == ~r/https:\/\/example.com/i
end end
test "link sigil with s modifier" do test "link sigil with s modifier" do
binary = ConfigDB.transform("~r/https:\/\/example.com/s") assert ConfigDB.to_elixir_types("~r/https:\/\/example.com/s") == ~r/https:\/\/example.com/s
assert binary == :erlang.term_to_binary(~r/https:\/\/example.com/s)
assert ConfigDB.from_binary(binary) == ~r/https:\/\/example.com/s
end end
test "raise if valid delimiter not found" do test "raise if valid delimiter not found" do
assert_raise ArgumentError, "valid delimiter for Regex expression not found", fn -> assert_raise ArgumentError, "valid delimiter for Regex expression not found", fn ->
ConfigDB.transform("~r/https://[]{}<>\"'()|example.com/s") ConfigDB.to_elixir_types("~r/https://[]{}<>\"'()|example.com/s")
end end
end end
test "2 child tuple" do test "2 child tuple" do
binary = ConfigDB.transform(%{"tuple" => ["v1", ":v2"]}) assert ConfigDB.to_elixir_types(%{"tuple" => ["v1", ":v2"]}) == {"v1", :v2}
assert binary == :erlang.term_to_binary({"v1", :v2})
assert ConfigDB.from_binary(binary) == {"v1", :v2}
end end
test "proxy tuple with localhost" do test "proxy tuple with localhost" do
binary = assert ConfigDB.to_elixir_types(%{
ConfigDB.transform(%{
"tuple" => [":proxy_url", %{"tuple" => [":socks5", "localhost", 1234]}] "tuple" => [":proxy_url", %{"tuple" => [":socks5", "localhost", 1234]}]
}) }) == {:proxy_url, {:socks5, :localhost, 1234}}
assert binary == :erlang.term_to_binary({:proxy_url, {:socks5, :localhost, 1234}})
assert ConfigDB.from_binary(binary) == {:proxy_url, {:socks5, :localhost, 1234}}
end end
test "proxy tuple with domain" do test "proxy tuple with domain" do
binary = assert ConfigDB.to_elixir_types(%{
ConfigDB.transform(%{
"tuple" => [":proxy_url", %{"tuple" => [":socks5", "domain.com", 1234]}] "tuple" => [":proxy_url", %{"tuple" => [":socks5", "domain.com", 1234]}]
}) }) == {:proxy_url, {:socks5, 'domain.com', 1234}}
assert binary == :erlang.term_to_binary({:proxy_url, {:socks5, 'domain.com', 1234}})
assert ConfigDB.from_binary(binary) == {:proxy_url, {:socks5, 'domain.com', 1234}}
end end
test "proxy tuple with ip" do test "proxy tuple with ip" do
binary = assert ConfigDB.to_elixir_types(%{
ConfigDB.transform(%{
"tuple" => [":proxy_url", %{"tuple" => [":socks5", "127.0.0.1", 1234]}] "tuple" => [":proxy_url", %{"tuple" => [":socks5", "127.0.0.1", 1234]}]
}) }) == {:proxy_url, {:socks5, {127, 0, 0, 1}, 1234}}
assert binary == :erlang.term_to_binary({:proxy_url, {:socks5, {127, 0, 0, 1}, 1234}})
assert ConfigDB.from_binary(binary) == {:proxy_url, {:socks5, {127, 0, 0, 1}, 1234}}
end end
test "tuple with n childs" do test "tuple with n childs" do
binary = assert ConfigDB.to_elixir_types(%{
ConfigDB.transform(%{
"tuple" => [ "tuple" => [
"v1", "v1",
":v2", ":v2",
@ -397,69 +334,41 @@ test "tuple with n childs" do
false, false,
"Phoenix.Socket.V1.JSONSerializer" "Phoenix.Socket.V1.JSONSerializer"
] ]
}) }) == {"v1", :v2, Pleroma.Bookmark, 150, false, Phoenix.Socket.V1.JSONSerializer}
assert binary ==
:erlang.term_to_binary(
{"v1", :v2, Pleroma.Bookmark, 150, false, Phoenix.Socket.V1.JSONSerializer}
)
assert ConfigDB.from_binary(binary) ==
{"v1", :v2, Pleroma.Bookmark, 150, false, Phoenix.Socket.V1.JSONSerializer}
end end
test "map with string key" do test "map with string key" do
binary = ConfigDB.transform(%{"key" => "value"}) assert ConfigDB.to_elixir_types(%{"key" => "value"}) == %{"key" => "value"}
assert binary == :erlang.term_to_binary(%{"key" => "value"})
assert ConfigDB.from_binary(binary) == %{"key" => "value"}
end end
test "map with atom key" do test "map with atom key" do
binary = ConfigDB.transform(%{":key" => "value"}) assert ConfigDB.to_elixir_types(%{":key" => "value"}) == %{key: "value"}
assert binary == :erlang.term_to_binary(%{key: "value"})
assert ConfigDB.from_binary(binary) == %{key: "value"}
end end
test "list of strings" do test "list of strings" do
binary = ConfigDB.transform(["v1", "v2", "v3"]) assert ConfigDB.to_elixir_types(["v1", "v2", "v3"]) == ["v1", "v2", "v3"]
assert binary == :erlang.term_to_binary(["v1", "v2", "v3"])
assert ConfigDB.from_binary(binary) == ["v1", "v2", "v3"]
end end
test "list of modules" do test "list of modules" do
binary = ConfigDB.transform(["Pleroma.Repo", "Pleroma.Activity"]) assert ConfigDB.to_elixir_types(["Pleroma.Repo", "Pleroma.Activity"]) == [
assert binary == :erlang.term_to_binary([Pleroma.Repo, Pleroma.Activity]) Pleroma.Repo,
assert ConfigDB.from_binary(binary) == [Pleroma.Repo, Pleroma.Activity] Pleroma.Activity
]
end end
test "list of atoms" do test "list of atoms" do
binary = ConfigDB.transform([":v1", ":v2", ":v3"]) assert ConfigDB.to_elixir_types([":v1", ":v2", ":v3"]) == [:v1, :v2, :v3]
assert binary == :erlang.term_to_binary([:v1, :v2, :v3])
assert ConfigDB.from_binary(binary) == [:v1, :v2, :v3]
end end
test "list of mixed values" do test "list of mixed values" do
binary = assert ConfigDB.to_elixir_types([
ConfigDB.transform([
"v1", "v1",
":v2", ":v2",
"Pleroma.Repo", "Pleroma.Repo",
"Phoenix.Socket.V1.JSONSerializer", "Phoenix.Socket.V1.JSONSerializer",
15, 15,
false false
]) ]) == [
assert binary ==
:erlang.term_to_binary([
"v1",
:v2,
Pleroma.Repo,
Phoenix.Socket.V1.JSONSerializer,
15,
false
])
assert ConfigDB.from_binary(binary) == [
"v1", "v1",
:v2, :v2,
Pleroma.Repo, Pleroma.Repo,
@ -470,40 +379,17 @@ test "list of mixed values" do
end end
test "simple keyword" do test "simple keyword" do
binary = ConfigDB.transform([%{"tuple" => [":key", "value"]}]) assert ConfigDB.to_elixir_types([%{"tuple" => [":key", "value"]}]) == [key: "value"]
assert binary == :erlang.term_to_binary([{:key, "value"}])
assert ConfigDB.from_binary(binary) == [{:key, "value"}]
assert ConfigDB.from_binary(binary) == [key: "value"]
end
test "keyword with partial_chain key" do
binary =
ConfigDB.transform([%{"tuple" => [":partial_chain", "&:hackney_connect.partial_chain/1"]}])
assert binary == :erlang.term_to_binary(partial_chain: &:hackney_connect.partial_chain/1)
assert ConfigDB.from_binary(binary) == [partial_chain: &:hackney_connect.partial_chain/1]
end end
test "keyword" do test "keyword" do
binary = assert ConfigDB.to_elixir_types([
ConfigDB.transform([
%{"tuple" => [":types", "Pleroma.PostgresTypes"]}, %{"tuple" => [":types", "Pleroma.PostgresTypes"]},
%{"tuple" => [":telemetry_event", ["Pleroma.Repo.Instrumenter"]]}, %{"tuple" => [":telemetry_event", ["Pleroma.Repo.Instrumenter"]]},
%{"tuple" => [":migration_lock", nil]}, %{"tuple" => [":migration_lock", nil]},
%{"tuple" => [":key1", 150]}, %{"tuple" => [":key1", 150]},
%{"tuple" => [":key2", "string"]} %{"tuple" => [":key2", "string"]}
]) ]) == [
assert binary ==
:erlang.term_to_binary(
types: Pleroma.PostgresTypes,
telemetry_event: [Pleroma.Repo.Instrumenter],
migration_lock: nil,
key1: 150,
key2: "string"
)
assert ConfigDB.from_binary(binary) == [
types: Pleroma.PostgresTypes, types: Pleroma.PostgresTypes,
telemetry_event: [Pleroma.Repo.Instrumenter], telemetry_event: [Pleroma.Repo.Instrumenter],
migration_lock: nil, migration_lock: nil,
@ -512,9 +398,12 @@ test "keyword" do
] ]
end end
test "trandformed keyword" do
assert ConfigDB.to_elixir_types(a: 1, b: 2, c: "string") == [a: 1, b: 2, c: "string"]
end
test "complex keyword with nested mixed childs" do test "complex keyword with nested mixed childs" do
binary = assert ConfigDB.to_elixir_types([
ConfigDB.transform([
%{"tuple" => [":uploader", "Pleroma.Uploaders.Local"]}, %{"tuple" => [":uploader", "Pleroma.Uploaders.Local"]},
%{"tuple" => [":filters", ["Pleroma.Upload.Filter.Dedupe"]]}, %{"tuple" => [":filters", ["Pleroma.Upload.Filter.Dedupe"]]},
%{"tuple" => [":link_name", true]}, %{"tuple" => [":link_name", true]},
@ -529,33 +418,16 @@ test "complex keyword with nested mixed childs" do
%{ %{
"tuple" => [ "tuple" => [
":http", ":http",
[%{"tuple" => [":follow_redirect", true]}, %{"tuple" => [":pool", ":upload"]}]
]
}
]
]
}
])
assert binary ==
:erlang.term_to_binary(
uploader: Pleroma.Uploaders.Local,
filters: [Pleroma.Upload.Filter.Dedupe],
link_name: true,
proxy_remote: false,
common_map: %{key: "value"},
proxy_opts: [
redirect_on_failure: false,
max_body_length: 1_048_576,
http: [
follow_redirect: true,
pool: :upload
]
]
)
assert ConfigDB.from_binary(binary) ==
[ [
%{"tuple" => [":follow_redirect", true]},
%{"tuple" => [":pool", ":upload"]}
]
]
}
]
]
}
]) == [
uploader: Pleroma.Uploaders.Local, uploader: Pleroma.Uploaders.Local,
filters: [Pleroma.Upload.Filter.Dedupe], filters: [Pleroma.Upload.Filter.Dedupe],
link_name: true, link_name: true,
@ -573,25 +445,13 @@ test "complex keyword with nested mixed childs" do
end end
test "common keyword" do test "common keyword" do
binary = assert ConfigDB.to_elixir_types([
ConfigDB.transform([
%{"tuple" => [":level", ":warn"]}, %{"tuple" => [":level", ":warn"]},
%{"tuple" => [":meta", [":all"]]}, %{"tuple" => [":meta", [":all"]]},
%{"tuple" => [":path", ""]}, %{"tuple" => [":path", ""]},
%{"tuple" => [":val", nil]}, %{"tuple" => [":val", nil]},
%{"tuple" => [":webhook_url", "https://hooks.slack.com/services/YOUR-KEY-HERE"]} %{"tuple" => [":webhook_url", "https://hooks.slack.com/services/YOUR-KEY-HERE"]}
]) ]) == [
assert binary ==
:erlang.term_to_binary(
level: :warn,
meta: [:all],
path: "",
val: nil,
webhook_url: "https://hooks.slack.com/services/YOUR-KEY-HERE"
)
assert ConfigDB.from_binary(binary) == [
level: :warn, level: :warn,
meta: [:all], meta: [:all],
path: "", path: "",
@ -601,27 +461,19 @@ test "common keyword" do
end end
test "complex keyword with sigil" do test "complex keyword with sigil" do
binary = assert ConfigDB.to_elixir_types([
ConfigDB.transform([
%{"tuple" => [":federated_timeline_removal", []]}, %{"tuple" => [":federated_timeline_removal", []]},
%{"tuple" => [":reject", ["~r/comp[lL][aA][iI][nN]er/"]]}, %{"tuple" => [":reject", ["~r/comp[lL][aA][iI][nN]er/"]]},
%{"tuple" => [":replace", []]} %{"tuple" => [":replace", []]}
]) ]) == [
assert binary ==
:erlang.term_to_binary(
federated_timeline_removal: [], federated_timeline_removal: [],
reject: [~r/comp[lL][aA][iI][nN]er/], reject: [~r/comp[lL][aA][iI][nN]er/],
replace: [] replace: []
) ]
assert ConfigDB.from_binary(binary) ==
[federated_timeline_removal: [], reject: [~r/comp[lL][aA][iI][nN]er/], replace: []]
end end
test "complex keyword with tuples with more than 2 values" do test "complex keyword with tuples with more than 2 values" do
binary = assert ConfigDB.to_elixir_types([
ConfigDB.transform([
%{ %{
"tuple" => [ "tuple" => [
":http", ":http",
@ -675,24 +527,7 @@ test "complex keyword with tuples with more than 2 values" do
] ]
] ]
} }
]) ]) == [
assert binary ==
:erlang.term_to_binary(
http: [
key1: [
_: [
{"/api/v1/streaming", Pleroma.Web.MastodonAPI.WebsocketHandler, []},
{"/websocket", Phoenix.Endpoint.CowboyWebSocket,
{Phoenix.Transports.WebSocket,
{Pleroma.Web.Endpoint, Pleroma.Web.UserSocket, []}}},
{:_, Phoenix.Endpoint.Cowboy2Handler, {Pleroma.Web.Endpoint, []}}
]
]
]
)
assert ConfigDB.from_binary(binary) == [
http: [ http: [
key1: [ key1: [
{:_, {:_,
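To make the renamed API concrete, a minimal sketch of the conversion the tests above exercise; to_elixir_types/1 is the name used in this diff, and the sample value mirrors the "common keyword" test:

    ConfigDB.to_elixir_types([
      %{"tuple" => [":level", ":warn"]},
      %{"tuple" => [":meta", [":all"]]},
      %{"tuple" => [":webhook_url", "https://hooks.slack.com/services/KEY"]}
    ])
    # => [level: :warn, meta: [:all], webhook_url: "https://hooks.slack.com/services/KEY"]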


@ -6,9 +6,9 @@ defmodule Pleroma.Config.TransferTaskTest do
use Pleroma.DataCase use Pleroma.DataCase
import ExUnit.CaptureLog import ExUnit.CaptureLog
import Pleroma.Factory
alias Pleroma.Config.TransferTask alias Pleroma.Config.TransferTask
alias Pleroma.ConfigDB
setup do: clear_config(:configurable_from_database, true) setup do: clear_config(:configurable_from_database, true)
@ -19,31 +19,11 @@ test "transfer config values from db to env" do
refute Application.get_env(:postgrex, :test_key) refute Application.get_env(:postgrex, :test_key)
initial = Application.get_env(:logger, :level) initial = Application.get_env(:logger, :level)
ConfigDB.create(%{ insert(:config, key: :test_key, value: [live: 2, com: 3])
group: ":pleroma", insert(:config, group: :idna, key: :test_key, value: [live: 15, com: 35])
key: ":test_key", insert(:config, group: :quack, key: :test_key, value: [:test_value1, :test_value2])
value: [live: 2, com: 3] insert(:config, group: :postgrex, key: :test_key, value: :value)
}) insert(:config, group: :logger, key: :level, value: :debug)
ConfigDB.create(%{
group: ":idna",
key: ":test_key",
value: [live: 15, com: 35]
})
ConfigDB.create(%{
group: ":quack",
key: ":test_key",
value: [:test_value1, :test_value2]
})
ConfigDB.create(%{
group: ":postgrex",
key: ":test_key",
value: :value
})
ConfigDB.create(%{group: ":logger", key: ":level", value: :debug})
TransferTask.start_link([]) TransferTask.start_link([])
@ -66,17 +46,8 @@ test "transfer config values for 1 group and some keys" do
level = Application.get_env(:quack, :level) level = Application.get_env(:quack, :level)
meta = Application.get_env(:quack, :meta) meta = Application.get_env(:quack, :meta)
ConfigDB.create(%{ insert(:config, group: :quack, key: :level, value: :info)
group: ":quack", insert(:config, group: :quack, key: :meta, value: [:none])
key: ":level",
value: :info
})
ConfigDB.create(%{
group: ":quack",
key: ":meta",
value: [:none]
})
TransferTask.start_link([]) TransferTask.start_link([])
@ -95,17 +66,8 @@ test "transfer config values with full subkey update" do
clear_config(:emoji) clear_config(:emoji)
clear_config(:assets) clear_config(:assets)
ConfigDB.create(%{ insert(:config, key: :emoji, value: [groups: [a: 1, b: 2]])
group: ":pleroma", insert(:config, key: :assets, value: [mascots: [a: 1, b: 2]])
key: ":emoji",
value: [groups: [a: 1, b: 2]]
})
ConfigDB.create(%{
group: ":pleroma",
key: ":assets",
value: [mascots: [a: 1, b: 2]]
})
TransferTask.start_link([]) TransferTask.start_link([])
@ -122,12 +84,7 @@ test "transfer config values with full subkey update" do
test "don't restart if no reboot time settings were changed" do test "don't restart if no reboot time settings were changed" do
clear_config(:emoji) clear_config(:emoji)
insert(:config, key: :emoji, value: [groups: [a: 1, b: 2]])
ConfigDB.create(%{
group: ":pleroma",
key: ":emoji",
value: [groups: [a: 1, b: 2]]
})
refute String.contains?( refute String.contains?(
capture_log(fn -> TransferTask.start_link([]) end), capture_log(fn -> TransferTask.start_link([]) end),
@ -137,25 +94,13 @@ test "don't restart if no reboot time settings were changed" do
test "on reboot time key" do test "on reboot time key" do
clear_config(:chat) clear_config(:chat)
insert(:config, key: :chat, value: [enabled: false])
ConfigDB.create(%{
group: ":pleroma",
key: ":chat",
value: [enabled: false]
})
assert capture_log(fn -> TransferTask.start_link([]) end) =~ "pleroma restarted" assert capture_log(fn -> TransferTask.start_link([]) end) =~ "pleroma restarted"
end end
test "on reboot time subkey" do test "on reboot time subkey" do
clear_config(Pleroma.Captcha) clear_config(Pleroma.Captcha)
insert(:config, key: Pleroma.Captcha, value: [seconds_valid: 60])
ConfigDB.create(%{
group: ":pleroma",
key: "Pleroma.Captcha",
value: [seconds_valid: 60]
})
assert capture_log(fn -> TransferTask.start_link([]) end) =~ "pleroma restarted" assert capture_log(fn -> TransferTask.start_link([]) end) =~ "pleroma restarted"
end end
@ -163,17 +108,8 @@ test "don't restart pleroma on reboot time key and subkey if there is false flag
clear_config(:chat) clear_config(:chat)
clear_config(Pleroma.Captcha) clear_config(Pleroma.Captcha)
ConfigDB.create(%{ insert(:config, key: :chat, value: [enabled: false])
group: ":pleroma", insert(:config, key: Pleroma.Captcha, value: [seconds_valid: 60])
key: ":chat",
value: [enabled: false]
})
ConfigDB.create(%{
group: ":pleroma",
key: "Pleroma.Captcha",
value: [seconds_valid: 60]
})
refute String.contains?( refute String.contains?(
capture_log(fn -> TransferTask.load_and_update_env([], false) end), capture_log(fn -> TransferTask.load_and_update_env([], false) end),


@ -42,7 +42,8 @@ def user_factory do
user user
| ap_id: User.ap_id(user), | ap_id: User.ap_id(user),
follower_address: User.ap_followers(user), follower_address: User.ap_followers(user),
following_address: User.ap_following(user) following_address: User.ap_following(user),
raw_bio: user.bio
} }
end end
@ -396,24 +397,17 @@ def registration_factory do
} }
end end
def config_factory do def config_factory(attrs \\ %{}) do
%Pleroma.ConfigDB{ %Pleroma.ConfigDB{
key: key: sequence(:key, &String.to_atom("some_key_#{&1}")),
sequence(:key, fn key -> group: :pleroma,
# Atom dynamic registration hack in tests
"some_key_#{key}"
|> String.to_atom()
|> inspect()
end),
group: ":pleroma",
value: value:
sequence( sequence(
:value, :value,
fn key -> &%{another_key: "#{&1}somevalue", another: "#{&1}somevalue"}
:erlang.term_to_binary(%{another_key: "#{key}somevalue", another: "#{key}somevalue"})
end
) )
} }
|> merge_attributes(attrs)
end end
def marker_factory do def marker_factory do


@ -5,6 +5,8 @@
defmodule Mix.Tasks.Pleroma.ConfigTest do defmodule Mix.Tasks.Pleroma.ConfigTest do
use Pleroma.DataCase use Pleroma.DataCase
import Pleroma.Factory
alias Pleroma.ConfigDB alias Pleroma.ConfigDB
alias Pleroma.Repo alias Pleroma.Repo
@ -49,24 +51,19 @@ test "filtered settings are migrated to db" do
refute ConfigDB.get_by_params(%{group: ":pleroma", key: "Pleroma.Repo"}) refute ConfigDB.get_by_params(%{group: ":pleroma", key: "Pleroma.Repo"})
refute ConfigDB.get_by_params(%{group: ":postgrex", key: ":json_library"}) refute ConfigDB.get_by_params(%{group: ":postgrex", key: ":json_library"})
assert ConfigDB.from_binary(config1.value) == [key: "value", key2: [Repo]] assert config1.value == [key: "value", key2: [Repo]]
assert ConfigDB.from_binary(config2.value) == [key: "value2", key2: ["Activity"]] assert config2.value == [key: "value2", key2: ["Activity"]]
assert ConfigDB.from_binary(config3.value) == :info assert config3.value == :info
end end
test "config table is truncated before migration" do test "config table is truncated before migration" do
ConfigDB.create(%{ insert(:config, key: :first_setting, value: [key: "value", key2: ["Activity"]])
group: ":pleroma",
key: ":first_setting",
value: [key: "value", key2: ["Activity"]]
})
assert Repo.aggregate(ConfigDB, :count, :id) == 1 assert Repo.aggregate(ConfigDB, :count, :id) == 1
Mix.Tasks.Pleroma.Config.migrate_to_db("test/fixtures/config/temp.secret.exs") Mix.Tasks.Pleroma.Config.migrate_to_db("test/fixtures/config/temp.secret.exs")
config = ConfigDB.get_by_params(%{group: ":pleroma", key: ":first_setting"}) config = ConfigDB.get_by_params(%{group: ":pleroma", key: ":first_setting"})
assert ConfigDB.from_binary(config.value) == [key: "value", key2: [Repo]] assert config.value == [key: "value", key2: [Repo]]
end end
end end
@ -82,19 +79,9 @@ test "config table is truncated before migration" do
end end
test "settings are migrated to file and deleted from db", %{temp_file: temp_file} do test "settings are migrated to file and deleted from db", %{temp_file: temp_file} do
ConfigDB.create(%{ insert(:config, key: :setting_first, value: [key: "value", key2: ["Activity"]])
group: ":pleroma", insert(:config, key: :setting_second, value: [key: "value2", key2: [Repo]])
key: ":setting_first", insert(:config, group: :quack, key: :level, value: :info)
value: [key: "value", key2: ["Activity"]]
})
ConfigDB.create(%{
group: ":pleroma",
key: ":setting_second",
value: [key: "value2", key2: [Repo]]
})
ConfigDB.create(%{group: ":quack", key: ":level", value: :info})
Mix.Tasks.Pleroma.Config.run(["migrate_from_db", "--env", "temp", "-d"]) Mix.Tasks.Pleroma.Config.run(["migrate_from_db", "--env", "temp", "-d"])
@ -107,9 +94,8 @@ test "settings are migrated to file and deleted from db", %{temp_file: temp_file
end end
test "load a settings with large values and pass to file", %{temp_file: temp_file} do test "load a settings with large values and pass to file", %{temp_file: temp_file} do
ConfigDB.create(%{ insert(:config,
group: ":pleroma", key: :instance,
key: ":instance",
value: [ value: [
name: "Pleroma", name: "Pleroma",
email: "example@example.com", email: "example@example.com",
@ -163,7 +149,6 @@ test "load a settings with large values and pass to file", %{temp_file: temp_fil
extended_nickname_format: true, extended_nickname_format: true,
multi_factor_authentication: [ multi_factor_authentication: [
totp: [ totp: [
# digits 6 or 8
digits: 6, digits: 6,
period: 30 period: 30
], ],
@ -173,7 +158,7 @@ test "load a settings with large values and pass to file", %{temp_file: temp_fil
] ]
] ]
] ]
}) )
Mix.Tasks.Pleroma.Config.run(["migrate_from_db", "--env", "temp", "-d"]) Mix.Tasks.Pleroma.Config.run(["migrate_from_db", "--env", "temp", "-d"])


@ -6,21 +6,17 @@ defmodule Pleroma.Upload.Filter.MogrifyTest do
use Pleroma.DataCase use Pleroma.DataCase
import Mock import Mock
alias Pleroma.Config
alias Pleroma.Upload
alias Pleroma.Upload.Filter alias Pleroma.Upload.Filter
setup do: clear_config([Filter.Mogrify, :args])
test "apply mogrify filter" do test "apply mogrify filter" do
Config.put([Filter.Mogrify, :args], [{"tint", "40"}]) clear_config(Filter.Mogrify, args: [{"tint", "40"}])
File.cp!( File.cp!(
"test/fixtures/image.jpg", "test/fixtures/image.jpg",
"test/fixtures/image_tmp.jpg" "test/fixtures/image_tmp.jpg"
) )
upload = %Upload{ upload = %Pleroma.Upload{
name: "an… image.jpg", name: "an… image.jpg",
content_type: "image/jpg", content_type: "image/jpg",
path: Path.absname("test/fixtures/image_tmp.jpg"), path: Path.absname("test/fixtures/image_tmp.jpg"),


@ -1,5 +1,5 @@
defmodule Pleroma.Web.ActivityPub.ObjectValidators.Types.DateTimeTest do defmodule Pleroma.Web.ActivityPub.ObjectValidators.Types.DateTimeTest do
alias Pleroma.Web.ActivityPub.ObjectValidators.Types.DateTime alias Pleroma.EctoType.ActivityPub.ObjectValidators.DateTime
use Pleroma.DataCase use Pleroma.DataCase
test "it validates an xsd:Datetime" do test "it validates an xsd:Datetime" do


@ -3,7 +3,7 @@
# SPDX-License-Identifier: AGPL-3.0-only # SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ObjectValidators.Types.ObjectIDTest do defmodule Pleroma.Web.ObjectValidators.Types.ObjectIDTest do
alias Pleroma.Web.ActivityPub.ObjectValidators.Types.ObjectID alias Pleroma.EctoType.ActivityPub.ObjectValidators.ObjectID
use Pleroma.DataCase use Pleroma.DataCase
@uris [ @uris [


@ -1,5 +1,5 @@
defmodule Pleroma.Web.ObjectValidators.Types.RecipientsTest do defmodule Pleroma.Web.ObjectValidators.Types.RecipientsTest do
alias Pleroma.Web.ActivityPub.ObjectValidators.Types.Recipients alias Pleroma.EctoType.ActivityPub.ObjectValidators.Recipients
use Pleroma.DataCase use Pleroma.DataCase
test "it asserts that all elements of the list are object ids" do test "it asserts that all elements of the list are object ids" do

View file

@ -5,7 +5,7 @@
defmodule Pleroma.Web.ActivityPub.ObjectValidators.Types.SafeTextTest do defmodule Pleroma.Web.ActivityPub.ObjectValidators.Types.SafeTextTest do
use Pleroma.DataCase use Pleroma.DataCase
alias Pleroma.Web.ActivityPub.ObjectValidators.Types.SafeText alias Pleroma.EctoType.ActivityPub.ObjectValidators.SafeText
test "it lets normal text go through" do test "it lets normal text go through" do
text = "hey how are you" text = "hey how are you"


@ -57,12 +57,12 @@ test "with settings only in db", %{conn: conn} do
] ]
} = json_response_and_validate_schema(conn, 200) } = json_response_and_validate_schema(conn, 200)
assert key1 == config1.key assert key1 == inspect(config1.key)
assert key2 == config2.key assert key2 == inspect(config2.key)
end end
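    # ConfigDB keys are stored as atoms now, so the admin API response is
    # compared against their inspected form; inspect/1 is plain Elixir
    # behaviour, the sample values below are only illustrative:
    #
    #   inspect(:instance)        # => ":instance"
    #   inspect(Pleroma.Captcha)  # => "Pleroma.Captcha"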
test "db is added to settings that are in db", %{conn: conn} do test "db is added to settings that are in db", %{conn: conn} do
_config = insert(:config, key: ":instance", value: ConfigDB.to_binary(name: "Some name")) _config = insert(:config, key: ":instance", value: [name: "Some name"])
%{"configs" => configs} = %{"configs" => configs} =
conn conn
@ -83,7 +83,7 @@ test "merged default setting with db settings", %{conn: conn} do
config3 = config3 =
insert(:config, insert(:config,
value: ConfigDB.to_binary(k1: :v1, k2: :v2) value: [k1: :v1, k2: :v2]
) )
%{"configs" => configs} = %{"configs" => configs} =
@ -93,41 +93,44 @@ test "merged default setting with db settings", %{conn: conn} do
assert length(configs) > 3 assert length(configs) > 3
saved_configs = [config1, config2, config3]
keys = Enum.map(saved_configs, &inspect(&1.key))
received_configs = received_configs =
Enum.filter(configs, fn %{"group" => group, "key" => key} -> Enum.filter(configs, fn %{"group" => group, "key" => key} ->
group == ":pleroma" and key in [config1.key, config2.key, config3.key] group == ":pleroma" and key in keys
end) end)
assert length(received_configs) == 3 assert length(received_configs) == 3
db_keys = db_keys =
config3.value config3.value
|> ConfigDB.from_binary()
|> Keyword.keys() |> Keyword.keys()
|> ConfigDB.convert() |> ConfigDB.to_json_types()
keys = Enum.map(saved_configs -- [config3], &inspect(&1.key))
values = Enum.map(saved_configs, &ConfigDB.to_json_types(&1.value))
mapset_keys = MapSet.new(keys ++ db_keys)
Enum.each(received_configs, fn %{"value" => value, "db" => db} -> Enum.each(received_configs, fn %{"value" => value, "db" => db} ->
assert db in [[config1.key], [config2.key], db_keys] db = MapSet.new(db)
assert MapSet.subset?(db, mapset_keys)
assert value in [ assert value in values
ConfigDB.from_binary_with_convert(config1.value),
ConfigDB.from_binary_with_convert(config2.value),
ConfigDB.from_binary_with_convert(config3.value)
]
end) end)
end end
test "subkeys with full update right merge", %{conn: conn} do test "subkeys with full update right merge", %{conn: conn} do
config1 =
insert(:config, insert(:config,
key: ":emoji", key: ":emoji",
value: ConfigDB.to_binary(groups: [a: 1, b: 2], key: [a: 1]) value: [groups: [a: 1, b: 2], key: [a: 1]]
) )
config2 =
insert(:config, insert(:config,
key: ":assets", key: ":assets",
value: ConfigDB.to_binary(mascots: [a: 1, b: 2], key: [a: 1]) value: [mascots: [a: 1, b: 2], key: [a: 1]]
) )
%{"configs" => configs} = %{"configs" => configs} =
@ -137,14 +140,14 @@ test "subkeys with full update right merge", %{conn: conn} do
vals = vals =
Enum.filter(configs, fn %{"group" => group, "key" => key} -> Enum.filter(configs, fn %{"group" => group, "key" => key} ->
group == ":pleroma" and key in [config1.key, config2.key] group == ":pleroma" and key in [":emoji", ":assets"]
end) end)
emoji = Enum.find(vals, fn %{"key" => key} -> key == ":emoji" end) emoji = Enum.find(vals, fn %{"key" => key} -> key == ":emoji" end)
assets = Enum.find(vals, fn %{"key" => key} -> key == ":assets" end) assets = Enum.find(vals, fn %{"key" => key} -> key == ":assets" end)
emoji_val = ConfigDB.transform_with_out_binary(emoji["value"]) emoji_val = ConfigDB.to_elixir_types(emoji["value"])
assets_val = ConfigDB.transform_with_out_binary(assets["value"]) assets_val = ConfigDB.to_elixir_types(assets["value"])
assert emoji_val[:groups] == [a: 1, b: 2] assert emoji_val[:groups] == [a: 1, b: 2]
assert assets_val[:mascots] == [a: 1, b: 2] assert assets_val[:mascots] == [a: 1, b: 2]
@ -277,7 +280,8 @@ test "create new config setting in db", %{conn: conn} do
"value" => %{"tuple" => ["string", "Pleroma.Captcha.NotReal", []]}, "value" => %{"tuple" => ["string", "Pleroma.Captcha.NotReal", []]},
"db" => [":key5"] "db" => [":key5"]
} }
] ],
"need_reboot" => false
} }
assert Application.get_env(:pleroma, :key1) == "value1" assert Application.get_env(:pleroma, :key1) == "value1"
@ -357,7 +361,8 @@ test "save configs setting without explicit key", %{conn: conn} do
"value" => "https://hooks.slack.com/services/KEY", "value" => "https://hooks.slack.com/services/KEY",
"db" => [":webhook_url"] "db" => [":webhook_url"]
} }
] ],
"need_reboot" => false
} }
assert Application.get_env(:quack, :level) == :info assert Application.get_env(:quack, :level) == :info
@ -366,14 +371,14 @@ test "save configs setting without explicit key", %{conn: conn} do
end end
test "saving config with partial update", %{conn: conn} do test "saving config with partial update", %{conn: conn} do
config = insert(:config, key: ":key1", value: :erlang.term_to_binary(key1: 1, key2: 2)) insert(:config, key: ":key1", value: :erlang.term_to_binary(key1: 1, key2: 2))
conn = conn =
conn conn
|> put_req_header("content-type", "application/json") |> put_req_header("content-type", "application/json")
|> post("/api/pleroma/admin/config", %{ |> post("/api/pleroma/admin/config", %{
configs: [ configs: [
%{group: config.group, key: config.key, value: [%{"tuple" => [":key3", 3]}]} %{group: ":pleroma", key: ":key1", value: [%{"tuple" => [":key3", 3]}]}
] ]
}) })
@ -389,7 +394,8 @@ test "saving config with partial update", %{conn: conn} do
], ],
"db" => [":key1", ":key2", ":key3"] "db" => [":key1", ":key2", ":key3"]
} }
] ],
"need_reboot" => false
} }
end end
@ -500,8 +506,7 @@ test "update setting which need reboot, don't change reboot flag until reboot",
end end
test "saving config with nested merge", %{conn: conn} do test "saving config with nested merge", %{conn: conn} do
config = insert(:config, key: :key1, value: [key1: 1, key2: [k1: 1, k2: 2]])
insert(:config, key: ":key1", value: :erlang.term_to_binary(key1: 1, key2: [k1: 1, k2: 2]))
conn = conn =
conn conn
@ -509,8 +514,8 @@ test "saving config with nested merge", %{conn: conn} do
|> post("/api/pleroma/admin/config", %{ |> post("/api/pleroma/admin/config", %{
configs: [ configs: [
%{ %{
group: config.group, group: ":pleroma",
key: config.key, key: ":key1",
value: [ value: [
%{"tuple" => [":key3", 3]}, %{"tuple" => [":key3", 3]},
%{ %{
@ -548,7 +553,8 @@ test "saving config with nested merge", %{conn: conn} do
], ],
"db" => [":key1", ":key3", ":key2"] "db" => [":key1", ":key3", ":key2"]
} }
] ],
"need_reboot" => false
} }
end end
@ -588,7 +594,8 @@ test "saving special atoms", %{conn: conn} do
], ],
"db" => [":ssl_options"] "db" => [":ssl_options"]
} }
] ],
"need_reboot" => false
} }
assert Application.get_env(:pleroma, :key1) == [ assert Application.get_env(:pleroma, :key1) == [
@ -600,11 +607,10 @@ test "saving full setting if value is in full_key_update list", %{conn: conn} do
backends = Application.get_env(:logger, :backends) backends = Application.get_env(:logger, :backends)
on_exit(fn -> Application.put_env(:logger, :backends, backends) end) on_exit(fn -> Application.put_env(:logger, :backends, backends) end)
config =
insert(:config, insert(:config,
group: ":logger", group: :logger,
key: ":backends", key: :backends,
value: :erlang.term_to_binary([]) value: []
) )
Pleroma.Config.TransferTask.load_and_update_env([], false) Pleroma.Config.TransferTask.load_and_update_env([], false)
@ -617,8 +623,8 @@ test "saving full setting if value is in full_key_update list", %{conn: conn} do
|> post("/api/pleroma/admin/config", %{ |> post("/api/pleroma/admin/config", %{
configs: [ configs: [
%{ %{
group: config.group, group: ":logger",
key: config.key, key: ":backends",
value: [":console"] value: [":console"]
} }
] ]
@ -634,7 +640,8 @@ test "saving full setting if value is in full_key_update list", %{conn: conn} do
], ],
"db" => [":backends"] "db" => [":backends"]
} }
] ],
"need_reboot" => false
} }
assert Application.get_env(:logger, :backends) == [ assert Application.get_env(:logger, :backends) == [
@ -643,11 +650,10 @@ test "saving full setting if value is in full_key_update list", %{conn: conn} do
end end
test "saving full setting if value is not keyword", %{conn: conn} do test "saving full setting if value is not keyword", %{conn: conn} do
config =
insert(:config, insert(:config,
group: ":tesla", group: :tesla,
key: ":adapter", key: :adapter,
value: :erlang.term_to_binary(Tesla.Adapter.Hackey) value: Tesla.Adapter.Hackey
) )
conn = conn =
@ -655,7 +661,7 @@ test "saving full setting if value is not keyword", %{conn: conn} do
|> put_req_header("content-type", "application/json") |> put_req_header("content-type", "application/json")
|> post("/api/pleroma/admin/config", %{ |> post("/api/pleroma/admin/config", %{
configs: [ configs: [
%{group: config.group, key: config.key, value: "Tesla.Adapter.Httpc"} %{group: ":tesla", key: ":adapter", value: "Tesla.Adapter.Httpc"}
] ]
}) })
@ -667,7 +673,8 @@ test "saving full setting if value is not keyword", %{conn: conn} do
"value" => "Tesla.Adapter.Httpc", "value" => "Tesla.Adapter.Httpc",
"db" => [":adapter"] "db" => [":adapter"]
} }
] ],
"need_reboot" => false
} }
end end
@ -677,13 +684,13 @@ test "update config setting & delete with fallback to default value", %{
token: token token: token
} do } do
ueberauth = Application.get_env(:ueberauth, Ueberauth) ueberauth = Application.get_env(:ueberauth, Ueberauth)
config1 = insert(:config, key: ":keyaa1") insert(:config, key: :keyaa1)
config2 = insert(:config, key: ":keyaa2") insert(:config, key: :keyaa2)
config3 = config3 =
insert(:config, insert(:config,
group: ":ueberauth", group: :ueberauth,
key: "Ueberauth" key: Ueberauth
) )
conn = conn =
@ -691,8 +698,8 @@ test "update config setting & delete with fallback to default value", %{
|> put_req_header("content-type", "application/json") |> put_req_header("content-type", "application/json")
|> post("/api/pleroma/admin/config", %{ |> post("/api/pleroma/admin/config", %{
configs: [ configs: [
%{group: config1.group, key: config1.key, value: "another_value"}, %{group: ":pleroma", key: ":keyaa1", value: "another_value"},
%{group: config2.group, key: config2.key, value: "another_value"} %{group: ":pleroma", key: ":keyaa2", value: "another_value"}
] ]
}) })
@ -700,22 +707,23 @@ test "update config setting & delete with fallback to default value", %{
"configs" => [ "configs" => [
%{ %{
"group" => ":pleroma", "group" => ":pleroma",
"key" => config1.key, "key" => ":keyaa1",
"value" => "another_value", "value" => "another_value",
"db" => [":keyaa1"] "db" => [":keyaa1"]
}, },
%{ %{
"group" => ":pleroma", "group" => ":pleroma",
"key" => config2.key, "key" => ":keyaa2",
"value" => "another_value", "value" => "another_value",
"db" => [":keyaa2"] "db" => [":keyaa2"]
} }
] ],
"need_reboot" => false
} }
assert Application.get_env(:pleroma, :keyaa1) == "another_value" assert Application.get_env(:pleroma, :keyaa1) == "another_value"
assert Application.get_env(:pleroma, :keyaa2) == "another_value" assert Application.get_env(:pleroma, :keyaa2) == "another_value"
assert Application.get_env(:ueberauth, Ueberauth) == ConfigDB.from_binary(config3.value) assert Application.get_env(:ueberauth, Ueberauth) == config3.value
conn = conn =
build_conn() build_conn()
@ -724,7 +732,7 @@ test "update config setting & delete with fallback to default value", %{
|> put_req_header("content-type", "application/json") |> put_req_header("content-type", "application/json")
|> post("/api/pleroma/admin/config", %{ |> post("/api/pleroma/admin/config", %{
configs: [ configs: [
%{group: config2.group, key: config2.key, delete: true}, %{group: ":pleroma", key: ":keyaa2", delete: true},
%{ %{
group: ":ueberauth", group: ":ueberauth",
key: "Ueberauth", key: "Ueberauth",
@ -734,7 +742,8 @@ test "update config setting & delete with fallback to default value", %{
}) })
assert json_response_and_validate_schema(conn, 200) == %{ assert json_response_and_validate_schema(conn, 200) == %{
"configs" => [] "configs" => [],
"need_reboot" => false
} }
assert Application.get_env(:ueberauth, Ueberauth) == ueberauth assert Application.get_env(:ueberauth, Ueberauth) == ueberauth
@ -801,7 +810,8 @@ test "common config example", %{conn: conn} do
":name" ":name"
] ]
} }
] ],
"need_reboot" => false
} }
end end
@ -935,7 +945,8 @@ test "tuples with more than two values", %{conn: conn} do
], ],
"db" => [":http"] "db" => [":http"]
} }
] ],
"need_reboot" => false
} }
end end
@ -1000,7 +1011,8 @@ test "settings with nesting map", %{conn: conn} do
], ],
"db" => [":key2", ":key3"] "db" => [":key2", ":key3"]
} }
] ],
"need_reboot" => false
} }
end end
@ -1027,7 +1039,8 @@ test "value as map", %{conn: conn} do
"value" => %{"key" => "some_val"}, "value" => %{"key" => "some_val"},
"db" => [":key1"] "db" => [":key1"]
} }
] ],
"need_reboot" => false
} }
end end
@ -1077,15 +1090,15 @@ test "queues key as atom", %{conn: conn} do
":background" ":background"
] ]
} }
] ],
"need_reboot" => false
} }
end end
test "delete part of settings by atom subkeys", %{conn: conn} do test "delete part of settings by atom subkeys", %{conn: conn} do
config =
insert(:config, insert(:config,
key: ":keyaa1", key: :keyaa1,
value: :erlang.term_to_binary(subkey1: "val1", subkey2: "val2", subkey3: "val3") value: [subkey1: "val1", subkey2: "val2", subkey3: "val3"]
) )
conn = conn =
@ -1094,8 +1107,8 @@ test "delete part of settings by atom subkeys", %{conn: conn} do
|> post("/api/pleroma/admin/config", %{ |> post("/api/pleroma/admin/config", %{
configs: [ configs: [
%{ %{
group: config.group, group: ":pleroma",
key: config.key, key: ":keyaa1",
subkeys: [":subkey1", ":subkey3"], subkeys: [":subkey1", ":subkey3"],
delete: true delete: true
} }
@ -1110,7 +1123,8 @@ test "delete part of settings by atom subkeys", %{conn: conn} do
"value" => [%{"tuple" => [":subkey2", "val2"]}], "value" => [%{"tuple" => [":subkey2", "val2"]}],
"db" => [":subkey2"] "db" => [":subkey2"]
} }
] ],
"need_reboot" => false
} }
end end
@ -1236,6 +1250,90 @@ test "doesn't set keys not in the whitelist", %{conn: conn} do
assert Application.get_env(:pleroma, Pleroma.Captcha.NotReal) == "value5" assert Application.get_env(:pleroma, Pleroma.Captcha.NotReal) == "value5"
assert Application.get_env(:not_real, :anything) == "value6" assert Application.get_env(:not_real, :anything) == "value6"
end end
test "args for Pleroma.Upload.Filter.Mogrify with custom tuples", %{conn: conn} do
clear_config(Pleroma.Upload.Filter.Mogrify)
assert conn
|> put_req_header("content-type", "application/json")
|> post("/api/pleroma/admin/config", %{
configs: [
%{
group: ":pleroma",
key: "Pleroma.Upload.Filter.Mogrify",
value: [
%{"tuple" => [":args", ["auto-orient", "strip"]]}
]
}
]
})
|> json_response_and_validate_schema(200) == %{
"configs" => [
%{
"group" => ":pleroma",
"key" => "Pleroma.Upload.Filter.Mogrify",
"value" => [
%{"tuple" => [":args", ["auto-orient", "strip"]]}
],
"db" => [":args"]
}
],
"need_reboot" => false
}
assert Config.get(Pleroma.Upload.Filter.Mogrify) == [args: ["auto-orient", "strip"]]
assert conn
|> put_req_header("content-type", "application/json")
|> post("/api/pleroma/admin/config", %{
configs: [
%{
group: ":pleroma",
key: "Pleroma.Upload.Filter.Mogrify",
value: [
%{
"tuple" => [
":args",
[
"auto-orient",
"strip",
"{\"implode\", \"1\"}",
"{\"resize\", \"3840x1080>\"}"
]
]
}
]
}
]
})
|> json_response(200) == %{
"configs" => [
%{
"group" => ":pleroma",
"key" => "Pleroma.Upload.Filter.Mogrify",
"value" => [
%{
"tuple" => [
":args",
[
"auto-orient",
"strip",
"{\"implode\", \"1\"}",
"{\"resize\", \"3840x1080>\"}"
]
]
}
],
"db" => [":args"]
}
],
"need_reboot" => false
}
assert Config.get(Pleroma.Upload.Filter.Mogrify) == [
args: ["auto-orient", "strip", {"implode", "1"}, {"resize", "3840x1080>"}]
]
end
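    # Judging by the final assertion above, string-encoded tuples survive the
    # round-trip as real Elixir tuples, roughly:
    #
    #   "{\"implode\", \"1\"}"   # posted to the admin API as a plain string
    #   # becomes
    #   {"implode", "1"}         # value returned by Config.get/1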
end end
describe "GET /api/pleroma/admin/config/descriptions" do describe "GET /api/pleroma/admin/config/descriptions" do


@ -83,10 +83,9 @@ test "sets user settings in a generic way", %{conn: conn} do
test "updates the user's bio", %{conn: conn} do test "updates the user's bio", %{conn: conn} do
user2 = insert(:user) user2 = insert(:user)
conn = raw_bio = "I drink #cofe with @#{user2.nickname}\n\nsuya.."
patch(conn, "/api/v1/accounts/update_credentials", %{
"note" => "I drink #cofe with @#{user2.nickname}\n\nsuya.." conn = patch(conn, "/api/v1/accounts/update_credentials", %{"note" => raw_bio})
})
assert user_data = json_response_and_validate_schema(conn, 200) assert user_data = json_response_and_validate_schema(conn, 200)
@ -94,6 +93,12 @@ test "updates the user's bio", %{conn: conn} do
~s(I drink <a class="hashtag" data-tag="cofe" href="http://localhost:4001/tag/cofe">#cofe</a> with <span class="h-card"><a class="u-url mention" data-user="#{ ~s(I drink <a class="hashtag" data-tag="cofe" href="http://localhost:4001/tag/cofe">#cofe</a> with <span class="h-card"><a class="u-url mention" data-user="#{
user2.id user2.id
}" href="#{user2.ap_id}" rel="ugc">@<span>#{user2.nickname}</span></a></span><br/><br/>suya..) }" href="#{user2.ap_id}" rel="ugc">@<span>#{user2.nickname}</span></a></span><br/><br/>suya..)
assert user_data["source"]["note"] == raw_bio
user = Repo.get(User, user_data["id"])
assert user.raw_bio == raw_bio
end end
test "updates the user's locking status", %{conn: conn} do test "updates the user's locking status", %{conn: conn} do


@ -33,7 +33,8 @@ test "Represent a user account" do
bio: bio:
"<script src=\"invalid-html\"></script><span>valid html</span>. a<br>b<br/>c<br >d<br />f '&<>\"", "<script src=\"invalid-html\"></script><span>valid html</span>. a<br>b<br/>c<br >d<br />f '&<>\"",
inserted_at: ~N[2017-08-15 15:47:06.597036], inserted_at: ~N[2017-08-15 15:47:06.597036],
emoji: %{"karjalanpiirakka" => "/file.png"} emoji: %{"karjalanpiirakka" => "/file.png"},
raw_bio: "valid html. a\nb\nc\nd\nf '&<>\""
}) })
expected = %{ expected = %{


@ -15,8 +15,17 @@ test "represents a Mastodon Conversation entity" do
user = insert(:user) user = insert(:user)
other_user = insert(:user) other_user = insert(:user)
{:ok, parent} = CommonAPI.post(user, %{status: "parent"})
{:ok, activity} = {:ok, activity} =
CommonAPI.post(user, %{status: "hey @#{other_user.nickname}", visibility: "direct"}) CommonAPI.post(user, %{
status: "hey @#{other_user.nickname}",
visibility: "direct",
in_reply_to_id: parent.id
})
{:ok, _reply_activity} =
CommonAPI.post(user, %{status: "hu", visibility: "public", in_reply_to_id: parent.id})
[participation] = Participation.for_user_with_last_activity_id(user) [participation] = Participation.for_user_with_last_activity_id(user)