# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.ConfigDB do
  use Ecto.Schema

  import Ecto.Changeset
  import Ecto.Query, only: [select: 3]
  import Pleroma.Web.Gettext

  alias __MODULE__
  alias Pleroma.Repo

  @type t :: %__MODULE__{}

  @full_subkey_update [
    {:pleroma, :assets, :mascots},
    {:pleroma, :emoji, :groups},
    {:pleroma, :workers, :retries},
    {:pleroma, :mrf_subchain, :match_actor},
    {:pleroma, :mrf_keyword, :replace}
  ]
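
  # Note: for the {group, key, subkey} combinations listed above, a changed subkey
  # is replaced wholesale with the incoming value instead of being deep-merged
  # (see merge_group/4 and sub_key_full_update?/3 below).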

  schema "config" do
    field(:key, Pleroma.Config.Type.Atom)
    field(:group, Pleroma.Config.Type.Atom)
    field(:value, Pleroma.Config.Type.BinaryValue)
    field(:db, {:array, :string}, virtual: true, default: [])

    timestamps()
  end

  @spec get_all_as_keyword() :: keyword()
  def get_all_as_keyword do
    ConfigDB
    |> select([c], {c.group, c.key, c.value})
    |> Repo.all()
    |> Enum.reduce([], fn {group, key, value}, acc ->
      Keyword.update(acc, group, [{key, value}], &Keyword.merge(&1, [{key, value}]))
    end)
  end
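
  # Illustrative shape of the result (groups, keys and values assumed, not taken
  # from a real database):
  #
  #   [
  #     pleroma: [instance: [name: "Example"], assets: [mascots: []]],
  #     logger: [backends: [:console]]
  #   ]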

  @spec get_by_params(map()) :: ConfigDB.t() | nil
  def get_by_params(params), do: Repo.get_by(ConfigDB, params)

  @spec changeset(ConfigDB.t(), map()) :: Changeset.t()
  def changeset(config, params \\ %{}) do
    config
    |> cast(params, [:key, :group, :value])
    |> validate_required([:key, :group, :value])
    |> unique_constraint(:key, name: :config_group_key_index)
  end

  defp create(params) do
    %ConfigDB{}
    |> changeset(params)
    |> Repo.insert()
  end

  defp update(%ConfigDB{} = config, %{value: value}) do
    config
    |> changeset(%{value: value})
    |> Repo.update()
  end

  @spec get_db_keys(keyword(), any()) :: [String.t()]
  def get_db_keys(value, key) do
    keys =
      if Keyword.keyword?(value) do
        Keyword.keys(value)
      else
        [key]
      end

    Enum.map(keys, &to_json_types(&1))
  end
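
  # Illustrative example (values assumed): keyword values report their own keys,
  # anything else falls back to the config key itself, rendered as JSON-safe strings:
  #
  #   get_db_keys([name: "Example", limit: 10], :instance) #=> [":name", ":limit"]
  #   get_db_keys("some string", :instance)                #=> [":instance"]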

  @spec merge_group(atom(), atom(), keyword(), keyword()) :: keyword()
  def merge_group(group, key, old_value, new_value) do
    new_keys = to_mapset(new_value)

    intersect_keys = old_value |> to_mapset() |> MapSet.intersection(new_keys) |> MapSet.to_list()

    merged_value = ConfigDB.merge(old_value, new_value)

    @full_subkey_update
    |> Enum.map(fn
      {g, k, subkey} when g == group and k == key ->
        if subkey in intersect_keys, do: subkey, else: []

      _ ->
        []
    end)
    |> List.flatten()
    |> Enum.reduce(merged_value, &Keyword.put(&2, &1, new_value[&1]))
  end
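
  # Illustrative example (group, key and values assumed): for a {group, key} pair
  # with no entry in @full_subkey_update, nested keyword values are deep-merged:
  #
  #   merge_group(
  #     :pleroma,
  #     :instance,
  #     [limit: 5000, poll_limits: [max_options: 10]],
  #     [poll_limits: [max_expiration: 3600]]
  #   )
  #   #=> [limit: 5000, poll_limits: [max_options: 10, max_expiration: 3600]]
  #
  # For a combination listed in @full_subkey_update (e.g. {:pleroma, :assets, :mascots}),
  # a subkey present in both old and new values is overwritten with the new subkey
  # value rather than merged.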

  defp to_mapset(keyword) do
    keyword
    |> Keyword.keys()
    |> MapSet.new()
  end

  @spec sub_key_full_update?(atom(), atom(), [Keyword.key()]) :: boolean()
  def sub_key_full_update?(group, key, subkeys) do
    Enum.any?(@full_subkey_update, fn {g, k, subkey} ->
      g == group and k == key and subkey in subkeys
    end)
  end

  @spec merge(keyword(), keyword()) :: keyword()
  def merge(config1, config2) when is_list(config1) and is_list(config2) do
    Keyword.merge(config1, config2, fn _, app1, app2 ->
      if Keyword.keyword?(app1) and Keyword.keyword?(app2) do
        Keyword.merge(app1, app2, &deep_merge/3)
      else
        app2
      end
    end)
  end

  defp deep_merge(_key, value1, value2) do
    if Keyword.keyword?(value1) and Keyword.keyword?(value2) do
      Keyword.merge(value1, value2, &deep_merge/3)
    else
      value2
    end
  end
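
  # Sketch of merge/2 semantics (example values assumed): keyword values are merged
  # recursively, anything else is overwritten by the right-hand side:
  #
  #   ConfigDB.merge([a: [b: 1, c: 2], d: 3], [a: [c: 20], d: 4])
  #   #=> [a: [b: 1, c: 20], d: 4]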

  @spec update_or_create(map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()}
  def update_or_create(params) do
    params = Map.put(params, :value, to_elixir_types(params[:value]))
    search_opts = Map.take(params, [:group, :key])

    with %ConfigDB{} = config <- ConfigDB.get_by_params(search_opts),
         {_, true, config} <- {:partial_update, can_be_partially_updated?(config), config},
         {_, true, config} <-
           {:can_be_merged, is_list(params[:value]) and is_list(config.value), config} do
      new_value = merge_group(config.group, config.key, config.value, params[:value])
      update(config, %{value: new_value})
    else
      {reason, false, config} when reason in [:partial_update, :can_be_merged] ->
        update(config, params)

      nil ->
        create(params)
    end
  end
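
  # Illustrative call (group, key and value assumed):
  #
  #   ConfigDB.update_or_create(%{
  #     group: :pleroma,
  #     key: :instance,
  #     value: [name: "Example instance", limit: 5000]
  #   })
  #
  # If a row for {:pleroma, :instance} already exists and both the stored and the
  # incoming values are keyword lists, the values are merged via merge_group/4;
  # otherwise the stored value is replaced, or a new row is inserted.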

  defp can_be_partially_updated?(%ConfigDB{} = config), do: not only_full_update?(config)

  defp only_full_update?(%ConfigDB{group: group, key: key}) do
    full_key_update = [
      {:pleroma, :ecto_repos},
      {:quack, :meta},
      {:mime, :types},
      {:cors_plug, [:max_age, :methods, :expose, :headers]},
      {:auto_linker, :opts},
      {:swarm, :node_blacklist},
      {:logger, :backends}
    ]

    Enum.any?(full_key_update, fn
      {s_group, s_key} ->
        group == s_group and ((is_list(s_key) and key in s_key) or key == s_key)
    end)
  end

  @spec delete(map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()}
  def delete(params) do
    search_opts = Map.delete(params, :subkeys)

    with %ConfigDB{} = config <- ConfigDB.get_by_params(search_opts),
         {config, sub_keys} when is_list(sub_keys) <- {config, params[:subkeys]},
         keys <- Enum.map(sub_keys, &string_to_elixir_types(&1)),
         {_, config, new_value} when new_value != [] <-
           {:partial_remove, config, Keyword.drop(config.value, keys)} do
      update(config, %{value: new_value})
    else
      {:partial_remove, config, []} ->
        Repo.delete(config)

      {config, nil} ->
        Repo.delete(config)

      nil ->
        err =
          dgettext("errors", "Config with params %{params} not found", params: inspect(params))

        {:error, err}
    end
  end
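
  # Illustrative calls (group, key and subkeys assumed):
  #
  #   # Drops only the :name subkey; the row is kept if other subkeys remain.
  #   ConfigDB.delete(%{group: :pleroma, key: :instance, subkeys: [":name"]})
  #
  #   # Without :subkeys the whole row is deleted.
  #   ConfigDB.delete(%{group: :pleroma, key: :instance})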

  @spec to_json_types(term()) :: map() | list() | boolean() | String.t()
  def to_json_types(entity) when is_list(entity) do
    Enum.map(entity, &to_json_types/1)
  end

  def to_json_types(%Regex{} = entity), do: inspect(entity)

  def to_json_types(entity) when is_map(entity) do
    Map.new(entity, fn {k, v} -> {to_json_types(k), to_json_types(v)} end)
  end

  def to_json_types({:args, args}) when is_list(args) do
    arguments =
      Enum.map(args, fn
        arg when is_tuple(arg) -> inspect(arg)
        arg -> to_json_types(arg)
      end)

    %{"tuple" => [":args", arguments]}
  end

  def to_json_types({:proxy_url, {type, :localhost, port}}) do
    %{"tuple" => [":proxy_url", %{"tuple" => [to_json_types(type), "localhost", port]}]}
  end

  def to_json_types({:proxy_url, {type, host, port}}) when is_tuple(host) do
    ip =
      host
      |> :inet_parse.ntoa()
      |> to_string()

    %{
      "tuple" => [
        ":proxy_url",
        %{"tuple" => [to_json_types(type), ip, port]}
      ]
    }
  end

  def to_json_types({:proxy_url, {type, host, port}}) do
    %{
      "tuple" => [
        ":proxy_url",
        %{"tuple" => [to_json_types(type), to_string(host), port]}
      ]
    }
  end

  def to_json_types({:partial_chain, entity}),
    do: %{"tuple" => [":partial_chain", inspect(entity)]}

  def to_json_types(entity) when is_tuple(entity) do
    value =
      entity
      |> Tuple.to_list()
      |> to_json_types()

    %{"tuple" => value}
  end

  def to_json_types(entity) when is_binary(entity), do: entity

  def to_json_types(entity) when is_boolean(entity) or is_number(entity) or is_nil(entity) do
    entity
  end

  def to_json_types(entity) when entity in [:"tlsv1.1", :"tlsv1.2", :"tlsv1.3"] do
    ":#{entity}"
  end

  def to_json_types(entity) when is_atom(entity), do: inspect(entity)
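
  # Illustrative conversions (inputs assumed):
  #
  #   to_json_types(:upload)        #=> ":upload"
  #   to_json_types(Pleroma.Upload) #=> "Pleroma.Upload"
  #   to_json_types({:proxy_url, {:socks5, :localhost, 1234}})
  #   #=> %{"tuple" => [":proxy_url", %{"tuple" => [":socks5", "localhost", 1234]}]}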

  @spec to_elixir_types(boolean() | String.t() | map() | list()) :: term()
  def to_elixir_types(%{"tuple" => [":args", args]}) when is_list(args) do
    arguments =
      Enum.map(args, fn arg ->
        if String.contains?(arg, ["{", "}"]) do
          {elem, []} = Code.eval_string(arg)
          elem
        else
          to_elixir_types(arg)
        end
      end)

    {:args, arguments}
  end

  def to_elixir_types(%{"tuple" => [":proxy_url", %{"tuple" => [type, host, port]}]}) do
    {:proxy_url, {string_to_elixir_types(type), parse_host(host), port}}
  end

  def to_elixir_types(%{"tuple" => [":partial_chain", entity]}) do
    {partial_chain, []} =
      entity
      |> String.replace(~r/[^\w|^{:,[|^,|^[|^\]^}|^\/|^\.|^"]^\s/, "")
      |> Code.eval_string()

    {:partial_chain, partial_chain}
  end

  def to_elixir_types(%{"tuple" => entity}) do
    Enum.reduce(entity, {}, &Tuple.append(&2, to_elixir_types(&1)))
  end

  def to_elixir_types(entity) when is_map(entity) do
    Map.new(entity, fn {k, v} -> {to_elixir_types(k), to_elixir_types(v)} end)
  end

  def to_elixir_types(entity) when is_list(entity) do
    Enum.map(entity, &to_elixir_types/1)
  end

  def to_elixir_types(entity) when is_binary(entity) do
    entity
    |> String.trim()
    |> string_to_elixir_types()
  end

  def to_elixir_types(entity), do: entity
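
  # Illustrative conversions back to Elixir terms (inputs assumed):
  #
  #   to_elixir_types(":upload")                       #=> :upload
  #   to_elixir_types("Pleroma.Upload")                 #=> Pleroma.Upload
  #   to_elixir_types(%{"tuple" => [":key", "value"]})  #=> {:key, "value"}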

  @spec string_to_elixir_types(String.t()) ::
          atom() | Regex.t() | module() | String.t() | no_return()
  def string_to_elixir_types("~r" <> _pattern = regex) do
    pattern =
      ~r/^~r(?'delimiter'[\/|"'([{<]{1})(?'pattern'.+)[\/|"')\]}>]{1}(?'modifier'[uismxfU]*)/u

    delimiters = ["/", "|", "\"", "'", {"(", ")"}, {"[", "]"}, {"{", "}"}, {"<", ">"}]

    with %{"modifier" => modifier, "pattern" => pattern, "delimiter" => regex_delimiter} <-
           Regex.named_captures(pattern, regex),
         {:ok, {leading, closing}} <- find_valid_delimiter(delimiters, pattern, regex_delimiter),
         {result, _} <- Code.eval_string("~r#{leading}#{pattern}#{closing}#{modifier}") do
      result
    end
  end

  def string_to_elixir_types(":" <> atom), do: String.to_atom(atom)

  def string_to_elixir_types(value) do
    if module_name?(value) do
      String.to_existing_atom("Elixir." <> value)
    else
      value
    end
  end
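
  # Illustrative conversions (inputs assumed):
  #
  #   string_to_elixir_types("~r/foo/i")   #=> ~r/foo/i
  #   string_to_elixir_types(":atom")      #=> :atom
  #   string_to_elixir_types("plain text") #=> "plain text"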

  defp parse_host("localhost"), do: :localhost

  defp parse_host(host) do
    charlist = to_charlist(host)

    case :inet.parse_address(charlist) do
      {:error, :einval} ->
        charlist

      {:ok, ip} ->
        ip
    end
  end

  defp find_valid_delimiter([], _string, _) do
    raise(ArgumentError, message: "valid delimiter for Regex expression not found")
  end

  defp find_valid_delimiter([{leading, closing} = delimiter | others], pattern, regex_delimiter)
       when is_tuple(delimiter) do
    if String.contains?(pattern, closing) do
      find_valid_delimiter(others, pattern, regex_delimiter)
    else
      {:ok, {leading, closing}}
    end
  end

  defp find_valid_delimiter([delimiter | others], pattern, regex_delimiter) do
    if String.contains?(pattern, delimiter) do
      find_valid_delimiter(others, pattern, regex_delimiter)
    else
      {:ok, {delimiter, delimiter}}
    end
  end

  @spec module_name?(String.t()) :: boolean()
  def module_name?(string) do
    Regex.match?(~r/^(Pleroma|Phoenix|Tesla|Quack|Ueberauth|Swoosh)\./, string) or
      string in ["Oban", "Ueberauth", "ExSyslogger"]
  end
end