giant massive dep upgrade and dialyxir-found error emporium (#371)
Some checks are pending
ci/woodpecker/push/woodpecker Pipeline is pending
Co-authored-by: FloatingGhost <hannah@coffee-and-dreams.uk>
Reviewed-on: #371
parent 7f4d218cff
commit 07a48b9293

75 changed files with 688 additions and 555 deletions
@@ -8,11 +8,14 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
### Removed
- Non-finch HTTP adapters

### Upgrade notes
- Ensure `config :tesla, :adapter` is either unset, or set to `{Tesla.Adapter.Finch, name: MyFinch}` in your .exs config
- Legacy redirect from /api/pleroma/admin to /api/v1/pleroma/admin

### Changed
- Return HTTP error 413 when uploading an avatar or banner that's above the configured upload limit instead of a 500.

### Upgrade notes
- Ensure `config :tesla, :adapter` is either unset, or set to `{Tesla.Adapter.Finch, name: MyFinch}` in your .exs config

## 2022.12

## Added
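The upgrade note above comes down to a single config line. A minimal sketch of the setting it asks for, assuming `MyFinch` is a placeholder for whatever name your Finch pool is started under:

```elixir
# Sketch of the Tesla adapter setting from the upgrade note; `MyFinch` is a
# placeholder name, not a value defined by this diff.
import Config

config :tesla, :adapter, {Tesla.Adapter.Finch, name: MyFinch}
```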
@@ -1,3 +1,4 @@
+# credo:disable-for-this-file
 # Pleroma: A lightweight social networking server
 # Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
 # SPDX-License-Identifier: AGPL-3.0-only
@@ -115,7 +115,6 @@ def run(["prune_task"]) do
     nil
     |> Pleroma.Workers.Cron.PruneDatabaseWorker.perform()
-    |> IO.inspect()
   end

   def run(["fix_likes_collections"]) do
@@ -1,3 +1,4 @@
+# credo:disable-for-this-file
 defmodule Mix.Tasks.Pleroma.Diagnostics do
   alias Pleroma.Repo
   alias Pleroma.User
@@ -247,9 +247,13 @@ def run(["gen" | rest]) do
       config_dir = Path.dirname(config_path)
       psql_dir = Path.dirname(psql_path)

-      [config_dir, psql_dir, static_dir, uploads_dir]
-      |> Enum.reject(&File.exists?/1)
-      |> Enum.map(&File.mkdir_p!/1)
+      to_create =
+        [config_dir, psql_dir, static_dir, uploads_dir]
+        |> Enum.reject(&File.exists?/1)
+
+      for dir <- to_create do
+        File.mkdir_p!(dir)
+      end

       shell_info("Writing config to #{config_path}.")
@@ -319,6 +323,4 @@ defp upload_filters(filters) when is_map(filters) do
     enabled_filters
   end

   defp upload_filters(_), do: []
 end
@@ -10,14 +10,11 @@ defmodule Mix.Tasks.Pleroma.Search do
   def run(["import", "activities" | _rest]) do
     start_pleroma()
-    IO.inspect(Pleroma.Config.get([Pleroma.Search.Elasticsearch.Cluster, :indexes, :activities]))

-    IO.inspect(
-      Elasticsearch.Index.Bulk.upload(
-        Pleroma.Search.Elasticsearch.Cluster,
-        "activities",
-        Pleroma.Config.get([Pleroma.Search.Elasticsearch.Cluster, :indexes, :activities])
-      )
-    )
+    Elasticsearch.Index.Bulk.upload(
+      Pleroma.Search.Elasticsearch.Cluster,
+      "activities",
+      Pleroma.Config.get([Pleroma.Search.Elasticsearch.Cluster, :indexes, :activities])
+    )
   end
 end
@@ -378,9 +378,11 @@ def run(["change_email", nickname, email]) do
   def run(["show", nickname]) do
     start_pleroma()

-    nickname
-    |> User.get_cached_by_nickname()
-    |> IO.inspect()
+    user =
+      nickname
+      |> User.get_cached_by_nickname()
+
+    shell_info("#{inspect(user)}")
   end

   def run(["send_confirmation", nickname]) do
@@ -389,7 +391,6 @@ def run(["send_confirmation", nickname]) do
     with %User{} = user <- User.get_cached_by_nickname(nickname) do
       user
       |> Pleroma.Emails.UserEmail.account_confirmation_email()
-      |> IO.inspect()
       |> Pleroma.Emails.Mailer.deliver!()

       shell_info("#{nickname}'s email sent")
@@ -465,7 +466,7 @@ def run(["blocking", nickname]) do
     with %User{local: true} = user <- User.get_cached_by_nickname(nickname) do
       blocks = User.following_ap_ids(user)
-      IO.inspect(blocks, limit: :infinity)
+      IO.puts("#{inspect(blocks)}")
     end
   end
@@ -38,7 +38,11 @@ defp add_cache_key_for(activity_id, additional_key) do
   def invalidate_cache_for(activity_id) do
     keys = get_cache_keys_for(activity_id)
-    Enum.map(keys, &@cachex.del(:scrubber_cache, &1))
+
+    for key <- keys do
+      @cachex.del(:scrubber_cache, key)
+    end

     @cachex.del(:scrubber_management_cache, activity_id)
   end
@@ -24,8 +24,10 @@ defmodule Pleroma.Announcement do
   end

   def change(struct, params \\ %{}) do
+    params = validate_params(struct, params)
+
     struct
-    |> cast(validate_params(struct, params), [:data, :starts_at, :ends_at, :rendered])
+    |> cast(params, [:data, :starts_at, :ends_at, :rendered])
     |> validate_required([:data])
   end
@@ -198,6 +198,8 @@ defp background_migrators do
     ]
   end

+  @spec task_children(atom()) :: [map()]
+
   defp task_children(:test) do
     [
       %{
@@ -223,6 +225,7 @@ defp task_children(_) do
     ]
   end

+  @spec elasticsearch_children :: [Pleroma.Search.Elasticsearch.Cluster]
   def elasticsearch_children do
     config = Config.get([Pleroma.Search, :module])
@@ -194,8 +194,6 @@ defp check_system_commands!(:ok) do
     end
   end

   defp check_system_commands!(result), do: result

   defp check_repo_pool_size!(:ok) do
     if Pleroma.Config.get([Pleroma.Repo, :pool_size], 10) != 10 and
          not Pleroma.Config.get([:dangerzone, :override_repo_pool_size], false) do
@@ -209,7 +209,9 @@ def list_remote(opts) do
     with :ok <- validate_shareable_packs_available(uri) do
       uri
-      |> URI.merge("/api/pleroma/emoji/packs?page=#{opts[:page]}&page_size=#{opts[:page_size]}")
+      |> URI.merge(
+        "/api/v1/pleroma/emoji/packs?page=#{opts[:page]}&page_size=#{opts[:page_size]}"
+      )
       |> http_get()
     end
   end
@@ -250,7 +252,7 @@ def download(name, url, as) do
     with :ok <- validate_shareable_packs_available(uri),
          {:ok, remote_pack} <-
-           uri |> URI.merge("/api/pleroma/emoji/pack?name=#{name}") |> http_get(),
+           uri |> URI.merge("/api/v1/pleroma/emoji/pack?name=#{name}") |> http_get(),
          {:ok, %{sha: sha, url: url} = pack_info} <- fetch_pack_info(remote_pack, uri, name),
          {:ok, archive} <- download_archive(url, sha),
          pack <- copy_as(remote_pack, as || name),
@@ -591,7 +593,7 @@ defp fetch_pack_info(remote_pack, uri, name) do
         {:ok,
          %{
            sha: sha,
-           url: URI.merge(uri, "/api/pleroma/emoji/packs/archive?name=#{name}") |> to_string()
+           url: URI.merge(uri, "/api/v1/pleroma/emoji/packs/archive?name=#{name}") |> to_string()
          }}

       %{"fallback-src" => src, "fallback-src-sha256" => sha} when is_binary(src) ->
@@ -14,6 +14,8 @@ defmodule Pleroma.FollowingRelationship do
   alias Pleroma.Repo
   alias Pleroma.User

+  @type follow_state :: :follow_pending | :follow_accept | :follow_reject | :unfollow
+
   schema "following_relationships" do
     field(:state, State, default: :follow_pending)
@@ -72,6 +74,7 @@ def update(%User{} = follower, %User{} = following, state) do
     end
   end

+  @spec follow(User.t(), User.t()) :: {:ok, User.t(), User.t()} | {:error, any}
   def follow(%User{} = follower, %User{} = following, state \\ :follow_accept) do
     with {:ok, _following_relationship} <-
            %__MODULE__{}
@@ -81,6 +84,7 @@ def follow(%User{} = follower, %User{} = following, state \\ :follow_accept) do
     end
   end

+  @spec unfollow(User.t(), User.t()) :: {:ok, User.t(), User.t()} | {:error, any}
   def unfollow(%User{} = follower, %User{} = following) do
     case get(follower, following) do
       %__MODULE__{} = following_relationship ->
@@ -89,10 +93,12 @@ def unfollow(%User{} = follower, %User{} = following) do
       end

       _ ->
-        {:ok, nil}
+        {:ok, follower, following}
     end
   end

+  @spec after_update(follow_state(), User.t(), User.t()) ::
+          {:ok, User.t(), User.t()} | {:error, any()}
   defp after_update(state, %User{} = follower, %User{} = following) do
     with {:ok, following} <- User.update_follower_count(following),
          {:ok, follower} <- User.update_following_count(follower) do
@@ -103,6 +109,8 @@ defp after_update(state, %User{} = follower, %User{} = following) do
       })

       {:ok, follower, following}
+    else
+      err -> {:error, err}
     end
   end
@@ -104,10 +104,10 @@ defp run_fifo(fifo_path, env, executable, args) do
           args: args
         ])

-      fifo = Port.open(to_charlist(fifo_path), [:eof, :binary, :stream, :out])
+      fifo = File.open!(fifo_path, [:append, :binary])
       fix = Pleroma.Helpers.QtFastStart.fix(env.body)
-      true = Port.command(fifo, fix)
-      :erlang.port_close(fifo)
+      IO.binwrite(fifo, fix)
+      File.close(fifo)
       loop_recv(pid)
     after
       File.rm(fifo_path)
@@ -14,9 +14,7 @@ defmodule Pleroma.HTTP.AdapterHelper do
   alias Pleroma.HTTP.AdapterHelper
   require Logger

-  @type proxy ::
-          {Connection.host(), pos_integer()}
-          | {Connection.proxy_type(), Connection.host(), pos_integer()}
+  @type proxy :: {Connection.proxy_type(), Connection.host(), pos_integer(), list()}

   @callback options(keyword(), URI.t()) :: keyword()
@@ -25,7 +23,6 @@ def format_proxy(nil), do: nil

   def format_proxy(proxy_url) do
     case parse_proxy(proxy_url) do
-      {:ok, host, port} -> {:http, host, port, []}
       {:ok, type, host, port} -> {type, host, port, []}
       _ -> nil
     end
|
|||
defp proxy_type(_), do: {:error, :unknown}
|
||||
|
||||
@spec parse_proxy(String.t() | tuple() | nil) ::
|
||||
{:ok, host(), pos_integer()}
|
||||
| {:ok, proxy_type(), host(), pos_integer()}
|
||||
{:ok, proxy_type(), host(), pos_integer()}
|
||||
| {:error, atom()}
|
||||
| nil
|
||||
def parse_proxy(nil), do: nil
|
||||
|
|
|
@@ -14,7 +14,7 @@ defmodule Pleroma.Migrators.Support.BaseMigrator do
   @callback fault_rate_allowance() :: integer() | float()

   defmacro __using__(_opts) do
-    quote do
+    quote generated: true do
       use GenServer

       require Logger
@@ -237,7 +237,8 @@ def insert_log(%{actor: %User{}, action: action, target: target} = attrs)
     insert_log_entry_with_message(%ModerationLog{data: data})
   end

-  @spec insert_log_entry_with_message(ModerationLog) :: {:ok, ModerationLog} | {:error, any}
+  @spec insert_log_entry_with_message(ModerationLog.t()) ::
+          {:ok, ModerationLog.t()} | {:error, any}
   defp insert_log_entry_with_message(entry) do
     entry.data["message"]
     |> put_in(get_log_entry_message(entry))
@@ -240,7 +240,7 @@ def delete(%Object{data: %{"id" => id}} = object) do
          {:ok, _} <- invalid_object_cache(object) do
       cleanup_attachments(
         Config.get([:instance, :cleanup_attachments]),
-        %{"object" => object}
+        %{object: object}
       )

       {:ok, object, deleted_activity}
@@ -249,7 +249,7 @@ def delete(%Object{data: %{"id" => id}} = object) do
   @spec cleanup_attachments(boolean(), %{required(:object) => map()}) ::
           {:ok, Oban.Job.t() | nil}
-  def cleanup_attachments(true, %{"object" => _} = params) do
+  def cleanup_attachments(true, %{object: _} = params) do
     AttachmentsCleanupWorker.enqueue("cleanup_attachments", params)
   end
@@ -61,9 +61,6 @@ def create do
         IO.puts("The database for #{inspect(@repo)} has already been created")

-      {:error, term} when is_binary(term) ->
-        IO.puts(:stderr, "The database for #{inspect(@repo)} couldn't be created: #{term}")
-
       {:error, term} ->
         IO.puts(
           :stderr,
           "The database for #{inspect(@repo)} couldn't be created: #{inspect(term)}"
@@ -66,6 +66,7 @@ defp read_chunk!(%{pid: pid, stream: stream, opts: opts}) do
   @impl true
+  @spec close(map) :: :ok | no_return()
   def close(%{pid: _pid}) do
     :ok
   end

   defp check_adapter do
@@ -13,25 +13,21 @@ defmodule Pleroma.Search.Elasticsearch do
   def es_query(:activity, query, offset, limit) do
     must = Parsers.Activity.parse(query)

-    if must == [] do
-      :skip
-    else
-      %{
-        size: limit,
-        from: offset,
-        terminate_after: 50,
-        timeout: "5s",
-        sort: [
-          "_score",
-          %{"_timestamp" => %{order: "desc", format: "basic_date_time"}}
-        ],
-        query: %{
-          bool: %{
-            must: must
-          }
-        }
-      }
-    end
+    %{
+      size: limit,
+      from: offset,
+      terminate_after: 50,
+      timeout: "5s",
+      sort: [
+        "_score",
+        %{"_timestamp" => %{order: "desc", format: "basic_date_time"}}
+      ],
+      query: %{
+        bool: %{
+          must: must
+        }
+      }
+    }
   end

   defp maybe_fetch(:activity, search_query) do
@@ -57,5 +57,5 @@ def encode(activity) do
 defimpl Elasticsearch.Document, for: Pleroma.Object do
   def id(obj), do: obj.id
   def routing(_), do: false
-  def encode(_), do: nil
+  def encode(_), do: %{}
 end
@@ -154,10 +154,11 @@ def add_to_index(activity) do

     with {:ok, res} <- result,
          true <- Map.has_key?(res, "taskUid") do
-      # Do nothing
+      {:ok, res}
     else
-      _ ->
+      err ->
         Logger.error("Failed to add activity #{activity.id} to index: #{inspect(result)}")
+        {:error, err}
     end
   end
 end
@@ -4,7 +4,7 @@ defmodule Pleroma.Search.SearchBackend do

   The whole activity is passed, to allow filtering on things such as scope.
   """
-  @callback add_to_index(activity :: Pleroma.Activity.t()) :: nil
+  @callback add_to_index(activity :: Pleroma.Activity.t()) :: {:ok, any()} | {:error, any()}

   @doc """
   Remove the object from the index.
@@ -13,5 +13,5 @@ defmodule Pleroma.Search.SearchBackend do
   is what contains the actual content and there is no need for fitlering when removing
   from index.
   """
-  @callback remove_from_index(object :: Pleroma.Object.t()) :: nil
+  @callback remove_from_index(object :: Pleroma.Object.t()) :: {:ok, any()} | {:error, any()}
 end
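For illustration only, a minimal sketch of a module satisfying the two callbacks as re-specified above. The module name and the no-op bodies are invented for this sketch; a real backend would implement the rest of the behaviour as well.

```elixir
defmodule MyApp.Search.NullBackend do
  @behaviour Pleroma.Search.SearchBackend

  # Takes the whole activity, as the docstring above describes, and reports
  # success as an ok tuple rather than returning nil.
  @impl true
  def add_to_index(%Pleroma.Activity{} = _activity), do: {:ok, nil}

  # Takes the object (which holds the actual content) and likewise returns
  # an ok/error tuple.
  @impl true
  def remove_from_index(%Pleroma.Object{} = _object), do: {:ok, nil}
end
```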
@@ -27,7 +27,7 @@ def key_id_to_actor_id(key_id) do

       _ ->
         case Pleroma.Web.WebFinger.finger(maybe_ap_id) do
-          %{"ap_id" => ap_id} -> {:ok, ap_id}
+          {:ok, %{"ap_id" => ap_id}} -> {:ok, ap_id}
           _ -> {:error, maybe_ap_id}
         end
     end
@@ -162,7 +162,7 @@ defp prepare_upload(%Plug.Upload{} = file, opts) do
   defp prepare_upload(%{img: "data:image/" <> image_data}, opts) do
     parsed = Regex.named_captures(~r/(?<filetype>jpeg|png|gif);base64,(?<data>.*)/, image_data)
     data = Base.decode64!(parsed["data"], ignore: :whitespace)
-    hash = Base.encode16(:crypto.hash(:sha256, data), lower: true)
+    hash = Base.encode16(:crypto.hash(:sha256, data), case: :lower)

     with :ok <- check_binary_size(data, opts.size_limit),
          tmp_path <- tempfile_for_image(data),
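The hashing change above is a straight option fix: `Base.encode16/2` takes `case: :lower`, not `lower: true`. A standalone sketch with a placeholder input standing in for the decoded upload body:

```elixir
# "placeholder" is illustrative; in the hunk above `data` is the decoded upload.
data = "placeholder"
hash = Base.encode16(:crypto.hash(:sha256, data), case: :lower)
# hash is now the lowercase hex-encoded SHA-256 digest of data.
```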
@@ -77,7 +77,6 @@ defp media_dimensions(file) do
       %{width: width, height: height}
     else
-      nil -> {:error, {:ffprobe, :command_not_found}}
       {:error, _} = error -> error
     end
   end
 end
@@ -9,7 +9,7 @@ defmodule Pleroma.Upload.Filter.Exiftool do
   """
   @behaviour Pleroma.Upload.Filter

-  @spec filter(Pleroma.Upload.t()) :: {:ok, any()} | {:error, String.t()}
+  @spec filter(Pleroma.Upload.t()) :: {:ok, :noop} | {:ok, :filtered} | {:error, String.t()}

   # Formats not compatible with exiftool at this time
   def filter(%Pleroma.Upload{content_type: "image/heic"}), do: {:ok, :noop}
@@ -38,7 +38,7 @@ defmodule Pleroma.Upload.Filter.Mogrifun do
     [{"fill", "yellow"}, {"tint", "40"}]
   ]

-  @spec filter(Pleroma.Upload.t()) :: {:ok, atom()} | {:error, String.t()}
+  @spec filter(Pleroma.Upload.t()) :: {:ok, :filtered | :noop} | {:error, String.t()}
   def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do
     try do
       Filter.Mogrify.do_filter(file, [Enum.random(@filters)])
@@ -3,6 +3,10 @@
 # SPDX-License-Identifier: AGPL-3.0-only

 defmodule Pleroma.User do
+  @moduledoc """
+  A user, local or remote
+  """
+
   use Ecto.Schema

   import Ecto.Changeset
@@ -549,9 +553,17 @@ def update_changeset(struct, params \\ %{}) do
   end

   defp put_fields(changeset) do
+    # These fields are inconsistent in tests when it comes to binary/atom keys
     if raw_fields = get_change(changeset, :raw_fields) do
       raw_fields =
         raw_fields
+        |> Enum.map(fn
+          %{name: name, value: value} ->
+            %{"name" => name, "value" => value}
+
+          %{"name" => _} = field ->
+            field
+        end)
         |> Enum.filter(fn %{"name" => n} -> n != "" end)

       fields =
@@ -705,7 +717,8 @@ def register_changeset_ldap(struct, params = %{password: password})
     |> put_private_key()
   end

-  def register_changeset(struct, params \\ %{}, opts \\ []) do
+  @spec register_changeset(User.t(), map(), keyword()) :: Changeset.t()
+  def register_changeset(%User{} = struct, params \\ %{}, opts \\ []) do
     bio_limit = Config.get([:instance, :user_bio_length], 5000)
     name_limit = Config.get([:instance, :user_name_length], 100)
     reason_limit = Config.get([:instance, :registration_reason_length], 500)
@@ -819,12 +832,14 @@ defp autofollowing_users(user) do
   end

   @doc "Inserts provided changeset, performs post-registration actions (confirmation email sending etc.)"
+  @spec register(Changeset.t()) :: {:ok, User.t()} | {:error, any} | nil
   def register(%Ecto.Changeset{} = changeset) do
     with {:ok, user} <- Repo.insert(changeset) do
       post_register_action(user)
     end
   end

+  @spec post_register_action(User.t()) :: {:error, any} | {:ok, User.t()}
   def post_register_action(%User{is_confirmed: false} = user) do
     with {:ok, _} <- maybe_send_confirmation_email(user) do
       {:ok, user}
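As a rough usage sketch of the two functions whose specs were added above: a registration changeset is built and then handed to `register/1`, which inserts it and runs the post-registration step. The parameter keys shown here are illustrative, not taken from this diff.

```elixir
# Hypothetical call site; nickname/email/password keys are assumptions for
# the sketch, only register_changeset/3 and register/1 come from the hunk.
{:ok, user} =
  %Pleroma.User{}
  |> Pleroma.User.register_changeset(%{
    nickname: "alice",
    email: "alice@example.tld",
    password: "secret"
  })
  |> Pleroma.User.register()
```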
@@ -939,7 +954,8 @@ def needs_update?(%User{local: false} = user) do

   def needs_update?(_), do: true

-  @spec maybe_direct_follow(User.t(), User.t()) :: {:ok, User.t()} | {:error, String.t()}
+  @spec maybe_direct_follow(User.t(), User.t()) ::
+          {:ok, User.t(), User.t()} | {:error, String.t()}

   # "Locked" (self-locked) users demand explicit authorization of follow requests
   def maybe_direct_follow(%User{} = follower, %User{local: true, is_locked: true} = followed) do
@@ -1072,6 +1088,11 @@ def get_by_guessed_nickname(ap_id) do
     get_cached_by_nickname(nickname)
   end

+  @spec set_cache(
+          {:error, any}
+          | {:ok, User.t()}
+          | User.t()
+        ) :: {:ok, User.t()} | {:error, any}
   def set_cache({:ok, user}), do: set_cache(user)
   def set_cache({:error, err}), do: {:error, err}
@@ -1082,12 +1103,14 @@ def set_cache(%User{} = user) do
     {:ok, user}
   end

+  @spec update_and_set_cache(User.t(), map()) :: {:ok, User.t()} | {:error, any}
   def update_and_set_cache(struct, params) do
     struct
     |> update_changeset(params)
     |> update_and_set_cache()
   end

+  @spec update_and_set_cache(Changeset.t()) :: {:ok, User.t()} | {:error, any}
   def update_and_set_cache(%{data: %Pleroma.User{} = user} = changeset) do
     was_superuser_before_update = User.superuser?(user)
@@ -1142,6 +1165,7 @@ def get_cached_by_ap_id(ap_id) do
     end
   end

+  @spec get_cached_by_id(String.t()) :: nil | Pleroma.User.t()
   def get_cached_by_id(id) do
     key = "id:#{id}"
@@ -2302,6 +2326,7 @@ def add_alias(user, new_alias_user) do
     end
   end

+  @spec delete_alias(User.t(), User.t()) :: {:error, :no_such_alias}
   def delete_alias(user, alias_user) do
     current_aliases = user.also_known_as || []
     alias_ap_id = alias_user.ap_id
@@ -2417,7 +2442,7 @@ def confirmation_changeset(user, set_confirmation: confirmed?) do
     cast(user, params, [:is_confirmed, :confirmation_token])
   end

-  @spec approval_changeset(User.t(), keyword()) :: Changeset.t()
+  @spec approval_changeset(Changeset.t(), keyword()) :: Changeset.t()
   def approval_changeset(user, set_approval: approved?) do
     cast(user, %{is_approved: approved?}, [:is_approved])
   end
@@ -2492,15 +2517,19 @@ defp add_to_block(%User{} = user, %User{} = blocked) do
     with {:ok, relationship} <- UserRelationship.create_block(user, blocked) do
       @cachex.del(:user_cache, "blocked_users_ap_ids:#{user.ap_id}")
       {:ok, relationship}
+    else
+      err -> err
     end
   end

-  @spec add_to_block(User.t(), User.t()) ::
+  @spec remove_from_block(User.t(), User.t()) ::
           {:ok, UserRelationship.t()} | {:ok, nil} | {:error, Ecto.Changeset.t()}
   defp remove_from_block(%User{} = user, %User{} = blocked) do
     with {:ok, relationship} <- UserRelationship.delete_block(user, blocked) do
       @cachex.del(:user_cache, "blocked_users_ap_ids:#{user.ap_id}")
       {:ok, relationship}
+    else
+      err -> err
     end
   end
@@ -130,7 +130,8 @@ def export(%__MODULE__{} = backup) do
          :ok <- statuses(dir, backup.user),
          :ok <- likes(dir, backup.user),
          :ok <- bookmarks(dir, backup.user),
-         {:ok, zip_path} <- :zip.create(String.to_charlist(dir <> ".zip"), @files, cwd: dir),
+         {:ok, zip_path} <-
+           :zip.create(String.to_charlist(dir <> ".zip"), @files, cwd: String.to_charlist(dir)),
          {:ok, _} <- File.rm_rf(dir) do
       {:ok, to_string(zip_path)}
     end
@@ -56,7 +56,10 @@ defp skip_plug(conn, plug_modules) do
         plug_module.skip_plug(conn)
       rescue
         UndefinedFunctionError ->
-          raise "`#{plug_module}` is not skippable. Append `use Pleroma.Web, :plug` to its code."
+          reraise(
+            "`#{plug_module}` is not skippable. Append `use Pleroma.Web, :plug` to its code.",
+            __STACKTRACE__
+          )
       end
     end
   )
@@ -1531,6 +1531,10 @@ defp normalize_image(%{"url" => url}) do
   defp normalize_image(urls) when is_list(urls), do: urls |> List.first() |> normalize_image()
   defp normalize_image(_), do: nil

+  defp normalize_also_known_as(aka) when is_list(aka), do: aka
+  defp normalize_also_known_as(aka) when is_binary(aka), do: [aka]
+  defp normalize_also_known_as(nil), do: []
+
   defp object_to_user_data(data, additional) do
     fields =
       data
@@ -1576,6 +1580,7 @@ defp object_to_user_data(data, additional) do
     also_known_as =
       data
       |> Map.get("alsoKnownAs", [])
+      |> normalize_also_known_as()
      |> Enum.filter(fn url ->
        case URI.parse(url) do
          %URI{scheme: "http"} -> true
@@ -3,6 +3,8 @@ defmodule Pleroma.Web.AkkomaAPI.TranslationController do

   alias Pleroma.Web.Plugs.OAuthScopesPlug

+  require Logger
+
   @cachex Pleroma.Config.get([:cachex, :provider], Cachex)

   @unauthenticated_access %{fallback: :proceed_unauthenticated, scopes: []}
@@ -26,8 +28,12 @@ def languages(conn, _params) do
       conn
       |> json(%{source: source_languages, target: dest_languages})
     else
-      {:enabled, false} -> json(conn, %{})
-      e -> IO.inspect(e)
+      {:enabled, false} ->
+        json(conn, %{})
+
+      e ->
+        Logger.error("Translation language list error: #{inspect(e)}")
+        {:error, e}
     end
   end
@@ -231,9 +231,18 @@ defp emoji_packs_response do
       "application/json",
       %Schema{
         type: :object,
-        additionalProperties: emoji_pack(),
+        properties: %{
+          count: %Schema{type: :integer},
+          packs: %Schema{
+            type: :object,
+            additionalProperties: emoji_pack()
+          }
+        },
         example: %{
-          "emojos" => emoji_pack().example
+          "count" => 4,
+          "packs" => %{
+            "emojos" => emoji_pack().example
+          }
         }
       }
     )
@@ -60,6 +60,8 @@ def get_registration(%Plug.Conn{
   def get_registration(%Plug.Conn{} = _conn), do: {:error, :missing_credentials}

   @doc "Creates Pleroma.User record basing on params and Pleroma.Registration record."
+  @spec create_from_registration(Plug.Conn.t(), Registration.t()) ::
+          {:ok, User.t()} | {:error, any()}
   def create_from_registration(
         %Plug.Conn{params: %{"authorization" => registration_attrs}},
         %Registration{} = registration
@@ -89,6 +91,8 @@ def create_from_registration(
          {:ok, _} <-
            Registration.changeset(registration, %{user_id: new_user.id}) |> Repo.update() do
       {:ok, new_user}
+    else
+      err -> err
     end
   end
@@ -87,16 +87,18 @@ def get_pagination_fields(conn, entries, extra_params \\ %{}) do

   def assign_account_by_id(conn, _) do
     case Pleroma.User.get_cached_by_id(conn.params.id) do
-      %Pleroma.User{} = account -> assign(conn, :account, account)
-      nil -> Pleroma.Web.MastodonAPI.FallbackController.call(conn, {:error, :not_found}) |> halt()
+      %Pleroma.User{} = account ->
+        assign(conn, :account, account)
+
+      nil ->
+        Pleroma.Web.MastodonAPI.FallbackController.call(conn, {:error, :not_found})
+        |> halt()
     end
   end

+  @spec try_render(Plug.Conn.t(), any, any) :: Plug.Conn.t()
   def try_render(conn, target, params) when is_binary(target) do
-    case render(conn, target, params) do
-      nil -> render_error(conn, :not_implemented, "Can't display this activity")
-      res -> res
-    end
+    render(conn, target, params)
   end

   def try_render(conn, _, _) do
@@ -123,7 +123,7 @@ defmodule Pleroma.Web.Endpoint do
   plug(Plug.Parsers,
     parsers: [
       :urlencoded,
-      {:multipart, length: {Config, :get, [[:instance, :upload_limit]]}},
+      Pleroma.Web.Plugs.Parsers.Multipart,
       :json
     ],
     pass: ["*/*"],
@@ -1,26 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Web.Fallback.LegacyPleromaApiRerouterPlug do
-  alias Pleroma.Web.Endpoint
-  alias Pleroma.Web.Fallback.RedirectController
-
-  def init(opts), do: opts
-
-  def call(%{path_info: ["api", "pleroma" | path_info_rest]} = conn, _opts) do
-    new_path_info = ["api", "v1", "pleroma" | path_info_rest]
-    new_request_path = Enum.join(new_path_info, "/")
-
-    conn
-    |> Map.merge(%{
-      path_info: new_path_info,
-      request_path: new_request_path
-    })
-    |> Endpoint.call(conn.params)
-  end
-
-  def call(conn, _opts) do
-    RedirectController.api_not_implemented(conn, %{})
-  end
-end
@@ -30,6 +30,10 @@ def scrub_html_and_truncate(%{data: %{"content" => content}} = object) do
     |> scrub_html_and_truncate_object_field(object)
   end

+  def scrub_html_and_truncate(%{data: _}) do
+    ""
+  end
+
   def scrub_html_and_truncate(content, max_length \\ 200) when is_binary(content) do
     content
     |> scrub_html
@@ -558,10 +558,9 @@ def register(%Plug.Conn{} = conn, %{"authorization" => _, "op" => "register"} =
     else
       {:error, changeset} ->
         message =
-          Enum.map(changeset.errors, fn {field, {error, _}} ->
+          Enum.map_join(changeset.errors, "; ", fn {field, {error, _}} ->
             "#{field} #{error}"
           end)
-          |> Enum.join("; ")

         message =
           String.replace(
lib/pleroma/web/plugs/parsers/multipart.ex (new file, 21 lines)
@@ -0,0 +1,21 @@
+defmodule Pleroma.Web.Plugs.Parsers.Multipart do
+  @multipart Plug.Parsers.MULTIPART
+
+  alias Pleroma.Config
+
+  def init(opts) do
+    opts
+  end
+
+  def parse(conn, "multipart", subtype, headers, opts) do
+    length = Config.get([:instance, :upload_limit])
+
+    opts = @multipart.init([length: length] ++ opts)
+
+    @multipart.parse(conn, "multipart", subtype, headers, opts)
+  end
+
+  def parse(conn, _type, _subtype, _headers, _opts) do
+    {:next, conn}
+  end
+end
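For context, this module replaces the static `{:multipart, length: ...}` tuple in the Endpoint hunk earlier in the diff, so the upload limit is read from config at parse time rather than being frozen at compile time. A self-contained sketch of how it sits in a `Plug.Parsers` pipeline; the wrapping module name is hypothetical and the option values mirror the Endpoint hunk:

```elixir
defmodule MyApp.SketchPipeline do
  use Plug.Builder

  plug(Plug.Parsers,
    parsers: [
      :urlencoded,
      # custom wrapper around Plug.Parsers.MULTIPART that fetches
      # [:instance, :upload_limit] on every request
      Pleroma.Web.Plugs.Parsers.Multipart,
      :json
    ],
    pass: ["*/*"]
  )
end
```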
@@ -8,7 +8,6 @@ defmodule Pleroma.Web.Plugs.RemoteIp do
   """

   alias Pleroma.Config
-  import Plug.Conn

   @behaviour Plug
@@ -16,15 +15,21 @@ def init(_), do: nil

   def call(%{remote_ip: original_remote_ip} = conn, _) do
     if Config.get([__MODULE__, :enabled]) do
-      %{remote_ip: new_remote_ip} = conn = RemoteIp.call(conn, remote_ip_opts())
-      assign(conn, :remote_ip_found, original_remote_ip != new_remote_ip)
+      {headers, proxies} = remote_ip_opts()
+      new_remote_ip = RemoteIp.from(conn.req_headers, headers: headers, proxies: proxies)
+
+      if new_remote_ip != original_remote_ip do
+        Map.put(conn, :remote_ip, new_remote_ip)
+      else
+        conn
+      end
     else
       conn
     end
   end

   defp remote_ip_opts do
-    headers = Config.get([__MODULE__, :headers], []) |> MapSet.new()
+    headers = Config.get([__MODULE__, :headers], [])
     reserved = Config.get([__MODULE__, :reserved], [])

     proxies =
@@ -36,13 +41,10 @@ defp remote_ip_opts do
     end

   defp maybe_add_cidr(proxy) when is_binary(proxy) do
-    proxy =
-      cond do
-        "/" in String.codepoints(proxy) -> proxy
-        InetCidr.v4?(InetCidr.parse_address!(proxy)) -> proxy <> "/32"
-        InetCidr.v6?(InetCidr.parse_address!(proxy)) -> proxy <> "/128"
-      end
-
-    InetCidr.parse(proxy, true)
+    cond do
+      "/" in String.codepoints(proxy) -> proxy
+      InetCidr.v4?(InetCidr.parse_address!(proxy)) -> proxy <> "/32"
+      InetCidr.v6?(InetCidr.parse_address!(proxy)) -> proxy <> "/128"
+    end
   end
 end
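The plug reads everything through `Pleroma.Config.get([__MODULE__, ...])`, i.e. from the `:pleroma` application environment. A hedged sketch of the matching config block; the key names come from the `Config.get` calls in the hunks above, while the values are purely illustrative:

```elixir
import Config

# Example values only — not defaults shipped by the codebase.
config :pleroma, Pleroma.Web.Plugs.RemoteIp,
  enabled: true,
  # header(s) trusted to carry the real client address
  headers: ["x-forwarded-for"],
  # upstream proxies allowed to appear in that header (CIDR or bare IP)
  proxies: ["127.0.0.1/8"],
  reserved: ["172.16.0.0/12"]
```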
@@ -906,8 +906,6 @@ defmodule Pleroma.Web.Router do
   scope "/", Pleroma.Web.Fallback do
     get("/registration/:token", RedirectController, :registration_page)
     get("/:maybe_nickname_or_id", RedirectController, :redirector_with_meta)
-    match(:*, "/api/pleroma*path", LegacyPleromaApiRerouterPlug, [])
     get("/api*path", RedirectController, :api_not_implemented)
     get("/*path", RedirectController, :redirector_with_preload)

     options("/*path", RedirectController, :empty)
@@ -150,7 +150,10 @@ def remote_subscribe(conn, %{"status" => %{"status_id" => id, "profile" => profi
     end
   end

-  def remote_interaction(%{body_params: %{ap_id: ap_id, profile: profile}} = conn, _params) do
+  def remote_interaction(
+        %Plug.Conn{body_params: %{ap_id: ap_id, profile: profile}} = conn,
+        _params
+      ) do
     with {:ok, %{"subscribe_address" => template}} <- WebFinger.finger(profile) do
       conn
       |> json(%{url: String.replace(template, "{uri}", ap_id)})
@@ -100,6 +100,7 @@ defp domain do
     Pleroma.Config.get([__MODULE__, :domain]) || Pleroma.Web.Endpoint.host()
   end

+  @spec webfinger_from_xml(binary()) :: {:ok, map()} | nil
   defp webfinger_from_xml(body) do
     with {:ok, doc} <- XML.parse_document(body) do
       subject = XML.string_from_xpath("//Subject", doc)
@@ -36,7 +36,7 @@ def webfinger(%{assigns: %{format: format}} = conn, %{"resource" => resource})
     else
       _e ->
         conn
-        |> put_status(404)
+        |> put_status(:not_found)
         |> json("Couldn't find user")
     end
   end
mix.exs (49 changed lines)
@@ -7,7 +7,7 @@ def project do
       version: version("3.5.0"),
       elixir: "~> 1.12",
       elixirc_paths: elixirc_paths(Mix.env()),
-      compilers: [:phoenix, :gettext] ++ Mix.compilers(),
+      compilers: [:phoenix] ++ Mix.compilers(),
       elixirc_options: [warnings_as_errors: warnings_as_errors()],
       xref: [exclude: [:eldap]],
       start_permanent: Mix.env() == :prod,
@@ -94,7 +94,8 @@ defp warnings_as_errors, do: System.get_env("CI") == "true"
   # Specifies OAuth dependencies.
   defp oauth_deps do
     oauth_strategy_packages =
-      System.get_env("OAUTH_CONSUMER_STRATEGIES")
+      "OAUTH_CONSUMER_STRATEGIES"
+      |> System.get_env()
       |> to_string()
       |> String.split()
      |> Enum.map(fn strategy_entry ->
@@ -113,32 +114,29 @@ defp oauth_deps do
   # Type `mix help deps` for examples and options.
   defp deps do
     [
-      {:phoenix, "~> 1.6.11"},
+      {:phoenix, "~> 1.6.15"},
       {:tzdata, "~> 1.1.1"},
-      {:plug_cowboy, "~> 2.5"},
+      {:plug_cowboy, "~> 2.6"},
       {:phoenix_pubsub, "~> 2.1"},
       {:phoenix_ecto, "~> 4.4"},
       {:inet_cidr, "~> 1.0.0"},
       {:ecto_enum, "~> 1.4"},
       {:ecto_sql, "~> 3.9.0"},
       {:postgrex, ">= 0.16.3"},
       {:oban, "~> 2.12.1"},
-      {:gettext,
-       git: "https://github.com/tusooa/gettext.git",
-       ref: "72fb2496b6c5280ed911bdc3756890e7f38a4808",
-       override: true},
+      {:gettext, "~> 0.20.0"},
       {:bcrypt_elixir, "~> 2.2"},
       {:trailing_format_plug, "~> 0.0.7"},
       {:fast_sanitize, "~> 0.2.3"},
-      {:html_entities, "~> 0.5", override: true},
-      {:phoenix_html, "~> 3.1", override: true},
+      {:html_entities, "~> 0.5"},
+      {:phoenix_html, "~> 3.2"},
       {:calendar, "~> 1.0"},
       {:cachex, "~> 3.4"},
       {:poison, "~> 5.0", override: true},
-      {:tesla, "~> 1.4.4", override: true},
+      {:tesla, "~> 1.4.4"},
       {:castore, "~> 0.1"},
-      {:cowlib, "~> 2.9", override: true},
+      {:cowlib, "~> 2.9"},
       {:finch, "~> 0.14.0"},
       {:jason, "~> 1.2"},
-      {:trailing_format_plug, "~> 0.0.7"},
       {:mogrify, "~> 0.9.1"},
       {:ex_aws, "~> 2.1.6"},
       {:ex_aws_s3, "~> 2.0"},
@@ -172,39 +170,40 @@ defp deps do
       {:plug_static_index_html, "~> 1.0.0"},
       {:flake_id, "~> 0.1.0"},
       {:concurrent_limiter, "~> 0.1.1"},
-      {:remote_ip,
-       git: "https://git.pleroma.social/pleroma/remote_ip.git",
-       ref: "b647d0deecaa3acb140854fe4bda5b7e1dc6d1c8"},
+      {:remote_ip, "~> 1.1.0"},
       {:captcha,
        git: "https://git.pleroma.social/pleroma/elixir-libraries/elixir-captcha.git",
        ref: "e0f16822d578866e186a0974d65ad58cddc1e2ab"},
       {:restarter, path: "./restarter"},
       {:majic, "~> 1.0"},
       {:eblurhash, "~> 1.2.2"},
-      {:open_api_spex, "3.10.0"},
+      {:open_api_spex, "~> 3.16.0"},
       {:search_parser,
        git: "https://github.com/FloatingGhost/pleroma-contrib-search-parser.git",
        ref: "08971a81e68686f9ac465cfb6661d51c5e4e1e7f"},
       {:nimble_parsec, "~> 1.0", override: true},
       {:phoenix_live_dashboard, "~> 0.6.2"},