mix: consistently use shell_info and shell_error

Whether Logger output is visible depends on user configuration, but most
of the prints in mix tasks should always be shown. When running inside a
mix shell, it’s probably preferable to send output directly to it rather
than using raw IO.puts. We already have shell_* functions for this, so
let’s use them everywhere.
Author: Oneric 2024-05-30 01:15:04 +00:00
parent 70cd5f91d8
commit bed7ff8e89
9 changed files with 80 additions and 73 deletions
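At the call sites the change is mechanical; for example, taking lines from the Diagnostics and Meilisearch tasks below:

# before: visibility depends on the user's Logger configuration
Logger.info("Home timeline query #{user.nickname}")
IO.puts("Created indices. Starting to insert posts.")

# after: always printed, and routed through Mix.shell() when one is available
shell_info("Home timeline query #{user.nickname}")
shell_info("Created indices. Starting to insert posts.")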


@@ -112,18 +112,26 @@ def shell_prompt(prompt, defval \\ nil, defname \\ nil) do
end
end
def shell_info(message) do
def shell_info(message) when is_binary(message) or is_list(message) do
if mix_shell?(),
do: Mix.shell().info(message),
else: IO.puts(message)
end
def shell_error(message) do
def shell_info(message) do
shell_info("#{inspect(message)}")
end
def shell_error(message) when is_binary(message) or is_list(message) do
if mix_shell?(),
do: Mix.shell().error(message),
else: IO.puts(:stderr, message)
end
def shell_error(message) do
shell_error("#{inspect(message)}")
end
@doc "Performs a safe check whether `Mix.shell/0` is available (does not raise if Mix is not loaded)"
def mix_shell?, do: :erlang.function_exported(Mix, :shell, 0)
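The guard clauses accept binaries and chardata lists directly, while the new fallback clauses run everything else through inspect/1 first, so call sites can pipe whatever they have into the helpers. A minimal sketch of the resulting call patterns, assuming import Mix.Pleroma as in the tasks below (the lines are lifted from those tasks, where error, id and user are bound):

# binary messages match the is_binary/1 clause and are printed as-is
shell_info("Emoji packs have been reloaded.")
shell_error("Could not create invite token: #{inspect(error)}")

# any other term hits the fallback clause and is inspect-ed first
Activity.get_by_id(id) |> shell_info()
shell_info(user)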


@@ -8,7 +8,6 @@ defmodule Mix.Tasks.Pleroma.Activity do
alias Pleroma.User
alias Pleroma.Web.CommonAPI
alias Pleroma.Pagination
require Logger
import Mix.Pleroma
import Ecto.Query
@@ -17,7 +16,7 @@ def run(["get", id | _rest]) do
id
|> Activity.get_by_id()
|> IO.inspect()
|> shell_info()
end
def run(["delete_by_keyword", user, keyword | _rest]) do
@@ -35,7 +34,7 @@ def run(["delete_by_keyword", user, keyword | _rest]) do
)
|> Enum.map(fn x -> CommonAPI.delete(x.id, u) end)
|> Enum.count()
|> IO.puts()
|> shell_info()
end
defp query_with(q, search_query) do


@@ -3,7 +3,6 @@ defmodule Mix.Tasks.Pleroma.Diagnostics do
alias Pleroma.Repo
alias Pleroma.User
require Logger
require Pleroma.Constants
import Mix.Pleroma
@@ -14,7 +13,7 @@ def run(["http", url]) do
start_pleroma()
Pleroma.HTTP.get(url)
|> IO.inspect()
|> shell_info()
end
def run(["fetch_object", url]) do
@@ -27,7 +26,7 @@ def run(["fetch_object", url]) do
def run(["home_timeline", nickname]) do
start_pleroma()
user = Repo.get_by!(User, nickname: nickname)
Logger.info("Home timeline query #{user.nickname}")
shell_info("Home timeline query #{user.nickname}")
followed_hashtags =
user
@@ -56,14 +55,14 @@ def run(["home_timeline", nickname]) do
|> limit(20)
Ecto.Adapters.SQL.explain(Repo, :all, query, analyze: true, timeout: :infinity)
|> IO.puts()
|> shell_info()
end
def run(["user_timeline", nickname, reading_nickname]) do
start_pleroma()
user = Repo.get_by!(User, nickname: nickname)
reading_user = Repo.get_by!(User, nickname: reading_nickname)
Logger.info("User timeline query #{user.nickname}")
shell_info("User timeline query #{user.nickname}")
params =
%{limit: 20}
@@ -87,7 +86,7 @@ def run(["user_timeline", nickname, reading_nickname]) do
|> limit(20)
Ecto.Adapters.SQL.explain(Repo, :all, query, analyze: true, timeout: :infinity)
|> IO.puts()
|> shell_info()
end
def run(["notifications", nickname]) do
@@ -103,7 +102,7 @@ def run(["notifications", nickname]) do
|> limit(20)
Ecto.Adapters.SQL.explain(Repo, :all, query, analyze: true, timeout: :infinity)
|> IO.puts()
|> shell_info()
end
def run(["known_network", nickname]) do
@@ -129,6 +128,6 @@ def run(["known_network", nickname]) do
|> limit(20)
Ecto.Adapters.SQL.explain(Repo, :all, query, analyze: true, timeout: :infinity)
|> IO.puts()
|> shell_info()
end
end


@@ -27,11 +27,11 @@ def run(["ls-packs" | args]) do
]
for {param, value} <- to_print do
IO.puts(IO.ANSI.format([:bright, param, :normal, ": ", value]))
shell_info(IO.ANSI.format([:bright, param, :normal, ": ", value]))
end
# A newline
IO.puts("")
shell_info("")
end)
end
@@ -49,7 +49,7 @@ def run(["get-packs" | args]) do
pack = manifest[pack_name]
src = pack["src"]
IO.puts(
shell_info(
IO.ANSI.format([
"Downloading ",
:bright,
@@ -67,9 +67,9 @@ def run(["get-packs" | args]) do
sha_status_text = ["SHA256 of ", :bright, pack_name, :normal, " source file is ", :bright]
if archive_sha == String.upcase(pack["src_sha256"]) do
IO.puts(IO.ANSI.format(sha_status_text ++ [:green, "OK"]))
shell_info(IO.ANSI.format(sha_status_text ++ [:green, "OK"]))
else
IO.puts(IO.ANSI.format(sha_status_text ++ [:red, "BAD"]))
shell_info(IO.ANSI.format(sha_status_text ++ [:red, "BAD"]))
raise "Bad SHA256 for #{pack_name}"
end
@@ -80,7 +80,7 @@ def run(["get-packs" | args]) do
|> Path.dirname()
|> Path.join(pack["files"])
IO.puts(
shell_info(
IO.ANSI.format([
"Fetching the file list for ",
:bright,
@@ -94,7 +94,7 @@ def run(["get-packs" | args]) do
files = fetch_and_decode!(files_loc)
IO.puts(IO.ANSI.format(["Unpacking ", :bright, pack_name]))
shell_info(IO.ANSI.format(["Unpacking ", :bright, pack_name]))
pack_path =
Path.join([
@@ -115,7 +115,7 @@ def run(["get-packs" | args]) do
file_list: files_to_unzip
)
IO.puts(IO.ANSI.format(["Writing pack.json for ", :bright, pack_name]))
shell_info(IO.ANSI.format(["Writing pack.json for ", :bright, pack_name]))
pack_json = %{
pack: %{
@@ -132,7 +132,7 @@ def run(["get-packs" | args]) do
File.write!(Path.join(pack_path, "pack.json"), Jason.encode!(pack_json, pretty: true))
Pleroma.Emoji.reload()
else
IO.puts(IO.ANSI.format([:bright, :red, "No pack named \"#{pack_name}\" found"]))
shell_info(IO.ANSI.format([:bright, :red, "No pack named \"#{pack_name}\" found"]))
end
end
end
@@ -180,14 +180,14 @@ def run(["gen-pack" | args]) do
custom_exts
end
IO.puts("Using #{Enum.join(exts, " ")} extensions")
shell_info("Using #{Enum.join(exts, " ")} extensions")
IO.puts("Downloading the pack and generating SHA256")
shell_info("Downloading the pack and generating SHA256")
{:ok, %{body: binary_archive}} = Pleroma.HTTP.get(src)
archive_sha = :crypto.hash(:sha256, binary_archive) |> Base.encode16()
IO.puts("SHA256 is #{archive_sha}")
shell_info("SHA256 is #{archive_sha}")
pack_json = %{
name => %{
@@ -208,7 +208,7 @@ def run(["gen-pack" | args]) do
File.write!(files_name, Jason.encode!(emoji_map, pretty: true))
IO.puts("""
shell_info("""
#{files_name} has been created and contains the list of all found emojis in the pack.
Please review the files in the pack and remove those not needed.
@@ -230,11 +230,11 @@ def run(["gen-pack" | args]) do
)
)
IO.puts("#{pack_file} has been updated with the #{name} pack")
shell_info("#{pack_file} has been updated with the #{name} pack")
else
File.write!(pack_file, Jason.encode!(pack_json, pretty: true))
IO.puts("#{pack_file} has been created with the #{name} pack")
shell_info("#{pack_file} has been created with the #{name} pack")
end
Pleroma.Emoji.reload()
@@ -243,7 +243,7 @@ def run(["gen-pack" | args]) do
def run(["reload"]) do
start_pleroma()
Pleroma.Emoji.reload()
IO.puts("Emoji packs have been reloaded.")
shell_info("Emoji packs have been reloaded.")
end
defp fetch_and_decode!(from) do

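One detail worth noting for the emoji task above: IO.ANSI.format/1 returns chardata (a nested list), not a binary, which is why shell_info/1 and shell_error/1 also accept lists in their guards; without that, these messages would fall through to the inspect/1 clause and be printed as an inspected list rather than as formatted text. A quick check (the pack name is made up):

iex> IO.ANSI.format([:bright, "blobcat", :normal, ": ", "1.0"]) |> is_list()
true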

@@ -11,7 +11,6 @@ defmodule Mix.Tasks.Pleroma.RefreshCounterCache do
alias Pleroma.CounterCache
alias Pleroma.Repo
require Logger
import Ecto.Query
def run([]) do


@@ -48,7 +48,7 @@ def run(["index"]) do
]
)
IO.puts("Created indices. Starting to insert posts.")
shell_info("Created indices. Starting to insert posts.")
chunk_size = Pleroma.Config.get([Pleroma.Search.Meilisearch, :initial_indexing_chunk_size])
@@ -65,7 +65,7 @@ def run(["index"]) do
)
count = query |> Pleroma.Repo.aggregate(:count, :data)
IO.puts("Entries to index: #{count}")
shell_info("Entries to index: #{count}")
Pleroma.Repo.stream(
query,
@@ -92,10 +92,10 @@ def run(["index"]) do
with {:ok, res} <- result do
if not Map.has_key?(res, "indexUid") do
IO.puts("\nFailed to index: #{inspect(result)}")
shell_info("\nFailed to index: #{inspect(result)}")
end
else
e -> IO.puts("\nFailed to index due to network error: #{inspect(e)}")
e -> shell_error("\nFailed to index due to network error: #{inspect(e)}")
end
end)
|> Stream.run()
@@ -128,13 +128,13 @@ def run(["show-keys", master_key]) do
if decoded["results"] do
Enum.each(decoded["results"], fn
%{"name" => name, "key" => key} ->
IO.puts("#{name}: #{key}")
shell_info("#{name}: #{key}")
%{"description" => desc, "key" => key} ->
IO.puts("#{desc}: #{key}")
shell_info("#{desc}: #{key}")
end)
else
IO.puts("Error fetching the keys, check the master key is correct: #{inspect(decoded)}")
shell_error("Error fetching the keys, check the master key is correct: #{inspect(decoded)}")
end
end
@@ -142,7 +142,7 @@ def run(["stats"]) do
start_pleroma()
{:ok, result} = meili_get("/indexes/objects/stats")
IO.puts("Number of entries: #{result["numberOfDocuments"]}")
IO.puts("Indexing? #{result["isIndexing"]}")
shell_info("Number of entries: #{result["numberOfDocuments"]}")
shell_info("Indexing? #{result["isIndexing"]}")
end
end


@@ -38,7 +38,7 @@ def run(["spoof-uploaded"]) do
Logger.put_process_level(self(), :notice)
start_pleroma()
IO.puts("""
shell_info("""
+------------------------+
| SPOOF SEARCH UPLOADS |
+------------------------+
@@ -55,7 +55,7 @@ def run(["spoof-inserted"]) do
Logger.put_process_level(self(), :notice)
start_pleroma()
IO.puts("""
shell_info("""
+----------------------+
| SPOOF SEARCH NOTES |
+----------------------+
@@ -77,7 +77,7 @@ defp do_spoof_uploaded() do
uploads_search_spoofs_local_dir(Config.get!([Pleroma.Uploaders.Local, :uploads]))
_ ->
IO.puts("""
shell_info("""
NOTE:
Not using local uploader; thus not affected by this exploit.
It's impossible to check for files, but in case local uploader was used before
@@ -98,13 +98,13 @@ defp do_spoof_uploaded() do
orphaned_attachs = upload_search_orphaned_attachments(not_orphaned_urls)
IO.puts("\nSearch concluded; here are the results:")
shell_info("\nSearch concluded; here are the results:")
pretty_print_list_with_title(emoji, "Emoji")
pretty_print_list_with_title(files, "Uploaded Files")
pretty_print_list_with_title(post_attachs, "(Not Deleted) Post Attachments")
pretty_print_list_with_title(orphaned_attachs, "Orphaned Uploads")
IO.puts("""
shell_info("""
In total found
#{length(emoji)} emoji
#{length(files)} uploads
@@ -116,7 +116,7 @@ defp do_spoof_uploaded() do
defp uploads_search_spoofs_local_dir(dir) do
local_dir = String.replace_suffix(dir, "/", "")
IO.puts("Searching for suspicious files in #{local_dir}...")
shell_info("Searching for suspicious files in #{local_dir}...")
glob_ext = "{" <> Enum.join(@activity_exts, ",") <> "}"
@@ -128,7 +128,7 @@ defp uploads_search_spoofs_local_dir(dir) do
end
defp uploads_search_spoofs_notes() do
IO.puts("Now querying DB for posts with spoofing attachments. This might take a while...")
shell_info("Now querying DB for posts with spoofing attachments. This might take a while...")
patterns = [local_id_pattern() | activity_ext_url_patterns()]
@@ -153,7 +153,7 @@ defp uploads_search_spoofs_notes() do
end
defp upload_search_orphaned_attachments(not_orphaned_urls) do
IO.puts("""
shell_info("""
Now querying DB for orphaned spoofing attachment (i.e. their post was deleted,
but if :cleanup_attachments was not enabled traces remain in the database)
This might take a bit...
@@ -184,7 +184,7 @@ defp upload_search_orphaned_attachments(not_orphaned_urls) do
# | S P O O F - I N S E R T E D |
# +-----------------------------+
defp do_spoof_inserted() do
IO.puts("""
shell_info("""
Searching for local posts whose Create activity has no ActivityPub id...
This is a pretty good indicator, but only for spoofs of local actors
and only if the spoofing happened after around late 2021.
@@ -194,9 +194,9 @@ defp do_spoof_inserted() do
search_local_notes_without_create_id()
|> Enum.sort()
IO.puts("Done.\n")
shell_info("Done.\n")
IO.puts("""
shell_info("""
Now trying to weed out other poorly hidden spoofs.
This can't detect all and may have some false positives.
""")
@@ -207,9 +207,9 @@ defp do_spoof_inserted() do
search_sus_notes_by_id_patterns()
|> Enum.filter(fn r -> !(r in likely_spoofed_posts_set) end)
IO.puts("Done.\n")
shell_info("Done.\n")
IO.puts("""
shell_info("""
Finally, searching for spoofed, local user accounts.
(It's impossible to detect spoofed remote users)
""")
@@ -220,7 +220,7 @@ defp do_spoof_inserted() do
pretty_print_list_with_title(idless_create, "Likely Spoofed Posts")
pretty_print_list_with_title(spoofed_users, "Spoofed local user accounts")
IO.puts("""
shell_info("""
In total found:
#{length(spoofed_users)} bogus users
#{length(idless_create)} likely spoofed posts
@@ -289,27 +289,27 @@ defp search_bogus_local_users() do
defp pretty_print_list_with_title(list, title) do
title_len = String.length(title)
title_underline = String.duplicate("=", title_len)
IO.puts(title)
IO.puts(title_underline)
shell_info(title)
shell_info(title_underline)
pretty_print_list(list)
end
defp pretty_print_list([]), do: IO.puts("")
defp pretty_print_list([]), do: shell_info("")
defp pretty_print_list([{a, o} | rest])
when (is_binary(a) or is_number(a)) and is_binary(o) do
IO.puts(" {#{a}, #{o}}")
shell_info(" {#{a}, #{o}}")
pretty_print_list(rest)
end
defp pretty_print_list([{u, a, o} | rest])
when is_binary(a) and is_binary(u) and is_binary(o) do
IO.puts(" {#{u}, #{a}, #{o}}")
shell_info(" {#{u}, #{a}, #{o}}")
pretty_print_list(rest)
end
defp pretty_print_list([e | rest]) when is_binary(e) do
IO.puts(" #{e}")
shell_info(" #{e}")
pretty_print_list(rest)
end


@@ -114,7 +114,7 @@ def run(["reset_password", nickname]) do
{:ok, token} <- Pleroma.PasswordResetToken.create_token(user) do
shell_info("Generated password reset token for #{user.nickname}")
IO.puts("URL: #{~p[/api/v1/pleroma/password_reset/#{token.token}]}")
shell_info("URL: #{~p[/api/v1/pleroma/password_reset/#{token.token}]}")
else
_ ->
shell_error("No local user #{nickname}")
@@ -301,7 +301,7 @@ def run(["invite" | rest]) do
shell_info("Generated user invite token " <> String.replace(invite.invite_type, "_", " "))
url = url(~p[/registration/#{invite.token}])
IO.puts(url)
shell_info(url)
else
error ->
shell_error("Could not create invite token: #{inspect(error)}")
@@ -373,7 +373,7 @@ def run(["show", nickname]) do
nickname
|> User.get_cached_by_nickname()
shell_info("#{inspect(user)}")
shell_info(user)
end
def run(["send_confirmation", nickname]) do
@@ -457,7 +457,7 @@ def run(["blocking", nickname]) do
with %User{local: true} = user <- User.get_cached_by_nickname(nickname) do
blocks = User.following_ap_ids(user)
IO.puts("#{inspect(blocks)}")
shell_info(blocks)
end
end
@@ -516,12 +516,12 @@ def run(["fix_follow_state", local_user, remote_user]) do
{:follow_data, Pleroma.Web.ActivityPub.Utils.fetch_latest_follow(local, remote)} do
calculated_state = User.following?(local, remote)
IO.puts(
shell_info(
"Request state is #{request_state}, vs calculated state of following=#{calculated_state}"
)
if calculated_state == false && request_state == "accept" do
IO.puts("Discrepancy found, fixing")
shell_info("Discrepancy found, fixing")
Pleroma.Web.CommonAPI.reject_follow_request(local, remote)
shell_info("Relationship fixed")
else
@@ -551,14 +551,14 @@ defp refetch_public_keys(query) do
|> Stream.each(fn users ->
users
|> Enum.each(fn user ->
IO.puts("Re-Resolving: #{user.ap_id}")
shell_info("Re-Resolving: #{user.ap_id}")
with {:ok, user} <- Pleroma.User.fetch_by_ap_id(user.ap_id),
changeset <- Pleroma.User.update_changeset(user),
{:ok, _user} <- Pleroma.User.update_and_set_cache(changeset) do
:ok
else
error -> IO.puts("Could not resolve: #{user.ap_id}, #{inspect(error)}")
error -> shell_info("Could not resolve: #{user.ap_id}, #{inspect(error)}")
end
end)
end)


@@ -280,12 +280,13 @@ test "no user to set status" do
test "password reset token is generated" do
user = insert(:user)
assert capture_io(fn ->
Mix.Tasks.Pleroma.User.run(["reset_password", user.nickname])
end) =~ "URL:"
Mix.Tasks.Pleroma.User.run(["reset_password", user.nickname])
assert_receive {:mix_shell, :info, [message]}
assert message =~ "Generated"
assert_receive {:mix_shell, :info, [url]}
assert url =~ "URL:"
end
test "no user to reset password" do
@@ -327,12 +328,13 @@ test "no user to reset MFA" do
describe "running invite" do
test "invite token is generated" do
assert capture_io(fn ->
Mix.Tasks.Pleroma.User.run(["invite"])
end) =~ "http"
Mix.Tasks.Pleroma.User.run(["invite"])
assert_receive {:mix_shell, :info, [message]}
assert message =~ "Generated user invite token one time"
assert_receive {:mix_shell, :info, [invite_token]}
assert invite_token =~ "http"
end
test "token is generated with expires_at" do