forked from AkkomaGang/akkoma
Generate permissive or restrictive robots.txt in the config generator
parent 180b87257c
commit cd41584ac4
2 changed files with 36 additions and 0 deletions
@@ -81,6 +81,14 @@ def run(["gen" | rest]) do
       email = Common.get_option(options, :admin_email, "What is your admin email address?")
 
+      indexable =
+        Common.get_option(
+          options,
+          :indexable,
+          "Do you want search engines to index your site? (y/n)",
+          "y"
+        ) === "y"
+
       dbhost =
         Common.get_option(options, :dbhost, "What is the hostname of your database?", "localhost")
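Side note (not part of the diff): the answer is compared with `===`, so only a literal "y" turns indexing on. A quick sketch of that comparison, assuming `Common.get_option/4` hands back the trimmed answer string:

```elixir
# Anything other than exactly "y" results in the restrictive robots.txt.
"y" === "y"  # => true  -> indexable, search engines allowed
"n" === "y"  # => false -> restrictive, everything disallowed
"Y" === "y"  # => false -> the comparison is case-sensitive
```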
@@ -142,6 +150,8 @@ def run(["gen" | rest]) do
       Mix.shell().info("Writing #{psql_path}.")
       File.write(psql_path, result_psql)
 
+      write_robots_txt(indexable)
+
       Mix.shell().info(
         "\n" <>
           """
@@ -163,4 +173,28 @@ def run(["gen" | rest]) do
       )
     end
   end
+
+  defp write_robots_txt(indexable) do
+    robots_txt =
+      EEx.eval_file(
+        Path.expand("robots_txt.eex", __DIR__),
+        indexable: indexable
+      )
+
+    static_dir = Pleroma.Config.get([:instance, :static_dir], "instance/static/")
+
+    unless File.exists?(static_dir) do
+      File.mkdir_p!(static_dir)
+    end
+
+    robots_txt_path = Path.join(static_dir, "robots.txt")
+
+    if File.exists?(robots_txt_path) do
+      File.cp!(robots_txt_path, "#{robots_txt_path}.bak")
+      Mix.shell().info("Backing up existing robots.txt to #{robots_txt_path}.bak")
+    end
+
+    File.write(robots_txt_path, robots_txt)
+    Mix.shell().info("Writing #{robots_txt_path}.")
+  end
 end
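As a usage note (assuming the default configuration, i.e. no custom `:static_dir` is set, so `Pleroma.Config.get/2` falls back to the default given above), the generated file and its backup end up under the instance static directory:

```elixir
# Default paths, derived from the call above; adjust if :static_dir is configured.
static_dir = "instance/static/"
robots_txt_path = Path.join(static_dir, "robots.txt")
# => "instance/static/robots.txt"

# A pre-existing robots.txt is copied aside before being overwritten:
backup_path = "#{robots_txt_path}.bak"
# => "instance/static/robots.txt.bak"
```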
lib/mix/tasks/pleroma/robots_txt.eex (new file, 2 lines)

@@ -0,0 +1,2 @@
+User-Agent: *
+Disallow: <%= if indexable, do: "", else: "/" %>
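For illustration (not part of the commit): evaluating the same template body with both possible bindings, here via `EEx.eval_string/2` instead of the `EEx.eval_file/2` call used by the task:

```elixir
template = ~s(User-Agent: *\nDisallow: <%= if indexable, do: "", else: "/" %>\n)

EEx.eval_string(template, indexable: true)
# => "User-Agent: *\nDisallow: \n"   (permissive: nothing is disallowed)

EEx.eval_string(template, indexable: false)
# => "User-Agent: *\nDisallow: /\n"  (restrictive: the whole site is disallowed)
```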