robots.txt
Add a default robots.txt that allows bots access to all paths. Add a mix task to generate a robots.txt that allows bots access to no paths. Document custom emojis, MRF, and static_dir; the static_dir documentation includes docs for the robots.txt Mix task.
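For context, the allow-all default and the disallow-all file written by the task differ only in the Disallow line. A sketch of both in standard robots.txt syntax (the disallow-all content is verbatim from the task below; the exact allow-all file shipped by this commit is assumed to follow the usual permissive form):

    # Allow-all: an empty Disallow matches no paths, so everything stays crawlable.
    User-Agent: *
    Disallow:

    # Disallow-all: what `mix pleroma.robots_txt disallow_all` writes.
    User-Agent: *
    Disallow: /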
parent 4e72762322
commit 3dadaa4432
7 changed files with 191 additions and 2 deletions
lib/mix/tasks/pleroma/robotstxt.ex (new file, 32 additions)
@@ -0,0 +1,32 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Mix.Tasks.Pleroma.RobotsTxt do
+  use Mix.Task
+
+  @shortdoc "Generate robots.txt"
+  @moduledoc """
+  Generates robots.txt
+
+  ## Overwrite robots.txt to disallow all
+
+      mix pleroma.robots_txt disallow_all
+
+  This will write a robots.txt that will hide all paths on your instance
+  from search engines and other robots that obey robots.txt
+
+  """
+  def run(["disallow_all"]) do
+    static_dir = Pleroma.Config.get([:instance, :static_dir], "instance/static/")
+
+    if !File.exists?(static_dir) do
+      File.mkdir_p!(static_dir)
+    end
+
+    robots_txt_path = Path.join(static_dir, "robots.txt")
+    robots_txt_content = "User-Agent: *\nDisallow: /\n"
+
+    File.write!(robots_txt_path, robots_txt_content, [:write])
+  end
+end
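As a usage note, the task reads `:static_dir` from `[:instance, :static_dir]`, falling back to "instance/static/", creates that directory if missing, and overwrites robots.txt in it. A minimal ExUnit sketch of how that effect could be checked; it assumes `Pleroma.Config.put/2` as the counterpart of the `Pleroma.Config.get/2` call above and a writable temp dir, and the test module name is hypothetical, not necessarily one of this commit's other changed files:

    defmodule Mix.Tasks.Pleroma.RobotsTxtTest do
      use ExUnit.Case

      test "disallow_all writes a blocking robots.txt into static_dir" do
        # Point :static_dir at a scratch directory so real instance files are untouched.
        static_dir = Path.join(System.tmp_dir!(), "robots_txt_test")
        Pleroma.Config.put([:instance, :static_dir], static_dir)

        Mix.Tasks.Pleroma.RobotsTxt.run(["disallow_all"])

        # The task creates the directory if needed and overwrites robots.txt.
        assert File.read!(Path.join(static_dir, "robots.txt")) ==
                 "User-Agent: *\nDisallow: /\n"
      end
    end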