[docs] add robots txt (#24726)

As per discussion on Slack, this change should prevent old versions of the docs from being indexed by search engines; see readthedocs/readthedocs.org#2430 for reference.

Signed-off-by: Max Pumperla <max.pumperla@googlemail.com>
This commit is contained in:
Max Pumperla 2022-05-18 11:52:22 +02:00 committed by GitHub
parent 8f50087908
commit 7844aeafde
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
2 changed files with 9 additions and 0 deletions

View file

@@ -69,6 +69,9 @@ jupyter_execute_notebooks = os.getenv("RUN_NOTEBOOKS", "off")
external_toc_exclude_missing = False
external_toc_path = "_toc.yml"
html_extra_path = ["robots.txt"]
# There's a flaky autodoc import for "TensorFlowVariables" that fails depending on the doc structure / order
# of imports.
# autodoc_mock_imports = ["ray.experimental.tf_utils"]

6
doc/source/robots.txt Normal file
View file

@@ -0,0 +1,6 @@
User-agent: *
Allow: /*/latest/
Allow: /en/latest/ # Fallback for bots that don't understand wildcards
Allow: /*/master/
Allow: /en/master/ # Fallback for bots that don't understand wildcards
Disallow: /