diff options
| author | 2015-11-07 15:12:09 -0500 | |
|---|---|---|
| committer | 2015-11-07 15:12:09 -0500 | |
| commit | 0bd95127e2fd78b8de8f37a92cfb92e2e849b7b5 (patch) | |
| tree | 3a819b5f308d285928591292788dd3c3a85a335c | |
| parent | 0e3a5075a99ebfc95eefca3ac41cf71eeaaa3eb9 (diff) | |
robots.txt: Disallow /debian/
| -rw-r--r-- | conf.py | 2 |
1 file changed, 1 insertion(+), 1 deletion(-)
```diff
@@ -745,7 +745,7 @@ COMMENT_SYSTEM_ID = ""
 # from indexing and other robotic spidering. * is supported. Will only be effective
 # if SITE_URL points to server root. The list is used to exclude resources from
 # /robots.txt and /sitemap.xml, and to inform search engines about /sitemapindex.xml.
-# ROBOTS_EXCLUSIONS = ["/archive.html", "/category/*.html"]
+ROBOTS_EXCLUSIONS = ["/archive.html", "/category/*.html", "/debian/*"]
 # Instead of putting files in <slug>.html, put them in <slug>/index.html.
 # No web server configuration is required. Also enables STRIP_INDEXES.
```
