Diffstat (limited to 'nikola/plugins/task/robots.py')
-rw-r--r--  nikola/plugins/task/robots.py  19
1 file changed, 8 insertions, 11 deletions
diff --git a/nikola/plugins/task/robots.py b/nikola/plugins/task/robots.py
index 65254b6..627d436 100644
--- a/nikola/plugins/task/robots.py
+++ b/nikola/plugins/task/robots.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2015 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,20 +26,15 @@
"""Generate a robots.txt file."""
-from __future__ import print_function, absolute_import, unicode_literals
import io
import os
-try:
-    from urlparse import urljoin, urlparse
-except ImportError:
-    from urllib.parse import urljoin, urlparse  # NOQA
+from urllib.parse import urljoin, urlparse
from nikola.plugin_categories import LateTask
from nikola import utils
class RobotsFile(LateTask):
-
"""Generate a robots.txt file."""
name = "robots_file"
@@ -60,18 +55,20 @@ class RobotsFile(LateTask):
        def write_robots():
            if kw["site_url"] != urljoin(kw["site_url"], "/"):
-                utils.LOGGER.warn('robots.txt not ending up in server root, will be useless')
+                utils.LOGGER.warning('robots.txt not ending up in server root, will be useless')
+                utils.LOGGER.info('Add "robots" to DISABLED_PLUGINS to disable this warning and robots.txt generation.')

            with io.open(robots_path, 'w+', encoding='utf8') as outf:
                outf.write("Sitemap: {0}\n\n".format(sitemapindex_url))
+                outf.write("User-Agent: *\n")
                if kw["robots_exclusions"]:
-                    outf.write("User-Agent: *\n")
                    for loc in kw["robots_exclusions"]:
                        outf.write("Disallow: {0}\n".format(loc))
+                outf.write("Host: {0}\n".format(urlparse(kw["base_url"]).netloc))

        yield self.group_task()

-        if not utils.get_asset_path("robots.txt", [], files_folders=kw["files_folders"]):
+        if not utils.get_asset_path("robots.txt", [], files_folders=kw["files_folders"], output_dir=False):
            yield utils.apply_filters({
                "basename": self.name,
                "name": robots_path,
@@ -82,6 +79,6 @@ class RobotsFile(LateTask):
"task_dep": ["sitemap"]
}, kw["filters"])
elif kw["robots_exclusions"]:
- utils.LOGGER.warn('Did not generate robots.txt as one already exists in FILES_FOLDERS. ROBOTS_EXCLUSIONS will not have any affect on the copied fie.')
+ utils.LOGGER.warning('Did not generate robots.txt as one already exists in FILES_FOLDERS. ROBOTS_EXCLUSIONS will not have any affect on the copied file.')
else:
utils.LOGGER.debug('Did not generate robots.txt as one already exists in FILES_FOLDERS.')
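
For reference, a minimal sketch of how the patched write_robots() logic reads once the hunks above are applied. This is reconstructed from the diff, not copied from the file: in the plugin, kw, robots_path and sitemapindex_url are built inside the surrounding gen_tasks() method (not shown here), and messages go through Nikola's utils.LOGGER rather than print(). The concrete values below are hypothetical stand-ins so the sketch runs on its own.

import io
from urllib.parse import urljoin, urlparse

# Hypothetical stand-ins for values that gen_tasks() normally derives from the site config.
kw = {
    "site_url": "https://example.com/",
    "base_url": "https://example.com/",
    "robots_exclusions": ["/tag/"],
}
robots_path = "robots.txt"  # normally placed under the output folder
sitemapindex_url = urljoin(kw["base_url"], "sitemapindex.xml")


def write_robots():
    # robots.txt is only honoured at the server root, so warn if the site
    # lives in a subdirectory (print() stands in for utils.LOGGER.warning/info).
    if kw["site_url"] != urljoin(kw["site_url"], "/"):
        print('robots.txt not ending up in server root, will be useless')
        print('Add "robots" to DISABLED_PLUGINS to disable this warning and robots.txt generation.')

    with io.open(robots_path, 'w+', encoding='utf8') as outf:
        # Sitemap reference first, then an unconditional User-Agent line
        # (previously only written when there were exclusions).
        outf.write("Sitemap: {0}\n\n".format(sitemapindex_url))
        outf.write("User-Agent: *\n")
        if kw["robots_exclusions"]:
            for loc in kw["robots_exclusions"]:
                outf.write("Disallow: {0}\n".format(loc))
        # New in this diff: advertise the canonical host name.
        outf.write("Host: {0}\n".format(urlparse(kw["base_url"]).netloc))


write_robots()

With those assumed values, the generated robots.txt would read roughly:

Sitemap: https://example.com/sitemapindex.xml

User-Agent: *
Disallow: /tag/
Host: example.com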