path: root/nikola/plugins/task/robots.py
author    Unit 193 <unit193@unit193.net>  2021-02-03 19:17:50 -0500
committer Unit 193 <unit193@unit193.net>  2021-02-03 19:17:50 -0500
commit    475d074fd74425efbe783fad08f97f2df0c4909f (patch)
tree      2acdae53999b3c74b716efa4edb5b40311fa356a /nikola/plugins/task/robots.py
parent    cd502d52787f666fff3254d7d7e7578930c813c2 (diff)
parent    3a0d66f07b112b6d2bdc2b57bbf717a89a351ce6 (diff)
Update upstream source from tag 'upstream/8.1.2'
Update to upstream version '8.1.2' with Debian dir e5e966a9e6010ef70618dc9a61558fa4db35aceb
Diffstat (limited to 'nikola/plugins/task/robots.py')
-rw-r--r--  nikola/plugins/task/robots.py  13
1 file changed, 5 insertions(+), 8 deletions(-)
diff --git a/nikola/plugins/task/robots.py b/nikola/plugins/task/robots.py
index 8537fc8..627d436 100644
--- a/nikola/plugins/task/robots.py
+++ b/nikola/plugins/task/robots.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,13 +26,9 @@
"""Generate a robots.txt file."""
-from __future__ import print_function, absolute_import, unicode_literals
import io
import os
-try:
- from urlparse import urljoin, urlparse
-except ImportError:
- from urllib.parse import urljoin, urlparse # NOQA
+from urllib.parse import urljoin, urlparse
from nikola.plugin_categories import LateTask
from nikola import utils
@@ -59,7 +55,8 @@ class RobotsFile(LateTask):
def write_robots():
if kw["site_url"] != urljoin(kw["site_url"], "/"):
- utils.LOGGER.warn('robots.txt not ending up in server root, will be useless')
+ utils.LOGGER.warning('robots.txt not ending up in server root, will be useless')
+ utils.LOGGER.info('Add "robots" to DISABLED_PLUGINS to disable this warning and robots.txt generation.')
with io.open(robots_path, 'w+', encoding='utf8') as outf:
outf.write("Sitemap: {0}\n\n".format(sitemapindex_url))
@@ -82,6 +79,6 @@ class RobotsFile(LateTask):
"task_dep": ["sitemap"]
}, kw["filters"])
elif kw["robots_exclusions"]:
- utils.LOGGER.warn('Did not generate robots.txt as one already exists in FILES_FOLDERS. ROBOTS_EXCLUSIONS will not have any affect on the copied file.')
+ utils.LOGGER.warning('Did not generate robots.txt as one already exists in FILES_FOLDERS. ROBOTS_EXCLUSIONS will not have any effect on the copied file.')
else:
utils.LOGGER.debug('Did not generate robots.txt as one already exists in FILES_FOLDERS.')
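
The warning this diff rewords hinges on a root check: urljoin(site_url, "/") resolves any URL back to its server root, so the inequality holds exactly when the site is served from a subpath, where crawlers would never look for robots.txt. A minimal standalone sketch of that check, with illustrative URLs not taken from the diff:

    from urllib.parse import urljoin

    # urljoin(url, "/") keeps the scheme and host but drops the path,
    # so the comparison is False exactly when the site lives at "/".
    for site_url in ("https://example.com/", "https://example.com/blog/"):
        at_root = site_url == urljoin(site_url, "/")
        print(site_url, "->",
              "server root" if at_root else "subpath; robots.txt would be useless")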
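The warn-to-warning change in both hunks is not just a spelling preference: in the standard library's logging module, Logger.warn is an undocumented, deprecated alias of Logger.warning that emits a DeprecationWarning on every call and was slated for removal in later Python releases. A quick demonstration, assuming utils.LOGGER is backed by logging.Logger as in Nikola 8 and run on a Python version where the alias still exists:

    import logging
    import warnings

    logger = logging.getLogger("demo")

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        logger.warn("old spelling")   # deprecated alias, triggers DeprecationWarning
    print([str(w.message) for w in caught])
    logger.warning("new spelling")    # the replacement used in this diff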
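For context around the second hunk, the writer the diff touches emits a Sitemap line and then applies ROBOTS_EXCLUSIONS. The sketch below is a hypothetical approximation, not the plugin's actual code: the Disallow loop and all parameter names are assumptions inferred from the ROBOTS_EXCLUSIONS setting, since the diff only shows the root check and the Sitemap write.

    import io
    from urllib.parse import urljoin

    def write_robots(robots_path, site_url, sitemapindex_url, exclusions):
        """Hypothetical approximation of the plugin's writer."""
        if site_url != urljoin(site_url, "/"):
            print("robots.txt not ending up in server root, will be useless")
        with io.open(robots_path, "w+", encoding="utf8") as outf:
            outf.write("Sitemap: {0}\n\n".format(sitemapindex_url))
            if exclusions:  # assumption: exclusions become Disallow rules
                outf.write("User-Agent: *\n")
                for rule in exclusions:
                    outf.write("Disallow: {0}\n".format(rule))

    write_robots("robots.txt", "https://example.com/",
                 "https://example.com/sitemapindex.xml", ["/tags/"])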