diff options
| author | 2014-10-21 10:33:15 -0300 | |
|---|---|---|
| committer | 2014-10-21 10:33:15 -0300 | |
| commit | 5ec02211214350ee558fd9f6bb052264fd24f75e (patch) | |
| tree | b61e8c61a95d18a91d053e71dcbd7b30e47552a1 /nikola/plugins/task/robots.py | |
| parent | 58c4878526dec5510f23c812274686787d8724ba (diff) | |
Imported Upstream version 7.1.0 (tag: upstream/7.1.0)
Diffstat (limited to 'nikola/plugins/task/robots.py')
| -rw-r--r-- | nikola/plugins/task/robots.py | 10 |
1 file changed, 5 insertions, 5 deletions
diff --git a/nikola/plugins/task/robots.py b/nikola/plugins/task/robots.py index 9944c0d..b229d37 100644 --- a/nikola/plugins/task/robots.py +++ b/nikola/plugins/task/robots.py @@ -25,7 +25,7 @@ # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. from __future__ import print_function, absolute_import, unicode_literals -import codecs +import io import os try: from urlparse import urljoin, urlparse @@ -51,14 +51,14 @@ class RobotsFile(LateTask): "robots_exclusions": self.site.config["ROBOTS_EXCLUSIONS"] } - if kw["site_url"] != urljoin(kw["site_url"], "/"): - utils.LOGGER.warn('robots.txt not ending up in server root, will be useless') - sitemapindex_url = urljoin(kw["base_url"], "sitemapindex.xml") robots_path = os.path.join(kw['output_folder'], "robots.txt") def write_robots(): - with codecs.open(robots_path, 'wb+', 'utf8') as outf: + if kw["site_url"] != urljoin(kw["site_url"], "/"): + utils.LOGGER.warn('robots.txt not ending up in server root, will be useless') + + with io.open(robots_path, 'w+', encoding='utf8') as outf: outf.write("Sitemap: {0}\n\n".format(sitemapindex_url)) if kw["robots_exclusions"]: outf.write("User-Agent: *\n") |
