path: root/nikola/plugins/task/robots.py
author    Agustin Henze <tin@sluc.org.ar>    2015-07-08 07:35:06 -0300
committer Agustin Henze <tin@sluc.org.ar>    2015-07-08 07:35:06 -0300
commit 055d72d76b44b0e627c8a17c48dbecd62e44197b (patch)
tree   e2c8d5475477c46115461fe9547c1ee797873635 /nikola/plugins/task/robots.py
parent 61f3aad02cd6492cb38e41b66f2ed8ec56e98981 (diff)
parent b0b24795b24ee6809397fbbadf42f31f310a219f (diff)
Merge tag 'upstream/7.6.0'
Upstream version 7.6.0
Diffstat (limited to 'nikola/plugins/task/robots.py')
-rw-r--r--    nikola/plugins/task/robots.py    11
1 file changed, 6 insertions(+), 5 deletions(-)
diff --git a/nikola/plugins/task/robots.py b/nikola/plugins/task/robots.py
index b229d37..2f25a21 100644
--- a/nikola/plugins/task/robots.py
+++ b/nikola/plugins/task/robots.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-

-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.

# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -48,7 +48,8 @@ class RobotsFile(LateTask):
"site_url": self.site.config["SITE_URL"],
"output_folder": self.site.config["OUTPUT_FOLDER"],
"files_folders": self.site.config['FILES_FOLDERS'],
- "robots_exclusions": self.site.config["ROBOTS_EXCLUSIONS"]
+ "robots_exclusions": self.site.config["ROBOTS_EXCLUSIONS"],
+ "filters": self.site.config["FILTERS"],
}
sitemapindex_url = urljoin(kw["base_url"], "sitemapindex.xml")
@@ -68,15 +69,15 @@ class RobotsFile(LateTask):
        yield self.group_task()

        if not utils.get_asset_path("robots.txt", [], files_folders=kw["files_folders"]):
-            yield {
+            yield utils.apply_filters({
                "basename": self.name,
                "name": robots_path,
                "targets": [robots_path],
                "actions": [(write_robots)],
-                "uptodate": [utils.config_changed(kw)],
+                "uptodate": [utils.config_changed(kw, 'nikola.plugins.task.robots')],
                "clean": True,
                "task_dep": ["sitemap"]
-            }
+            }, kw["filters"])
        elif kw["robots_exclusions"]:
            utils.LOGGER.warn('Did not generate robots.txt as one already exists in FILES_FOLDERS. ROBOTS_EXCLUSIONS will not have any effect on the copied file.')
        else:
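
The second hunk routes the yielded doit task through utils.apply_filters and passes an identifier, 'nikola.plugins.task.robots', to utils.config_changed. The practical effect is that any FILTERS entry matching the .txt extension now also post-processes the generated robots.txt, while the added identifier namespaces this task's cached configuration so caches from different plugins do not collide. A minimal conf.py sketch, assuming a shell-command filter (the sed command is illustrative and not part of this change):

    # conf.py -- illustrative sketch, not shipped Nikola configuration.
    # FILTERS maps file extensions to post-processing steps; a string entry
    # is run as a shell command with %s replaced by the target file's path.
    # After this change, a ".txt" entry also runs on the generated robots.txt.
    FILTERS = {
        ".txt": ["sed -i -e 's/[ \t]*$//' %s"],  # hypothetical: strip trailing whitespace
    }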