author     Agustin Henze <tin@sluc.org.ar>    2015-07-08 07:35:02 -0300
committer  Agustin Henze <tin@sluc.org.ar>    2015-07-08 07:35:02 -0300
commit     b0b24795b24ee6809397fbbadf42f31f310a219f (patch)
tree       46d05bb47460b4ec679211717c4ab07414b80d9c /nikola/plugins
parent     5ec02211214350ee558fd9f6bb052264fd24f75e (diff)
Imported Upstream version 7.6.0 (tag: upstream/7.6.0)
Diffstat (limited to 'nikola/plugins')
-rw-r--r--  nikola/plugins/basic_import.py | 29
-rw-r--r--  nikola/plugins/command/__init__.py | 2
-rw-r--r--  nikola/plugins/command/auto.plugin | 2
-rw-r--r--  nikola/plugins/command/auto.py | 87
-rw-r--r--  nikola/plugins/command/auto/__init__.py | 366
l---------  nikola/plugins/command/auto/livereload.js | 1
-rw-r--r--  nikola/plugins/command/bootswatch_theme.plugin | 2
-rw-r--r--  nikola/plugins/command/bootswatch_theme.py | 20
-rw-r--r--  nikola/plugins/command/check.plugin | 2
-rw-r--r--  nikola/plugins/command/check.py | 134
-rw-r--r--  nikola/plugins/command/console.plugin | 2
-rw-r--r--  nikola/plugins/command/console.py | 6
-rw-r--r--  nikola/plugins/command/deploy.plugin | 2
-rw-r--r--  nikola/plugins/command/deploy.py | 39
-rw-r--r--  nikola/plugins/command/github_deploy.plugin | 2
-rw-r--r--  nikola/plugins/command/github_deploy.py | 220
-rw-r--r--  nikola/plugins/command/import_wordpress.plugin | 2
-rw-r--r--  nikola/plugins/command/import_wordpress.py | 157
-rw-r--r--  nikola/plugins/command/init.plugin | 2
-rw-r--r--  nikola/plugins/command/init.py | 87
-rw-r--r--  nikola/plugins/command/install_theme.plugin | 2
-rw-r--r--  nikola/plugins/command/install_theme.py | 76
-rw-r--r--  nikola/plugins/command/new_page.plugin | 2
-rw-r--r--  nikola/plugins/command/new_page.py | 30
-rw-r--r--  nikola/plugins/command/new_post.plugin | 2
-rw-r--r--  nikola/plugins/command/new_post.py | 188
-rw-r--r--  nikola/plugins/command/orphans.plugin | 2
-rw-r--r--  nikola/plugins/command/orphans.py | 2
-rw-r--r--  nikola/plugins/command/plugin.plugin | 2
-rw-r--r--  nikola/plugins/command/plugin.py | 43
-rw-r--r--  nikola/plugins/command/rst2html.plugin | 9
-rw-r--r--  nikola/plugins/command/rst2html/__init__.py | 69
-rw-r--r--  nikola/plugins/command/rst2html/rst2html.tmpl | 13
-rw-r--r--  nikola/plugins/command/serve.plugin | 2
-rw-r--r--  nikola/plugins/command/serve.py | 44
-rw-r--r--  nikola/plugins/command/status.plugin | 9
-rw-r--r--  nikola/plugins/command/status.py | 140
-rw-r--r--  nikola/plugins/command/version.plugin | 2
-rw-r--r--  nikola/plugins/command/version.py | 27
-rw-r--r--  nikola/plugins/compile/__init__.py | 2
-rw-r--r--  nikola/plugins/compile/html.plugin | 2
-rw-r--r--  nikola/plugins/compile/html.py | 8
-rw-r--r--  nikola/plugins/compile/ipynb.plugin | 8
-rw-r--r--  nikola/plugins/compile/ipynb.py | 150
-rw-r--r--  nikola/plugins/compile/ipynb/README.txt | 44
-rw-r--r--  nikola/plugins/compile/ipynb/__init__.py | 97
-rw-r--r--  nikola/plugins/compile/markdown.plugin | 2
-rw-r--r--  nikola/plugins/compile/markdown/__init__.py | 12
-rw-r--r--  nikola/plugins/compile/markdown/mdx_gist.py | 48
-rw-r--r--  nikola/plugins/compile/markdown/mdx_nikola.py | 4
-rw-r--r--  nikola/plugins/compile/markdown/mdx_podcast.py | 2
-rw-r--r--  nikola/plugins/compile/pandoc.plugin | 2
-rw-r--r--  nikola/plugins/compile/pandoc.py | 11
-rw-r--r--  nikola/plugins/compile/php.plugin | 2
-rw-r--r--  nikola/plugins/compile/php.py | 3
-rw-r--r--  nikola/plugins/compile/rest.plugin | 2
-rw-r--r--  nikola/plugins/compile/rest/__init__.py | 114
-rw-r--r--  nikola/plugins/compile/rest/chart.py | 2
-rw-r--r--  nikola/plugins/compile/rest/doc.py | 2
-rw-r--r--  nikola/plugins/compile/rest/gist.py | 20
-rw-r--r--  nikola/plugins/compile/rest/listing.py | 112
-rw-r--r--  nikola/plugins/compile/rest/media.py | 2
-rw-r--r--  nikola/plugins/compile/rest/post_list.py | 20
-rw-r--r--  nikola/plugins/compile/rest/slides.py | 2
-rw-r--r--  nikola/plugins/compile/rest/thumbnail.plugin | 9
-rw-r--r--  nikola/plugins/compile/rest/thumbnail.py | 69
-rw-r--r--  nikola/plugins/compile/rest/vimeo.py | 12
-rw-r--r--  nikola/plugins/compile/rest/youtube.py | 2
-rw-r--r--  nikola/plugins/loghandler/__init__.py | 2
-rw-r--r--  nikola/plugins/loghandler/smtp.plugin | 2
-rw-r--r--  nikola/plugins/loghandler/smtp.py | 2
-rw-r--r--  nikola/plugins/loghandler/stderr.plugin | 2
-rw-r--r--  nikola/plugins/loghandler/stderr.py | 2
-rw-r--r--  nikola/plugins/misc/scan_posts.plugin | 10
-rw-r--r--  nikola/plugins/misc/scan_posts.py | 100
-rw-r--r--  nikola/plugins/task/__init__.py | 2
-rw-r--r--  nikola/plugins/task/archive.plugin | 2
-rw-r--r--  nikola/plugins/task/archive.py | 248
-rw-r--r--  nikola/plugins/task/bundles.plugin | 2
-rw-r--r--  nikola/plugins/task/bundles.py | 23
-rw-r--r--  nikola/plugins/task/copy_assets.plugin | 2
-rw-r--r--  nikola/plugins/task/copy_assets.py | 6
-rw-r--r--  nikola/plugins/task/copy_files.plugin | 2
-rw-r--r--  nikola/plugins/task/copy_files.py | 4
-rw-r--r--  nikola/plugins/task/galleries.plugin | 2
-rw-r--r--  nikola/plugins/task/galleries.py | 312
-rw-r--r--  nikola/plugins/task/gzip.plugin | 2
-rw-r--r--  nikola/plugins/task/gzip.py | 2
-rw-r--r--  nikola/plugins/task/indexes.plugin | 2
-rw-r--r--  nikola/plugins/task/indexes.py | 123
-rw-r--r--  nikola/plugins/task/listings.plugin | 2
-rw-r--r--  nikola/plugins/task/listings.py | 266
-rw-r--r--  nikola/plugins/task/pages.plugin | 2
-rw-r--r--  nikola/plugins/task/pages.py | 6
-rw-r--r--  nikola/plugins/task/posts.plugin | 2
-rw-r--r--  nikola/plugins/task/posts.py | 63
-rw-r--r--  nikola/plugins/task/redirect.plugin | 2
-rw-r--r--  nikola/plugins/task/redirect.py | 23
-rw-r--r--  nikola/plugins/task/robots.plugin | 2
-rw-r--r--  nikola/plugins/task/robots.py | 11
-rw-r--r--  nikola/plugins/task/rss.plugin | 2
-rw-r--r--  nikola/plugins/task/rss.py | 18
-rw-r--r--  nikola/plugins/task/scale_images.plugin | 9
-rw-r--r--  nikola/plugins/task/scale_images.py | 96
-rw-r--r--  nikola/plugins/task/sitemap.plugin | 2
-rw-r--r--  nikola/plugins/task/sitemap/__init__.py | 106
-rw-r--r--  nikola/plugins/task/sources.plugin | 2
-rw-r--r--  nikola/plugins/task/sources.py | 7
-rw-r--r--  nikola/plugins/task/tags.plugin | 2
-rw-r--r--  nikola/plugins/task/tags.py | 301
-rw-r--r--  nikola/plugins/template/__init__.py | 2
-rw-r--r--  nikola/plugins/template/jinja.plugin | 2
-rw-r--r--  nikola/plugins/template/jinja.py | 5
-rw-r--r--  nikola/plugins/template/mako.plugin | 2
-rw-r--r--  nikola/plugins/template/mako.py | 4
115 files changed, 2902 insertions, 1484 deletions
diff --git a/nikola/plugins/basic_import.py b/nikola/plugins/basic_import.py
index 764968a..f8a3a3c 100644
--- a/nikola/plugins/basic_import.py
+++ b/nikola/plugins/basic_import.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -29,6 +29,7 @@ import io
import csv
import datetime
import os
+import sys
from pkg_resources import resource_filename
try:
@@ -114,32 +115,34 @@ class ImportMixin(object):
return content
@classmethod
- def write_content(cls, filename, content):
- doc = html.document_fromstring(content)
- doc.rewrite_links(replacer)
+ def write_content(cls, filename, content, rewrite_html=True):
+ if rewrite_html:
+ doc = html.document_fromstring(content)
+ doc.rewrite_links(replacer)
+ content = html.tostring(doc, encoding='utf8')
+ else:
+ content = content.encode('utf-8')
utils.makedirs(os.path.dirname(filename))
with open(filename, "wb+") as fd:
- fd.write(html.tostring(doc, encoding='utf8'))
+ fd.write(content)
@staticmethod
- def write_metadata(filename, title, slug, post_date, description, tags):
+ def write_metadata(filename, title, slug, post_date, description, tags, **kwargs):
if not description:
description = ""
utils.makedirs(os.path.dirname(filename))
with io.open(filename, "w+", encoding="utf8") as fd:
- fd.write('{0}\n'.format(title))
- fd.write('{0}\n'.format(slug))
- fd.write('{0}\n'.format(post_date))
- fd.write('{0}\n'.format(','.join(tags)))
- fd.write('\n')
- fd.write('{0}\n'.format(description))
+ data = {'title': title, 'slug': slug, 'date': post_date, 'tags': ','.join(tags), 'description': description}
+ data.update(kwargs)
+ fd.write(utils.write_metadata(data))
@staticmethod
def write_urlmap_csv(output_file, url_map):
utils.makedirs(os.path.dirname(output_file))
- with io.open(output_file, 'w+', encoding='utf8') as fd:
+ fmode = 'wb+' if sys.version_info[0] == 2 else 'w+'
+ with io.open(output_file, fmode) as fd:
csv_writer = csv.writer(fd)
for item in url_map.items():
csv_writer.writerow(item)
diff --git a/nikola/plugins/command/__init__.py b/nikola/plugins/command/__init__.py
index 6ad8bac..a1d17a6 100644
--- a/nikola/plugins/command/__init__.py
+++ b/nikola/plugins/command/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
diff --git a/nikola/plugins/command/auto.plugin b/nikola/plugins/command/auto.plugin
index 87939b2..a1c6820 100644
--- a/nikola/plugins/command/auto.plugin
+++ b/nikola/plugins/command/auto.plugin
@@ -4,6 +4,6 @@ Module = auto
[Documentation]
Author = Roberto Alsina
-Version = 0.2
+Version = 2.1.0
Website = http://getnikola.com
Description = Automatically detect site changes, rebuild and optionally refresh a browser.
diff --git a/nikola/plugins/command/auto.py b/nikola/plugins/command/auto.py
deleted file mode 100644
index 7f3f66f..0000000
--- a/nikola/plugins/command/auto.py
+++ /dev/null
@@ -1,87 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright © 2012-2014 Roberto Alsina and others.
-
-# Permission is hereby granted, free of charge, to any
-# person obtaining a copy of this software and associated
-# documentation files (the "Software"), to deal in the
-# Software without restriction, including without limitation
-# the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the
-# Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice
-# shall be included in all copies or substantial portions of
-# the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
-# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
-# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
-# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
-# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-from __future__ import print_function, unicode_literals
-
-import os
-import subprocess
-
-from nikola.plugin_categories import Command
-from nikola.utils import req_missing
-
-
-class CommandAuto(Command):
- """Start debugging console."""
- name = "auto"
- doc_purpose = "automatically detect site changes, rebuild and optionally refresh a browser"
- cmd_options = [
- {
- 'name': 'browser',
- 'short': 'b',
- 'type': bool,
- 'help': 'Start a web browser.',
- 'default': False,
- },
- {
- 'name': 'port',
- 'short': 'p',
- 'long': 'port',
- 'default': 8000,
- 'type': int,
- 'help': 'Port nummber (default: 8000)',
- },
- ]
-
- def _execute(self, options, args):
- """Start the watcher."""
- try:
- from livereload import Server
- except ImportError:
- req_missing(['livereload'], 'use the "auto" command')
- return
-
- # Run an initial build so we are up-to-date
- subprocess.call(("nikola", "build"))
-
- port = options and options.get('port')
-
- server = Server()
- server.watch('conf.py', 'nikola build')
- server.watch('themes/', 'nikola build')
- server.watch('templates/', 'nikola build')
- server.watch(self.site.config['GALLERY_PATH'], 'nikola build')
- for item in self.site.config['post_pages']:
- server.watch(os.path.dirname(item[0]), 'nikola build')
- for item in self.site.config['FILES_FOLDERS']:
- server.watch(item, 'nikola build')
-
- out_folder = self.site.config['OUTPUT_FOLDER']
- if options and options.get('browser'):
- browser = True
- else:
- browser = False
-
- server.serve(port, None, out_folder, True, browser)
diff --git a/nikola/plugins/command/auto/__init__.py b/nikola/plugins/command/auto/__init__.py
new file mode 100644
index 0000000..c25ef8a
--- /dev/null
+++ b/nikola/plugins/command/auto/__init__.py
@@ -0,0 +1,366 @@
+# -*- coding: utf-8 -*-
+
+# Copyright © 2012-2015 Roberto Alsina and others.
+
+# Permission is hereby granted, free of charge, to any
+# person obtaining a copy of this software and associated
+# documentation files (the "Software"), to deal in the
+# Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the
+# Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice
+# shall be included in all copies or substantial portions of
+# the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
+# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+from __future__ import print_function
+
+import json
+import mimetypes
+import os
+import re
+import subprocess
+try:
+ from urlparse import urlparse
+except ImportError:
+ from urllib.parse import urlparse # NOQA
+import webbrowser
+from wsgiref.simple_server import make_server
+import wsgiref.util
+
+from blinker import signal
+try:
+ from ws4py.websocket import WebSocket
+ from ws4py.server.wsgirefserver import WSGIServer, WebSocketWSGIRequestHandler
+ from ws4py.server.wsgiutils import WebSocketWSGIApplication
+ from ws4py.messaging import TextMessage
+except ImportError:
+ WebSocket = object
+try:
+ import watchdog
+ from watchdog.observers import Observer
+ from watchdog.events import FileSystemEventHandler, PatternMatchingEventHandler
+except ImportError:
+ watchdog = None
+ FileSystemEventHandler = object
+ PatternMatchingEventHandler = object
+
+
+from nikola.plugin_categories import Command
+from nikola.utils import req_missing, get_logger, get_theme_path
+LRJS_PATH = os.path.join(os.path.dirname(__file__), 'livereload.js')
+error_signal = signal('error')
+refresh_signal = signal('refresh')
+
+ERROR_N = '''<html>
+<head>
+</head>
+<boody>
+ERROR {}
+</body>
+</html>
+'''
+
+
+class CommandAuto(Command):
+ """Start debugging console."""
+ name = "auto"
+ logger = None
+ doc_purpose = "builds and serves a site; automatically detects site changes, rebuilds, and optionally refreshes a browser"
+ cmd_options = [
+ {
+ 'name': 'port',
+ 'short': 'p',
+ 'long': 'port',
+ 'default': 8000,
+ 'type': int,
+ 'help': 'Port nummber (default: 8000)',
+ },
+ {
+ 'name': 'address',
+ 'short': 'a',
+ 'long': 'address',
+ 'type': str,
+ 'default': '127.0.0.1',
+ 'help': 'Address to bind (default: 127.0.0.1 – localhost)',
+ },
+ {
+ 'name': 'browser',
+ 'short': 'b',
+ 'long': 'browser',
+ 'type': bool,
+ 'help': 'Start a web browser.',
+ 'default': False,
+ },
+ {
+ 'name': 'ipv6',
+ 'short': '6',
+ 'long': 'ipv6',
+ 'default': False,
+ 'type': bool,
+ 'help': 'Use IPv6',
+ },
+ ]
+
+ def _execute(self, options, args):
+ """Start the watcher."""
+
+ self.logger = get_logger('auto', self.site.loghandlers)
+ LRSocket.logger = self.logger
+
+ if WebSocket is object and watchdog is None:
+ req_missing(['ws4py', 'watchdog'], 'use the "auto" command')
+ elif WebSocket is object:
+ req_missing(['ws4py'], 'use the "auto" command')
+ elif watchdog is None:
+ req_missing(['watchdog'], 'use the "auto" command')
+
+ self.cmd_arguments = ['nikola', 'build']
+ if self.site.configuration_filename != 'conf.py':
+ self.cmd_arguments = ['--conf=' + self.site.configuration_filename] + self.cmd_arguments
+
+ # Run an initial build so we are up-to-date
+ subprocess.call(self.cmd_arguments)
+
+ port = options and options.get('port')
+ self.snippet = '''<script>document.write('<script src="http://'
+ + (location.host || 'localhost').split(':')[0]
+ + ':{0}/livereload.js?snipver=1"></'
+ + 'script>')</script>
+ </head>'''.format(port)
+
+ # Do not duplicate entries -- otherwise, multiple rebuilds are triggered
+ watched = set([
+ 'templates/',
+ ] + [get_theme_path(name) for name in self.site.THEMES])
+ for item in self.site.config['post_pages']:
+ watched.add(os.path.dirname(item[0]))
+ for item in self.site.config['FILES_FOLDERS']:
+ watched.add(item)
+ for item in self.site.config['GALLERY_FOLDERS']:
+ watched.add(item)
+ for item in self.site.config['LISTINGS_FOLDERS']:
+ watched.add(item)
+
+ out_folder = self.site.config['OUTPUT_FOLDER']
+ if options and options.get('browser'):
+ browser = True
+ else:
+ browser = False
+
+ if options['ipv6']:
+ dhost = '::'
+ else:
+ dhost = None
+
+ host = options['address'].strip('[').strip(']') or dhost
+
+ # Instantiate global observer
+ observer = Observer()
+ # Watch output folders and trigger reloads
+ observer.schedule(OurWatchHandler(self.do_refresh), out_folder, recursive=True)
+
+ # Watch input folders and trigger rebuilds
+ for p in watched:
+ if os.path.exists(p):
+ observer.schedule(OurWatchHandler(self.do_rebuild), p, recursive=True)
+
+ # Watch config file (a bit of a hack, but we need a directory)
+ _conf_fn = os.path.abspath(self.site.configuration_filename or 'conf.py')
+ _conf_dn = os.path.dirname(_conf_fn)
+ observer.schedule(ConfigWatchHandler(_conf_fn, self.do_rebuild), _conf_dn, recursive=False)
+
+ observer.start()
+
+ parent = self
+
+ class Mixed(WebSocketWSGIApplication):
+ """A class that supports WS and HTTP protocols in the same port."""
+ def __call__(self, environ, start_response):
+ if environ.get('HTTP_UPGRADE') is None:
+ return parent.serve_static(environ, start_response)
+ return super(Mixed, self).__call__(environ, start_response)
+
+ ws = make_server(
+ host, port, server_class=WSGIServer,
+ handler_class=WebSocketWSGIRequestHandler,
+ app=Mixed(handler_cls=LRSocket)
+ )
+ ws.initialize_websockets_manager()
+ self.logger.info("Serving HTTP on {0} port {1}...".format(host, port))
+ if browser:
+ if options['ipv6'] or '::' in host:
+ server_url = "http://[{0}]:{1}/".format(host, port)
+ else:
+ server_url = "http://{0}:{1}/".format(host, port)
+
+ self.logger.info("Opening {0} in the default web browser...".format(server_url))
+ # Yes, this is racy
+ webbrowser.open('http://{0}:{1}'.format(host, port))
+
+ try:
+ ws.serve_forever()
+ except KeyboardInterrupt:
+ self.logger.info("Server is shutting down.")
+ observer.stop()
+ observer.join()
+
+ def do_rebuild(self, event):
+ self.logger.info('REBUILDING SITE (from {0})'.format(event.src_path))
+ p = subprocess.Popen(self.cmd_arguments, stderr=subprocess.PIPE)
+ if p.wait() != 0:
+ error = p.stderr.read()
+ self.logger.error(error)
+ error_signal.send(error=error)
+ else:
+ error = p.stderr.read()
+ print(error)
+
+ def do_refresh(self, event):
+ self.logger.info('REFRESHING: {0}'.format(event.src_path))
+ p = os.path.relpath(event.src_path, os.path.abspath(self.site.config['OUTPUT_FOLDER']))
+ refresh_signal.send(path=p)
+
+ def serve_static(self, environ, start_response):
+ """Trivial static file server."""
+ uri = wsgiref.util.request_uri(environ)
+ p_uri = urlparse(uri)
+ f_path = os.path.join(self.site.config['OUTPUT_FOLDER'], *p_uri.path.split('/'))
+ mimetype = mimetypes.guess_type(uri)[0] or 'text/html'
+
+ if os.path.isdir(f_path):
+ f_path = os.path.join(f_path, self.site.config['INDEX_FILE'])
+
+ if p_uri.path == '/robots.txt':
+ start_response('200 OK', [('Content-type', 'text/plain')])
+ return ['User-Agent: *\nDisallow: /\n']
+ elif os.path.isfile(f_path):
+ with open(f_path, 'rb') as fd:
+ start_response('200 OK', [('Content-type', mimetype)])
+ return [self.inject_js(mimetype, fd.read())]
+ elif p_uri.path == '/livereload.js':
+ with open(LRJS_PATH, 'rb') as fd:
+ start_response('200 OK', [('Content-type', mimetype)])
+ return [self.inject_js(mimetype, fd.read())]
+ start_response('404 ERR', [])
+ return [self.inject_js('text/html', ERROR_N.format(404).format(uri))]
+
+ def inject_js(self, mimetype, data):
+ """Inject livereload.js in HTML files."""
+ if mimetype == 'text/html':
+ data = re.sub('</head>', self.snippet, data.decode('utf8'), 1, re.IGNORECASE)
+ data = data.encode('utf8')
+ return data
+
+
+pending = []
+
+
+class LRSocket(WebSocket):
+ """Speak Livereload protocol."""
+
+ def __init__(self, *a, **kw):
+ refresh_signal.connect(self.notify)
+ error_signal.connect(self.send_error)
+ super(LRSocket, self).__init__(*a, **kw)
+
+ def received_message(self, message):
+ message = json.loads(message.data.decode('utf8'))
+ self.logger.info('<--- {0}'.format(message))
+ response = None
+ if message['command'] == 'hello': # Handshake
+ response = {
+ 'command': 'hello',
+ 'protocols': [
+ 'http://livereload.com/protocols/official-7',
+ ],
+ 'serverName': 'nikola-livereload',
+ }
+ elif message['command'] == 'info': # Someone connected
+ self.logger.info('****** Browser connected: {0}'.format(message.get('url')))
+ self.logger.info('****** sending {0} pending messages'.format(len(pending)))
+ while pending:
+ msg = pending.pop()
+ self.logger.info('---> {0}'.format(msg.data))
+ self.send(msg, msg.is_binary)
+ else:
+ response = {
+ 'command': 'alert',
+ 'message': 'HEY',
+ }
+ if response is not None:
+ response = json.dumps(response)
+ self.logger.info('---> {0}'.format(response))
+ response = TextMessage(response)
+ self.send(response, response.is_binary)
+
+ def notify(self, sender, path):
+ """Send reload requests to the client."""
+ p = os.path.join('/', path)
+ message = {
+ 'command': 'reload',
+ 'liveCSS': True,
+ 'path': p,
+ }
+ response = json.dumps(message)
+ self.logger.info('---> {0}'.format(p))
+ response = TextMessage(response)
+ if self.stream is None: # No client connected or whatever
+ pending.append(response)
+ else:
+ self.send(response, response.is_binary)
+
+ def send_error(self, sender, error=None):
+ """Send reload requests to the client."""
+ if self.stream is None: # No client connected or whatever
+ return
+ message = {
+ 'command': 'alert',
+ 'message': error,
+ }
+ response = json.dumps(message)
+ response = TextMessage(response)
+ if self.stream is None: # No client connected or whatever
+ pending.append(response)
+ else:
+ self.send(response, response.is_binary)
+
+
+class OurWatchHandler(FileSystemEventHandler):
+
+ """A Nikola-specific handler for Watchdog."""
+
+ def __init__(self, function):
+ """Initialize the handler."""
+ self.function = function
+ super(OurWatchHandler, self).__init__()
+
+ def on_any_event(self, event):
+ """Call the provided function on any event."""
+ self.function(event)
+
+
+class ConfigWatchHandler(FileSystemEventHandler):
+
+ """A Nikola-specific handler for Watchdog that handles the config file (as a workaround)."""
+
+ def __init__(self, configuration_filename, function):
+ """Initialize the handler."""
+ self.configuration_filename = configuration_filename
+ self.function = function
+
+ def on_any_event(self, event):
+ """Call the provided function on any event."""
+ if event._src_path == self.configuration_filename:
+ self.function(event)
diff --git a/nikola/plugins/command/auto/livereload.js b/nikola/plugins/command/auto/livereload.js
new file mode 120000
index 0000000..b4cafb3
--- /dev/null
+++ b/nikola/plugins/command/auto/livereload.js
@@ -0,0 +1 @@
+../../../../bower_components/livereload-js/dist/livereload.js
\ No newline at end of file
diff --git a/nikola/plugins/command/bootswatch_theme.plugin b/nikola/plugins/command/bootswatch_theme.plugin
index 7091310..b428da3 100644
--- a/nikola/plugins/command/bootswatch_theme.plugin
+++ b/nikola/plugins/command/bootswatch_theme.plugin
@@ -4,7 +4,7 @@ Module = bootswatch_theme
[Documentation]
Author = Roberto Alsina
-Version = 0.1
+Version = 1.0
Website = http://getnikola.com
Description = Given a swatch name and a parent theme, creates a custom theme.
diff --git a/nikola/plugins/command/bootswatch_theme.py b/nikola/plugins/command/bootswatch_theme.py
index e65413b..e19c937 100644
--- a/nikola/plugins/command/bootswatch_theme.py
+++ b/nikola/plugins/command/bootswatch_theme.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,11 +26,7 @@
from __future__ import print_function
import os
-
-try:
- import requests
-except ImportError:
- requests = None # NOQA
+import requests
from nikola.plugin_categories import Command
from nikola import utils
@@ -57,7 +53,7 @@ class CommandBootswatchTheme(Command):
{
'name': 'swatch',
'short': 's',
- 'default': 'slate',
+ 'default': '',
'type': str,
'help': 'Name of the swatch from bootswatch.com.'
},
@@ -72,19 +68,19 @@ class CommandBootswatchTheme(Command):
def _execute(self, options, args):
"""Given a swatch name and a parent theme, creates a custom theme."""
- if requests is None:
- utils.req_missing(['requests'], 'install Bootswatch themes')
-
name = options['name']
swatch = options['swatch']
+ if not swatch:
+ LOGGER.error('The -s option is mandatory')
+ return 1
parent = options['parent']
version = ''
# See if we need bootswatch for bootstrap v2 or v3
themes = utils.get_theme_chain(parent)
- if 'bootstrap3' not in themes or 'bootstrap3-jinja' not in themes:
+ if 'bootstrap3' not in themes and 'bootstrap3-jinja' not in themes:
version = '2'
- elif 'bootstrap' not in themes or 'bootstrap-jinja' not in themes:
+ elif 'bootstrap' not in themes and 'bootstrap-jinja' not in themes:
LOGGER.warn('"bootswatch_theme" only makes sense for themes that use bootstrap')
elif 'bootstrap3-gradients' in themes or 'bootstrap3-gradients-jinja' in themes:
LOGGER.warn('"bootswatch_theme" doesn\'t work well with the bootstrap3-gradients family')
diff --git a/nikola/plugins/command/check.plugin b/nikola/plugins/command/check.plugin
index 8ceda5f..dd0980e 100644
--- a/nikola/plugins/command/check.plugin
+++ b/nikola/plugins/command/check.plugin
@@ -4,7 +4,7 @@ Module = check
[Documentation]
Author = Roberto Alsina
-Version = 0.1
+Version = 1.0
Website = http://getnikola.com
Description = Check the generated site
diff --git a/nikola/plugins/command/check.py b/nikola/plugins/command/check.py
index bd254f4..a9bc44a 100644
--- a/nikola/plugins/command/check.py
+++ b/nikola/plugins/command/check.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -25,6 +25,7 @@
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import print_function
+from collections import defaultdict
import os
import re
import sys
@@ -34,21 +35,36 @@ try:
except ImportError:
from urllib.parse import unquote, urlparse, urljoin, urldefrag # NOQA
+from doit.loader import generate_tasks
import lxml.html
+import requests
from nikola.plugin_categories import Command
from nikola.utils import get_logger
+def _call_nikola_list(site):
+ files = []
+ deps = defaultdict(list)
+ for task in generate_tasks('render_site', site.gen_tasks('render_site', "Task", '')):
+ files.extend(task.targets)
+ for target in task.targets:
+ deps[target].extend(task.file_dep)
+ for task in generate_tasks('post_render', site.gen_tasks('render_site', "LateTask", '')):
+ files.extend(task.targets)
+ for target in task.targets:
+ deps[target].extend(task.file_dep)
+ return files, deps
+
+
def real_scan_files(site):
task_fnames = set([])
real_fnames = set([])
output_folder = site.config['OUTPUT_FOLDER']
# First check that all targets are generated in the right places
- for task in os.popen('nikola list --all', 'r').readlines():
- task = task.strip()
- if output_folder in task and ':' in task:
- fname = task.split(':', 1)[-1]
+ for fname in _call_nikola_list(site)[0]:
+ fname = fname.strip()
+ if fname.startswith(output_folder):
task_fnames.add(fname)
# And now check that there are no non-target files
for root, dirs, files in os.walk(output_folder, followlinks=True):
@@ -68,7 +84,7 @@ def fs_relpath_from_url_path(url_path):
url_path = unquote(url_path)
# in windows relative paths don't begin with os.sep
if sys.platform == 'win32' and len(url_path):
- url_path = url_path[1:].replace('/', '\\')
+ url_path = url_path.replace('/', '\\')
return url_path
@@ -78,7 +94,7 @@ class CommandCheck(Command):
name = "check"
logger = None
- doc_usage = "-l [--find-sources] | -f"
+ doc_usage = "[-v] (-l [--find-sources] [-r] | -f [--clean-files])"
doc_purpose = "check links and files in the generated site"
cmd_options = [
{
@@ -119,11 +135,18 @@ class CommandCheck(Command):
'default': False,
'help': 'Be more verbose.',
},
+ {
+ 'name': 'remote',
+ 'long': 'remote',
+ 'short': 'r',
+ 'type': bool,
+ 'default': False,
+ 'help': 'Check that remote links work.',
+ },
]
def _execute(self, options, args):
"""Check the generated site."""
-
self.logger = get_logger('check', self.site.loghandlers)
if not options['links'] and not options['files'] and not options['clean']:
@@ -134,59 +157,103 @@ class CommandCheck(Command):
else:
self.logger.level = 4
if options['links']:
- failure = self.scan_links(options['find_sources'])
+ failure = self.scan_links(options['find_sources'], options['remote'])
if options['files']:
failure = self.scan_files()
if options['clean']:
failure = self.clean_files()
if failure:
- sys.exit(1)
+ return 1
existing_targets = set([])
+ checked_remote_targets = {}
- def analyze(self, task, find_sources=False):
+ def analyze(self, fname, find_sources=False, check_remote=False):
rv = False
self.whitelist = [re.compile(x) for x in self.site.config['LINK_CHECK_WHITELIST']]
base_url = urlparse(self.site.config['BASE_URL'])
self.existing_targets.add(self.site.config['SITE_URL'])
self.existing_targets.add(self.site.config['BASE_URL'])
url_type = self.site.config['URL_TYPE']
- if url_type == 'absolute':
- url_netloc_to_root = urlparse(self.site.config['SITE_URL']).path
+
+ deps = {}
+ if find_sources:
+ deps = _call_nikola_list(self.site)[1]
+
+ if url_type in ('absolute', 'full_path'):
+ url_netloc_to_root = urlparse(self.site.config['BASE_URL']).path
try:
- filename = task.split(":")[-1]
- d = lxml.html.fromstring(open(filename).read())
+ filename = fname
+
+ if filename.startswith(self.site.config['CACHE_FOLDER']):
+ # Do not look at links in the cache, which are not parsed by
+ # anyone and may result in false positives. Problems arise
+ # with galleries, for example. Full rationale: (Issue #1447)
+ self.logger.notice("Ignoring {0} (in cache, links may be incorrect)".format(filename))
+ return False
+
+ if not os.path.exists(fname):
+ # Quietly ignore files that don’t exist; use `nikola check -f` instead (Issue #1831)
+ return False
+
+ d = lxml.html.fromstring(open(filename, 'rb').read())
for l in d.iterlinks():
- target = l[0].attrib[l[1]]
+ target = l[2]
if target == "#":
continue
target, _ = urldefrag(target)
parsed = urlparse(target)
- # Absolute links when using only paths, skip.
- if (parsed.scheme or target.startswith('//')) and url_type in ('rel_path', 'full_path'):
- continue
+ # Warn about links from https to http (mixed-security)
+ if base_url.netloc == parsed.netloc and base_url.scheme == "https" and parsed.scheme == "http":
+ self.logger.warn("Mixed-content security for link in {0}: {1}".format(filename, target))
# Absolute links to other domains, skip
- if (parsed.scheme or target.startswith('//')) and parsed.netloc != base_url.netloc:
+ # Absolute links when using only paths, skip.
+ if ((parsed.scheme or target.startswith('//')) and parsed.netloc != base_url.netloc) or \
+ ((parsed.scheme or target.startswith('//')) and url_type in ('rel_path', 'full_path')):
+ if not check_remote or parsed.scheme not in ["http", "https"]:
+ continue
+ if parsed.netloc == base_url.netloc: # absolute URL to self.site
+ continue
+ if target in self.checked_remote_targets: # already checked this exact target
+ if self.checked_remote_targets[target] > 399:
+ self.logger.warn("Broken link in {0}: {1} [Error {2}]".format(filename, target, self.checked_remote_targets[target]))
+ continue
+ # Check the remote link works
+ req_headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:45.0) Gecko/20100101 Firefox/45.0 (Nikola)'} # I’m a real boy!
+ resp = requests.head(target, headers=req_headers)
+ self.checked_remote_targets[target] = resp.status_code
+ if resp.status_code > 399: # Error
+ self.logger.warn("Broken link in {0}: {1} [Error {2}]".format(filename, target, resp.status_code))
+ continue
+ elif resp.status_code <= 399: # The address leads *somewhere* that is not an error
+ self.logger.debug("Successfully checked remote link in {0}: {1} [HTTP: {2}]".format(filename, target, resp.status_code))
+ continue
+ self.logger.warn("Could not check remote link in {0}: {1} [Unknown problem]".format(filename, target))
continue
if url_type == 'rel_path':
- target_filename = os.path.abspath(
- os.path.join(os.path.dirname(filename), unquote(target)))
+ if target.startswith('/'):
+ target_filename = os.path.abspath(
+ os.path.join(self.site.config['OUTPUT_FOLDER'], unquote(target.lstrip('/'))))
+ else: # Relative path
+ target_filename = os.path.abspath(
+ os.path.join(os.path.dirname(filename), unquote(target)))
elif url_type in ('full_path', 'absolute'):
if url_type == 'absolute':
# convert to 'full_path' case, ie url relative to root
- url_rel_path = target.path[len(url_netloc_to_root):]
+ url_rel_path = parsed.path[len(url_netloc_to_root):]
else:
- url_rel_path = target.path
+ # convert to relative to base path
+ url_rel_path = target[len(url_netloc_to_root):]
if url_rel_path == '' or url_rel_path.endswith('/'):
url_rel_path = urljoin(url_rel_path, self.site.config['INDEX_FILE'])
fs_rel_path = fs_relpath_from_url_path(url_rel_path)
target_filename = os.path.join(self.site.config['OUTPUT_FOLDER'], fs_rel_path)
- if any(re.match(x, target_filename) for x in self.whitelist):
+ if any(re.search(x, target_filename) for x in self.whitelist):
continue
elif target_filename not in self.existing_targets:
if os.path.exists(target_filename):
@@ -197,25 +264,22 @@ class CommandCheck(Command):
self.logger.warn("Broken link in {0}: {1}".format(filename, target))
if find_sources:
self.logger.warn("Possible sources:")
- self.logger.warn(os.popen('nikola list --deps ' + task, 'r').read())
+ self.logger.warn("\n".join(deps[filename]))
self.logger.warn("===============================\n")
except Exception as exc:
self.logger.error("Error with: {0} {1}".format(filename, exc))
return rv
- def scan_links(self, find_sources=False):
+ def scan_links(self, find_sources=False, check_remote=False):
self.logger.info("Checking Links:")
self.logger.info("===============\n")
self.logger.notice("{0} mode".format(self.site.config['URL_TYPE']))
failure = False
- for task in os.popen('nikola list --all', 'r').readlines():
- task = task.strip()
- if task.split(':')[0] in (
- 'render_tags', 'render_archive',
- 'render_galleries', 'render_indexes',
- 'render_pages'
- 'render_site') and '.html' in task:
- if self.analyze(task, find_sources):
+ # Maybe we should just examine all HTML files
+ output_folder = self.site.config['OUTPUT_FOLDER']
+ for fname in _call_nikola_list(self.site)[0]:
+ if fname.startswith(output_folder) and '.html' == fname[-5:]:
+ if self.analyze(fname, find_sources, check_remote):
failure = True
if not failure:
self.logger.info("All links checked.")
diff --git a/nikola/plugins/command/console.plugin b/nikola/plugins/command/console.plugin
index 2eeedae..3aef2e7 100644
--- a/nikola/plugins/command/console.plugin
+++ b/nikola/plugins/command/console.plugin
@@ -4,6 +4,6 @@ Module = console
[Documentation]
Author = Chris Warrick, Roberto Alsina
-Version = 0.1
+Version = 1.0
Website = http://getnikola.com
Description = Start a debugging python console
diff --git a/nikola/plugins/command/console.py b/nikola/plugins/command/console.py
index 9dfc975..b8e7825 100644
--- a/nikola/plugins/command/console.py
+++ b/nikola/plugins/command/console.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Chris Warrick, Roberto Alsina and others.
+# Copyright © 2012-2015 Chris Warrick, Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -30,7 +30,7 @@ import os
from nikola import __version__
from nikola.plugin_categories import Command
-from nikola.utils import get_logger, STDERR_HANDLER, req_missing
+from nikola.utils import get_logger, STDERR_HANDLER, req_missing, Commands
LOGGER = get_logger('console', STDERR_HANDLER)
@@ -122,6 +122,8 @@ If there is no console to use specified (as -b, -i, -p) it tries IPython, then f
self.site.scan_posts()
# Create nice object with all commands:
+ self.site.commands = Commands(self.site.doit, self.config, self._doitargs)
+
self.context = {
'conf': self.site.config,
'site': self.site,
diff --git a/nikola/plugins/command/deploy.plugin b/nikola/plugins/command/deploy.plugin
index 10cc796..14fd53f 100644
--- a/nikola/plugins/command/deploy.plugin
+++ b/nikola/plugins/command/deploy.plugin
@@ -4,6 +4,6 @@ Module = deploy
[Documentation]
Author = Roberto Alsina
-Version = 0.1
+Version = 1.0
Website = http://getnikola.com
Description = Deploy the site
diff --git a/nikola/plugins/command/deploy.py b/nikola/plugins/command/deploy.py
index fde43fa..2c44e87 100644
--- a/nikola/plugins/command/deploy.py
+++ b/nikola/plugins/command/deploy.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -29,23 +29,22 @@ import io
from datetime import datetime
from dateutil.tz import gettz
import os
-import sys
import subprocess
import time
from blinker import signal
from nikola.plugin_categories import Command
-from nikola.utils import get_logger, remove_file, unicode_str
+from nikola.utils import get_logger, remove_file, unicode_str, makedirs
class CommandDeploy(Command):
"""Deploy site."""
name = "deploy"
- doc_usage = ""
+ doc_usage = "[[preset [preset...]]"
doc_purpose = "deploy the site"
-
+ doc_description = "Deploy the site by executing deploy commands from the presets listed on the command line. If no presets are specified, `default` is executed."
logger = None
def _execute(self, command, args):
@@ -74,14 +73,29 @@ class CommandDeploy(Command):
remove_file(os.path.join(out_dir, post.source_path))
undeployed_posts.append(post)
- for command in self.site.config['DEPLOY_COMMANDS']:
- self.logger.info("==> {0}".format(command))
+ if args:
+ presets = args
+ else:
+ presets = ['default']
+
+ # test for preset existence
+ for preset in presets:
try:
- subprocess.check_call(command, shell=True)
- except subprocess.CalledProcessError as e:
- self.logger.error('Failed deployment — command {0} '
- 'returned {1}'.format(e.cmd, e.returncode))
- sys.exit(e.returncode)
+ self.site.config['DEPLOY_COMMANDS'][preset]
+ except:
+ self.logger.error('No such preset: {0}'.format(preset))
+ return 255
+
+ for preset in presets:
+ self.logger.info("=> preset '{0}'".format(preset))
+ for command in self.site.config['DEPLOY_COMMANDS'][preset]:
+ self.logger.info("==> {0}".format(command))
+ try:
+ subprocess.check_call(command, shell=True)
+ except subprocess.CalledProcessError as e:
+ self.logger.error('Failed deployment — command {0} '
+ 'returned {1}'.format(e.cmd, e.returncode))
+ return e.returncode
self.logger.info("Successful deployment")
try:
@@ -96,6 +110,7 @@ class CommandDeploy(Command):
new_deploy = datetime.utcnow()
self._emit_deploy_event(last_deploy, new_deploy, clean, undeployed_posts)
+ makedirs(self.site.config['CACHE_FOLDER'])
# Store timestamp of successful deployment
with io.open(timestamp_path, 'w+', encoding='utf8') as outf:
outf.write(unicode_str(new_deploy.isoformat()))
diff --git a/nikola/plugins/command/github_deploy.plugin b/nikola/plugins/command/github_deploy.plugin
index 4cbc422..74e7902 100644
--- a/nikola/plugins/command/github_deploy.plugin
+++ b/nikola/plugins/command/github_deploy.plugin
@@ -4,6 +4,6 @@ Module = github_deploy
[Documentation]
Author = Puneeth Chaganti
-Version = 0.1
+Version = 1,0
Website = http://getnikola.com
Description = Deploy the site to GitHub pages.
diff --git a/nikola/plugins/command/github_deploy.py b/nikola/plugins/command/github_deploy.py
index 13da48c..888a4f9 100644
--- a/nikola/plugins/command/github_deploy.py
+++ b/nikola/plugins/command/github_deploy.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2014 Puneeth Chaganti and others.
+# Copyright © 2014-2015 Puneeth Chaganti and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -25,15 +25,15 @@
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import print_function
+from datetime import datetime
+import io
import os
-import shutil
import subprocess
-import sys
from textwrap import dedent
from nikola.plugin_categories import Command
from nikola.plugins.command.check import real_scan_files
-from nikola.utils import ask_yesno, get_logger
+from nikola.utils import get_logger, req_missing, makedirs, unicode_str
from nikola.__main__ import main
from nikola import __version__
@@ -43,79 +43,53 @@ def uni_check_output(*args, **kwargs):
return o.decode('utf-8')
+def check_ghp_import_installed():
+ try:
+ subprocess.check_output(['ghp-import', '-h'])
+ except OSError:
+ # req_missing defaults to `python=True` — and it’s meant to be like this.
+ # `ghp-import` is installed via pip, but the only way to use it is by executing the script it installs.
+ req_missing(['ghp-import'], 'deploy the site to GitHub Pages')
+
+
class CommandGitHubDeploy(Command):
- """ Deploy site to GitHub pages. """
+ """ Deploy site to GitHub Pages. """
name = 'github_deploy'
doc_usage = ''
- doc_purpose = 'deploy the site to GitHub pages'
+ doc_purpose = 'deploy the site to GitHub Pages'
doc_description = dedent(
"""\
- This command can be used to deploy your site to GitHub pages.
- It performs the following actions:
+ This command can be used to deploy your site to GitHub Pages.
- 1. Ensure that your site is a git repository, and git is on the PATH.
- 2. Ensure that the output directory is not committed on the
- source branch.
- 3. Check for changes, and prompt the user to continue, if required.
- 4. Build the site
- 5. Clean any files that are "unknown" to Nikola.
- 6. Create a deploy branch, if one doesn't exist.
- 7. Commit the output to this branch. (NOTE: Any untracked source
- files, may get committed at this stage, on the wrong branch!)
- 8. Push and deploy!
+ It uses ghp-import to do this task.
- NOTE: This command needs your site to be a git repository, with a
- master branch (or a different branch, configured using
- GITHUB_SOURCE_BRANCH if you are pushing to user.github
- .io/organization.github.io pages) containing the sources of your
- site. You also, obviously, need to have `git` on your PATH,
- and should be able to push to the repository specified as the remote
- (origin, by default).
"""
)
logger = None
- _deploy_branch = ''
- _source_branch = ''
- _remote_name = ''
-
def _execute(self, command, args):
self.logger = get_logger(
CommandGitHubDeploy.name, self.site.loghandlers
)
- self._source_branch = self.site.config.get(
- 'GITHUB_SOURCE_BRANCH', 'master'
- )
- self._deploy_branch = self.site.config.get(
- 'GITHUB_DEPLOY_BRANCH', 'gh-pages'
- )
- self._remote_name = self.site.config.get(
- 'GITHUB_REMOTE_NAME', 'origin'
- )
-
- self._ensure_git_repo()
-
- self._exit_if_output_committed()
- if not self._prompt_continue():
- return
+ # Check if ghp-import is installed
+ check_ghp_import_installed()
+ # Build before deploying
build = main(['build'])
if build != 0:
self.logger.error('Build failed, not deploying to GitHub')
- sys.exit(build)
+ return build
+ # Clean non-target files
only_on_output, _ = real_scan_files(self.site)
for f in only_on_output:
os.unlink(f)
- self._checkout_deploy_branch()
-
- self._copy_output()
-
+ # Commit and push
self._commit_and_push()
return
@@ -123,150 +97,34 @@ class CommandGitHubDeploy(Command):
def _commit_and_push(self):
""" Commit all the files and push. """
- deploy = self._deploy_branch
- source = self._source_branch
- remote = self._remote_name
-
+ source = self.site.config['GITHUB_SOURCE_BRANCH']
+ deploy = self.site.config['GITHUB_DEPLOY_BRANCH']
+ remote = self.site.config['GITHUB_REMOTE_NAME']
source_commit = uni_check_output(['git', 'rev-parse', source])
commit_message = (
'Nikola auto commit.\n\n'
'Source commit: %s'
'Nikola version: %s' % (source_commit, __version__)
)
-
- commands = [
- ['git', 'pull', remote, '%s:%s' % (deploy, deploy)],
- ['git', 'add', '-A'],
- ['git', 'commit', '-m', commit_message],
- ['git', 'push', remote, '%s:%s' % (deploy, deploy)],
- ['git', 'checkout', source],
- ]
-
- for command in commands:
- self.logger.info("==> {0}".format(command))
- try:
- subprocess.check_call(command)
- except subprocess.CalledProcessError as e:
- self.logger.error(
- 'Failed GitHub deployment — command {0} '
- 'returned {1}'.format(e.cmd, e.returncode)
- )
- sys.exit(e.returncode)
-
- def _copy_output(self):
- """ Copy all output to the top level directory. """
output_folder = self.site.config['OUTPUT_FOLDER']
- for each in os.listdir(output_folder):
- if os.path.exists(each):
- if os.path.isdir(each):
- shutil.rmtree(each)
-
- else:
- os.unlink(each)
-
- shutil.move(os.path.join(output_folder, each), '.')
-
- def _checkout_deploy_branch(self):
- """ Check out the deploy branch
-
- Creates an orphan branch if not present.
-
- """
- deploy = self._deploy_branch
+ command = ['ghp-import', '-n', '-m', commit_message, '-p', '-r', remote, '-b', deploy, output_folder]
+ self.logger.info("==> {0}".format(command))
try:
- subprocess.check_call(
- [
- 'git', 'show-ref', '--verify', '--quiet',
- 'refs/heads/%s' % deploy
- ]
- )
- except subprocess.CalledProcessError:
- self._create_orphan_deploy_branch()
- else:
- subprocess.check_call(['git', 'checkout', deploy])
-
- def _create_orphan_deploy_branch(self):
- """ Create an orphan deploy branch """
-
- result = subprocess.check_call(
- ['git', 'checkout', '--orphan', self._deploy_branch]
- )
- if result != 0:
- self.logger.error('Failed to create a deploy branch')
- sys.exit(1)
-
- result = subprocess.check_call(['git', 'rm', '-rf', '.'])
- if result != 0:
- self.logger.error('Failed to create a deploy branch')
- sys.exit(1)
-
- with open('.gitignore', 'w') as f:
- f.write('%s\n' % self.site.config['OUTPUT_FOLDER'])
- f.write('%s\n' % self.site.config['CACHE_FOLDER'])
- f.write('*.pyc\n')
- f.write('*.db\n')
-
- subprocess.check_call(['git', 'add', '.gitignore'])
- subprocess.check_call(['git', 'commit', '-m', 'Add .gitignore'])
-
- def _ensure_git_repo(self):
- """ Ensure that the site is a git-repo.
-
- Also make sure that a remote with the specified name exists.
-
- """
-
- try:
- remotes = uni_check_output(['git', 'remote'])
+ subprocess.check_call(command)
except subprocess.CalledProcessError as e:
- self.logger.notice('github_deploy needs a git repository!')
- sys.exit(e.returncode)
- except OSError as e:
- import errno
- self.logger.error('Running git failed with {0}'.format(e))
- if e.errno == errno.ENOENT:
- self.logger.notice('Is git on the PATH?')
- sys.exit(1)
- else:
- if self._remote_name not in remotes:
- self.logger.error(
- 'Need a remote called "%s" configured' % self._remote_name
- )
- sys.exit(1)
-
- def _exit_if_output_committed(self):
- """ Exit if the output folder is committed on the source branch. """
-
- source = self._source_branch
- subprocess.check_call(['git', 'checkout', source])
-
- output_folder = self.site.config['OUTPUT_FOLDER']
- output_log = uni_check_output(
- ['git', 'ls-files', '--', output_folder]
- )
-
- if len(output_log.strip()) > 0:
self.logger.error(
- 'Output folder is committed on the source branch. '
- 'Cannot proceed until it is removed.'
+ 'Failed GitHub deployment — command {0} '
+ 'returned {1}'.format(e.cmd, e.returncode)
)
- sys.exit(1)
-
- def _prompt_continue(self):
- """ Show uncommitted changes, and ask if user wants to continue. """
+ return e.returncode
- changes = uni_check_output(['git', 'status', '--porcelain'])
- if changes.strip():
- changes = uni_check_output(['git', 'status']).strip()
- message = (
- "You have the following changes:\n%s\n\n"
- "Anything not committed, and unknown to Nikola may be lost, "
- "or committed onto the wrong branch. Do you wish to continue?"
- ) % changes
- proceed = ask_yesno(message, False)
- else:
- proceed = True
+ self.logger.info("Successful deployment")
- return proceed
+ # Store timestamp of successful deployment
+ timestamp_path = os.path.join(self.site.config["CACHE_FOLDER"], "lastdeploy")
+ new_deploy = datetime.utcnow()
+ makedirs(self.site.config["CACHE_FOLDER"])
+ with io.open(timestamp_path, "w+", encoding="utf8") as outf:
+ outf.write(unicode_str(new_deploy.isoformat()))
diff --git a/nikola/plugins/command/import_wordpress.plugin b/nikola/plugins/command/import_wordpress.plugin
index fadc759..e072224 100644
--- a/nikola/plugins/command/import_wordpress.plugin
+++ b/nikola/plugins/command/import_wordpress.plugin
@@ -4,7 +4,7 @@ Module = import_wordpress
[Documentation]
Author = Roberto Alsina
-Version = 0.2
+Version = 1.0
Website = http://getnikola.com
Description = Import a wordpress site from a XML dump (requires markdown).
diff --git a/nikola/plugins/command/import_wordpress.py b/nikola/plugins/command/import_wordpress.py
index 1af4083..674fc2a 100644
--- a/nikola/plugins/command/import_wordpress.py
+++ b/nikola/plugins/command/import_wordpress.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -28,6 +28,8 @@ from __future__ import unicode_literals, print_function
import os
import re
import sys
+import datetime
+import requests
from lxml import etree
try:
@@ -37,11 +39,6 @@ except ImportError:
from urllib.parse import urlparse, unquote # NOQA
try:
- import requests
-except ImportError:
- requests = None # NOQA
-
-try:
import phpserialize
except ImportError:
phpserialize = None # NOQA
@@ -87,6 +84,13 @@ class CommandImportWordpress(Command, ImportMixin):
'help': "Do not try to download files for the import",
},
{
+ 'name': 'download_auth',
+ 'long': 'download-auth',
+ 'default': None,
+ 'type': str,
+ 'help': "Specify username and password for HTTP authentication (separated by ':')",
+ },
+ {
'name': 'separate_qtranslate_content',
'long': 'qtranslate',
'default': False,
@@ -104,6 +108,7 @@ class CommandImportWordpress(Command, ImportMixin):
'help': "The pattern for translation files names",
},
]
+ all_tags = set([])
def _execute(self, options={}, args=[]):
"""Import a WordPress blog from an export file into a Nikola site."""
@@ -133,6 +138,14 @@ class CommandImportWordpress(Command, ImportMixin):
self.exclude_drafts = options.get('exclude_drafts', False)
self.no_downloads = options.get('no_downloads', False)
+ self.auth = None
+ if options.get('download_auth') is not None:
+ username_password = options.get('download_auth')
+ self.auth = tuple(username_password.split(':', 1))
+ if len(self.auth) < 2:
+ print("Please specify HTTP authentication credentials in the form username:password.")
+ return False
+
self.separate_qtranslate_content = options.get('separate_qtranslate_content')
self.translations_pattern = options.get('translations_pattern')
@@ -149,11 +162,7 @@ class CommandImportWordpress(Command, ImportMixin):
package=modulename)
)
- if requests is None and phpserialize is None:
- req_missing(['requests', 'phpserialize'], 'import WordPress dumps without --no-downloads')
- elif requests is None:
- req_missing(['requests'], 'import WordPress dumps without --no-downloads')
- elif phpserialize is None:
+ if phpserialize is None:
req_missing(['phpserialize'], 'import WordPress dumps without --no-downloads')
channel = self.get_channel_from_file(self.wordpress_export_file)
@@ -172,6 +181,19 @@ class CommandImportWordpress(Command, ImportMixin):
self.extra_languages)
self.context['REDIRECTIONS'] = self.configure_redirections(
self.url_map)
+
+ # Add tag redirects
+ for tag in self.all_tags:
+ try:
+ tag_str = tag.decode('utf8')
+ except AttributeError:
+ tag_str = tag
+ tag = utils.slugify(tag_str)
+ src_url = '{}tag/{}'.format(self.context['SITE_URL'], tag)
+ dst_url = self.site.link('tag', tag)
+ if src_url != dst_url:
+ self.url_map[src_url] = dst_url
+
self.write_urlmap_csv(
os.path.join(self.output_folder, 'url_map.csv'), self.url_map)
rendered_template = conf_template.render(**prepare_config(self.context))
@@ -186,26 +208,6 @@ class CommandImportWordpress(Command, ImportMixin):
rendered_template)
@classmethod
- def _glue_xml_lines(cls, xml):
- new_xml = xml[0]
- previous_line_ended_in_newline = new_xml.endswith(b'\n')
- previous_line_was_indentet = False
- for line in xml[1:]:
- if (re.match(b'^[ \t]+', line) and previous_line_ended_in_newline):
- new_xml = b''.join((new_xml, line))
- previous_line_was_indentet = True
- elif previous_line_was_indentet:
- new_xml = b''.join((new_xml, line))
- previous_line_was_indentet = False
- else:
- new_xml = b'\n'.join((new_xml, line))
- previous_line_was_indentet = False
-
- previous_line_ended_in_newline = line.endswith(b'\n')
-
- return new_xml
-
- @classmethod
def read_xml_file(cls, filename):
xml = []
@@ -215,8 +217,7 @@ class CommandImportWordpress(Command, ImportMixin):
if b'<atom:link rel=' in line:
continue
xml.append(line)
-
- return cls._glue_xml_lines(xml)
+ return b'\n'.join(xml)
@classmethod
def get_channel_from_file(cls, filename):
@@ -255,9 +256,15 @@ class CommandImportWordpress(Command, ImportMixin):
'{{{0}}}author_display_name'.format(wordpress_namespace),
"Joe Example")
context['POSTS'] = '''(
+ ("posts/*.rst", "posts", "post.tmpl"),
+ ("posts/*.txt", "posts", "post.tmpl"),
+ ("posts/*.md", "posts", "post.tmpl"),
("posts/*.wp", "posts", "post.tmpl"),
)'''
context['PAGES'] = '''(
+ ("stories/*.rst", "stories", "story.tmpl"),
+ ("stories/*.txt", "stories", "story.tmpl"),
+ ("stories/*.md", "stories", "story.tmpl"),
("stories/*.wp", "stories", "story.tmpl"),
)'''
context['COMPILERS'] = '''{
@@ -274,8 +281,12 @@ class CommandImportWordpress(Command, ImportMixin):
return
try:
+ request = requests.get(url, auth=self.auth)
+ if request.status_code >= 400:
+ LOGGER.warn("Downloading {0} to {1} failed with HTTP status code {2}".format(url, dst_path, request.status_code))
+ return
with open(dst_path, 'wb+') as fd:
- fd.write(requests.get(url).content)
+ fd.write(request.content)
except requests.exceptions.ConnectionError as err:
LOGGER.warn("Downloading {0} to {1} failed: {2}".format(url, dst_path, err))
@@ -285,8 +296,7 @@ class CommandImportWordpress(Command, ImportMixin):
link = get_text_tag(item, '{{{0}}}link'.format(wordpress_namespace),
'foo')
path = urlparse(url).path
- dst_path = os.path.join(*([self.output_folder, 'files']
- + list(path.split('/'))))
+ dst_path = os.path.join(*([self.output_folder, 'files'] + list(path.split('/'))))
dst_dir = os.path.dirname(dst_path)
utils.makedirs(dst_dir)
LOGGER.info("Downloading {0} => {1}".format(url, dst_path))
@@ -306,7 +316,6 @@ class CommandImportWordpress(Command, ImportMixin):
return
additional_metadata = item.findall('{{{0}}}postmeta'.format(wordpress_namespace))
-
if additional_metadata is None:
return
@@ -341,8 +350,7 @@ class CommandImportWordpress(Command, ImportMixin):
url = '/'.join([source_path, filename.decode('utf-8')])
path = urlparse(url).path
- dst_path = os.path.join(*([self.output_folder, 'files']
- + list(path.split('/'))))
+ dst_path = os.path.join(*([self.output_folder, 'files'] + list(path.split('/'))))
dst_dir = os.path.dirname(dst_path)
utils.makedirs(dst_dir)
LOGGER.info("Downloading {0} => {1}".format(url, dst_path))
@@ -351,13 +359,34 @@ class CommandImportWordpress(Command, ImportMixin):
links[url] = '/' + dst_url
links[url] = '/' + dst_url
- @staticmethod
- def transform_sourcecode(content):
- new_content = re.sub('\[sourcecode language="([^"]+)"\]',
- "\n~~~~~~~~~~~~{.\\1}\n", content)
- new_content = new_content.replace('[/sourcecode]',
- "\n~~~~~~~~~~~~\n")
- return new_content
+ code_re1 = re.compile(r'\[code.* lang.*?="(.*?)?".*\](.*?)\[/code\]', re.DOTALL | re.MULTILINE)
+ code_re2 = re.compile(r'\[sourcecode.* lang.*?="(.*?)?".*\](.*?)\[/sourcecode\]', re.DOTALL | re.MULTILINE)
+ code_re3 = re.compile(r'\[code.*?\](.*?)\[/code\]', re.DOTALL | re.MULTILINE)
+ code_re4 = re.compile(r'\[sourcecode.*?\](.*?)\[/sourcecode\]', re.DOTALL | re.MULTILINE)
+
+ def transform_code(self, content):
+ # http://en.support.wordpress.com/code/posting-source-code/. There are
+ # a ton of things not supported here. We only do a basic [code
+ # lang="x"] -> ```x translation, and remove quoted html entities (<,
+ # >, &, and ").
+ def replacement(m, c=content):
+ if len(m.groups()) == 1:
+ language = ''
+ code = m.group(0)
+ else:
+ language = m.group(1) or ''
+ code = m.group(2)
+ code = code.replace('&amp;', '&')
+ code = code.replace('&gt;', '>')
+ code = code.replace('&lt;', '<')
+ code = code.replace('&quot;', '"')
+ return '```{language}\n{code}\n```'.format(language=language, code=code)
+
+ content = self.code_re1.sub(replacement, content)
+ content = self.code_re2.sub(replacement, content)
+ content = self.code_re3.sub(replacement, content)
+ content = self.code_re4.sub(replacement, content)
+ return content
@staticmethod
def transform_caption(content):
@@ -374,10 +403,10 @@ class CommandImportWordpress(Command, ImportMixin):
return content
def transform_content(self, content):
- new_content = self.transform_sourcecode(content)
- new_content = self.transform_caption(new_content)
- new_content = self.transform_multiple_newlines(new_content)
- return new_content
+ content = self.transform_code(content)
+ content = self.transform_caption(content)
+ content = self.transform_multiple_newlines(content)
+ return content
def import_item(self, item, wordpress_namespace, out_folder=None):
"""Takes an item from the feed and creates a post file."""
@@ -391,11 +420,10 @@ class CommandImportWordpress(Command, ImportMixin):
parsed = urlparse(link)
path = unquote(parsed.path.strip('/'))
- # In python 2, path is a str. slug requires a unicode
- # object. According to wikipedia, unquoted strings will
- # usually be UTF8
- if isinstance(path, utils.bytes_str):
+ try:
path = path.decode('utf8')
+ except AttributeError:
+ pass
# Cut out the base directory.
if path.startswith(self.base_dir.strip('/')):
@@ -420,7 +448,13 @@ class CommandImportWordpress(Command, ImportMixin):
description = get_text_tag(item, 'description', '')
post_date = get_text_tag(
item, '{{{0}}}post_date'.format(wordpress_namespace), None)
- dt = utils.to_datetime(post_date)
+ try:
+ dt = utils.to_datetime(post_date)
+ except ValueError:
+ dt = datetime.datetime(1970, 1, 1, 0, 0, 0)
+ LOGGER.error('Malformed date "{0}" in "{1}" [{2}], assuming 1970-01-01 00:00:00 instead.'.format(post_date, title, slug))
+ post_date = dt.strftime('%Y-%m-%d %H:%M:%S')
+
if dt.tzinfo and self.timezone is None:
self.timezone = utils.get_tzname(dt)
status = get_text_tag(
@@ -443,12 +477,20 @@ class CommandImportWordpress(Command, ImportMixin):
if text == 'Uncategorized':
continue
tags.append(text)
+ self.all_tags.add(text)
if '$latex' in content:
tags.append('mathjax')
+ # Find post format if it's there
+ post_format = 'wp'
+ format_tag = [x for x in item.findall('*//{%s}meta_key' % wordpress_namespace) if x.text == '_tc_post_format']
+ if format_tag:
+ post_format = format_tag[0].getparent().find('{%s}meta_value' % wordpress_namespace).text
+
if is_draft and self.exclude_drafts:
LOGGER.notice('Draft "{0}" will not be imported.'.format(title))
+
elif content.strip():
# If no content is found, no files are written.
self.url_map[link] = (self.context['SITE_URL'] +
@@ -475,7 +517,8 @@ class CommandImportWordpress(Command, ImportMixin):
out_meta_filename = slug + '.meta'
out_content_filename = slug + '.wp'
meta_slug = slug
- content = self.transform_content(content)
+ if post_format == 'wp':
+ content = self.transform_content(content)
self.write_metadata(os.path.join(self.output_folder, out_folder,
out_meta_filename),
title, meta_slug, post_date, description, tags)
@@ -510,7 +553,7 @@ def get_text_tag(tag, name, default):
if tag is None:
return default
t = tag.find(name)
- if t is not None:
+ if t is not None and t.text is not None:
return t.text
else:
return default
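The [code]/[sourcecode] handling introduced in transform_code above can be exercised in isolation. A minimal sketch under the same assumptions (one of the four regexes, the same entity unescaping); wp_code_to_fence is an illustrative name, not a plugin API:

    import re

    CODE_RE = re.compile(r'\[code.*?lang.*?="(.*?)?".*?\](.*?)\[/code\]', re.DOTALL)

    def wp_code_to_fence(content):
        """Turn WordPress [code lang="x"] shortcodes into fenced code blocks."""
        def repl(m):
            language = m.group(1) or ''
            code = (m.group(2)
                    .replace('&amp;', '&')
                    .replace('&gt;', '>')
                    .replace('&lt;', '<')
                    .replace('&quot;', '"'))
            return '```{0}\n{1}\n```'.format(language, code)
        return CODE_RE.sub(repl, content)

    print(wp_code_to_fence('[code lang="python"]print(1 &lt; 2)[/code]'))
    # ```python
    # print(1 < 2)
    # ```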
diff --git a/nikola/plugins/command/init.plugin b/nikola/plugins/command/init.plugin
index a539f51..850dba9 100644
--- a/nikola/plugins/command/init.plugin
+++ b/nikola/plugins/command/init.plugin
@@ -4,6 +4,6 @@ Module = init
[Documentation]
Author = Roberto Alsina
-Version = 0.2
+Version = 1.0
Website = http://getnikola.com
Description = Create a new site.
diff --git a/nikola/plugins/command/init.py b/nikola/plugins/command/init.py
index a8b60db..7a36894 100644
--- a/nikola/plugins/command/init.py
+++ b/nikola/plugins/command/init.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -33,11 +33,13 @@ import textwrap
import datetime
import unidecode
import dateutil.tz
+import dateutil.zoneinfo
from mako.template import Template
from pkg_resources import resource_filename
+import tarfile
import nikola
-from nikola.nikola import DEFAULT_TRANSLATIONS_PATTERN, DEFAULT_INDEX_READ_MORE_LINK, DEFAULT_RSS_READ_MORE_LINK, LEGAL_VALUES
+from nikola.nikola import DEFAULT_TRANSLATIONS_PATTERN, DEFAULT_INDEX_READ_MORE_LINK, DEFAULT_RSS_READ_MORE_LINK, LEGAL_VALUES, urlsplit, urlunsplit
from nikola.plugin_categories import Command
from nikola.utils import ask, ask_yesno, get_logger, makedirs, STDERR_HANDLER, load_messages
from nikola.packages.tzlocal import get_localzone
@@ -48,9 +50,10 @@ LOGGER = get_logger('init', STDERR_HANDLER)
SAMPLE_CONF = {
'BLOG_AUTHOR': "Your Name",
'BLOG_TITLE': "Demo Site",
- 'SITE_URL': "http://getnikola.com/",
+ 'SITE_URL': "https://example.com/",
'BLOG_EMAIL': "joe@demo.site",
'BLOG_DESCRIPTION': "This is a demo site for Nikola.",
+ 'PRETTY_URLS': False,
'DEFAULT_LANG': "en",
'TRANSLATIONS': """{
DEFAULT_LANG: "",
@@ -186,7 +189,7 @@ def format_navigation_links(additional_languages, default_lang, messages):
pairs.append(f.format('DEFAULT_LANG', '', get_msg(default_lang)))
for l in additional_languages:
- pairs.append(f.format(json.dumps(l), '/' + l, get_msg(l)))
+ pairs.append(f.format(json.dumps(l, ensure_ascii=False), '/' + l, get_msg(l)))
return u'{{\n{0}\n}}'.format('\n\n'.join(pairs))
@@ -196,11 +199,13 @@ def format_navigation_links(additional_languages, default_lang, messages):
def prepare_config(config):
"""Parse sample config with JSON."""
p = config.copy()
- p.update(dict((k, json.dumps(v)) for k, v in p.items()
- if k not in ('POSTS', 'PAGES', 'COMPILERS', 'TRANSLATIONS', 'NAVIGATION_LINKS', '_SUPPORTED_LANGUAGES', '_SUPPORTED_COMMENT_SYSTEMS', 'INDEX_READ_MORE_LINK', 'RSS_READ_MORE_LINK')))
+ p.update(dict((k, json.dumps(v, ensure_ascii=False)) for k, v in p.items()
+ if k not in ('POSTS', 'PAGES', 'COMPILERS', 'TRANSLATIONS', 'NAVIGATION_LINKS', '_SUPPORTED_LANGUAGES', '_SUPPORTED_COMMENT_SYSTEMS', 'INDEX_READ_MORE_LINK', 'RSS_READ_MORE_LINK', 'PRETTY_URLS')))
# READ_MORE_LINKs require some special treatment.
p['INDEX_READ_MORE_LINK'] = "'" + p['INDEX_READ_MORE_LINK'].replace("'", "\\'") + "'"
p['RSS_READ_MORE_LINK'] = "'" + p['RSS_READ_MORE_LINK'].replace("'", "\\'") + "'"
+ # json would make that `true` instead of `True`
+ p['PRETTY_URLS'] = str(p['PRETTY_URLS'])
return p
@@ -237,14 +242,20 @@ class CommandInit(Command):
src = resource_filename('nikola', os.path.join('data', 'samplesite'))
shutil.copytree(src, target)
- @classmethod
- def create_configuration(cls, target):
+ @staticmethod
+ def create_configuration(target):
template_path = resource_filename('nikola', 'conf.py.in')
conf_template = Template(filename=template_path)
conf_path = os.path.join(target, 'conf.py')
with io.open(conf_path, 'w+', encoding='utf8') as fd:
fd.write(conf_template.render(**prepare_config(SAMPLE_CONF)))
+ @staticmethod
+ def create_configuration_to_string():
+ template_path = resource_filename('nikola', 'conf.py.in')
+ conf_template = Template(filename=template_path)
+ return conf_template.render(**prepare_config(SAMPLE_CONF))
+
@classmethod
def create_empty_site(cls, target):
for folder in ('files', 'galleries', 'listings', 'posts', 'stories'):
@@ -253,6 +264,39 @@ class CommandInit(Command):
@staticmethod
def ask_questions(target):
"""Ask some questions about Nikola."""
+ def urlhandler(default, toconf):
+ answer = ask('Site URL', 'https://example.com/')
+ try:
+ answer = answer.decode('utf-8')
+ except (AttributeError, UnicodeDecodeError):
+ pass
+ if not answer.startswith(u'http'):
+ print(" ERROR: You must specify a protocol (http or https).")
+ urlhandler(default, toconf)
+ return
+ if not answer.endswith('/'):
+ print(" The URL does not end in '/' -- adding it.")
+ answer += '/'
+
+ dst_url = urlsplit(answer)
+ try:
+ dst_url.netloc.encode('ascii')
+ except (UnicodeEncodeError, UnicodeDecodeError):
+ # The IDN contains characters beyond ASCII. We must convert it
+ # to Punycode. (Issue #1644)
+ nl = dst_url.netloc.encode('idna')
+ answer = urlunsplit((dst_url.scheme,
+ nl,
+ dst_url.path,
+ dst_url.query,
+ dst_url.fragment))
+ print(" Converting to Punycode:", answer)
+
+ SAMPLE_CONF['SITE_URL'] = answer
+
+ def prettyhandler(default, toconf):
+ SAMPLE_CONF['PRETTY_URLS'] = ask_yesno('Enable pretty URLs (/page/ instead of /page.html) that don’t need web server configuration?', default=True)
+
def lhandler(default, toconf, show_header=True):
if show_header:
print("We will now ask you to provide the list of languages you want to use.")
@@ -297,7 +341,7 @@ class CommandInit(Command):
lhandler(default, toconf, show_header=False)
def tzhandler(default, toconf):
- print("\nPlease choose the correct time zone for your blog. Nikola uses the tz database.")
+ print("\nPlease choose the correct time zone for your blog. Nikola uses the tz database.")
print("You can find your time zone here:")
print("http://en.wikipedia.org/wiki/List_of_tz_database_time_zones")
print("")
@@ -309,12 +353,26 @@ class CommandInit(Command):
lz = None
answer = ask('Time zone', lz if lz else "UTC")
tz = dateutil.tz.gettz(answer)
+
+ if tz is None:
+ print(" WARNING: Time zone not found. Searching list of time zones for a match.")
+ zonesfile = tarfile.open(fileobj=dateutil.zoneinfo.getzoneinfofile_stream())
+ zonenames = [zone for zone in zonesfile.getnames() if answer.lower() in zone.lower()]
+ if len(zonenames) == 1:
+ tz = dateutil.tz.gettz(zonenames[0])
+ answer = zonenames[0]
+ print(" Picking '{0}'.".format(answer))
+ elif len(zonenames) > 1:
+ print(" The following time zones match your query:")
+ print(' ' + '\n '.join(zonenames))
+ continue
+
if tz is not None:
time = datetime.datetime.now(tz).strftime('%H:%M:%S')
print(" Current time in {0}: {1}".format(answer, time))
answered = ask_yesno("Use this time zone?", True)
else:
- print(" ERROR: Time zone not found. Please try again. Time zones are case-sensitive.")
+ print(" ERROR: No matches found. Please try again.")
SAMPLE_CONF['TIMEZONE'] = answer
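The fallback above searches dateutil's bundled zoneinfo tarball for partial matches instead of failing outright. The lookup itself is short enough to sketch on its own (the sample query and printed result are illustrative):

    import tarfile
    import dateutil.zoneinfo

    def find_timezones(query):
        """Return tz database names containing the query, case-insensitively."""
        zonesfile = tarfile.open(fileobj=dateutil.zoneinfo.getzoneinfofile_stream())
        return [z for z in zonesfile.getnames() if query.lower() in z.lower()]

    print(find_timezones('warsaw'))  # e.g. ['Europe/Warsaw']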
@@ -353,7 +411,8 @@ class CommandInit(Command):
('Site author', 'Nikola Tesla', True, 'BLOG_AUTHOR'),
('Site author\'s e-mail', 'n.tesla@example.com', True, 'BLOG_EMAIL'),
('Site description', 'This is a demo site for Nikola.', True, 'BLOG_DESCRIPTION'),
- ('Site URL', 'http://getnikola.com/', True, 'SITE_URL'),
+ (urlhandler, None, True, True),
+ (prettyhandler, None, True, True),
('Questions about languages and locales', None, None, None),
(lhandler, None, True, True),
(tzhandler, None, True, True),
@@ -377,6 +436,10 @@ class CommandInit(Command):
query(default, toconf)
else:
answer = ask(query, default)
+ try:
+ answer = answer.decode('utf-8')
+ except (AttributeError, UnicodeDecodeError):
+ pass
if toconf:
SAMPLE_CONF[destination] = answer
if destination == '!target':
@@ -386,7 +449,7 @@ class CommandInit(Command):
STORAGE['target'] = answer
print("\nThat's it, Nikola is now configured. Make sure to edit conf.py to your liking.")
- print("If you are looking for themes and addons, check out http://themes.getnikola.com/ and http://plugins.getnikola.com/.")
+ print("If you are looking for themes and addons, check out https://themes.getnikola.com/ and https://plugins.getnikola.com/.")
print("Have fun!")
return STORAGE
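The IDN handling in the new urlhandler amounts to re-encoding the host as Punycode when it is not pure ASCII. A minimal sketch of that conversion using the same urlsplit/urlunsplit pair (Python 3 import spelling; to_punycode is an illustrative helper, not part of Nikola):

    from urllib.parse import urlsplit, urlunsplit

    def to_punycode(url):
        """Re-encode the netloc of a URL as Punycode if it contains non-ASCII."""
        parts = urlsplit(url)
        try:
            parts.netloc.encode('ascii')
            return url  # already ASCII, nothing to do
        except (UnicodeEncodeError, UnicodeDecodeError):
            netloc = parts.netloc.encode('idna').decode('ascii')
            return urlunsplit((parts.scheme, netloc, parts.path,
                               parts.query, parts.fragment))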
diff --git a/nikola/plugins/command/install_theme.plugin b/nikola/plugins/command/install_theme.plugin
index 84b2623..54a91ff 100644
--- a/nikola/plugins/command/install_theme.plugin
+++ b/nikola/plugins/command/install_theme.plugin
@@ -4,7 +4,7 @@ Module = install_theme
[Documentation]
Author = Roberto Alsina
-Version = 0.1
+Version = 1.0
Website = http://getnikola.com
Description = Install a theme into the current site.
diff --git a/nikola/plugins/command/install_theme.py b/nikola/plugins/command/install_theme.py
index 5397772..4937509 100644
--- a/nikola/plugins/command/install_theme.py
+++ b/nikola/plugins/command/install_theme.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -28,42 +28,18 @@ from __future__ import print_function
import os
import io
import json
-import shutil
+import requests
import pygments
from pygments.lexers import PythonLexer
from pygments.formatters import TerminalFormatter
-try:
- import requests
-except ImportError:
- requests = None # NOQA
-
from nikola.plugin_categories import Command
from nikola import utils
LOGGER = utils.get_logger('install_theme', utils.STDERR_HANDLER)
-# Stolen from textwrap in Python 3.3.2.
-def indent(text, prefix, predicate=None): # NOQA
- """Adds 'prefix' to the beginning of selected lines in 'text'.
-
- If 'predicate' is provided, 'prefix' will only be added to the lines
- where 'predicate(line)' is True. If 'predicate' is not provided,
- it will default to adding 'prefix' to all non-empty lines that do not
- consist solely of whitespace characters.
- """
- if predicate is None:
- def predicate(line):
- return line.strip()
-
- def prefixed_lines():
- for line in text.splitlines(True):
- yield (prefix + line if predicate(line) else line)
- return ''.join(prefixed_lines())
-
-
class CommandInstallTheme(Command):
"""Install a theme."""
@@ -86,16 +62,21 @@ class CommandInstallTheme(Command):
'long': 'url',
'type': str,
'help': "URL for the theme repository (default: "
- "http://themes.getnikola.com/v7/themes.json)",
- 'default': 'http://themes.getnikola.com/v7/themes.json'
+ "https://themes.getnikola.com/v7/themes.json)",
+ 'default': 'https://themes.getnikola.com/v7/themes.json'
+ },
+ {
+ 'name': 'getpath',
+ 'short': 'g',
+ 'long': 'get-path',
+ 'type': bool,
+ 'default': False,
+ 'help': "Print the path for installed theme",
},
]
def _execute(self, options, args):
"""Install theme into current site."""
- if requests is None:
- utils.req_missing(['requests'], 'install themes')
-
listing = options['list']
url = options['url']
if args:
@@ -103,6 +84,14 @@ class CommandInstallTheme(Command):
else:
name = None
+ if options['getpath'] and name:
+ path = utils.get_theme_path(name)
+ if path:
+ print(path)
+ else:
+ print('not installed')
+ return 0
+
if name is None and not listing:
LOGGER.error("This command needs either a theme name or the -l option.")
return False
@@ -135,36 +124,31 @@ class CommandInstallTheme(Command):
def do_install(self, name, data):
if name in data:
utils.makedirs(self.output_dir)
- LOGGER.info('Downloading: ' + data[name])
+ LOGGER.info("Downloading '{0}'".format(data[name]))
zip_file = io.BytesIO()
zip_file.write(requests.get(data[name]).content)
- LOGGER.info('Extracting: {0} into themes'.format(name))
+ LOGGER.info("Extracting '{0}' into themes/".format(name))
utils.extract_all(zip_file)
- dest_path = os.path.join('themes', name)
+ dest_path = os.path.join(self.output_dir, name)
else:
+ dest_path = os.path.join(self.output_dir, name)
try:
theme_path = utils.get_theme_path(name)
- except:
- LOGGER.error("Can't find theme " + name)
- return False
+ LOGGER.error("Theme '{0}' is already installed in {1}".format(name, theme_path))
+ except Exception:
+ LOGGER.error("Can't find theme {0}".format(name))
- utils.makedirs(self.output_dir)
- dest_path = os.path.join(self.output_dir, name)
- if os.path.exists(dest_path):
- LOGGER.error("{0} is already installed".format(name))
- return False
+ return False
- LOGGER.info('Copying {0} into themes'.format(theme_path))
- shutil.copytree(theme_path, dest_path)
confpypath = os.path.join(dest_path, 'conf.py.sample')
if os.path.exists(confpypath):
LOGGER.notice('This theme has a sample config file. Integrate it with yours in order to make this theme work!')
print('Contents of the conf.py.sample file:\n')
with io.open(confpypath, 'r', encoding='utf-8') as fh:
if self.site.colorful:
- print(indent(pygments.highlight(
+ print(utils.indent(pygments.highlight(
fh.read(), PythonLexer(), TerminalFormatter()),
4 * ' '))
else:
- print(indent(fh.read(), 4 * ' '))
+ print(utils.indent(fh.read(), 4 * ' '))
return True
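do_install above hands the downloaded zip to utils.extract_all; the download-and-unpack step itself comes down to roughly the following, using only requests and the standard library (the URL in the comment is hypothetical):

    import io
    import os
    import zipfile
    import requests

    def fetch_theme(url, themes_dir='themes'):
        """Download a theme zip and unpack it under the themes directory."""
        resp = requests.get(url)
        resp.raise_for_status()
        if not os.path.isdir(themes_dir):
            os.makedirs(themes_dir)
        with zipfile.ZipFile(io.BytesIO(resp.content)) as zf:
            zf.extractall(themes_dir)

    # fetch_theme('https://themes.getnikola.com/v7/zen.zip')  # hypothetical URL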
diff --git a/nikola/plugins/command/new_page.plugin b/nikola/plugins/command/new_page.plugin
index 1f1c84c..f078dd6 100644
--- a/nikola/plugins/command/new_page.plugin
+++ b/nikola/plugins/command/new_page.plugin
@@ -4,6 +4,6 @@ Module = new_page
[Documentation]
Author = Roberto Alsina, Chris Warrick
-Version = 0.1
+Version = 1.0
Website = http://getnikola.com
Description = Create a new page.
diff --git a/nikola/plugins/command/new_page.py b/nikola/plugins/command/new_page.py
index f07ba39..39a85bd 100644
--- a/nikola/plugins/command/new_page.py
+++ b/nikola/plugins/command/new_page.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina, Chris Warrick and others.
+# Copyright © 2012-2015 Roberto Alsina, Chris Warrick and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -45,6 +45,14 @@ class CommandNewPage(Command):
'help': 'Title for the page.'
},
{
+ 'name': 'author',
+ 'short': 'a',
+ 'long': 'author',
+ 'type': str,
+ 'default': '',
+            'help': 'Author of the page.'

+ },
+ {
'name': 'onefile',
'short': '1',
'type': bool,
@@ -71,13 +79,29 @@ class CommandNewPage(Command):
'long': 'format',
'type': str,
'default': '',
- 'help': 'Markup format for the page, one of rest, markdown, wiki, '
- 'bbcode, html, textile, txt2tags',
+ 'help': 'Markup format for the page (use --available-formats for list)',
+ },
+ {
+ 'name': 'available-formats',
+ 'short': 'F',
+ 'long': 'available-formats',
+ 'type': bool,
+ 'default': False,
+ 'help': 'List all available input formats'
+ },
+ {
+ 'name': 'import',
+ 'short': 'i',
+ 'long': 'import',
+ 'type': str,
+ 'default': '',
+ 'help': 'Import an existing file instead of creating a placeholder'
},
]
def _execute(self, options, args):
"""Create a new page."""
+ # Defaults for some values that don’t apply to pages and the is_page option (duh!)
options['tags'] = ''
options['schedule'] = False
options['is_page'] = True
diff --git a/nikola/plugins/command/new_post.plugin b/nikola/plugins/command/new_post.plugin
index ec35c35..fec4b1d 100644
--- a/nikola/plugins/command/new_post.plugin
+++ b/nikola/plugins/command/new_post.plugin
@@ -4,7 +4,7 @@ Module = new_post
[Documentation]
Author = Roberto Alsina
-Version = 0.1
+Version = 1.0
Website = http://getnikola.com
Description = Create a new post.
diff --git a/nikola/plugins/command/new_post.py b/nikola/plugins/command/new_post.py
index 24c09d0..5141c7e 100644
--- a/nikola/plugins/command/new_post.py
+++ b/nikola/plugins/command/new_post.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -30,6 +30,7 @@ import datetime
import os
import sys
import subprocess
+import operator
from blinker import signal
import dateutil.tz
@@ -37,12 +38,13 @@ import dateutil.tz
from nikola.plugin_categories import Command
from nikola import utils
+COMPILERS_DOC_LINK = 'https://getnikola.com/handbook.html#configuring-other-input-formats'
POSTLOGGER = utils.get_logger('new_post', utils.STDERR_HANDLER)
PAGELOGGER = utils.get_logger('new_page', utils.STDERR_HANDLER)
LOGGER = POSTLOGGER
-def filter_post_pages(compiler, is_post, compilers, post_pages):
+def filter_post_pages(compiler, is_post, compilers, post_pages, compiler_objs, compilers_raw):
"""Given a compiler ("markdown", "rest"), and whether it's meant for
a post or a page, and compilers, return the correct entry from
post_pages."""
@@ -51,7 +53,15 @@ def filter_post_pages(compiler, is_post, compilers, post_pages):
filtered = [entry for entry in post_pages if entry[3] == is_post]
# These are the extensions supported by the required format
- extensions = compilers[compiler]
+ extensions = compilers.get(compiler)
+ if extensions is None:
+ if compiler in compiler_objs:
+ LOGGER.error("There is a {0} compiler available, but it's not set in your COMPILERS option.".format(compiler))
+ LOGGER.info("Read more: {0}".format(COMPILERS_DOC_LINK))
+ else:
+ LOGGER.error('Unknown format {0}'.format(compiler))
+ print_compilers(compilers_raw, post_pages, compiler_objs)
+ return False
# Throw away the post_pages with the wrong extensions
filtered = [entry for entry in filtered if any([ext in entry[0] for ext in
@@ -59,13 +69,77 @@ def filter_post_pages(compiler, is_post, compilers, post_pages):
if not filtered:
type_name = "post" if is_post else "page"
- raise Exception("Can't find a way, using your configuration, to create "
- "a {0} in format {1}. You may want to tweak "
- "COMPILERS or {2}S in conf.py".format(
- type_name, compiler, type_name.upper()))
+ LOGGER.error("Can't find a way, using your configuration, to create "
+ "a {0} in format {1}. You may want to tweak "
+ "COMPILERS or {2}S in conf.py".format(
+ type_name, compiler, type_name.upper()))
+ LOGGER.info("Read more: {0}".format(COMPILERS_DOC_LINK))
+
+ return False
return filtered[0]
+def print_compilers(compilers_raw, post_pages, compiler_objs):
+ """
+ List all available compilers in a human-friendly format.
+
+ :param compilers_raw: The compilers dict, mapping compiler names to tuples of extensions
+ :param post_pages: The post_pages structure
+    :param compiler_objs: Compiler objects
+ """
+
+ # We use compilers_raw, because the normal dict can contain
+ # garbage coming from the translation candidate implementation.
+ # Entries are in format: (name, extensions, used_in_post_pages)
+ parsed_compilers = {'used': [], 'unused': [], 'disabled': []}
+
+ for compiler_name, compiler_obj in compiler_objs.items():
+ fname = compiler_obj.friendly_name or compiler_name
+ if compiler_name not in compilers_raw:
+ parsed_compilers['disabled'].append((compiler_name, fname, (), False))
+ else:
+ # stolen from filter_post_pages
+ extensions = compilers_raw[compiler_name]
+ filtered = [entry for entry in post_pages if any(
+ [ext in entry[0] for ext in extensions])]
+ if filtered:
+ parsed_compilers['used'].append((compiler_name, fname, extensions, True))
+ else:
+ parsed_compilers['unused'].append((compiler_name, fname, extensions, False))
+
+ # Sort compilers alphabetically by name, just so it’s prettier (and
+ # deterministic)
+ parsed_compilers['used'].sort(key=operator.itemgetter(0))
+ parsed_compilers['unused'].sort(key=operator.itemgetter(0))
+ parsed_compilers['disabled'].sort(key=operator.itemgetter(0))
+
+ # We also group the compilers by status for readability.
+ parsed_list = parsed_compilers['used'] + parsed_compilers['unused'] + parsed_compilers['disabled']
+
+ print("Available input formats:\n")
+
+ name_width = max([len(i[0]) for i in parsed_list] + [4]) # 4 == len('NAME')
+ fname_width = max([len(i[1]) for i in parsed_list] + [11]) # 11 == len('DESCRIPTION')
+
+ print((' {0:<' + str(name_width) + '} {1:<' + str(fname_width) + '} EXTENSIONS\n').format('NAME', 'DESCRIPTION'))
+
+ for name, fname, extensions, used in parsed_list:
+ flag = ' ' if used else '!'
+ flag = flag if extensions else '~'
+
+ extensions = ', '.join(extensions) if extensions else '(disabled: not in COMPILERS)'
+
+ print(('{flag}{name:<' + str(name_width) + '} {fname:<' + str(fname_width) + '} {extensions}').format(flag=flag, name=name, fname=fname, extensions=extensions))
+
+ print("""
+More compilers are available in the Plugins Index.
+
+Compilers marked with ! and ~ require additional configuration:
+ ! not in the PAGES/POSTS tuples (unused)
+ ~ not in the COMPILERS dict (disabled)
+Read more: {0}""".format(COMPILERS_DOC_LINK))
+
+
def get_default_compiler(is_post, compilers, post_pages):
"""Given compilers and post_pages, return a reasonable
default compiler for this kind of post/page.
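The table printed by print_compilers pads the NAME and DESCRIPTION columns to the widest entry by building the format string at runtime. That padding trick in isolation, with made-up rows:

    rows = [('markdown', 'Markdown', '.md, .mdown'),
            ('rest', 'reStructuredText', '.rst, .txt')]

    name_w = max([len(r[0]) for r in rows] + [len('NAME')])
    desc_w = max([len(r[1]) for r in rows] + [len('DESCRIPTION')])

    print(('{0:<' + str(name_w) + '}  {1:<' + str(desc_w) + '}  EXTENSIONS').format('NAME', 'DESCRIPTION'))
    for name, desc, ext in rows:
        print(('{0:<' + str(name_w) + '}  {1:<' + str(desc_w) + '}  {2}').format(name, desc, ext))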
@@ -116,7 +190,7 @@ def get_date(schedule=False, rule=None, last_date=None, tz=None, iso8601=False):
rrule = None # NOQA
if schedule and rrule and rule:
try:
- rule_ = rrule.rrulestr(rule, dtstart=last_date)
+ rule_ = rrule.rrulestr(rule, dtstart=last_date or date)
except Exception:
LOGGER.error('Unable to parse rule string, using current time.')
else:
@@ -161,6 +235,14 @@ class CommandNewPost(Command):
'help': 'Title for the post.'
},
{
+ 'name': 'author',
+ 'short': 'a',
+ 'long': 'author',
+ 'type': str,
+ 'default': '',
+ 'help': 'Author of the post.'
+ },
+ {
'name': 'tags',
'long': 'tags',
'type': str,
@@ -194,8 +276,15 @@ class CommandNewPost(Command):
'long': 'format',
'type': str,
'default': '',
- 'help': 'Markup format for the post, one of rest, markdown, wiki, '
- 'bbcode, html, textile, txt2tags',
+ 'help': 'Markup format for the post (use --available-formats for list)',
+ },
+ {
+ 'name': 'available-formats',
+ 'short': 'F',
+ 'long': 'available-formats',
+ 'type': bool,
+ 'default': False,
+ 'help': 'List all available input formats'
},
{
'name': 'schedule',
@@ -204,6 +293,14 @@ class CommandNewPost(Command):
'default': False,
'help': 'Schedule the post based on recurrence rule'
},
+ {
+ 'name': 'import',
+ 'short': 'i',
+ 'long': 'import',
+ 'type': str,
+ 'default': '',
+ 'help': 'Import an existing file instead of creating a placeholder'
+ },
]
@@ -228,9 +325,16 @@ class CommandNewPost(Command):
is_post = not is_page
content_type = 'page' if is_page else 'post'
title = options['title'] or None
+ author = options['author'] or ''
tags = options['tags']
onefile = options['onefile']
twofile = options['twofile']
+ import_file = options['import']
+ wants_available = options['available-formats']
+
+ if wants_available:
+ print_compilers(self.site.config['_COMPILERS_RAW'], self.site.config['post_pages'], self.site.compilers)
+ return
if is_page:
LOGGER = PAGELOGGER
@@ -243,6 +347,10 @@ class CommandNewPost(Command):
onefile = self.site.config.get('ONE_FILE_POSTS', True)
content_format = options['content_format']
+ content_subformat = None
+
+ if "@" in content_format:
+ content_format, content_subformat = content_format.split("@")
if not content_format: # Issue #400
content_format = get_default_compiler(
@@ -251,7 +359,8 @@ class CommandNewPost(Command):
self.site.config['post_pages'])
if content_format not in compiler_names:
- LOGGER.error("Unknown {0} format {1}".format(content_type, content_format))
+ LOGGER.error("Unknown {0} format {1}, maybe you need to install a plugin?".format(content_type, content_format))
+ print_compilers(self.site.config['_COMPILERS_RAW'], self.site.config['post_pages'], self.site.compilers)
return
compiler_plugin = self.site.plugin_manager.getPluginByName(
content_format, "PageCompiler").plugin_object
@@ -259,10 +368,19 @@ class CommandNewPost(Command):
# Guess where we should put this
entry = filter_post_pages(content_format, is_post,
self.site.config['COMPILERS'],
- self.site.config['post_pages'])
+ self.site.config['post_pages'],
+ self.site.compilers,
+ self.site.config['_COMPILERS_RAW'])
+
+ if entry is False:
+ return 1
- print("Creating New {0}".format(content_type.title()))
- print("-----------------\n")
+ if import_file:
+ print("Importing Existing {xx}".format(xx=content_type.title()))
+ print("-----------------------\n")
+ else:
+ print("Creating New {xx}".format(xx=content_type.title()))
+ print("-----------------\n")
if title is not None:
print("Title:", title)
else:
@@ -272,7 +390,7 @@ class CommandNewPost(Command):
if isinstance(title, utils.bytes_str):
try:
title = title.decode(sys.stdin.encoding)
- except AttributeError: # for tests
+ except (AttributeError, TypeError): # for tests
title = title.decode('utf-8')
title = title.strip()
@@ -282,9 +400,16 @@ class CommandNewPost(Command):
if isinstance(path, utils.bytes_str):
try:
path = path.decode(sys.stdin.encoding)
- except AttributeError: # for tests
+ except (AttributeError, TypeError): # for tests
path = path.decode('utf-8')
slug = utils.slugify(os.path.splitext(os.path.basename(path))[0])
+
+ if isinstance(author, utils.bytes_str):
+ try:
+ author = author.decode(sys.stdin.encoding)
+ except (AttributeError, TypeError): # for tests
+ author = author.decode('utf-8')
+
# Calculate the date to use for the content
schedule = options['schedule'] or self.site.config['SCHEDULE_ALL']
rule = self.site.config['SCHEDULE_RULE']
@@ -308,23 +433,46 @@ class CommandNewPost(Command):
if not path:
txt_path = os.path.join(output_path, slug + suffix)
else:
- txt_path = path
+ txt_path = os.path.join(self.site.original_cwd, path)
if (not onefile and os.path.isfile(meta_path)) or \
os.path.isfile(txt_path):
+
+ # Emit an event when a post exists
+ event = dict(path=txt_path)
+ if not onefile: # write metadata file
+ event['meta_path'] = meta_path
+ signal('existing_' + content_type).send(self, **event)
+
LOGGER.error("The title already exists!")
- exit()
+ return 8
d_name = os.path.dirname(txt_path)
utils.makedirs(d_name)
- metadata = self.site.config['ADDITIONAL_METADATA']
+ metadata = {}
+ if author:
+ metadata['author'] = author
+ metadata.update(self.site.config['ADDITIONAL_METADATA'])
+ data.update(metadata)
+
+ # ipynb plugin needs the ipython kernel info. We get the kernel name
+ # from the content_subformat and pass it to the compiler in the metadata
+ if content_format == "ipynb" and content_subformat is not None:
+ metadata["ipython_kernel"] = content_subformat
# Override onefile if not really supported.
if not compiler_plugin.supports_onefile and onefile:
onefile = False
LOGGER.warn('This compiler does not support one-file posts.')
- content = "Write your {0} here.".format('page' if is_page else 'post')
+ if import_file:
+ with io.open(import_file, 'r', encoding='utf-8') as fh:
+ content = fh.read()
+ else:
+ if is_page:
+ content = self.site.MESSAGES[self.site.default_lang]["Write your page here."]
+ else:
+ content = self.site.MESSAGES[self.site.default_lang]["Write your post here."]
compiler_plugin.create_post(
txt_path, content=content, onefile=onefile, title=title,
slug=slug, date=date, tags=tags, is_page=is_page, **metadata)
diff --git a/nikola/plugins/command/orphans.plugin b/nikola/plugins/command/orphans.plugin
index 408578b..f491eaf 100644
--- a/nikola/plugins/command/orphans.plugin
+++ b/nikola/plugins/command/orphans.plugin
@@ -4,7 +4,7 @@ Module = orphans
[Documentation]
Author = Roberto Alsina, Chris Warrick
-Version = 0.1
+Version = 1.0
Website = http://getnikola.com
Description = List all orphans
diff --git a/nikola/plugins/command/orphans.py b/nikola/plugins/command/orphans.py
index ff114b4..f550e17 100644
--- a/nikola/plugins/command/orphans.py
+++ b/nikola/plugins/command/orphans.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina, Chris Warrick and others.
+# Copyright © 2012-2015 Roberto Alsina, Chris Warrick and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
diff --git a/nikola/plugins/command/plugin.plugin b/nikola/plugins/command/plugin.plugin
index d2bca92..2815caa 100644
--- a/nikola/plugins/command/plugin.plugin
+++ b/nikola/plugins/command/plugin.plugin
@@ -4,7 +4,7 @@ Module = plugin
[Documentation]
Author = Roberto Alsina and Chris Warrick
-Version = 0.2
+Version = 1.0
Website = http://getnikola.com
Description = Manage Nikola plugins
diff --git a/nikola/plugins/command/plugin.py b/nikola/plugins/command/plugin.py
index 71901b8..56eb1d7 100644
--- a/nikola/plugins/command/plugin.py
+++ b/nikola/plugins/command/plugin.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -30,41 +30,18 @@ import os
import shutil
import subprocess
import sys
+import requests
import pygments
from pygments.lexers import PythonLexer
from pygments.formatters import TerminalFormatter
-try:
- import requests
-except ImportError:
- requests = None # NOQA
-
from nikola.plugin_categories import Command
from nikola import utils
LOGGER = utils.get_logger('plugin', utils.STDERR_HANDLER)
-# Stolen from textwrap in Python 3.3.2.
-def indent(text, prefix, predicate=None): # NOQA
- """Adds 'prefix' to the beginning of selected lines in 'text'.
-
- If 'predicate' is provided, 'prefix' will only be added to the lines
- where 'predicate(line)' is True. If 'predicate' is not provided,
- it will default to adding 'prefix' to all non-empty lines that do not
- consist solely of whitespace characters.
- """
- if predicate is None:
- def predicate(line):
- return line.strip()
-
- def prefixed_lines():
- for line in text.splitlines(True):
- yield (prefix + line if predicate(line) else line)
- return ''.join(prefixed_lines())
-
-
class CommandPlugin(Command):
"""Manage plugins."""
@@ -105,8 +82,8 @@ class CommandPlugin(Command):
'long': 'url',
'type': str,
'help': "URL for the plugin repository (default: "
- "http://plugins.getnikola.com/v7/plugins.json)",
- 'default': 'http://plugins.getnikola.com/v7/plugins.json'
+ "https://plugins.getnikola.com/v7/plugins.json)",
+ 'default': 'https://plugins.getnikola.com/v7/plugins.json'
},
{
'name': 'user',
@@ -258,7 +235,7 @@ class CommandPlugin(Command):
LOGGER.error('Could not install the dependencies.')
print('Contents of the requirements.txt file:\n')
with io.open(reqpath, 'r', encoding='utf-8') as fh:
- print(indent(fh.read(), 4 * ' '))
+ print(utils.indent(fh.read(), 4 * ' '))
print('You have to install those yourself or through a '
'package manager.')
else:
@@ -272,8 +249,8 @@ class CommandPlugin(Command):
with io.open(reqnpypath, 'r', encoding='utf-8') as fh:
for l in fh.readlines():
i, j = l.split('::')
- print(indent(i.strip(), 4 * ' '))
- print(indent(j.strip(), 8 * ' '))
+ print(utils.indent(i.strip(), 4 * ' '))
+ print(utils.indent(j.strip(), 8 * ' '))
print()
print('You have to install those yourself or through a package '
@@ -284,11 +261,11 @@ class CommandPlugin(Command):
print('Contents of the conf.py.sample file:\n')
with io.open(confpypath, 'r', encoding='utf-8') as fh:
if self.site.colorful:
- print(indent(pygments.highlight(
+ print(utils.indent(pygments.highlight(
fh.read(), PythonLexer(), TerminalFormatter()),
4 * ' '))
else:
- print(indent(fh.read(), 4 * ' '))
+ print(utils.indent(fh.read(), 4 * ' '))
return True
def do_uninstall(self, name):
@@ -311,8 +288,6 @@ class CommandPlugin(Command):
return False
def get_json(self, url):
- if requests is None:
- utils.req_missing(['requests'], 'install or list available plugins', python=True, optional=False)
if self.json is None:
self.json = requests.get(url).json()
return self.json
diff --git a/nikola/plugins/command/rst2html.plugin b/nikola/plugins/command/rst2html.plugin
new file mode 100644
index 0000000..0d0d3b0
--- /dev/null
+++ b/nikola/plugins/command/rst2html.plugin
@@ -0,0 +1,9 @@
+[Core]
+Name = rst2html
+Module = rst2html
+
+[Documentation]
+Author = Chris Warrick
+Version = 1.0
+Website = http://getnikola.com
+Description = Compile reStructuredText to HTML using the Nikola architecture
diff --git a/nikola/plugins/command/rst2html/__init__.py b/nikola/plugins/command/rst2html/__init__.py
new file mode 100644
index 0000000..342aaeb
--- /dev/null
+++ b/nikola/plugins/command/rst2html/__init__.py
@@ -0,0 +1,69 @@
+# -*- coding: utf-8 -*-
+
+# Copyright © 2015 Chris Warrick and others.
+
+# Permission is hereby granted, free of charge, to any
+# person obtaining a copy of this software and associated
+# documentation files (the "Software"), to deal in the
+# Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the
+# Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice
+# shall be included in all copies or substantial portions of
+# the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
+# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+from __future__ import unicode_literals, print_function
+
+import io
+import lxml.html
+from pkg_resources import resource_filename
+from mako.template import Template
+from nikola.plugin_categories import Command
+
+
+class CommandRst2Html(Command):
+ """Compile reStructuredText to HTML, using Nikola architecture."""
+
+ name = "rst2html"
+ doc_usage = "infile"
+ doc_purpose = "compile reStructuredText to HTML files"
+ needs_config = False
+
+ def _execute(self, options, args):
+ """Compile reStructuredText to standalone HTML files."""
+ compiler = self.site.plugin_manager.getPluginByName('rest', 'PageCompiler').plugin_object
+ if len(args) != 1:
+ print("This command takes only one argument (input file name).")
+ return 2
+ source = args[0]
+ with io.open(source, "r", encoding="utf8") as in_file:
+ data = in_file.read()
+ output, error_level, deps = compiler.compile_html_string(data, source, True)
+
+ rstcss_path = resource_filename('nikola', 'data/themes/base/assets/css/rst.css')
+ with io.open(rstcss_path, "r", encoding="utf8") as fh:
+ rstcss = fh.read()
+
+ template_path = resource_filename('nikola', 'plugins/command/rst2html/rst2html.tmpl')
+ template = Template(filename=template_path)
+ template_output = template.render(rstcss=rstcss, output=output)
+ parser = lxml.html.HTMLParser(remove_blank_text=True)
+ doc = lxml.html.document_fromstring(template_output, parser)
+ html = b'<!DOCTYPE html>\n' + lxml.html.tostring(doc, encoding='utf8', method='html', pretty_print=True)
+ print(html)
+ if error_level < 3:
+ return 0
+ else:
+ return 1
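The last step of _execute re-parses the rendered template with lxml so the standalone page comes out pretty-printed. That round-trip on its own, with a toy document:

    import lxml.html

    raw = '<html><body><p>Hello <em>world</em></p></body></html>'
    parser = lxml.html.HTMLParser(remove_blank_text=True)
    doc = lxml.html.document_fromstring(raw, parser)
    html = b'<!DOCTYPE html>\n' + lxml.html.tostring(
        doc, encoding='utf8', method='html', pretty_print=True)
    print(html.decode('utf8'))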
diff --git a/nikola/plugins/command/rst2html/rst2html.tmpl b/nikola/plugins/command/rst2html/rst2html.tmpl
new file mode 100644
index 0000000..5a892ea
--- /dev/null
+++ b/nikola/plugins/command/rst2html/rst2html.tmpl
@@ -0,0 +1,13 @@
+<!DOCTYPE html>
+<html>
+<head>
+<meta charset="utf-8">
+<style type="text/css">
+${rstcss}
+</style>
+</head>
+
+<body>
+${output}
+</body>
+</html>
diff --git a/nikola/plugins/command/serve.plugin b/nikola/plugins/command/serve.plugin
index e663cc6..0c1176d 100644
--- a/nikola/plugins/command/serve.plugin
+++ b/nikola/plugins/command/serve.plugin
@@ -4,7 +4,7 @@ Module = serve
[Documentation]
Author = Roberto Alsina
-Version = 0.1
+Version = 1.0
Website = http://getnikola.com
Description = Start test server.
diff --git a/nikola/plugins/command/serve.py b/nikola/plugins/command/serve.py
index de4f6e2..0e4d01f 100644
--- a/nikola/plugins/command/serve.py
+++ b/nikola/plugins/command/serve.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,6 +26,7 @@
from __future__ import print_function
import os
+import socket
import webbrowser
try:
from BaseHTTPServer import HTTPServer
@@ -38,6 +39,11 @@ from nikola.plugin_categories import Command
from nikola.utils import get_logger
+class IPv6Server(HTTPServer):
+ """An IPv6 HTTPServer."""
+ address_family = socket.AF_INET6
+
+
class CommandServe(Command):
"""Start test server."""
@@ -53,7 +59,7 @@ class CommandServe(Command):
'long': 'port',
'default': 8000,
'type': int,
- 'help': 'Port nummber (default: 8000)',
+ 'help': 'Port number (default: 8000)',
},
{
'name': 'address',
@@ -61,7 +67,7 @@ class CommandServe(Command):
'long': 'address',
'type': str,
'default': '',
- 'help': 'Address to bind (default: 0.0.0.0 – all local interfaces)',
+ 'help': 'Address to bind (default: 0.0.0.0 – all local IPv4 interfaces)',
},
{
'name': 'browser',
@@ -70,7 +76,15 @@ class CommandServe(Command):
'type': bool,
'default': False,
'help': 'Open the test server in a web browser',
- }
+ },
+ {
+ 'name': 'ipv6',
+ 'short': '6',
+ 'long': 'ipv6',
+ 'type': bool,
+ 'default': False,
+ 'help': 'Use IPv6',
+ },
)
def _execute(self, options, args):
@@ -81,19 +95,33 @@ class CommandServe(Command):
self.logger.error("Missing '{0}' folder?".format(out_dir))
else:
os.chdir(out_dir)
- httpd = HTTPServer((options['address'], options['port']),
- OurHTTPRequestHandler)
+ if '[' in options['address']:
+ options['address'] = options['address'].strip('[').strip(']')
+ ipv6 = True
+ OurHTTP = IPv6Server
+ elif options['ipv6']:
+ ipv6 = True
+ OurHTTP = IPv6Server
+ else:
+ ipv6 = False
+ OurHTTP = HTTPServer
+
+ httpd = OurHTTP((options['address'], options['port']),
+ OurHTTPRequestHandler)
sa = httpd.socket.getsockname()
self.logger.info("Serving HTTP on {0} port {1}...".format(*sa))
if options['browser']:
- server_url = "http://{0}:{1}/".format(*sa)
+ if ipv6:
+ server_url = "http://[{0}]:{1}/".format(*sa)
+ else:
+ server_url = "http://{0}:{1}/".format(*sa)
self.logger.info("Opening {0} in the default web browser...".format(server_url))
webbrowser.open(server_url)
try:
httpd.serve_forever()
except KeyboardInterrupt:
self.logger.info("Server is shutting down.")
- exit(130)
+ return 130
class OurHTTPRequestHandler(SimpleHTTPRequestHandler):
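The whole IPv6 feature rests on the address_family override in IPv6Server. The same pattern as a standalone sketch (Python 3 module names; the plugin keeps the Python 2 fallback imports):

    import socket
    from http.server import HTTPServer, SimpleHTTPRequestHandler

    class IPv6Server(HTTPServer):
        """An HTTP server bound to an IPv6 socket."""
        address_family = socket.AF_INET6

    # httpd = IPv6Server(('::1', 8000), SimpleHTTPRequestHandler)
    # httpd.serve_forever()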
diff --git a/nikola/plugins/command/status.plugin b/nikola/plugins/command/status.plugin
new file mode 100644
index 0000000..e02da8b
--- /dev/null
+++ b/nikola/plugins/command/status.plugin
@@ -0,0 +1,9 @@
+[Core]
+Name = status
+Module = status
+
+[Documentation]
+Author = Daniel Aleksandersen
+Version = 1.0
+Website = https://getnikola.com
+Description = Site status
diff --git a/nikola/plugins/command/status.py b/nikola/plugins/command/status.py
new file mode 100644
index 0000000..b8a6a60
--- /dev/null
+++ b/nikola/plugins/command/status.py
@@ -0,0 +1,140 @@
+# -*- coding: utf-8 -*-
+
+# Copyright © 2012-2015 Roberto Alsina and others.
+
+# Permission is hereby granted, free of charge, to any
+# person obtaining a copy of this software and associated
+# documentation files (the "Software"), to deal in the
+# Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the
+# Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice
+# shall be included in all copies or substantial portions of
+# the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
+# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+from __future__ import print_function
+import io
+import os
+from datetime import datetime
+from dateutil.tz import gettz, tzlocal
+
+from nikola.plugin_categories import Command
+
+
+class CommandDeploy(Command):
+ """ Site status. """
+ name = "status"
+
+ doc_purpose = "display site status"
+ doc_description = "Show information about the posts and site deployment."
+    doc_usage = '[-d|--list-drafts] [-m|--list-modified] [-s|--list-scheduled]'
+ logger = None
+ cmd_options = [
+ {
+ 'name': 'list_drafts',
+ 'short': 'd',
+ 'long': 'list-drafts',
+ 'type': bool,
+ 'default': False,
+ 'help': 'List all drafts',
+ },
+ {
+ 'name': 'list_modified',
+ 'short': 'm',
+ 'long': 'list-modified',
+ 'type': bool,
+ 'default': False,
+ 'help': 'List all modified files since last deployment',
+ },
+ {
+ 'name': 'list_scheduled',
+ 'short': 's',
+ 'long': 'list-scheduled',
+ 'type': bool,
+ 'default': False,
+ 'help': 'List all scheduled posts',
+ },
+ ]
+
+ def _execute(self, options, args):
+
+ self.site.scan_posts()
+
+ timestamp_path = os.path.join(self.site.config["CACHE_FOLDER"], "lastdeploy")
+
+ last_deploy = None
+
+ try:
+ with io.open(timestamp_path, "r", encoding="utf8") as inf:
+ last_deploy = datetime.strptime(inf.read().strip(), "%Y-%m-%dT%H:%M:%S.%f")
+ last_deploy_offset = datetime.utcnow() - last_deploy
+ except (IOError, Exception):
+ print("It does not seem like you’ve ever deployed the site (or cache missing).")
+
+ if last_deploy:
+
+ fmod_since_deployment = []
+ for root, dirs, files in os.walk(self.site.config["OUTPUT_FOLDER"], followlinks=True):
+ if not dirs and not files:
+ continue
+ for fname in files:
+ fpath = os.path.join(root, fname)
+ fmodtime = datetime.fromtimestamp(os.stat(fpath).st_mtime)
+ if fmodtime.replace(tzinfo=tzlocal()) > last_deploy.replace(tzinfo=gettz("UTC")).astimezone(tz=tzlocal()):
+ fmod_since_deployment.append(fpath)
+
+ if len(fmod_since_deployment) > 0:
+ print("{0} output files modified since last deployment {1} ago.".format(str(len(fmod_since_deployment)), self.human_time(last_deploy_offset)))
+ if options['list_modified']:
+ for fpath in fmod_since_deployment:
+ print("Modified: '{0}'".format(fpath))
+ else:
+ print("Last deployment {0} ago.".format(self.human_time(last_deploy_offset)))
+
+ now = datetime.utcnow().replace(tzinfo=gettz("UTC"))
+
+ posts_count = len(self.site.all_posts)
+
+ # find all drafts
+ posts_drafts = [post for post in self.site.all_posts if post.is_draft]
+ posts_drafts = sorted(posts_drafts, key=lambda post: post.source_path)
+
+ # find all scheduled posts with offset from now until publishing time
+ posts_scheduled = [(post.date - now, post) for post in self.site.all_posts if post.publish_later]
+ posts_scheduled = sorted(posts_scheduled, key=lambda offset_post: (offset_post[0], offset_post[1].source_path))
+
+ if len(posts_scheduled) > 0:
+ if options['list_scheduled']:
+ for offset, post in posts_scheduled:
+ print("Scheduled: '{1}' ({2}; source: {3}) in {0}".format(self.human_time(offset), post.meta('title'), post.permalink(), post.source_path))
+ else:
+ offset, post = posts_scheduled[0]
+ print("{0} to next scheduled post ('{1}'; {2}; source: {3}).".format(self.human_time(offset), post.meta('title'), post.permalink(), post.source_path))
+ if options['list_drafts']:
+ for post in posts_drafts:
+ print("Draft: '{0}' ({1}; source: {2})".format(post.meta('title'), post.permalink(), post.source_path))
+ print("{0} posts in total, {1} scheduled, and {2} drafts.".format(posts_count, len(posts_scheduled), len(posts_drafts)))
+
+ def human_time(self, dt):
+ days = dt.days
+ hours = dt.seconds / 60 // 60
+ minutes = dt.seconds / 60 - (hours * 60)
+ if days > 0:
+ return "{0:.0f} days and {1:.0f} hours".format(days, hours)
+ elif hours > 0:
+ return "{0:.0f} hours and {1:.0f} minutes".format(hours, minutes)
+ elif minutes:
+ return "{0:.0f} minutes".format(minutes)
+ return False
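human_time above splits a timedelta into days, hours and minutes with float division; divmod gives the same buckets a bit more directly, as in this sketch of the same idea:

    from datetime import timedelta

    def human_time(delta):
        """Render a timedelta as days/hours/minutes, coarsest unit first."""
        hours, rem = divmod(delta.seconds, 3600)
        minutes = rem // 60
        if delta.days > 0:
            return "{0} days and {1} hours".format(delta.days, hours)
        elif hours > 0:
            return "{0} hours and {1} minutes".format(hours, minutes)
        elif minutes:
            return "{0} minutes".format(minutes)
        return False

    print(human_time(timedelta(days=2, hours=5)))  # 2 days and 5 hours
    print(human_time(timedelta(minutes=90)))       # 1 hours and 30 minutes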
diff --git a/nikola/plugins/command/version.plugin b/nikola/plugins/command/version.plugin
index 3c1ae95..a3f58e8 100644
--- a/nikola/plugins/command/version.plugin
+++ b/nikola/plugins/command/version.plugin
@@ -4,6 +4,6 @@ Module = version
[Documentation]
Author = Roberto Alsina
-Version = 0.2
+Version = 1.0
Website = http://getnikola.com
Description = Show nikola version
diff --git a/nikola/plugins/command/version.py b/nikola/plugins/command/version.py
index 9b42423..b6520d7 100644
--- a/nikola/plugins/command/version.py
+++ b/nikola/plugins/command/version.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,19 +26,42 @@
from __future__ import print_function
+import lxml
+import requests
+
from nikola.plugin_categories import Command
from nikola import __version__
+URL = 'https://pypi.python.org/pypi?:action=doap&name=Nikola'
+
class CommandVersion(Command):
"""Print the version."""
name = "version"
- doc_usage = ""
+ doc_usage = "[--check]"
needs_config = False
doc_purpose = "print the Nikola version number"
+ cmd_options = [
+ {
+ 'name': 'check',
+ 'long': 'check',
+ 'short': '',
+ 'default': False,
+ 'type': bool,
+ 'help': "Check for new versions.",
+ }
+ ]
def _execute(self, options={}, args=None):
"""Print the version number."""
print("Nikola v" + __version__)
+ if options.get('check'):
+ data = requests.get(URL).text
+ doc = lxml.etree.fromstring(data.encode('utf8'))
+ revision = doc.findall('*//{http://usefulinc.com/ns/doap#}revision')[0].text
+ if revision == __version__:
+ print("Nikola is up-to-date")
+ else:
+ print("The latest version of Nikola is v{0} -- please upgrade using `pip install --upgrade Nikola=={0}` or your system package manager".format(revision))
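The --check flag fetches the DOAP record that PyPI serves for the project and compares its revision field with __version__. Stripped to its core (same endpoint and namespace as above; whether PyPI keeps serving DOAP is an assumption of this sketch):

    import lxml.etree
    import requests

    URL = 'https://pypi.python.org/pypi?:action=doap&name=Nikola'
    DOAP_NS = '{http://usefulinc.com/ns/doap#}'

    def latest_release(url=URL):
        """Return the newest release number from the project's DOAP record."""
        data = requests.get(url).text
        doc = lxml.etree.fromstring(data.encode('utf8'))
        return doc.findall('*//' + DOAP_NS + 'revision')[0].text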
diff --git a/nikola/plugins/compile/__init__.py b/nikola/plugins/compile/__init__.py
index 6ad8bac..a1d17a6 100644
--- a/nikola/plugins/compile/__init__.py
+++ b/nikola/plugins/compile/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
diff --git a/nikola/plugins/compile/html.plugin b/nikola/plugins/compile/html.plugin
index 21dd338..66623b2 100644
--- a/nikola/plugins/compile/html.plugin
+++ b/nikola/plugins/compile/html.plugin
@@ -4,7 +4,7 @@ Module = html
[Documentation]
Author = Roberto Alsina
-Version = 0.1
+Version = 1.0
Website = http://getnikola.com
Description = Compile HTML into HTML (just copy)
diff --git a/nikola/plugins/compile/html.py b/nikola/plugins/compile/html.py
index 24bf385..ab0c2f6 100644
--- a/nikola/plugins/compile/html.py
+++ b/nikola/plugins/compile/html.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -29,18 +29,16 @@
from __future__ import unicode_literals
import os
-import re
import io
from nikola.plugin_categories import PageCompiler
from nikola.utils import makedirs, write_metadata
-_META_SEPARATOR = '(' + os.linesep * 2 + '|' + ('\n' * 2) + '|' + ("\r\n" * 2) + ')'
-
class CompileHtml(PageCompiler):
"""Compile HTML into HTML."""
name = "html"
+ friendly_name = "HTML"
def compile_html(self, source, dest, is_two_file=True):
makedirs(os.path.dirname(dest))
@@ -48,7 +46,7 @@ class CompileHtml(PageCompiler):
with io.open(source, "r", encoding="utf8") as in_file:
data = in_file.read()
if not is_two_file:
- data = re.split(_META_SEPARATOR, data, maxsplit=1)[-1]
+ _, data = self.split_metadata(data)
out_file.write(data)
return True
diff --git a/nikola/plugins/compile/ipynb.plugin b/nikola/plugins/compile/ipynb.plugin
index e258d8a..efe6702 100644
--- a/nikola/plugins/compile/ipynb.plugin
+++ b/nikola/plugins/compile/ipynb.plugin
@@ -3,8 +3,8 @@ Name = ipynb
Module = ipynb
[Documentation]
-Author = Damian Avila
-Version = 1.0
-Website = http://www.oquanta.info
-Description = Compile IPython notebooks into HTML
+Author = Damian Avila, Chris Warrick and others
+Version = 2.0.0
+Website = http://www.damian.oquanta.info/
+Description = Compile IPython notebooks into Nikola posts
diff --git a/nikola/plugins/compile/ipynb.py b/nikola/plugins/compile/ipynb.py
new file mode 100644
index 0000000..82b76c8
--- /dev/null
+++ b/nikola/plugins/compile/ipynb.py
@@ -0,0 +1,150 @@
+# -*- coding: utf-8 -*-
+
+# Copyright © 2013-2015 Damián Avila, Chris Warrick and others.
+
+# Permission is hereby granted, free of charge, to any
+# person obtaining a copy of this software and associated
+# documentation files (the "Software"), to deal in the
+# Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the
+# Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice
+# shall be included in all copies or substantial portions of
+# the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
+# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+"""Implementation of compile_html based on nbconvert."""
+
+from __future__ import unicode_literals, print_function
+import io
+import os
+import sys
+
+try:
+ import IPython
+ from IPython.nbconvert.exporters import HTMLExporter
+ if IPython.version_info[0] >= 3: # API changed with 3.0.0
+ from IPython import nbformat
+ current_nbformat = nbformat.current_nbformat
+ from IPython.kernel import kernelspec
+ else:
+ import IPython.nbformat.current as nbformat
+ current_nbformat = 'json'
+ kernelspec = None
+
+ from IPython.config import Config
+ flag = True
+except ImportError:
+ flag = None
+
+from nikola.plugin_categories import PageCompiler
+from nikola.utils import makedirs, req_missing, get_logger
+
+
+class CompileIPynb(PageCompiler):
+ """Compile IPynb into HTML."""
+
+ name = "ipynb"
+ friendly_name = "Jupyter/IPython Notebook"
+ demote_headers = True
+ default_kernel = 'python2' if sys.version_info[0] == 2 else 'python3'
+
+ def set_site(self, site):
+ self.logger = get_logger('compile_ipynb', site.loghandlers)
+ super(CompileIPynb, self).set_site(site)
+
+ def compile_html(self, source, dest, is_two_file=True):
+ if flag is None:
+ req_missing(['ipython[notebook]>=2.0.0'], 'build this site (compile ipynb)')
+ makedirs(os.path.dirname(dest))
+ HTMLExporter.default_template = 'basic'
+ c = Config(self.site.config['IPYNB_CONFIG'])
+ exportHtml = HTMLExporter(config=c)
+ with io.open(dest, "w+", encoding="utf8") as out_file:
+ with io.open(source, "r", encoding="utf8") as in_file:
+ nb_json = nbformat.read(in_file, current_nbformat)
+ (body, resources) = exportHtml.from_notebook_node(nb_json)
+ out_file.write(body)
+
+ def read_metadata(self, post, file_metadata_regexp=None, unslugify_titles=False, lang=None):
+        """Read metadata directly from the ipynb file.
+
+        As ipynb files support arbitrary metadata as JSON, the metadata used by Nikola
+        is assumed to be in the 'nikola' subfield.
+ """
+ if flag is None:
+ req_missing(['ipython[notebook]>=2.0.0'], 'build this site (compile ipynb)')
+ source = post.source_path
+ with io.open(source, "r", encoding="utf8") as in_file:
+ nb_json = nbformat.read(in_file, current_nbformat)
+ # Metadata might not exist in two-file posts or in hand-crafted
+ # .ipynb files.
+ return nb_json.get('metadata', {}).get('nikola', {})
+
+ def create_post(self, path, **kw):
+ if flag is None:
+ req_missing(['ipython[notebook]>=2.0.0'], 'build this site (compile ipynb)')
+ content = kw.pop('content', None)
+ onefile = kw.pop('onefile', False)
+ kernel = kw.pop('ipython_kernel', None)
+ # is_page is not needed to create the file
+ kw.pop('is_page', False)
+
+ metadata = {}
+ metadata.update(self.default_metadata)
+ metadata.update(kw)
+
+ makedirs(os.path.dirname(path))
+
+ if content.startswith("{"):
+ # imported .ipynb file, guaranteed to start with "{" because it’s JSON.
+ nb = nbformat.reads(content, current_nbformat)
+ else:
+ if IPython.version_info[0] >= 3:
+ nb = nbformat.v4.new_notebook()
+ nb["cells"] = [nbformat.v4.new_markdown_cell(content)]
+ else:
+ nb = nbformat.new_notebook()
+ nb["worksheets"] = [nbformat.new_worksheet(cells=[nbformat.new_text_cell('markdown', [content])])]
+
+ if kernelspec is not None:
+ if kernel is None:
+ kernel = self.default_kernel
+ self.logger.notice('No kernel specified, assuming "{0}".'.format(kernel))
+
+ IPYNB_KERNELS = {}
+ ksm = kernelspec.KernelSpecManager()
+ for k in ksm.find_kernel_specs():
+ IPYNB_KERNELS[k] = ksm.get_kernel_spec(k).to_dict()
+ IPYNB_KERNELS[k]['name'] = k
+ del IPYNB_KERNELS[k]['argv']
+
+ if kernel not in IPYNB_KERNELS:
+ self.logger.error('Unknown kernel "{0}". Maybe you misspelled it?'.format(kernel))
+ self.logger.info("Available kernels: {0}".format(", ".join(sorted(IPYNB_KERNELS))))
+ raise Exception('Unknown kernel "{0}"'.format(kernel))
+
+ nb["metadata"]["kernelspec"] = IPYNB_KERNELS[kernel]
+ else:
+ # Older IPython versions don’t need kernelspecs.
+ pass
+
+ if onefile:
+ nb["metadata"]["nikola"] = metadata
+
+ with io.open(path, "w+", encoding="utf8") as fd:
+ if IPython.version_info[0] >= 3:
+ nbformat.write(nb, fd, 4)
+ else:
+ nbformat.write(nb, fd, 'ipynb')
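
The rewritten compiler keeps post metadata inside the notebook itself, under a 'nikola' subfield of the notebook metadata, and passes conf.py's IPYNB_CONFIG straight to nbconvert's HTMLExporter. A minimal sketch of both, with illustrative (assumed) field values:

    # conf.py -- IPYNB_CONFIG is handed verbatim to IPython's Config/HTMLExporter
    IPYNB_CONFIG = {}

    # What read_metadata() above extracts from a one-file notebook (values assumed):
    notebook_metadata = {
        "kernelspec": {"name": "python3", "display_name": "Python 3"},
        "nikola": {"title": "My post", "slug": "my-post", "date": "2015-07-08 10:35:02 UTC"},
    }
    nikola_meta = notebook_metadata.get("nikola", {})
    print(nikola_meta["slug"])  # my-post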
diff --git a/nikola/plugins/compile/ipynb/README.txt b/nikola/plugins/compile/ipynb/README.txt
deleted file mode 100644
index 0a7d6db..0000000
--- a/nikola/plugins/compile/ipynb/README.txt
+++ /dev/null
@@ -1,44 +0,0 @@
-To make this work...
-
-1- You can install the "jinja-site-ipython" theme using this command:
-
-$ nikola install_theme -n jinja-site-ipython
-
-(or xkcd-site-ipython, if you want xkcd styling)
-
-More info here about themes:
-http://getnikola.com/handbook.html#getting-more-themes
-
-OR
-
-You can to download the "jinja-site-ipython" theme from here:
-https://github.com/damianavila/jinja-site-ipython-theme-for-Nikola
-and copy the "site-ipython" folder inside the "themes" folder of your site.
-
-
-2- Then, just add:
-
-post_pages = (
- ("posts/*.ipynb", "posts", "post.tmpl", True),
- ("stories/*.ipynb", "stories", "story.tmpl", False),
-)
-
-and
-
-THEME = 'jinja-site-ipython' (or 'xkcd-site-ipython', if you want xkcd styling)
-
-to your conf.py.
-Finally... to use it:
-
-$nikola new_page -f ipynb
-
-**NOTE**: Just IGNORE the "-1" and "-2" options in nikola new_page command, by default this compiler
-create one metadata file and the corresponding naive IPython notebook.
-
-$nikola build
-
-And deploy the output folder... to see it locally: $nikola serve
-If you have any doubts, just ask: @damianavila
-
-Cheers.
-Damián
diff --git a/nikola/plugins/compile/ipynb/__init__.py b/nikola/plugins/compile/ipynb/__init__.py
deleted file mode 100644
index 7dde279..0000000
--- a/nikola/plugins/compile/ipynb/__init__.py
+++ /dev/null
@@ -1,97 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright © 2013-2014 Damián Avila and others.
-
-# Permission is hereby granted, free of charge, to any
-# person obtaining a copy of this software and associated
-# documentation files (the "Software"), to deal in the
-# Software without restriction, including without limitation
-# the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the
-# Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice
-# shall be included in all copies or substantial portions of
-# the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
-# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
-# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
-# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
-# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-"""Implementation of compile_html based on nbconvert."""
-
-from __future__ import unicode_literals, print_function
-import io
-import os
-
-try:
- from IPython.nbconvert.exporters import HTMLExporter
- from IPython.nbformat import current as nbformat
- from IPython.config import Config
- flag = True
-except ImportError:
- flag = None
-
-from nikola.plugin_categories import PageCompiler
-from nikola.utils import makedirs, req_missing
-
-
-class CompileIPynb(PageCompiler):
- """Compile IPynb into HTML."""
-
- name = "ipynb"
- supports_onefile = False
- demote_headers = True
-
- def compile_html(self, source, dest, is_two_file=True):
- if flag is None:
- req_missing(['ipython>=1.1.0'], 'build this site (compile ipynb)')
- makedirs(os.path.dirname(dest))
- HTMLExporter.default_template = 'basic'
- c = Config(self.site.config['IPYNB_CONFIG'])
- exportHtml = HTMLExporter(config=c)
- with io.open(dest, "w+", encoding="utf8") as out_file:
- with io.open(source, "r", encoding="utf8") as in_file:
- nb = in_file.read()
- nb_json = nbformat.reads_json(nb)
- (body, resources) = exportHtml.from_notebook_node(nb_json)
- out_file.write(body)
-
- def create_post(self, path, **kw):
- # content and onefile are ignored by ipynb.
- kw.pop('content', None)
- onefile = kw.pop('onefile', False)
- kw.pop('is_page', False)
-
- makedirs(os.path.dirname(path))
- if onefile:
- raise Exception('The one-file format is not supported by this compiler.')
- with io.open(path, "w+", encoding="utf8") as fd:
- fd.write("""{
- "metadata": {
- "name": ""
- },
- "nbformat": 3,
- "nbformat_minor": 0,
- "worksheets": [
- {
- "cells": [
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [],
- "language": "python",
- "metadata": {},
- "outputs": []
- }
- ],
- "metadata": {}
- }
- ]
-}""")
diff --git a/nikola/plugins/compile/markdown.plugin b/nikola/plugins/compile/markdown.plugin
index 157579a..a44b798 100644
--- a/nikola/plugins/compile/markdown.plugin
+++ b/nikola/plugins/compile/markdown.plugin
@@ -4,7 +4,7 @@ Module = markdown
[Documentation]
Author = Roberto Alsina
-Version = 0.1
+Version = 1.0
Website = http://getnikola.com
Description = Compile Markdown into HTML
diff --git a/nikola/plugins/compile/markdown/__init__.py b/nikola/plugins/compile/markdown/__init__.py
index 47c7c9b..fbe049d 100644
--- a/nikola/plugins/compile/markdown/__init__.py
+++ b/nikola/plugins/compile/markdown/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -30,7 +30,6 @@ from __future__ import unicode_literals
import io
import os
-import re
try:
from markdown import markdown
@@ -45,24 +44,27 @@ from nikola.utils import makedirs, req_missing, write_metadata
class CompileMarkdown(PageCompiler):
- """Compile markdown into HTML."""
+ """Compile Markdown into HTML."""
name = "markdown"
+ friendly_name = "Markdown"
demote_headers = True
extensions = []
site = None
def set_site(self, site):
+ self.config_dependencies = []
for plugin_info in site.plugin_manager.getPluginsOfCategory("MarkdownExtension"):
if plugin_info.name in site.config['DISABLED_PLUGINS']:
site.plugin_manager.removePluginFromCategory(plugin_info, "MarkdownExtension")
continue
-
+ self.config_dependencies.append(plugin_info.name)
site.plugin_manager.activatePluginByName(plugin_info.name)
plugin_info.plugin_object.set_site(site)
self.extensions.append(plugin_info.plugin_object)
plugin_info.plugin_object.short_help = plugin_info.description
+ self.config_dependencies.append(str(sorted(site.config.get("MARKDOWN_EXTENSIONS"))))
return super(CompileMarkdown, self).set_site(site)
def compile_html(self, source, dest, is_two_file=True):
@@ -74,7 +76,7 @@ class CompileMarkdown(PageCompiler):
with io.open(source, "r", encoding="utf8") as in_file:
data = in_file.read()
if not is_two_file:
- data = re.split('(\n\n|\r\n\r\n)', data, maxsplit=1)[-1]
+ _, data = self.split_metadata(data)
output = markdown(data, self.extensions)
out_file.write(output)
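
Because set_site() now records the active extension plugins and the sorted MARKDOWN_EXTENSIONS list as config dependencies, editing that option invalidates previously compiled Markdown posts. A hedged conf.py sketch (the extension names are typical examples, not mandated by this patch):

    # conf.py
    MARKDOWN_EXTENSIONS = ['fenced_code', 'codehilite', 'extra']
    # set_site() stores str(sorted(MARKDOWN_EXTENSIONS)) in config_dependencies,
    # so changing this list forces Markdown posts to be rebuilt.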
diff --git a/nikola/plugins/compile/markdown/mdx_gist.py b/nikola/plugins/compile/markdown/mdx_gist.py
index 4209bdd..70e7394 100644
--- a/nikola/plugins/compile/markdown/mdx_gist.py
+++ b/nikola/plugins/compile/markdown/mdx_gist.py
@@ -203,14 +203,11 @@ except ImportError:
Extension = Pattern = object
from nikola.plugin_categories import MarkdownExtension
-from nikola.utils import get_logger, req_missing, STDERR_HANDLER
+from nikola.utils import get_logger, STDERR_HANDLER
-LOGGER = get_logger('compile_markdown.mdx_gist', STDERR_HANDLER)
+import requests
-try:
- import requests
-except ImportError:
- requests = None # NOQA
+LOGGER = get_logger('compile_markdown.mdx_gist', STDERR_HANDLER)
GIST_JS_URL = "https://gist.github.com/{0}.js"
GIST_FILE_JS_URL = "https://gist.github.com/{0}.js?file={1}"
@@ -261,32 +258,27 @@ class GistPattern(Pattern):
gist_elem.set('class', 'gist')
script_elem = etree.SubElement(gist_elem, 'script')
- if requests:
- noscript_elem = etree.SubElement(gist_elem, 'noscript')
-
- try:
- if gist_file:
- script_elem.set('src', GIST_FILE_JS_URL.format(
- gist_id, gist_file))
- raw_gist = (self.get_raw_gist_with_filename(
- gist_id, gist_file))
+ noscript_elem = etree.SubElement(gist_elem, 'noscript')
- else:
- script_elem.set('src', GIST_JS_URL.format(
- gist_id))
- raw_gist = (self.get_raw_gist(gist_id))
+ try:
+ if gist_file:
+ script_elem.set('src', GIST_FILE_JS_URL.format(
+ gist_id, gist_file))
+ raw_gist = (self.get_raw_gist_with_filename(
+ gist_id, gist_file))
- # Insert source as <pre/> within <noscript>
- pre_elem = etree.SubElement(noscript_elem, 'pre')
- pre_elem.text = AtomicString(raw_gist)
+ else:
+ script_elem.set('src', GIST_JS_URL.format(gist_id))
+ raw_gist = (self.get_raw_gist(gist_id))
- except GistFetchException as e:
- LOGGER.warn(e.message)
- warning_comment = etree.Comment(' WARNING: {0} '.format(e.message))
- noscript_elem.append(warning_comment)
+ # Insert source as <pre/> within <noscript>
+ pre_elem = etree.SubElement(noscript_elem, 'pre')
+ pre_elem.text = AtomicString(raw_gist)
- else:
- req_missing('requests', 'have inline gist source', optional=True)
+ except GistFetchException as e:
+ LOGGER.warn(e.message)
+ warning_comment = etree.Comment(' WARNING: {0} '.format(e.message))
+ noscript_elem.append(warning_comment)
return gist_elem
diff --git a/nikola/plugins/compile/markdown/mdx_nikola.py b/nikola/plugins/compile/markdown/mdx_nikola.py
index ca67511..a03547f 100644
--- a/nikola/plugins/compile/markdown/mdx_nikola.py
+++ b/nikola/plugins/compile/markdown/mdx_nikola.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -37,7 +37,6 @@ except ImportError:
from nikola.plugin_categories import MarkdownExtension
-# FIXME: duplicated with listings.py
CODERE = re.compile('<div class="codehilite"><pre>(.*?)</pre></div>', flags=re.MULTILINE | re.DOTALL)
@@ -47,6 +46,7 @@ class NikolaPostProcessor(Postprocessor):
# python-markdown's highlighter uses <div class="codehilite"><pre>
# for code. We switch it to reST's <pre class="code">.
+ # TODO: monkey-patch for CodeHilite that uses nikola.utils.NikolaPygmentsHTML
output = CODERE.sub('<pre class="code literal-block">\\1</pre>', output)
return output
diff --git a/nikola/plugins/compile/markdown/mdx_podcast.py b/nikola/plugins/compile/markdown/mdx_podcast.py
index 9a67910..670973a 100644
--- a/nikola/plugins/compile/markdown/mdx_podcast.py
+++ b/nikola/plugins/compile/markdown/mdx_podcast.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# Copyright © 2013-2014 Michael Rabbitt, Roberto Alsina and others.
+# Copyright © 2013-2015 Michael Rabbitt, Roberto Alsina and others.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
diff --git a/nikola/plugins/compile/pandoc.plugin b/nikola/plugins/compile/pandoc.plugin
index 157b694..ad54b3b 100644
--- a/nikola/plugins/compile/pandoc.plugin
+++ b/nikola/plugins/compile/pandoc.plugin
@@ -4,7 +4,7 @@ Module = pandoc
[Documentation]
Author = Roberto Alsina
-Version = 0.1
+Version = 1.0
Website = http://getnikola.com
Description = Compile markups into HTML using pandoc
diff --git a/nikola/plugins/compile/pandoc.py b/nikola/plugins/compile/pandoc.py
index ada8035..361f158 100644
--- a/nikola/plugins/compile/pandoc.py
+++ b/nikola/plugins/compile/pandoc.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -30,6 +30,8 @@ You will need, of course, to install pandoc
"""
+from __future__ import unicode_literals
+
import io
import os
import subprocess
@@ -42,11 +44,16 @@ class CompilePandoc(PageCompiler):
"""Compile markups into HTML using pandoc."""
name = "pandoc"
+ friendly_name = "pandoc"
+
+ def set_site(self, site):
+ self.config_dependencies = [str(site.config['PANDOC_OPTIONS'])]
+ super(CompilePandoc, self).set_site(site)
def compile_html(self, source, dest, is_two_file=True):
makedirs(os.path.dirname(dest))
try:
- subprocess.check_call(('pandoc', '-o', dest, source))
+ subprocess.check_call(['pandoc', '-o', dest, source] + self.site.config['PANDOC_OPTIONS'])
except OSError as e:
if e.strerror == 'No such file or directory':
req_missing(['pandoc'], 'build this site (compile with pandoc)', python=False)
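
compile_html() now appends PANDOC_OPTIONS to the pandoc invocation, so extra flags can be set once in conf.py. An illustrative sketch (the flag values are assumptions, not part of the patch):

    # conf.py
    PANDOC_OPTIONS = ['--mathjax', '--toc']
    # Resulting call for a given source/dest pair:
    # subprocess.check_call(['pandoc', '-o', dest, source, '--mathjax', '--toc'])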
diff --git a/nikola/plugins/compile/php.plugin b/nikola/plugins/compile/php.plugin
index ac25259..d6623b5 100644
--- a/nikola/plugins/compile/php.plugin
+++ b/nikola/plugins/compile/php.plugin
@@ -4,7 +4,7 @@ Module = php
[Documentation]
Author = Roberto Alsina
-Version = 0.1
+Version = 1.0
Website = http://getnikola.com
Description = Compile PHP into HTML (just copy and name the file .php)
diff --git a/nikola/plugins/compile/php.py b/nikola/plugins/compile/php.py
index 77344fb..bb436e5 100644
--- a/nikola/plugins/compile/php.py
+++ b/nikola/plugins/compile/php.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -40,6 +40,7 @@ class CompilePhp(PageCompiler):
"""Compile PHP into PHP."""
name = "php"
+ friendly_name = "PHP"
def compile_html(self, source, dest, is_two_file=True):
makedirs(os.path.dirname(dest))
diff --git a/nikola/plugins/compile/rest.plugin b/nikola/plugins/compile/rest.plugin
index 55e9c59..f144809 100644
--- a/nikola/plugins/compile/rest.plugin
+++ b/nikola/plugins/compile/rest.plugin
@@ -4,7 +4,7 @@ Module = rest
[Documentation]
Author = Roberto Alsina
-Version = 0.1
+Version = 1.0
Website = http://getnikola.com
Description = Compile reSt into HTML
diff --git a/nikola/plugins/compile/rest/__init__.py b/nikola/plugins/compile/rest/__init__.py
index 98c7151..d446fe8 100644
--- a/nikola/plugins/compile/rest/__init__.py
+++ b/nikola/plugins/compile/rest/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -27,66 +27,78 @@
from __future__ import unicode_literals
import io
import os
-import re
-
-try:
- import docutils.core
- import docutils.nodes
- import docutils.utils
- import docutils.io
- import docutils.readers.standalone
- import docutils.writers.html4css1
- has_docutils = True
-except ImportError:
- has_docutils = False
+
+import docutils.core
+import docutils.nodes
+import docutils.utils
+import docutils.io
+import docutils.readers.standalone
+import docutils.writers.html4css1
from nikola.plugin_categories import PageCompiler
-from nikola.utils import get_logger, makedirs, req_missing, write_metadata
+from nikola.utils import unicode_str, get_logger, makedirs, write_metadata
class CompileRest(PageCompiler):
- """Compile reSt into HTML."""
+ """Compile reStructuredText into HTML."""
name = "rest"
+ friendly_name = "reStructuredText"
demote_headers = True
logger = None
- def compile_html(self, source, dest, is_two_file=True):
- """Compile reSt into HTML."""
+ def _read_extra_deps(self, post):
+ """Reads contents of .dep file and returns them as a list"""
+ dep_path = post.base_path + '.dep'
+ if os.path.isfile(dep_path):
+ with io.open(dep_path, 'r+', encoding='utf8') as depf:
+ deps = [l.strip() for l in depf.readlines()]
+ return deps
+ return []
+
+ def register_extra_dependencies(self, post):
+ """Adds dependency to post object to check .dep file."""
+ post.add_dependency(lambda: self._read_extra_deps(post), 'fragment')
+
+ def compile_html_string(self, data, source_path=None, is_two_file=True):
+ """Compile reSt into HTML strings."""
+ # If errors occur, this will be added to the line number reported by
+ # docutils so the line number matches the actual line number (off by
+ # 7 with default metadata, could be more or less depending on the post).
+ add_ln = 0
+ if not is_two_file:
+ m_data, data = self.split_metadata(data)
+ add_ln = len(m_data.splitlines()) + 1
+
+ default_template_path = os.path.join(os.path.dirname(__file__), 'template.txt')
+ output, error_level, deps = rst2html(
+ data, settings_overrides={
+ 'initial_header_level': 1,
+ 'record_dependencies': True,
+ 'stylesheet_path': None,
+ 'link_stylesheet': True,
+ 'syntax_highlight': 'short',
+ 'math_output': 'mathjax',
+ 'template': default_template_path,
+ }, logger=self.logger, source_path=source_path, l_add_ln=add_ln, transforms=self.site.rst_transforms)
+ if not isinstance(output, unicode_str):
+ # To prevent some weird bugs here or there.
+ # Original issue: empty files. `output` became a bytestring.
+ output = output.decode('utf-8')
+ return output, error_level, deps
- if not has_docutils:
- req_missing(['docutils'], 'build this site (compile reStructuredText)')
+ def compile_html(self, source, dest, is_two_file=True):
+ """Compile reSt into HTML files."""
makedirs(os.path.dirname(dest))
error_level = 100
with io.open(dest, "w+", encoding="utf8") as out_file:
with io.open(source, "r", encoding="utf8") as in_file:
data = in_file.read()
- add_ln = 0
- if not is_two_file:
- spl = re.split('(\n\n|\r\n\r\n)', data, maxsplit=1)
- data = spl[-1]
- if len(spl) != 1:
- # If errors occur, this will be added to the line
- # number reported by docutils so the line number
- # matches the actual line number (off by 7 with default
- # metadata, could be more or less depending on the post
- # author).
- add_ln = len(spl[0].splitlines()) + 1
-
- default_template_path = os.path.join(os.path.dirname(__file__), 'template.txt')
- output, error_level, deps = rst2html(
- data, settings_overrides={
- 'initial_header_level': 1,
- 'record_dependencies': True,
- 'stylesheet_path': None,
- 'link_stylesheet': True,
- 'syntax_highlight': 'short',
- 'math_output': 'mathjax',
- 'template': default_template_path,
- }, logger=self.logger, source_path=source, l_add_ln=add_ln)
+ output, error_level, deps = self.compile_html_string(data, source, is_two_file)
out_file.write(output)
deps_path = dest + '.dep'
if deps.list:
+ deps.list = [p for p in deps.list if p != dest] # Don't depend on yourself (#1671)
with io.open(deps_path, "w+", encoding="utf8") as deps_file:
deps_file.write('\n'.join(deps.list))
else:
@@ -111,15 +123,18 @@ class CompileRest(PageCompiler):
with io.open(path, "w+", encoding="utf8") as fd:
if onefile:
fd.write(write_metadata(metadata))
- fd.write('\n' + content)
+ fd.write('\n')
+ fd.write(content)
def set_site(self, site):
+ self.config_dependencies = []
for plugin_info in site.plugin_manager.getPluginsOfCategory("RestExtension"):
if plugin_info.name in site.config['DISABLED_PLUGINS']:
site.plugin_manager.removePluginFromCategory(plugin_info, "RestExtension")
continue
site.plugin_manager.activatePluginByName(plugin_info.name)
+ self.config_dependencies.append(plugin_info.name)
plugin_info.plugin_object.set_site(site)
plugin_info.plugin_object.short_help = plugin_info.description
@@ -160,6 +175,13 @@ def get_observer(settings):
class NikolaReader(docutils.readers.standalone.Reader):
+ def __init__(self, *args, **kwargs):
+ self.transforms = kwargs.pop('transforms', [])
+ docutils.readers.standalone.Reader.__init__(self, *args, **kwargs)
+
+ def get_transforms(self):
+ return docutils.readers.standalone.Reader(self).get_transforms() + self.transforms
+
def new_document(self):
"""Create and return a new empty document tree (root node)."""
document = docutils.utils.new_document(self.source.source_path, self.settings)
@@ -199,7 +221,7 @@ def add_node(node, visit_function=None, depart_function=None):
def depart_Math(self, node):
self.body.append('</math>')
- For full example, you can refer to `Microdata plugin <http://plugins.getnikola.com/#microdata>`_
+ For full example, you can refer to `Microdata plugin <https://plugins.getnikola.com/#microdata>`_
"""
docutils.nodes._add_node_class_names([node.__name__])
if visit_function:
@@ -213,7 +235,7 @@ def rst2html(source, source_path=None, source_class=docutils.io.StringInput,
parser=None, parser_name='restructuredtext', writer=None,
writer_name='html', settings=None, settings_spec=None,
settings_overrides=None, config_section=None,
- enable_exit_status=None, logger=None, l_add_ln=0):
+ enable_exit_status=None, logger=None, l_add_ln=0, transforms=None):
"""
Set up & run a `Publisher`, and return a dictionary of document parts.
Dictionary keys are the names of parts, and values are Unicode strings;
@@ -231,7 +253,7 @@ def rst2html(source, source_path=None, source_class=docutils.io.StringInput,
reStructuredText syntax errors.
"""
if reader is None:
- reader = NikolaReader()
+ reader = NikolaReader(transforms=transforms)
# For our custom logging, we have special needs and special settings we
# specify here.
# logger a logger from Nikola
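
The .dep handling above gives reST directives a way to register extra source files a post depends on: compile_html() writes docutils' recorded dependencies to '<dest>.dep', and register_extra_dependencies() re-reads that file as 'fragment' dependencies on later builds. A small sketch of the read side, under an assumed cache layout:

    import io
    # Assumed path: the compiled fragment and its .dep file live in the cache folder.
    dep_path = 'cache/posts/example.html.dep'
    with io.open(dep_path, 'r', encoding='utf8') as depf:
        extra_deps = [l.strip() for l in depf.readlines()]
    # e.g. ['listings/hello.py'] -- the post is rebuilt whenever that file changes.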
diff --git a/nikola/plugins/compile/rest/chart.py b/nikola/plugins/compile/rest/chart.py
index 55ddf5c..59b9dc7 100644
--- a/nikola/plugins/compile/rest/chart.py
+++ b/nikola/plugins/compile/rest/chart.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
diff --git a/nikola/plugins/compile/rest/doc.py b/nikola/plugins/compile/rest/doc.py
index 6143606..703c234 100644
--- a/nikola/plugins/compile/rest/doc.py
+++ b/nikola/plugins/compile/rest/doc.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
diff --git a/nikola/plugins/compile/rest/gist.py b/nikola/plugins/compile/rest/gist.py
index 65189b5..ab4d56d 100644
--- a/nikola/plugins/compile/rest/gist.py
+++ b/nikola/plugins/compile/rest/gist.py
@@ -1,16 +1,11 @@
# -*- coding: utf-8 -*-
# This file is public domain according to its author, Brian Hsu
+import requests
from docutils.parsers.rst import Directive, directives
from docutils import nodes
-try:
- import requests
-except ImportError:
- requests = None # NOQA
-
from nikola.plugin_categories import RestExtension
-from nikola.utils import req_missing
class Plugin(RestExtension):
@@ -64,22 +59,15 @@ class GitHubGist(Directive):
if 'file' in self.options:
filename = self.options['file']
- if requests is not None:
- rawGist = (self.get_raw_gist_with_filename(gistID, filename))
+ rawGist = (self.get_raw_gist_with_filename(gistID, filename))
embedHTML = ('<script src="https://gist.github.com/{0}.js'
'?file={1}"></script>').format(gistID, filename)
else:
- if requests is not None:
- rawGist = (self.get_raw_gist(gistID))
+ rawGist = (self.get_raw_gist(gistID))
embedHTML = ('<script src="https://gist.github.com/{0}.js">'
'</script>').format(gistID)
- if requests is None:
- reqnode = nodes.raw(
- '', req_missing('requests', 'have inline gist source',
- optional=True), format='html')
- else:
- reqnode = nodes.literal_block('', rawGist)
+ reqnode = nodes.literal_block('', rawGist)
return [nodes.raw('', embedHTML, format='html'),
nodes.raw('', '<noscript>', format='html'),
diff --git a/nikola/plugins/compile/rest/listing.py b/nikola/plugins/compile/rest/listing.py
index 23ec254..b8340cf 100644
--- a/nikola/plugins/compile/rest/listing.py
+++ b/nikola/plugins/compile/rest/listing.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -31,43 +31,95 @@
from __future__ import unicode_literals
import io
import os
+import uuid
try:
from urlparse import urlunsplit
except ImportError:
from urllib.parse import urlunsplit # NOQA
+import docutils.parsers.rst.directives.body
+import docutils.parsers.rst.directives.misc
from docutils import core
from docutils import nodes
from docutils.parsers.rst import Directive, directives
+from docutils.parsers.rst.roles import set_classes
from docutils.parsers.rst.directives.misc import Include
-try:
- from docutils.parsers.rst.directives.body import CodeBlock
-except ImportError: # docutils < 0.9 (Debian Sid For The Loss)
- class CodeBlock(Directive):
- required_arguments = 1
- has_content = True
- option_spec = {}
- CODE = '<pre>{0}</pre>'
-
- def run(self):
- """ Required by the Directive interface. Create docutils nodes """
- return [nodes.raw('', self.CODE.format('\n'.join(self.content)), format='html')]
- directives.register_directive('code', CodeBlock)
+from pygments.lexers import get_lexer_by_name
+import pygments
+import pygments.util
+from nikola import utils
from nikola.plugin_categories import RestExtension
-# Add sphinx compatibility option
-CodeBlock.option_spec['linenos'] = directives.unchanged
-
-class FlexibleCodeBlock(CodeBlock):
+# A sanitized version of docutils.parsers.rst.directives.body.CodeBlock.
+class CodeBlock(Directive):
+ """Parse and mark up content of a code block."""
+ optional_arguments = 1
+ option_spec = {'class': directives.class_option,
+ 'name': directives.unchanged,
+ 'number-lines': directives.unchanged, # integer or None
+ 'linenos': directives.unchanged,
+ 'tab-width': directives.nonnegative_int}
+ has_content = True
def run(self):
+ self.assert_has_content()
+
if 'linenos' in self.options:
self.options['number-lines'] = self.options['linenos']
- return super(FlexibleCodeBlock, self).run()
-CodeBlock = FlexibleCodeBlock
+ if 'tab-width' in self.options:
+ self.content = [x.replace('\t', ' ' * self.options['tab-width']) for x in self.content]
+
+ if self.arguments:
+ language = self.arguments[0]
+ else:
+ language = 'text'
+ set_classes(self.options)
+ classes = ['code']
+ if language:
+ classes.append(language)
+ if 'classes' in self.options:
+ classes.extend(self.options['classes'])
+
+ code = '\n'.join(self.content)
+
+ try:
+ lexer = get_lexer_by_name(language)
+ except pygments.util.ClassNotFound:
+ raise self.error('Cannot find pygments lexer for language "{0}"'.format(language))
+
+ if 'number-lines' in self.options:
+ linenos = 'table'
+ # optional argument `startline`, defaults to 1
+ try:
+ linenostart = int(self.options['number-lines'] or 1)
+ except ValueError:
+ raise self.error(':number-lines: with non-integer start value')
+ else:
+ linenos = False
+ linenostart = 1 # actually unused
+
+ if self.site.invariant: # for testing purposes
+ anchor_ref = 'rest_code_' + 'fixedvaluethatisnotauuid'
+ else:
+ anchor_ref = 'rest_code_' + uuid.uuid4().hex
+
+ formatter = utils.NikolaPygmentsHTML(anchor_ref=anchor_ref, classes=classes, linenos=linenos, linenostart=linenostart)
+ out = pygments.highlight(code, lexer, formatter)
+ node = nodes.raw('', out, format='html')
+
+ self.add_name(node)
+ # if called from "include", set the source
+ if 'source' in self.options:
+ node.attributes['source'] = self.options['source']
+
+ return [node]
+
+# Monkey-patch: replace insane docutils CodeBlock with our implementation.
+docutils.parsers.rst.directives.body.CodeBlock = CodeBlock
+docutils.parsers.rst.directives.misc.CodeBlock = CodeBlock
class Plugin(RestExtension):
@@ -79,11 +131,15 @@ class Plugin(RestExtension):
# Even though listings don't use CodeBlock anymore, I am
# leaving these to make the code directive work with
# docutils < 0.9
+ CodeBlock.site = site
+ directives.register_directive('code', CodeBlock)
directives.register_directive('code-block', CodeBlock)
directives.register_directive('sourcecode', CodeBlock)
directives.register_directive('listing', Listing)
+ Listing.folders = site.config['LISTINGS_FOLDERS']
return super(Plugin, self).set_site(site)
+
# Add sphinx compatibility option
listing_spec = Include.option_spec
listing_spec['linenos'] = directives.unchanged
@@ -104,9 +160,17 @@ class Listing(Include):
option_spec = listing_spec
def run(self):
- fname = self.arguments.pop(0)
+ _fname = self.arguments.pop(0)
+ fname = _fname.replace('/', os.sep)
lang = self.arguments.pop(0)
- fpath = os.path.join('listings', fname)
+ if len(self.folders) == 1:
+ listings_folder = next(iter(self.folders.keys()))
+ if fname.startswith(listings_folder):
+ fpath = os.path.join(fname) # new syntax: specify folder name
+ else:
+ fpath = os.path.join(listings_folder, fname) # old syntax: don't specify folder name
+ else:
+ fpath = os.path.join(fname) # must be new syntax: specify folder name
self.arguments.insert(0, fpath)
self.options['code'] = lang
if 'linenos' in self.options:
@@ -114,9 +178,9 @@ class Listing(Include):
with io.open(fpath, 'r+', encoding='utf8') as fileobject:
self.content = fileobject.read().splitlines()
self.state.document.settings.record_dependencies.add(fpath)
- target = urlunsplit(("link", 'listing', fname, '', ''))
+ target = urlunsplit(("link", 'listing', fpath.replace('\\', '/'), '', ''))
generated_nodes = (
- [core.publish_doctree('`{0} <{1}>`_'.format(fname, target))[0]])
+ [core.publish_doctree('`{0} <{1}>`_'.format(_fname, target))[0]])
generated_nodes += self.get_code_from_file(fileobject)
return generated_nodes
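
The listing directive now resolves its first argument against LISTINGS_FOLDERS, accepting both the old folder-less form and the new folder-qualified form when only one folder is configured. A hedged conf.py sketch plus the resulting resolution:

    # conf.py (default-style mapping: source folder -> output folder)
    LISTINGS_FOLDERS = {'listings': 'listings'}
    # With a single configured folder, both spellings point at the same file:
    #   .. listing:: hello.py python            -> listings/hello.py  (old syntax)
    #   .. listing:: listings/hello.py python   -> listings/hello.py  (new syntax)
    # With several folders configured, the folder name must be given explicitly.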
diff --git a/nikola/plugins/compile/rest/media.py b/nikola/plugins/compile/rest/media.py
index ccda559..0363d28 100644
--- a/nikola/plugins/compile/rest/media.py
+++ b/nikola/plugins/compile/rest/media.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
diff --git a/nikola/plugins/compile/rest/post_list.py b/nikola/plugins/compile/rest/post_list.py
index f719e31..ddbd82d 100644
--- a/nikola/plugins/compile/rest/post_list.py
+++ b/nikola/plugins/compile/rest/post_list.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2013-2014 Udo Spallek, Roberto Alsina and others.
+# Copyright © 2013-2015 Udo Spallek, Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -25,7 +25,9 @@
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import unicode_literals
+import os
import uuid
+import natsort
from docutils import nodes
from docutils.parsers.rst import Directive, directives
@@ -52,7 +54,7 @@ class PostList(Directive):
Post List
=========
:Directive Arguments: None.
- :Directive Options: lang, start, stop, reverse, tags, template, id
+ :Directive Options: lang, start, stop, reverse, sort, tags, template, id
:Directive Content: None.
Provides a reStructuredText directive to create a list of posts.
@@ -77,6 +79,10 @@ class PostList(Directive):
Reverse the order of the post-list.
Default is to not reverse the order of posts.
+ ``sort``: string
+ Sort post list by one of each post's attributes, usually ``title`` or a
+ custom ``priority``. Defaults to None (chronological sorting).
+
``tags`` : string [, string...]
Filter posts to show only posts having at least one of the ``tags``.
Defaults to None.
@@ -105,6 +111,7 @@ class PostList(Directive):
'start': int,
'stop': int,
'reverse': directives.flag,
+ 'sort': directives.unchanged,
'tags': directives.unchanged,
'slugs': directives.unchanged,
'all': directives.flag,
@@ -124,6 +131,7 @@ class PostList(Directive):
show_all = self.options.get('all', False)
lang = self.options.get('lang', utils.LocaleBorg().current_lang)
template = self.options.get('template', 'post_list_directive.tmpl')
+ sort = self.options.get('sort')
if self.site.invariant: # for testing purposes
post_list_id = self.options.get('id', 'post_list_' + 'fixedvaluethatisnotauuid')
else:
@@ -150,6 +158,9 @@ class PostList(Directive):
filtered_timeline.append(post)
+ if sort:
+ filtered_timeline = natsort.natsorted(filtered_timeline, key=lambda post: post.meta[lang][sort], alg=natsort.ns.F | natsort.ns.IC)
+
for post in filtered_timeline[start:stop:step]:
if slugs:
cont = True
@@ -160,10 +171,15 @@ class PostList(Directive):
if cont:
continue
+ bp = post.translated_base_path(lang)
+ if os.path.exists(bp):
+ self.state.document.settings.record_dependencies.add(bp)
+
posts += [post]
if not posts:
return []
+ self.state.document.settings.record_dependencies.add("####MAGIC####TIMELINE")
template_data = {
'lang': lang,
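
The new ``sort`` option orders the filtered timeline with natsort on an arbitrary per-post metadata field. A standalone sketch of the same call, using made-up stand-in posts:

    import natsort

    class FakePost(object):
        """Stand-in exposing the .meta[lang][field] shape the directive reads."""
        def __init__(self, title):
            self.meta = {'en': {'title': title}}

    timeline = [FakePost('Part 10'), FakePost('part 2'), FakePost('Part 1')]
    ordered = natsort.natsorted(
        timeline, key=lambda post: post.meta['en']['title'],
        alg=natsort.ns.F | natsort.ns.IC)  # numeric-aware, case-insensitive
    print([p.meta['en']['title'] for p in ordered])  # ['Part 1', 'part 2', 'Part 10']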
diff --git a/nikola/plugins/compile/rest/slides.py b/nikola/plugins/compile/rest/slides.py
index ea8e413..7826f6a 100644
--- a/nikola/plugins/compile/rest/slides.py
+++ b/nikola/plugins/compile/rest/slides.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
diff --git a/nikola/plugins/compile/rest/thumbnail.plugin b/nikola/plugins/compile/rest/thumbnail.plugin
new file mode 100644
index 0000000..3b73340
--- /dev/null
+++ b/nikola/plugins/compile/rest/thumbnail.plugin
@@ -0,0 +1,9 @@
+[Core]
+Name = rest_thumbnail
+Module = thumbnail
+
+[Documentation]
+Author = Pelle Nilsson
+Version = 0.1
+Website = http://getnikola.com
+Description = reST directive to facilitate enlargeable images with thumbnails
diff --git a/nikola/plugins/compile/rest/thumbnail.py b/nikola/plugins/compile/rest/thumbnail.py
new file mode 100644
index 0000000..5388d8d
--- /dev/null
+++ b/nikola/plugins/compile/rest/thumbnail.py
@@ -0,0 +1,69 @@
+# -*- coding: utf-8 -*-
+
+# Copyright © 2014-2015 Pelle Nilsson and others.
+
+# Permission is hereby granted, free of charge, to any
+# person obtaining a copy of this software and associated
+# documentation files (the "Software"), to deal in the
+# Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the
+# Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice
+# shall be included in all copies or substantial portions of
+# the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
+# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+import os
+
+from docutils.parsers.rst import directives
+from docutils.parsers.rst.directives.images import Image, Figure
+
+from nikola.plugin_categories import RestExtension
+
+
+class Plugin(RestExtension):
+
+ name = "rest_thumbnail"
+
+ def set_site(self, site):
+ self.site = site
+ directives.register_directive('thumbnail', Thumbnail)
+ return super(Plugin, self).set_site(site)
+
+
+class Thumbnail(Figure):
+
+ def align(argument):
+ return directives.choice(argument, Image.align_values)
+
+ def figwidth_value(argument):
+ if argument.lower() == 'image':
+ return 'image'
+ else:
+ return directives.length_or_percentage_or_unitless(argument, 'px')
+
+ option_spec = Image.option_spec.copy()
+ option_spec['figwidth'] = figwidth_value
+ option_spec['figclass'] = directives.class_option
+ has_content = True
+
+ def run(self):
+ uri = directives.uri(self.arguments[0])
+ self.options['target'] = uri
+ self.arguments[0] = '.thumbnail'.join(os.path.splitext(uri))
+ if self.content:
+ (node,) = Figure.run(self)
+ else:
+ (node,) = Image.run(self)
+ return [node]
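
The Thumbnail directive rewrites the image argument to its '.thumbnail' counterpart and keeps the original URI as the link target. The derivation in run() boils down to this (example path assumed):

    import os
    uri = '/images/photo.png'  # assumed example path
    thumbnail_uri = '.thumbnail'.join(os.path.splitext(uri))
    print(thumbnail_uri)  # /images/photo.thumbnail.png -- shown inline, linking to the full image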
diff --git a/nikola/plugins/compile/rest/vimeo.py b/nikola/plugins/compile/rest/vimeo.py
index 4b34dfe..bc44b0e 100644
--- a/nikola/plugins/compile/rest/vimeo.py
+++ b/nikola/plugins/compile/rest/vimeo.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -28,15 +28,11 @@
from docutils import nodes
from docutils.parsers.rst import Directive, directives
-try:
- import requests
-except ImportError:
- requests = None # NOQA
+import requests
import json
from nikola.plugin_categories import RestExtension
-from nikola.utils import req_missing
class Plugin(RestExtension):
@@ -94,10 +90,6 @@ class Vimeo(Directive):
return [nodes.raw('', CODE.format(**options), format='html')]
def check_modules(self):
- msg = None
- if requests is None:
- msg = req_missing(['requests'], 'use the vimeo directive', optional=True)
- return [nodes.raw('', '<div class="text-error">{0}</div>'.format(msg), format='html')]
return None
def set_video_size(self):
diff --git a/nikola/plugins/compile/rest/youtube.py b/nikola/plugins/compile/rest/youtube.py
index b32e77a..7c6bba1 100644
--- a/nikola/plugins/compile/rest/youtube.py
+++ b/nikola/plugins/compile/rest/youtube.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
diff --git a/nikola/plugins/loghandler/__init__.py b/nikola/plugins/loghandler/__init__.py
index 6ad8bac..a1d17a6 100644
--- a/nikola/plugins/loghandler/__init__.py
+++ b/nikola/plugins/loghandler/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
diff --git a/nikola/plugins/loghandler/smtp.plugin b/nikola/plugins/loghandler/smtp.plugin
index e914b3d..38c1d96 100644
--- a/nikola/plugins/loghandler/smtp.plugin
+++ b/nikola/plugins/loghandler/smtp.plugin
@@ -4,6 +4,6 @@ Module = smtp
[Documentation]
Author = Daniel Devine
-Version = 0.1
+Version = 1.0
Website = http://getnikola.com
Description = Log over smtp (email).
diff --git a/nikola/plugins/loghandler/smtp.py b/nikola/plugins/loghandler/smtp.py
index 2c9fd9c..146a658 100644
--- a/nikola/plugins/loghandler/smtp.py
+++ b/nikola/plugins/loghandler/smtp.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Daniel Devine and others.
+# Copyright © 2012-2015 Daniel Devine and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
diff --git a/nikola/plugins/loghandler/stderr.plugin b/nikola/plugins/loghandler/stderr.plugin
index 211d2b4..6c20ea1 100644
--- a/nikola/plugins/loghandler/stderr.plugin
+++ b/nikola/plugins/loghandler/stderr.plugin
@@ -4,6 +4,6 @@ Module = stderr
[Documentation]
Author = Daniel Devine
-Version = 0.1
+Version = 1.0
Website = http://getnikola.com
Description = Log to stderr, the default logger.
diff --git a/nikola/plugins/loghandler/stderr.py b/nikola/plugins/loghandler/stderr.py
index 593c381..79ace68 100644
--- a/nikola/plugins/loghandler/stderr.py
+++ b/nikola/plugins/loghandler/stderr.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Daniel Devine and others.
+# Copyright © 2012-2015 Daniel Devine and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
diff --git a/nikola/plugins/misc/scan_posts.plugin b/nikola/plugins/misc/scan_posts.plugin
new file mode 100644
index 0000000..6d2351f
--- /dev/null
+++ b/nikola/plugins/misc/scan_posts.plugin
@@ -0,0 +1,10 @@
+[Core]
+Name = scan_posts
+Module = scan_posts
+
+[Documentation]
+Author = Roberto Alsina
+Version = 1.0
+Website = http://getnikola.com
+Description = Scan posts and create timeline
+
diff --git a/nikola/plugins/misc/scan_posts.py b/nikola/plugins/misc/scan_posts.py
new file mode 100644
index 0000000..a6f04e6
--- /dev/null
+++ b/nikola/plugins/misc/scan_posts.py
@@ -0,0 +1,100 @@
+# -*- coding: utf-8 -*-
+
+# Copyright © 2012-2015 Roberto Alsina and others.
+
+# Permission is hereby granted, free of charge, to any
+# person obtaining a copy of this software and associated
+# documentation files (the "Software"), to deal in the
+# Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the
+# Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice
+# shall be included in all copies or substantial portions of
+# the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
+# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+from __future__ import unicode_literals, print_function
+import glob
+import os
+import sys
+
+from nikola.plugin_categories import PostScanner
+from nikola import utils
+from nikola.post import Post
+
+
+class ScanPosts(PostScanner):
+ """Render pages into output."""
+
+ name = "scan_posts"
+
+ def scan(self):
+ """Create list of posts from POSTS and PAGES options."""
+
+ seen = set([])
+ if not self.site.quiet:
+ print("Scanning posts", end='', file=sys.stderr)
+
+ timeline = []
+
+ for wildcard, destination, template_name, use_in_feeds in \
+ self.site.config['post_pages']:
+ if not self.site.quiet:
+ print(".", end='', file=sys.stderr)
+ dirname = os.path.dirname(wildcard)
+ for dirpath, _, _ in os.walk(dirname, followlinks=True):
+ dest_dir = os.path.normpath(os.path.join(destination,
+ os.path.relpath(dirpath, dirname))) # output/destination/foo/
+ # Get all the untranslated paths
+ dir_glob = os.path.join(dirpath, os.path.basename(wildcard)) # posts/foo/*.rst
+ untranslated = glob.glob(dir_glob)
+ # And now get all the translated paths
+ translated = set([])
+ for lang in self.site.config['TRANSLATIONS'].keys():
+ if lang == self.site.config['DEFAULT_LANG']:
+ continue
+ lang_glob = utils.get_translation_candidate(self.site.config, dir_glob, lang) # posts/foo/*.LANG.rst
+ translated = translated.union(set(glob.glob(lang_glob)))
+ # untranslated globs like *.rst often match translated paths too, so remove them
+ # and ensure x.rst is not in the translated set
+ untranslated = set(untranslated) - translated
+
+ # also remove from translated paths that are translations of
+ # paths in untranslated_list, so x.es.rst is not in the untranslated set
+ for p in untranslated:
+ translated = translated - set([utils.get_translation_candidate(self.site.config, p, l) for l in self.site.config['TRANSLATIONS'].keys()])
+
+ full_list = list(translated) + list(untranslated)
+ # Skip files inside any hidden (dot) directory, e.g. .ipynb_checkpoints
+ full_list = [p for p in full_list
+ if not any([x.startswith('.')
+ for x in p.split(os.sep)])]
+
+ for base_path in full_list:
+ if base_path in seen:
+ continue
+ else:
+ seen.add(base_path)
+ post = Post(
+ base_path,
+ self.site.config,
+ dest_dir,
+ use_in_feeds,
+ self.site.MESSAGES,
+ template_name,
+ self.site.get_compiler(base_path)
+ )
+ timeline.append(post)
+
+ return timeline
diff --git a/nikola/plugins/task/__init__.py b/nikola/plugins/task/__init__.py
index 6ad8bac..a1d17a6 100644
--- a/nikola/plugins/task/__init__.py
+++ b/nikola/plugins/task/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
diff --git a/nikola/plugins/task/archive.plugin b/nikola/plugins/task/archive.plugin
index 448b115..6687209 100644
--- a/nikola/plugins/task/archive.plugin
+++ b/nikola/plugins/task/archive.plugin
@@ -4,7 +4,7 @@ Module = archive
[Documentation]
Author = Roberto Alsina
-Version = 0.1
+Version = 1.0
Website = http://getnikola.com
Description = Generates the blog's archive pages.
diff --git a/nikola/plugins/task/archive.py b/nikola/plugins/task/archive.py
index 4f1ab19..533be69 100644
--- a/nikola/plugins/task/archive.py
+++ b/nikola/plugins/task/archive.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -24,12 +24,14 @@
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+import copy
import os
# for tearDown with _reload we cannot use 'import from' to access LocaleBorg
import nikola.utils
+import datetime
from nikola.plugin_categories import Task
-from nikola.utils import config_changed
+from nikola.utils import config_changed, adjust_name_for_index_path, adjust_name_for_index_link
class Archive(Task):
@@ -39,133 +41,191 @@ class Archive(Task):
def set_site(self, site):
site.register_path_handler('archive', self.archive_path)
+ site.register_path_handler('archive_atom', self.archive_atom_path)
return super(Archive, self).set_site(site)
+ def _prepare_task(self, kw, name, lang, posts, items, template_name,
+ title, deps_translatable=None):
+ # name: used to build permalink and destination
+ # posts, items: posts or items; only one of them should be used,
+ # the other be None
+ # template_name: name of the template to use
+ # title: the (translated) title for the generated page
+ # deps_translatable: dependencies (None if not added)
+ assert posts is not None or items is not None
+
+ context = {}
+ context["lang"] = lang
+ context["title"] = title
+ context["permalink"] = self.site.link("archive", name, lang)
+ if posts is not None:
+ context["posts"] = posts
+ n = len(posts)
+ else:
+ context["items"] = items
+ n = len(items)
+ task = self.site.generic_post_list_renderer(
+ lang,
+ [],
+ os.path.join(kw['output_folder'], self.site.path("archive", name, lang)),
+ template_name,
+ kw['filters'],
+ context,
+ )
+
+ task_cfg = {1: copy.copy(kw), 2: n}
+ if deps_translatable is not None:
+ task_cfg[3] = deps_translatable
+ task['uptodate'] = task['uptodate'] + [config_changed(task_cfg, 'nikola.plugins.task.archive')]
+ task['basename'] = self.name
+ return task
+
+ def _generate_posts_task(self, kw, name, lang, posts, title, deps_translatable=None):
+ posts = sorted(posts, key=lambda a: a.date)
+ posts.reverse()
+ if kw['archives_are_indexes']:
+ def page_link(i, displayed_i, num_pages, force_addition, extension=None):
+ feed = "_atom" if extension == ".atom" else ""
+ return adjust_name_for_index_link(self.site.link("archive" + feed, name, lang), i, displayed_i,
+ lang, self.site, force_addition, extension)
+
+ def page_path(i, displayed_i, num_pages, force_addition, extension=None):
+ feed = "_atom" if extension == ".atom" else ""
+ return adjust_name_for_index_path(self.site.path("archive" + feed, name, lang), i, displayed_i,
+ lang, self.site, force_addition, extension)
+
+ uptodate = []
+ if deps_translatable is not None:
+ uptodate += [config_changed(deps_translatable, 'nikola.plugins.task.archive')]
+ yield self.site.generic_index_renderer(
+ lang,
+ posts,
+ title,
+ "archiveindex.tmpl",
+ {"archive_name": name,
+ "is_feed_stale": kw["is_feed_stale"]},
+ kw,
+ str(self.name),
+ page_link,
+ page_path,
+ uptodate)
+ else:
+ yield self._prepare_task(kw, name, lang, posts, None, "list_post.tmpl", title, deps_translatable)
+
def gen_tasks(self):
kw = {
"messages": self.site.MESSAGES,
"translations": self.site.config['TRANSLATIONS'],
"output_folder": self.site.config['OUTPUT_FOLDER'],
"filters": self.site.config['FILTERS'],
+ "archives_are_indexes": self.site.config['ARCHIVES_ARE_INDEXES'],
"create_monthly_archive": self.site.config['CREATE_MONTHLY_ARCHIVE'],
"create_single_archive": self.site.config['CREATE_SINGLE_ARCHIVE'],
+ "show_untranslated_posts": self.site.config['SHOW_UNTRANSLATED_POSTS'],
+ "create_full_archives": self.site.config['CREATE_FULL_ARCHIVES'],
+ "create_daily_archive": self.site.config['CREATE_DAILY_ARCHIVE'],
+ "pretty_urls": self.site.config['PRETTY_URLS'],
+ "strip_indexes": self.site.config['STRIP_INDEXES'],
+ "index_file": self.site.config['INDEX_FILE'],
+ "generate_atom": self.site.config["GENERATE_ATOM"],
}
self.site.scan_posts()
yield self.group_task()
# TODO add next/prev links for years
- if kw['create_monthly_archive'] and kw['create_single_archive']:
+ if (kw['create_monthly_archive'] and kw['create_single_archive']) and not kw['create_full_archives']:
raise Exception('Cannot create monthly and single archives at the same time.')
for lang in kw["translations"]:
- archdata = self.site.posts_per_year
- # A bit of a hack.
- if kw['create_single_archive']:
- archdata = {None: self.site.posts}
+ if kw['create_single_archive'] and not kw['create_full_archives']:
+ # if we are creating one single archive
+ archdata = {}
+ else:
+ # if we are not creating one single archive, start with all years
+ archdata = self.site.posts_per_year.copy()
+ if kw['create_single_archive'] or kw['create_full_archives']:
+ # if we are creating one single archive, or full archives
+ archdata[None] = self.site.posts # for create_single_archive
for year, posts in archdata.items():
- output_name = os.path.join(
- kw['output_folder'], self.site.path("archive", year, lang))
- context = {}
- context["lang"] = lang
+ # Filter untranslated posts (Issue #1360)
+ if not kw["show_untranslated_posts"]:
+ posts = [p for p in posts if lang in p.translated_to]
+
+ # Add archive per year or total archive
if year:
- context["title"] = kw["messages"][lang]["Posts for year %s"] % year
+ title = kw["messages"][lang]["Posts for year %s"] % year
+ kw["is_feed_stale"] = (datetime.datetime.utcnow().strftime("%Y") != year)
else:
- context["title"] = kw["messages"][lang]["Archive"]
- context["permalink"] = self.site.link("archive", year, lang)
- if not kw["create_monthly_archive"]:
- template_name = "list_post.tmpl"
- post_list = sorted(posts, key=lambda a: a.date)
- post_list.reverse()
- context["posts"] = post_list
- else: # Monthly archives, just list the months
- months = set([(m.split('/')[1], self.site.link("archive", m, lang)) for m in self.site.posts_per_month.keys() if m.startswith(str(year))])
- months = sorted(list(months))
- months.reverse()
- template_name = "list.tmpl"
- context["items"] = [[nikola.utils.LocaleBorg().get_month_name(int(month), lang), link] for month, link in months]
- post_list = []
- task = self.site.generic_post_list_renderer(
- lang,
- [],
- output_name,
- template_name,
- kw['filters'],
- context,
- )
- n = len(post_list) if 'posts' in context else len(months)
-
+ title = kw["messages"][lang]["Archive"]
+ kw["is_feed_stale"] = False
deps_translatable = {}
for k in self.site._GLOBAL_CONTEXT_TRANSLATABLE:
deps_translatable[k] = self.site.GLOBAL_CONTEXT[k](lang)
+ if not kw["create_monthly_archive"] or kw["create_full_archives"]:
+ yield self._generate_posts_task(kw, year, lang, posts, title, deps_translatable)
+ else:
+ months = set([(m.split('/')[1], self.site.link("archive", m, lang)) for m in self.site.posts_per_month.keys() if m.startswith(str(year))])
+ months = sorted(list(months))
+ months.reverse()
+ items = [[nikola.utils.LocaleBorg().get_month_name(int(month), lang), link] for month, link in months]
+ yield self._prepare_task(kw, year, lang, None, items, "list.tmpl", title, deps_translatable)
- task_cfg = {1: task['uptodate'][0].config, 2: kw, 3: n, 4: deps_translatable}
- task['uptodate'] = [config_changed(task_cfg)]
- task['basename'] = self.name
- yield task
-
- if not kw["create_monthly_archive"]:
+ if not kw["create_monthly_archive"] and not kw["create_full_archives"] and not kw["create_daily_archive"]:
continue # Just to avoid nesting the other loop in this if
- template_name = "list_post.tmpl"
for yearmonth, posts in self.site.posts_per_month.items():
- output_name = os.path.join(
- kw['output_folder'], self.site.path("archive", yearmonth,
- lang))
+ # Add archive per month
year, month = yearmonth.split('/')
- post_list = sorted(posts, key=lambda a: a.date)
- post_list.reverse()
- context = {}
- context["lang"] = lang
- context["posts"] = post_list
- context["permalink"] = self.site.link("archive", year, lang)
-
- context["title"] = kw["messages"][lang]["Posts for {month} {year}"].format(
- year=year, month=nikola.utils.LocaleBorg().get_month_name(int(month), lang))
- task = self.site.generic_post_list_renderer(
- lang,
- post_list,
- output_name,
- template_name,
- kw['filters'],
- context,
- )
- task_cfg = {1: task['uptodate'][0].config, 2: kw, 3: len(post_list)}
- task['uptodate'] = [config_changed(task_cfg)]
- task['basename'] = self.name
- yield task
-
- if not kw['create_single_archive']:
+
+ kw["is_feed_stale"] = (datetime.datetime.utcnow().strftime("%Y/%m") != yearmonth)
+
+ # Filter untranslated posts (via Issue #1360)
+ if not kw["show_untranslated_posts"]:
+ posts = [p for p in posts if lang in p.translated_to]
+
+ if kw["create_monthly_archive"] or kw["create_full_archives"]:
+ title = kw["messages"][lang]["Posts for {month} {year}"].format(
+ year=year, month=nikola.utils.LocaleBorg().get_month_name(int(month), lang))
+ yield self._generate_posts_task(kw, yearmonth, lang, posts, title)
+
+ if not kw["create_full_archives"] and not kw["create_daily_archive"]:
+ continue # Just to avoid nesting the other loop in this if
+ # Add archive per day
+ days = dict()
+ for p in posts:
+ if p.date.day not in days:
+ days[p.date.day] = list()
+ days[p.date.day].append(p)
+ for day, posts in days.items():
+ title = kw["messages"][lang]["Posts for {month} {day}, {year}"].format(
+ year=year, month=nikola.utils.LocaleBorg().get_month_name(int(month), lang), day=day)
+ yield self._generate_posts_task(kw, yearmonth + '/{0:02d}'.format(day), lang, posts, title)
+
+ if not kw['create_single_archive'] and not kw['create_full_archives']:
# And an "all your years" page for yearly and monthly archives
+ if "is_feed_stale" in kw:
+ del kw["is_feed_stale"]
years = list(self.site.posts_per_year.keys())
years.sort(reverse=True)
- template_name = "list.tmpl"
kw['years'] = years
for lang in kw["translations"]:
- context = {}
- output_name = os.path.join(
- kw['output_folder'], self.site.path("archive", None,
- lang))
- context["title"] = kw["messages"][lang]["Archive"]
- context["items"] = [(y, self.site.link("archive", y, lang))
- for y in years]
- context["permalink"] = self.site.link("archive", None, lang)
- task = self.site.generic_post_list_renderer(
- lang,
- [],
- output_name,
- template_name,
- kw['filters'],
- context,
- )
- task_cfg = {1: task['uptodate'][0].config, 2: kw, 3: len(years)}
- task['uptodate'] = [config_changed(task_cfg)]
- task['basename'] = self.name
- yield task
-
- def archive_path(self, name, lang):
+ items = [(y, self.site.link("archive", y, lang)) for y in years]
+ yield self._prepare_task(kw, None, lang, None, items, "list.tmpl", kw["messages"][lang]["Archive"])
+
+ def archive_path(self, name, lang, is_feed=False):
+ if is_feed:
+ extension = ".atom"
+ archive_file = os.path.splitext(self.site.config['ARCHIVE_FILENAME'])[0] + extension
+ index_file = os.path.splitext(self.site.config['INDEX_FILE'])[0] + extension
+ else:
+ archive_file = self.site.config['ARCHIVE_FILENAME']
+ index_file = self.site.config['INDEX_FILE']
if name:
return [_f for _f in [self.site.config['TRANSLATIONS'][lang],
self.site.config['ARCHIVE_PATH'], name,
- self.site.config['INDEX_FILE']] if _f]
+ index_file] if _f]
else:
return [_f for _f in [self.site.config['TRANSLATIONS'][lang],
self.site.config['ARCHIVE_PATH'],
- self.site.config['ARCHIVE_FILENAME']] if _f]
+ archive_file] if _f]
+
+ def archive_atom_path(self, name, lang):
+ return self.archive_path(name, lang, is_feed=True)
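
The is_feed branch above simply swaps the configured file name's extension for ".atom", so the Atom feed for an archive page sits next to its HTML page. A minimal sketch of that swap, assuming Nikola's default ARCHIVE_FILENAME ("archive.html") and INDEX_FILE ("index.html"); this is not the plugin code itself:

    import os

    def feed_name(filename, is_feed):
        # Replace the extension with .atom when a feed path is requested.
        return os.path.splitext(filename)[0] + '.atom' if is_feed else filename

    print(feed_name('archive.html', True))   # archive.atom
    print(feed_name('index.html', False))    # index.html
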
diff --git a/nikola/plugins/task/bundles.plugin b/nikola/plugins/task/bundles.plugin
index e0b0a4d..3fe049b 100644
--- a/nikola/plugins/task/bundles.plugin
+++ b/nikola/plugins/task/bundles.plugin
@@ -4,7 +4,7 @@ Module = bundles
[Documentation]
Author = Roberto Alsina
-Version = 0.1
+Version = 1.0
Website = http://getnikola.com
Description = Theme bundles using WebAssets
diff --git a/nikola/plugins/task/bundles.py b/nikola/plugins/task/bundles.py
index fca6924..6f88d0c 100644
--- a/nikola/plugins/task/bundles.py
+++ b/nikola/plugins/task/bundles.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -43,11 +43,12 @@ class BuildBundles(LateTask):
name = "create_bundles"
def set_site(self, site):
- super(BuildBundles, self).set_site(site)
- if webassets is None and self.site.config['USE_BUNDLES']:
+ self.logger = utils.get_logger('bundles', site.loghandlers)
+ if webassets is None and site.config['USE_BUNDLES']:
utils.req_missing(['webassets'], 'USE_BUNDLES', optional=True)
- utils.LOGGER.warn('Setting USE_BUNDLES to False.')
- self.site.config['USE_BUNDLES'] = False
+ self.logger.warn('Setting USE_BUNDLES to False.')
+ site.config['USE_BUNDLES'] = False
+ super(BuildBundles, self).set_site(site)
def gen_tasks(self):
"""Bundle assets using WebAssets."""
@@ -74,7 +75,12 @@ class BuildBundles(LateTask):
bundle = webassets.Bundle(*inputs, output=os.path.basename(output))
env.register(output, bundle)
# This generates the file
- env[output].urls()
+ try:
+ env[output].urls()
+ except Exception as e:
+ self.logger.error("Failed to build bundles.")
+ self.logger.exception(e)
+ self.logger.notice("Try running ``nikola clean`` and building again.")
else:
with open(os.path.join(out_dir, os.path.basename(output)), 'wb+'):
pass # Create empty file
@@ -91,8 +97,7 @@ class BuildBundles(LateTask):
files.append(os.path.join(dname, fname))
file_dep = [os.path.join(kw['output_folder'], fname)
for fname in files if
- utils.get_asset_path(fname, self.site.THEMES, self.site.config['FILES_FOLDERS'])
- or fname == 'assets/css/code.css']
+ utils.get_asset_path(fname, self.site.THEMES, self.site.config['FILES_FOLDERS']) or fname == os.path.join('assets', 'css', 'code.css')]
# code.css will be generated by us if it does not exist in
# FILES_FOLDERS or theme assets. It is guaranteed that the
# generation will happen before this task.
@@ -107,7 +112,7 @@ class BuildBundles(LateTask):
utils.config_changed({
1: kw,
2: file_dep
- })],
+ }, 'nikola.plugins.task.bundles')],
'clean': True,
}
yield utils.apply_filters(task, kw['filters'])
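
Throughout this release the uptodate checkers gain a second argument, e.g. utils.config_changed(kw, 'nikola.plugins.task.bundles'), naming the task that owns the configuration. A hedged sketch of why such an identifier helps -- two tasks hashing the same dict would otherwise collide on one dependency key in doit's database; this only mimics the call sites above and is not Nikola's config_changed implementation:

    import hashlib
    import json

    def config_key(config, identifier=''):
        # Key the serialized config on the owning task's identifier.
        payload = identifier + json.dumps(config, sort_keys=True, default=str)
        return hashlib.md5(payload.encode('utf-8')).hexdigest()

    kw = {'output_folder': 'output', 'filters': {}}
    assert config_key(kw, 'nikola.plugins.task.bundles') != \
        config_key(kw, 'nikola.plugins.task.copy_files')
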
diff --git a/nikola/plugins/task/copy_assets.plugin b/nikola/plugins/task/copy_assets.plugin
index 28b9e32..0530ebf 100644
--- a/nikola/plugins/task/copy_assets.plugin
+++ b/nikola/plugins/task/copy_assets.plugin
@@ -4,7 +4,7 @@ Module = copy_assets
[Documentation]
Author = Roberto Alsina
-Version = 0.1
+Version = 1.0
Website = http://getnikola.com
Description = Copy theme assets into output.
diff --git a/nikola/plugins/task/copy_assets.py b/nikola/plugins/task/copy_assets.py
index 29aa083..a72bfdf 100644
--- a/nikola/plugins/task/copy_assets.py
+++ b/nikola/plugins/task/copy_assets.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -72,7 +72,7 @@ class CopyAssets(Task):
if task['name'] in tasks:
continue
tasks[task['name']] = task
- task['uptodate'] = [utils.config_changed(kw)]
+ task['uptodate'] = [utils.config_changed(kw, 'nikola.plugins.task.copy_assets')]
task['basename'] = self.name
if code_css_input:
task['file_dep'] = [code_css_input]
@@ -99,7 +99,7 @@ class CopyAssets(Task):
'basename': self.name,
'name': code_css_path,
'targets': [code_css_path],
- 'uptodate': [utils.config_changed(kw), testcontents],
+ 'uptodate': [utils.config_changed(kw, 'nikola.plugins.task.copy_assets'), testcontents],
'actions': [(create_code_css, [])],
'clean': True,
}
diff --git a/nikola/plugins/task/copy_files.plugin b/nikola/plugins/task/copy_files.plugin
index 45c9e0d..073676b 100644
--- a/nikola/plugins/task/copy_files.plugin
+++ b/nikola/plugins/task/copy_files.plugin
@@ -4,7 +4,7 @@ Module = copy_files
[Documentation]
Author = Roberto Alsina
-Version = 0.1
+Version = 1.0
Website = http://getnikola.com
Description = Copy static files into the output.
diff --git a/nikola/plugins/task/copy_files.py b/nikola/plugins/task/copy_files.py
index 1d31756..9a039f1 100644
--- a/nikola/plugins/task/copy_files.py
+++ b/nikola/plugins/task/copy_files.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -51,5 +51,5 @@ class CopyFiles(Task):
real_dst = os.path.join(dst, kw['files_folders'][src])
for task in utils.copy_tree(src, real_dst, link_cutoff=dst):
task['basename'] = self.name
- task['uptodate'] = [utils.config_changed(kw)]
+ task['uptodate'] = [utils.config_changed(kw, 'nikola.plugins.task.copy_files')]
yield utils.apply_filters(task, filters, skip_ext=['.html'])
diff --git a/nikola/plugins/task/galleries.plugin b/nikola/plugins/task/galleries.plugin
index 8352151..73085cd 100644
--- a/nikola/plugins/task/galleries.plugin
+++ b/nikola/plugins/task/galleries.plugin
@@ -4,7 +4,7 @@ Module = galleries
[Documentation]
Author = Roberto Alsina
-Version = 0.1
+Version = 1.0
Website = http://getnikola.com
Description = Create image galleries automatically.
diff --git a/nikola/plugins/task/galleries.py b/nikola/plugins/task/galleries.py
index f835444..e887f18 100644
--- a/nikola/plugins/task/galleries.py
+++ b/nikola/plugins/task/galleries.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -31,34 +31,30 @@ import glob
import json
import mimetypes
import os
+import sys
try:
from urlparse import urljoin
except ImportError:
from urllib.parse import urljoin # NOQA
import natsort
-Image = None
try:
- from PIL import Image, ExifTags # NOQA
+ from PIL import Image # NOQA
except ImportError:
- try:
- import Image as _Image
- import ExifTags
- Image = _Image
- except ImportError:
- pass
+ import Image as _Image
+ Image = _Image
import PyRSS2Gen as rss
from nikola.plugin_categories import Task
from nikola import utils
+from nikola.image_processing import ImageProcessor
from nikola.post import Post
-from nikola.utils import req_missing
_image_size_cache = {}
-class Galleries(Task):
+class Galleries(Task, ImageProcessor):
"""Render image galleries."""
name = 'render_galleries'
@@ -66,47 +62,84 @@ class Galleries(Task):
def set_site(self, site):
site.register_path_handler('gallery', self.gallery_path)
+ site.register_path_handler('gallery_global', self.gallery_global_path)
site.register_path_handler('gallery_rss', self.gallery_rss_path)
+
+ self.logger = utils.get_logger('render_galleries', site.loghandlers)
+
+ self.kw = {
+ 'thumbnail_size': site.config['THUMBNAIL_SIZE'],
+ 'max_image_size': site.config['MAX_IMAGE_SIZE'],
+ 'output_folder': site.config['OUTPUT_FOLDER'],
+ 'cache_folder': site.config['CACHE_FOLDER'],
+ 'default_lang': site.config['DEFAULT_LANG'],
+ 'use_filename_as_title': site.config['USE_FILENAME_AS_TITLE'],
+ 'gallery_folders': site.config['GALLERY_FOLDERS'],
+ 'sort_by_date': site.config['GALLERY_SORT_BY_DATE'],
+ 'filters': site.config['FILTERS'],
+ 'translations': site.config['TRANSLATIONS'],
+ 'global_context': site.GLOBAL_CONTEXT,
+ 'feed_length': site.config['FEED_LENGTH'],
+ 'tzinfo': site.tzinfo,
+ 'comments_in_galleries': site.config['COMMENTS_IN_GALLERIES'],
+ 'generate_rss': site.config['GENERATE_RSS'],
+ }
+
+ # Verify that no folder in GALLERY_FOLDERS appears twice
+ appearing_paths = set()
+ for source, dest in self.kw['gallery_folders'].items():
+ if source in appearing_paths or dest in appearing_paths:
+ problem = source if source in appearing_paths else dest
+ utils.LOGGER.error("The gallery input or output folder '{0}' appears in more than one entry in GALLERY_FOLDERS, exiting.".format(problem))
+ sys.exit(1)
+ appearing_paths.add(source)
+ appearing_paths.add(dest)
+
+ # Find all galleries we need to process
+ self.find_galleries()
+ # Create self.gallery_links
+ self.create_galleries_paths()
+
return super(Galleries, self).set_site(site)
+ def _find_gallery_path(self, name):
+ # The lookup via self.proper_gallery_links and self.improper_gallery_links
+ # is similar to the one in listings.py.
+ if name in self.proper_gallery_links:
+ return self.proper_gallery_links[name]
+ elif name in self.improper_gallery_links:
+ candidates = self.improper_gallery_links[name]
+ if len(candidates) == 1:
+ return candidates[0]
+ self.logger.error("Gallery name '{0}' is not unique! Possible output paths: {1}".format(name, candidates))
+ else:
+ self.logger.error("Unknown gallery '{0}'!".format(name))
+ self.logger.info("Known galleries: " + str(list(self.proper_gallery_links.keys())))
+ sys.exit(1)
+
def gallery_path(self, name, lang):
- return [_f for _f in [self.site.config['TRANSLATIONS'][lang],
- self.site.config['GALLERY_PATH'], name,
- self.site.config['INDEX_FILE']] if _f]
+ gallery_path = self._find_gallery_path(name)
+ return [_f for _f in [self.site.config['TRANSLATIONS'][lang]] +
+ gallery_path.split(os.sep) +
+ [self.site.config['INDEX_FILE']] if _f]
+
+ def gallery_global_path(self, name, lang):
+ gallery_path = self._find_gallery_path(name)
+ return [_f for _f in gallery_path.split(os.sep) +
+ [self.site.config['INDEX_FILE']] if _f]
def gallery_rss_path(self, name, lang):
- return [_f for _f in [self.site.config['TRANSLATIONS'][lang],
- self.site.config['GALLERY_PATH'], name,
- 'rss.xml'] if _f]
+ gallery_path = self._find_gallery_path(name)
+ return [_f for _f in [self.site.config['TRANSLATIONS'][lang]] +
+ gallery_path.split(os.sep) +
+ ['rss.xml'] if _f]
def gen_tasks(self):
"""Render image galleries."""
- if Image is None:
- req_missing(['pillow'], 'render galleries')
-
- self.logger = utils.get_logger('render_galleries', self.site.loghandlers)
- self.image_ext_list = ['.jpg', '.png', '.jpeg', '.gif', '.svg', '.bmp', '.tiff']
+ self.image_ext_list = self.image_ext_list_builtin
self.image_ext_list.extend(self.site.config.get('EXTRA_IMAGE_EXTENSIONS', []))
- self.kw = {
- 'thumbnail_size': self.site.config['THUMBNAIL_SIZE'],
- 'max_image_size': self.site.config['MAX_IMAGE_SIZE'],
- 'output_folder': self.site.config['OUTPUT_FOLDER'],
- 'cache_folder': self.site.config['CACHE_FOLDER'],
- 'default_lang': self.site.config['DEFAULT_LANG'],
- 'use_filename_as_title': self.site.config['USE_FILENAME_AS_TITLE'],
- 'gallery_path': self.site.config['GALLERY_PATH'],
- 'sort_by_date': self.site.config['GALLERY_SORT_BY_DATE'],
- 'filters': self.site.config['FILTERS'],
- 'translations': self.site.config['TRANSLATIONS'],
- 'global_context': self.site.GLOBAL_CONTEXT,
- 'feed_length': self.site.config['FEED_LENGTH'],
- 'tzinfo': self.site.tzinfo,
- 'comments_in_galleries': self.site.config['COMMENTS_IN_GALLERIES'],
- 'generate_rss': self.site.config['GENERATE_RSS'],
- }
-
for k, v in self.site.GLOBAL_CONTEXT['template_hooks'].items():
self.kw['||template_hooks|{0}||'.format(k)] = v._items
@@ -114,22 +147,19 @@ class Galleries(Task):
template_name = "gallery.tmpl"
- # Find all galleries we need to process
- self.find_galleries()
-
# Create all output folders
for task in self.create_galleries():
yield task
# For each gallery:
- for gallery in self.gallery_list:
+ for gallery, input_folder, output_folder in self.gallery_list:
# Create subfolder list
folder_list = [(x, x.split(os.sep)[-2]) for x in
glob.glob(os.path.join(gallery, '*') + os.sep)]
# Parse index into a post (with translations)
- post = self.parse_index(gallery)
+ post = self.parse_index(gallery, input_folder, output_folder)
# Create image list, filter exclusions
image_list = self.get_image_list(gallery)
@@ -143,12 +173,12 @@ class Galleries(Task):
# Create thumbnails and large images in destination
for image in image_list:
- for task in self.create_target_images(image):
+ for task in self.create_target_images(image, input_folder):
yield task
# Remove excluded images
for image in self.get_excluded_images(gallery):
- for task in self.remove_excluded_image(image):
+ for task in self.remove_excluded_image(image, input_folder):
yield task
crumbs = utils.get_crumbs(gallery, index_folder=self)
@@ -160,9 +190,7 @@ class Galleries(Task):
dst = os.path.join(
self.kw['output_folder'],
- self.site.path(
- "gallery",
- os.path.relpath(gallery, self.kw['gallery_path']), lang))
+ self.site.path("gallery", gallery, lang))
dst = os.path.normpath(dst)
for k in self.site._GLOBAL_CONTEXT_TRANSLATABLE:
@@ -187,25 +215,27 @@ class Galleries(Task):
img_titles = [''] * len(image_name_list)
thumbs = ['.thumbnail'.join(os.path.splitext(p)) for p in image_list]
- thumbs = [os.path.join(self.kw['output_folder'], t) for t in thumbs]
- dest_img_list = [os.path.join(self.kw['output_folder'], t) for t in image_list]
+ thumbs = [os.path.join(self.kw['output_folder'], output_folder, os.path.relpath(t, input_folder)) for t in thumbs]
+ dst_img_list = [os.path.join(output_folder, os.path.relpath(t, input_folder)) for t in image_list]
+ dest_img_list = [os.path.join(self.kw['output_folder'], t) for t in dst_img_list]
folders = []
# Generate friendly gallery names
for path, folder in folder_list:
- fpost = self.parse_index(path)
+ fpost = self.parse_index(path, input_folder, output_folder)
if fpost:
ft = fpost.title(lang) or folder
else:
ft = folder
+ if not folder.endswith('/'):
+ folder += '/'
folders.append((folder, ft))
- context["folders"] = natsort.natsorted(folders)
+ context["folders"] = natsort.natsorted(
+ folders, alg=natsort.ns.F | natsort.ns.IC)
context["crumbs"] = crumbs
- context["permalink"] = self.site.link(
- "gallery", os.path.basename(
- os.path.relpath(gallery, self.kw['gallery_path'])), lang)
+ context["permalink"] = self.site.link("gallery", gallery, lang)
context["enable_comments"] = self.kw['comments_in_galleries']
context["thumbnail_size"] = self.kw["thumbnail_size"]
@@ -216,15 +246,18 @@ class Galleries(Task):
'targets': [post.translated_base_path(lang)],
'file_dep': post.fragment_deps(lang),
'actions': [(post.compile, [lang])],
- 'uptodate': [utils.config_changed(self.kw)]
+ 'uptodate': [utils.config_changed(self.kw, 'nikola.plugins.task.galleries:post')] + post.fragment_deps_uptodate(lang)
}
context['post'] = post
else:
context['post'] = None
file_dep = self.site.template_system.template_deps(
template_name) + image_list + thumbs
+ file_dep_dest = self.site.template_system.template_deps(
+ template_name) + dest_img_list + thumbs
if post:
file_dep += [post.translated_base_path(l) for l in self.kw['translations']]
+ file_dep_dest += [post.translated_base_path(l) for l in self.kw['translations']]
yield utils.apply_filters({
'basename': self.name,
@@ -244,58 +277,87 @@ class Galleries(Task):
'uptodate': [utils.config_changed({
1: self.kw,
2: self.site.config["COMMENTS_IN_GALLERIES"],
- 3: context,
- })],
+ 3: context.copy(),
+ }, 'nikola.plugins.task.galleries:gallery')],
}, self.kw['filters'])
# RSS for the gallery
if self.kw["generate_rss"]:
rss_dst = os.path.join(
self.kw['output_folder'],
- self.site.path(
- "gallery_rss",
- os.path.relpath(gallery, self.kw['gallery_path']), lang))
+ self.site.path("gallery_rss", gallery, lang))
rss_dst = os.path.normpath(rss_dst)
yield utils.apply_filters({
'basename': self.name,
'name': rss_dst,
- 'file_dep': file_dep,
+ 'file_dep': file_dep_dest,
'targets': [rss_dst],
'actions': [
(self.gallery_rss, (
image_list,
+ dst_img_list,
img_titles,
lang,
- self.site.link(
- "gallery_rss", os.path.basename(gallery), lang),
+ self.site.link("gallery_rss", gallery, lang),
rss_dst,
context['title']
))],
'clean': True,
'uptodate': [utils.config_changed({
1: self.kw,
- })],
+ }, 'nikola.plugins.task.galleries:rss')],
}, self.kw['filters'])
def find_galleries(self):
"""Find all galleries to be processed according to conf.py"""
self.gallery_list = []
- for root, dirs, files in os.walk(self.kw['gallery_path'], followlinks=True):
- self.gallery_list.append(root)
+ for input_folder, output_folder in self.kw['gallery_folders'].items():
+ for root, dirs, files in os.walk(input_folder, followlinks=True):
+ self.gallery_list.append((root, input_folder, output_folder))
+
+ def create_galleries_paths(self):
+ """Given a list of galleries, puts their paths into self.gallery_links."""
+
+ # gallery_path is "gallery/foo/name"
+ self.proper_gallery_links = dict()
+ self.improper_gallery_links = dict()
+ for gallery_path, input_folder, output_folder in self.gallery_list:
+ if gallery_path == input_folder:
+ gallery_name = ''
+ # special case, because relpath will return '.' in this case
+ else:
+ gallery_name = os.path.relpath(gallery_path, input_folder)
+
+ output_path = os.path.join(output_folder, gallery_name)
+ self.proper_gallery_links[gallery_path] = output_path
+ self.proper_gallery_links[output_path] = output_path
+
+ # If the input and output names differ, the gallery is accessible
+ # only by `input` and `output/`.
+ output_path_noslash = output_path[:-1]
+ if output_path_noslash not in self.proper_gallery_links:
+ self.proper_gallery_links[output_path_noslash] = output_path
+
+ gallery_path_slash = gallery_path + '/'
+ if gallery_path_slash not in self.proper_gallery_links:
+ self.proper_gallery_links[gallery_path_slash] = output_path
+
+ if gallery_name not in self.improper_gallery_links:
+ self.improper_gallery_links[gallery_name] = list()
+ self.improper_gallery_links[gallery_name].append(output_path)
def create_galleries(self):
"""Given a list of galleries, create the output folders."""
# gallery_path is "gallery/foo/name"
- for gallery_path in self.gallery_list:
- gallery_name = os.path.relpath(gallery_path, self.kw['gallery_path'])
+ for gallery_path, input_folder, _ in self.gallery_list:
# have to use dirname because site.path returns .../index.html
output_gallery = os.path.dirname(
os.path.join(
self.kw["output_folder"],
- self.site.path("gallery", gallery_name)))
+ self.site.path("gallery", gallery_path)))
output_gallery = os.path.normpath(output_gallery)
# Task to create gallery in output/
yield {
@@ -304,16 +366,16 @@ class Galleries(Task):
'actions': [(utils.makedirs, (output_gallery,))],
'targets': [output_gallery],
'clean': True,
- 'uptodate': [utils.config_changed(self.kw)],
+ 'uptodate': [utils.config_changed(self.kw, 'nikola.plugins.task.galleries:mkdir')],
}
- def parse_index(self, gallery):
+ def parse_index(self, gallery, input_folder, output_folder):
"""Returns a Post object if there is an index.txt."""
index_path = os.path.join(gallery, "index.txt")
destination = os.path.join(
- self.kw["output_folder"],
- gallery)
+ self.kw["output_folder"], output_folder,
+ os.path.relpath(gallery, input_folder))
if os.path.isfile(index_path):
post = Post(
index_path,
@@ -361,12 +423,12 @@ class Galleries(Task):
image_list = list(image_set)
return image_list
- def create_target_images(self, img):
- gallery_name = os.path.relpath(os.path.dirname(img), self.kw['gallery_path'])
+ def create_target_images(self, img, input_path):
+ gallery_name = os.path.dirname(img)
output_gallery = os.path.dirname(
os.path.join(
self.kw["output_folder"],
- self.site.path("gallery", gallery_name)))
+ self.site.path("gallery_global", gallery_name)))
# Do thumbnails and copy originals
# img is "galleries/name/image_name.jpg"
# img_name is "image_name.jpg"
@@ -392,7 +454,7 @@ class Galleries(Task):
'clean': True,
'uptodate': [utils.config_changed({
1: self.kw['thumbnail_size']
- })],
+ }, 'nikola.plugins.task.galleries:resize_thumb')],
}, self.kw['filters'])
yield utils.apply_filters({
@@ -407,19 +469,19 @@ class Galleries(Task):
'clean': True,
'uptodate': [utils.config_changed({
1: self.kw['max_image_size']
- })],
+ }, 'nikola.plugins.task.galleries:resize_max')],
}, self.kw['filters'])
- def remove_excluded_image(self, img):
+ def remove_excluded_image(self, img, input_folder):
# Remove excluded images
- # img is something like galleries/demo/tesla2_lg.jpg so it's the *source* path
+ # img is something like input_folder/demo/tesla2_lg.jpg so it's the *source* path
# and we should remove both the large and thumbnail *destination* paths
- img = os.path.relpath(img, self.kw['gallery_path'])
output_folder = os.path.dirname(
os.path.join(
self.kw["output_folder"],
- self.site.path("gallery", os.path.dirname(img))))
+ self.site.path("gallery_global", os.path.dirname(img))))
+ img = os.path.relpath(img, input_folder)
img_path = os.path.join(output_folder, os.path.basename(img))
fname, ext = os.path.splitext(img_path)
thumb_path = fname + '.thumbnail' + ext
@@ -431,7 +493,7 @@ class Galleries(Task):
(utils.remove_file, (thumb_path,))
],
'clean': True,
- 'uptodate': [utils.config_changed(self.kw)],
+ 'uptodate': [utils.config_changed(self.kw, 'nikola.plugins.task.galleries:clean_thumb')],
}, self.kw['filters'])
yield utils.apply_filters({
@@ -441,7 +503,7 @@ class Galleries(Task):
(utils.remove_file, (img_path,))
],
'clean': True,
- 'uptodate': [utils.config_changed(self.kw)],
+ 'uptodate': [utils.config_changed(self.kw, 'nikola.plugins.task.galleries:clean_file')],
}, self.kw['filters'])
def render_gallery_index(
@@ -484,7 +546,7 @@ class Galleries(Task):
context['photo_array_json'] = json.dumps(photo_array)
self.site.render_template(template_name, output_name, context)
- def gallery_rss(self, img_list, img_titles, lang, permalink, output_path, title):
+ def gallery_rss(self, img_list, dest_img_list, img_titles, lang, permalink, output_path, title):
"""Create a RSS showing the latest images in the gallery.
This doesn't use generic_rss_renderer because it
@@ -492,10 +554,10 @@ class Galleries(Task):
"""
def make_url(url):
- return urljoin(self.site.config['BASE_URL'], url)
+ return urljoin(self.site.config['BASE_URL'], url.lstrip('/'))
items = []
- for img, title in list(zip(img_list, img_titles))[:self.kw["feed_length"]]:
+ for img, srcimg, title in list(zip(dest_img_list, img_list, img_titles))[:self.kw["feed_length"]]:
img_size = os.stat(
os.path.join(
self.site.config['OUTPUT_FOLDER'], img)).st_size
@@ -503,7 +565,7 @@ class Galleries(Task):
'title': title,
'link': make_url(img),
'guid': rss.Guid(img, False),
- 'pubDate': self.image_date(img),
+ 'pubDate': self.image_date(srcimg),
'enclosure': rss.Enclosure(
make_url(img),
img_size,
@@ -515,12 +577,15 @@ class Galleries(Task):
title=title,
link=make_url(permalink),
description='',
- lastBuildDate=datetime.datetime.now(),
+ lastBuildDate=datetime.datetime.utcnow(),
items=items,
generator='http://getnikola.com/',
language=lang
)
+
rss_obj.rss_attrs["xmlns:dc"] = "http://purl.org/dc/elements/1.1/"
+ rss_obj.self_url = make_url(permalink)
+ rss_obj.rss_attrs["xmlns:atom"] = "http://www.w3.org/2005/Atom"
dst_dir = os.path.dirname(output_path)
utils.makedirs(dst_dir)
with io.open(output_path, "w+", encoding="utf-8") as rss_file:
@@ -528,66 +593,3 @@ class Galleries(Task):
if isinstance(data, utils.bytes_str):
data = data.decode('utf-8')
rss_file.write(data)
-
- def resize_image(self, src, dst, max_size):
- """Make a copy of the image in the requested size."""
- if not Image:
- utils.copy_file(src, dst)
- return
- im = Image.open(src)
- w, h = im.size
- if w > max_size or h > max_size:
- size = max_size, max_size
-
- # Panoramas get larger thumbnails because they look *awful*
- if w > 2 * h:
- size = min(w, max_size * 4), min(w, max_size * 4)
-
- try:
- exif = im._getexif()
- except Exception:
- exif = None
- if exif is not None:
- for tag, value in list(exif.items()):
- decoded = ExifTags.TAGS.get(tag, tag)
-
- if decoded == 'Orientation':
- if value == 3:
- im = im.rotate(180)
- elif value == 6:
- im = im.rotate(270)
- elif value == 8:
- im = im.rotate(90)
- break
- try:
- im.thumbnail(size, Image.ANTIALIAS)
- im.save(dst)
- except Exception as e:
- self.logger.warn("Can't thumbnail {0}, using original "
- "image as thumbnail ({1})".format(src, e))
- utils.copy_file(src, dst)
- else: # Image is small
- utils.copy_file(src, dst)
-
- def image_date(self, src):
- """Try to figure out the date of the image."""
- if src not in self.dates:
- try:
- im = Image.open(src)
- exif = im._getexif()
- except Exception:
- exif = None
- if exif is not None:
- for tag, value in list(exif.items()):
- decoded = ExifTags.TAGS.get(tag, tag)
- if decoded in ('DateTimeOriginal', 'DateTimeDigitized'):
- try:
- self.dates[src] = datetime.datetime.strptime(
- value, r'%Y:%m:%d %H:%M:%S')
- break
- except ValueError: # Invalid EXIF date.
- pass
- if src not in self.dates:
- self.dates[src] = datetime.datetime.fromtimestamp(
- os.stat(src).st_mtime)
- return self.dates[src]
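
The new create_galleries_paths()/_find_gallery_path() pair lets site.link("gallery", ...) accept either an input path or an output path and flags ambiguous bare names. A toy reproduction of that lookup with an assumed one-entry GALLERY_FOLDERS, not the plugin's runtime data:

    import os

    gallery_folders = {'galleries': 'galleries'}   # assumed GALLERY_FOLDERS
    proper, improper = {}, {}
    for input_folder, output_folder in gallery_folders.items():
        for gallery_path in (input_folder, os.path.join(input_folder, 'demo')):
            name = '' if gallery_path == input_folder else os.path.relpath(gallery_path, input_folder)
            output_path = os.path.join(output_folder, name)
            # Full paths resolve directly; bare names may be ambiguous.
            proper[gallery_path] = proper[output_path] = output_path
            improper.setdefault(name, []).append(output_path)

    print(proper[os.path.join('galleries', 'demo')])  # galleries/demo
    print(improper['demo'])                           # one candidate, so resolvable
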
diff --git a/nikola/plugins/task/gzip.plugin b/nikola/plugins/task/gzip.plugin
index b68ea6f..4867fd6 100644
--- a/nikola/plugins/task/gzip.plugin
+++ b/nikola/plugins/task/gzip.plugin
@@ -4,7 +4,7 @@ Module = gzip
[Documentation]
Author = Roberto Alsina
-Version = 0.1
+Version = 1.0
Website = http://getnikola.com
Description = Create gzipped copies of files
diff --git a/nikola/plugins/task/gzip.py b/nikola/plugins/task/gzip.py
index bcc9637..5799839 100644
--- a/nikola/plugins/task/gzip.py
+++ b/nikola/plugins/task/gzip.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
diff --git a/nikola/plugins/task/indexes.plugin b/nikola/plugins/task/indexes.plugin
index a18942c..5d2bf5a 100644
--- a/nikola/plugins/task/indexes.plugin
+++ b/nikola/plugins/task/indexes.plugin
@@ -4,7 +4,7 @@ Module = indexes
[Documentation]
Author = Roberto Alsina
-Version = 0.1
+Version = 1.0
Website = http://getnikola.com
Description = Generates the blog's index pages.
diff --git a/nikola/plugins/task/indexes.py b/nikola/plugins/task/indexes.py
index 0a2cd02..03d36b1 100644
--- a/nikola/plugins/task/indexes.py
+++ b/nikola/plugins/task/indexes.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -29,7 +29,7 @@ from collections import defaultdict
import os
from nikola.plugin_categories import Task
-from nikola.utils import config_changed
+from nikola import utils
class Indexes(Task):
@@ -39,6 +39,7 @@ class Indexes(Task):
def set_site(self, site):
site.register_path_handler('index', self.index_path)
+ site.register_path_handler('index_atom', self.index_atom_path)
return super(Indexes, self).set_site(site)
def gen_tasks(self):
@@ -47,85 +48,39 @@ class Indexes(Task):
kw = {
"translations": self.site.config['TRANSLATIONS'],
- "index_display_post_count":
- self.site.config['INDEX_DISPLAY_POST_COUNT'],
"messages": self.site.MESSAGES,
- "index_teasers": self.site.config['INDEX_TEASERS'],
"output_folder": self.site.config['OUTPUT_FOLDER'],
"filters": self.site.config['FILTERS'],
"show_untranslated_posts": self.site.config['SHOW_UNTRANSLATED_POSTS'],
+ "index_display_post_count": self.site.config['INDEX_DISPLAY_POST_COUNT'],
"indexes_title": self.site.config['INDEXES_TITLE'],
- "indexes_pages": self.site.config['INDEXES_PAGES'],
- "indexes_pages_main": self.site.config['INDEXES_PAGES_MAIN'],
"blog_title": self.site.config["BLOG_TITLE"],
- "rss_read_more_link": self.site.config["RSS_READ_MORE_LINK"],
+ "generate_atom": self.site.config["GENERATE_ATOM"],
}
template_name = "index.tmpl"
posts = self.site.posts
+ self.number_of_pages = dict()
for lang in kw["translations"]:
- # Split in smaller lists
- lists = []
+ def page_link(i, displayed_i, num_pages, force_addition, extension=None):
+ feed = "_atom" if extension == ".atom" else ""
+ return utils.adjust_name_for_index_link(self.site.link("index" + feed, None, lang), i, displayed_i,
+ lang, self.site, force_addition, extension)
+
+ def page_path(i, displayed_i, num_pages, force_addition, extension=None):
+ feed = "_atom" if extension == ".atom" else ""
+ return utils.adjust_name_for_index_path(self.site.path("index" + feed, None, lang), i, displayed_i,
+ lang, self.site, force_addition, extension)
+
if kw["show_untranslated_posts"]:
filtered_posts = posts
else:
filtered_posts = [x for x in posts if x.is_translation_available(lang)]
- lists.append(filtered_posts[:kw["index_display_post_count"]])
- filtered_posts = filtered_posts[kw["index_display_post_count"]:]
- while filtered_posts:
- lists.append(filtered_posts[-kw["index_display_post_count"]:])
- filtered_posts = filtered_posts[:-kw["index_display_post_count"]]
- num_pages = len(lists)
- for i, post_list in enumerate(lists):
- context = {}
- indexes_title = kw['indexes_title'] or kw['blog_title'](lang)
- if kw["indexes_pages_main"]:
- ipages_i = i + 1
- ipages_msg = "page %d"
- else:
- ipages_i = i
- ipages_msg = "old posts, page %d"
- if kw["indexes_pages"]:
- indexes_pages = kw["indexes_pages"] % ipages_i
- else:
- indexes_pages = " (" + \
- kw["messages"][lang][ipages_msg] % ipages_i + ")"
- if i > 0 or kw["indexes_pages_main"]:
- context["title"] = indexes_title + indexes_pages
- else:
- context["title"] = indexes_title
- context["prevlink"] = None
- context["nextlink"] = None
- context['index_teasers'] = kw['index_teasers']
- if i == 0: # index.html page
- context["prevlink"] = None
- if num_pages > 1:
- context["nextlink"] = "index-{0}.html".format(num_pages - 1)
- else:
- context["nextlink"] = None
- else: # index-x.html pages
- if i > 1:
- context["nextlink"] = "index-{0}.html".format(i - 1)
- if i < num_pages - 1:
- context["prevlink"] = "index-{0}.html".format(i + 1)
- elif i == num_pages - 1:
- context["prevlink"] = "index.html"
- context["permalink"] = self.site.link("index", i, lang)
- output_name = os.path.join(
- kw['output_folder'], self.site.path("index", i,
- lang))
- task = self.site.generic_post_list_renderer(
- lang,
- post_list,
- output_name,
- template_name,
- kw['filters'],
- context,
- )
- task_cfg = {1: task['uptodate'][0].config, 2: kw}
- task['uptodate'] = [config_changed(task_cfg)]
- task['basename'] = 'render_indexes'
- yield task
+
+ indexes_title = kw['indexes_title'](lang) or kw['blog_title'](lang)
+ self.number_of_pages[lang] = (len(filtered_posts) + kw['index_display_post_count'] - 1) // kw['index_display_post_count']
+
+ yield self.site.generic_index_renderer(lang, filtered_posts, indexes_title, template_name, {}, kw, 'render_indexes', page_link, page_path)
if not self.site.config["STORY_INDEX"]:
return
@@ -135,6 +90,7 @@ class Indexes(Task):
"output_folder": self.site.config['OUTPUT_FOLDER'],
"filters": self.site.config['FILTERS'],
"index_file": self.site.config['INDEX_FILE'],
+ "strip_indexes": self.site.config['STRIP_INDEXES'],
}
template_name = "list.tmpl"
for lang in kw["translations"]:
@@ -151,6 +107,12 @@ class Indexes(Task):
should_render = True
output_name = os.path.join(kw['output_folder'], dirname, kw['index_file'])
short_destination = os.path.join(dirname, kw['index_file'])
+ link = short_destination.replace('\\', '/')
+ index_len = len(kw['index_file'])
+ if kw['strip_indexes'] and link[-(1 + index_len):] == '/' + kw['index_file']:
+ link = link[:-index_len]
+ context["permalink"] = link
+
for post in post_list:
# If there is an index.html pending to be created from
# a story, do not generate the STORY_INDEX
@@ -166,18 +128,25 @@ class Indexes(Task):
template_name,
kw['filters'],
context)
- task_cfg = {1: task['uptodate'][0].config, 2: kw}
- task['uptodate'] = [config_changed(task_cfg)]
+ task['uptodate'] = task['uptodate'] + [utils.config_changed(kw, 'nikola.plugins.task.indexes')]
task['basename'] = self.name
yield task
- def index_path(self, name, lang):
- if name not in [None, 0]:
- return [_f for _f in [self.site.config['TRANSLATIONS'][lang],
- self.site.config['INDEX_PATH'],
- 'index-{0}.html'.format(name)] if _f]
+ def index_path(self, name, lang, is_feed=False):
+ extension = None
+ if is_feed:
+ extension = ".atom"
+ index_file = os.path.splitext(self.site.config['INDEX_FILE'])[0] + extension
else:
- return [_f for _f in [self.site.config['TRANSLATIONS'][lang],
- self.site.config['INDEX_PATH'],
- self.site.config['INDEX_FILE']]
- if _f]
+ index_file = self.site.config['INDEX_FILE']
+ return utils.adjust_name_for_index_path_list([_f for _f in [self.site.config['TRANSLATIONS'][lang],
+ self.site.config['INDEX_PATH'],
+ index_file] if _f],
+ name,
+ utils.get_displayed_page_number(name, self.number_of_pages[lang], self.site),
+ lang,
+ self.site,
+ extension=extension)
+
+ def index_atom_path(self, name, lang):
+ return self.index_path(name, lang, is_feed=True)
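
generic_index_renderer needs to know how many index pages each language gets, and number_of_pages above is a plain ceiling division of the filtered post count by INDEX_DISPLAY_POST_COUNT. A quick check of that formula:

    def pages(post_count, per_page):
        # Ceiling division without importing math.
        return (post_count + per_page - 1) // per_page

    assert pages(10, 10) == 1
    assert pages(11, 10) == 2
    assert pages(0, 10) == 0
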
diff --git a/nikola/plugins/task/listings.plugin b/nikola/plugins/task/listings.plugin
index c93184d..a5ba77a 100644
--- a/nikola/plugins/task/listings.plugin
+++ b/nikola/plugins/task/listings.plugin
@@ -4,7 +4,7 @@ Module = listings
[Documentation]
Author = Roberto Alsina
-Version = 0.1
+Version = 1.0
Website = http://getnikola.com
Description = Render code listings into output
diff --git a/nikola/plugins/task/listings.py b/nikola/plugins/task/listings.py
index 79f6763..b913330 100644
--- a/nikola/plugins/task/listings.py
+++ b/nikola/plugins/task/listings.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,74 +26,115 @@
from __future__ import unicode_literals, print_function
+import sys
import os
from pygments import highlight
from pygments.lexers import get_lexer_for_filename, TextLexer
-from pygments.formatters import HtmlFormatter
import natsort
-import re
from nikola.plugin_categories import Task
from nikola import utils
-# FIXME: (almost) duplicated with mdx_nikola.py
-CODERE = re.compile('<div class="code"><pre>(.*?)</pre></div>', flags=re.MULTILINE | re.DOTALL)
-
-
class Listings(Task):
"""Render pretty listings."""
name = "render_listings"
+ def register_output_name(self, input_folder, rel_name, rel_output_name):
+ """Register proper and improper file mappings."""
+ if rel_name not in self.improper_input_file_mapping:
+ self.improper_input_file_mapping[rel_name] = []
+ self.improper_input_file_mapping[rel_name].append(rel_output_name)
+ self.proper_input_file_mapping[os.path.join(input_folder, rel_name)] = rel_output_name
+ self.proper_input_file_mapping[rel_output_name] = rel_output_name
+
def set_site(self, site):
site.register_path_handler('listing', self.listing_path)
+
+ # We need to prepare some things for the listings path handler to work.
+
+ self.kw = {
+ "default_lang": site.config["DEFAULT_LANG"],
+ "listings_folders": site.config["LISTINGS_FOLDERS"],
+ "output_folder": site.config["OUTPUT_FOLDER"],
+ "index_file": site.config["INDEX_FILE"],
+ "strip_indexes": site.config['STRIP_INDEXES'],
+ "filters": site.config["FILTERS"],
+ }
+
+ # Verify that no folder in LISTINGS_FOLDERS appears twice (on output side)
+ appearing_paths = set()
+ for source, dest in self.kw['listings_folders'].items():
+ if source in appearing_paths or dest in appearing_paths:
+ problem = source if source in appearing_paths else dest
+ utils.LOGGER.error("The listings input or output folder '{0}' appears in more than one entry in LISTINGS_FOLDERS, exiting.".format(problem))
+ sys.exit(1)
+ appearing_paths.add(source)
+ appearing_paths.add(dest)
+
+ # improper_input_file_mapping maps a relative input file (relative to
+ # its corresponding input directory) to a list of the output files.
+ # Since several input directories can contain files of the same name,
+ # a list is needed. This is needed for compatibility with previous Nikola
+ # versions, where there was no need to specify the input directory name
+ # when asking for a link via site.link('listing', ...).
+ self.improper_input_file_mapping = {}
+
+ # proper_input_file_mapping maps relative input file (relative to CWD)
+ # to a generated output file. Since we don't allow an input directory
+ # to appear more than once in LISTINGS_FOLDERS, we can map directly to
+ # a file name (and not a list of files).
+ self.proper_input_file_mapping = {}
+
+ for input_folder, output_folder in self.kw['listings_folders'].items():
+ for root, dirs, files in os.walk(input_folder, followlinks=True):
+ # Compute relative path; can't use os.path.relpath() here as it returns "." instead of ""
+ rel_path = root[len(input_folder):]
+ if rel_path[:1] == os.sep:
+ rel_path = rel_path[1:]
+
+ for f in files + [self.kw['index_file']]:
+ rel_name = os.path.join(rel_path, f)
+ rel_output_name = os.path.join(output_folder, rel_path, f)
+ # Register file names in the mapping.
+ self.register_output_name(input_folder, rel_name, rel_output_name)
+
return super(Listings, self).set_site(site)
def gen_tasks(self):
"""Render pretty code listings."""
- kw = {
- "default_lang": self.site.config["DEFAULT_LANG"],
- "listings_folder": self.site.config["LISTINGS_FOLDER"],
- "output_folder": self.site.config["OUTPUT_FOLDER"],
- "index_file": self.site.config["INDEX_FILE"],
- }
# Things to ignore in listings
ignored_extensions = (".pyc", ".pyo")
- def render_listing(in_name, out_name, folders=[], files=[]):
+ def render_listing(in_name, out_name, input_folder, output_folder, folders=[], files=[]):
if in_name:
with open(in_name, 'r') as fd:
try:
lexer = get_lexer_for_filename(in_name)
except:
lexer = TextLexer()
- code = highlight(fd.read(), lexer,
- HtmlFormatter(cssclass='code',
- linenos="table", nowrap=False,
- lineanchors=utils.slugify(in_name, force=True),
- anchorlinenos=True))
- # the pygments highlighter uses <div class="codehilite"><pre>
- # for code. We switch it to reST's <pre class="code">.
- code = CODERE.sub('<pre class="code literal-block">\\1</pre>', code)
+ code = highlight(fd.read(), lexer, utils.NikolaPygmentsHTML(in_name))
title = os.path.basename(in_name)
else:
code = ''
- title = ''
+ title = os.path.split(os.path.dirname(out_name))[1]
crumbs = utils.get_crumbs(os.path.relpath(out_name,
- kw['output_folder']),
+ self.kw['output_folder']),
is_file=True)
permalink = self.site.link(
'listing',
- os.path.relpath(
- out_name,
- os.path.join(
- kw['output_folder'],
- kw['listings_folder'])))
- if self.site.config['COPY_SOURCES']:
- source_link = permalink[:-5]
+ os.path.join(
+ input_folder,
+ os.path.relpath(
+ out_name[:-5], # remove '.html'
+ os.path.join(
+ self.kw['output_folder'],
+ output_folder))))
+ if self.site.config['COPY_SOURCES'] and in_name:
+ source_link = permalink[:-5] # remove '.html'
else:
source_link = None
context = {
@@ -101,88 +142,121 @@ class Listings(Task):
'title': title,
'crumbs': crumbs,
'permalink': permalink,
- 'lang': kw['default_lang'],
- 'folders': natsort.natsorted(folders),
- 'files': natsort.natsorted(files),
+ 'lang': self.kw['default_lang'],
+ 'folders': natsort.natsorted(
+ folders, alg=natsort.ns.F | natsort.ns.IC),
+ 'files': natsort.natsorted(
+ files, alg=natsort.ns.F | natsort.ns.IC),
'description': title,
'source_link': source_link,
}
- self.site.render_template('listing.tmpl', out_name,
- context)
+ self.site.render_template('listing.tmpl', out_name, context)
yield self.group_task()
template_deps = self.site.template_system.template_deps('listing.tmpl')
- for root, dirs, files in os.walk(kw['listings_folder'], followlinks=True):
- files = [f for f in files if os.path.splitext(f)[-1] not in ignored_extensions]
-
- uptodate = {'c': self.site.GLOBAL_CONTEXT}
-
- for k, v in self.site.GLOBAL_CONTEXT['template_hooks'].items():
- uptodate['||template_hooks|{0}||'.format(k)] = v._items
-
- for k in self.site._GLOBAL_CONTEXT_TRANSLATABLE:
- uptodate[k] = self.site.GLOBAL_CONTEXT[k](kw['default_lang'])
-
- # save navigation links as dependencies
- uptodate['navigation_links'] = uptodate['c']['navigation_links'](kw['default_lang'])
-
- uptodate2 = uptodate.copy()
- uptodate2['f'] = files
- uptodate2['d'] = dirs
-
- # Render all files
- out_name = os.path.join(
- kw['output_folder'],
- root, kw['index_file']
- )
- yield {
- 'basename': self.name,
- 'name': out_name,
- 'file_dep': template_deps,
- 'targets': [out_name],
- 'actions': [(render_listing, [None, out_name, dirs, files])],
- # This is necessary to reflect changes in blog title,
- # sidebar links, etc.
- 'uptodate': [utils.config_changed(uptodate2)],
- 'clean': True,
- }
- for f in files:
- ext = os.path.splitext(f)[-1]
- if ext in ignored_extensions:
- continue
- in_name = os.path.join(root, f)
- out_name = os.path.join(
- kw['output_folder'],
- root,
- f) + '.html'
- yield {
+
+ for input_folder, output_folder in self.kw['listings_folders'].items():
+ for root, dirs, files in os.walk(input_folder, followlinks=True):
+ files = [f for f in files if os.path.splitext(f)[-1] not in ignored_extensions]
+
+ uptodate = {'c': self.site.GLOBAL_CONTEXT}
+
+ for k, v in self.site.GLOBAL_CONTEXT['template_hooks'].items():
+ uptodate['||template_hooks|{0}||'.format(k)] = v._items
+
+ for k in self.site._GLOBAL_CONTEXT_TRANSLATABLE:
+ uptodate[k] = self.site.GLOBAL_CONTEXT[k](self.kw['default_lang'])
+
+ # save navigation links as dependencies
+ uptodate['navigation_links'] = uptodate['c']['navigation_links'](self.kw['default_lang'])
+
+ uptodate['kw'] = self.kw
+
+ uptodate2 = uptodate.copy()
+ uptodate2['f'] = files
+ uptodate2['d'] = dirs
+
+ # Compute relative path; can't use os.path.relpath() here as it returns "." instead of ""
+ rel_path = root[len(input_folder):]
+ if rel_path[:1] == os.sep:
+ rel_path = rel_path[1:]
+
+ rel_name = os.path.join(rel_path, self.kw['index_file'])
+ rel_output_name = os.path.join(output_folder, rel_path, self.kw['index_file'])
+
+ # Render all files
+ out_name = os.path.join(self.kw['output_folder'], rel_output_name)
+ yield utils.apply_filters({
'basename': self.name,
'name': out_name,
- 'file_dep': template_deps + [in_name],
+ 'file_dep': template_deps,
'targets': [out_name],
- 'actions': [(render_listing, [in_name, out_name])],
+ 'actions': [(render_listing, [None, out_name, input_folder, output_folder, dirs, files])],
# This is necessary to reflect changes in blog title,
# sidebar links, etc.
- 'uptodate': [utils.config_changed(uptodate)],
+ 'uptodate': [utils.config_changed(uptodate2, 'nikola.plugins.task.listings:folder')],
'clean': True,
- }
- if self.site.config['COPY_SOURCES']:
- out_name = os.path.join(
- kw['output_folder'],
- root,
- f)
- yield {
+ }, self.kw["filters"])
+ for f in files:
+ ext = os.path.splitext(f)[-1]
+ if ext in ignored_extensions:
+ continue
+ in_name = os.path.join(root, f)
+ # Record file names
+ rel_name = os.path.join(rel_path, f + '.html')
+ rel_output_name = os.path.join(output_folder, rel_path, f + '.html')
+ self.register_output_name(input_folder, rel_name, rel_output_name)
+ # Set up output name
+ out_name = os.path.join(self.kw['output_folder'], rel_output_name)
+ # Yield task
+ yield utils.apply_filters({
'basename': self.name,
'name': out_name,
- 'file_dep': [in_name],
+ 'file_dep': template_deps + [in_name],
'targets': [out_name],
- 'actions': [(utils.copy_file, [in_name, out_name])],
+ 'actions': [(render_listing, [in_name, out_name, input_folder, output_folder])],
+ # This is necessary to reflect changes in blog title,
+ # sidebar links, etc.
+ 'uptodate': [utils.config_changed(uptodate, 'nikola.plugins.task.listings:source')],
'clean': True,
- }
+ }, self.kw["filters"])
+ if self.site.config['COPY_SOURCES']:
+ rel_name = os.path.join(rel_path, f)
+ rel_output_name = os.path.join(output_folder, rel_path, f)
+ self.register_output_name(input_folder, rel_name, rel_output_name)
+ out_name = os.path.join(self.kw['output_folder'], rel_output_name)
+ yield utils.apply_filters({
+ 'basename': self.name,
+ 'name': out_name,
+ 'file_dep': [in_name],
+ 'targets': [out_name],
+ 'actions': [(utils.copy_file, [in_name, out_name])],
+ 'clean': True,
+ }, self.kw["filters"])
- def listing_path(self, name, lang):
- if not name.endswith('.html'):
+ def listing_path(self, namep, lang):
+ namep = namep.replace('/', os.sep)
+ nameh = namep + '.html'
+ for name in (namep, nameh):
+ if name in self.proper_input_file_mapping:
+ # If the name shows up in this dict, everything's fine.
+ name = self.proper_input_file_mapping[name]
+ break
+ elif name in self.improper_input_file_mapping:
+ # If the name shows up in this dict, we have to check for
+ # ambiguities.
+ if len(self.improper_input_file_mapping[name]) > 1:
+ utils.LOGGER.error("Using non-unique listing name '{0}', which maps to more than one listing name ({1})!".format(name, str(self.improper_input_file_mapping[name])))
+ sys.exit(1)
+ if len(self.site.config['LISTINGS_FOLDERS']) > 1:
+ utils.LOGGER.notice("Using a listing name in site.link() without an input directory prefix while the configuration's LISTINGS_FOLDERS has more than one entry.")
+ name = self.improper_input_file_mapping[name][0]
+ break
+ else:
+ utils.LOGGER.error("Unknown listing name {0}!".format(namep))
+ sys.exit(1)
+ if not name.endswith(os.sep + self.site.config["INDEX_FILE"]):
name += '.html'
- path_parts = [self.site.config['LISTINGS_FOLDER']] + list(os.path.split(name))
+ path_parts = name.split(os.sep)
return [_f for _f in path_parts if _f]
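
listing_path() now resolves a requested name first against proper_input_file_mapping (input-folder-prefixed or output path) and then against improper_input_file_mapping (bare relative name), refusing ambiguous matches. A minimal sketch of that two-level lookup with made-up file names:

    proper = {'listings/hello.py.html': 'listings/hello.py.html'}
    improper = {'hello.py.html': ['listings/hello.py.html']}

    def resolve(name):
        if name in proper:
            return proper[name]
        candidates = improper.get(name, [])
        if len(candidates) == 1:
            return candidates[0]
        raise ValueError('unknown or ambiguous listing: %r' % name)

    print(resolve('hello.py.html'))  # listings/hello.py.html
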
diff --git a/nikola/plugins/task/pages.plugin b/nikola/plugins/task/pages.plugin
index 67212d2..4cad7b7 100644
--- a/nikola/plugins/task/pages.plugin
+++ b/nikola/plugins/task/pages.plugin
@@ -4,7 +4,7 @@ Module = pages
[Documentation]
Author = Roberto Alsina
-Version = 0.1
+Version = 1.0
Website = http://getnikola.com
Description = Create pages in the output.
diff --git a/nikola/plugins/task/pages.py b/nikola/plugins/task/pages.py
index aefc5a1..d0edb56 100644
--- a/nikola/plugins/task/pages.py
+++ b/nikola/plugins/task/pages.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -51,9 +51,7 @@ class RenderPages(Task):
continue
for task in self.site.generic_page_renderer(lang, post,
kw["filters"]):
- task['uptodate'] = [config_changed({
- 1: task['uptodate'][0].config,
- 2: kw})]
+ task['uptodate'] = task['uptodate'] + [config_changed(kw, 'nikola.plugins.task.pages')]
task['basename'] = self.name
task['task_dep'] = ['render_posts']
yield task
diff --git a/nikola/plugins/task/posts.plugin b/nikola/plugins/task/posts.plugin
index e1a42fd..707b3c2 100644
--- a/nikola/plugins/task/posts.plugin
+++ b/nikola/plugins/task/posts.plugin
@@ -4,7 +4,7 @@ Module = posts
[Documentation]
Author = Roberto Alsina
-Version = 0.1
+Version = 1.0
Website = http://getnikola.com
Description = Create HTML fragments out of posts.
diff --git a/nikola/plugins/task/posts.py b/nikola/plugins/task/posts.py
index 8e03122..d3f17fd 100644
--- a/nikola/plugins/task/posts.py
+++ b/nikola/plugins/task/posts.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -25,18 +25,20 @@
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from copy import copy
+import os
from nikola.plugin_categories import Task
-from nikola import utils
+from nikola import filters, utils
-def rest_deps(post, task):
- """Add extra_deps from ReST into task.
+def update_deps(post, lang, task):
+ """Updates file dependencies as they might have been updated during compilation.
- The .dep file is created by ReST so not available before the task starts
- to execute.
+ This is done for example by the ReST page compiler, which writes its
+ dependencies into a .dep file. This file is read and incorporated when calling
+ post.fragment_deps(), and only available /after/ compiling the fragment.
"""
- task.file_dep.update(post.extra_deps())
+ task.file_dep.update([p for p in post.fragment_deps(lang) if not p.startswith("####MAGIC####")])
class RenderPosts(Task):
@@ -54,23 +56,62 @@ class RenderPosts(Task):
"show_untranslated_posts": self.site.config['SHOW_UNTRANSLATED_POSTS'],
"demote_headers": self.site.config['DEMOTE_HEADERS'],
}
+ self.tl_changed = False
yield self.group_task()
+ def tl_ch():
+ self.tl_changed = True
+
+ yield {
+ 'basename': self.name,
+ 'name': 'timeline_changes',
+ 'actions': [tl_ch],
+ 'uptodate': [utils.config_changed({1: kw['timeline']})],
+ }
+
for lang in kw["translations"]:
deps_dict = copy(kw)
deps_dict.pop('timeline')
for post in kw['timeline']:
+
dest = post.translated_base_path(lang)
+ file_dep = [p for p in post.fragment_deps(lang) if not p.startswith("####MAGIC####")]
task = {
'basename': self.name,
'name': dest,
- 'file_dep': post.fragment_deps(lang),
+ 'file_dep': file_dep,
'targets': [dest],
'actions': [(post.compile, (lang, )),
- (rest_deps, (post,)),
+ (update_deps, (post, lang, )),
],
'clean': True,
- 'uptodate': [utils.config_changed(deps_dict)],
+ 'uptodate': [
+ utils.config_changed(deps_dict, 'nikola.plugins.task.posts'),
+ lambda p=post, l=lang: self.dependence_on_timeline(p, l)
+ ] + post.fragment_deps_uptodate(lang),
+ 'task_dep': ['render_posts:timeline_changes']
}
- yield task
+
+ # Apply filters specified in the metadata
+ ff = [x.strip() for x in post.meta('filters', lang).split(',')]
+ flist = []
+ for i, f in enumerate(ff):
+ if not f:
+ continue
+ if f.startswith('filters.'): # A function from the filters module
+ f = f[8:]
+ try:
+ flist.append(getattr(filters, f))
+ except AttributeError:
+ pass
+ else:
+ flist.append(f)
+ yield utils.apply_filters(task, {os.path.splitext(dest): flist})
+
+ def dependence_on_timeline(self, post, lang):
+ if "####MAGIC####TIMELINE" not in post.fragment_deps(lang):
+ return True # No dependency on timeline
+ elif self.tl_changed:
+ return False # Timeline changed
+ return True
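
The per-post "filters" metadata above is split on commas; entries prefixed with "filters." are looked up as callables on the nikola.filters module, and anything else is kept as a shell command for apply_filters. A sketch of that parsing, with a stand-in class instead of nikola.filters and an assumed metadata value:

    class fake_filters(object):
        # Stand-in for nikola.filters; typogrify is only an example name.
        @staticmethod
        def typogrify(infile):
            return infile

    meta_value = 'filters.typogrify, sed -i -e s/foo/bar/'
    flist = []
    for f in (x.strip() for x in meta_value.split(',')):
        if not f:
            continue
        if f.startswith('filters.'):
            flist.append(getattr(fake_filters, f[len('filters.'):]))
        else:
            flist.append(f)  # shell command applied to the output file

    print(flist)
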
diff --git a/nikola/plugins/task/redirect.plugin b/nikola/plugins/task/redirect.plugin
index 826f3d8..0228c70 100644
--- a/nikola/plugins/task/redirect.plugin
+++ b/nikola/plugins/task/redirect.plugin
@@ -4,7 +4,7 @@ Module = redirect
[Documentation]
Author = Roberto Alsina
-Version = 0.1
+Version = 1.0
Website = http://getnikola.com
Description = Create redirect pages.
diff --git a/nikola/plugins/task/redirect.py b/nikola/plugins/task/redirect.py
index e1134bf..428dd5a 100644
--- a/nikola/plugins/task/redirect.py
+++ b/nikola/plugins/task/redirect.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -24,7 +24,8 @@
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-import io
+from __future__ import unicode_literals
+
import os
from nikola.plugin_categories import Task
@@ -42,26 +43,18 @@ class Redirect(Task):
kw = {
'redirections': self.site.config['REDIRECTIONS'],
'output_folder': self.site.config['OUTPUT_FOLDER'],
+ 'filters': self.site.config['FILTERS'],
}
yield self.group_task()
if kw['redirections']:
for src, dst in kw["redirections"]:
src_path = os.path.join(kw["output_folder"], src)
- yield {
+ yield utils.apply_filters({
'basename': self.name,
'name': src_path,
'targets': [src_path],
- 'actions': [(create_redirect, (src_path, dst))],
+ 'actions': [(utils.create_redirect, (src_path, dst))],
'clean': True,
- 'uptodate': [utils.config_changed(kw)],
- }
-
-
-def create_redirect(src, dst):
- utils.makedirs(os.path.dirname(src))
- with io.open(src, "w+", encoding="utf8") as fd:
- fd.write('<!DOCTYPE html><head><title>Redirecting...</title>'
- '<meta name="robots" content="noindex">'
- '<meta http-equiv="refresh" content="0; '
- 'url={0}"></head><body><p>Page moved <a href="{0}">here</a></p></body>'.format(dst))
+ 'uptodate': [utils.config_changed(kw, 'nikola.plugins.task.redirect')],
+ }, kw["filters"])
diff --git a/nikola/plugins/task/robots.plugin b/nikola/plugins/task/robots.plugin
index 60b50fb..b4b43a3 100644
--- a/nikola/plugins/task/robots.plugin
+++ b/nikola/plugins/task/robots.plugin
@@ -4,7 +4,7 @@ Module = robots
[Documentation]
Author = Daniel Aleksandersen
-Version = 0.1
+Version = 1.0
Website = http://getnikola.com
Description = Generate /robots.txt exclusion file and promote sitemap.
diff --git a/nikola/plugins/task/robots.py b/nikola/plugins/task/robots.py
index b229d37..2f25a21 100644
--- a/nikola/plugins/task/robots.py
+++ b/nikola/plugins/task/robots.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -48,7 +48,8 @@ class RobotsFile(LateTask):
"site_url": self.site.config["SITE_URL"],
"output_folder": self.site.config["OUTPUT_FOLDER"],
"files_folders": self.site.config['FILES_FOLDERS'],
- "robots_exclusions": self.site.config["ROBOTS_EXCLUSIONS"]
+ "robots_exclusions": self.site.config["ROBOTS_EXCLUSIONS"],
+ "filters": self.site.config["FILTERS"],
}
sitemapindex_url = urljoin(kw["base_url"], "sitemapindex.xml")
@@ -68,15 +69,15 @@ class RobotsFile(LateTask):
yield self.group_task()
if not utils.get_asset_path("robots.txt", [], files_folders=kw["files_folders"]):
- yield {
+ yield utils.apply_filters({
"basename": self.name,
"name": robots_path,
"targets": [robots_path],
"actions": [(write_robots)],
- "uptodate": [utils.config_changed(kw)],
+ "uptodate": [utils.config_changed(kw, 'nikola.plugins.task.robots')],
"clean": True,
"task_dep": ["sitemap"]
- }
+ }, kw["filters"])
elif kw["robots_exclusions"]:
utils.LOGGER.warn('Did not generate robots.txt as one already exists in FILES_FOLDERS. ROBOTS_EXCLUSIONS will not have any effect on the copied file.')
else:
diff --git a/nikola/plugins/task/rss.plugin b/nikola/plugins/task/rss.plugin
index 7206a43..56f0bf4 100644
--- a/nikola/plugins/task/rss.plugin
+++ b/nikola/plugins/task/rss.plugin
@@ -4,7 +4,7 @@ Module = rss
[Documentation]
Author = Roberto Alsina
-Version = 0.1
+Version = 1.0
Website = http://getnikola.com
Description = Generate RSS feeds.
diff --git a/nikola/plugins/task/rss.py b/nikola/plugins/task/rss.py
index b16ed48..26a4da1 100644
--- a/nikola/plugins/task/rss.py
+++ b/nikola/plugins/task/rss.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -51,6 +51,7 @@ class GenerateRSS(Task):
"filters": self.site.config["FILTERS"],
"blog_title": self.site.config["BLOG_TITLE"],
"site_url": self.site.config["SITE_URL"],
+ "base_url": self.site.config["BASE_URL"],
"blog_description": self.site.config["BLOG_DESCRIPTION"],
"output_folder": self.site.config["OUTPUT_FOLDER"],
"rss_teasers": self.site.config["RSS_TEASERS"],
@@ -59,6 +60,7 @@ class GenerateRSS(Task):
"feed_length": self.site.config['FEED_LENGTH'],
"tzinfo": self.site.tzinfo,
"rss_read_more_link": self.site.config["RSS_READ_MORE_LINK"],
+ "rss_links_append_query": self.site.config["RSS_LINKS_APPEND_QUERY"],
}
self.site.scan_posts()
# Check for any changes in the state of use_in_feeds for any post.
@@ -71,16 +73,18 @@ class GenerateRSS(Task):
output_name = os.path.join(kw['output_folder'],
self.site.path("rss", None, lang))
deps = []
+ deps_uptodate = []
if kw["show_untranslated_posts"]:
- posts = self.site.posts[:10]
+ posts = self.site.posts[:kw['feed_length']]
else:
- posts = [x for x in self.site.posts if x.is_translation_available(lang)][:10]
+ posts = [x for x in self.site.posts if x.is_translation_available(lang)][:kw['feed_length']]
for post in posts:
deps += post.deps(lang)
+ deps_uptodate += post.deps_uptodate(lang)
feed_url = urljoin(self.site.config['BASE_URL'], self.site.link("rss", None, lang).lstrip('/'))
- yield {
+ task = {
'basename': 'generate_rss',
'name': os.path.normpath(output_name),
'file_dep': deps,
@@ -88,12 +92,14 @@ class GenerateRSS(Task):
'actions': [(utils.generic_rss_renderer,
(lang, kw["blog_title"](lang), kw["site_url"],
kw["blog_description"](lang), posts, output_name,
- kw["rss_teasers"], kw["rss_plain"], kw['feed_length'], feed_url))],
+ kw["rss_teasers"], kw["rss_plain"], kw['feed_length'], feed_url,
+ None, kw["rss_links_append_query"]))],
'task_dep': ['render_posts'],
'clean': True,
- 'uptodate': [utils.config_changed(kw)],
+ 'uptodate': [utils.config_changed(kw, 'nikola.plugins.task.rss')] + deps_uptodate,
}
+ yield utils.apply_filters(task, kw['filters'])
def rss_path(self, name, lang):
return [_f for _f in [self.site.config['TRANSLATIONS'][lang],
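
Two details in the rss.py hunks are easy to miss: the feed used to be cut at a hard-coded 10 posts and now honours FEED_LENGTH, and each selected post now contributes non-file uptodate checks alongside its file dependencies. A condensed sketch of that selection (assuming self.site.posts is already ordered the way the feed expects):

    feed_length = kw['feed_length']     # from self.site.config['FEED_LENGTH']

    if kw['show_untranslated_posts']:
        posts = self.site.posts[:feed_length]
    else:
        posts = [p for p in self.site.posts
                 if p.is_translation_available(lang)][:feed_length]

    deps, deps_uptodate = [], []
    for post in posts:
        deps += post.deps(lang)                     # file dependencies for doit
        deps_uptodate += post.deps_uptodate(lang)   # config-style uptodate checks
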
diff --git a/nikola/plugins/task/scale_images.plugin b/nikola/plugins/task/scale_images.plugin
new file mode 100644
index 0000000..c0f0f28
--- /dev/null
+++ b/nikola/plugins/task/scale_images.plugin
@@ -0,0 +1,9 @@
+[Core]
+Name = scale_images
+Module = scale_images
+
+[Documentation]
+Author = Pelle Nilsson
+Version = 1.0
+Website = http://getnikola.com
+Description = Create down-scaled images and thumbnails.
diff --git a/nikola/plugins/task/scale_images.py b/nikola/plugins/task/scale_images.py
new file mode 100644
index 0000000..f97027e
--- /dev/null
+++ b/nikola/plugins/task/scale_images.py
@@ -0,0 +1,96 @@
+# -*- coding: utf-8 -*-
+
+# Copyright © 2014-2015 Pelle Nilsson and others.
+
+# Permission is hereby granted, free of charge, to any
+# person obtaining a copy of this software and associated
+# documentation files (the "Software"), to deal in the
+# Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the
+# Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice
+# shall be included in all copies or substantial portions of
+# the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
+# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+import os
+
+from nikola.plugin_categories import Task
+from nikola.image_processing import ImageProcessor
+from nikola import utils
+
+
+class ScaleImage(Task, ImageProcessor):
+ """Copy static files into the output folder."""
+
+ name = "scale_images"
+
+ def set_site(self, site):
+ self.logger = utils.get_logger('scale_images', site.loghandlers)
+ return super(ScaleImage, self).set_site(site)
+
+ def process_tree(self, src, dst):
+ """Processes all images in a src tree and put the (possibly) rescaled
+ images in the dst folder."""
+ ignore = set(['.svn'])
+ base_len = len(src.split(os.sep))
+ for root, dirs, files in os.walk(src, followlinks=True):
+ root_parts = root.split(os.sep)
+ if set(root_parts) & ignore:
+ continue
+ dst_dir = os.path.join(dst, *root_parts[base_len:])
+ utils.makedirs(dst_dir)
+ for src_name in files:
+ if src_name in ('.DS_Store', 'Thumbs.db'):
+ continue
+ if (not src_name.lower().endswith(tuple(self.image_ext_list)) and not src_name.upper().endswith(tuple(self.image_ext_list))):
+ continue
+ dst_file = os.path.join(dst_dir, src_name)
+ src_file = os.path.join(root, src_name)
+ thumb_file = '.thumbnail'.join(os.path.splitext(dst_file))
+ yield {
+ 'name': dst_file,
+ 'file_dep': [src_file],
+ 'targets': [dst_file, thumb_file],
+ 'actions': [(self.process_image, (src_file, dst_file, thumb_file))],
+ 'clean': True,
+ }
+
+ def process_image(self, src, dst, thumb):
+ self.resize_image(src, dst, self.kw['max_image_size'], False)
+ self.resize_image(src, thumb, self.kw['image_thumbnail_size'], False)
+
+ def gen_tasks(self):
+ """Copy static files into the output folder."""
+
+ self.kw = {
+ 'image_thumbnail_size': self.site.config['IMAGE_THUMBNAIL_SIZE'],
+ 'max_image_size': self.site.config['MAX_IMAGE_SIZE'],
+ 'image_folders': self.site.config['IMAGE_FOLDERS'],
+ 'output_folder': self.site.config['OUTPUT_FOLDER'],
+ 'filters': self.site.config['FILTERS'],
+ }
+
+ self.image_ext_list = self.image_ext_list_builtin
+ self.image_ext_list.extend(self.site.config.get('EXTRA_IMAGE_EXTENSIONS', []))
+
+ yield self.group_task()
+ for src in self.kw['image_folders']:
+ dst = self.kw['output_folder']
+ filters = self.kw['filters']
+ real_dst = os.path.join(dst, self.kw['image_folders'][src])
+ for task in self.process_tree(src, real_dst):
+ task['basename'] = self.name
+ task['uptodate'] = [utils.config_changed(self.kw)]
+ yield utils.apply_filters(task, filters)
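
The new plugin is driven entirely by configuration read in gen_tasks. A hedged conf.py sketch exercising those keys (the values are illustrative; EXTRA_IMAGE_EXTENSIONS is optional because the code falls back to an empty list):

    # conf.py (illustrative values)
    IMAGE_FOLDERS = {'images': 'images'}   # source folder -> folder under OUTPUT_FOLDER
    MAX_IMAGE_SIZE = 1280                  # size passed to resize_image for the main copy
    IMAGE_THUMBNAIL_SIZE = 400             # size passed to resize_image for the thumbnail copy
    EXTRA_IMAGE_EXTENSIONS = []            # appended to the built-in extension list

With that in place, images/foo.jpg would be rebuilt as output/images/foo.jpg plus output/images/foo.thumbnail.jpg whenever the source changes, since the thumbnail name is spliced in before the extension.
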
diff --git a/nikola/plugins/task/sitemap.plugin b/nikola/plugins/task/sitemap.plugin
index 2cd8195..0b992b8 100644
--- a/nikola/plugins/task/sitemap.plugin
+++ b/nikola/plugins/task/sitemap.plugin
@@ -4,7 +4,7 @@ Module = sitemap
[Documentation]
Author = Roberto Alsina
-Version = 0.1
+Version = 1.0
Website = http://getnikola.com
Description = Generate google sitemap.
diff --git a/nikola/plugins/task/sitemap/__init__.py b/nikola/plugins/task/sitemap/__init__.py
index 943e9b2..92d557d 100644
--- a/nikola/plugins/task/sitemap/__init__.py
+++ b/nikola/plugins/task/sitemap/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -36,7 +36,7 @@ except ImportError:
import urllib.robotparser as robotparser # NOQA
from nikola.plugin_categories import LateTask
-from nikola.utils import config_changed
+from nikola.utils import config_changed, apply_filters
urlset_header = """<?xml version="1.0" encoding="UTF-8"?>
@@ -49,7 +49,7 @@ urlset_header = """<?xml version="1.0" encoding="UTF-8"?>
loc_format = """ <url>
<loc>{0}</loc>
- <lastmod>{1}</lastmod>
+ <lastmod>{1}</lastmod>{2}
</url>
"""
@@ -69,6 +69,9 @@ sitemap_format = """ <sitemap>
</sitemap>
"""
+alternates_format = """\n <xhtml:link rel="alternate" hreflang="{0}" href="{1}" />"""
+
+
sitemapindex_footer = "</sitemapindex>"
@@ -111,8 +114,10 @@ class Sitemap(LateTask):
"strip_indexes": self.site.config["STRIP_INDEXES"],
"index_file": self.site.config["INDEX_FILE"],
"sitemap_include_fileless_dirs": self.site.config["SITEMAP_INCLUDE_FILELESS_DIRS"],
- "mapped_extensions": self.site.config.get('MAPPED_EXTENSIONS', ['.html', '.htm', '.xml', '.rss']),
- "robots_exclusions": self.site.config["ROBOTS_EXCLUSIONS"]
+ "mapped_extensions": self.site.config.get('MAPPED_EXTENSIONS', ['.atom', '.html', '.htm', '.xml', '.rss']),
+ "robots_exclusions": self.site.config["ROBOTS_EXCLUSIONS"],
+ "filters": self.site.config["FILTERS"],
+ "translations": self.site.config["TRANSLATIONS"],
}
output = kw['output_folder']
@@ -136,7 +141,17 @@ class Sitemap(LateTask):
lastmod = self.get_lastmod(root)
loc = urljoin(base_url, base_path + path)
if kw['index_file'] in files and kw['strip_indexes']: # ignore folders when not stripping urls
- urlset[loc] = loc_format.format(loc, lastmod)
+ post = self.site.post_per_file.get(path + kw['index_file'])
+ if post and (post.is_draft or post.is_private or post.publish_later):
+ continue
+ alternates = []
+ if post:
+ for lang in kw['translations']:
+ alt_url = post.permalink(lang=lang, absolute=True)
+ if loc == alt_url:
+ continue
+ alternates.append(alternates_format.format(lang, alt_url))
+ urlset[loc] = loc_format.format(loc, lastmod, ''.join(alternates))
for fname in files:
if kw['strip_indexes'] and fname == kw['index_file']:
continue # We already mapped the folder
@@ -148,20 +163,30 @@ class Sitemap(LateTask):
continue
if not robot_fetch(path):
continue
+
+ # read in binary mode to make ancient files work
+ fh = open(real_path, 'rb')
+ filehead = fh.read(1024)
+ fh.close()
+
if path.endswith('.html') or path.endswith('.htm'):
- try:
- if u'<!doctype html' not in io.open(real_path, 'r', encoding='utf8').read(1024).lower():
- # ignores "html" files without doctype
- # alexa-verify, google-site-verification, etc.
- continue
- except UnicodeDecodeError:
- # ignore ancient files
- # most non-utf8 files are worthless anyways
+ """ ignores "html" files without doctype """
+ if b'<!doctype html' not in filehead.lower():
continue
- """ put RSS in sitemapindex[] instead of in urlset[], sitemap_path is included after it is generated """
- if path.endswith('.xml') or path.endswith('.rss'):
- filehead = io.open(real_path, 'r', encoding='utf8').read(512)
- if u'<rss' in filehead or (u'<urlset' in filehead and path != sitemap_path):
+
+ """ ignores "html" files with noindex robot directives """
+ robots_directives = [b'<meta content="noindex" name="robots"',
+ b'<meta content="none" name="robots"',
+ b'<meta name="robots" content="noindex"',
+ b'<meta name="robots" content="none"']
+ if any([robot_directive in filehead.lower() for robot_directive in robots_directives]):
+ continue
+
+ # put Atom and RSS in sitemapindex[] instead of in urlset[],
+ # sitemap_path is included after it is generated
+ if path.endswith('.xml') or path.endswith('.atom') or path.endswith('.rss'):
+ known_elm_roots = (b'<feed', b'<rss', b'<urlset')
+ if any([elm_root in filehead.lower() for elm_root in known_elm_roots]) and path != sitemap_path:
path = path.replace(os.sep, '/')
lastmod = self.get_lastmod(real_path)
loc = urljoin(base_url, base_path + path)
@@ -175,7 +200,14 @@ class Sitemap(LateTask):
path = path.replace(os.sep, '/')
lastmod = self.get_lastmod(real_path)
loc = urljoin(base_url, base_path + path)
- urlset[loc] = loc_format.format(loc, lastmod)
+ alternates = []
+ if post:
+ for lang in kw['translations']:
+ alt_url = post.permalink(lang=lang, absolute=True)
+ if loc == alt_url:
+ continue
+ alternates.append(alternates_format.format(lang, alt_url))
+ urlset[loc] = loc_format.format(loc, lastmod, '\n'.join(alternates))
def robot_fetch(path):
for rule in kw["robots_exclusions"]:
@@ -208,7 +240,27 @@ class Sitemap(LateTask):
# to scan locations.
def scan_locs_task():
scan_locs()
- return {'locations': list(urlset.keys()) + list(sitemapindex.keys())}
+
+ # Generate a list of file dependencies for the actual generation
+ # task, so rebuilds are triggered. (Issue #1032)
+ output = kw["output_folder"]
+ file_dep = []
+
+ for i in urlset.keys():
+ p = os.path.join(output, urlparse(i).path.replace(base_path, '', 1))
+ if not p.endswith('sitemap.xml') and not os.path.isdir(p):
+ file_dep.append(p)
+ if os.path.isdir(p) and os.path.exists(os.path.join(p, 'index.html')):
+ file_dep.append(p + 'index.html')
+
+ for i in sitemapindex.keys():
+ p = os.path.join(output, urlparse(i).path.replace(base_path, '', 1))
+ if not p.endswith('sitemap.xml') and not os.path.isdir(p):
+ file_dep.append(p)
+ if os.path.isdir(p) and os.path.exists(os.path.join(p, 'index.html')):
+ file_dep.append(p + 'index.html')
+
+ return {'file_dep': file_dep}
yield {
"basename": "_scan_locs",
@@ -217,29 +269,29 @@ class Sitemap(LateTask):
}
yield self.group_task()
- yield {
+ yield apply_filters({
"basename": "sitemap",
"name": sitemap_path,
"targets": [sitemap_path],
"actions": [(write_sitemap,)],
- "uptodate": [config_changed(kw)],
+ "uptodate": [config_changed(kw, 'nikola.plugins.task.sitemap:write')],
"clean": True,
"task_dep": ["render_site"],
"calc_dep": ["_scan_locs:sitemap"],
- }
- yield {
+ }, kw['filters'])
+ yield apply_filters({
"basename": "sitemap",
"name": sitemapindex_path,
"targets": [sitemapindex_path],
"actions": [(write_sitemapindex,)],
- "uptodate": [config_changed(kw)],
+ "uptodate": [config_changed(kw, 'nikola.plugins.task.sitemap:write_index')],
"clean": True,
"file_dep": [sitemap_path]
- }
+ }, kw['filters'])
def get_lastmod(self, p):
if self.site.invariant:
- return '2014-01-01'
+ return '2038-01-01'
else:
return datetime.datetime.fromtimestamp(os.stat(p).st_mtime).isoformat().split('T')[0]
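
The head-sniffing above now happens once, in binary mode, and serves both the doctype check and the new robots-directive check. A self-contained sketch of that filter (indexable is a made-up helper name, not part of the plugin):

    def indexable(real_path):
        # Binary read: ancient or mis-encoded files no longer raise UnicodeDecodeError.
        with open(real_path, 'rb') as fh:
            filehead = fh.read(1024).lower()
        if b'<!doctype html' not in filehead:
            return False    # alexa-verify / google-site-verification style stubs
        robots_directives = (b'<meta name="robots" content="noindex"',
                             b'<meta name="robots" content="none"',
                             b'<meta content="noindex" name="robots"',
                             b'<meta content="none" name="robots"')
        return not any(d in filehead for d in robots_directives)
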
diff --git a/nikola/plugins/task/sources.plugin b/nikola/plugins/task/sources.plugin
index 6224e48..5560df6 100644
--- a/nikola/plugins/task/sources.plugin
+++ b/nikola/plugins/task/sources.plugin
@@ -4,7 +4,7 @@ Module = sources
[Documentation]
Author = Roberto Alsina
-Version = 0.1
+Version = 1.0
Website = http://getnikola.com
Description = Copy page sources into the output.
diff --git a/nikola/plugins/task/sources.py b/nikola/plugins/task/sources.py
index 4c669c2..840a31c 100644
--- a/nikola/plugins/task/sources.py
+++ b/nikola/plugins/task/sources.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -49,6 +49,7 @@ class Sources(Task):
"translations": self.site.config["TRANSLATIONS"],
"output_folder": self.site.config["OUTPUT_FOLDER"],
"default_lang": self.site.config["DEFAULT_LANG"],
+ "show_untranslated_posts": self.site.config['SHOW_UNTRANSLATED_POSTS'],
}
self.site.scan_posts()
@@ -56,6 +57,8 @@ class Sources(Task):
if self.site.config['COPY_SOURCES']:
for lang in kw["translations"]:
for post in self.site.timeline:
+ if not kw["show_untranslated_posts"] and lang not in post.translated_to:
+ continue
if post.meta('password'):
continue
output_name = os.path.join(
@@ -77,5 +80,5 @@ class Sources(Task):
'targets': [output_name],
'actions': [(utils.copy_file, (source, output_name))],
'clean': True,
- 'uptodate': [utils.config_changed(kw)],
+ 'uptodate': [utils.config_changed(kw, 'nikola.plugins.task.sources')],
}
diff --git a/nikola/plugins/task/tags.plugin b/nikola/plugins/task/tags.plugin
index f01e0f8..4ac3800 100644
--- a/nikola/plugins/task/tags.plugin
+++ b/nikola/plugins/task/tags.plugin
@@ -4,7 +4,7 @@ Module = tags
[Documentation]
Author = Roberto Alsina
-Version = 0.1
+Version = 1.0
Website = http://getnikola.com
Description = Render the tag pages and feeds.
diff --git a/nikola/plugins/task/tags.py b/nikola/plugins/task/tags.py
index 8d43f13..832ceff 100644
--- a/nikola/plugins/task/tags.py
+++ b/nikola/plugins/task/tags.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -27,6 +27,8 @@
from __future__ import unicode_literals
import json
import os
+import sys
+import natsort
try:
from urlparse import urljoin
except ImportError:
@@ -43,9 +45,12 @@ class RenderTags(Task):
def set_site(self, site):
site.register_path_handler('tag_index', self.tag_index_path)
+ site.register_path_handler('category_index', self.category_index_path)
site.register_path_handler('tag', self.tag_path)
+ site.register_path_handler('tag_atom', self.tag_atom_path)
site.register_path_handler('tag_rss', self.tag_rss_path)
site.register_path_handler('category', self.category_path)
+ site.register_path_handler('category_atom', self.category_atom_path)
site.register_path_handler('category_rss', self.category_rss_path)
return super(RenderTags, self).set_site(site)
@@ -56,18 +61,26 @@ class RenderTags(Task):
"translations": self.site.config["TRANSLATIONS"],
"blog_title": self.site.config["BLOG_TITLE"],
"site_url": self.site.config["SITE_URL"],
+ "base_url": self.site.config["BASE_URL"],
"messages": self.site.MESSAGES,
"output_folder": self.site.config['OUTPUT_FOLDER'],
"filters": self.site.config['FILTERS'],
+ 'tag_path': self.site.config['TAG_PATH'],
"tag_pages_are_indexes": self.site.config['TAG_PAGES_ARE_INDEXES'],
- "index_display_post_count": self.site.config['INDEX_DISPLAY_POST_COUNT'],
- "index_teasers": self.site.config['INDEX_TEASERS'],
+ 'category_path': self.site.config['CATEGORY_PATH'],
+ 'category_prefix': self.site.config['CATEGORY_PREFIX'],
+ "category_pages_are_indexes": self.site.config['CATEGORY_PAGES_ARE_INDEXES'],
"generate_rss": self.site.config['GENERATE_RSS'],
"rss_teasers": self.site.config["RSS_TEASERS"],
"rss_plain": self.site.config["RSS_PLAIN"],
+ "rss_link_append_query": self.site.config["RSS_LINKS_APPEND_QUERY"],
"show_untranslated_posts": self.site.config['SHOW_UNTRANSLATED_POSTS'],
"feed_length": self.site.config['FEED_LENGTH'],
+ "taglist_minimum_post_count": self.site.config['TAGLIST_MINIMUM_POSTS'],
"tzinfo": self.site.tzinfo,
+ "pretty_urls": self.site.config['PRETTY_URLS'],
+ "strip_indexes": self.site.config['STRIP_INDEXES'],
+ "index_file": self.site.config['INDEX_FILE'],
}
self.site.scan_posts()
@@ -78,6 +91,32 @@ class RenderTags(Task):
if not self.site.posts_per_tag and not self.site.posts_per_category:
return
+ if kw['category_path'] == kw['tag_path']:
+ tags = {self.slugify_tag_name(tag): tag for tag in self.site.posts_per_tag.keys()}
+ cats = {tuple(self.slugify_category_name(category)): category for category in self.site.posts_per_category.keys()}
+ categories = {k[0]: v for k, v in cats.items() if len(k) == 1}
+ intersect = set(tags.keys()) & set(categories.keys())
+ if len(intersect) > 0:
+ for slug in intersect:
+ utils.LOGGER.error("Category '{0}' and tag '{1}' both have the same slug '{2}'!".format('/'.join(categories[slug]), tags[slug], slug))
+ sys.exit(1)
+
+ # Test for category slug clashes
+ categories = {}
+ for category in self.site.posts_per_category.keys():
+ slug = tuple(self.slugify_category_name(category))
+ for part in slug:
+ if len(part) == 0:
+ utils.LOGGER.error("Category '{0}' yields invalid slug '{1}'!".format(category, '/'.join(slug)))
+ sys.exit(1)
+ if slug in categories:
+ other_category = categories[slug]
+ utils.LOGGER.error('You have categories that are too similar: {0} and {1}'.format(category, other_category))
+ utils.LOGGER.error('Category {0} is used in: {1}'.format(category, ', '.join([p.source_path for p in self.site.posts_per_category[category]])))
+ utils.LOGGER.error('Category {0} is used in: {1}'.format(other_category, ', '.join([p.source_path for p in self.site.posts_per_category[other_category]])))
+ sys.exit(1)
+ categories[slug] = category
+
tag_list = list(self.site.posts_per_tag.items())
cat_list = list(self.site.posts_per_category.items())
@@ -92,7 +131,7 @@ class RenderTags(Task):
if kw["generate_rss"]:
yield self.tag_rss(tag, lang, filtered_posts, kw, is_category)
# Render HTML
- if kw['tag_pages_are_indexes']:
+ if kw['category_pages_are_indexes'] if is_category else kw['tag_pages_are_indexes']:
yield self.tag_page_as_index(tag, lang, filtered_posts, kw, is_category)
else:
yield self.tag_page_as_list(tag, lang, filtered_posts, kw, is_category)
@@ -101,19 +140,19 @@ class RenderTags(Task):
for task in render_lists(tag, posts, False):
yield task
- for tag, posts in cat_list:
- if tag == '': # This is uncategorized posts
- continue
- for task in render_lists(tag, posts, True):
+ for path, posts in cat_list:
+ for task in render_lists(path, posts, True):
yield task
# Tag cloud json file
tag_cloud_data = {}
for tag, posts in self.site.posts_per_tag.items():
+ if tag in self.site.config['HIDDEN_TAGS']:
+ continue
tag_posts = dict(posts=[{'title': post.meta[post.default_lang]['title'],
'date': post.date.strftime('%m/%d/%Y'),
'isodate': post.date.isoformat(),
- 'url': post.base_path.replace('cache', '')}
+ 'url': post.permalink(post.default_lang)}
for post in reversed(sorted(self.site.timeline, key=lambda post: post.date))
if tag in post.alltags])
tag_cloud_data[tag] = [len(posts), self.site.link(
@@ -126,48 +165,59 @@ class RenderTags(Task):
with open(output_name, 'w+') as fd:
json.dump(data, fd)
- task = {
- 'basename': str(self.name),
- 'name': str(output_name)
- }
+ if self.site.config['WRITE_TAG_CLOUD']:
+ task = {
+ 'basename': str(self.name),
+ 'name': str(output_name)
+ }
- task['uptodate'] = [utils.config_changed(tag_cloud_data)]
- task['targets'] = [output_name]
- task['actions'] = [(write_tag_data, [tag_cloud_data])]
- task['clean'] = True
- yield task
+ task['uptodate'] = [utils.config_changed(tag_cloud_data, 'nikola.plugins.task.tags:tagdata')]
+ task['targets'] = [output_name]
+ task['actions'] = [(write_tag_data, [tag_cloud_data])]
+ task['clean'] = True
+ yield utils.apply_filters(task, kw['filters'])
- def list_tags_page(self, kw):
+ def _create_tags_page(self, kw, include_tags=True, include_categories=True):
"""a global "all your tags/categories" page for each language"""
- tags = list(self.site.posts_per_tag.keys())
- categories = list(self.site.posts_per_category.keys())
- # We want our tags to be sorted case insensitive
- tags.sort(key=lambda a: a.lower())
- categories.sort(key=lambda a: a.lower())
- if categories != ['']:
- has_categories = True
- else:
- has_categories = False
+ tags = natsort.natsorted([tag for tag in self.site.posts_per_tag.keys()
+ if len(self.site.posts_per_tag[tag]) >= kw["taglist_minimum_post_count"]],
+ alg=natsort.ns.F | natsort.ns.IC)
+ categories = [cat.category_name for cat in self.site.category_hierarchy]
+ has_tags = (tags != []) and include_tags
+ has_categories = (categories != []) and include_categories
template_name = "tags.tmpl"
- kw['tags'] = tags
- kw['categories'] = categories
+ kw = kw.copy()
+ if include_tags:
+ kw['tags'] = tags
+ if include_categories:
+ kw['categories'] = categories
for lang in kw["translations"]:
output_name = os.path.join(
- kw['output_folder'], self.site.path('tag_index', None, lang))
+ kw['output_folder'], self.site.path('tag_index' if has_tags else 'category_index', None, lang))
output_name = output_name
context = {}
- if has_categories:
+ if has_categories and has_tags:
context["title"] = kw["messages"][lang]["Tags and Categories"]
+ elif has_categories:
+ context["title"] = kw["messages"][lang]["Categories"]
else:
context["title"] = kw["messages"][lang]["Tags"]
- context["items"] = [(tag, self.site.link("tag", tag, lang)) for tag
- in tags]
+ if has_tags:
+ context["items"] = [(tag, self.site.link("tag", tag, lang)) for tag
+ in tags]
+ else:
+ context["items"] = None
if has_categories:
context["cat_items"] = [(tag, self.site.link("category", tag, lang)) for tag
in categories]
+ context['cat_hierarchy'] = [(node.name, node.category_name, node.category_path,
+ self.site.link("category", node.category_name),
+ node.indent_levels, node.indent_change_before,
+ node.indent_change_after)
+ for node in self.site.category_hierarchy]
else:
context["cat_items"] = None
- context["permalink"] = self.site.link("tag_index", None, lang)
+ context["permalink"] = self.site.link("tag_index" if has_tags else "category_index", None, lang)
context["description"] = context["title"]
task = self.site.generic_post_list_renderer(
lang,
@@ -177,73 +227,66 @@ class RenderTags(Task):
kw['filters'],
context,
)
- task_cfg = {1: task['uptodate'][0].config, 2: kw}
- task['uptodate'] = [utils.config_changed(task_cfg)]
+ task['uptodate'] = task['uptodate'] + [utils.config_changed(kw, 'nikola.plugins.task.tags:page')]
task['basename'] = str(self.name)
yield task
+ def list_tags_page(self, kw):
+ """a global "all your tags/categories" page for each language"""
+ if self.site.config['TAG_PATH'] == self.site.config['CATEGORY_PATH']:
+ yield self._create_tags_page(kw, True, True)
+ else:
+ yield self._create_tags_page(kw, False, True)
+ yield self._create_tags_page(kw, True, False)
+
+ def _get_title(self, tag, is_category):
+ if is_category:
+ return self.site.parse_category_name(tag)[-1]
+ else:
+ return tag
+
+ def _get_description(self, tag, is_category, lang):
+ descriptions = self.site.config['CATEGORY_PAGES_DESCRIPTIONS'] if is_category else self.site.config['TAG_PAGES_DESCRIPTIONS']
+ return descriptions[lang][tag] if lang in descriptions and tag in descriptions[lang] else None
+
+ def _get_subcategories(self, category):
+ node = self.site.category_hierarchy_lookup[category]
+ return [(child.name, self.site.link("category", child.category_name)) for child in node.children]
+
def tag_page_as_index(self, tag, lang, post_list, kw, is_category):
"""render a sort of index page collection using only this
tag's posts."""
kind = "category" if is_category else "tag"
- def page_name(tagname, i, lang):
- """Given tag, n, returns a page name."""
- name = self.site.path(kind, tag, lang)
- if i:
- name = name.replace('.html', '-{0}.html'.format(i))
- return name
-
- # FIXME: deduplicate this with render_indexes
+ def page_link(i, displayed_i, num_pages, force_addition, extension=None):
+ feed = "_atom" if extension == ".atom" else ""
+ return utils.adjust_name_for_index_link(self.site.link(kind + feed, tag, lang), i, displayed_i, lang, self.site, force_addition, extension)
+
+ def page_path(i, displayed_i, num_pages, force_addition, extension=None):
+ feed = "_atom" if extension == ".atom" else ""
+ return utils.adjust_name_for_index_path(self.site.path(kind + feed, tag, lang), i, displayed_i, lang, self.site, force_addition, extension)
+
+ context_source = {}
+ title = self._get_title(tag, is_category)
+ if kw["generate_rss"]:
+ # On a tag page, the feeds include the tag's feeds
+ rss_link = ("""<link rel="alternate" type="application/rss+xml" """
+ """type="application/rss+xml" title="RSS for tag """
+ """{0} ({1})" href="{2}">""".format(
+ title, lang, self.site.link(kind + "_rss", tag, lang)))
+ context_source['rss_link'] = rss_link
+ if is_category:
+ context_source["category"] = tag
+ context_source["category_path"] = self.site.parse_category_name(tag)
+ context_source["tag"] = title
+ indexes_title = kw["messages"][lang]["Posts about %s"] % title
+ context_source["description"] = self._get_description(tag, is_category, lang)
+ if is_category:
+ context_source["subcategories"] = self._get_subcategories(tag)
template_name = "tagindex.tmpl"
- # Split in smaller lists
- lists = []
- while post_list:
- lists.append(post_list[:kw["index_display_post_count"]])
- post_list = post_list[kw["index_display_post_count"]:]
- num_pages = len(lists)
- for i, post_list in enumerate(lists):
- context = {}
- if kw["generate_rss"]:
- # On a tag page, the feeds include the tag's feeds
- rss_link = ("""<link rel="alternate" type="application/rss+xml" """
- """type="application/rss+xml" title="RSS for tag """
- """{0} ({1})" href="{2}">""".format(
- tag, lang, self.site.link(kind + "_rss", tag, lang)))
- context['rss_link'] = rss_link
- output_name = os.path.join(kw['output_folder'],
- page_name(tag, i, lang))
- context["title"] = kw["messages"][lang][
- "Posts about %s"] % tag
- context["prevlink"] = None
- context["nextlink"] = None
- context['index_teasers'] = kw['index_teasers']
- if i > 1:
- context["prevlink"] = os.path.basename(
- page_name(tag, i - 1, lang))
- if i == 1:
- context["prevlink"] = os.path.basename(
- page_name(tag, 0, lang))
- if i < num_pages - 1:
- context["nextlink"] = os.path.basename(
- page_name(tag, i + 1, lang))
- context["permalink"] = self.site.link(kind, tag, lang)
- context["tag"] = tag
- context["description"] = context["title"]
- task = self.site.generic_post_list_renderer(
- lang,
- post_list,
- output_name,
- template_name,
- kw['filters'],
- context,
- )
- task_cfg = {1: task['uptodate'][0].config, 2: kw}
- task['uptodate'] = [utils.config_changed(task_cfg)]
- task['basename'] = str(self.name)
- yield task
+ yield self.site.generic_index_renderer(lang, post_list, indexes_title, template_name, context_source, kw, str(self.name), page_link, page_path)
def tag_page_as_list(self, tag, lang, post_list, kw, is_category):
"""We render a single flat link list with this tag's posts"""
@@ -253,12 +296,18 @@ class RenderTags(Task):
kind, tag, lang))
context = {}
context["lang"] = lang
- context["title"] = kw["messages"][lang]["Posts about %s"] % tag
+ title = self._get_title(tag, is_category)
+ if is_category:
+ context["category"] = tag
+ context["category_path"] = self.site.parse_category_name(tag)
+ context["tag"] = title
+ context["title"] = kw["messages"][lang]["Posts about %s"] % title
context["posts"] = post_list
context["permalink"] = self.site.link(kind, tag, lang)
- context["tag"] = tag
context["kind"] = kind
- context["description"] = context["title"]
+ context["description"] = self._get_description(tag, is_category, lang)
+ if is_category:
+ context["subcategories"] = self._get_subcategories(tag)
task = self.site.generic_post_list_renderer(
lang,
post_list,
@@ -267,8 +316,7 @@ class RenderTags(Task):
kw['filters'],
context,
)
- task_cfg = {1: task['uptodate'][0].config, 2: kw}
- task['uptodate'] = [utils.config_changed(task_cfg)]
+ task['uptodate'] = task['uptodate'] + [utils.config_changed(kw, 'nikola.plugins.task.tags:list')]
task['basename'] = str(self.name)
yield task
@@ -281,26 +329,29 @@ class RenderTags(Task):
self.site.path(kind + "_rss", tag, lang)))
feed_url = urljoin(self.site.config['BASE_URL'], self.site.link(kind + "_rss", tag, lang).lstrip('/'))
deps = []
+ deps_uptodate = []
post_list = sorted(posts, key=lambda a: a.date)
post_list.reverse()
for post in post_list:
deps += post.deps(lang)
- return {
+ deps_uptodate += post.deps_uptodate(lang)
+ task = {
'basename': str(self.name),
'name': output_name,
'file_dep': deps,
'targets': [output_name],
'actions': [(utils.generic_rss_renderer,
- (lang, "{0} ({1})".format(kw["blog_title"](lang), tag),
+ (lang, "{0} ({1})".format(kw["blog_title"](lang), self._get_title(tag, is_category)),
kw["site_url"], None, post_list,
output_name, kw["rss_teasers"], kw["rss_plain"], kw['feed_length'],
- feed_url))],
+ feed_url, None, kw["rss_link_append_query"]))],
'clean': True,
- 'uptodate': [utils.config_changed(kw)],
+ 'uptodate': [utils.config_changed(kw, 'nikola.plugins.task.tags:rss')] + deps_uptodate,
'task_dep': ['render_posts'],
}
+ return utils.apply_filters(task, kw['filters'])
- def slugify_name(self, name):
+ def slugify_tag_name(self, name):
if self.site.config['SLUG_TAG_PATH']:
name = utils.slugify(name)
return name
@@ -310,30 +361,64 @@ class RenderTags(Task):
self.site.config['TAG_PATH'],
self.site.config['INDEX_FILE']] if _f]
+ def category_index_path(self, name, lang):
+ return [_f for _f in [self.site.config['TRANSLATIONS'][lang],
+ self.site.config['CATEGORY_PATH'],
+ self.site.config['INDEX_FILE']] if _f]
+
def tag_path(self, name, lang):
if self.site.config['PRETTY_URLS']:
return [_f for _f in [
self.site.config['TRANSLATIONS'][lang],
self.site.config['TAG_PATH'],
- self.slugify_name(name),
+ self.slugify_tag_name(name),
self.site.config['INDEX_FILE']] if _f]
else:
return [_f for _f in [
self.site.config['TRANSLATIONS'][lang],
self.site.config['TAG_PATH'],
- self.slugify_name(name) + ".html"] if _f]
+ self.slugify_tag_name(name) + ".html"] if _f]
+
+ def tag_atom_path(self, name, lang):
+ return [_f for _f in [self.site.config['TRANSLATIONS'][lang],
+ self.site.config['TAG_PATH'], self.slugify_tag_name(name) + ".atom"] if
+ _f]
def tag_rss_path(self, name, lang):
return [_f for _f in [self.site.config['TRANSLATIONS'][lang],
- self.site.config['TAG_PATH'], self.slugify_name(name) + ".xml"] if
+ self.site.config['TAG_PATH'], self.slugify_tag_name(name) + ".xml"] if
_f]
+ def slugify_category_name(self, name):
+ path = self.site.parse_category_name(name)
+ if self.site.config['CATEGORY_OUTPUT_FLAT_HIERARCHY']:
+ path = path[-1:] # only the leaf
+ result = [self.slugify_tag_name(part) for part in path]
+ result[0] = self.site.config['CATEGORY_PREFIX'] + result[0]
+ if not self.site.config['PRETTY_URLS']:
+ result = ['-'.join(result)]
+ return result
+
+ def _add_extension(self, path, extension):
+ path[-1] += extension
+ return path
+
def category_path(self, name, lang):
+ if self.site.config['PRETTY_URLS']:
+ return [_f for _f in [self.site.config['TRANSLATIONS'][lang],
+ self.site.config['CATEGORY_PATH']] if
+ _f] + self.slugify_category_name(name) + [self.site.config['INDEX_FILE']]
+ else:
+ return [_f for _f in [self.site.config['TRANSLATIONS'][lang],
+ self.site.config['CATEGORY_PATH']] if
+ _f] + self._add_extension(self.slugify_category_name(name), ".html")
+
+ def category_atom_path(self, name, lang):
return [_f for _f in [self.site.config['TRANSLATIONS'][lang],
- self.site.config['TAG_PATH'], "cat_" + self.slugify_name(name) + ".html"] if
- _f]
+ self.site.config['CATEGORY_PATH']] if
+ _f] + self._add_extension(self.slugify_category_name(name), ".atom")
def category_rss_path(self, name, lang):
return [_f for _f in [self.site.config['TRANSLATIONS'][lang],
- self.site.config['TAG_PATH'], "cat_" + self.slugify_name(name) + ".xml"] if
- _f]
+ self.site.config['CATEGORY_PATH']] if
+ _f] + self._add_extension(self.slugify_category_name(name), ".xml")
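
The category path handlers are easiest to follow with a worked example. Assuming parse_category_name splits a nested name on '/' and CATEGORY_PREFIX is 'cat_', the slug pieces from the code above come out roughly like this (a sketch, not captured output):

    # PRETTY_URLS = True, CATEGORY_OUTPUT_FLAT_HIERARCHY = False
    slugify_category_name('software/python')    # -> ['cat_software', 'python']
    # category_path then yields <lang>/<CATEGORY_PATH>/cat_software/python/<INDEX_FILE>

    # PRETTY_URLS = False: the parts collapse into one '-'-joined file name
    slugify_category_name('software/python')    # -> ['cat_software-python']
    # category_path then yields <lang>/<CATEGORY_PATH>/cat_software-python.html
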
diff --git a/nikola/plugins/template/__init__.py b/nikola/plugins/template/__init__.py
index 6ad8bac..a1d17a6 100644
--- a/nikola/plugins/template/__init__.py
+++ b/nikola/plugins/template/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
diff --git a/nikola/plugins/template/jinja.plugin b/nikola/plugins/template/jinja.plugin
index 53b0fec..0bdcb94 100644
--- a/nikola/plugins/template/jinja.plugin
+++ b/nikola/plugins/template/jinja.plugin
@@ -4,6 +4,6 @@ Module = jinja
[Documentation]
Author = Roberto Alsina
-Version = 0.1
+Version = 1.0
Website = http://getnikola.com
Description = Support for Jinja2 templates.
diff --git a/nikola/plugins/template/jinja.py b/nikola/plugins/template/jinja.py
index 5156f38..82e8397 100644
--- a/nikola/plugins/template/jinja.py
+++ b/nikola/plugins/template/jinja.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -114,8 +114,7 @@ class JinjaTemplates(TemplateSystem):
ast = self.lookup.parse(source)
dep_names = meta.find_referenced_templates(ast)
for dep_name in dep_names:
- if (dep_name not in visited_templates
- and dep_name is not None):
+ if (dep_name not in visited_templates and dep_name is not None):
visited_templates.add(dep_name)
queue.append(dep_name)
self.dependency_cache[template_name] = deps
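
For context on the loop being tightened above: the dependency walk leans on Jinja2's meta module to find every template referenced from a parsed source. A small standalone illustration (the template names are made up):

    from jinja2 import Environment, DictLoader, meta

    env = Environment(loader=DictLoader({
        'post.tmpl': '{% extends "base.tmpl" %}',
        'base.tmpl': '{% include "header.tmpl" %}',
        'header.tmpl': 'hello',
    }))

    def referenced(name):
        source = env.loader.get_source(env, name)[0]
        ast = env.parse(source)
        # find_referenced_templates can yield None for dynamic names; skip those,
        # exactly as the guard in the loop above does.
        return [d for d in meta.find_referenced_templates(ast) if d is not None]

    print(referenced('post.tmpl'))   # ['base.tmpl']; the plugin walks these recursively
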
diff --git a/nikola/plugins/template/mako.plugin b/nikola/plugins/template/mako.plugin
index 71f2c71..2fe6d98 100644
--- a/nikola/plugins/template/mako.plugin
+++ b/nikola/plugins/template/mako.plugin
@@ -4,6 +4,6 @@ Module = mako
[Documentation]
Author = Roberto Alsina
-Version = 0.1
+Version = 1.0
Website = http://getnikola.com
Description = Support for Mako templates.
diff --git a/nikola/plugins/template/mako.py b/nikola/plugins/template/mako.py
index b9d856e..e5545f6 100644
--- a/nikola/plugins/template/mako.py
+++ b/nikola/plugins/template/mako.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2014 Roberto Alsina and others.
+# Copyright © 2012-2015 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -115,7 +115,7 @@ class MakoTemplates(TemplateSystem):
def render_template_to_string(self, template, context):
""" Render template to a string using context. """
- context = context.update(self.filters)
+ context.update(self.filters)
return Template(template).render(**context)
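
The one-line Mako change fixes a real bug: dict.update mutates in place and returns None, so the old assignment replaced the context with None just before it was splatted into render(). A tiny illustration of the difference:

    context = {'title': 'Hello'}
    filters = {'striphtml': lambda s: s}

    broken = context.update(filters)   # update() returns None
    print(broken)                      # None -> the old code effectively called render(**None)

    context.update(filters)            # fixed form: mutate in place, keep using context
    print(sorted(context))             # ['striphtml', 'title']
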