summaryrefslogtreecommitdiffstats
path: root/nikola/plugins
diff options
context:
space:
mode:
Diffstat (limited to 'nikola/plugins')
-rw-r--r--nikola/plugins/basic_import.py5
-rw-r--r--nikola/plugins/command/auto.py4
-rw-r--r--nikola/plugins/command/bootswatch_theme.py5
-rw-r--r--nikola/plugins/command/check.py12
-rw-r--r--nikola/plugins/command/console.py116
-rw-r--r--nikola/plugins/command/deploy.py10
-rw-r--r--nikola/plugins/command/github_deploy.plugin9
-rw-r--r--nikola/plugins/command/github_deploy.py271
-rw-r--r--nikola/plugins/command/import_blogger.plugin10
-rw-r--r--nikola/plugins/command/import_blogger.py228
-rw-r--r--nikola/plugins/command/import_feed.plugin10
-rw-r--r--nikola/plugins/command/import_feed.py200
-rw-r--r--nikola/plugins/command/import_wordpress.py10
-rw-r--r--nikola/plugins/command/init.py346
-rw-r--r--nikola/plugins/command/install_plugin.plugin10
-rw-r--r--nikola/plugins/command/install_plugin.py188
-rw-r--r--nikola/plugins/command/install_theme.py4
-rw-r--r--nikola/plugins/command/mincss.plugin10
-rw-r--r--nikola/plugins/command/mincss.py75
-rw-r--r--nikola/plugins/command/new_page.py7
-rw-r--r--nikola/plugins/command/new_post.py105
-rw-r--r--nikola/plugins/command/planetoid.plugin9
-rw-r--r--nikola/plugins/command/planetoid/__init__.py289
-rw-r--r--nikola/plugins/command/plugin.plugin10
-rw-r--r--nikola/plugins/command/plugin.py319
-rw-r--r--nikola/plugins/command/serve.py6
-rw-r--r--nikola/plugins/compile/asciidoc.py71
-rw-r--r--nikola/plugins/compile/bbcode.py80
-rw-r--r--nikola/plugins/compile/html.py24
-rw-r--r--nikola/plugins/compile/ipynb.plugin2
-rw-r--r--nikola/plugins/compile/ipynb/__init__.py26
-rw-r--r--nikola/plugins/compile/markdown/__init__.py49
-rw-r--r--nikola/plugins/compile/markdown/mdx_gist.plugin (renamed from nikola/plugins/compile/bbcode.plugin)7
-rw-r--r--nikola/plugins/compile/markdown/mdx_gist.py18
-rw-r--r--nikola/plugins/compile/markdown/mdx_nikola.plugin (renamed from nikola/plugins/compile/asciidoc.plugin)7
-rw-r--r--nikola/plugins/compile/markdown/mdx_nikola.py24
-rw-r--r--nikola/plugins/compile/markdown/mdx_podcast.plugin9
-rw-r--r--nikola/plugins/compile/markdown/mdx_podcast.py14
-rw-r--r--nikola/plugins/compile/misaka.plugin10
-rw-r--r--nikola/plugins/compile/misaka.py87
-rw-r--r--nikola/plugins/compile/pandoc.py24
-rw-r--r--nikola/plugins/compile/php.py24
-rw-r--r--nikola/plugins/compile/rest/__init__.py26
-rw-r--r--nikola/plugins/compile/rest/chart.py13
-rw-r--r--nikola/plugins/compile/rest/doc.py2
-rw-r--r--nikola/plugins/compile/rest/listing.py1
-rw-r--r--nikola/plugins/compile/rest/post_list.py5
-rw-r--r--nikola/plugins/compile/rest/slides.py9
-rw-r--r--nikola/plugins/compile/rest/vimeo.py6
-rw-r--r--nikola/plugins/compile/rest/youtube.py2
-rw-r--r--nikola/plugins/compile/textile.plugin10
-rw-r--r--nikola/plugins/compile/textile.py76
-rw-r--r--nikola/plugins/compile/txt2tags.plugin10
-rw-r--r--nikola/plugins/compile/txt2tags.py76
-rw-r--r--nikola/plugins/compile/wiki.plugin10
-rw-r--r--nikola/plugins/compile/wiki.py75
-rw-r--r--nikola/plugins/loghandler/stderr.py6
-rw-r--r--nikola/plugins/task/archive.py21
-rw-r--r--nikola/plugins/task/build_less.plugin10
-rw-r--r--nikola/plugins/task/build_less.py118
-rw-r--r--nikola/plugins/task/build_sass.plugin9
-rw-r--r--nikola/plugins/task/build_sass.py139
-rw-r--r--nikola/plugins/task/bundles.py27
-rw-r--r--nikola/plugins/task/copy_assets.py31
-rw-r--r--nikola/plugins/task/galleries.py126
-rw-r--r--nikola/plugins/task/indexes.py62
-rw-r--r--nikola/plugins/task/listings.py67
-rw-r--r--nikola/plugins/task/localsearch.plugin10
-rw-r--r--nikola/plugins/task/localsearch/MIT-LICENSE.txt20
-rw-r--r--nikola/plugins/task/localsearch/__init__.py106
-rw-r--r--nikola/plugins/task/localsearch/files/assets/css/img/loader.gifbin4178 -> 0 bytes
-rw-r--r--nikola/plugins/task/localsearch/files/assets/css/img/search.pngbin315 -> 0 bytes
-rw-r--r--nikola/plugins/task/localsearch/files/assets/css/tipuesearch.css159
-rw-r--r--nikola/plugins/task/localsearch/files/assets/js/tipuesearch.js384
-rw-r--r--nikola/plugins/task/localsearch/files/assets/js/tipuesearch_set.js21
-rw-r--r--nikola/plugins/task/localsearch/files/tipue_search.html31
-rw-r--r--nikola/plugins/task/mustache.plugin10
-rw-r--r--nikola/plugins/task/mustache/__init__.py184
-rw-r--r--nikola/plugins/task/mustache/mustache-template.html29
-rw-r--r--nikola/plugins/task/mustache/mustache.html34
-rw-r--r--nikola/plugins/task/pages.py5
-rw-r--r--nikola/plugins/task/posts.py18
-rw-r--r--nikola/plugins/task/redirect.py2
-rw-r--r--nikola/plugins/task/robots.plugin10
-rw-r--r--nikola/plugins/task/robots.py83
-rw-r--r--nikola/plugins/task/rss.py20
-rw-r--r--nikola/plugins/task/sitemap/__init__.py124
-rw-r--r--nikola/plugins/task/tags.py45
-rw-r--r--nikola/plugins/template/jinja.py16
-rw-r--r--nikola/plugins/template/mako.py22
90 files changed, 1755 insertions, 3259 deletions
diff --git a/nikola/plugins/basic_import.py b/nikola/plugins/basic_import.py
index 27c0eb4..7b23f9c 100644
--- a/nikola/plugins/basic_import.py
+++ b/nikola/plugins/basic_import.py
@@ -29,6 +29,7 @@ import codecs
import csv
import datetime
import os
+from pkg_resources import resource_filename
try:
from urlparse import urlparse
@@ -89,13 +90,13 @@ class ImportMixin(object):
def generate_base_site(self):
if not os.path.exists(self.output_folder):
- os.system('nikola init ' + self.output_folder)
+ os.system('nikola init -q ' + self.output_folder)
else:
self.import_into_existing_site = True
utils.LOGGER.notice('The folder {0} already exists - assuming that this is a '
'already existing Nikola site.'.format(self.output_folder))
- filename = os.path.join(os.path.dirname(utils.__file__), 'conf.py.in')
+ filename = resource_filename('nikola', 'conf.py.in')
# The 'strict_undefined=True' will give the missing symbol name if any,
# (ex: NameError: 'THEME' is not defined )
# for other errors from mako/runtime.py, you can add format_extensions=True ,
diff --git a/nikola/plugins/command/auto.py b/nikola/plugins/command/auto.py
index d707d53..c46e0a3 100644
--- a/nikola/plugins/command/auto.py
+++ b/nikola/plugins/command/auto.py
@@ -73,11 +73,11 @@ class CommandAuto(Command):
server.watch('conf.py', 'nikola build')
server.watch('themes/', 'nikola build')
server.watch('templates/', 'nikola build')
- server.watch(self.site.config['GALLERY_PATH'])
+ server.watch(self.site.config['GALLERY_PATH'], 'nikola build')
for item in self.site.config['post_pages']:
server.watch(os.path.dirname(item[0]), 'nikola build')
for item in self.site.config['FILES_FOLDERS']:
- server.watch(os.path.dirname(item), 'nikola build')
+ server.watch(item, 'nikola build')
out_folder = self.site.config['OUTPUT_FOLDER']
if options and options.get('browser'):
diff --git a/nikola/plugins/command/bootswatch_theme.py b/nikola/plugins/command/bootswatch_theme.py
index 82c47d2..871a5ce 100644
--- a/nikola/plugins/command/bootswatch_theme.py
+++ b/nikola/plugins/command/bootswatch_theme.py
@@ -92,7 +92,10 @@ class CommandBootswatchTheme(Command):
LOGGER.info("Creating '{0}' theme from '{1}' and '{2}'".format(name, swatch, parent))
utils.makedirs(os.path.join('themes', name, 'assets', 'css'))
for fname in ('bootstrap.min.css', 'bootstrap.css'):
- url = '/'.join(('http://bootswatch.com', version, swatch, fname))
+ url = 'http://bootswatch.com'
+ if version:
+ url += '/' + version
+ url = '/'.join((url, swatch, fname))
LOGGER.info("Downloading: " + url)
data = requests.get(url).text
with open(os.path.join('themes', name, 'assets', 'css', fname),
diff --git a/nikola/plugins/command/check.py b/nikola/plugins/command/check.py
index 26db321..76571a0 100644
--- a/nikola/plugins/command/check.py
+++ b/nikola/plugins/command/check.py
@@ -51,7 +51,7 @@ def real_scan_files(site):
fname = task.split(':', 1)[-1]
task_fnames.add(fname)
# And now check that there are no non-target files
- for root, dirs, files in os.walk(output_folder):
+ for root, dirs, files in os.walk(output_folder, followlinks=True):
for src_name in files:
fname = os.path.join(root, src_name)
real_fnames.add(fname)
@@ -139,6 +139,8 @@ class CommandCheck(Command):
rv = False
self.whitelist = [re.compile(x) for x in self.site.config['LINK_CHECK_WHITELIST']]
base_url = urlparse(self.site.config['BASE_URL'])
+ self.existing_targets.add(self.site.config['SITE_URL'])
+ self.existing_targets.add(self.site.config['BASE_URL'])
url_type = self.site.config['URL_TYPE']
try:
filename = task.split(":")[-1]
@@ -166,11 +168,15 @@ class CommandCheck(Command):
elif url_type in ('full_path', 'absolute'):
target_filename = os.path.abspath(
os.path.join(os.path.dirname(filename), parsed.path))
- if parsed.path.endswith('/'): # abspath removes trailing slashes
+ if parsed.path in ['', '/']:
+ target_filename = os.path.join(self.site.config['OUTPUT_FOLDER'], self.site.config['INDEX_FILE'])
+ elif parsed.path.endswith('/'): # abspath removes trailing slashes
target_filename += '/{0}'.format(self.site.config['INDEX_FILE'])
if target_filename.startswith(base_url.path):
target_filename = target_filename[len(base_url.path):]
target_filename = os.path.join(self.site.config['OUTPUT_FOLDER'], target_filename)
+ if parsed.path in ['', '/']:
+ target_filename = os.path.join(self.site.config['OUTPUT_FOLDER'], self.site.config['INDEX_FILE'])
if any(re.match(x, target_filename) for x in self.whitelist):
continue
@@ -233,7 +239,7 @@ class CommandCheck(Command):
return failure
def clean_files(self):
- only_on_output, _ = self.real_scan_files()
+ only_on_output, _ = real_scan_files(self.site)
for f in only_on_output:
os.unlink(f)
return True
diff --git a/nikola/plugins/command/console.py b/nikola/plugins/command/console.py
index b0a8958..9dfc975 100644
--- a/nikola/plugins/command/console.py
+++ b/nikola/plugins/command/console.py
@@ -30,7 +30,7 @@ import os
from nikola import __version__
from nikola.plugin_categories import Command
-from nikola.utils import get_logger, STDERR_HANDLER
+from nikola.utils import get_logger, STDERR_HANDLER, req_missing
LOGGER = get_logger('console', STDERR_HANDLER)
@@ -41,86 +41,102 @@ class CommandConsole(Command):
shells = ['ipython', 'bpython', 'plain']
doc_purpose = "start an interactive Python console with access to your site"
doc_description = """\
-Order of resolution: IPython → bpython [deprecated] → plain Python interpreter
-The site engine is accessible as `SITE`, and the config as `conf`."""
- header = "Nikola v" + __version__ + " -- {0} Console (conf = configuration, SITE = site engine)"
+The site engine is accessible as `site`, the config file as `conf`, and commands are available as `commands`.
+If there is no console to use specified (as -b, -i, -p) it tries IPython, then falls back to bpython, and finally falls back to the plain Python console."""
+ header = "Nikola v" + __version__ + " -- {0} Console (conf = configuration file, site = site engine, commands = nikola commands)"
cmd_options = [
{
+ 'name': 'bpython',
+ 'short': 'b',
+ 'long': 'bpython',
+ 'type': bool,
+ 'default': False,
+ 'help': 'Use bpython',
+ },
+ {
+ 'name': 'ipython',
+ 'short': 'i',
+ 'long': 'plain',
+ 'type': bool,
+ 'default': False,
+ 'help': 'Use IPython',
+ },
+ {
'name': 'plain',
'short': 'p',
'long': 'plain',
'type': bool,
'default': False,
- 'help': 'Force the plain Python console',
- }
+ 'help': 'Use the plain Python interpreter',
+ },
]
- def ipython(self):
+ def ipython(self, willful=True):
"""IPython shell."""
- from nikola import Nikola
try:
- import conf
- except ImportError:
- LOGGER.error("No configuration found, cannot run the console.")
- else:
import IPython
- SITE = Nikola(**conf.__dict__)
- SITE.scan_posts()
+ except ImportError as e:
+ if willful:
+ req_missing(['IPython'], 'use the IPython console')
+ raise e # That’s how _execute knows whether to try something else.
+ else:
+ site = self.context['site'] # NOQA
+ conf = self.context['conf'] # NOQA
+ commands = self.context['commands'] # NOQA
IPython.embed(header=self.header.format('IPython'))
- def bpython(self):
+ def bpython(self, willful=True):
"""bpython shell."""
- from nikola import Nikola
try:
- import conf
- except ImportError:
- LOGGER.error("No configuration found, cannot run the console.")
- else:
import bpython
- SITE = Nikola(**conf.__dict__)
- SITE.scan_posts()
- gl = {'conf': conf, 'SITE': SITE, 'Nikola': Nikola}
- bpython.embed(banner=self.header.format(
- 'bpython (Slightly Deprecated)'), locals_=gl)
+ except ImportError as e:
+ if willful:
+ req_missing(['bpython'], 'use the bpython console')
+ raise e # That’s how _execute knows whether to try something else.
+ else:
+ bpython.embed(banner=self.header.format('bpython'), locals_=self.context)
- def plain(self):
+ def plain(self, willful=True):
"""Plain Python shell."""
- from nikola import Nikola
+ import code
try:
- import conf
- SITE = Nikola(**conf.__dict__)
- SITE.scan_posts()
- gl = {'conf': conf, 'SITE': SITE, 'Nikola': Nikola}
+ import readline
except ImportError:
- LOGGER.error("No configuration found, cannot run the console.")
+ pass
else:
- import code
+ import rlcompleter
+ readline.set_completer(rlcompleter.Completer(self.context).complete)
+ readline.parse_and_bind("tab:complete")
+
+ pythonrc = os.environ.get("PYTHONSTARTUP")
+ if pythonrc and os.path.isfile(pythonrc):
try:
- import readline
- except ImportError:
+ execfile(pythonrc) # NOQA
+ except NameError:
pass
- else:
- import rlcompleter
- readline.set_completer(rlcompleter.Completer(gl).complete)
- readline.parse_and_bind("tab:complete")
- pythonrc = os.environ.get("PYTHONSTARTUP")
- if pythonrc and os.path.isfile(pythonrc):
- try:
- execfile(pythonrc) # NOQA
- except NameError:
- pass
-
- code.interact(local=gl, banner=self.header.format('Python'))
+ code.interact(local=self.context, banner=self.header.format('Python'))
def _execute(self, options, args):
"""Start the console."""
- if options['plain']:
- self.plain()
+ self.site.scan_posts()
+ # Create nice object with all commands:
+
+ self.context = {
+ 'conf': self.site.config,
+ 'site': self.site,
+ 'commands': self.site.commands,
+ }
+ if options['bpython']:
+ self.bpython(True)
+ elif options['ipython']:
+ self.ipython(True)
+ elif options['plain']:
+ self.plain(True)
else:
for shell in self.shells:
try:
- return getattr(self, shell)()
+ return getattr(self, shell)(False)
except ImportError:
pass
raise ImportError
diff --git a/nikola/plugins/command/deploy.py b/nikola/plugins/command/deploy.py
index bd1c15f..1bec1d3 100644
--- a/nikola/plugins/command/deploy.py
+++ b/nikola/plugins/command/deploy.py
@@ -31,7 +31,6 @@ import os
import sys
import subprocess
import time
-import pytz
from blinker import signal
@@ -62,10 +61,10 @@ class CommandDeploy(Command):
deploy_drafts = self.site.config.get('DEPLOY_DRAFTS', True)
deploy_future = self.site.config.get('DEPLOY_FUTURE', False)
+ undeployed_posts = []
if not (deploy_drafts and deploy_future):
# Remove drafts and future posts
out_dir = self.site.config['OUTPUT_FOLDER']
- undeployed_posts = []
self.site.scan_posts()
for post in self.site.timeline:
if (not deploy_drafts and post.is_draft) or \
@@ -114,9 +113,6 @@ class CommandDeploy(Command):
"""
- if undeployed is None:
- undeployed = []
-
event = {
'last_deploy': last_deploy,
'new_deploy': new_deploy,
@@ -124,11 +120,9 @@ class CommandDeploy(Command):
'undeployed': undeployed
}
- tzinfo = pytz.timezone(self.site.config['TIMEZONE'])
-
deployed = [
entry for entry in self.site.timeline
- if entry.date > (last_deploy.replace(tzinfo=tzinfo) if tzinfo else last_deploy) and entry not in undeployed
+ if entry.date > last_deploy.replace(tzinfo=self.site.tzinfo) and entry not in undeployed
]
event['deployed'] = deployed
diff --git a/nikola/plugins/command/github_deploy.plugin b/nikola/plugins/command/github_deploy.plugin
new file mode 100644
index 0000000..4cbc422
--- /dev/null
+++ b/nikola/plugins/command/github_deploy.plugin
@@ -0,0 +1,9 @@
+[Core]
+Name = github_deploy
+Module = github_deploy
+
+[Documentation]
+Author = Puneeth Chaganti
+Version = 0.1
+Website = http://getnikola.com
+Description = Deploy the site to GitHub pages.
diff --git a/nikola/plugins/command/github_deploy.py b/nikola/plugins/command/github_deploy.py
new file mode 100644
index 0000000..d4dd8c5
--- /dev/null
+++ b/nikola/plugins/command/github_deploy.py
@@ -0,0 +1,271 @@
+# -*- coding: utf-8 -*-
+
+# Copyright © 2014 Puneeth Chaganti and others.
+
+# Permission is hereby granted, free of charge, to any
+# person obtaining a copy of this software and associated
+# documentation files (the "Software"), to deal in the
+# Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the
+# Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice
+# shall be included in all copies or substantial portions of
+# the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
+# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+from __future__ import print_function
+import os
+import shutil
+import subprocess
+import sys
+from textwrap import dedent
+
+from nikola.plugin_categories import Command
+from nikola.plugins.command.check import real_scan_files
+from nikola.utils import ask_yesno, get_logger
+from nikola.__main__ import main
+from nikola import __version__
+
+
+def uni_check_output(*args, **kwargs):
+ o = subprocess.check_output(*args, **kwargs)
+ return o.decode('utf-8')
+
+
+class CommandGitHubDeploy(Command):
+ """ Deploy site to GitHub pages. """
+ name = 'github_deploy'
+
+ doc_usage = ''
+ doc_purpose = 'deploy the site to GitHub pages'
+ doc_description = dedent(
+ """\
+ This command can be used to deploy your site to GitHub pages.
+ It performs the following actions:
+
+ 1. Ensure that your site is a git repository, and git is on the PATH.
+ 2. Ensure that the output directory is not committed on the
+ source branch.
+ 3. Check for changes, and prompt the user to continue, if required.
+ 4. Build the site
+ 5. Clean any files that are "unknown" to Nikola.
+ 6. Create a deploy branch, if one doesn't exist.
+ 7. Commit the output to this branch. (NOTE: Any untracked source
+ files, may get committed at this stage, on the wrong branch!)
+ 8. Push and deploy!
+
+ NOTE: This command needs your site to be a git repository, with a
+ master branch (or a different branch, configured using
+ GITHUB_SOURCE_BRANCH if you are pushing to user.github
+ .io/organization.github.io pages) containing the sources of your
+ site. You also, obviously, need to have `git` on your PATH,
+ and should be able to push to the repository specified as the remote
+ (origin, by default).
+ """
+ )
+
+ logger = None
+
+ _deploy_branch = ''
+ _source_branch = ''
+ _remote_name = ''
+
+ def _execute(self, command, args):
+
+ self.logger = get_logger(
+ CommandGitHubDeploy.name, self.site.loghandlers
+ )
+ self._source_branch = self.site.config.get(
+ 'GITHUB_SOURCE_BRANCH', 'master'
+ )
+ self._deploy_branch = self.site.config.get(
+ 'GITHUB_DEPLOY_BRANCH', 'gh-pages'
+ )
+ self._remote_name = self.site.config.get(
+ 'GITHUB_REMOTE_NAME', 'origin'
+ )
+
+ self._ensure_git_repo()
+
+ self._exit_if_output_committed()
+
+ if not self._prompt_continue():
+ return
+
+ build = main(['build'])
+ if build != 0:
+ self.logger.error('Build failed, not deploying to GitHub')
+ sys.exit(build)
+
+ only_on_output, _ = real_scan_files(self.site)
+ for f in only_on_output:
+ os.unlink(f)
+
+ self._checkout_deploy_branch()
+
+ self._copy_output()
+
+ self._commit_and_push()
+
+ return
+
+ def _commit_and_push(self):
+ """ Commit all the files and push. """
+
+ deploy = self._deploy_branch
+ source = self._source_branch
+ remote = self._remote_name
+
+ source_commit = uni_check_output(['git', 'rev-parse', source])
+ commit_message = (
+ 'Nikola auto commit.\n\n'
+ 'Source commit: %s'
+ 'Nikola version: %s' % (source_commit, __version__)
+ )
+
+ commands = [
+ ['git', 'add', '-A'],
+ ['git', 'commit', '-m', commit_message],
+ ['git', 'push', '-f', remote, '%s:%s' % (deploy, deploy)],
+ ['git', 'checkout', source],
+ ]
+
+ for command in commands:
+ self.logger.info("==> {0}".format(command))
+ try:
+ subprocess.check_call(command)
+ except subprocess.CalledProcessError as e:
+ self.logger.error(
+ 'Failed GitHub deployment — command {0} '
+ 'returned {1}'.format(e.cmd, e.returncode)
+ )
+ sys.exit(e.returncode)
+
+ def _copy_output(self):
+ """ Copy all output to the top level directory. """
+ output_folder = self.site.config['OUTPUT_FOLDER']
+ for each in os.listdir(output_folder):
+ if os.path.exists(each):
+ if os.path.isdir(each):
+ shutil.rmtree(each)
+
+ else:
+ os.unlink(each)
+
+ shutil.move(os.path.join(output_folder, each), '.')
+
+ def _checkout_deploy_branch(self):
+ """ Check out the deploy branch
+
+ Creates an orphan branch if not present.
+
+ """
+
+ deploy = self._deploy_branch
+
+ try:
+ subprocess.check_call(
+ [
+ 'git', 'show-ref', '--verify', '--quiet',
+ 'refs/heads/%s' % deploy
+ ]
+ )
+ except subprocess.CalledProcessError:
+ self._create_orphan_deploy_branch()
+ else:
+ subprocess.check_call(['git', 'checkout', deploy])
+
+ def _create_orphan_deploy_branch(self):
+ """ Create an orphan deploy branch """
+
+ result = subprocess.check_call(
+ ['git', 'checkout', '--orphan', self._deploy_branch]
+ )
+ if result != 0:
+ self.logger.error('Failed to create a deploy branch')
+ sys.exit(1)
+
+ result = subprocess.check_call(['git', 'rm', '-rf', '.'])
+ if result != 0:
+ self.logger.error('Failed to create a deploy branch')
+ sys.exit(1)
+
+ with open('.gitignore', 'w') as f:
+ f.write('%s\n' % self.site.config['OUTPUT_FOLDER'])
+ f.write('%s\n' % self.site.config['CACHE_FOLDER'])
+ f.write('*.pyc\n')
+ f.write('*.db\n')
+
+ subprocess.check_call(['git', 'add', '.gitignore'])
+ subprocess.check_call(['git', 'commit', '-m', 'Add .gitignore'])
+
+ def _ensure_git_repo(self):
+ """ Ensure that the site is a git-repo.
+
+ Also make sure that a remote with the specified name exists.
+
+ """
+
+ try:
+ remotes = uni_check_output(['git', 'remote'])
+ except subprocess.CalledProcessError as e:
+ self.logger.notice('github_deploy needs a git repository!')
+ sys.exit(e.returncode)
+ except OSError as e:
+ import errno
+ self.logger.error('Running git failed with {0}'.format(e))
+ if e.errno == errno.ENOENT:
+ self.logger.notice('Is git on the PATH?')
+ sys.exit(1)
+ else:
+ if self._remote_name not in remotes:
+ self.logger.error(
+ 'Need a remote called "%s" configured' % self._remote_name
+ )
+ sys.exit(1)
+
+ def _exit_if_output_committed(self):
+ """ Exit if the output folder is committed on the source branch. """
+
+ source = self._source_branch
+ subprocess.check_call(['git', 'checkout', source])
+
+ output_folder = self.site.config['OUTPUT_FOLDER']
+ output_log = uni_check_output(
+ ['git', 'ls-files', '--', output_folder]
+ )
+
+ if len(output_log.strip()) > 0:
+ self.logger.error(
+ 'Output folder is committed on the source branch. '
+ 'Cannot proceed until it is removed.'
+ )
+ sys.exit(1)
+
+ def _prompt_continue(self):
+ """ Show uncommitted changes, and ask if user wants to continue. """
+
+ changes = uni_check_output(['git', 'status', '--porcelain'])
+ if changes.strip():
+ changes = uni_check_output(['git', 'status']).strip()
+ message = (
+ "You have the following changes:\n%s\n\n"
+ "Anything not committed, and unknown to Nikola may be lost, "
+ "or committed onto the wrong branch. Do you wish to continue?"
+ ) % changes
+ proceed = ask_yesno(message, False)
+ else:
+ proceed = True
+
+ return proceed
diff --git a/nikola/plugins/command/import_blogger.plugin b/nikola/plugins/command/import_blogger.plugin
deleted file mode 100644
index 91a7cb6..0000000
--- a/nikola/plugins/command/import_blogger.plugin
+++ /dev/null
@@ -1,10 +0,0 @@
-[Core]
-Name = import_blogger
-Module = import_blogger
-
-[Documentation]
-Author = Roberto Alsina
-Version = 0.2
-Website = http://getnikola.com
-Description = Import a blogger site from a XML dump.
-
diff --git a/nikola/plugins/command/import_blogger.py b/nikola/plugins/command/import_blogger.py
deleted file mode 100644
index dd629c4..0000000
--- a/nikola/plugins/command/import_blogger.py
+++ /dev/null
@@ -1,228 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright © 2012-2014 Roberto Alsina and others.
-
-# Permission is hereby granted, free of charge, to any
-# person obtaining a copy of this software and associated
-# documentation files (the "Software"), to deal in the
-# Software without restriction, including without limitation
-# the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the
-# Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice
-# shall be included in all copies or substantial portions of
-# the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
-# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
-# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
-# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
-# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-from __future__ import unicode_literals, print_function
-import datetime
-import os
-import time
-
-try:
- from urlparse import urlparse
-except ImportError:
- from urllib.parse import urlparse # NOQA
-
-try:
- import feedparser
-except ImportError:
- feedparser = None # NOQA
-
-from nikola.plugin_categories import Command
-from nikola import utils
-from nikola.utils import req_missing
-from nikola.plugins.basic_import import ImportMixin
-from nikola.plugins.command.init import SAMPLE_CONF, prepare_config
-
-LOGGER = utils.get_logger('import_blogger', utils.STDERR_HANDLER)
-
-
-class CommandImportBlogger(Command, ImportMixin):
- """Import a blogger dump."""
-
- name = "import_blogger"
- needs_config = False
- doc_usage = "[options] blogger_export_file"
- doc_purpose = "import a blogger dump"
- cmd_options = ImportMixin.cmd_options + [
- {
- 'name': 'exclude_drafts',
- 'long': 'no-drafts',
- 'short': 'd',
- 'default': False,
- 'type': bool,
- 'help': "Don't import drafts",
- },
- ]
-
- def _execute(self, options, args):
- """Import a Blogger blog from an export file into a Nikola site."""
- # Parse the data
- if feedparser is None:
- req_missing(['feedparser'], 'import Blogger dumps')
- return
-
- if not args:
- print(self.help())
- return
-
- options['filename'] = args[0]
- self.blogger_export_file = options['filename']
- self.output_folder = options['output_folder']
- self.import_into_existing_site = False
- self.exclude_drafts = options['exclude_drafts']
- self.url_map = {}
- channel = self.get_channel_from_file(self.blogger_export_file)
- self.context = self.populate_context(channel)
- conf_template = self.generate_base_site()
- self.context['REDIRECTIONS'] = self.configure_redirections(
- self.url_map)
-
- self.import_posts(channel)
- self.write_urlmap_csv(
- os.path.join(self.output_folder, 'url_map.csv'), self.url_map)
-
- conf_out_path = self.get_configuration_output_path()
- # if it tracebacks here, look a comment in
- # basic_import.Import_Mixin.generate_base_site
- conf_template_render = conf_template.render(**prepare_config(self.context))
- self.write_configuration(conf_out_path, conf_template_render)
-
- @classmethod
- def get_channel_from_file(cls, filename):
- if not os.path.isfile(filename):
- raise Exception("Missing file: %s" % filename)
- return feedparser.parse(filename)
-
- @staticmethod
- def populate_context(channel):
- context = SAMPLE_CONF.copy()
- context['DEFAULT_LANG'] = 'en' # blogger doesn't include the language
- # in the dump
- context['BLOG_TITLE'] = channel.feed.title
-
- context['BLOG_DESCRIPTION'] = '' # Missing in the dump
- context['SITE_URL'] = channel.feed.link
- context['BLOG_EMAIL'] = channel.feed.author_detail.email
- context['BLOG_AUTHOR'] = channel.feed.author_detail.name
- context['POSTS'] = '''(
- ("posts/*.txt", "posts", "post.tmpl"),
- ("posts/*.rst", "posts", "post.tmpl"),
- ("posts/*.html", "posts", "post.tmpl"),
- )'''
- context['PAGES'] = '''(
- ("articles/*.txt", "articles", "story.tmpl"),
- ("articles/*.rst", "articles", "story.tmpl"),
- )'''
- context['COMPILERS'] = '''{
- "rest": ('.txt', '.rst'),
- "markdown": ('.md', '.mdown', '.markdown', '.wp'),
- "html": ('.html', '.htm')
- }
- '''
-
- return context
-
- def import_item(self, item, out_folder=None):
- """Takes an item from the feed and creates a post file."""
- if out_folder is None:
- out_folder = 'posts'
-
- # link is something like http://foo.com/2012/09/01/hello-world/
- # So, take the path, utils.slugify it, and that's our slug
- link = item.link
- link_path = urlparse(link).path
-
- title = item.title
-
- # blogger supports empty titles, which Nikola doesn't
- if not title:
- LOGGER.warn("Empty title in post with URL {0}. Using NO_TITLE "
- "as placeholder, please fix.".format(link))
- title = "NO_TITLE"
-
- if link_path.lower().endswith('.html'):
- link_path = link_path[:-5]
-
- slug = utils.slugify(link_path)
-
- if not slug: # should never happen
- LOGGER.error("Error converting post:", title)
- return
-
- description = ''
- post_date = datetime.datetime.fromtimestamp(time.mktime(
- item.published_parsed))
-
- for candidate in item.content:
- if candidate.type == 'text/html':
- content = candidate.value
- break
- # FIXME: handle attachments
-
- tags = []
- for tag in item.tags:
- if tag.scheme == 'http://www.blogger.com/atom/ns#':
- tags.append(tag.term)
-
- if item.get('app_draft'):
- tags.append('draft')
- is_draft = True
- else:
- is_draft = False
-
- self.url_map[link] = self.context['SITE_URL'] + '/' + \
- out_folder + '/' + slug + '.html'
-
- if is_draft and self.exclude_drafts:
- LOGGER.notice('Draft "{0}" will not be imported.'.format(title))
- elif content.strip():
- # If no content is found, no files are written.
- content = self.transform_content(content)
-
- self.write_metadata(os.path.join(self.output_folder, out_folder,
- slug + '.meta'),
- title, slug, post_date, description, tags)
- self.write_content(
- os.path.join(self.output_folder, out_folder, slug + '.html'),
- content)
- else:
- LOGGER.warn('Not going to import "{0}" because it seems to contain'
- ' no content.'.format(title))
-
- def process_item(self, item):
- post_type = item.tags[0].term
-
- if post_type == 'http://schemas.google.com/blogger/2008/kind#post':
- self.import_item(item, 'posts')
- elif post_type == 'http://schemas.google.com/blogger/2008/kind#page':
- self.import_item(item, 'stories')
- elif post_type == ('http://schemas.google.com/blogger/2008/kind'
- '#settings'):
- # Ignore settings
- pass
- elif post_type == ('http://schemas.google.com/blogger/2008/kind'
- '#template'):
- # Ignore template
- pass
- elif post_type == ('http://schemas.google.com/blogger/2008/kind'
- '#comment'):
- # FIXME: not importing comments. Does blogger support "pages"?
- pass
- else:
- LOGGER.warn("Unknown post_type:", post_type)
-
- def import_posts(self, channel):
- for item in channel.entries:
- self.process_item(item)
diff --git a/nikola/plugins/command/import_feed.plugin b/nikola/plugins/command/import_feed.plugin
deleted file mode 100644
index 26e570a..0000000
--- a/nikola/plugins/command/import_feed.plugin
+++ /dev/null
@@ -1,10 +0,0 @@
-[Core]
-Name = import_feed
-Module = import_feed
-
-[Documentation]
-Author = Grzegorz Śliwiński
-Version = 0.1
-Website = http://www.fizyk.net.pl/
-Description = Import a blog posts from a RSS/Atom dump
-
diff --git a/nikola/plugins/command/import_feed.py b/nikola/plugins/command/import_feed.py
deleted file mode 100644
index ee59277..0000000
--- a/nikola/plugins/command/import_feed.py
+++ /dev/null
@@ -1,200 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright © 2012-2014 Roberto Alsina and others.
-
-# Permission is hereby granted, free of charge, to any
-# person obtaining a copy of this software and associated
-# documentation files (the "Software"), to deal in the
-# Software without restriction, including without limitation
-# the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the
-# Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice
-# shall be included in all copies or substantial portions of
-# the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
-# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
-# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
-# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
-# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-from __future__ import unicode_literals, print_function
-import datetime
-import os
-import time
-
-try:
- from urlparse import urlparse
-except ImportError:
- from urllib.parse import urlparse # NOQA
-
-try:
- import feedparser
-except ImportError:
- feedparser = None # NOQA
-
-from nikola.plugin_categories import Command
-from nikola import utils
-from nikola.utils import req_missing
-from nikola.plugins.basic_import import ImportMixin
-from nikola.plugins.command.init import SAMPLE_CONF, prepare_config
-
-LOGGER = utils.get_logger('import_feed', utils.STDERR_HANDLER)
-
-
-class CommandImportFeed(Command, ImportMixin):
- """Import a feed dump."""
-
- name = "import_feed"
- needs_config = False
- doc_usage = "[options] feed_file"
- doc_purpose = "import a RSS/Atom dump"
- cmd_options = ImportMixin.cmd_options
-
- def _execute(self, options, args):
- '''
- Import Atom/RSS feed
- '''
- if feedparser is None:
- req_missing(['feedparser'], 'import feeds')
- return
-
- if not args:
- print(self.help())
- return
-
- options['filename'] = args[0]
- self.feed_export_file = options['filename']
- self.output_folder = options['output_folder']
- self.import_into_existing_site = False
- self.url_map = {}
- channel = self.get_channel_from_file(self.feed_export_file)
- self.context = self.populate_context(channel)
- conf_template = self.generate_base_site()
- self.context['REDIRECTIONS'] = self.configure_redirections(
- self.url_map)
-
- self.import_posts(channel)
-
- self.write_configuration(self.get_configuration_output_path(
- ), conf_template.render(**prepare_config(self.context)))
-
- @classmethod
- def get_channel_from_file(cls, filename):
- return feedparser.parse(filename)
-
- @staticmethod
- def populate_context(channel):
- context = SAMPLE_CONF.copy()
- context['DEFAULT_LANG'] = channel.feed.title_detail.language \
- if channel.feed.title_detail.language else 'en'
- context['BLOG_TITLE'] = channel.feed.title
-
- context['BLOG_DESCRIPTION'] = channel.feed.get('subtitle', '')
- context['SITE_URL'] = channel.feed.get('link', '').rstrip('/')
- context['BLOG_EMAIL'] = channel.feed.author_detail.get('email', '') if 'author_detail' in channel.feed else ''
- context['BLOG_AUTHOR'] = channel.feed.author_detail.get('name', '') if 'author_detail' in channel.feed else ''
-
- context['POSTS'] = '''(
- ("posts/*.html", "posts", "post.tmpl"),
- )'''
- context['PAGES'] = '''(
- ("stories/*.html", "stories", "story.tmpl"),
- )'''
- context['COMPILERS'] = '''{
- "rest": ('.txt', '.rst'),
- "markdown": ('.md', '.mdown', '.markdown', '.wp'),
- "html": ('.html', '.htm')
- }
- '''
-
- return context
-
- def import_posts(self, channel):
- for item in channel.entries:
- self.process_item(item)
-
- def process_item(self, item):
- self.import_item(item, 'posts')
-
- def import_item(self, item, out_folder=None):
- """Takes an item from the feed and creates a post file."""
- if out_folder is None:
- out_folder = 'posts'
-
- # link is something like http://foo.com/2012/09/01/hello-world/
- # So, take the path, utils.slugify it, and that's our slug
- link = item.link
- link_path = urlparse(link).path
-
- title = item.title
-
- # blogger supports empty titles, which Nikola doesn't
- if not title:
- LOGGER.warn("Empty title in post with URL {0}. Using NO_TITLE "
- "as placeholder, please fix.".format(link))
- title = "NO_TITLE"
-
- if link_path.lower().endswith('.html'):
- link_path = link_path[:-5]
-
- slug = utils.slugify(link_path)
-
- if not slug: # should never happen
- LOGGER.error("Error converting post:", title)
- return
-
- description = ''
- post_date = datetime.datetime.fromtimestamp(time.mktime(
- item.published_parsed))
- if item.get('content'):
- for candidate in item.get('content', []):
- content = candidate.value
- break
- # FIXME: handle attachments
- elif item.get('summary'):
- content = item.get('summary')
-
- tags = []
- for tag in item.get('tags', []):
- tags.append(tag.term)
-
- if item.get('app_draft'):
- tags.append('draft')
- is_draft = True
- else:
- is_draft = False
-
- self.url_map[link] = self.context['SITE_URL'] + '/' + \
- out_folder + '/' + slug + '.html'
-
- if is_draft and self.exclude_drafts:
- LOGGER.notice('Draft "{0}" will not be imported.'.format(title))
- elif content.strip():
- # If no content is found, no files are written.
- content = self.transform_content(content)
-
- self.write_metadata(os.path.join(self.output_folder, out_folder,
- slug + '.meta'),
- title, slug, post_date, description, tags)
- self.write_content(
- os.path.join(self.output_folder, out_folder, slug + '.html'),
- content)
- else:
- LOGGER.warn('Not going to import "{0}" because it seems to contain'
- ' no content.'.format(title))
-
- @staticmethod
- def write_metadata(filename, title, slug, post_date, description, tags):
- ImportMixin.write_metadata(filename,
- title,
- slug,
- post_date.strftime(r'%Y/%m/%d %H:%m:%S'),
- description,
- tags)
diff --git a/nikola/plugins/command/import_wordpress.py b/nikola/plugins/command/import_wordpress.py
index b567c77..8ddc8c7 100644
--- a/nikola/plugins/command/import_wordpress.py
+++ b/nikola/plugins/command/import_wordpress.py
@@ -51,7 +51,7 @@ from nikola import utils
from nikola.utils import req_missing
from nikola.plugins.basic_import import ImportMixin, links
from nikola.nikola import DEFAULT_TRANSLATIONS_PATTERN
-from nikola.plugins.command.init import SAMPLE_CONF, prepare_config
+from nikola.plugins.command.init import SAMPLE_CONF, prepare_config, format_default_translations_config
LOGGER = utils.get_logger('import_wordpress', utils.STDERR_HANDLER)
@@ -136,6 +136,9 @@ class CommandImportWordpress(Command, ImportMixin):
self.separate_qtranslate_content = options.get('separate_qtranslate_content')
self.translations_pattern = options.get('translations_pattern')
+ # A place holder where extra language (if detected) will be stored
+ self.extra_languages = set()
+
if not self.no_downloads:
def show_info_about_mising_module(modulename):
LOGGER.error(
@@ -164,6 +167,8 @@ class CommandImportWordpress(Command, ImportMixin):
self.import_posts(channel)
+ self.context['TRANSLATIONS'] = format_default_translations_config(
+ self.extra_languages)
self.context['REDIRECTIONS'] = self.configure_redirections(
self.url_map)
self.write_urlmap_csv(
@@ -326,7 +331,7 @@ class CommandImportWordpress(Command, ImportMixin):
size_key = b'sizes'
file_key = b'file'
- if not size_key in metadata:
+ if size_key not in metadata:
continue
for filename in [metadata[size_key][size][file_key] for size in metadata[size_key]]:
@@ -452,6 +457,7 @@ class CommandImportWordpress(Command, ImportMixin):
out_content_filename \
= utils.get_translation_candidate(self.context,
slug + ".wp", lang)
+ self.extra_languages.add(lang)
meta_slug = slug
else:
out_meta_filename = slug + '.meta'
diff --git a/nikola/plugins/command/init.py b/nikola/plugins/command/init.py
index d7eeed7..8fb15e0 100644
--- a/nikola/plugins/command/init.py
+++ b/nikola/plugins/command/init.py
@@ -24,19 +24,24 @@
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-from __future__ import print_function
+from __future__ import print_function, unicode_literals
import os
import shutil
import codecs
import json
-
+import textwrap
+import datetime
+import unidecode
+import dateutil.tz
from mako.template import Template
+from pkg_resources import resource_filename
import nikola
-from nikola.nikola import DEFAULT_TRANSLATIONS_PATTERN
+from nikola.nikola import DEFAULT_TRANSLATIONS_PATTERN, DEFAULT_INDEX_READ_MORE_LINK, DEFAULT_RSS_READ_MORE_LINK, LEGAL_VALUES
from nikola.plugin_categories import Command
-from nikola.utils import get_logger, makedirs, STDERR_HANDLER
-from nikola.winutils import fix_git_symlinked
+from nikola.utils import ask, ask_yesno, get_logger, makedirs, STDERR_HANDLER, load_messages
+from nikola.packages.tzlocal import get_localzone
+
LOGGER = get_logger('init', STDERR_HANDLER)
@@ -47,39 +52,144 @@ SAMPLE_CONF = {
'BLOG_EMAIL': "joe@demo.site",
'BLOG_DESCRIPTION': "This is a demo site for Nikola.",
'DEFAULT_LANG': "en",
+ 'TRANSLATIONS': """{
+ DEFAULT_LANG: "",
+ # Example for another language:
+ # "es": "./es",
+}""",
'THEME': 'bootstrap3',
+ 'TIMEZONE': 'UTC',
'COMMENT_SYSTEM': 'disqus',
'COMMENT_SYSTEM_ID': 'nikolademo',
'TRANSLATIONS_PATTERN': DEFAULT_TRANSLATIONS_PATTERN,
+ 'INDEX_READ_MORE_LINK': DEFAULT_INDEX_READ_MORE_LINK,
+ 'RSS_READ_MORE_LINK': DEFAULT_RSS_READ_MORE_LINK,
'POSTS': """(
-("posts/*.rst", "posts", "post.tmpl"),
-("posts/*.txt", "posts", "post.tmpl"),
+ ("posts/*.rst", "posts", "post.tmpl"),
+ ("posts/*.txt", "posts", "post.tmpl"),
)""",
'PAGES': """(
-("stories/*.rst", "stories", "story.tmpl"),
-("stories/*.txt", "stories", "story.tmpl"),
+ ("stories/*.rst", "stories", "story.tmpl"),
+ ("stories/*.txt", "stories", "story.tmpl"),
)""",
'COMPILERS': """{
-"rest": ('.rst', '.txt'),
-"markdown": ('.md', '.mdown', '.markdown'),
-"textile": ('.textile',),
-"txt2tags": ('.t2t',),
-"bbcode": ('.bb',),
-"wiki": ('.wiki',),
-"ipynb": ('.ipynb',),
-"html": ('.html', '.htm'),
-# PHP files are rendered the usual way (i.e. with the full templates).
-# The resulting files have .php extensions, making it possible to run
-# them without reconfiguring your server to recognize them.
-"php": ('.php',),
-# Pandoc detects the input from the source filename
-# but is disabled by default as it would conflict
-# with many of the others.
-# "pandoc": ('.rst', '.md', '.txt'),
+ "rest": ('.rst', '.txt'),
+ "markdown": ('.md', '.mdown', '.markdown'),
+ "textile": ('.textile',),
+ "txt2tags": ('.t2t',),
+ "bbcode": ('.bb',),
+ "wiki": ('.wiki',),
+ "ipynb": ('.ipynb',),
+ "html": ('.html', '.htm'),
+ # PHP files are rendered the usual way (i.e. with the full templates).
+ # The resulting files have .php extensions, making it possible to run
+ # them without reconfiguring your server to recognize them.
+ "php": ('.php',),
+ # Pandoc detects the input from the source filename
+ # but is disabled by default as it would conflict
+ # with many of the others.
+ # "pandoc": ('.rst', '.md', '.txt'),
+}""",
+ 'NAVIGATION_LINKS': """{
+ DEFAULT_LANG: (
+ ("/archive.html", "Archives"),
+ ("/categories/index.html", "Tags"),
+ ("/rss.xml", "RSS feed"),
+ ),
}""",
'REDIRECTIONS': [],
}
+# Generate a list of supported languages here.
+# Ugly code follows.
+_suplang = {}
+_sllength = 0
+
+for k, v in LEGAL_VALUES['TRANSLATIONS'].items():
+ if not isinstance(k, tuple):
+ main = k
+ _suplang[main] = v
+ else:
+ main = k[0]
+ k = k[1:]
+ bad = []
+ good = []
+ for i in k:
+ if i.startswith('!'):
+ bad.append(i[1:])
+ else:
+ good.append(i)
+ different = ''
+ if good or bad:
+ different += ' ['
+ if good:
+ different += 'ALTERNATIVELY ' + ', '.join(good)
+ if bad:
+ if good:
+ different += '; '
+ different += 'NOT ' + ', '.join(bad)
+ if good or bad:
+ different += ']'
+ _suplang[main] = v + different
+
+ if len(main) > _sllength:
+ _sllength = len(main)
+
+_sllength = str(_sllength)
+suplang = (u'# {0:<' + _sllength + u'} {1}\n').format('en', 'English')
+del _suplang['en']
+for k, v in sorted(_suplang.items()):
+ suplang += (u'# {0:<' + _sllength + u'} {1}\n').format(k, v)
+
+SAMPLE_CONF['_SUPPORTED_LANGUAGES'] = suplang.strip()
+
+# Generate a list of supported comment systems here.
+
+SAMPLE_CONF['_SUPPORTED_COMMENT_SYSTEMS'] = '\n'.join(textwrap.wrap(
+ u', '.join(LEGAL_VALUES['COMMENT_SYSTEM']),
+ initial_indent=u'# ', subsequent_indent=u'# ', width=79))
+
+
+def format_default_translations_config(additional_languages):
+ """Return the string to configure the TRANSLATIONS config variable to
+ make each additional language visible on the generated site."""
+ if not additional_languages:
+ return SAMPLE_CONF["TRANSLATIONS"]
+ lang_paths = [' DEFAULT_LANG: "",']
+ for lang in sorted(additional_languages):
+ lang_paths.append(' "{0}": "./{0}",'.format(lang))
+ return "{{\n{0}\n}}".format("\n".join(lang_paths))
+
+
+def format_navigation_links(additional_languages, default_lang, messages):
+ """Return the string to configure NAVIGATION_LINKS."""
+ f = u"""\
+ {0}: (
+ ("{1}/archive.html", "{2[Archive]}"),
+ ("{1}/categories/index.html", "{2[Tags]}"),
+ ("{1}/rss.xml", "{2[RSS feed]}"),
+ ),"""
+
+ pairs = []
+
+ def get_msg(lang):
+ """Generate a smaller messages dict with fallback."""
+ fmsg = {}
+ for i in (u'Archive', u'Tags', u'RSS feed'):
+ if messages[lang][i]:
+ fmsg[i] = messages[lang][i]
+ else:
+ fmsg[i] = i
+ return fmsg
+
+ # handle the default language
+ pairs.append(f.format('DEFAULT_LANG', '', get_msg(default_lang)))
+
+ for l in additional_languages:
+ pairs.append(f.format(json.dumps(l), '/' + l, get_msg(l)))
+
+ return u'{{\n{0}\n}}'.format('\n\n'.join(pairs))
+
# In order to ensure proper escaping, all variables but the three
# pre-formatted ones are handled by json.dumps().
@@ -87,7 +197,10 @@ def prepare_config(config):
"""Parse sample config with JSON."""
p = config.copy()
p.update(dict((k, json.dumps(v)) for k, v in p.items()
- if k not in ('POSTS', 'PAGES', 'COMPILERS')))
+ if k not in ('POSTS', 'PAGES', 'COMPILERS', 'TRANSLATIONS', 'NAVIGATION_LINKS', '_SUPPORTED_LANGUAGES', '_SUPPORTED_COMMENT_SYSTEMS', 'INDEX_READ_MORE_LINK', 'RSS_READ_MORE_LINK')))
+ # READ_MORE_LINKs require some special treatment.
+ p['INDEX_READ_MORE_LINK'] = "'" + p['INDEX_READ_MORE_LINK'].replace("'", "\\'") + "'"
+ p['RSS_READ_MORE_LINK'] = "'" + p['RSS_READ_MORE_LINK'].replace("'", "\\'") + "'"
return p
@@ -97,13 +210,22 @@ class CommandInit(Command):
name = "init"
- doc_usage = "[--demo] folder"
+ doc_usage = "[--demo] [--quiet] folder"
needs_config = False
doc_purpose = "create a Nikola site in the specified folder"
cmd_options = [
{
+ 'name': 'quiet',
+ 'long': 'quiet',
+ 'short': 'q',
+ 'default': False,
+ 'type': bool,
+ 'help': "Do not ask questions about config.",
+ },
+ {
'name': 'demo',
'long': 'demo',
+ 'short': 'd',
'default': False,
'type': bool,
'help': "Create a site filled with example data.",
@@ -112,15 +234,12 @@ class CommandInit(Command):
@classmethod
def copy_sample_site(cls, target):
- lib_path = cls.get_path_to_nikola_modules()
- src = os.path.join(lib_path, 'data', 'samplesite')
+ src = resource_filename('nikola', os.path.join('data', 'samplesite'))
shutil.copytree(src, target)
- fix_git_symlinked(src, target)
@classmethod
def create_configuration(cls, target):
- lib_path = cls.get_path_to_nikola_modules()
- template_path = os.path.join(lib_path, 'conf.py.in')
+ template_path = resource_filename('nikola', 'conf.py.in')
conf_template = Template(filename=template_path)
conf_path = os.path.join(target, 'conf.py')
with codecs.open(conf_path, 'w+', 'utf8') as fd:
@@ -132,16 +251,167 @@ class CommandInit(Command):
makedirs(os.path.join(target, folder))
@staticmethod
- def get_path_to_nikola_modules():
- return os.path.dirname(nikola.__file__)
+ def ask_questions(target):
+ """Ask some questions about Nikola."""
+ def lhandler(default, toconf, show_header=True):
+ if show_header:
+ print("We will now ask you to provide the list of languages you want to use.")
+ print("Please list all the desired languages, comma-separated, using ISO 639-1 codes. The first language will be used as the default.")
+ print("Type '?' (a question mark, sans quotes) to list available languages.")
+ answer = ask('Language(s) to use', 'en')
+ while answer.strip() == '?':
+ print('\n# Available languages:')
+ try:
+ print(SAMPLE_CONF['_SUPPORTED_LANGUAGES'] + '\n')
+ except UnicodeEncodeError:
+ # avoid Unicode characters in supported language names
+ print(unidecode.unidecode(SAMPLE_CONF['_SUPPORTED_LANGUAGES']) + '\n')
+ answer = ask('Language(s) to use', 'en')
+
+ langs = [i.strip().lower().replace('-', '_') for i in answer.split(',')]
+ for partial, full in LEGAL_VALUES['_TRANSLATIONS_WITH_COUNTRY_SPECIFIERS'].items():
+ if partial in langs:
+ langs[langs.index(partial)] = full
+ print("NOTICE: Assuming '{0}' instead of '{1}'.".format(full, partial))
+
+ default = langs.pop(0)
+ SAMPLE_CONF['DEFAULT_LANG'] = default
+ # format_default_translations_config() is intelligent enough to
+ # return the current value if there are no additional languages.
+ SAMPLE_CONF['TRANSLATIONS'] = format_default_translations_config(langs)
+
+ # Get messages for navigation_links. In order to do this, we need
+ # to generate a throwaway TRANSLATIONS dict.
+ tr = {default: ''}
+ for l in langs:
+ tr[l] = './' + l
+ # Assuming that base contains all the locales, and that base does
+ # not inherit from anywhere.
+ try:
+ messages = load_messages(['base'], tr, default)
+ SAMPLE_CONF['NAVIGATION_LINKS'] = format_navigation_links(langs, default, messages)
+ except nikola.utils.LanguageNotFoundError as e:
+ print(" ERROR: the language '{0}' is not supported.".format(e.lang))
+ print(" Are you sure you spelled the name correctly? Names are case-sensitive and need to be reproduced as-is (complete with the country specifier, if any).")
+ print("\nType '?' (a question mark, sans quotes) to list available languages.")
+ lhandler(default, toconf, show_header=False)
+
+ def tzhandler(default, toconf):
+ print("\nPlease choose the correct time zone for your blog. Nikola uses the tz database.")
+ print("You can find your time zone here:")
+ print("http://en.wikipedia.org/wiki/List_of_tz_database_time_zones")
+ print("")
+ answered = False
+ while not answered:
+ try:
+ lz = get_localzone()
+ except:
+ lz = None
+ answer = ask('Time zone', lz if lz else "UTC")
+ tz = dateutil.tz.gettz(answer)
+ if tz is not None:
+ time = datetime.datetime.now(tz).strftime('%H:%M:%S')
+ print(" Current time in {0}: {1}".format(answer, time))
+ answered = ask_yesno("Use this time zone?", True)
+ else:
+ print(" ERROR: Time zone not found. Please try again. Time zones are case-sensitive.")
+
+ SAMPLE_CONF['TIMEZONE'] = answer
+
+ def chandler(default, toconf):
+ print("You can configure comments now. Type '?' (a question mark, sans quotes) to list available comment systems. If you do not want any comments, just leave the field blank.")
+ answer = ask('Comment system', '')
+ while answer.strip() == '?':
+ print('\n# Available comment systems:')
+ print(SAMPLE_CONF['_SUPPORTED_COMMENT_SYSTEMS'])
+ print('')
+ answer = ask('Comment system', '')
+
+ while answer and answer not in LEGAL_VALUES['COMMENT_SYSTEM']:
+ if answer != '?':
+ print(' ERROR: Nikola does not know this comment system.')
+ print('\n# Available comment systems:')
+ print(SAMPLE_CONF['_SUPPORTED_COMMENT_SYSTEMS'])
+ print('')
+ answer = ask('Comment system', '')
+
+ SAMPLE_CONF['COMMENT_SYSTEM'] = answer
+ SAMPLE_CONF['COMMENT_SYSTEM_ID'] = ''
+
+ if answer:
+ print("You need to provide the site identifier for your comment system. Consult the Nikola manual for details on what the value should be. (you can leave it empty and come back later)")
+ answer = ask('Comment system site identifier', '')
+ SAMPLE_CONF['COMMENT_SYSTEM_ID'] = answer
+
+ STORAGE = {'target': target}
+
+ questions = [
+ ('Questions about the site', None, None, None),
+ # query, default, toconf, destination
+ ('Destination', None, False, '!target'),
+ ('Site title', 'My Nikola Site', True, 'BLOG_TITLE'),
+ ('Site author', 'Nikola Tesla', True, 'BLOG_AUTHOR'),
+ ('Site author\'s e-mail', 'n.tesla@example.com', True, 'BLOG_EMAIL'),
+ ('Site description', 'This is a demo site for Nikola.', True, 'BLOG_DESCRIPTION'),
+ ('Site URL', 'http://getnikola.com/', True, 'SITE_URL'),
+ ('Questions about languages and locales', None, None, None),
+ (lhandler, None, True, True),
+ (tzhandler, None, True, True),
+ ('Questions about comments', None, None, None),
+ (chandler, None, True, True),
+ ]
+
+ print("Creating Nikola Site")
+ print("====================\n")
+ print("This is Nikola v{0}. We will now ask you a few easy questions about your new site.".format(nikola.__version__))
+ print("If you do not want to answer and want to go with the defaults instead, simply restart with the `-q` parameter.")
+
+ for query, default, toconf, destination in questions:
+ if target and destination == '!target':
+ # Skip the destination question if we know it already
+ pass
+ else:
+ if default is toconf is destination is None:
+ print('--- {0} ---'.format(query))
+ elif destination is True:
+ query(default, toconf)
+ else:
+ answer = ask(query, default)
+ if toconf:
+ SAMPLE_CONF[destination] = answer
+ if destination == '!target':
+ while not answer:
+ print(' ERROR: you need to specify a target directory.\n')
+ answer = ask(query, default)
+ STORAGE['target'] = answer
+
+ print("\nThat's it, Nikola is now configured. Make sure to edit conf.py to your liking.")
+ print("If you are looking for themes and addons, check out http://themes.getnikola.com/ and http://plugins.getnikola.com/.")
+ print("Have fun!")
+ return STORAGE
def _execute(self, options={}, args=None):
"""Create a new site."""
- if not args:
- print("Usage: nikola init folder [options]")
+ try:
+ target = args[0]
+ except IndexError:
+ target = None
+ if not options.get('quiet'):
+ st = self.ask_questions(target=target)
+ try:
+ if not target:
+ target = st['target']
+ except KeyError:
+ pass
+
+ if not target:
+ print("Usage: nikola init [--demo] [--quiet] folder")
+ print("""
+Options:
+ -q, --quiet Do not ask questions about config.
+ -d, --demo Create a site filled with example data.""")
return False
- target = args[0]
- if not options or not options.get('demo'):
+ if not options.get('demo'):
self.create_empty_site(target)
LOGGER.info('Created empty site at {0}.'.format(target))
else:
diff --git a/nikola/plugins/command/install_plugin.plugin b/nikola/plugins/command/install_plugin.plugin
deleted file mode 100644
index 3dbabd8..0000000
--- a/nikola/plugins/command/install_plugin.plugin
+++ /dev/null
@@ -1,10 +0,0 @@
-[Core]
-Name = install_plugin
-Module = install_plugin
-
-[Documentation]
-Author = Roberto Alsina and Chris Warrick
-Version = 0.1
-Website = http://getnikola.com
-Description = Install a plugin into the current site.
-
diff --git a/nikola/plugins/command/install_plugin.py b/nikola/plugins/command/install_plugin.py
deleted file mode 100644
index 34223c0..0000000
--- a/nikola/plugins/command/install_plugin.py
+++ /dev/null
@@ -1,188 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright © 2012-2014 Roberto Alsina and others.
-
-# Permission is hereby granted, free of charge, to any
-# person obtaining a copy of this software and associated
-# documentation files (the "Software"), to deal in the
-# Software without restriction, including without limitation
-# the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the
-# Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice
-# shall be included in all copies or substantial portions of
-# the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
-# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
-# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
-# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
-# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-from __future__ import print_function
-import codecs
-import os
-import json
-import shutil
-import subprocess
-from io import BytesIO
-
-import pygments
-from pygments.lexers import PythonLexer
-from pygments.formatters import TerminalFormatter
-
-try:
- import requests
-except ImportError:
- requests = None # NOQA
-
-from nikola.plugin_categories import Command
-from nikola import utils
-
-LOGGER = utils.get_logger('install_plugin', utils.STDERR_HANDLER)
-
-
-# Stolen from textwrap in Python 3.3.2.
-def indent(text, prefix, predicate=None): # NOQA
- """Adds 'prefix' to the beginning of selected lines in 'text'.
-
- If 'predicate' is provided, 'prefix' will only be added to the lines
- where 'predicate(line)' is True. If 'predicate' is not provided,
- it will default to adding 'prefix' to all non-empty lines that do not
- consist solely of whitespace characters.
- """
- if predicate is None:
- def predicate(line):
- return line.strip()
-
- def prefixed_lines():
- for line in text.splitlines(True):
- yield (prefix + line if predicate(line) else line)
- return ''.join(prefixed_lines())
-
-
-class CommandInstallPlugin(Command):
- """Install a plugin."""
-
- name = "install_plugin"
- doc_usage = "[[-u] plugin_name] | [[-u] -l]"
- doc_purpose = "install plugin into current site"
- output_dir = 'plugins'
- cmd_options = [
- {
- 'name': 'list',
- 'short': 'l',
- 'long': 'list',
- 'type': bool,
- 'default': False,
- 'help': 'Show list of available plugins.'
- },
- {
- 'name': 'url',
- 'short': 'u',
- 'long': 'url',
- 'type': str,
- 'help': "URL for the plugin repository (default: "
- "http://plugins.getnikola.com/v6/plugins.json)",
- 'default': 'http://plugins.getnikola.com/v6/plugins.json'
- },
- ]
-
- def _execute(self, options, args):
- """Install plugin into current site."""
- if requests is None:
- utils.req_missing(['requests'], 'install plugins')
-
- listing = options['list']
- url = options['url']
- if args:
- name = args[0]
- else:
- name = None
-
- if name is None and not listing:
- LOGGER.error("This command needs either a plugin name or the -l option.")
- return False
- data = requests.get(url).text
- data = json.loads(data)
- if listing:
- print("Plugins:")
- print("--------")
- for plugin in sorted(data.keys()):
- print(plugin)
- return True
- else:
- self.do_install(name, data)
-
- def do_install(self, name, data):
- if name in data:
- utils.makedirs(self.output_dir)
- LOGGER.info('Downloading: ' + data[name])
- zip_file = BytesIO()
- zip_file.write(requests.get(data[name]).content)
- LOGGER.info('Extracting: {0} into plugins'.format(name))
- utils.extract_all(zip_file, 'plugins')
- dest_path = os.path.join('plugins', name)
- else:
- try:
- plugin_path = utils.get_plugin_path(name)
- except:
- LOGGER.error("Can't find plugin " + name)
- return False
-
- utils.makedirs(self.output_dir)
- dest_path = os.path.join(self.output_dir, name)
- if os.path.exists(dest_path):
- LOGGER.error("{0} is already installed".format(name))
- return False
-
- LOGGER.info('Copying {0} into plugins'.format(plugin_path))
- shutil.copytree(plugin_path, dest_path)
-
- reqpath = os.path.join(dest_path, 'requirements.txt')
- if os.path.exists(reqpath):
- LOGGER.notice('This plugin has Python dependencies.')
- LOGGER.info('Installing dependencies with pip...')
- try:
- subprocess.check_call(('pip', 'install', '-r', reqpath))
- except subprocess.CalledProcessError:
- LOGGER.error('Could not install the dependencies.')
- print('Contents of the requirements.txt file:\n')
- with codecs.open(reqpath, 'rb', 'utf-8') as fh:
- print(indent(fh.read(), 4 * ' '))
- print('You have to install those yourself or through a '
- 'package manager.')
- else:
- LOGGER.info('Dependency installation succeeded.')
- reqnpypath = os.path.join(dest_path, 'requirements-nonpy.txt')
- if os.path.exists(reqnpypath):
- LOGGER.notice('This plugin has third-party '
- 'dependencies you need to install '
- 'manually.')
- print('Contents of the requirements-nonpy.txt file:\n')
- with codecs.open(reqnpypath, 'rb', 'utf-8') as fh:
- for l in fh.readlines():
- i, j = l.split('::')
- print(indent(i.strip(), 4 * ' '))
- print(indent(j.strip(), 8 * ' '))
- print()
-
- print('You have to install those yourself or through a package '
- 'manager.')
- confpypath = os.path.join(dest_path, 'conf.py.sample')
- if os.path.exists(confpypath):
- LOGGER.notice('This plugin has a sample config file. Integrate it with yours in order to make this plugin work!')
- print('Contents of the conf.py.sample file:\n')
- with codecs.open(confpypath, 'rb', 'utf-8') as fh:
- if self.site.colorful:
- print(indent(pygments.highlight(
- fh.read(), PythonLexer(), TerminalFormatter()),
- 4 * ' '))
- else:
- print(indent(fh.read(), 4 * ' '))
- return True
diff --git a/nikola/plugins/command/install_theme.py b/nikola/plugins/command/install_theme.py
index 47c73b4..859bd56 100644
--- a/nikola/plugins/command/install_theme.py
+++ b/nikola/plugins/command/install_theme.py
@@ -87,8 +87,8 @@ class CommandInstallTheme(Command):
'long': 'url',
'type': str,
'help': "URL for the theme repository (default: "
- "http://themes.getnikola.com/v6/themes.json)",
- 'default': 'http://themes.getnikola.com/v6/themes.json'
+ "http://themes.getnikola.com/v7/themes.json)",
+ 'default': 'http://themes.getnikola.com/v7/themes.json'
},
]
diff --git a/nikola/plugins/command/mincss.plugin b/nikola/plugins/command/mincss.plugin
deleted file mode 100644
index d394d06..0000000
--- a/nikola/plugins/command/mincss.plugin
+++ /dev/null
@@ -1,10 +0,0 @@
-[Core]
-Name = mincss
-Module = mincss
-
-[Documentation]
-Author = Roberto Alsina
-Version = 0.1
-Website = http://getnikola.com
-Description = Apply mincss to the generated site
-
diff --git a/nikola/plugins/command/mincss.py b/nikola/plugins/command/mincss.py
deleted file mode 100644
index 0193458..0000000
--- a/nikola/plugins/command/mincss.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright © 2012-2014 Roberto Alsina and others.
-
-# Permission is hereby granted, free of charge, to any
-# person obtaining a copy of this software and associated
-# documentation files (the "Software"), to deal in the
-# Software without restriction, including without limitation
-# the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the
-# Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice
-# shall be included in all copies or substantial portions of
-# the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
-# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
-# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
-# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
-# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-from __future__ import print_function, unicode_literals
-import os
-import sys
-
-try:
- from mincss.processor import Processor
-except ImportError:
- Processor = None
-
-from nikola.plugin_categories import Command
-from nikola.utils import req_missing, get_logger, STDERR_HANDLER
-
-
-class CommandMincss(Command):
- """Check the generated site."""
- name = "mincss"
-
- doc_usage = ""
- doc_purpose = "apply mincss to the generated site"
-
- logger = get_logger('mincss', STDERR_HANDLER)
-
- def _execute(self, options, args):
- """Apply mincss the generated site."""
- output_folder = self.site.config['OUTPUT_FOLDER']
- if Processor is None:
- req_missing(['mincss'], 'use the "mincss" command')
- return
-
- p = Processor(preserve_remote_urls=False)
- urls = []
- css_files = {}
- for root, dirs, files in os.walk(output_folder):
- for f in files:
- url = os.path.join(root, f)
- if url.endswith('.css'):
- fname = os.path.basename(url)
- if fname in css_files:
- self.logger.error("You have two CSS files with the same name and that confuses me.")
- sys.exit(1)
- css_files[fname] = url
- if not f.endswith('.html'):
- continue
- urls.append(url)
- p.process(*urls)
- for inline in p.links:
- fname = os.path.basename(inline.href)
- with open(css_files[fname], 'wb+') as outf:
- outf.write(inline.after)
diff --git a/nikola/plugins/command/new_page.py b/nikola/plugins/command/new_page.py
index 39c0c1d..f07ba39 100644
--- a/nikola/plugins/command/new_page.py
+++ b/nikola/plugins/command/new_page.py
@@ -59,6 +59,13 @@ class CommandNewPage(Command):
'help': 'Create the page with separate metadata (two file format)'
},
{
+ 'name': 'edit',
+ 'short': 'e',
+ 'type': bool,
+ 'default': False,
+ 'help': 'Open the page (and meta file, if any) in $EDITOR after creation.'
+ },
+ {
'name': 'content_format',
'short': 'f',
'long': 'format',
diff --git a/nikola/plugins/command/new_post.py b/nikola/plugins/command/new_post.py
index cd37a75..42f77cc 100644
--- a/nikola/plugins/command/new_post.py
+++ b/nikola/plugins/command/new_post.py
@@ -29,8 +29,10 @@ import codecs
import datetime
import os
import sys
+import subprocess
from blinker import signal
+import dateutil.tz
from nikola.plugin_categories import Command
from nikola import utils
@@ -82,7 +84,7 @@ def get_default_compiler(is_post, compilers, post_pages):
return 'rest'
-def get_date(schedule=False, rule=None, last_date=None, force_today=False):
+def get_date(schedule=False, rule=None, last_date=None, tz=None, iso8601=False):
"""Returns a date stamp, given a recurrence rule.
schedule - bool:
@@ -94,33 +96,45 @@ def get_date(schedule=False, rule=None, last_date=None, force_today=False):
last_date - datetime:
timestamp of the last post
- force_today - bool:
- tries to schedule a post to today, if possible, even if the scheduled
- time has already passed in the day.
+ tz - tzinfo:
+ the timezone used for getting the current time.
+
+ iso8601 - bool:
+ whether to force ISO 8601 dates (instead of locale-specific ones)
+
"""
- date = now = datetime.datetime.now()
+ if tz is None:
+ tz = dateutil.tz.tzlocal()
+ date = now = datetime.datetime.now(tz)
if schedule:
try:
from dateutil import rrule
except ImportError:
LOGGER.error('To use the --schedule switch of new_post, '
'you have to install the "dateutil" package.')
- rrule = None
+ rrule = None # NOQA
if schedule and rrule and rule:
- if last_date and last_date.tzinfo:
- # strip tzinfo for comparisons
- last_date = last_date.replace(tzinfo=None)
try:
rule_ = rrule.rrulestr(rule, dtstart=last_date)
except Exception:
LOGGER.error('Unable to parse rule string, using current time.')
else:
- # Try to post today, instead of tomorrow, if no other post today.
- if force_today:
- now = now.replace(hour=0, minute=0, second=0, microsecond=0)
date = rule_.after(max(now, last_date or now), last_date is None)
- return date.strftime('%Y/%m/%d %H:%M:%S')
+
+ offset = tz.utcoffset(now)
+ offset_sec = (offset.days * 24 * 3600 + offset.seconds)
+ offset_hrs = offset_sec // 3600
+ offset_min = offset_sec % 3600
+ if iso8601:
+ tz_str = '{0:+03d}:{1:02d}'.format(offset_hrs, offset_min // 60)
+ else:
+ if offset:
+ tz_str = ' UTC{0:+03d}:{1:02d}'.format(offset_hrs, offset_min // 60)
+ else:
+ tz_str = ' UTC'
+
+ return date.strftime('%Y-%m-%d %H:%M:%S') + tz_str
class CommandNewPost(Command):
@@ -168,6 +182,13 @@ class CommandNewPost(Command):
'help': 'Create the post with separate metadata (two file format)'
},
{
+ 'name': 'edit',
+ 'short': 'e',
+ 'type': bool,
+ 'default': False,
+ 'help': 'Open the post (and meta file, if any) in $EDITOR after creation.'
+ },
+ {
'name': 'content_format',
'short': 'f',
'long': 'format',
@@ -242,31 +263,44 @@ class CommandNewPost(Command):
print("Creating New {0}".format(content_type.title()))
print("-----------------\n")
- if title is None:
- print("Enter title: ", end='')
- # WHY, PYTHON3???? WHY?
- sys.stdout.flush()
- title = sys.stdin.readline()
- else:
+ if title is not None:
print("Title:", title)
+ else:
+ while not title:
+ title = utils.ask('Title')
+
if isinstance(title, utils.bytes_str):
- title = title.decode(sys.stdin.encoding)
+ try:
+ title = title.decode(sys.stdin.encoding)
+ except AttributeError: # sys.stdin may be replaced by an object without .encoding under test
+ title = title.decode('utf-8')
+
title = title.strip()
if not path:
slug = utils.slugify(title)
else:
if isinstance(path, utils.bytes_str):
- path = path.decode(sys.stdin.encoding)
+ try:
+ path = path.decode(sys.stdin.encoding)
+ except AttributeError: # sys.stdin may be replaced by an object without .encoding under test
+ path = path.decode('utf-8')
slug = utils.slugify(os.path.splitext(os.path.basename(path))[0])
# Calculate the date to use for the content
schedule = options['schedule'] or self.site.config['SCHEDULE_ALL']
rule = self.site.config['SCHEDULE_RULE']
- force_today = self.site.config['SCHEDULE_FORCE_TODAY']
self.site.scan_posts()
timeline = self.site.timeline
last_date = None if not timeline else timeline[0].date
- date = get_date(schedule, rule, last_date, force_today)
- data = [title, slug, date, tags]
+ date = get_date(schedule, rule, last_date, self.site.tzinfo, self.site.config['FORCE_ISO8601'])
+ data = {
+ 'title': title,
+ 'slug': slug,
+ 'date': date,
+ 'tags': tags,
+ 'link': '',
+ 'description': '',
+ 'type': 'text',
+ }
output_path = os.path.dirname(entry[0])
meta_path = os.path.join(output_path, slug + ".meta")
pattern = os.path.basename(entry[0])
@@ -284,19 +318,34 @@ class CommandNewPost(Command):
d_name = os.path.dirname(txt_path)
utils.makedirs(d_name)
metadata = self.site.config['ADDITIONAL_METADATA']
+
+ # Fall back to two-file mode when the compiler cannot handle one-file posts.
+ if not compiler_plugin.supports_onefile and onefile:
+ onefile = False
+ LOGGER.warn('This compiler does not support one-file posts.')
+
+ content = "Write your {0} here.".format('page' if is_page else 'post')
compiler_plugin.create_post(
- txt_path, onefile, title=title,
+ txt_path, content=content, onefile=onefile, title=title,
slug=slug, date=date, tags=tags, is_page=is_page, **metadata)
event = dict(path=txt_path)
if not onefile: # write metadata file
with codecs.open(meta_path, "wb+", "utf8") as fd:
- fd.write('\n'.join(data))
- with codecs.open(txt_path, "wb+", "utf8") as fd:
- fd.write("Write your {0} here.".format(content_type))
+ fd.write(utils.write_metadata(data))
LOGGER.info("Your {0}'s metadata is at: {1}".format(content_type, meta_path))
event['meta_path'] = meta_path
LOGGER.info("Your {0}'s text is at: {1}".format(content_type, txt_path))
signal('new_' + content_type).send(self, **event)
+
+ if options['edit']:
+ editor = os.getenv('EDITOR')
+ to_run = [editor, txt_path]
+ if not onefile:
+ to_run.append(meta_path)
+ if editor:
+ subprocess.call(to_run)
+ else:
+ LOGGER.error('$EDITOR not set, cannot edit the post. Please do it manually.')
diff --git a/nikola/plugins/command/planetoid.plugin b/nikola/plugins/command/planetoid.plugin
deleted file mode 100644
index e767f31..0000000
--- a/nikola/plugins/command/planetoid.plugin
+++ /dev/null
@@ -1,9 +0,0 @@
-[Core]
-Name = planetoid
-Module = planetoid
-
-[Documentation]
-Author = Roberto Alsina
-Version = 0.1
-Website = http://getnikola.com
-Description = Maintain a planet-like site
diff --git a/nikola/plugins/command/planetoid/__init__.py b/nikola/plugins/command/planetoid/__init__.py
deleted file mode 100644
index fe1a59b..0000000
--- a/nikola/plugins/command/planetoid/__init__.py
+++ /dev/null
@@ -1,289 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright © 2012-2014 Roberto Alsina and others.
-
-# Permission is hereby granted, free of charge, to any
-# person obtaining a copy of this software and associated
-# documentation files (the "Software"), to deal in the
-# Software without restriction, including without limitation
-# the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the
-# Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice
-# shall be included in all copies or substantial portions of
-# the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
-# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
-# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
-# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
-# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-from __future__ import print_function, unicode_literals
-import codecs
-import datetime
-import hashlib
-from optparse import OptionParser
-import os
-import sys
-
-from doit.tools import timeout
-from nikola.plugin_categories import Command, Task
-from nikola.utils import config_changed, req_missing, get_logger, STDERR_HANDLER
-
-LOGGER = get_logger('planetoid', STDERR_HANDLER)
-
-try:
- import feedparser
-except ImportError:
- feedparser = None # NOQA
-
-try:
- import peewee
-except ImportError:
- peewee = None
-
-
-if peewee is not None:
- class Feed(peewee.Model):
- name = peewee.CharField()
- url = peewee.CharField(max_length=200)
- last_status = peewee.CharField(null=True)
- etag = peewee.CharField(max_length=200)
- last_modified = peewee.DateTimeField()
-
- class Entry(peewee.Model):
- date = peewee.DateTimeField()
- feed = peewee.ForeignKeyField(Feed)
- content = peewee.TextField(max_length=20000)
- link = peewee.CharField(max_length=200)
- title = peewee.CharField(max_length=200)
- guid = peewee.CharField(max_length=200)
-
-
-class Planetoid(Command, Task):
- """Maintain a planet-like thing."""
- name = "planetoid"
-
- def init_db(self):
- # setup database
- Feed.create_table(fail_silently=True)
- Entry.create_table(fail_silently=True)
-
- def gen_tasks(self):
- if peewee is None or sys.version_info[0] == 3:
- if sys.version_info[0] == 3:
- message = 'Peewee, a requirement of the "planetoid" command, is currently incompatible with Python 3.'
- else:
- req_missing('peewee', 'use the "planetoid" command')
- message = ''
- yield {
- 'basename': self.name,
- 'name': '',
- 'verbosity': 2,
- 'actions': ['echo "%s"' % message]
- }
- else:
- self.init_db()
- self.load_feeds()
- for task in self.task_update_feeds():
- yield task
- for task in self.task_generate_posts():
- yield task
- yield {
- 'basename': self.name,
- 'name': '',
- 'actions': [],
- 'file_dep': ['feeds'],
- 'task_dep': [
- self.name + "_fetch_feed",
- self.name + "_generate_posts",
- ]
- }
-
- def run(self, *args):
- parser = OptionParser(usage="nikola %s [options]" % self.name)
- (options, args) = parser.parse_args(list(args))
-
- def load_feeds(self):
- "Read the feeds file, add it to the database."
- feeds = []
- feed = name = None
- for line in codecs.open('feeds', 'r', 'utf-8'):
- line = line.strip()
- if line.startswith("#"):
- continue
- elif line.startswith('http'):
- feed = line
- elif line:
- name = line
- if feed and name:
- feeds.append([feed, name])
- feed = name = None
-
- def add_feed(name, url):
- f = Feed.create(
- name=name,
- url=url,
- etag='foo',
- last_modified=datetime.datetime(1970, 1, 1),
- )
- f.save()
-
- def update_feed_url(feed, url):
- feed.url = url
- feed.save()
-
- for feed, name in feeds:
- f = Feed.select().where(Feed.name == name)
- if not list(f):
- add_feed(name, feed)
- elif list(f)[0].url != feed:
- update_feed_url(list(f)[0], feed)
-
- def task_update_feeds(self):
- """Download feed contents, add entries to the database."""
- def update_feed(feed):
- modified = feed.last_modified.timetuple()
- etag = feed.etag
- try:
- parsed = feedparser.parse(
- feed.url,
- etag=etag,
- modified=modified
- )
- feed.last_status = str(parsed.status)
- except: # Probably a timeout
- # TODO: log failure
- return
- if parsed.feed.get('title'):
- LOGGER.info(parsed.feed.title)
- else:
- LOGGER.info(feed.url)
- feed.etag = parsed.get('etag', 'foo')
- modified = tuple(parsed.get('date_parsed', (1970, 1, 1)))[:6]
- LOGGER.info("==========>", modified)
- modified = datetime.datetime(*modified)
- feed.last_modified = modified
- feed.save()
- # No point in adding items from missinfg feeds
- if parsed.status > 400:
- # TODO log failure
- return
- for entry_data in parsed.entries:
- LOGGER.info("=========================================")
- date = entry_data.get('published_parsed', None)
- if date is None:
- date = entry_data.get('updated_parsed', None)
- if date is None:
- LOGGER.error("Can't parse date from:\n", entry_data)
- return False
- LOGGER.info("DATE:===>", date)
- date = datetime.datetime(*(date[:6]))
- title = "%s: %s" % (feed.name, entry_data.get('title', 'Sin título'))
- content = entry_data.get('content', None)
- if content:
- content = content[0].value
- if not content:
- content = entry_data.get('description', None)
- if not content:
- content = entry_data.get('summary', 'Sin contenido')
- guid = str(entry_data.get('guid', entry_data.link))
- link = entry_data.link
- LOGGER.info(repr([date, title]))
- e = list(Entry.select().where(Entry.guid == guid))
- LOGGER.info(
- repr(dict(
- date=date,
- title=title,
- content=content,
- guid=guid,
- feed=feed,
- link=link,
- ))
- )
- if not e:
- entry = Entry.create(
- date=date,
- title=title,
- content=content,
- guid=guid,
- feed=feed,
- link=link,
- )
- else:
- entry = e[0]
- entry.date = date
- entry.title = title
- entry.content = content
- entry.link = link
- entry.save()
- flag = False
- for feed in Feed.select():
- flag = True
- task = {
- 'basename': self.name + "_fetch_feed",
- 'name': str(feed.url),
- 'actions': [(update_feed, (feed, ))],
- 'uptodate': [timeout(datetime.timedelta(minutes=
- self.site.config.get('PLANETOID_REFRESH', 60)))],
- }
- yield task
- if not flag:
- yield {
- 'basename': self.name + "_fetch_feed",
- 'name': '',
- 'actions': [],
- }
-
- def task_generate_posts(self):
- """Generate post files for the blog entries."""
- def gen_id(entry):
- h = hashlib.md5()
- h.update(entry.feed.name.encode('utf8'))
- h.update(entry.guid)
- return h.hexdigest()
-
- def generate_post(entry):
- unique_id = gen_id(entry)
- meta_path = os.path.join('posts', unique_id + '.meta')
- post_path = os.path.join('posts', unique_id + '.txt')
- with codecs.open(meta_path, 'wb+', 'utf8') as fd:
- fd.write('%s\n' % entry.title.replace('\n', ' '))
- fd.write('%s\n' % unique_id)
- fd.write('%s\n' % entry.date.strftime('%Y/%m/%d %H:%M'))
- fd.write('\n')
- fd.write('%s\n' % entry.link)
- with codecs.open(post_path, 'wb+', 'utf8') as fd:
- fd.write('.. raw:: html\n\n')
- content = entry.content
- if not content:
- content = 'Sin contenido'
- for line in content.splitlines():
- fd.write(' %s\n' % line)
-
- if not os.path.isdir('posts'):
- os.mkdir('posts')
- flag = False
- for entry in Entry.select().order_by(Entry.date.desc()):
- flag = True
- entry_id = gen_id(entry)
- yield {
- 'basename': self.name + "_generate_posts",
- 'targets': [os.path.join('posts', entry_id + '.meta'), os.path.join('posts', entry_id + '.txt')],
- 'name': entry_id,
- 'actions': [(generate_post, (entry,))],
- 'uptodate': [config_changed({1: entry})],
- 'task_dep': [self.name + "_fetch_feed"],
- }
- if not flag:
- yield {
- 'basename': self.name + "_generate_posts",
- 'name': '',
- 'actions': [],
- }
diff --git a/nikola/plugins/command/plugin.plugin b/nikola/plugins/command/plugin.plugin
new file mode 100644
index 0000000..d2bca92
--- /dev/null
+++ b/nikola/plugins/command/plugin.plugin
@@ -0,0 +1,10 @@
+[Core]
+Name = plugin
+Module = plugin
+
+[Documentation]
+Author = Roberto Alsina and Chris Warrick
+Version = 0.2
+Website = http://getnikola.com
+Description = Manage Nikola plugins
+
diff --git a/nikola/plugins/command/plugin.py b/nikola/plugins/command/plugin.py
new file mode 100644
index 0000000..df0e7a4
--- /dev/null
+++ b/nikola/plugins/command/plugin.py
@@ -0,0 +1,319 @@
+# -*- coding: utf-8 -*-
+
+# Copyright © 2012-2014 Roberto Alsina and others.
+
+# Permission is hereby granted, free of charge, to any
+# person obtaining a copy of this software and associated
+# documentation files (the "Software"), to deal in the
+# Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the
+# Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice
+# shall be included in all copies or substantial portions of
+# the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
+# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+from __future__ import print_function
+import codecs
+from io import BytesIO
+import os
+import shutil
+import subprocess
+import sys
+
+import pygments
+from pygments.lexers import PythonLexer
+from pygments.formatters import TerminalFormatter
+
+try:
+ import requests
+except ImportError:
+ requests = None # NOQA
+
+from nikola.plugin_categories import Command
+from nikola import utils
+
+LOGGER = utils.get_logger('plugin', utils.STDERR_HANDLER)
+
+
+# Backport of textwrap.indent from Python 3.3.2 (not available on Python 2).
+def indent(text, prefix, predicate=None): # NOQA
+ """Adds 'prefix' to the beginning of selected lines in 'text'.
+
+ If 'predicate' is provided, 'prefix' will only be added to the lines
+ where 'predicate(line)' is True. If 'predicate' is not provided,
+ it will default to adding 'prefix' to all non-empty lines that do not
+ consist solely of whitespace characters.
+ """
+ if predicate is None:
+ def predicate(line):
+ return line.strip()
+
+ def prefixed_lines():
+ for line in text.splitlines(True):
+ yield (prefix + line if predicate(line) else line)
+ return ''.join(prefixed_lines())
+
+
+class CommandPlugin(Command):
+ """Manage plugins."""
+
+ json = None
+ name = "plugin"
+ doc_usage = "[[-u][--user] --install name] | [[-u] [-l |--upgrade|--list-installed] | [--uninstall name]]"
+ doc_purpose = "manage plugins"
+ output_dir = None
+ needs_config = False
+ cmd_options = [
+ {
+ 'name': 'install',
+ 'short': 'i',
+ 'long': 'install',
+ 'type': str,
+ 'default': '',
+ 'help': 'Install a plugin.',
+ },
+ {
+ 'name': 'uninstall',
+ 'long': 'uninstall',
+ 'short': 'r',
+ 'type': str,
+ 'default': '',
+ 'help': 'Uninstall a plugin.'
+ },
+ {
+ 'name': 'list',
+ 'short': 'l',
+ 'long': 'list',
+ 'type': bool,
+ 'default': False,
+ 'help': 'Show list of available plugins.'
+ },
+ {
+ 'name': 'url',
+ 'short': 'u',
+ 'long': 'url',
+ 'type': str,
+ 'help': "URL for the plugin repository (default: "
+ "http://plugins.getnikola.com/v7/plugins.json)",
+ 'default': 'http://plugins.getnikola.com/v7/plugins.json'
+ },
+ {
+ 'name': 'user',
+ 'long': 'user',
+ 'type': bool,
+ 'help': "Install user-wide, available for all sites.",
+ 'default': False
+ },
+ {
+ 'name': 'upgrade',
+ 'long': 'upgrade',
+ 'type': bool,
+ 'help': "Upgrade all installed plugins.",
+ 'default': False
+ },
+ {
+ 'name': 'list_installed',
+ 'long': 'list-installed',
+ 'type': bool,
+ 'help': "List the installed plugins with their location.",
+ 'default': False
+ },
+ ]
+
+ def _execute(self, options, args):
+ """Install plugin into current site."""
+ url = options['url']
+ user_mode = options['user']
+
+ # Determine which of the mutually exclusive modes was requested
+ install = options.get('install')
+ uninstall = options.get('uninstall')
+ upgrade = options.get('upgrade')
+ list_available = options.get('list')
+ list_installed = options.get('list_installed')
+ command_count = [bool(x) for x in (
+ install,
+ uninstall,
+ upgrade,
+ list_available,
+ list_installed)].count(True)
+ if command_count > 1 or command_count == 0:
+ print(self.help())
+ return
+
+ if not self.site.configured and not user_mode and install:
+ LOGGER.notice('No site found, assuming --user')
+ user_mode = True
+
+ if user_mode:
+ self.output_dir = os.path.expanduser('~/.nikola/plugins')
+ else:
+ self.output_dir = 'plugins'
+
+ if list_available:
+ self.list_available(url)
+ elif list_installed:
+ self.list_installed()
+ elif upgrade:
+ self.do_upgrade(url)
+ elif uninstall:
+ self.do_uninstall(uninstall)
+ elif install:
+ self.do_install(url, install)
+
+ def list_available(self, url):
+ data = self.get_json(url)
+ print("Available Plugins:")
+ print("------------------")
+ for plugin in sorted(data.keys()):
+ print(plugin)
+ return True
+
+ def list_installed(self):
+ plugins = []
+ for plugin in self.site.plugin_manager.getAllPlugins():
+ p = plugin.path
+ if os.path.isdir(p):
+ p = p + os.sep
+ else:
+ p = p + '.py'
+ plugins.append([plugin.name, p])
+
+ plugins.sort()
+ for name, path in plugins:
+ print('{0} at {1}'.format(name, path))
+
+ def do_upgrade(self, url):
+ LOGGER.warning('This is not very smart, it just reinstalls some plugins and hopes for the best')
+ data = self.get_json(url)
+ plugins = []
+ for plugin in self.site.plugin_manager.getAllPlugins():
+ p = plugin.path
+ if os.path.isdir(p):
+ p = p + os.sep
+ else:
+ p = p + '.py'
+ if plugin.name in data:
+ plugins.append([plugin.name, p])
+ print('Will upgrade {0} plugins: {1}'.format(len(plugins), ', '.join(n for n, _ in plugins)))
+ for name, path in plugins:
+ print('Upgrading {0}'.format(name))
+ p = path
+ while True:
+ tail, head = os.path.split(path)
+ if head == 'plugins':
+ self.output_dir = path
+ break
+ elif tail == '':
+ LOGGER.error("Can't find the plugins folder for path: {0}".format(p))
+ return False
+ else:
+ path = tail
+ self.do_install(url, name)
+
+ def do_install(self, url, name):
+ data = self.get_json(url)
+ if name in data:
+ utils.makedirs(self.output_dir)
+ LOGGER.info('Downloading: ' + data[name])
+ zip_file = BytesIO()
+ zip_file.write(requests.get(data[name]).content)
+ LOGGER.info('Extracting: {0} into {1}/'.format(name, self.output_dir))
+ utils.extract_all(zip_file, self.output_dir)
+ dest_path = os.path.join(self.output_dir, name)
+ else:
+ try:
+ plugin_path = utils.get_plugin_path(name)
+ except:
+ LOGGER.error("Can't find plugin " + name)
+ return False
+
+ utils.makedirs(self.output_dir)
+ dest_path = os.path.join(self.output_dir, name)
+ if os.path.exists(dest_path):
+ LOGGER.error("{0} is already installed".format(name))
+ return False
+
+ LOGGER.info('Copying {0} into plugins'.format(plugin_path))
+ shutil.copytree(plugin_path, dest_path)
+
+ reqpath = os.path.join(dest_path, 'requirements.txt')
+ if os.path.exists(reqpath):
+ LOGGER.notice('This plugin has Python dependencies.')
+ LOGGER.info('Installing dependencies with pip...')
+ try:
+ subprocess.check_call(('pip', 'install', '-r', reqpath))
+ except subprocess.CalledProcessError:
+ LOGGER.error('Could not install the dependencies.')
+ print('Contents of the requirements.txt file:\n')
+ with codecs.open(reqpath, 'rb', 'utf-8') as fh:
+ print(indent(fh.read(), 4 * ' '))
+ print('You have to install those yourself or through a '
+ 'package manager.')
+ else:
+ LOGGER.info('Dependency installation succeeded.')
+ reqnpypath = os.path.join(dest_path, 'requirements-nonpy.txt')
+ if os.path.exists(reqnpypath):
+ LOGGER.notice('This plugin has third-party '
+ 'dependencies you need to install '
+ 'manually.')
+ print('Contents of the requirements-nonpy.txt file:\n')
+ with codecs.open(reqnpypath, 'rb', 'utf-8') as fh:
+ for l in fh.readlines():
+ i, j = l.split('::')
+ print(indent(i.strip(), 4 * ' '))
+ print(indent(j.strip(), 8 * ' '))
+ print()
+
+ print('You have to install those yourself or through a package '
+ 'manager.')
+ confpypath = os.path.join(dest_path, 'conf.py.sample')
+ if os.path.exists(confpypath):
+ LOGGER.notice('This plugin has a sample config file. Integrate it with yours in order to make this plugin work!')
+ print('Contents of the conf.py.sample file:\n')
+ with codecs.open(confpypath, 'rb', 'utf-8') as fh:
+ if self.site.colorful:
+ print(indent(pygments.highlight(
+ fh.read(), PythonLexer(), TerminalFormatter()),
+ 4 * ' '))
+ else:
+ print(indent(fh.read(), 4 * ' '))
+ return True
+
+ def do_uninstall(self, name):
+ for plugin in self.site.plugin_manager.getAllPlugins(): # FIXME: same path-resolution loop as list_installed/do_upgrade
+ p = plugin.path
+ if os.path.isdir(p):
+ p = p + os.sep
+ else:
+ p = os.path.dirname(p)
+ if name == plugin.name: # Uninstall this one
+ LOGGER.warning('About to uninstall plugin: {0}'.format(name))
+ LOGGER.warning('This will delete {0}'.format(p))
+ inpf = raw_input if sys.version_info[0] == 2 else input
+ sure = inpf('Are you sure? [y/n] ')
+ if sure.lower().startswith('y'):
+ LOGGER.warning('Removing {0}'.format(p))
+ shutil.rmtree(p)
+ return True
+ LOGGER.error('Unknown plugin: {0}'.format(name))
+ return False
+
+ def get_json(self, url):
+ if requests is None:
+ utils.req_missing(['requests'], 'install or list available plugins', python=True, optional=False)
+ if self.json is None:
+ self.json = requests.get(url).json()
+ return self.json
diff --git a/nikola/plugins/command/serve.py b/nikola/plugins/command/serve.py
index f27d1f7..623e2db 100644
--- a/nikola/plugins/command/serve.py
+++ b/nikola/plugins/command/serve.py
@@ -89,7 +89,11 @@ class CommandServe(Command):
server_url = "http://{0}:{1}/".format(options['address'], options['port'])
self.logger.info("Opening {0} in the default web browser ...".format(server_url))
webbrowser.open(server_url)
- httpd.serve_forever()
+ try:
+ httpd.serve_forever()
+ except KeyboardInterrupt:
+ self.logger.info("Server is shutting down.")
+ exit(130)
class OurHTTPRequestHandler(SimpleHTTPRequestHandler):
diff --git a/nikola/plugins/compile/asciidoc.py b/nikola/plugins/compile/asciidoc.py
deleted file mode 100644
index 68f96d9..0000000
--- a/nikola/plugins/compile/asciidoc.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright © 2012-2014 Roberto Alsina and others.
-
-# Permission is hereby granted, free of charge, to any
-# person obtaining a copy of this software and associated
-# documentation files (the "Software"), to deal in the
-# Software without restriction, including without limitation
-# the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the
-# Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice
-# shall be included in all copies or substantial portions of
-# the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
-# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
-# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
-# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
-# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-"""Implementation of compile_html based on asciidoc.
-
-You will need, of course, to install asciidoc
-
-"""
-
-import codecs
-import os
-import subprocess
-
-from nikola.plugin_categories import PageCompiler
-from nikola.utils import makedirs, req_missing
-
-try:
- from collections import OrderedDict
-except ImportError:
- OrderedDict = dict # NOQA
-
-
-class CompileAsciiDoc(PageCompiler):
- """Compile asciidoc into HTML."""
-
- name = "asciidoc"
- demote_headers = True
-
- def compile_html(self, source, dest, is_two_file=True):
- makedirs(os.path.dirname(dest))
- try:
- subprocess.check_call(('asciidoc', '-f', 'html', '-s', '-o', dest, source))
- except OSError as e:
- if e.strreror == 'No such file or directory':
- req_missing(['asciidoc'], 'build this site (compile with asciidoc)', python=False)
-
- def create_post(self, path, onefile=False, is_page=False, **kw):
- metadata = OrderedDict()
- metadata.update(self.default_metadata)
- metadata.update(kw)
- makedirs(os.path.dirname(path))
- with codecs.open(path, "wb+", "utf8") as fd:
- if onefile:
- fd.write("/////////////////////////////////////////////\n")
- for k, v in metadata.items():
- fd.write('.. {0}: {1}\n'.format(k, v))
- fd.write("/////////////////////////////////////////////\n")
- fd.write("\nWrite your {0} here.".format('page' if is_page else 'post'))
diff --git a/nikola/plugins/compile/bbcode.py b/nikola/plugins/compile/bbcode.py
deleted file mode 100644
index 0961ffe..0000000
--- a/nikola/plugins/compile/bbcode.py
+++ /dev/null
@@ -1,80 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright © 2012-2014 Roberto Alsina and others.
-
-# Permission is hereby granted, free of charge, to any
-# person obtaining a copy of this software and associated
-# documentation files (the "Software"), to deal in the
-# Software without restriction, including without limitation
-# the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the
-# Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice
-# shall be included in all copies or substantial portions of
-# the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
-# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
-# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
-# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
-# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-"""Implementation of compile_html based on bbcode."""
-
-import codecs
-import os
-import re
-
-try:
- import bbcode
-except ImportError:
- bbcode = None # NOQA
-
-from nikola.plugin_categories import PageCompiler
-from nikola.utils import makedirs, req_missing
-try:
- from collections import OrderedDict
-except ImportError:
- OrderedDict = dict # NOQA
-
-
-class CompileBbcode(PageCompiler):
- """Compile bbcode into HTML."""
-
- name = "bbcode"
-
- def __init__(self):
- if bbcode is None:
- return
- self.parser = bbcode.Parser()
- self.parser.add_simple_formatter("note", "")
-
- def compile_html(self, source, dest, is_two_file=True):
- if bbcode is None:
- req_missing(['bbcode'], 'build this site (compile BBCode)')
- makedirs(os.path.dirname(dest))
- with codecs.open(dest, "w+", "utf8") as out_file:
- with codecs.open(source, "r", "utf8") as in_file:
- data = in_file.read()
- if not is_two_file:
- data = re.split('(\n\n|\r\n\r\n)', data, maxsplit=1)[-1]
- output = self.parser.format(data)
- out_file.write(output)
-
- def create_post(self, path, onefile=False, is_page=False, **kw):
- metadata = OrderedDict()
- metadata.update(self.default_metadata)
- metadata.update(kw)
- makedirs(os.path.dirname(path))
- with codecs.open(path, "wb+", "utf8") as fd:
- if onefile:
- fd.write('[note]<!--\n')
- for k, v in metadata.items():
- fd.write('.. {0}: {1}\n'.format(k, v))
- fd.write('-->[/note]\n\n')
- fd.write("Write your {0} here.".format('page' if is_page else 'post'))
diff --git a/nikola/plugins/compile/html.py b/nikola/plugins/compile/html.py
index 09a9756..fff7f89 100644
--- a/nikola/plugins/compile/html.py
+++ b/nikola/plugins/compile/html.py
@@ -31,12 +31,7 @@ import re
import codecs
from nikola.plugin_categories import PageCompiler
-from nikola.utils import makedirs
-
-try:
- from collections import OrderedDict
-except ImportError:
- OrderedDict = dict # NOQA
+from nikola.utils import makedirs, write_metadata
_META_SEPARATOR = '(' + os.linesep * 2 + '|' + ('\n' * 2) + '|' + ("\r\n" * 2) + ')'
@@ -56,15 +51,20 @@ class CompileHtml(PageCompiler):
out_file.write(data)
return True
- def create_post(self, path, onefile=False, is_page=False, **kw):
- metadata = OrderedDict()
+ def create_post(self, path, **kw):
+ content = kw.pop('content', None)
+ onefile = kw.pop('onefile', False)
+ # is_page is not used by create_post as of now.
+ kw.pop('is_page', False)
+ metadata = {}
metadata.update(self.default_metadata)
metadata.update(kw)
makedirs(os.path.dirname(path))
+ if not content.endswith('\n'):
+ content += '\n'
with codecs.open(path, "wb+", "utf8") as fd:
if onefile:
- fd.write('<!-- \n')
- for k, v in metadata.items():
- fd.write('.. {0}: {1}\n'.format(k, v))
+ fd.write('<!--\n')
+ fd.write(write_metadata(metadata))
fd.write('-->\n\n')
- fd.write("\n<p>Write your {0} here.</p>\n".format('page' if is_page else 'post'))
+ fd.write(content)
diff --git a/nikola/plugins/compile/ipynb.plugin b/nikola/plugins/compile/ipynb.plugin
index 3d15bb0..e258d8a 100644
--- a/nikola/plugins/compile/ipynb.plugin
+++ b/nikola/plugins/compile/ipynb.plugin
@@ -3,7 +3,7 @@ Name = ipynb
Module = ipynb
[Documentation]
-Author = Damián Avila
+Author = Damian Avila
Version = 1.0
Website = http://www.oquanta.info
Description = Compile IPython notebooks into HTML
diff --git a/nikola/plugins/compile/ipynb/__init__.py b/nikola/plugins/compile/ipynb/__init__.py
index 2b1fd28..f4d554c 100644
--- a/nikola/plugins/compile/ipynb/__init__.py
+++ b/nikola/plugins/compile/ipynb/__init__.py
@@ -41,16 +41,12 @@ except ImportError:
from nikola.plugin_categories import PageCompiler
from nikola.utils import makedirs, req_missing
-try:
- from collections import OrderedDict
-except ImportError:
- OrderedDict = dict # NOQA
-
class CompileIPynb(PageCompiler):
"""Compile IPynb into HTML."""
name = "ipynb"
+ supports_onefile = False
def compile_html(self, source, dest, is_two_file=True):
if flag is None:
@@ -66,19 +62,15 @@ class CompileIPynb(PageCompiler):
(body, resources) = exportHtml.from_notebook_node(nb_json)
out_file.write(body)
- def create_post(self, path, onefile=False, is_page=False, **kw):
- metadata = OrderedDict()
- metadata.update(self.default_metadata)
- metadata.update(kw)
- d_name = os.path.dirname(path)
+ def create_post(self, path, **kw):
+ # content and onefile are ignored by ipynb.
+ kw.pop('content', None)
+ onefile = kw.pop('onefile', False)
+ kw.pop('is_page', False)
+
makedirs(os.path.dirname(path))
- meta_path = os.path.join(d_name, kw['slug'] + ".meta")
- with codecs.open(meta_path, "wb+", "utf8") as fd:
- fd.write('\n'.join((metadata['title'], metadata['slug'],
- metadata['date'], metadata['tags'],
- metadata['link'],
- metadata['description'], metadata['type'])))
- print("Your {0}'s metadata is at: {1}".format('page' if is_page else 'post', meta_path))
+ if onefile:
+ raise Exception('The one-file format is not supported by this compiler.')
with codecs.open(path, "wb+", "utf8") as fd:
fd.write("""{
"metadata": {
diff --git a/nikola/plugins/compile/markdown/__init__.py b/nikola/plugins/compile/markdown/__init__.py
index d0fa66a..4182626 100644
--- a/nikola/plugins/compile/markdown/__init__.py
+++ b/nikola/plugins/compile/markdown/__init__.py
@@ -34,30 +34,14 @@ import re
try:
from markdown import markdown
-
- from nikola.plugins.compile.markdown.mdx_nikola import NikolaExtension
- nikola_extension = NikolaExtension()
-
- from nikola.plugins.compile.markdown.mdx_gist import GistExtension
- gist_extension = GistExtension()
-
- from nikola.plugins.compile.markdown.mdx_podcast import PodcastExtension
- podcast_extension = PodcastExtension()
-
except ImportError:
markdown = None # NOQA
nikola_extension = None
gist_extension = None
podcast_extension = None
-
-try:
- from collections import OrderedDict
-except ImportError:
- OrderedDict = dict # NOQA
-
from nikola.plugin_categories import PageCompiler
-from nikola.utils import makedirs, req_missing
+from nikola.utils import makedirs, req_missing, write_metadata
class CompileMarkdown(PageCompiler):
@@ -65,9 +49,22 @@ class CompileMarkdown(PageCompiler):
name = "markdown"
demote_headers = True
- extensions = [gist_extension, nikola_extension, podcast_extension]
+ extensions = []
site = None
+ def set_site(self, site):
+ for plugin_info in site.plugin_manager.getPluginsOfCategory("MarkdownExtension"):
+ if plugin_info.name in site.config['DISABLED_PLUGINS']:
+ site.plugin_manager.removePluginFromCategory(plugin_info, "MarkdownExtension")
+ continue
+
+ site.plugin_manager.activatePluginByName(plugin_info.name)
+ plugin_info.plugin_object.set_site(site)
+ self.extensions.append(plugin_info.plugin_object)
+ plugin_info.plugin_object.short_help = plugin_info.description
+
+ return super(CompileMarkdown, self).set_site(site)
+
def compile_html(self, source, dest, is_two_file=True):
if markdown is None:
req_missing(['markdown'], 'build this site (compile Markdown)')
@@ -81,15 +78,21 @@ class CompileMarkdown(PageCompiler):
output = markdown(data, self.extensions)
out_file.write(output)
- def create_post(self, path, onefile=False, is_page=False, **kw):
- metadata = OrderedDict()
+ def create_post(self, path, **kw):
+ content = kw.pop('content', None)
+ onefile = kw.pop('onefile', False)
+ # is_page is not used by create_post as of now.
+ kw.pop('is_page', False)
+
+ metadata = {}
metadata.update(self.default_metadata)
metadata.update(kw)
makedirs(os.path.dirname(path))
+ if not content.endswith('\n'):
+ content += '\n'
with codecs.open(path, "wb+", "utf8") as fd:
if onefile:
fd.write('<!-- \n')
- for k, v in metadata.items():
- fd.write('.. {0}: {1}\n'.format(k, v))
+ fd.write(write_metadata(metadata))
fd.write('-->\n\n')
- fd.write("Write your {0} here.".format('page' if is_page else 'post'))
+ fd.write(content)
diff --git a/nikola/plugins/compile/bbcode.plugin b/nikola/plugins/compile/markdown/mdx_gist.plugin
index b3d9357..0e5c578 100644
--- a/nikola/plugins/compile/bbcode.plugin
+++ b/nikola/plugins/compile/markdown/mdx_gist.plugin
@@ -1,10 +1,9 @@
[Core]
-Name = bbcode
-Module = bbcode
+Name = mdx_gist
+Module = mdx_gist
[Documentation]
Author = Roberto Alsina
Version = 0.1
Website = http://getnikola.com
-Description = Compile BBCode into HTML
-
+Description = Extension for embedding gists
diff --git a/nikola/plugins/compile/markdown/mdx_gist.py b/nikola/plugins/compile/markdown/mdx_gist.py
index d92295d..247478b 100644
--- a/nikola/plugins/compile/markdown/mdx_gist.py
+++ b/nikola/plugins/compile/markdown/mdx_gist.py
@@ -117,10 +117,18 @@ Error Case: non-existent file:
'''
from __future__ import unicode_literals, print_function
-from markdown.extensions import Extension
-from markdown.inlinepatterns import Pattern
-from markdown.util import AtomicString
-from markdown.util import etree
+
+try:
+ from markdown.extensions import Extension
+ from markdown.inlinepatterns import Pattern
+ from markdown.util import AtomicString
+ from markdown.util import etree
+except ImportError:
+ # No need to catch this, if you try to use this without Markdown,
+ # the markdown compiler will fail first
+ Extension = Pattern = object
+
+from nikola.plugin_categories import MarkdownExtension
from nikola.utils import get_logger, req_missing, STDERR_HANDLER
LOGGER = get_logger('compile_markdown.mdx_gist', STDERR_HANDLER)
@@ -209,7 +217,7 @@ class GistPattern(Pattern):
return gist_elem
-class GistExtension(Extension):
+class GistExtension(MarkdownExtension, Extension):
def __init__(self, configs={}):
# set extension defaults
self.config = {}
diff --git a/nikola/plugins/compile/asciidoc.plugin b/nikola/plugins/compile/markdown/mdx_nikola.plugin
index 47c5608..7af52a4 100644
--- a/nikola/plugins/compile/asciidoc.plugin
+++ b/nikola/plugins/compile/markdown/mdx_nikola.plugin
@@ -1,10 +1,9 @@
[Core]
-Name = asciidoc
-Module = asciidoc
+Name = mdx_nikola
+Module = mdx_nikola
[Documentation]
Author = Roberto Alsina
Version = 0.1
Website = http://getnikola.com
-Description = Compile ASCIIDoc into HTML
-
+Description = Nikola-specific Markdown extensions
diff --git a/nikola/plugins/compile/markdown/mdx_nikola.py b/nikola/plugins/compile/markdown/mdx_nikola.py
index b7c29a5..ca67511 100644
--- a/nikola/plugins/compile/markdown/mdx_nikola.py
+++ b/nikola/plugins/compile/markdown/mdx_nikola.py
@@ -27,23 +27,31 @@
"""Markdown Extension for Nikola-specific post-processing"""
from __future__ import unicode_literals
import re
-from markdown.postprocessors import Postprocessor
-from markdown.extensions import Extension
+try:
+ from markdown.postprocessors import Postprocessor
+ from markdown.extensions import Extension
+except ImportError:
+ # No need to catch this, if you try to use this without Markdown,
+ # the markdown compiler will fail first
+ Postprocessor = Extension = object
+
+from nikola.plugin_categories import MarkdownExtension
+
+# FIXME: duplicated with listings.py
+CODERE = re.compile('<div class="codehilite"><pre>(.*?)</pre></div>', flags=re.MULTILINE | re.DOTALL)
class NikolaPostProcessor(Postprocessor):
def run(self, text):
output = text
- # python-markdown's highlighter uses the class 'codehilite' to wrap
- # code, instead of the standard 'code'. None of the standard
- # pygments stylesheets use this class, so swap it to be 'code'
- output = re.sub(r'(<div[^>]+class="[^"]*)codehilite([^>]+)',
- r'\1code\2', output)
+ # python-markdown's highlighter uses <div class="codehilite"><pre>
+ # for code. We switch it to reST's <pre class="code">.
+ output = CODERE.sub('<pre class="code literal-block">\\1</pre>', output)
return output
-class NikolaExtension(Extension):
+class NikolaExtension(MarkdownExtension, Extension):
def extendMarkdown(self, md, md_globals):
pp = NikolaPostProcessor()
md.postprocessors.add('nikola_post_processor', pp, '_end')
diff --git a/nikola/plugins/compile/markdown/mdx_podcast.plugin b/nikola/plugins/compile/markdown/mdx_podcast.plugin
new file mode 100644
index 0000000..dc16044
--- /dev/null
+++ b/nikola/plugins/compile/markdown/mdx_podcast.plugin
@@ -0,0 +1,9 @@
+[Core]
+Name = mdx_podcast
+Module = mdx_podcast
+
+[Documentation]
+Author = Roberto Alsina
+Version = 0.1
+Website = http://getnikola.com
+Description = Markdown extensions for embedding podcasts and other audio files
diff --git a/nikola/plugins/compile/markdown/mdx_podcast.py b/nikola/plugins/compile/markdown/mdx_podcast.py
index b38b969..9a67910 100644
--- a/nikola/plugins/compile/markdown/mdx_podcast.py
+++ b/nikola/plugins/compile/markdown/mdx_podcast.py
@@ -39,9 +39,15 @@ Basic Example:
<p><audio src="http://archive.org/download/Rebeldes_Stereotipos/rs20120609_1.mp3"></audio></p>
'''
-from markdown.extensions import Extension
-from markdown.inlinepatterns import Pattern
-from markdown.util import etree
+from nikola.plugin_categories import MarkdownExtension
+try:
+ from markdown.extensions import Extension
+ from markdown.inlinepatterns import Pattern
+ from markdown.util import etree
+except ImportError:
+ # No need to catch this, if you try to use this without Markdown,
+ # the markdown compiler will fail first
+ Pattern = Extension = object
PODCAST_RE = r'\[podcast\](?P<url>.+)\[/podcast\]'
@@ -62,7 +68,7 @@ class PodcastPattern(Pattern):
return audio_elem
-class PodcastExtension(Extension):
+class PodcastExtension(MarkdownExtension, Extension):
def __init__(self, configs={}):
# set extension defaults
self.config = {}
diff --git a/nikola/plugins/compile/misaka.plugin b/nikola/plugins/compile/misaka.plugin
deleted file mode 100644
index fef6d71..0000000
--- a/nikola/plugins/compile/misaka.plugin
+++ /dev/null
@@ -1,10 +0,0 @@
-[Core]
-Name = misaka
-Module = misaka
-
-[Documentation]
-Author = Chris Lee
-Version = 0.1
-Website = http://c133.org/
-Description = Compile Markdown into HTML with Mikasa instead of python-markdown
-
diff --git a/nikola/plugins/compile/misaka.py b/nikola/plugins/compile/misaka.py
deleted file mode 100644
index 4951c9f..0000000
--- a/nikola/plugins/compile/misaka.py
+++ /dev/null
@@ -1,87 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright © 2013-2014 Chris Lee and others.
-
-# Permission is hereby granted, free of charge, to any
-# person obtaining a copy of this software and associated
-# documentation files (the "Software"), to deal in the
-# Software without restriction, including without limitation
-# the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the
-# Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice
-# shall be included in all copies or substantial portions of
-# the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
-# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
-# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
-# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
-# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-"""Implementation of compile_html based on misaka."""
-
-from __future__ import unicode_literals
-
-import codecs
-import os
-import re
-
-try:
- import misaka
-except ImportError:
- misaka = None # NOQA
- nikola_extension = None
-try:
- from collections import OrderedDict
-except ImportError:
- OrderedDict = dict # NOQA
-
- gist_extension = None
- podcast_extension = None
-
-from nikola.plugin_categories import PageCompiler
-from nikola.utils import makedirs, req_missing
-
-
-class CompileMisaka(PageCompiler):
- """Compile Misaka into HTML."""
-
- name = "misaka"
- demote_headers = True
-
- def __init__(self, *args, **kwargs):
- super(CompileMisaka, self).__init__(*args, **kwargs)
- if misaka is not None:
- self.ext = misaka.EXT_FENCED_CODE | misaka.EXT_STRIKETHROUGH | \
- misaka.EXT_AUTOLINK | misaka.EXT_NO_INTRA_EMPHASIS
-
- def compile_html(self, source, dest, is_two_file=True):
- if misaka is None:
- req_missing(['misaka'], 'build this site (compile with misaka)')
- makedirs(os.path.dirname(dest))
- with codecs.open(dest, "w+", "utf8") as out_file:
- with codecs.open(source, "r", "utf8") as in_file:
- data = in_file.read()
- if not is_two_file:
- data = re.split('(\n\n|\r\n\r\n)', data, maxsplit=1)[-1]
- output = misaka.html(data, extensions=self.ext)
- out_file.write(output)
-
- def create_post(self, path, onefile=False, is_page=False, **kw):
- metadata = OrderedDict()
- metadata.update(self.default_metadata)
- metadata.update(kw)
- makedirs(os.path.dirname(path))
- with codecs.open(path, "wb+", "utf8") as fd:
- if onefile:
- fd.write('<!-- \n')
- for k, v in metadata.items():
- fd.write('.. {0}: {1}\n'.format(k, v))
- fd.write('-->\n\n')
- fd.write("\nWrite your {0} here.".format('page' if is_page else 'post'))
diff --git a/nikola/plugins/compile/pandoc.py b/nikola/plugins/compile/pandoc.py
index 654c7c8..6aa737e 100644
--- a/nikola/plugins/compile/pandoc.py
+++ b/nikola/plugins/compile/pandoc.py
@@ -35,12 +35,7 @@ import os
import subprocess
from nikola.plugin_categories import PageCompiler
-from nikola.utils import req_missing, makedirs
-
-try:
- from collections import OrderedDict
-except ImportError:
- OrderedDict = dict # NOQA
+from nikola.utils import req_missing, makedirs, write_metadata
class CompilePandoc(PageCompiler):
@@ -56,15 +51,20 @@ class CompilePandoc(PageCompiler):
if e.strreror == 'No such file or directory':
req_missing(['pandoc'], 'build this site (compile with pandoc)', python=False)
- def create_post(self, path, onefile=False, is_page=False, **kw):
- metadata = OrderedDict()
+ def create_post(self, path, **kw):
+ content = kw.pop('content', None)
+ onefile = kw.pop('onefile', False)
+ # is_page is not used by create_post as of now.
+ kw.pop('is_page', False)
+ metadata = {}
metadata.update(self.default_metadata)
metadata.update(kw)
makedirs(os.path.dirname(path))
+ if not content.endswith('\n'):
+ content += '\n'
with codecs.open(path, "wb+", "utf8") as fd:
if onefile:
- fd.write('<!-- \n')
- for k, v in metadata.items():
- fd.write('.. {0}: {1}\n'.format(k, v))
+ fd.write('<!--\n')
+ fd.write(write_metadata(metadata))
fd.write('-->\n\n')
- fd.write("Write your {0} here.".format('page' if is_page else 'post'))
+ fd.write(content)
diff --git a/nikola/plugins/compile/php.py b/nikola/plugins/compile/php.py
index 0a652a6..601f098 100644
--- a/nikola/plugins/compile/php.py
+++ b/nikola/plugins/compile/php.py
@@ -33,12 +33,7 @@ import shutil
import codecs
from nikola.plugin_categories import PageCompiler
-from nikola.utils import makedirs
-
-try:
- from collections import OrderedDict
-except ImportError:
- OrderedDict = dict # NOQA
+from nikola.utils import makedirs, write_metadata
class CompilePhp(PageCompiler):
@@ -50,18 +45,23 @@ class CompilePhp(PageCompiler):
makedirs(os.path.dirname(dest))
shutil.copyfile(source, dest)
- def create_post(self, path, onefile=False, is_page=False, **kw):
- metadata = OrderedDict()
+ def create_post(self, path, **kw):
+ content = kw.pop('content', None)
+ onefile = kw.pop('onefile', False)
+ # is_page is not used by create_post as of now.
+ kw.pop('is_page', False)
+ metadata = {}
metadata.update(self.default_metadata)
metadata.update(kw)
os.makedirs(os.path.dirname(path))
+ if not content.endswith('\n'):
+ content += '\n'
with codecs.open(path, "wb+", "utf8") as fd:
if onefile:
- fd.write('<!-- \n')
- for k, v in metadata.items():
- fd.write('.. {0}: {1}\n'.format(k, v))
+ fd.write('<!--\n')
+ fd.write(write_metadata(metadata))
fd.write('-->\n\n')
- fd.write("\n<p>Write your {0} here.</p>".format('page' if is_page else 'post'))
+ fd.write(content)
def extension(self):
return ".php"
diff --git a/nikola/plugins/compile/rest/__init__.py b/nikola/plugins/compile/rest/__init__.py
index 9a4e19b..a93199c 100644
--- a/nikola/plugins/compile/rest/__init__.py
+++ b/nikola/plugins/compile/rest/__init__.py
@@ -40,13 +40,8 @@ try:
except ImportError:
has_docutils = False
-try:
- from collections import OrderedDict
-except ImportError:
- OrderedDict = dict # NOQA
-
from nikola.plugin_categories import PageCompiler
-from nikola.utils import get_logger, makedirs, req_missing
+from nikola.utils import get_logger, makedirs, req_missing, write_metadata
class CompileRest(PageCompiler):
@@ -102,22 +97,25 @@ class CompileRest(PageCompiler):
else:
return False
- def create_post(self, path, onefile=False, is_page=False, **kw):
- metadata = OrderedDict()
+ def create_post(self, path, **kw):
+ content = kw.pop('content', None)
+ onefile = kw.pop('onefile', False)
+ # is_page is not used by create_post as of now.
+ kw.pop('is_page', False)
+ metadata = {}
metadata.update(self.default_metadata)
metadata.update(kw)
makedirs(os.path.dirname(path))
+ if not content.endswith('\n'):
+ content += '\n'
with codecs.open(path, "wb+", "utf8") as fd:
if onefile:
- for k, v in metadata.items():
- fd.write('.. {0}: {1}\n'.format(k, v))
- fd.write("\nWrite your {0} here.".format('page' if is_page else 'post'))
+ fd.write(write_metadata(metadata))
+ fd.write('\n' + content)
def set_site(self, site):
for plugin_info in site.plugin_manager.getPluginsOfCategory("RestExtension"):
- if (plugin_info.name in site.config['DISABLED_PLUGINS']
- or (plugin_info.name in site.EXTRA_PLUGINS and
- plugin_info.name not in site.config['ENABLED_EXTRAS'])):
+ if plugin_info.name in site.config['DISABLED_PLUGINS']:
site.plugin_manager.removePluginFromCategory(plugin_info, "RestExtension")
continue
diff --git a/nikola/plugins/compile/rest/chart.py b/nikola/plugins/compile/rest/chart.py
index 03878a3..55ddf5c 100644
--- a/nikola/plugins/compile/rest/chart.py
+++ b/nikola/plugins/compile/rest/chart.py
@@ -37,13 +37,16 @@ except ImportError:
from nikola.plugin_categories import RestExtension
from nikola.utils import req_missing
+_site = None
+
class Plugin(RestExtension):
name = "rest_chart"
def set_site(self, site):
- self.site = site
+ global _site
+ _site = self.site = site
directives.register_directive('chart', Chart)
return super(Plugin, self).set_site(site)
@@ -146,5 +149,9 @@ class Chart(Directive):
for line in self.content:
label, series = literal_eval('({0})'.format(line))
chart.add(label, series)
-
- return [nodes.raw('', chart.render().decode('utf8'), format='html')]
+ data = chart.render().decode('utf8')
+ if _site and _site.invariant:
+ import re
+ data = re.sub('id="chart-[a-f0-9\-]+"', 'id="chart-foobar"', data)
+ data = re.sub('#chart-[a-f0-9\-]+', '#chart-foobar', data)
+ return [nodes.raw('', data, format='html')]
diff --git a/nikola/plugins/compile/rest/doc.py b/nikola/plugins/compile/rest/doc.py
index a150a81..6143606 100644
--- a/nikola/plugins/compile/rest/doc.py
+++ b/nikola/plugins/compile/rest/doc.py
@@ -48,7 +48,6 @@ def doc_role(name, rawtext, text, lineno, inliner,
# split link's text and post's slug in role content
has_explicit_title, title, slug = split_explicit_title(text)
-
# check if the slug given is part of our blog posts/pages
twin_slugs = False
post = None
@@ -73,7 +72,6 @@ def doc_role(name, rawtext, text, lineno, inliner,
if not has_explicit_title:
# use post's title as link's text
title = post.title()
-
permalink = post.permalink()
if twin_slugs:
msg = inliner.reporter.warning(
diff --git a/nikola/plugins/compile/rest/listing.py b/nikola/plugins/compile/rest/listing.py
index d70e02d..18a1807 100644
--- a/nikola/plugins/compile/rest/listing.py
+++ b/nikola/plugins/compile/rest/listing.py
@@ -46,6 +46,7 @@ except ImportError: # docutils < 0.9 (Debian Sid For The Loss)
class CodeBlock(Directive):
required_arguments = 1
has_content = True
+ option_spec = {}
CODE = '<pre>{0}</pre>'
def run(self):
diff --git a/nikola/plugins/compile/rest/post_list.py b/nikola/plugins/compile/rest/post_list.py
index 6804b58..456e571 100644
--- a/nikola/plugins/compile/rest/post_list.py
+++ b/nikola/plugins/compile/rest/post_list.py
@@ -124,7 +124,10 @@ class PostList(Directive):
show_all = self.options.get('all', False)
lang = self.options.get('lang', utils.LocaleBorg().current_lang)
template = self.options.get('template', 'post_list_directive.tmpl')
- post_list_id = self.options.get('id', 'post_list_' + uuid.uuid4().hex)
+ if self.site.invariant: # for testing purposes
+ post_list_id = self.options.get('id', 'post_list_' + 'fixedvaluethatisnotauuid')
+ else:
+ post_list_id = self.options.get('id', 'post_list_' + uuid.uuid4().hex)
posts = []
step = -1 if reverse is None else None
diff --git a/nikola/plugins/compile/rest/slides.py b/nikola/plugins/compile/rest/slides.py
index 203ae51..ea8e413 100644
--- a/nikola/plugins/compile/rest/slides.py
+++ b/nikola/plugins/compile/rest/slides.py
@@ -53,12 +53,17 @@ class Slides(Directive):
if len(self.content) == 0:
return
+ if self.site.invariant: # for testing purposes
+ carousel_id = 'slides_' + 'fixedvaluethatisnotauuid'
+ else:
+ carousel_id = 'slides_' + uuid.uuid4().hex
+
output = self.site.template_system.render_template(
'slides.tmpl',
None,
{
- 'content': self.content,
- 'carousel_id': 'slides_' + uuid.uuid4().hex,
+ 'slides_content': self.content,
+ 'carousel_id': carousel_id,
}
)
return [nodes.raw('', output, format='html')]
diff --git a/nikola/plugins/compile/rest/vimeo.py b/nikola/plugins/compile/rest/vimeo.py
index 82c4dc1..4b34dfe 100644
--- a/nikola/plugins/compile/rest/vimeo.py
+++ b/nikola/plugins/compile/rest/vimeo.py
@@ -49,9 +49,9 @@ class Plugin(RestExtension):
return super(Plugin, self).set_site(site)
-CODE = """<iframe src="http://player.vimeo.com/video/{vimeo_id}"
+CODE = """<iframe src="//player.vimeo.com/video/{vimeo_id}"
width="{width}" height="{height}"
-frameborder="0" webkitAllowFullScreen mozallowfullscreen allowFullScreen>
+frameborder="0" webkitAllowFullScreen="webkitAllowFullScreen" mozallowfullscreen="mozallowfullscreen" allowFullScreen="allowFullScreen">
</iframe>
"""
@@ -108,7 +108,7 @@ class Vimeo(Directive):
if json: # we can attempt to retrieve video attributes from vimeo
try:
- url = ('http://vimeo.com/api/v2/video/{0}'
+ url = ('//vimeo.com/api/v2/video/{0}'
'.json'.format(self.arguments[0]))
data = requests.get(url).text
video_attributes = json.loads(data)[0]
diff --git a/nikola/plugins/compile/rest/youtube.py b/nikola/plugins/compile/rest/youtube.py
index 19e12d1..b32e77a 100644
--- a/nikola/plugins/compile/rest/youtube.py
+++ b/nikola/plugins/compile/rest/youtube.py
@@ -44,7 +44,7 @@ class Plugin(RestExtension):
CODE = """\
<iframe width="{width}"
height="{height}"
-src="http://www.youtube.com/embed/{yid}?rel=0&amp;hd=1&amp;wmode=transparent"
+src="//www.youtube.com/embed/{yid}?rel=0&amp;hd=1&amp;wmode=transparent"
></iframe>"""
diff --git a/nikola/plugins/compile/textile.plugin b/nikola/plugins/compile/textile.plugin
deleted file mode 100644
index 6439b0f..0000000
--- a/nikola/plugins/compile/textile.plugin
+++ /dev/null
@@ -1,10 +0,0 @@
-[Core]
-Name = textile
-Module = textile
-
-[Documentation]
-Author = Roberto Alsina
-Version = 0.1
-Website = http://getnikola.com
-Description = Compile Textile into HTML
-
diff --git a/nikola/plugins/compile/textile.py b/nikola/plugins/compile/textile.py
deleted file mode 100644
index 1679831..0000000
--- a/nikola/plugins/compile/textile.py
+++ /dev/null
@@ -1,76 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright © 2012-2014 Roberto Alsina and others.
-
-# Permission is hereby granted, free of charge, to any
-# person obtaining a copy of this software and associated
-# documentation files (the "Software"), to deal in the
-# Software without restriction, including without limitation
-# the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the
-# Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice
-# shall be included in all copies or substantial portions of
-# the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
-# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
-# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
-# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
-# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-"""Implementation of compile_html based on textile."""
-
-import codecs
-import os
-import re
-
-try:
- from textile import textile
-except ImportError:
- textile = None # NOQA
-
-from nikola.plugin_categories import PageCompiler
-from nikola.utils import makedirs, req_missing
-
-try:
- from collections import OrderedDict
-except ImportError:
- OrderedDict = dict # NOQA
-
-
-class CompileTextile(PageCompiler):
- """Compile textile into HTML."""
-
- name = "textile"
- demote_headers = True
-
- def compile_html(self, source, dest, is_two_file=True):
- if textile is None:
- req_missing(['textile'], 'build this site (compile Textile)')
- makedirs(os.path.dirname(dest))
- with codecs.open(dest, "w+", "utf8") as out_file:
- with codecs.open(source, "r", "utf8") as in_file:
- data = in_file.read()
- if not is_two_file:
- data = re.split('(\n\n|\r\n\r\n)', data, maxsplit=1)[-1]
- output = textile(data, head_offset=1)
- out_file.write(output)
-
- def create_post(self, path, onefile=False, is_page=False, **kw):
- metadata = OrderedDict()
- metadata.update(self.default_metadata)
- metadata.update(kw)
- makedirs(os.path.dirname(path))
- with codecs.open(path, "wb+", "utf8") as fd:
- if onefile:
- fd.write('<notextile> <!--\n')
- for k, v in metadata.items():
- fd.write('.. {0}: {1}\n'.format(k, v))
- fd.write('--></notextile>\n\n')
- fd.write("\nWrite your {0} here.".format('page' if is_page else 'post'))
diff --git a/nikola/plugins/compile/txt2tags.plugin b/nikola/plugins/compile/txt2tags.plugin
deleted file mode 100644
index 55eb0a0..0000000
--- a/nikola/plugins/compile/txt2tags.plugin
+++ /dev/null
@@ -1,10 +0,0 @@
-[Core]
-Name = txt2tags
-Module = txt2tags
-
-[Documentation]
-Author = Roberto Alsina
-Version = 0.1
-Website = http://getnikola.com
-Description = Compile Txt2tags into HTML
-
diff --git a/nikola/plugins/compile/txt2tags.py b/nikola/plugins/compile/txt2tags.py
deleted file mode 100644
index bb6afa5..0000000
--- a/nikola/plugins/compile/txt2tags.py
+++ /dev/null
@@ -1,76 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright © 2012-2014 Roberto Alsina and others.
-
-# Permission is hereby granted, free of charge, to any
-# person obtaining a copy of this software and associated
-# documentation files (the "Software"), to deal in the
-# Software without restriction, including without limitation
-# the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the
-# Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice
-# shall be included in all copies or substantial portions of
-# the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
-# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
-# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
-# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
-# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-"""Implementation of compile_html based on txt2tags.
-
-Txt2tags is not in PyPI, you can install it with
-
-easy_install -f "http://txt2tags.org/txt2tags.py#egg=txt2tags-2.6" txt2tags
-
-"""
-
-import codecs
-import os
-
-try:
- from txt2tags import exec_command_line as txt2tags
-except ImportError:
- txt2tags = None # NOQA
-
-try:
- from collections import OrderedDict
-except ImportError:
- OrderedDict = dict # NOQA
-
-from nikola.plugin_categories import PageCompiler
-from nikola.utils import makedirs, req_missing
-
-
-class CompileTxt2tags(PageCompiler):
- """Compile txt2tags into HTML."""
-
- name = "txt2tags"
- demote_headers = True
-
- def compile_html(self, source, dest, is_two_file=True):
- if txt2tags is None:
- req_missing(['txt2tags'], 'build this site (compile txt2tags)')
- makedirs(os.path.dirname(dest))
- cmd = ["-t", "html", "--no-headers", "--outfile", dest, source]
- txt2tags(cmd)
-
- def create_post(self, path, onefile=False, is_page=False, **kw):
- metadata = OrderedDict()
- metadata.update(self.default_metadata)
- metadata.update(kw)
- makedirs(os.path.dirname(path))
- with codecs.open(path, "wb+", "utf8") as fd:
- if onefile:
- fd.write("\n'''\n<!--\n")
- for k, v in metadata.items():
- fd.write('.. {0}: {1}\n'.format(k, v))
- fd.write("-->\n'''\n")
- fd.write("\nWrite your {0} here.".format('page' if is_page else 'post'))
diff --git a/nikola/plugins/compile/wiki.plugin b/nikola/plugins/compile/wiki.plugin
deleted file mode 100644
index eee14a8..0000000
--- a/nikola/plugins/compile/wiki.plugin
+++ /dev/null
@@ -1,10 +0,0 @@
-[Core]
-Name = wiki
-Module = wiki
-
-[Documentation]
-Author = Roberto Alsina
-Version = 0.1
-Website = http://getnikola.com
-Description = Compile WikiMarkup into HTML
-
diff --git a/nikola/plugins/compile/wiki.py b/nikola/plugins/compile/wiki.py
deleted file mode 100644
index f4858c7..0000000
--- a/nikola/plugins/compile/wiki.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright © 2012-2014 Roberto Alsina and others.
-
-# Permission is hereby granted, free of charge, to any
-# person obtaining a copy of this software and associated
-# documentation files (the "Software"), to deal in the
-# Software without restriction, including without limitation
-# the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the
-# Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice
-# shall be included in all copies or substantial portions of
-# the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
-# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
-# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
-# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
-# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-"""Implementation of compile_html based on CreoleWiki."""
-
-import codecs
-import os
-
-try:
- from creole import Parser
- from creole.html_emitter import HtmlEmitter
- creole = True
-except ImportError:
- creole = None
-
-from nikola.plugin_categories import PageCompiler
-try:
- from collections import OrderedDict
-except ImportError:
- OrderedDict = dict # NOQA
-
-from nikola.utils import makedirs, req_missing
-
-
-class CompileWiki(PageCompiler):
- """Compile CreoleWiki into HTML."""
-
- name = "wiki"
- demote_headers = True
-
- def compile_html(self, source, dest, is_two_file=True):
- if creole is None:
- req_missing(['creole'], 'build this site (compile CreoleWiki)')
- makedirs(os.path.dirname(dest))
- with codecs.open(dest, "w+", "utf8") as out_file:
- with codecs.open(source, "r", "utf8") as in_file:
- data = in_file.read()
- document = Parser(data).parse()
- output = HtmlEmitter(document).emit()
- out_file.write(output)
-
- def create_post(self, path, onefile=False, is_page=False, **kw):
- metadata = OrderedDict()
- metadata.update(self.default_metadata)
- metadata.update(kw)
- makedirs(os.path.dirname(path))
- if onefile:
- raise Exception('There are no comments in CreoleWiki markup, so '
- 'one-file format is not possible, use the -2 '
- 'option.')
- with codecs.open(path, "wb+", "utf8") as fd:
- fd.write("Write your {0} here.".format('page' if is_page else 'post'))
diff --git a/nikola/plugins/loghandler/stderr.py b/nikola/plugins/loghandler/stderr.py
index fdc892e..593c381 100644
--- a/nikola/plugins/loghandler/stderr.py
+++ b/nikola/plugins/loghandler/stderr.py
@@ -41,7 +41,11 @@ class StderrHandler(SignalHandler):
conf = self.site.config.get('LOGGING_HANDLERS').get('stderr')
if conf or os.getenv('NIKOLA_DEBUG'):
self.site.loghandlers.append(ColorfulStderrHandler(
- level='DEBUG' if DEBUG else conf.get('loglevel', 'WARNING').upper(),
+ # We do not allow the level to be anything other than 'DEBUG'
+ # or 'INFO'. Any other level can have bad effects on the user
+ # experience and is discouraged.
+ # (oh, and it was incorrectly set to WARNING before)
+ level='DEBUG' if DEBUG or (conf.get('loglevel', 'INFO').upper() == 'DEBUG') else 'INFO',
format_string=u'[{record.time:%Y-%m-%dT%H:%M:%SZ}] {record.level_name}: {record.channel}: {record.message}'
))
diff --git a/nikola/plugins/task/archive.py b/nikola/plugins/task/archive.py
index a65a63f..4f1ab19 100644
--- a/nikola/plugins/task/archive.py
+++ b/nikola/plugins/task/archive.py
@@ -73,16 +73,15 @@ class Archive(Task):
context["permalink"] = self.site.link("archive", year, lang)
if not kw["create_monthly_archive"]:
template_name = "list_post.tmpl"
- post_list = [self.site.global_data[post] for post in posts]
- post_list.sort(key=lambda a: a.date)
+ post_list = sorted(posts, key=lambda a: a.date)
post_list.reverse()
context["posts"] = post_list
else: # Monthly archives, just list the months
- months = set([m.split('/')[1] for m in self.site.posts_per_month.keys() if m.startswith(str(year))])
+ months = set([(m.split('/')[1], self.site.link("archive", m, lang)) for m in self.site.posts_per_month.keys() if m.startswith(str(year))])
months = sorted(list(months))
months.reverse()
template_name = "list.tmpl"
- context["items"] = [[nikola.utils.LocaleBorg().get_month_name(int(month), lang), month] for month in months]
+ context["items"] = [[nikola.utils.LocaleBorg().get_month_name(int(month), lang), link] for month, link in months]
post_list = []
task = self.site.generic_post_list_renderer(
lang,
@@ -93,7 +92,12 @@ class Archive(Task):
context,
)
n = len(post_list) if 'posts' in context else len(months)
- task_cfg = {1: task['uptodate'][0].config, 2: kw, 3: n}
+
+ deps_translatable = {}
+ for k in self.site._GLOBAL_CONTEXT_TRANSLATABLE:
+ deps_translatable[k] = self.site.GLOBAL_CONTEXT[k](lang)
+
+ task_cfg = {1: task['uptodate'][0].config, 2: kw, 3: n, 4: deps_translatable}
task['uptodate'] = [config_changed(task_cfg)]
task['basename'] = self.name
yield task
@@ -106,8 +110,7 @@ class Archive(Task):
kw['output_folder'], self.site.path("archive", yearmonth,
lang))
year, month = yearmonth.split('/')
- post_list = [self.site.global_data[post] for post in posts]
- post_list.sort(key=lambda a: a.date)
+ post_list = sorted(posts, key=lambda a: a.date)
post_list.reverse()
context = {}
context["lang"] = lang
@@ -141,8 +144,8 @@ class Archive(Task):
kw['output_folder'], self.site.path("archive", None,
lang))
context["title"] = kw["messages"][lang]["Archive"]
- context["items"] = [(year, self.site.link("archive", year, lang))
- for year in years]
+ context["items"] = [(y, self.site.link("archive", y, lang))
+ for y in years]
context["permalink"] = self.site.link("archive", None, lang)
task = self.site.generic_post_list_renderer(
lang,
diff --git a/nikola/plugins/task/build_less.plugin b/nikola/plugins/task/build_less.plugin
deleted file mode 100644
index 27ca8cd..0000000
--- a/nikola/plugins/task/build_less.plugin
+++ /dev/null
@@ -1,10 +0,0 @@
-[Core]
-Name = build_less
-Module = build_less
-
-[Documentation]
-Author = Roberto Alsina
-Version = 0.1
-Website = http://getnikola.com
-Description = Build CSS out of LESS sources
-
diff --git a/nikola/plugins/task/build_less.py b/nikola/plugins/task/build_less.py
deleted file mode 100644
index a672282..0000000
--- a/nikola/plugins/task/build_less.py
+++ /dev/null
@@ -1,118 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright © 2012-2014 Roberto Alsina and others.
-
-# Permission is hereby granted, free of charge, to any
-# person obtaining a copy of this software and associated
-# documentation files (the "Software"), to deal in the
-# Software without restriction, including without limitation
-# the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the
-# Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice
-# shall be included in all copies or substantial portions of
-# the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
-# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
-# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
-# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
-# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-from __future__ import unicode_literals
-
-import codecs
-import glob
-import os
-import sys
-import subprocess
-
-from nikola.plugin_categories import Task
-from nikola import utils
-
-
-class BuildLess(Task):
- """Generate CSS out of LESS sources."""
-
- name = "build_less"
- sources_folder = "less"
- sources_ext = ".less"
-
- def gen_tasks(self):
- """Generate CSS out of LESS sources."""
- self.compiler_name = self.site.config['LESS_COMPILER']
- self.compiler_options = self.site.config['LESS_OPTIONS']
-
- kw = {
- 'cache_folder': self.site.config['CACHE_FOLDER'],
- 'themes': self.site.THEMES,
- }
- tasks = {}
-
- # Find where in the theme chain we define the LESS targets
- # There can be many *.less in the folder, but we only will build
- # the ones listed in less/targets
- if os.path.isfile(os.path.join(self.sources_folder, "targets")):
- targets_path = os.path.join(self.sources_folder, "targets")
- else:
- targets_path = utils.get_asset_path(os.path.join(self.sources_folder, "targets"), self.site.THEMES)
- try:
- with codecs.open(targets_path, "rb", "utf-8") as inf:
- targets = [x.strip() for x in inf.readlines()]
- except Exception:
- targets = []
-
- for task in utils.copy_tree(self.sources_folder, os.path.join(kw['cache_folder'], self.sources_folder)):
- if task['name'] in tasks:
- continue
- task['basename'] = 'prepare_less_sources'
- tasks[task['name']] = task
- yield task
-
- for theme_name in kw['themes']:
- src = os.path.join(utils.get_theme_path(theme_name), self.sources_folder)
- for task in utils.copy_tree(src, os.path.join(kw['cache_folder'], self.sources_folder)):
- task['basename'] = 'prepare_less_sources'
- yield task
-
- # Build targets and write CSS files
- base_path = utils.get_theme_path(self.site.THEMES[0])
- dst_dir = os.path.join(self.site.config['OUTPUT_FOLDER'], 'assets', 'css')
- # Make everything depend on all sources, rough but enough
- deps = glob.glob(os.path.join(
- base_path,
- self.sources_folder,
- "*{0}".format(self.sources_ext)))
-
- def compile_target(target, dst):
- utils.makedirs(dst_dir)
- src = os.path.join(kw['cache_folder'], self.sources_folder, target)
- run_in_shell = sys.platform == 'win32'
- try:
- compiled = subprocess.check_output([self.compiler_name] + self.compiler_options + [src], shell=run_in_shell)
- except OSError:
- utils.req_missing([self.compiler_name],
- 'build LESS files (and use this theme)',
- False, False)
- with open(dst, "wb+") as outf:
- outf.write(compiled)
-
- yield self.group_task()
-
- for target in targets:
- dst = os.path.join(dst_dir, target.replace(self.sources_ext, ".css"))
- yield {
- 'basename': self.name,
- 'name': dst,
- 'targets': [dst],
- 'file_dep': deps,
- 'task_dep': ['prepare_less_sources'],
- 'actions': ((compile_target, [target, dst]), ),
- 'uptodate': [utils.config_changed(kw)],
- 'clean': True
- }
diff --git a/nikola/plugins/task/build_sass.plugin b/nikola/plugins/task/build_sass.plugin
deleted file mode 100644
index 746c1df..0000000
--- a/nikola/plugins/task/build_sass.plugin
+++ /dev/null
@@ -1,9 +0,0 @@
-[Core]
-Name = build_sass
-Module = build_sass
-
-[Documentation]
-Author = Roberto Alsina, Chris “Kwpolska” Warrick
-Version = 0.1
-Website = http://getnikola.com
-Description = Build CSS out of Sass sources
diff --git a/nikola/plugins/task/build_sass.py b/nikola/plugins/task/build_sass.py
deleted file mode 100644
index becc843..0000000
--- a/nikola/plugins/task/build_sass.py
+++ /dev/null
@@ -1,139 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright © 2012-2014 Roberto Alsina and others.
-
-# Permission is hereby granted, free of charge, to any
-# person obtaining a copy of this software and associated
-# documentation files (the "Software"), to deal in the
-# Software without restriction, including without limitation
-# the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the
-# Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice
-# shall be included in all copies or substantial portions of
-# the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
-# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
-# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
-# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
-# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-from __future__ import unicode_literals
-
-import codecs
-import glob
-import os
-import sys
-import subprocess
-
-from nikola.plugin_categories import Task
-from nikola import utils
-
-
-class BuildSass(Task):
- """Generate CSS out of Sass sources."""
-
- name = "build_sass"
- sources_folder = "sass"
- sources_ext = (".sass", ".scss")
-
- def gen_tasks(self):
- """Generate CSS out of Sass sources."""
- self.logger = utils.get_logger('build_sass', self.site.loghandlers)
- self.compiler_name = self.site.config['SASS_COMPILER']
- self.compiler_options = self.site.config['SASS_OPTIONS']
-
- kw = {
- 'cache_folder': self.site.config['CACHE_FOLDER'],
- 'themes': self.site.THEMES,
- }
- tasks = {}
-
- # Find where in the theme chain we define the Sass targets
- # There can be many *.sass/*.scss in the folder, but we only
- # will build the ones listed in sass/targets
- if os.path.isfile(os.path.join(self.sources_folder, "targets")):
- targets_path = os.path.join(self.sources_folder, "targets")
- else:
- targets_path = utils.get_asset_path(os.path.join(self.sources_folder, "targets"), self.site.THEMES)
- try:
- with codecs.open(targets_path, "rb", "utf-8") as inf:
- targets = [x.strip() for x in inf.readlines()]
- except Exception:
- targets = []
-
- for task in utils.copy_tree(self.sources_folder, os.path.join(kw['cache_folder'], self.sources_folder)):
- if task['name'] in tasks:
- continue
- task['basename'] = 'prepare_sass_sources'
- tasks[task['name']] = task
- yield task
-
- for theme_name in kw['themes']:
- src = os.path.join(utils.get_theme_path(theme_name), self.sources_folder)
- for task in utils.copy_tree(src, os.path.join(kw['cache_folder'], self.sources_folder)):
- if task['name'] in tasks:
- continue
- task['basename'] = 'prepare_sass_sources'
- tasks[task['name']] = task
- yield task
-
- # Build targets and write CSS files
- base_path = utils.get_theme_path(self.site.THEMES[0])
- dst_dir = os.path.join(self.site.config['OUTPUT_FOLDER'], 'assets', 'css')
- # Make everything depend on all sources, rough but enough
- deps = glob.glob(os.path.join(
- base_path,
- self.sources_folder,
- *("*{0}".format(ext) for ext in self.sources_ext)))
-
- def compile_target(target, dst):
- utils.makedirs(dst_dir)
- run_in_shell = sys.platform == 'win32'
- src = os.path.join(kw['cache_folder'], self.sources_folder, target)
- try:
- compiled = subprocess.check_output([self.compiler_name] + self.compiler_options + [src], shell=run_in_shell)
- except OSError:
- utils.req_missing([self.compiler_name],
- 'build Sass files (and use this theme)',
- False, False)
- with open(dst, "wb+") as outf:
- outf.write(compiled)
-
- yield self.group_task()
-
- # We can have file conflicts. This is a way to prevent them.
- # I orignally wanted to use sets and their cannot-have-duplicates
- # magic, but I decided not to do this so we can show the user
- # what files were problematic.
- # If we didn’t do this, there would be a cryptic message from doit
- # instead.
- seennames = {}
- for target in targets:
- base = os.path.splitext(target)[0]
- dst = os.path.join(dst_dir, base + ".css")
-
- if base in seennames:
- self.logger.error(
- 'Duplicate filenames for Sass compiled files: {0} and '
- '{1} (both compile to {2})'.format(
- seennames[base], target, base + ".css"))
- else:
- seennames.update({base: target})
-
- yield {
- 'basename': self.name,
- 'name': dst,
- 'targets': [dst],
- 'file_dep': deps,
- 'task_dep': ['prepare_sass_sources'],
- 'actions': ((compile_target, [target, dst]), ),
- 'uptodate': [utils.config_changed(kw)],
- 'clean': True
- }
diff --git a/nikola/plugins/task/bundles.py b/nikola/plugins/task/bundles.py
index fcfaf42..7437a9d 100644
--- a/nikola/plugins/task/bundles.py
+++ b/nikola/plugins/task/bundles.py
@@ -65,8 +65,7 @@ class BuildBundles(LateTask):
def build_bundle(output, inputs):
out_dir = os.path.join(kw['output_folder'],
os.path.dirname(output))
- inputs = [i for i in inputs if os.path.isfile(
- os.path.join(out_dir, i))]
+ inputs = [os.path.relpath(i, out_dir) for i in inputs if os.path.isfile(i)]
cache_dir = os.path.join(kw['cache_folder'], 'webassets')
utils.makedirs(cache_dir)
env = webassets.Environment(out_dir, os.path.dirname(output),
@@ -83,20 +82,32 @@ class BuildBundles(LateTask):
yield self.group_task()
if (webassets is not None and self.site.config['USE_BUNDLES'] is not
False):
- for name, files in kw['theme_bundles'].items():
+ for name, _files in kw['theme_bundles'].items():
output_path = os.path.join(kw['output_folder'], name)
dname = os.path.dirname(name)
- file_dep = [os.path.join(kw['output_folder'], dname, fname)
+ files = []
+ for fname in _files:
+ # paths are relative to dirname
+ files.append(os.path.join(dname, fname))
+ file_dep = [os.path.join(kw['output_folder'], fname)
for fname in files if
- utils.get_asset_path(fname, self.site.THEMES, self.site.config['FILES_FOLDERS'])]
+ utils.get_asset_path(fname, self.site.THEMES, self.site.config['FILES_FOLDERS'])
+ or fname == 'assets/css/code.css']
+ # code.css will be generated by us if it does not exist in
+ # FILES_FOLDERS or theme assets. It is guaranteed that the
+ # generation will happen before this task.
task = {
'file_dep': list(file_dep),
- 'task_dep': ['copy_assets'],
+ 'task_dep': ['copy_assets', 'copy_files'],
'basename': str(self.name),
'name': str(output_path),
- 'actions': [(build_bundle, (name, files))],
+ 'actions': [(build_bundle, (name, file_dep))],
'targets': [output_path],
- 'uptodate': [utils.config_changed(kw)],
+ 'uptodate': [
+ utils.config_changed({
+ 1: kw,
+ 2: file_dep
+ })],
'clean': True,
}
yield utils.apply_filters(task, kw['filters'])
diff --git a/nikola/plugins/task/copy_assets.py b/nikola/plugins/task/copy_assets.py
index 93b7fb3..4801347 100644
--- a/nikola/plugins/task/copy_assets.py
+++ b/nikola/plugins/task/copy_assets.py
@@ -45,13 +45,21 @@ class CopyAssets(Task):
kw = {
"themes": self.site.THEMES,
+ "files_folders": self.site.config['FILES_FOLDERS'],
"output_folder": self.site.config['OUTPUT_FOLDER'],
"filters": self.site.config['FILTERS'],
"code_color_scheme": self.site.config['CODE_COLOR_SCHEME'],
+ "code.css_selectors": 'pre.code',
+ "code.css_head": '/* code.css file generated by Nikola */\n',
+ "code.css_close": "\ntable.codetable { width: 100%;} td.linenos {text-align: right; width: 4em;}\n",
}
- has_code_css = False
tasks = {}
code_css_path = os.path.join(kw['output_folder'], 'assets', 'css', 'code.css')
+ code_css_input = utils.get_asset_path('assets/css/code.css',
+ themes=kw['themes'],
+ files_folders=kw['files_folders'])
+
+ kw["code.css_input"] = code_css_input
yield self.group_task()
@@ -61,28 +69,35 @@ class CopyAssets(Task):
for task in utils.copy_tree(src, dst):
if task['name'] in tasks:
continue
- if task['targets'][0] == code_css_path:
- has_code_css = True
tasks[task['name']] = task
task['uptodate'] = [utils.config_changed(kw)]
task['basename'] = self.name
+ if code_css_input:
+ task['file_dep'] = [code_css_input]
yield utils.apply_filters(task, kw['filters'])
- if not has_code_css: # Generate it
-
+ # Check whether or not there is a code.css file around.
+ if not code_css_input:
def create_code_css():
from pygments.formatters import get_formatter_by_name
formatter = get_formatter_by_name('html', style=kw["code_color_scheme"])
utils.makedirs(os.path.dirname(code_css_path))
with codecs.open(code_css_path, 'wb+', 'utf8') as outf:
- outf.write(formatter.get_style_defs(['pre.code', 'div.code pre']))
- outf.write("\ntable.codetable { width: 100%;} td.linenos {text-align: right; width: 4em;}\n")
+ outf.write(kw["code.css_head"])
+ outf.write(formatter.get_style_defs(kw["code.css_selectors"]))
+ outf.write(kw["code.css_close"])
+
+ if os.path.exists(code_css_path):
+ with codecs.open(code_css_path, 'r', 'utf-8') as fh:
+ testcontents = fh.read(len(kw["code.css_head"])) == kw["code.css_head"]
+ else:
+ testcontents = False
task = {
'basename': self.name,
'name': code_css_path,
'targets': [code_css_path],
- 'uptodate': [utils.config_changed(kw)],
+ 'uptodate': [utils.config_changed(kw), testcontents],
'actions': [(create_code_css, [])],
'clean': True,
}
diff --git a/nikola/plugins/task/galleries.py b/nikola/plugins/task/galleries.py
index 880d47c..366374b 100644
--- a/nikola/plugins/task/galleries.py
+++ b/nikola/plugins/task/galleries.py
@@ -36,6 +36,7 @@ try:
except ImportError:
from urllib.parse import urljoin # NOQA
+import natsort
Image = None
try:
from PIL import Image, ExifTags # NOQA
@@ -46,6 +47,7 @@ except ImportError:
Image = _Image
except ImportError:
pass
+
import PyRSS2Gen as rss
from nikola.plugin_categories import Task
@@ -97,9 +99,15 @@ class Galleries(Task):
'filters': self.site.config['FILTERS'],
'translations': self.site.config['TRANSLATIONS'],
'global_context': self.site.GLOBAL_CONTEXT,
- "feed_length": self.site.config['FEED_LENGTH'],
+ 'feed_length': self.site.config['FEED_LENGTH'],
+ 'tzinfo': self.site.tzinfo,
+ 'comments_in_galleries': self.site.config['COMMENTS_IN_GALLERIES'],
+ 'generate_rss': self.site.config['GENERATE_RSS'],
}
+ for k, v in self.site.GLOBAL_CONTEXT['template_hooks'].items():
+ self.kw['||template_hooks|{0}||'.format(k)] = v._items
+
yield self.group_task()
template_name = "gallery.tmpl"
@@ -152,6 +160,9 @@ class Galleries(Task):
os.path.relpath(gallery, self.kw['gallery_path']), lang))
dst = os.path.normpath(dst)
+ for k in self.site._GLOBAL_CONTEXT_TRANSLATABLE:
+ self.kw[k] = self.site.GLOBAL_CONTEXT[k](lang)
+
context = {}
context["lang"] = lang
if post:
@@ -165,12 +176,8 @@ class Galleries(Task):
if self.kw['use_filename_as_title']:
img_titles = []
for fn in image_name_list:
- name_without_ext = os.path.splitext(fn)[0]
- img_titles.append(
- 'id="{0}" alt="{1}" title="{2}"'.format(
- name_without_ext,
- name_without_ext,
- utils.unslugify(name_without_ext)))
+ name_without_ext = os.path.splitext(os.path.basename(fn))[0]
+ img_titles.append(utils.unslugify(name_without_ext))
else:
img_titles = [''] * len(image_name_list)
@@ -189,27 +196,30 @@ class Galleries(Task):
ft = folder
folders.append((folder, ft))
- ## TODO: in v7 remove images from context, use photo_array
- context["images"] = list(zip(image_name_list, thumbs, img_titles))
- context["folders"] = folders
+ context["folders"] = natsort.natsorted(folders)
context["crumbs"] = crumbs
context["permalink"] = self.site.link(
"gallery", os.path.basename(
os.path.relpath(gallery, self.kw['gallery_path'])), lang)
- # FIXME: use kw
- context["enable_comments"] = (
- self.site.config["COMMENTS_IN_GALLERIES"])
+ context["enable_comments"] = self.kw['comments_in_galleries']
context["thumbnail_size"] = self.kw["thumbnail_size"]
- # FIXME: render post in a task
if post:
- post.compile(lang)
- context['text'] = post.text(lang)
+ yield {
+ 'basename': self.name,
+ 'name': post.translated_base_path(lang),
+ 'targets': [post.translated_base_path(lang)],
+ 'file_dep': post.fragment_deps(lang),
+ 'actions': [(post.compile, [lang])],
+ 'uptodate': [utils.config_changed(self.kw)]
+ }
+ context['post'] = post
else:
- context['text'] = ''
-
+ context['post'] = None
file_dep = self.site.template_system.template_deps(
template_name) + image_list + thumbs
+ if post:
+ file_dep += [post.translated_base_path(l) for l in self.kw['translations']]
yield utils.apply_filters({
'basename': self.name,
@@ -222,6 +232,7 @@ class Galleries(Task):
dst,
context,
dest_img_list,
+ img_titles,
thumbs,
file_dep))],
'clean': True,
@@ -233,39 +244,40 @@ class Galleries(Task):
}, self.kw['filters'])
# RSS for the gallery
- rss_dst = os.path.join(
- self.kw['output_folder'],
- self.site.path(
- "gallery_rss",
- os.path.relpath(gallery, self.kw['gallery_path']), lang))
- rss_dst = os.path.normpath(rss_dst)
-
- yield utils.apply_filters({
- 'basename': self.name,
- 'name': rss_dst,
- 'file_dep': file_dep,
- 'targets': [rss_dst],
- 'actions': [
- (self.gallery_rss, (
- image_list,
- img_titles,
- lang,
- self.site.link(
- "gallery_rss", os.path.basename(gallery), lang),
- rss_dst,
- context['title']
- ))],
- 'clean': True,
- 'uptodate': [utils.config_changed({
- 1: self.kw,
- })],
- }, self.kw['filters'])
+ if self.kw["generate_rss"]:
+ rss_dst = os.path.join(
+ self.kw['output_folder'],
+ self.site.path(
+ "gallery_rss",
+ os.path.relpath(gallery, self.kw['gallery_path']), lang))
+ rss_dst = os.path.normpath(rss_dst)
+
+ yield utils.apply_filters({
+ 'basename': self.name,
+ 'name': rss_dst,
+ 'file_dep': file_dep,
+ 'targets': [rss_dst],
+ 'actions': [
+ (self.gallery_rss, (
+ image_list,
+ img_titles,
+ lang,
+ self.site.link(
+ "gallery_rss", os.path.basename(gallery), lang),
+ rss_dst,
+ context['title']
+ ))],
+ 'clean': True,
+ 'uptodate': [utils.config_changed({
+ 1: self.kw,
+ })],
+ }, self.kw['filters'])
def find_galleries(self):
"""Find all galleries to be processed according to conf.py"""
self.gallery_list = []
- for root, dirs, files in os.walk(self.kw['gallery_path']):
+ for root, dirs, files in os.walk(self.kw['gallery_path'], followlinks=True):
self.gallery_list.append(root)
def create_galleries(self):
@@ -433,6 +445,7 @@ class Galleries(Task):
output_name,
context,
img_list,
+ img_titles,
thumbs,
file_dep):
"""Build the gallery index."""
@@ -446,12 +459,9 @@ class Galleries(Task):
return url
photo_array = []
- for img, thumb in zip(img_list, thumbs):
+ for img, thumb, title in zip(img_list, thumbs, img_titles):
im = Image.open(thumb)
w, h = im.size
- title = ''
- if self.kw['use_filename_as_title']:
- title = utils.unslugify(os.path.splitext(img)[0])
# Thumbs are files in output, we need URLs
photo_array.append({
'url': url_from_path(img),
@@ -462,9 +472,8 @@ class Galleries(Task):
'h': h
},
})
- context['photo_array_json'] = json.dumps(photo_array)
context['photo_array'] = photo_array
-
+ context['photo_array_json'] = json.dumps(photo_array)
self.site.render_template(template_name, output_name, context)
def gallery_rss(self, img_list, img_titles, lang, permalink, output_path, title):
@@ -478,12 +487,12 @@ class Galleries(Task):
return urljoin(self.site.config['BASE_URL'], url)
items = []
- for img, full_title in list(zip(img_list, img_titles))[:self.kw["feed_length"]]:
+ for img, title in list(zip(img_list, img_titles))[:self.kw["feed_length"]]:
img_size = os.stat(
os.path.join(
self.site.config['OUTPUT_FOLDER'], img)).st_size
args = {
- 'title': full_title.split('"')[-2] if full_title else '',
+ 'title': title,
'link': make_url(img),
'guid': rss.Guid(img, False),
'pubDate': self.image_date(img),
@@ -494,17 +503,16 @@ class Galleries(Task):
),
}
items.append(rss.RSSItem(**args))
- rss_obj = utils.ExtendedRSS2(
+ rss_obj = rss.RSS2(
title=title,
link=make_url(permalink),
description='',
lastBuildDate=datetime.datetime.now(),
items=items,
- generator='Nikola <http://getnikola.com/>',
+ generator='http://getnikola.com/',
language=lang
)
- rss_obj.self_url = make_url(permalink)
- rss_obj.rss_attrs["xmlns:atom"] = "http://www.w3.org/2005/Atom"
+ rss_obj.rss_attrs["xmlns:dc"] = "http://purl.org/dc/elements/1.1/"
dst_dir = os.path.dirname(output_path)
utils.makedirs(dst_dir)
with codecs.open(output_path, "wb+", "utf-8") as rss_file:
@@ -564,7 +572,7 @@ class Galleries(Task):
if exif is not None:
for tag, value in list(exif.items()):
decoded = ExifTags.TAGS.get(tag, tag)
- if decoded == 'DateTimeOriginal':
+ if decoded in ('DateTimeOriginal', 'DateTimeDigitized'):
try:
self.dates[src] = datetime.datetime.strptime(
value, r'%Y:%m:%d %H:%M:%S')
diff --git a/nikola/plugins/task/indexes.py b/nikola/plugins/task/indexes.py
index 3f45161..386cc18 100644
--- a/nikola/plugins/task/indexes.py
+++ b/nikola/plugins/task/indexes.py
@@ -25,8 +25,7 @@
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import unicode_literals
-import glob
-import itertools
+from collections import defaultdict
import os
from nikola.plugin_categories import Task
@@ -54,22 +53,23 @@ class Indexes(Task):
"index_teasers": self.site.config['INDEX_TEASERS'],
"output_folder": self.site.config['OUTPUT_FOLDER'],
"filters": self.site.config['FILTERS'],
- "hide_untranslated_posts": self.site.config['HIDE_UNTRANSLATED_POSTS'],
+ "show_untranslated_posts": self.site.config['SHOW_UNTRANSLATED_POSTS'],
"indexes_title": self.site.config['INDEXES_TITLE'],
"indexes_pages": self.site.config['INDEXES_PAGES'],
"indexes_pages_main": self.site.config['INDEXES_PAGES_MAIN'],
"blog_title": self.site.config["BLOG_TITLE"],
+ "rss_read_more_link": self.site.config["RSS_READ_MORE_LINK"],
}
template_name = "index.tmpl"
- posts = [x for x in self.site.timeline if x.use_in_feeds]
+ posts = self.site.posts
for lang in kw["translations"]:
# Split in smaller lists
lists = []
- if kw["hide_untranslated_posts"]:
- filtered_posts = [x for x in posts if x.is_translation_available(lang)]
- else:
+ if kw["show_untranslated_posts"]:
filtered_posts = posts
+ else:
+ filtered_posts = [x for x in posts if x.is_translation_available(lang)]
lists.append(filtered_posts[:kw["index_display_post_count"]])
filtered_posts = filtered_posts[kw["index_display_post_count"]:]
while filtered_posts:
@@ -78,7 +78,7 @@ class Indexes(Task):
num_pages = len(lists)
for i, post_list in enumerate(lists):
context = {}
- indexes_title = kw['indexes_title'] or kw['blog_title']
+ indexes_title = kw['indexes_title'] or kw['blog_title'](lang)
if kw["indexes_pages_main"]:
ipages_i = i + 1
ipages_msg = "page %d"
@@ -134,33 +134,33 @@ class Indexes(Task):
"post_pages": self.site.config["post_pages"],
"output_folder": self.site.config['OUTPUT_FOLDER'],
"filters": self.site.config['FILTERS'],
+ "index_file": self.site.config['INDEX_FILE'],
}
template_name = "list.tmpl"
for lang in kw["translations"]:
# Need to group by folder to avoid duplicated tasks (Issue #758)
- for dirname, wildcards in itertools.groupby((w for w, d, x, i in kw["post_pages"] if not i), os.path.dirname):
- context = {}
- # vim/pyflakes thinks it's unused
- # src_dir = os.path.dirname(wildcard)
- files = []
- for wildcard in wildcards:
- files += glob.glob(wildcard)
- post_list = [self.site.global_data[p] for p in files]
- output_name = os.path.join(kw["output_folder"],
- self.site.path("post_path",
- wildcard,
- lang)).encode('utf8')
- context["items"] = [(post.title(lang), post.permalink(lang))
- for post in post_list]
- task = self.site.generic_post_list_renderer(lang, post_list,
- output_name,
- template_name,
- kw['filters'],
- context)
- task_cfg = {1: task['uptodate'][0].config, 2: kw}
- task['uptodate'] = [config_changed(task_cfg)]
- task['basename'] = self.name
- yield task
+ # Group all pages by path prefix
+ groups = defaultdict(list)
+ for p in self.site.timeline:
+ if not p.is_post:
+ dirname = os.path.dirname(p.destination_path(lang))
+ groups[dirname].append(p)
+ for dirname, post_list in groups.items():
+ context = {}
+ context["items"] = [
+ (post.title(lang), post.permalink(lang))
+ for post in post_list
+ ]
+ output_name = os.path.join(kw['output_folder'], dirname, kw['index_file'])
+ task = self.site.generic_post_list_renderer(lang, post_list,
+ output_name,
+ template_name,
+ kw['filters'],
+ context)
+ task_cfg = {1: task['uptodate'][0].config, 2: kw}
+ task['uptodate'] = [config_changed(task_cfg)]
+ task['basename'] = self.name
+ yield task
def index_path(self, name, lang):
if name not in [None, 0]:
diff --git a/nikola/plugins/task/listings.py b/nikola/plugins/task/listings.py
index 86be6c4..a0fe974 100644
--- a/nikola/plugins/task/listings.py
+++ b/nikola/plugins/task/listings.py
@@ -31,11 +31,17 @@ import os
from pygments import highlight
from pygments.lexers import get_lexer_for_filename, TextLexer
from pygments.formatters import HtmlFormatter
+import natsort
+import re
from nikola.plugin_categories import Task
from nikola import utils
+# FIXME: (almost) duplicated with mdx_nikola.py
+CODERE = re.compile('<div class="code"><pre>(.*?)</pre></div>', flags=re.MULTILINE | re.DOTALL)
+
+
class Listings(Task):
"""Render pretty listings."""
@@ -69,6 +75,9 @@ class Listings(Task):
linenos="table", nowrap=False,
lineanchors=utils.slugify(in_name),
anchorlinenos=True))
+ # the pygments highlighter uses <div class="codehilite"><pre>
+ # for code. We switch it to reST's <pre class="code">.
+ code = CODERE.sub('<pre class="code literal-block">\\1</pre>', code)
title = os.path.basename(in_name)
else:
code = ''
@@ -76,14 +85,27 @@ class Listings(Task):
crumbs = utils.get_crumbs(os.path.relpath(out_name,
kw['output_folder']),
is_file=True)
+ permalink = self.site.link(
+ 'listing',
+ os.path.relpath(
+ out_name,
+ os.path.join(
+ kw['output_folder'],
+ kw['listings_folder'])))
+ if self.site.config['COPY_SOURCES']:
+ source_link = permalink[:-5]
+ else:
+ source_link = None
context = {
'code': code,
'title': title,
'crumbs': crumbs,
+ 'permalink': permalink,
'lang': kw['default_lang'],
- 'folders': folders,
- 'files': files,
+ 'folders': natsort.natsorted(folders),
+ 'files': natsort.natsorted(files),
'description': title,
+ 'source_link': source_link,
}
self.site.render_template('listing.tmpl', out_name,
context)
@@ -91,7 +113,21 @@ class Listings(Task):
yield self.group_task()
template_deps = self.site.template_system.template_deps('listing.tmpl')
- for root, dirs, files in os.walk(kw['listings_folder']):
+ for root, dirs, files in os.walk(kw['listings_folder'], followlinks=True):
+ files = [f for f in files if os.path.splitext(f)[-1] not in ignored_extensions]
+
+ uptodate = {'c': self.site.GLOBAL_CONTEXT}
+
+ for k, v in self.site.GLOBAL_CONTEXT['template_hooks'].items():
+ uptodate['||template_hooks|{0}||'.format(k)] = v._items
+
+ for k in self.site._GLOBAL_CONTEXT_TRANSLATABLE:
+ uptodate[k] = self.site.GLOBAL_CONTEXT[k](kw['default_lang'])
+
+ uptodate2 = uptodate.copy()
+ uptodate2['f'] = files
+ uptodate2['d'] = dirs
+
# Render all files
out_name = os.path.join(
kw['output_folder'],
@@ -105,8 +141,7 @@ class Listings(Task):
'actions': [(render_listing, [None, out_name, dirs, files])],
# This is necessary to reflect changes in blog title,
# sidebar links, etc.
- 'uptodate': [utils.config_changed(
- self.site.GLOBAL_CONTEXT)],
+ 'uptodate': [utils.config_changed(uptodate2)],
'clean': True,
}
for f in files:
@@ -126,11 +161,25 @@ class Listings(Task):
'actions': [(render_listing, [in_name, out_name])],
# This is necessary to reflect changes in blog title,
# sidebar links, etc.
- 'uptodate': [utils.config_changed(
- self.site.GLOBAL_CONTEXT)],
+ 'uptodate': [utils.config_changed(uptodate)],
'clean': True,
}
+ if self.site.config['COPY_SOURCES']:
+ out_name = os.path.join(
+ kw['output_folder'],
+ root,
+ f)
+ yield {
+ 'basename': self.name,
+ 'name': out_name,
+ 'file_dep': [in_name],
+ 'targets': [out_name],
+ 'actions': [(utils.copy_file, [in_name, out_name])],
+ 'clean': True,
+ }
def listing_path(self, name, lang):
- return [_f for _f in [self.site.config['LISTINGS_FOLDER'], name +
- '.html'] if _f]
+ if not name.endswith('.html'):
+ name += '.html'
+ path_parts = [self.site.config['LISTINGS_FOLDER']] + list(os.path.split(name))
+ return [_f for _f in path_parts if _f]
diff --git a/nikola/plugins/task/localsearch.plugin b/nikola/plugins/task/localsearch.plugin
deleted file mode 100644
index 86accb6..0000000
--- a/nikola/plugins/task/localsearch.plugin
+++ /dev/null
@@ -1,10 +0,0 @@
-[Core]
-Name = local_search
-Module = localsearch
-
-[Documentation]
-Author = Roberto Alsina
-Version = 0.1
-Website = http://getnikola.com
-Description = Create data files for local search via Tipue
-
diff --git a/nikola/plugins/task/localsearch/MIT-LICENSE.txt b/nikola/plugins/task/localsearch/MIT-LICENSE.txt
deleted file mode 100644
index f131068..0000000
--- a/nikola/plugins/task/localsearch/MIT-LICENSE.txt
+++ /dev/null
@@ -1,20 +0,0 @@
-Tipue Search Copyright (c) 2012 Tipue
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-"Software"), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
-LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
-WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/nikola/plugins/task/localsearch/__init__.py b/nikola/plugins/task/localsearch/__init__.py
deleted file mode 100644
index c501d80..0000000
--- a/nikola/plugins/task/localsearch/__init__.py
+++ /dev/null
@@ -1,106 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright © 2012-2014 Roberto Alsina and others.
-
-# Permission is hereby granted, free of charge, to any
-# person obtaining a copy of this software and associated
-# documentation files (the "Software"), to deal in the
-# Software without restriction, including without limitation
-# the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the
-# Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice
-# shall be included in all copies or substantial portions of
-# the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
-# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
-# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
-# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
-# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-from __future__ import unicode_literals
-import codecs
-import json
-import os
-
-from doit.tools import result_dep
-
-from nikola.plugin_categories import LateTask
-from nikola.utils import config_changed, copy_tree, makedirs
-
-# This is what we need to produce:
-#var tipuesearch = {"pages": [
- #{"title": "Tipue Search, a jQuery site search engine", "text": "Tipue
- #Search is a site search engine jQuery plugin. It's free for both commercial and
- #non-commercial use and released under the MIT License. Tipue Search includes
- #features such as word stemming and word replacement.", "tags": "JavaScript",
- #"loc": "http://www.tipue.com/search"},
- #{"title": "Tipue Search demo", "text": "Tipue Search demo. Tipue Search is
- #a site search engine jQuery plugin.", "tags": "JavaScript", "loc":
- #"http://www.tipue.com/search/demo"},
- #{"title": "About Tipue", "text": "Tipue is a small web development/design
- #studio based in North London. We've been around for over a decade.", "tags": "",
- #"loc": "http://www.tipue.com/about"}
-#]};
-
-
-class Tipue(LateTask):
- """Render the blog posts as JSON data."""
-
- name = "local_search"
-
- def gen_tasks(self):
- self.site.scan_posts()
-
- kw = {
- "translations": self.site.config['TRANSLATIONS'],
- "output_folder": self.site.config['OUTPUT_FOLDER'],
- }
-
- posts = self.site.timeline[:]
- dst_path = os.path.join(kw["output_folder"], "assets", "js",
- "tipuesearch_content.json")
-
- def save_data():
- pages = []
- for lang in kw["translations"]:
- for post in posts:
- # Don't index drafts (Issue #387)
- if post.is_draft or post.is_retired or post.publish_later:
- continue
- text = post.text(lang, strip_html=True)
- text = text.replace('^', '')
-
- data = {}
- data["title"] = post.title(lang)
- data["text"] = text
- data["tags"] = ",".join(post.tags)
- data["loc"] = post.permalink(lang)
- pages.append(data)
- output = json.dumps({"pages": pages}, indent=2)
- makedirs(os.path.dirname(dst_path))
- with codecs.open(dst_path, "wb+", "utf8") as fd:
- fd.write(output)
-
- yield {
- "basename": str(self.name),
- "name": dst_path,
- "targets": [dst_path],
- "actions": [(save_data, [])],
- 'uptodate': [config_changed(kw), result_dep('sitemap')]
- }
- # Note: The task should run everytime a new file is added or a
- # file is changed. We cheat, and depend on the sitemap task,
- # to run everytime a new file is added.
-
- # Copy all the assets to the right places
- asset_folder = os.path.join(os.path.dirname(__file__), "files")
- for task in copy_tree(asset_folder, kw["output_folder"]):
- task["basename"] = str(self.name)
- yield task
diff --git a/nikola/plugins/task/localsearch/files/assets/css/img/loader.gif b/nikola/plugins/task/localsearch/files/assets/css/img/loader.gif
deleted file mode 100644
index 9c97738..0000000
--- a/nikola/plugins/task/localsearch/files/assets/css/img/loader.gif
+++ /dev/null
Binary files differ
diff --git a/nikola/plugins/task/localsearch/files/assets/css/img/search.png b/nikola/plugins/task/localsearch/files/assets/css/img/search.png
deleted file mode 100644
index 9ab0f2c..0000000
--- a/nikola/plugins/task/localsearch/files/assets/css/img/search.png
+++ /dev/null
Binary files differ
diff --git a/nikola/plugins/task/localsearch/files/assets/css/tipuesearch.css b/nikola/plugins/task/localsearch/files/assets/css/tipuesearch.css
deleted file mode 100644
index 2230193..0000000
--- a/nikola/plugins/task/localsearch/files/assets/css/tipuesearch.css
+++ /dev/null
@@ -1,159 +0,0 @@
-
-/*
-Tipue Search 3.0.1
-Copyright (c) 2013 Tipue
-Tipue Search is released under the MIT License
-http://www.tipue.com/search
-*/
-
-
-#tipue_search_input
-{
- font: 12px/1.7 'open sans', sans-serif;
- color: #333;
- padding: 7px;
- width: 150px;
- border: 1px solid #e2e2e2;
- border-radius: 0;
- -moz-appearance: none;
- -webkit-appearance: none;
- box-shadow: none;
- outline: 0;
- margin: 0;
-}
-#tipue_search_input:focus
-{
- border: 1px solid #ccc;
-}
-#tipue_search_button
-{
- width: 70px;
- height: 36px;
- border: 0;
- border-radius: 1px;
- background: #5193fb url('img/search.png') no-repeat center;
- outline: none;
-}
-#tipue_search_button:hover
-{
- background-color: #4589fb;
-}
-
-#tipue_search_content
-{
- clear: left;
- max-width: 650px;
- padding: 25px 0 13px 0;
- margin: 0;
-}
-#tipue_search_loading
-{
- padding-top: 60px;
- background: #fff url('img/loader.gif') no-repeat left;
-}
-
-#tipue_search_warning_head
-{
- font: 300 16px/1.6 'open sans', sans-serif;
- color: #333;
-}
-#tipue_search_warning
-{
- font: 12px/1.6 'open sans', sans-serif;
- color: #333;
- margin: 7px 0;
-}
-#tipue_search_warning a
-{
- color: #3f72d8;
- text-decoration: none;
-}
-#tipue_search_warning a:hover
-{
- padding-bottom: 1px;
- border-bottom: 1px solid #ccc;
-}
-#tipue_search_results_count
-{
- font: 13px/1.6 'open sans', sans-serif;
- color: #333;
-}
-.tipue_search_content_title
-{
- font: 300 23px/1.6 'open sans', sans-serif;
- margin-top: 31px;
-}
-.tipue_search_content_title a
-{
- color: #3f72d8;
- text-decoration: none;
-}
-.tipue_search_content_title a:hover
-{
- padding-bottom: 1px;
- border-bottom: 1px solid #ccc;
-}
-.tipue_search_content_text
-{
- font: 12px/1.7 'open sans', sans-serif;
- color: #333;
- padding: 13px 0;
-}
-.tipue_search_content_loc
-{
- font: 300 13px/1.7 'open sans', sans-serif;
- overflow: auto;
-}
-.tipue_search_content_loc a
-{
- color: #555;
- text-decoration: none;
-}
-.tipue_search_content_loc a:hover
-{
- padding-bottom: 1px;
- border-bottom: 1px solid #ccc;
-}
-#tipue_search_foot
-{
- margin: 51px 0 21px 0;
-}
-#tipue_search_foot_boxes
-{
- padding: 0;
- margin: 0;
- font: 12px/1 'open sans', sans-serif;
-}
-#tipue_search_foot_boxes li
-{
- list-style: none;
- margin: 0;
- padding: 0;
- display: inline;
-}
-#tipue_search_foot_boxes li a
-{
- padding: 7px 13px 8px 13px;
- background-color: #f1f1f1;
- border: 1px solid #dcdcdc;
- border-radius: 1px;
- color: #333;
- margin-right: 7px;
- text-decoration: none;
- text-align: center;
-}
-#tipue_search_foot_boxes li.current
-{
- padding: 7px 13px 8px 13px;
- background: #fff;
- border: 1px solid #dcdcdc;
- border-radius: 1px;
- color: #333;
- margin-right: 7px;
- text-align: center;
-}
-#tipue_search_foot_boxes li a:hover
-{
- border: 1px solid #ccc;
- background-color: #f3f3f3;
-}
diff --git a/nikola/plugins/task/localsearch/files/assets/js/tipuesearch.js b/nikola/plugins/task/localsearch/files/assets/js/tipuesearch.js
deleted file mode 100644
index a9982cd..0000000
--- a/nikola/plugins/task/localsearch/files/assets/js/tipuesearch.js
+++ /dev/null
@@ -1,384 +0,0 @@
-
-/*
-Tipue Search 3.0.1
-Copyright (c) 2013 Tipue
-Tipue Search is released under the MIT License
-http://www.tipue.com/search
-*/
-
-
-(function($) {
-
- $.fn.tipuesearch = function(options) {
-
- var set = $.extend( {
-
- 'show' : 7,
- 'newWindow' : false,
- 'showURL' : true,
- 'minimumLength' : 3,
- 'descriptiveWords' : 25,
- 'highlightTerms' : true,
- 'highlightEveryTerm' : false,
- 'mode' : 'static',
- 'liveDescription' : '*',
- 'liveContent' : '*',
- 'contentLocation' : 'tipuesearch/tipuesearch_content.json'
-
- }, options);
-
- return this.each(function() {
-
- var tipuesearch_in = {
- pages: []
- };
- $.ajaxSetup({
- async: false
- });
-
- if (set.mode == 'live')
- {
- for (var i = 0; i < tipuesearch_pages.length; i++)
- {
- $.get(tipuesearch_pages[i], '',
- function (html)
- {
- var cont = $(set.liveContent, html).text();
- cont = cont.replace(/\s+/g, ' ');
- var desc = $(set.liveDescription, html).text();
- desc = desc.replace(/\s+/g, ' ');
-
- var t_1 = html.toLowerCase().indexOf('<title>');
- var t_2 = html.toLowerCase().indexOf('</title>', t_1 + 7);
- if (t_1 != -1 && t_2 != -1)
- {
- var tit = html.slice(t_1 + 7, t_2);
- }
- else
- {
- var tit = 'No title';
- }
-
- tipuesearch_in.pages.push({
- "title": tit,
- "text": desc,
- "tags": cont,
- "loc": tipuesearch_pages[i]
- });
- }
- );
- }
- }
-
- if (set.mode == 'json')
- {
- $.getJSON(set.contentLocation,
- function(json)
- {
- tipuesearch_in = $.extend({}, json);
- }
- );
- }
-
- if (set.mode == 'static')
- {
- tipuesearch_in = $.extend({}, tipuesearch);
- }
-
- var tipue_search_w = '';
- if (set.newWindow)
- {
- tipue_search_w = ' target="_blank"';
- }
-
- function getURLP(name)
- {
- return decodeURIComponent((new RegExp('[?|&]' + name + '=' + '([^&;]+?)(&|#|;|$)').exec(location.search)||[,""])[1].replace(/\+/g, '%20')) || null;
- }
- if (getURLP('q'))
- {
- $('#tipue_search_input').val(getURLP('q'));
- getTipueSearch(0, true);
- }
-
- $('#tipue_search_button').click(function()
- {
- getTipueSearch(0, true);
- });
- $(this).keyup(function(event)
- {
- if(event.keyCode == '13')
- {
- getTipueSearch(0, true);
- }
- });
-
- function getTipueSearch(start, replace)
- {
- $('#tipue_search_content').hide();
- var out = '';
- var results = '';
- var show_replace = false;
- var show_stop = false;
-
- var d = $('#tipue_search_input').val().toLowerCase();
- d = $.trim(d);
- var d_w = d.split(' ');
- d = '';
- for (var i = 0; i < d_w.length; i++)
- {
- var a_w = true;
- for (var f = 0; f < tipuesearch_stop_words.length; f++)
- {
- if (d_w[i] == tipuesearch_stop_words[f])
- {
- a_w = false;
- show_stop = true;
- }
- }
- if (a_w)
- {
- d = d + ' ' + d_w[i];
- }
- }
- d = $.trim(d);
- d_w = d.split(' ');
-
- if (d.length >= set.minimumLength)
- {
- if (replace)
- {
- var d_r = d;
- for (var i = 0; i < d_w.length; i++)
- {
- for (var f = 0; f < tipuesearch_replace.words.length; f++)
- {
- if (d_w[i] == tipuesearch_replace.words[f].word)
- {
- d = d.replace(d_w[i], tipuesearch_replace.words[f].replace_with);
- show_replace = true;
- }
- }
- }
- d_w = d.split(' ');
- }
-
- var d_t = d;
- for (var i = 0; i < d_w.length; i++)
- {
- for (var f = 0; f < tipuesearch_stem.words.length; f++)
- {
- if (d_w[i] == tipuesearch_stem.words[f].word)
- {
- d_t = d_t + ' ' + tipuesearch_stem.words[f].stem;
- }
- }
- }
- d_w = d_t.split(' ');
-
- var c = 0;
- found = new Array();
- for (var i = 0; i < tipuesearch_in.pages.length; i++)
- {
- var score = 1000000000;
- var s_t = tipuesearch_in.pages[i].text;
- for (var f = 0; f < d_w.length; f++)
- {
- var pat = new RegExp(d_w[f], 'i');
- if (tipuesearch_in.pages[i].title.search(pat) != -1)
- {
- score -= (200000 - i);
- }
- if (tipuesearch_in.pages[i].text.search(pat) != -1)
- {
- score -= (150000 - i);
- }
-
- if (set.highlightTerms)
- {
- if (set.highlightEveryTerm)
- {
- var patr = new RegExp('(' + d_w[f] + ')', 'gi');
- }
- else
- {
- var patr = new RegExp('(' + d_w[f] + ')', 'i');
- }
- s_t = s_t.replace(patr, "<b>$1</b>");
- }
- if (tipuesearch_in.pages[i].tags.search(pat) != -1)
- {
- score -= (100000 - i);
- }
-
- }
- if (score < 1000000000)
- {
- found[c++] = score + '^' + tipuesearch_in.pages[i].title + '^' + s_t + '^' + tipuesearch_in.pages[i].loc;
- }
- }
-
- if (c != 0)
- {
- if (show_replace == 1)
- {
- out += '<div id="tipue_search_warning_head">Showing results for ' + d + '</div>';
- out += '<div id="tipue_search_warning">Search for <a href="javascript:void(0)" id="tipue_search_replaced">' + d_r + '</a></div>';
- }
- if (c == 1)
- {
- out += '<div id="tipue_search_results_count">1 result</div>';
- }
- else
- {
- c_c = c.toString().replace(/\B(?=(\d{3})+(?!\d))/g, ",");
- out += '<div id="tipue_search_results_count">' + c_c + ' results</div>';
- }
-
- found.sort();
- var l_o = 0;
- for (var i = 0; i < found.length; i++)
- {
- var fo = found[i].split('^');
- if (l_o >= start && l_o < set.show + start)
- {
- out += '<div class="tipue_search_content_title"><a href="' + fo[3] + '"' + tipue_search_w + '>' + fo[1] + '</a></div>';
-
- var t = fo[2];
- var t_d = '';
- var t_w = t.split(' ');
- if (t_w.length < set.descriptiveWords)
- {
- t_d = t;
- }
- else
- {
- for (var f = 0; f < set.descriptiveWords; f++)
- {
- t_d += t_w[f] + ' ';
- }
- }
- t_d = $.trim(t_d);
- if (t_d.charAt(t_d.length - 1) != '.')
- {
- t_d += ' ...';
- }
- out += '<div class="tipue_search_content_text">' + t_d + '</div>';
-
- if (set.showURL)
- {
- t_url = fo[3];
- if (t_url.length > 45)
- {
- t_url = fo[3].substr(0, 45) + ' ...';
- }
- out += '<div class="tipue_search_content_loc"><a href="' + fo[3] + '"' + tipue_search_w + '>' + t_url + '</a></div>';
- }
- }
- l_o++;
- }
-
- if (c > set.show)
- {
- var pages = Math.ceil(c / set.show);
- var page = (start / set.show);
- out += '<div id="tipue_search_foot"><ul id="tipue_search_foot_boxes">';
-
- if (start > 0)
- {
- out += '<li><a href="javascript:void(0)" class="tipue_search_foot_box" id="' + (start - set.show) + '_' + replace + '">Prev</a></li>';
- }
-
- if (page <= 2)
- {
- var p_b = pages;
- if (pages > 3)
- {
- p_b = 3;
- }
- for (var f = 0; f < p_b; f++)
- {
- if (f == page)
- {
- out += '<li class="current">' + (f + 1) + '</li>';
- }
- else
- {
- out += '<li><a href="javascript:void(0)" class="tipue_search_foot_box" id="' + (f * set.show) + '_' + replace + '">' + (f + 1) + '</a></li>';
- }
- }
- }
- else
- {
- var p_b = page + 3;
- if (p_b > pages)
- {
- p_b = pages;
- }
- for (var f = page; f < p_b; f++)
- {
- if (f == page)
- {
- out += '<li class="current">' + (f + 1) + '</li>';
- }
- else
- {
- out += '<li><a href="javascript:void(0)" class="tipue_search_foot_box" id="' + (f * set.show) + '_' + replace + '">' + (f + 1) + '</a></li>';
- }
- }
- }
-
- if (page + 1 != pages)
- {
- out += '<li><a href="javascript:void(0)" class="tipue_search_foot_box" id="' + (start + set.show) + '_' + replace + '">Next</a></li>';
- }
-
- out += '</ul></div>';
- }
- }
- else
- {
- out += '<div id="tipue_search_warning_head">Nothing found</div>';
- }
- }
- else
- {
- if (show_stop)
- {
- out += '<div id="tipue_search_warning_head">Nothing found</div><div id="tipue_search_warning">Common words are largely ignored</div>';
- }
- else
- {
- out += '<div id="tipue_search_warning_head">Search too short</div>';
- if (set.minimumLength == 1)
- {
- out += '<div id="tipue_search_warning">Should be one character or more</div>';
- }
- else
- {
- out += '<div id="tipue_search_warning">Should be ' + set.minimumLength + ' characters or more</div>';
- }
- }
- }
-
- $('#tipue_search_content').html(out);
- $('#tipue_search_content').slideDown(200);
-
- $('#tipue_search_replaced').click(function()
- {
- getTipueSearch(0, false);
- });
-
- $('.tipue_search_foot_box').click(function()
- {
- var id_v = $(this).attr('id');
- var id_a = id_v.split('_');
-
- getTipueSearch(parseInt(id_a[0]), id_a[1]);
- });
- }
-
- });
- };
-
-})(jQuery);
diff --git a/nikola/plugins/task/localsearch/files/assets/js/tipuesearch_set.js b/nikola/plugins/task/localsearch/files/assets/js/tipuesearch_set.js
deleted file mode 100644
index 8493ec1..0000000
--- a/nikola/plugins/task/localsearch/files/assets/js/tipuesearch_set.js
+++ /dev/null
@@ -1,21 +0,0 @@
-
-/*
-Tipue Search 3.0.1
-Copyright (c) 2013 Tipue
-Tipue Search is released under the MIT License
-http://www.tipue.com/search
-*/
-
-
-var tipuesearch_stop_words = ["and", "be", "by", "do", "for", "he", "how", "if", "is", "it", "my", "not", "of", "or", "the", "to", "up", "what", "when"];
-
-var tipuesearch_replace = {"words": [
- {"word": "tipua", replace_with: "tipue"},
- {"word": "javscript", replace_with: "javascript"}
-]};
-
-var tipuesearch_stem = {"words": [
- {"word": "e-mail", stem: "email"},
- {"word": "javascript", stem: "script"},
- {"word": "javascript", stem: "js"}
-]};
diff --git a/nikola/plugins/task/localsearch/files/tipue_search.html b/nikola/plugins/task/localsearch/files/tipue_search.html
deleted file mode 100644
index 789fbe5..0000000
--- a/nikola/plugins/task/localsearch/files/tipue_search.html
+++ /dev/null
@@ -1,31 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-
-<html>
-<head>
-<title>Tipue Search</title>
-<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
-
-<script type="text/javascript" src="https://ajax.googleapis.com/ajax/libs/jquery/1.8.2/jquery.min.js"></script>
-
-<link rel="stylesheet" type="text/css" href="assets/css/tipuesearch.css">
-<script type="text/javascript" src="assets/js/tipuesearch_set.js"></script>
-<script type="text/javascript" src="assets/js/tipuesearch.js"></script>
-
-</head>
-<body>
-
-<div style="float: left;"><input type="text" id="tipue_search_input"></div>
-<div style="float: left; margin-left: 13px;"><input type="button" id="tipue_search_button"></div>
-<div id="tipue_search_content"><div id="tipue_search_loading"></div></div>
-</div>
-
-<script type="text/javascript">
-$(document).ready(function() {
- $('#tipue_search_input').tipuesearch({
- 'mode': 'json',
- 'contentLocation': 'assets/js/tipuesearch_content.json'
- });
-});
-</script>
-</body>
-</html>
diff --git a/nikola/plugins/task/mustache.plugin b/nikola/plugins/task/mustache.plugin
deleted file mode 100644
index d6b487a..0000000
--- a/nikola/plugins/task/mustache.plugin
+++ /dev/null
@@ -1,10 +0,0 @@
-[Core]
-Name = render_mustache
-Module = mustache
-
-[Documentation]
-Author = Roberto Alsina
-Version = 0.1
-Website = http://getnikola.com
-Description = Generates the blog's index pages in json.
-
diff --git a/nikola/plugins/task/mustache/__init__.py b/nikola/plugins/task/mustache/__init__.py
deleted file mode 100644
index 5be98f0..0000000
--- a/nikola/plugins/task/mustache/__init__.py
+++ /dev/null
@@ -1,184 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright © 2012-2014 Roberto Alsina and others.
-
-# Permission is hereby granted, free of charge, to any
-# person obtaining a copy of this software and associated
-# documentation files (the "Software"), to deal in the
-# Software without restriction, including without limitation
-# the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the
-# Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice
-# shall be included in all copies or substantial portions of
-# the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
-# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
-# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
-# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
-# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-from __future__ import unicode_literals
-
-import codecs
-import json
-import os
-
-from nikola.plugin_categories import Task
-from nikola.utils import (
- config_changed, copy_file, LocaleBorg, makedirs, unicode_str,
-)
-
-
-class Mustache(Task):
- """Render the blog posts as JSON data."""
-
- name = "render_mustache"
-
- def gen_tasks(self):
- self.site.scan_posts()
-
- kw = {
- "translations": self.site.config['TRANSLATIONS'],
- "index_display_post_count":
- self.site.config['INDEX_DISPLAY_POST_COUNT'],
- "messages": self.site.MESSAGES,
- "index_teasers": self.site.config['INDEX_TEASERS'],
- "output_folder": self.site.config['OUTPUT_FOLDER'],
- "filters": self.site.config['FILTERS'],
- "blog_title": self.site.config['BLOG_TITLE'],
- "content_footer": self.site.config['CONTENT_FOOTER'],
- }
-
- # TODO: timeline is global, get rid of it
- posts = [x for x in self.site.timeline if x.use_in_feeds]
- if not posts:
- yield {
- 'basename': 'render_mustache',
- 'actions': [],
- }
- return
-
- def write_file(path, post, lang):
-
- # Prev/Next links
- prev_link = False
- if post.prev_post:
- prev_link = post.prev_post.permalink(lang).replace(".html",
- ".json")
- next_link = False
- if post.next_post:
- next_link = post.next_post.permalink(lang).replace(".html",
- ".json")
- data = {}
-
- # Configuration
- for k, v in self.site.config.items():
- if isinstance(v, (str, unicode_str)): # NOQA
- data[k] = v
-
- # Tag data
- tags = []
- for tag in post.tags:
- tags.append({'name': tag, 'link': self.site.link("tag", tag,
- lang)})
- data.update({
- "tags": tags,
- "tags?": True if tags else False,
- })
-
- # Template strings
- for k, v in kw["messages"][lang].items():
- data["message_" + k] = v
-
- # Post data
- data.update({
- "title": post.title(lang),
- "text": post.text(lang),
- "prev": prev_link,
- "next": next_link,
- "date":
- post.date.strftime(self.site.GLOBAL_CONTEXT['date_format']),
- })
-
- # Comments
- context = dict(post=post, lang=LocaleBorg().current_lang)
- context.update(self.site.GLOBAL_CONTEXT)
- data["comment_html"] = self.site.template_system.render_template(
- 'mustache-comment-form.tmpl', None, context).strip()
-
- # Post translations
- translations = []
- for langname in kw["translations"]:
- if langname == lang:
- continue
- translations.append({'name':
- kw["messages"][langname]["Read in English"],
- 'link': "javascript:load_data('%s');"
- % post.permalink(langname).replace(
- ".html", ".json")})
- data["translations"] = translations
-
- makedirs(os.path.dirname(path))
- with codecs.open(path, 'wb+', 'utf8') as fd:
- fd.write(json.dumps(data))
-
- for lang in kw["translations"]:
- for i, post in enumerate(posts):
- out_path = post.destination_path(lang, ".json")
- out_file = os.path.join(kw['output_folder'], out_path)
- task = {
- 'basename': 'render_mustache',
- 'name': out_file,
- 'file_dep': post.fragment_deps(lang),
- 'targets': [out_file],
- 'actions': [(write_file, (out_file, post, lang))],
- 'task_dep': ['render_posts'],
- 'uptodate': [config_changed({
- 1: post.text(lang),
- 2: post.prev_post,
- 3: post.next_post,
- 4: post.title(lang),
- })]
- }
- yield task
-
- if posts:
- first_post_data = posts[0].permalink(
- self.site.config["DEFAULT_LANG"]).replace(".html", ".json")
-
- # Copy mustache template
- src = os.path.join(os.path.dirname(__file__), 'mustache-template.html')
- dst = os.path.join(kw['output_folder'], 'mustache-template.html')
- yield {
- 'basename': 'render_mustache',
- 'name': dst,
- 'targets': [dst],
- 'file_dep': [src],
- 'actions': [(copy_file, (src, dst))],
- }
-
- # Copy mustache.html with the right starting file in it
- src = os.path.join(os.path.dirname(__file__), 'mustache.html')
- dst = os.path.join(kw['output_folder'], 'mustache.html')
-
- def copy_mustache():
- with codecs.open(src, 'rb', 'utf8') as in_file:
- with codecs.open(dst, 'wb+', 'utf8') as out_file:
- data = in_file.read().replace('{{first_post_data}}',
- first_post_data)
- out_file.write(data)
- yield {
- 'basename': 'render_mustache',
- 'name': dst,
- 'targets': [dst],
- 'file_dep': [src],
- 'uptodate': [config_changed({1: first_post_data})],
- 'actions': [(copy_mustache, [])],
- }
diff --git a/nikola/plugins/task/mustache/mustache-template.html b/nikola/plugins/task/mustache/mustache-template.html
deleted file mode 100644
index e9a0213..0000000
--- a/nikola/plugins/task/mustache/mustache-template.html
+++ /dev/null
@@ -1,29 +0,0 @@
-<script id="view" type="text/html">
-<div class="container" id="container">
- <div class="postbox">
- <h1>{{BLOG_TITLE}}</h1>
- <hr>
- <h2>{{title}}</h2>
- Posted on: {{date}}</br>
- {{#tags?}} More posts about:
- {{#tags}}<a class="tag" href={{link}}><span class="badge badge-info">{{name}}</span></a>{{/tags}}
- </br>
- {{/tags?}}
- {{#translations}}<a href={{link}}>{{name}}</a>{{/translations}}&nbsp;</br>
- <hr>
- {{{text}}}
- <ul class="pager">
- {{#prev}}
- <li class="previous"><a href="javascript:load_data('{{prev}}')">{{message_Previous post}}</a></li>
- {{/prev}}
- {{#next}}
- <li class="next"><a href="javascript:load_data('{{next}}')">{{message_Next post}}</a></li>
- {{/next}}
- </ul>
- {{{comment_html}}}
- </div>
- <div class="footerbox">
- {{{CONTENT_FOOTER}}}
- </div>
-</div>
-</script>
diff --git a/nikola/plugins/task/mustache/mustache.html b/nikola/plugins/task/mustache/mustache.html
deleted file mode 100644
index 7ff6312..0000000
--- a/nikola/plugins/task/mustache/mustache.html
+++ /dev/null
@@ -1,34 +0,0 @@
-<head>
- <link href="/assets/css/bootstrap.css" rel="stylesheet" type="text/css">
- <link href="/assets/css/bootstrap-responsive.css" rel="stylesheet" type="text/css">
- <link href="/assets/css/rst.css" rel="stylesheet" type="text/css">
- <link href="/assets/css/code.css" rel="stylesheet" type="text/css">
- <link href="/assets/css/colorbox.css" rel="stylesheet" type="text/css"/>
- <link href="/assets/css/theme.css" rel="stylesheet" type="text/css"/>
- <link href="/assets/css/custom.css" rel="stylesheet" type="text/css">
- <script src="/assets/js/jquery-1.10.2.min.js" type="text/javascript"></script>
- <script src="//cdn.jsdelivr.net/jquery.mustache/0.2.7/jquery.mustache.js"></script>
- <script src="//cdn.jsdelivr.net/mustache.js/0.7.2/mustache.js"></script>
- <script src="/assets/js/jquery.colorbox-min.js" type="text/javascript"></script>
- <script type="text/javascript">
-function load_data(dataurl) {
- jQuery.getJSON(dataurl, function(data) {
- $('body').mustache('view', data, { method: 'html' });
- window.location.hash = '#' + dataurl;
- })
-};
-$(document).ready(function() {
-$.Mustache.load('/mustache-template.html')
- .done(function () {
- if (window.location.hash != '') {
- load_data(window.location.hash.slice(1));
- }
- else {
- load_data('{{first_post_data}}');
- };
- })
-});
-</script>
-</head>
-<body style="padding-top: 0;">
-</body>
diff --git a/nikola/plugins/task/pages.py b/nikola/plugins/task/pages.py
index f4c0469..aefc5a1 100644
--- a/nikola/plugins/task/pages.py
+++ b/nikola/plugins/task/pages.py
@@ -40,13 +40,14 @@ class RenderPages(Task):
"post_pages": self.site.config["post_pages"],
"translations": self.site.config["TRANSLATIONS"],
"filters": self.site.config["FILTERS"],
- "hide_untranslated_posts": self.site.config['HIDE_UNTRANSLATED_POSTS'],
+ "show_untranslated_posts": self.site.config['SHOW_UNTRANSLATED_POSTS'],
+ "demote_headers": self.site.config['DEMOTE_HEADERS'],
}
self.site.scan_posts()
yield self.group_task()
for lang in kw["translations"]:
for post in self.site.timeline:
- if kw["hide_untranslated_posts"] and not post.is_translation_available(lang):
+ if not kw["show_untranslated_posts"] and not post.is_translation_available(lang):
continue
for task in self.site.generic_page_renderer(lang, post,
kw["filters"]):
diff --git a/nikola/plugins/task/posts.py b/nikola/plugins/task/posts.py
index a502b81..8e03122 100644
--- a/nikola/plugins/task/posts.py
+++ b/nikola/plugins/task/posts.py
@@ -25,12 +25,20 @@
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from copy import copy
-import nikola.post
from nikola.plugin_categories import Task
from nikola import utils
+def rest_deps(post, task):
+ """Add extra_deps from ReST into task.
+
+ The .dep file is created by ReST so not available before the task starts
+ to execute.
+ """
+ task.file_dep.update(post.extra_deps())
+
+
class RenderPosts(Task):
"""Build HTML fragments from metadata and text."""
@@ -43,10 +51,10 @@ class RenderPosts(Task):
"translations": self.site.config["TRANSLATIONS"],
"timeline": self.site.timeline,
"default_lang": self.site.config["DEFAULT_LANG"],
- "hide_untranslated_posts": self.site.config['HIDE_UNTRANSLATED_POSTS'],
+ "show_untranslated_posts": self.site.config['SHOW_UNTRANSLATED_POSTS'],
+ "demote_headers": self.site.config['DEMOTE_HEADERS'],
}
- nikola.post.READ_MORE_LINK = self.site.config['READ_MORE_LINK']
yield self.group_task()
for lang in kw["translations"]:
@@ -59,7 +67,9 @@ class RenderPosts(Task):
'name': dest,
'file_dep': post.fragment_deps(lang),
'targets': [dest],
- 'actions': [(post.compile, (lang, ))],
+ 'actions': [(post.compile, (lang, )),
+ (rest_deps, (post,)),
+ ],
'clean': True,
'uptodate': [utils.config_changed(deps_dict)],
}
diff --git a/nikola/plugins/task/redirect.py b/nikola/plugins/task/redirect.py
index 6fafd13..eccc0ab 100644
--- a/nikola/plugins/task/redirect.py
+++ b/nikola/plugins/task/redirect.py
@@ -63,4 +63,4 @@ def create_redirect(src, dst):
with codecs.open(src, "wb+", "utf8") as fd:
fd.write('<!DOCTYPE html><head><title>Redirecting...</title>'
'<meta http-equiv="refresh" content="0; '
- 'url={0}"></head>'.format(dst))
+ 'url={0}"></head><body><p>Page moved <a href="{0}">here</a></p></body>'.format(dst))
diff --git a/nikola/plugins/task/robots.plugin b/nikola/plugins/task/robots.plugin
new file mode 100644
index 0000000..60b50fb
--- /dev/null
+++ b/nikola/plugins/task/robots.plugin
@@ -0,0 +1,10 @@
+[Core]
+Name = robots
+Module = robots
+
+[Documentation]
+Author = Daniel Aleksandersen
+Version = 0.1
+Website = http://getnikola.com
+Description = Generate /robots.txt exclusion file and promote sitemap.
+
diff --git a/nikola/plugins/task/robots.py b/nikola/plugins/task/robots.py
new file mode 100644
index 0000000..9944c0d
--- /dev/null
+++ b/nikola/plugins/task/robots.py
@@ -0,0 +1,83 @@
+# -*- coding: utf-8 -*-
+
+# Copyright © 2012-2014 Roberto Alsina and others.
+
+# Permission is hereby granted, free of charge, to any
+# person obtaining a copy of this software and associated
+# documentation files (the "Software"), to deal in the
+# Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the
+# Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice
+# shall be included in all copies or substantial portions of
+# the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
+# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+from __future__ import print_function, absolute_import, unicode_literals
+import codecs
+import os
+try:
+ from urlparse import urljoin, urlparse
+except ImportError:
+ from urllib.parse import urljoin, urlparse # NOQA
+
+from nikola.plugin_categories import LateTask
+from nikola import utils
+
+
+class RobotsFile(LateTask):
+ """Generate a robots.txt."""
+
+ name = "robots_file"
+
+ def gen_tasks(self):
+ """Generate a robots.txt."""
+ kw = {
+ "base_url": self.site.config["BASE_URL"],
+ "site_url": self.site.config["SITE_URL"],
+ "output_folder": self.site.config["OUTPUT_FOLDER"],
+ "files_folders": self.site.config['FILES_FOLDERS'],
+ "robots_exclusions": self.site.config["ROBOTS_EXCLUSIONS"]
+ }
+
+ if kw["site_url"] != urljoin(kw["site_url"], "/"):
+ utils.LOGGER.warn('robots.txt not ending up in server root, will be useless')
+
+ sitemapindex_url = urljoin(kw["base_url"], "sitemapindex.xml")
+ robots_path = os.path.join(kw['output_folder'], "robots.txt")
+
+ def write_robots():
+ with codecs.open(robots_path, 'wb+', 'utf8') as outf:
+ outf.write("Sitemap: {0}\n\n".format(sitemapindex_url))
+ if kw["robots_exclusions"]:
+ outf.write("User-Agent: *\n")
+ for loc in kw["robots_exclusions"]:
+ outf.write("Disallow: {0}\n".format(loc))
+
+ yield self.group_task()
+
+ if not utils.get_asset_path("robots.txt", [], files_folders=kw["files_folders"]):
+ yield {
+ "basename": self.name,
+ "name": robots_path,
+ "targets": [robots_path],
+ "actions": [(write_robots)],
+ "uptodate": [utils.config_changed(kw)],
+ "clean": True,
+ "task_dep": ["sitemap"]
+ }
+ elif kw["robots_exclusions"]:
+        utils.LOGGER.warn('Did not generate robots.txt as one already exists in FILES_FOLDERS. ROBOTS_EXCLUSIONS will not have any effect on the copied file.')
+ else:
+ utils.LOGGER.debug('Did not generate robots.txt as one already exists in FILES_FOLDERS.')
diff --git a/nikola/plugins/task/rss.py b/nikola/plugins/task/rss.py
index 9e4204c..b16ed48 100644
--- a/nikola/plugins/task/rss.py
+++ b/nikola/plugins/task/rss.py
@@ -54,8 +54,11 @@ class GenerateRSS(Task):
"blog_description": self.site.config["BLOG_DESCRIPTION"],
"output_folder": self.site.config["OUTPUT_FOLDER"],
"rss_teasers": self.site.config["RSS_TEASERS"],
- "hide_untranslated_posts": self.site.config['HIDE_UNTRANSLATED_POSTS'],
+ "rss_plain": self.site.config["RSS_PLAIN"],
+ "show_untranslated_posts": self.site.config['SHOW_UNTRANSLATED_POSTS'],
"feed_length": self.site.config['FEED_LENGTH'],
+ "tzinfo": self.site.tzinfo,
+ "rss_read_more_link": self.site.config["RSS_READ_MORE_LINK"],
}
self.site.scan_posts()
# Check for any changes in the state of use_in_feeds for any post.
@@ -68,24 +71,25 @@ class GenerateRSS(Task):
output_name = os.path.join(kw['output_folder'],
self.site.path("rss", None, lang))
deps = []
- if kw["hide_untranslated_posts"]:
- posts = [x for x in self.site.timeline if x.use_in_feeds
- and x.is_translation_available(lang)][:10]
+ if kw["show_untranslated_posts"]:
+ posts = self.site.posts[:10]
else:
- posts = [x for x in self.site.timeline if x.use_in_feeds][:10]
+ posts = [x for x in self.site.posts if x.is_translation_available(lang)][:10]
for post in posts:
deps += post.deps(lang)
feed_url = urljoin(self.site.config['BASE_URL'], self.site.link("rss", None, lang).lstrip('/'))
+
yield {
'basename': 'generate_rss',
'name': os.path.normpath(output_name),
'file_dep': deps,
'targets': [output_name],
'actions': [(utils.generic_rss_renderer,
- (lang, kw["blog_title"], kw["site_url"],
- kw["blog_description"], posts, output_name,
- kw["rss_teasers"], kw['feed_length'], feed_url))],
+ (lang, kw["blog_title"](lang), kw["site_url"],
+ kw["blog_description"](lang), posts, output_name,
+ kw["rss_teasers"], kw["rss_plain"], kw['feed_length'], feed_url))],
+
'task_dep': ['render_posts'],
'clean': True,
'uptodate': [utils.config_changed(kw)],
diff --git a/nikola/plugins/task/sitemap/__init__.py b/nikola/plugins/task/sitemap/__init__.py
index 147bd50..beac6cb 100644
--- a/nikola/plugins/task/sitemap/__init__.py
+++ b/nikola/plugins/task/sitemap/__init__.py
@@ -30,14 +30,16 @@ import datetime
import os
try:
from urlparse import urljoin, urlparse
+ import robotparser as robotparser
except ImportError:
from urllib.parse import urljoin, urlparse # NOQA
+ import urllib.robotparser as robotparser # NOQA
from nikola.plugin_categories import LateTask
from nikola.utils import config_changed
-header = """<?xml version="1.0" encoding="UTF-8"?>
+urlset_header = """<?xml version="1.0" encoding="UTF-8"?>
<urlset
xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
@@ -45,13 +47,29 @@ header = """<?xml version="1.0" encoding="UTF-8"?>
http://www.sitemaps.org/schemas/sitemap/0.9/sitemap.xsd">
"""
-url_format = """ <url>
+loc_format = """ <url>
<loc>{0}</loc>
<lastmod>{1}</lastmod>
</url>
"""
-get_lastmod = lambda p: datetime.datetime.fromtimestamp(os.stat(p).st_mtime).isoformat().split('T')[0]
+urlset_footer = "</urlset>"
+
+sitemapindex_header = """<?xml version="1.0" encoding="UTF-8"?>
+<sitemapindex
+ xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://www.sitemaps.org/schemas/sitemap/0.9
+ http://www.sitemaps.org/schemas/sitemap/0.9/sitemap.xsd">
+"""
+
+sitemap_format = """ <sitemap>
+ <loc>{0}</loc>
+ <lastmod>{1}</lastmod>
+ </sitemap>
+"""
+
+sitemapindex_footer = "</sitemapindex>"
def get_base_path(base):
@@ -80,12 +98,12 @@ def get_base_path(base):
class Sitemap(LateTask):
- """Generate google sitemap."""
+ """Generate a sitemap."""
name = "sitemap"
def gen_tasks(self):
- """Generate Google sitemap."""
+ """Generate a sitemap."""
kw = {
"base_url": self.site.config["BASE_URL"],
"site_url": self.site.config["SITE_URL"],
@@ -93,28 +111,32 @@ class Sitemap(LateTask):
"strip_indexes": self.site.config["STRIP_INDEXES"],
"index_file": self.site.config["INDEX_FILE"],
"sitemap_include_fileless_dirs": self.site.config["SITEMAP_INCLUDE_FILELESS_DIRS"],
- "mapped_extensions": self.site.config.get('MAPPED_EXTENSIONS', ['.html', '.htm', '.xml'])
+ "mapped_extensions": self.site.config.get('MAPPED_EXTENSIONS', ['.html', '.htm', '.xml', '.rss']),
+ "robots_exclusions": self.site.config["ROBOTS_EXCLUSIONS"]
}
- output_path = kw['output_folder']
- sitemap_path = os.path.join(output_path, "sitemap.xml")
- base_path = get_base_path(kw['base_url'])
- locs = {}
output = kw['output_folder']
base_url = kw['base_url']
mapped_exts = kw['mapped_extensions']
+ output_path = kw['output_folder']
+ sitemapindex_path = os.path.join(output_path, "sitemapindex.xml")
+ sitemap_path = os.path.join(output_path, "sitemap.xml")
+ base_path = get_base_path(kw['base_url'])
+ sitemapindex = {}
+ urlset = {}
+
def scan_locs():
- for root, dirs, files in os.walk(output):
+ for root, dirs, files in os.walk(output, followlinks=True):
if not dirs and not files and not kw['sitemap_include_fileless_dirs']:
continue # Totally empty, not on sitemap
path = os.path.relpath(root, output)
# ignore the current directory.
path = (path.replace(os.sep, '/') + '/').replace('./', '')
- lastmod = get_lastmod(root)
+ lastmod = self.get_lastmod(root)
loc = urljoin(base_url, base_path + path)
if kw['index_file'] in files and kw['strip_indexes']: # ignore folders when not stripping urls
- locs[loc] = url_format.format(loc, lastmod)
+ urlset[loc] = loc_format.format(loc, lastmod)
for fname in files:
if kw['strip_indexes'] and fname == kw['index_file']:
continue # We already mapped the folder
@@ -124,38 +146,68 @@ class Sitemap(LateTask):
if path.endswith(kw['index_file']) and kw['strip_indexes']:
# ignore index files when stripping urls
continue
+ if not robot_fetch(path):
+ continue
if path.endswith('.html') or path.endswith('.htm'):
- if not u'<!doctype html' in codecs.open(real_path, 'r', 'utf8').read(1024).lower():
- # ignores "html" files without doctype
- # alexa-verify, google-site-verification, etc.
+ try:
+ if u'<!doctype html' not in codecs.open(real_path, 'r', 'utf8').read(1024).lower():
+ # ignores "html" files without doctype
+ # alexa-verify, google-site-verification, etc.
+ continue
+ except UnicodeDecodeError:
+ # ignore ancient files
+ # most non-utf8 files are worthless anyways
continue
- if path.endswith('.xml'):
- if not u'<rss' in codecs.open(real_path, 'r', 'utf8').read(512):
- # ignores all XML files except those presumed to be RSS
+ """ put RSS in sitemapindex[] instead of in urlset[], sitemap_path is included after it is generated """
+ if path.endswith('.xml') or path.endswith('.rss'):
+                        if (u'<rss' in codecs.open(real_path, 'r', 'utf8').read(512) or u'<urlset' in codecs.open(real_path, 'r', 'utf8').read(512)) and path != sitemap_path:
+ path = path.replace(os.sep, '/')
+ lastmod = self.get_lastmod(real_path)
+ loc = urljoin(base_url, base_path + path)
+ sitemapindex[loc] = sitemap_format.format(loc, lastmod)
continue
+ else:
+ continue # ignores all XML files except those presumed to be RSS
post = self.site.post_per_file.get(path)
- if post and (post.is_draft or post.is_retired or post.publish_later):
+ if post and (post.is_draft or post.is_private or post.publish_later):
continue
path = path.replace(os.sep, '/')
- lastmod = get_lastmod(real_path)
+ lastmod = self.get_lastmod(real_path)
loc = urljoin(base_url, base_path + path)
- locs[loc] = url_format.format(loc, lastmod)
+ urlset[loc] = loc_format.format(loc, lastmod)
+
+ def robot_fetch(path):
+ for rule in kw["robots_exclusions"]:
+ robot = robotparser.RobotFileParser()
+ robot.parse(["User-Agent: *", "Disallow: {0}".format(rule)])
+ if not robot.can_fetch("*", '/' + path):
+ return False # not robot food
+ return True
def write_sitemap():
# Have to rescan, because files may have been added between
# task dep scanning and task execution
with codecs.open(sitemap_path, 'wb+', 'utf8') as outf:
- outf.write(header)
- for k in sorted(locs.keys()):
- outf.write(locs[k])
- outf.write("</urlset>")
+ outf.write(urlset_header)
+ for k in sorted(urlset.keys()):
+ outf.write(urlset[k])
+ outf.write(urlset_footer)
+ sitemap_url = urljoin(base_url, base_path + "sitemap.xml")
+ sitemapindex[sitemap_url] = sitemap_format.format(sitemap_url, self.get_lastmod(sitemap_path))
+
+ def write_sitemapindex():
+ with codecs.open(sitemapindex_path, 'wb+', 'utf8') as outf:
+ outf.write(sitemapindex_header)
+ for k in sorted(sitemapindex.keys()):
+ outf.write(sitemapindex[k])
+ outf.write(sitemapindex_footer)
# Yield a task to calculate the dependencies of the sitemap
# Other tasks can depend on this output, instead of having
# to scan locations.
def scan_locs_task():
scan_locs()
- return {'locations': list(locs.keys())}
+ return {'locations': list(urlset.keys()) + list(sitemapindex.keys())}
yield {
"basename": "_scan_locs",
@@ -164,7 +216,7 @@ class Sitemap(LateTask):
}
yield self.group_task()
- task = {
+ yield {
"basename": "sitemap",
"name": sitemap_path,
"targets": [sitemap_path],
@@ -174,7 +226,21 @@ class Sitemap(LateTask):
"task_dep": ["render_site"],
"calc_dep": ["_scan_locs:sitemap"],
}
- yield task
+ yield {
+ "basename": "sitemap",
+ "name": sitemapindex_path,
+ "targets": [sitemapindex_path],
+ "actions": [(write_sitemapindex,)],
+ "uptodate": [config_changed(kw)],
+ "clean": True,
+ "file_dep": [sitemap_path]
+ }
+
+ def get_lastmod(self, p):
+ if self.site.invariant:
+ return '2014-01-01'
+ else:
+ return datetime.datetime.fromtimestamp(os.stat(p).st_mtime).isoformat().split('T')[0]
if __name__ == '__main__':
import doctest
diff --git a/nikola/plugins/task/tags.py b/nikola/plugins/task/tags.py
index f6b8234..f7f3579 100644
--- a/nikola/plugins/task/tags.py
+++ b/nikola/plugins/task/tags.py
@@ -61,12 +61,14 @@ class RenderTags(Task):
"output_folder": self.site.config['OUTPUT_FOLDER'],
"filters": self.site.config['FILTERS'],
"tag_pages_are_indexes": self.site.config['TAG_PAGES_ARE_INDEXES'],
- "index_display_post_count":
- self.site.config['INDEX_DISPLAY_POST_COUNT'],
+ "index_display_post_count": self.site.config['INDEX_DISPLAY_POST_COUNT'],
"index_teasers": self.site.config['INDEX_TEASERS'],
+ "generate_rss": self.site.config['GENERATE_RSS'],
"rss_teasers": self.site.config["RSS_TEASERS"],
- "hide_untranslated_posts": self.site.config['HIDE_UNTRANSLATED_POSTS'],
+ "rss_plain": self.site.config["RSS_PLAIN"],
+ "show_untranslated_posts": self.site.config['SHOW_UNTRANSLATED_POSTS'],
"feed_length": self.site.config['FEED_LENGTH'],
+ "tzinfo": self.site.tzinfo,
}
self.site.scan_posts()
@@ -81,16 +83,15 @@ class RenderTags(Task):
cat_list = list(self.site.posts_per_category.items())
def render_lists(tag, posts, is_category=True):
- post_list = [self.site.global_data[post] for post in posts]
- post_list.sort(key=lambda a: a.date)
+ post_list = sorted(posts, key=lambda a: a.date)
post_list.reverse()
for lang in kw["translations"]:
- if kw["hide_untranslated_posts"]:
- filtered_posts = [x for x in post_list if x.is_translation_available(lang)]
- else:
+ if kw["show_untranslated_posts"]:
filtered_posts = post_list
- rss_post_list = [p.source_path for p in filtered_posts]
- yield self.tag_rss(tag, lang, rss_post_list, kw, is_category)
+ else:
+ filtered_posts = [x for x in post_list if x.is_translation_available(lang)]
+ if kw["generate_rss"]:
+ yield self.tag_rss(tag, lang, filtered_posts, kw, is_category)
# Render HTML
if kw['tag_pages_are_indexes']:
yield self.tag_page_as_index(tag, lang, filtered_posts, kw, is_category)
@@ -205,12 +206,13 @@ class RenderTags(Task):
num_pages = len(lists)
for i, post_list in enumerate(lists):
context = {}
- # On a tag page, the feeds include the tag's feeds
- rss_link = ("""<link rel="alternate" type="application/rss+xml" """
- """type="application/rss+xml" title="RSS for tag """
- """{0} ({1})" href="{2}">""".format(
- tag, lang, self.site.link(kind + "_rss", tag, lang)))
- context['rss_link'] = rss_link
+ if kw["generate_rss"]:
+ # On a tag page, the feeds include the tag's feeds
+ rss_link = ("""<link rel="alternate" type="application/rss+xml" """
+ """type="application/rss+xml" title="RSS for tag """
+ """{0} ({1})" href="{2}">""".format(
+ tag, lang, self.site.link(kind + "_rss", tag, lang)))
+ context['rss_link'] = rss_link
output_name = os.path.join(kw['output_folder'],
page_name(tag, i, lang))
context["title"] = kw["messages"][lang][
@@ -274,15 +276,13 @@ class RenderTags(Task):
def tag_rss(self, tag, lang, posts, kw, is_category):
"""RSS for a single tag / language"""
kind = "category" if is_category else "tag"
- #Render RSS
+ # Render RSS
output_name = os.path.normpath(
os.path.join(kw['output_folder'],
self.site.path(kind + "_rss", tag, lang)))
feed_url = urljoin(self.site.config['BASE_URL'], self.site.link(kind + "_rss", tag, lang).lstrip('/'))
deps = []
- post_list = [self.site.global_data[post] for post in posts if
- self.site.global_data[post].use_in_feeds]
- post_list.sort(key=lambda a: a.date)
+ post_list = sorted(posts, key=lambda a: a.date)
post_list.reverse()
for post in post_list:
deps += post.deps(lang)
@@ -292,9 +292,10 @@ class RenderTags(Task):
'file_dep': deps,
'targets': [output_name],
'actions': [(utils.generic_rss_renderer,
- (lang, "{0} ({1})".format(kw["blog_title"], tag),
+ (lang, "{0} ({1})".format(kw["blog_title"](lang), tag),
kw["site_url"], None, post_list,
- output_name, kw["rss_teasers"], kw['feed_length'], feed_url))],
+ output_name, kw["rss_teasers"], kw["rss_plain"], kw['feed_length'],
+ feed_url))],
'clean': True,
'uptodate': [utils.config_changed(kw)],
'task_dep': ['render_posts'],
diff --git a/nikola/plugins/template/jinja.py b/nikola/plugins/template/jinja.py
index f14adfe..097ec96 100644
--- a/nikola/plugins/template/jinja.py
+++ b/nikola/plugins/template/jinja.py
@@ -51,6 +51,8 @@ class JinjaTemplates(TemplateSystem):
if jinja2 is None:
return
self.lookup = jinja2.Environment()
+ self.lookup.trim_blocks = True
+ self.lookup.lstrip_blocks = True
self.lookup.filters['tojson'] = json.dumps
self.lookup.globals['enumerate'] = enumerate
@@ -58,7 +60,19 @@ class JinjaTemplates(TemplateSystem):
"""Create a template lookup."""
if jinja2 is None:
req_missing(['jinja2'], 'use this theme')
- self.lookup.loader = jinja2.FileSystemLoader(directories,
+ self.directories = directories
+ self.create_lookup()
+
+ def inject_directory(self, directory):
+ """if it's not there, add the directory to the lookup with lowest priority, and
+ recreate the lookup."""
+ if directory not in self.directories:
+ self.directories.append(directory)
+ self.create_lookup()
+
+ def create_lookup(self):
+ """Create a template lookup object."""
+ self.lookup.loader = jinja2.FileSystemLoader(self.directories,
encoding='utf-8')
def set_site(self, site):
diff --git a/nikola/plugins/template/mako.py b/nikola/plugins/template/mako.py
index 5a23230..b9d856e 100644
--- a/nikola/plugins/template/mako.py
+++ b/nikola/plugins/template/mako.py
@@ -50,6 +50,8 @@ class MakoTemplates(TemplateSystem):
lookup = None
cache = {}
filters = {}
+ directories = []
+ cache_dir = None
def get_deps(self, filename):
text = util.read_file(filename)
@@ -65,7 +67,7 @@ class MakoTemplates(TemplateSystem):
return deps
def set_directories(self, directories, cache_folder):
- """Create a template lookup."""
+ """Set directories and create a template lookup."""
cache_dir = os.path.join(cache_folder, '.mako.tmp')
# Workaround for a Mako bug, Issue #825
if sys.version_info[0] == 2:
@@ -74,12 +76,24 @@ class MakoTemplates(TemplateSystem):
except UnicodeEncodeError:
cache_dir = tempfile.mkdtemp()
LOGGER.warning('Because of a Mako bug, setting cache_dir to {0}'.format(cache_dir))
-
if os.path.exists(cache_dir):
shutil.rmtree(cache_dir)
+ self.directories = directories
+ self.cache_dir = cache_dir
+ self.create_lookup()
+
+ def inject_directory(self, directory):
+ """if it's not there, add the directory to the lookup with lowest priority, and
+ recreate the lookup."""
+ if directory not in self.directories:
+ self.directories.append(directory)
+ self.create_lookup()
+
+ def create_lookup(self):
+ """Create a template lookup object."""
self.lookup = TemplateLookup(
- directories=directories,
- module_directory=cache_dir,
+ directories=self.directories,
+ module_directory=self.cache_dir,
output_encoding='utf-8')
def set_site(self, site):