Diffstat (limited to 'nikola/plugins')
-rw-r--r--  nikola/plugins/__init__.py | 2
-rw-r--r--  nikola/plugins/basic_import.py | 27
-rw-r--r--  nikola/plugins/command/__init__.py | 2
-rw-r--r--  nikola/plugins/command/auto.plugin | 2
-rw-r--r--  nikola/plugins/command/auto/__init__.py | 690
l---------  nikola/plugins/command/auto/livereload.js | 2
-rw-r--r--  nikola/plugins/command/bootswatch_theme.py | 116
-rw-r--r--  nikola/plugins/command/check.plugin | 2
-rw-r--r--  nikola/plugins/command/check.py | 104
-rw-r--r--  nikola/plugins/command/console.plugin | 2
-rw-r--r--  nikola/plugins/command/console.py | 45
-rw-r--r--  nikola/plugins/command/default_config.plugin | 13
-rw-r--r--  nikola/plugins/command/default_config.py | 54
-rw-r--r--  nikola/plugins/command/deploy.plugin | 2
-rw-r--r--  nikola/plugins/command/deploy.py | 54
-rw-r--r--  nikola/plugins/command/github_deploy.plugin | 2
-rw-r--r--  nikola/plugins/command/github_deploy.py | 43
-rw-r--r--  nikola/plugins/command/import_wordpress.plugin | 2
-rw-r--r--  nikola/plugins/command/import_wordpress.py | 283
-rw-r--r--  nikola/plugins/command/init.plugin | 2
-rw-r--r--  nikola/plugins/command/init.py | 88
-rw-r--r--  nikola/plugins/command/install_theme.plugin | 13
-rw-r--r--  nikola/plugins/command/install_theme.py | 91
-rw-r--r--  nikola/plugins/command/new_page.plugin | 2
-rw-r--r--  nikola/plugins/command/new_page.py | 4
-rw-r--r--  nikola/plugins/command/new_post.plugin | 2
-rw-r--r--  nikola/plugins/command/new_post.py | 105
-rw-r--r--  nikola/plugins/command/orphans.plugin | 2
-rw-r--r--  nikola/plugins/command/orphans.py | 3
-rw-r--r--  nikola/plugins/command/plugin.plugin | 2
-rw-r--r--  nikola/plugins/command/plugin.py | 109
-rw-r--r--  nikola/plugins/command/rst2html.plugin | 2
-rw-r--r--  nikola/plugins/command/rst2html/__init__.py | 11
-rw-r--r--  nikola/plugins/command/serve.plugin | 2
-rw-r--r--  nikola/plugins/command/serve.py | 87
-rw-r--r--  nikola/plugins/command/status.plugin | 2
-rw-r--r--  nikola/plugins/command/status.py | 3
-rw-r--r--  nikola/plugins/command/subtheme.plugin (renamed from nikola/plugins/command/bootswatch_theme.plugin) | 10
-rw-r--r--  nikola/plugins/command/subtheme.py | 150
-rw-r--r--  nikola/plugins/command/theme.plugin | 2
-rw-r--r--  nikola/plugins/command/theme.py | 102
-rw-r--r--  nikola/plugins/command/version.plugin | 2
-rw-r--r--  nikola/plugins/command/version.py | 17
-rw-r--r--  nikola/plugins/compile/__init__.py | 2
-rw-r--r--  nikola/plugins/compile/html.plugin | 2
-rw-r--r--  nikola/plugins/compile/html.py | 78
-rw-r--r--  nikola/plugins/compile/ipynb.plugin | 6
-rw-r--r--  nikola/plugins/compile/ipynb.py | 189
-rw-r--r--  nikola/plugins/compile/markdown.plugin | 2
-rw-r--r--  nikola/plugins/compile/markdown/__init__.py | 129
-rw-r--r--  nikola/plugins/compile/markdown/mdx_gist.plugin | 2
-rw-r--r--  nikola/plugins/compile/markdown/mdx_gist.py | 20
-rw-r--r--  nikola/plugins/compile/markdown/mdx_nikola.plugin | 2
-rw-r--r--  nikola/plugins/compile/markdown/mdx_nikola.py | 14
-rw-r--r--  nikola/plugins/compile/markdown/mdx_podcast.plugin | 2
-rw-r--r--  nikola/plugins/compile/markdown/mdx_podcast.py | 10
-rw-r--r--  nikola/plugins/compile/pandoc.plugin | 2
-rw-r--r--  nikola/plugins/compile/pandoc.py | 29
-rw-r--r--  nikola/plugins/compile/php.plugin | 2
-rw-r--r--  nikola/plugins/compile/php.py | 22
-rw-r--r--  nikola/plugins/compile/rest.plugin | 4
-rw-r--r--  nikola/plugins/compile/rest/__init__.py | 229
-rw-r--r--  nikola/plugins/compile/rest/chart.plugin | 2
-rw-r--r--  nikola/plugins/compile/rest/chart.py | 58
-rw-r--r--  nikola/plugins/compile/rest/doc.plugin | 2
-rw-r--r--  nikola/plugins/compile/rest/doc.py | 38
-rw-r--r--  nikola/plugins/compile/rest/gist.plugin | 2
-rw-r--r--  nikola/plugins/compile/rest/gist.py | 2
-rw-r--r--  nikola/plugins/compile/rest/listing.plugin | 2
-rw-r--r--  nikola/plugins/compile/rest/listing.py | 25
-rw-r--r--  nikola/plugins/compile/rest/media.plugin | 2
-rw-r--r--  nikola/plugins/compile/rest/media.py | 13
-rw-r--r--  nikola/plugins/compile/rest/post_list.plugin | 6
-rw-r--r--  nikola/plugins/compile/rest/post_list.py | 258
-rw-r--r--  nikola/plugins/compile/rest/slides.plugin | 14
-rw-r--r--  nikola/plugins/compile/rest/slides.py | 78
-rw-r--r--  nikola/plugins/compile/rest/soundcloud.plugin | 2
-rw-r--r--  nikola/plugins/compile/rest/soundcloud.py | 26
-rw-r--r--  nikola/plugins/compile/rest/thumbnail.plugin | 2
-rw-r--r--  nikola/plugins/compile/rest/thumbnail.py | 6
-rw-r--r--  nikola/plugins/compile/rest/vimeo.plugin | 2
-rw-r--r--  nikola/plugins/compile/rest/vimeo.py | 13
-rw-r--r--  nikola/plugins/compile/rest/youtube.plugin | 2
-rw-r--r--  nikola/plugins/compile/rest/youtube.py | 19
-rw-r--r--  nikola/plugins/misc/__init__.py | 2
-rw-r--r--  nikola/plugins/misc/scan_posts.py | 29
-rw-r--r--  nikola/plugins/misc/taxonomies_classifier.plugin | 12
-rw-r--r--  nikola/plugins/misc/taxonomies_classifier.py | 335
-rw-r--r--  nikola/plugins/shortcode/chart.plugin | 13
-rw-r--r--  nikola/plugins/shortcode/chart.py | 90
-rw-r--r--  nikola/plugins/shortcode/emoji.plugin | 13
-rw-r--r--  nikola/plugins/shortcode/emoji/__init__.py | 46
-rw-r--r--  nikola/plugins/shortcode/emoji/data/Activity.json | 418
-rw-r--r--  nikola/plugins/shortcode/emoji/data/Flags.json | 998
-rw-r--r--  nikola/plugins/shortcode/emoji/data/Food.json | 274
-rw-r--r--  nikola/plugins/shortcode/emoji/data/LICENSE | 25
-rw-r--r--  nikola/plugins/shortcode/emoji/data/Nature.json | 594
-rw-r--r--  nikola/plugins/shortcode/emoji/data/Objects.json | 718
-rw-r--r--  nikola/plugins/shortcode/emoji/data/People.json | 1922
-rw-r--r--  nikola/plugins/shortcode/emoji/data/Symbols.json | 1082
-rw-r--r--  nikola/plugins/shortcode/emoji/data/Travel.json | 466
-rw-r--r--  nikola/plugins/shortcode/gist.plugin | 2
-rw-r--r--  nikola/plugins/shortcode/gist.py | 6
-rw-r--r--  nikola/plugins/shortcode/listing.plugin | 13
-rw-r--r--  nikola/plugins/shortcode/listing.py | 77
-rw-r--r--  nikola/plugins/shortcode/post_list.plugin | 13
-rw-r--r--  nikola/plugins/shortcode/post_list.py | 245
-rw-r--r--  nikola/plugins/shortcode/thumbnail.plugin | 12
-rw-r--r--  nikola/plugins/shortcode/thumbnail.py | 69
-rw-r--r--  nikola/plugins/task/__init__.py | 2
-rw-r--r--  nikola/plugins/task/archive.plugin | 4
-rw-r--r--  nikola/plugins/task/archive.py | 409
-rw-r--r--  nikola/plugins/task/authors.plugin | 4
-rw-r--r--  nikola/plugins/task/authors.py | 387
-rw-r--r--  nikola/plugins/task/bundles.plugin | 4
-rw-r--r--  nikola/plugins/task/bundles.py | 83
-rw-r--r--  nikola/plugins/task/categories.plugin | 12
-rw-r--r--  nikola/plugins/task/categories.py | 248
-rw-r--r--  nikola/plugins/task/copy_assets.plugin | 2
-rw-r--r--  nikola/plugins/task/copy_assets.py | 37
-rw-r--r--  nikola/plugins/task/copy_files.plugin | 2
-rw-r--r--  nikola/plugins/task/copy_files.py | 2
-rw-r--r--  nikola/plugins/task/galleries.plugin | 2
-rw-r--r--  nikola/plugins/task/galleries.py | 233
-rw-r--r--  nikola/plugins/task/gzip.plugin | 2
-rw-r--r--  nikola/plugins/task/gzip.py | 2
-rw-r--r--  nikola/plugins/task/indexes.plugin | 5
-rw-r--r--  nikola/plugins/task/indexes.py | 397
-rw-r--r--  nikola/plugins/task/listings.plugin | 2
-rw-r--r--  nikola/plugins/task/listings.py | 48
-rw-r--r--  nikola/plugins/task/page_index.plugin | 12
-rw-r--r--  nikola/plugins/task/page_index.py | 111
-rw-r--r--  nikola/plugins/task/pages.plugin | 2
-rw-r--r--  nikola/plugins/task/pages.py | 20
-rw-r--r--  nikola/plugins/task/posts.plugin | 2
-rw-r--r--  nikola/plugins/task/posts.py | 18
-rw-r--r--  nikola/plugins/task/py3_switch.plugin | 13
-rw-r--r--  nikola/plugins/task/py3_switch.py | 103
-rw-r--r--  nikola/plugins/task/redirect.plugin | 2
-rw-r--r--  nikola/plugins/task/redirect.py | 6
-rw-r--r--  nikola/plugins/task/robots.plugin | 2
-rw-r--r--  nikola/plugins/task/robots.py | 13
-rw-r--r--  nikola/plugins/task/rss.plugin | 13
-rw-r--r--  nikola/plugins/task/rss.py | 117
-rw-r--r--  nikola/plugins/task/scale_images.plugin | 2
-rw-r--r--  nikola/plugins/task/scale_images.py | 32
-rw-r--r--  nikola/plugins/task/sitemap.plugin | 2
-rw-r--r--  nikola/plugins/task/sitemap.py (renamed from nikola/plugins/task/sitemap/__init__.py) | 47
-rw-r--r--  nikola/plugins/task/sources.plugin | 2
-rw-r--r--  nikola/plugins/task/sources.py | 10
-rw-r--r--  nikola/plugins/task/tags.plugin | 5
-rw-r--r--  nikola/plugins/task/tags.py | 570
-rw-r--r--  nikola/plugins/task/taxonomies.plugin | 12
-rw-r--r--  nikola/plugins/task/taxonomies.py | 459
-rw-r--r--  nikola/plugins/template/__init__.py | 2
-rw-r--r--  nikola/plugins/template/jinja.plugin | 2
-rw-r--r--  nikola/plugins/template/jinja.py | 21
-rw-r--r--  nikola/plugins/template/mako.plugin | 2
-rw-r--r--  nikola/plugins/template/mako.py | 33
159 files changed, 11091 insertions(+), 3485 deletions(-)
diff --git a/nikola/plugins/__init__.py b/nikola/plugins/__init__.py
index b83f43f..70c8c0d 100644
--- a/nikola/plugins/__init__.py
+++ b/nikola/plugins/__init__.py
@@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
"""Plugins for Nikola."""
-
-from __future__ import absolute_import
diff --git a/nikola/plugins/basic_import.py b/nikola/plugins/basic_import.py
index cf98ebc..3e6e21e 100644
--- a/nikola/plugins/basic_import.py
+++ b/nikola/plugins/basic_import.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,21 +26,15 @@
"""Mixin for importer plugins."""
-from __future__ import unicode_literals, print_function
import io
import csv
import datetime
import os
-import sys
-from pkg_resources import resource_filename
-
-try:
- from urlparse import urlparse
-except ImportError:
- from urllib.parse import urlparse # NOQA
+from urllib.parse import urlparse
from lxml import etree, html
from mako.template import Template
+from pkg_resources import resource_filename
from nikola import utils
@@ -90,7 +84,7 @@ class ImportMixin(object):
src = (urlparse(k).path + 'index.html')[1:]
dst = (urlparse(v).path)
if src == index:
- utils.LOGGER.warn("Can't do a redirect for: {0!r}".format(k))
+ utils.LOGGER.warning("Can't do a redirect for: {0!r}".format(k))
else:
redirections.append((src, dst))
return redirections
@@ -101,8 +95,8 @@ class ImportMixin(object):
os.system('nikola init -q ' + self.output_folder)
else:
self.import_into_existing_site = True
- utils.LOGGER.notice('The folder {0} already exists - assuming that this is a '
- 'already existing Nikola site.'.format(self.output_folder))
+ utils.LOGGER.warning('The folder {0} already exists - assuming that this is a '
+ 'already existing Nikola site.'.format(self.output_folder))
filename = resource_filename('nikola', 'conf.py.in')
# The 'strict_undefined=True' will give the missing symbol name if any,
@@ -150,7 +144,7 @@ class ImportMixin(object):
content = html.tostring(doc, encoding='utf8')
except etree.ParserError:
pass
- if isinstance(content, utils.bytes_str):
+ if isinstance(content, bytes):
content = content.decode('utf-8')
compiler.create_post(
filename,
@@ -158,8 +152,7 @@ class ImportMixin(object):
onefile=True,
**headers)
- @staticmethod
- def write_metadata(filename, title, slug, post_date, description, tags, **kwargs):
+ def write_metadata(self, filename, title, slug, post_date, description, tags, **kwargs):
"""Write metadata to meta file."""
if not description:
description = ""
@@ -168,13 +161,13 @@ class ImportMixin(object):
with io.open(filename, "w+", encoding="utf8") as fd:
data = {'title': title, 'slug': slug, 'date': post_date, 'tags': ','.join(tags), 'description': description}
data.update(kwargs)
- fd.write(utils.write_metadata(data))
+ fd.write(utils.write_metadata(data, site=self.site, comment_wrap=False))
@staticmethod
def write_urlmap_csv(output_file, url_map):
"""Write urlmap to csv file."""
utils.makedirs(os.path.dirname(output_file))
- fmode = 'wb+' if sys.version_info[0] == 2 else 'w+'
+ fmode = 'w+'
with io.open(output_file, fmode) as fd:
csv_writer = csv.writer(fd)
for item in url_map.items():
diff --git a/nikola/plugins/command/__init__.py b/nikola/plugins/command/__init__.py
index 62d7086..cdd1560 100644
--- a/nikola/plugins/command/__init__.py
+++ b/nikola/plugins/command/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
diff --git a/nikola/plugins/command/auto.plugin b/nikola/plugins/command/auto.plugin
index 1081c78..a847e14 100644
--- a/nikola/plugins/command/auto.plugin
+++ b/nikola/plugins/command/auto.plugin
@@ -9,5 +9,5 @@ website = https://getnikola.com/
description = Automatically detect site changes, rebuild and optionally refresh a browser.
[Nikola]
-plugincategory = Command
+PluginCategory = Command
diff --git a/nikola/plugins/command/auto/__init__.py b/nikola/plugins/command/auto/__init__.py
index a82dc3e..6bedcac 100644
--- a/nikola/plugins/command/auto/__init__.py
+++ b/nikola/plugins/command/auto/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Chris Warrick, Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,66 +26,55 @@
"""Automatic rebuilds for Nikola."""
-from __future__ import print_function
-
-import json
+import asyncio
+import datetime
import mimetypes
import os
import re
+import stat
import subprocess
import sys
-import time
-try:
- from urlparse import urlparse
- from urllib2 import unquote
-except ImportError:
- from urllib.parse import urlparse, unquote # NOQA
+import typing
import webbrowser
-from wsgiref.simple_server import make_server
-import wsgiref.util
+
import pkg_resources
-from blinker import signal
+from nikola.plugin_categories import Command
+from nikola.utils import dns_sd, req_missing, get_theme_path, makedirs
+
try:
- from ws4py.websocket import WebSocket
- from ws4py.server.wsgirefserver import WSGIServer, WebSocketWSGIRequestHandler, WebSocketWSGIHandler
- from ws4py.server.wsgiutils import WebSocketWSGIApplication
- from ws4py.messaging import TextMessage
+ import aiohttp
+ from aiohttp import web
+ from aiohttp.web_urldispatcher import StaticResource
+ from aiohttp.web_exceptions import HTTPNotFound, HTTPForbidden, HTTPMovedPermanently
+ from aiohttp.web_response import Response
+ from aiohttp.web_fileresponse import FileResponse
except ImportError:
- WebSocket = object
+ aiohttp = web = None
+ StaticResource = HTTPNotFound = HTTPForbidden = Response = FileResponse = object
+
try:
- import watchdog
from watchdog.observers import Observer
- from watchdog.events import FileSystemEventHandler, PatternMatchingEventHandler
except ImportError:
- watchdog = None
- FileSystemEventHandler = object
- PatternMatchingEventHandler = object
+ Observer = None
-from nikola.plugin_categories import Command
-from nikola.utils import dns_sd, req_missing, get_logger, get_theme_path, STDERR_HANDLER
LRJS_PATH = os.path.join(os.path.dirname(__file__), 'livereload.js')
-error_signal = signal('error')
-refresh_signal = signal('refresh')
+REBUILDING_REFRESH_DELAY = 0.35
+IDLE_REFRESH_DELAY = 0.05
-ERROR_N = '''<html>
-<head>
-</head>
-<boody>
-ERROR {}
-</body>
-</html>
-'''
+if sys.platform == 'win32':
+ asyncio.set_event_loop(asyncio.ProactorEventLoop())
class CommandAuto(Command):
"""Automatic rebuilds for Nikola."""
name = "auto"
- logger = None
has_server = True
doc_purpose = "builds and serves a site; automatically detects site changes, rebuilds, and optionally refreshes a browser"
dns_sd = None
+ delta_last_rebuild = datetime.timedelta(milliseconds=100)
+ web_runner = None # type: web.AppRunner
cmd_options = [
{
@@ -94,7 +83,7 @@ class CommandAuto(Command):
'long': 'port',
'default': 8000,
'type': int,
- 'help': 'Port nummber (default: 8000)',
+ 'help': 'Port number',
},
{
'name': 'address',
@@ -102,7 +91,7 @@ class CommandAuto(Command):
'long': 'address',
'type': str,
'default': '127.0.0.1',
- 'help': 'Address to bind (default: 127.0.0.1 -- localhost)',
+ 'help': 'Address to bind',
},
{
'name': 'browser',
@@ -127,26 +116,50 @@ class CommandAuto(Command):
'type': bool,
'help': 'Disable the server, automate rebuilds only'
},
+ {
+ 'name': 'process',
+ 'short': 'n',
+ 'long': 'process',
+ 'default': 0,
+ 'type': int,
+ 'help': 'Number of subprocesses (nikola build argument)'
+ },
+ {
+ 'name': 'parallel-type',
+ 'short': 'P',
+ 'long': 'parallel-type',
+ 'default': 'process',
+ 'type': str,
+ 'help': "Parallelization mode ('process' or 'thread', nikola build argument)"
+ },
]
def _execute(self, options, args):
"""Start the watcher."""
- self.logger = get_logger('auto', STDERR_HANDLER)
- LRSocket.logger = self.logger
-
- if WebSocket is object and watchdog is None:
- req_missing(['ws4py', 'watchdog'], 'use the "auto" command')
- elif WebSocket is object:
- req_missing(['ws4py'], 'use the "auto" command')
- elif watchdog is None:
+ self.sockets = []
+ self.rebuild_queue = asyncio.Queue()
+ self.reload_queue = asyncio.Queue()
+ self.last_rebuild = datetime.datetime.now()
+ self.is_rebuilding = False
+
+ if aiohttp is None and Observer is None:
+ req_missing(['aiohttp', 'watchdog'], 'use the "auto" command')
+ elif aiohttp is None:
+ req_missing(['aiohttp'], 'use the "auto" command')
+ elif Observer is None:
req_missing(['watchdog'], 'use the "auto" command')
- self.cmd_arguments = ['nikola', 'build']
+ if sys.argv[0].endswith('__main__.py'):
+ self.nikola_cmd = [sys.executable, '-m', 'nikola', 'build']
+ else:
+ self.nikola_cmd = [sys.argv[0], 'build']
+
if self.site.configuration_filename != 'conf.py':
- self.cmd_arguments.append('--conf=' + self.site.configuration_filename)
+ self.nikola_cmd.append('--conf=' + self.site.configuration_filename)
- # Run an initial build so we are up-to-date
- subprocess.call(self.cmd_arguments)
+ if options and options.get('process'):
+ self.nikola_cmd += ['--process={}'.format(options['process']),
+ '--parallel-type={}'.format(options['parallel-type'])]
port = options and options.get('port')
self.snippet = '''<script>document.write('<script src="http://'
@@ -155,7 +168,7 @@ class CommandAuto(Command):
+ 'script>')</script>
</head>'''.format(port)
- # Do not duplicate entries -- otherwise, multiple rebuilds are triggered
+ # Deduplicate entries by using a set -- otherwise, multiple rebuilds are triggered
watched = set([
'templates/'
] + [get_theme_path(name) for name in self.site.THEMES])
@@ -167,12 +180,17 @@ class CommandAuto(Command):
watched.add(item)
for item in self.site.config['LISTINGS_FOLDERS']:
watched.add(item)
+ for item in self.site.config['IMAGE_FOLDERS']:
+ watched.add(item)
for item in self.site._plugin_places:
watched.add(item)
# Nikola itself (useful for developers)
watched.add(pkg_resources.resource_filename('nikola', ''))
out_folder = self.site.config['OUTPUT_FOLDER']
+ if not os.path.exists(out_folder):
+ makedirs(out_folder)
+
if options and options.get('browser'):
browser = True
else:
@@ -181,289 +199,387 @@ class CommandAuto(Command):
if options['ipv6']:
dhost = '::'
else:
- dhost = None
+ dhost = '0.0.0.0'
host = options['address'].strip('[').strip(']') or dhost
+ # Prepare asyncio event loop
+ # Required for subprocessing to work
+ loop = asyncio.get_event_loop()
+
+ # Set debug setting
+ loop.set_debug(self.site.debug)
+
# Server can be disabled (Issue #1883)
self.has_server = not options['no-server']
- # Instantiate global observer
- observer = Observer()
if self.has_server:
- # Watch output folders and trigger reloads
- observer.schedule(OurWatchHandler(self.do_refresh), out_folder, recursive=True)
+ loop.run_until_complete(self.set_up_server(host, port, out_folder))
+
+ # Run an initial build so we are up-to-date. The server is running, but we are not watching yet.
+ loop.run_until_complete(self.run_initial_rebuild())
+
+ self.wd_observer = Observer()
+ # Watch output folders and trigger reloads
+ if self.has_server:
+ self.wd_observer.schedule(NikolaEventHandler(self.reload_page, loop), out_folder, recursive=True)
# Watch input folders and trigger rebuilds
for p in watched:
if os.path.exists(p):
- observer.schedule(OurWatchHandler(self.do_rebuild), p, recursive=True)
+ self.wd_observer.schedule(NikolaEventHandler(self.queue_rebuild, loop), p, recursive=True)
# Watch config file (a bit of a hack, but we need a directory)
_conf_fn = os.path.abspath(self.site.configuration_filename or 'conf.py')
_conf_dn = os.path.dirname(_conf_fn)
- observer.schedule(ConfigWatchHandler(_conf_fn, self.do_rebuild), _conf_dn, recursive=False)
-
- try:
- self.logger.info("Watching files for changes...")
- observer.start()
- except KeyboardInterrupt:
- pass
+ self.wd_observer.schedule(ConfigEventHandler(_conf_fn, self.queue_rebuild, loop), _conf_dn, recursive=False)
+ self.wd_observer.start()
- parent = self
+ win_sleeper = None
+ # https://bugs.python.org/issue23057 (fixed in Python 3.8)
+ if sys.platform == 'win32' and sys.version_info < (3, 8):
+ win_sleeper = asyncio.ensure_future(windows_ctrlc_workaround())
- class Mixed(WebSocketWSGIApplication):
- """A class that supports WS and HTTP protocols on the same port."""
+ if not self.has_server:
+ self.logger.info("Watching for changes...")
+ # Run the event loop forever (no server mode).
+ try:
+ # Run rebuild queue
+ loop.run_until_complete(self.run_rebuild_queue())
- def __call__(self, environ, start_response):
- if environ.get('HTTP_UPGRADE') is None:
- return parent.serve_static(environ, start_response)
- return super(Mixed, self).__call__(environ, start_response)
+ loop.run_forever()
+ except KeyboardInterrupt:
+ pass
+ finally:
+ if win_sleeper:
+ win_sleeper.cancel()
+ self.wd_observer.stop()
+ self.wd_observer.join()
+ loop.close()
+ return
- if self.has_server:
- ws = make_server(
- host, port, server_class=WSGIServer,
- handler_class=WebSocketWSGIRequestHandler,
- app=Mixed(handler_cls=LRSocket)
- )
- ws.initialize_websockets_manager()
- self.logger.info("Serving HTTP on {0} port {1}...".format(host, port))
- if browser:
- if options['ipv6'] or '::' in host:
- server_url = "http://[{0}]:{1}/".format(host, port)
- else:
- server_url = "http://{0}:{1}/".format(host, port)
+ if options['ipv6'] or '::' in host:
+ server_url = "http://[{0}]:{1}/".format(host, port)
+ else:
+ server_url = "http://{0}:{1}/".format(host, port)
+ self.logger.info("Serving on {0} ...".format(server_url))
- self.logger.info("Opening {0} in the default web browser...".format(server_url))
- # Yes, this is racy
- webbrowser.open('http://{0}:{1}'.format(host, port))
+ if browser:
+ # Some browsers fail to load 0.0.0.0 (Issue #2755)
+ if host == '0.0.0.0':
+ server_url = "http://127.0.0.1:{0}/".format(port)
+ self.logger.info("Opening {0} in the default web browser...".format(server_url))
+ webbrowser.open(server_url)
- try:
- self.dns_sd = dns_sd(port, (options['ipv6'] or '::' in host))
- ws.serve_forever()
- except KeyboardInterrupt:
- self.logger.info("Server is shutting down.")
- if self.dns_sd:
- self.dns_sd.Reset()
- # This is a hack, but something is locking up in a futex
- # and exit() doesn't work.
- os.kill(os.getpid(), 15)
- else:
- # Workaround: can’t have nothing running (instant exit)
- # but also can’t join threads (no way to exit)
- # The joys of threading.
- try:
- while True:
- time.sleep(1)
- except KeyboardInterrupt:
- self.logger.info("Shutting down.")
- # This is a hack, but something is locking up in a futex
- # and exit() doesn't work.
- os.kill(os.getpid(), 15)
+ # Run the event loop forever and handle shutdowns.
+ try:
+ # Run rebuild queue
+ rebuild_queue_fut = asyncio.ensure_future(self.run_rebuild_queue())
+ reload_queue_fut = asyncio.ensure_future(self.run_reload_queue())
- def do_rebuild(self, event):
+ self.dns_sd = dns_sd(port, (options['ipv6'] or '::' in host))
+ loop.run_forever()
+ except KeyboardInterrupt:
+ pass
+ finally:
+ self.logger.info("Server is shutting down.")
+ if win_sleeper:
+ win_sleeper.cancel()
+ if self.dns_sd:
+ self.dns_sd.Reset()
+ rebuild_queue_fut.cancel()
+ reload_queue_fut.cancel()
+ loop.run_until_complete(self.web_runner.cleanup())
+ self.wd_observer.stop()
+ self.wd_observer.join()
+ loop.close()
+
+ async def set_up_server(self, host: str, port: int, out_folder: str) -> None:
+ """Set up aiohttp server and start it."""
+ webapp = web.Application()
+ webapp.router.add_get('/livereload.js', self.serve_livereload_js)
+ webapp.router.add_get('/robots.txt', self.serve_robots_txt)
+ webapp.router.add_route('*', '/livereload', self.websocket_handler)
+ resource = IndexHtmlStaticResource(True, self.snippet, '', out_folder)
+ webapp.router.register_resource(resource)
+ webapp.on_shutdown.append(self.remove_websockets)
+
+ self.web_runner = web.AppRunner(webapp)
+ await self.web_runner.setup()
+ website = web.TCPSite(self.web_runner, host, port)
+ await website.start()
+
+ async def run_initial_rebuild(self) -> None:
+ """Run an initial rebuild."""
+ await self._rebuild_site()
+ # If there are any clients, have them reload the root.
+ await self._send_reload_command(self.site.config['INDEX_FILE'])
+
+ async def queue_rebuild(self, event) -> None:
"""Rebuild the site."""
# Move events have a dest_path, some editors like gedit use a
# move on larger save operations for write protection
event_path = event.dest_path if hasattr(event, 'dest_path') else event.src_path
- fname = os.path.basename(event_path)
- if (fname.endswith('~') or
- fname.startswith('.') or
+ if sys.platform == 'win32':
+ # Windows hidden files support
+ is_hidden = os.stat(event_path).st_file_attributes & stat.FILE_ATTRIBUTE_HIDDEN
+ else:
+ is_hidden = False
+ has_hidden_component = any(p.startswith('.') for p in event_path.split(os.sep))
+ if (is_hidden or has_hidden_component or
'__pycache__' in event_path or
- event_path.endswith(('.pyc', '.pyo', '.pyd')) or
- os.path.isdir(event_path)): # Skip on folders, these are usually duplicates
+ event_path.endswith(('.pyc', '.pyo', '.pyd', '_bak', '~')) or
+ event.is_directory): # Skip on folders, these are usually duplicates
return
- self.logger.info('REBUILDING SITE (from {0})'.format(event_path))
- p = subprocess.Popen(self.cmd_arguments, stderr=subprocess.PIPE)
- error = p.stderr.read()
- errord = error.decode('utf-8')
- if p.wait() != 0:
- self.logger.error(errord)
- error_signal.send(error=errord)
+
+ self.logger.debug('Queuing rebuild from {0}'.format(event_path))
+ await self.rebuild_queue.put((datetime.datetime.now(), event_path))
+
+ async def run_rebuild_queue(self) -> None:
+ """Run rebuilds from a queue (Nikola can only build in a single instance)."""
+ while True:
+ date, event_path = await self.rebuild_queue.get()
+ if date < (self.last_rebuild + self.delta_last_rebuild):
+ self.logger.debug("Skipping rebuild from {0} (within delta)".format(event_path))
+ continue
+ await self._rebuild_site(event_path)
+
+ async def _rebuild_site(self, event_path: typing.Optional[str] = None) -> None:
+ """Rebuild the site."""
+ self.is_rebuilding = True
+ self.last_rebuild = datetime.datetime.now()
+ if event_path:
+ self.logger.info('REBUILDING SITE (from {0})'.format(event_path))
else:
- print(errord)
+ self.logger.info('REBUILDING SITE')
- def do_refresh(self, event):
- """Refresh the page."""
+ p = await asyncio.create_subprocess_exec(*self.nikola_cmd, stderr=subprocess.PIPE)
+ exit_code = await p.wait()
+ out = (await p.stderr.read()).decode('utf-8')
+
+ if exit_code != 0:
+ self.logger.error("Rebuild failed\n" + out)
+ await self.send_to_websockets({'command': 'alert', 'message': out})
+ else:
+ self.logger.info("Rebuild successful\n" + out)
+
+ self.is_rebuilding = False
+
+ async def run_reload_queue(self) -> None:
+ """Send reloads from a queue to limit CPU usage."""
+ while True:
+ p = await self.reload_queue.get()
+ self.logger.info('REFRESHING: {0}'.format(p))
+ await self._send_reload_command(p)
+ if self.is_rebuilding:
+ await asyncio.sleep(REBUILDING_REFRESH_DELAY)
+ else:
+ await asyncio.sleep(IDLE_REFRESH_DELAY)
+
+ async def _send_reload_command(self, path: str) -> None:
+ """Send a reload command."""
+ await self.send_to_websockets({'command': 'reload', 'path': path, 'liveCSS': True})
+
+ async def reload_page(self, event) -> None:
+ """Reload the page."""
# Move events have a dest_path, some editors like gedit use a
# move on larger save operations for write protection
- event_path = event.dest_path if hasattr(event, 'dest_path') else event.src_path
- self.logger.info('REFRESHING: {0}'.format(event_path))
- p = os.path.relpath(event_path, os.path.abspath(self.site.config['OUTPUT_FOLDER']))
- refresh_signal.send(path=p)
-
- def serve_static(self, environ, start_response):
- """Trivial static file server."""
- uri = wsgiref.util.request_uri(environ)
- p_uri = urlparse(uri)
- f_path = os.path.join(self.site.config['OUTPUT_FOLDER'], *[unquote(x) for x in p_uri.path.split('/')])
-
- # ‘Pretty’ URIs and root are assumed to be HTML
- mimetype = 'text/html' if uri.endswith('/') else mimetypes.guess_type(uri)[0] or 'application/octet-stream'
-
- if os.path.isdir(f_path):
- if not p_uri.path.endswith('/'): # Redirect to avoid breakage
- start_response('301 Moved Permanently', [('Location', p_uri.path + '/')])
- return []
- f_path = os.path.join(f_path, self.site.config['INDEX_FILE'])
- mimetype = 'text/html'
-
- if p_uri.path == '/robots.txt':
- start_response('200 OK', [('Content-type', 'text/plain; charset=UTF-8')])
- return ['User-Agent: *\nDisallow: /\n'.encode('utf-8')]
- elif os.path.isfile(f_path):
- with open(f_path, 'rb') as fd:
- if mimetype.startswith('text/') or mimetype.endswith('+xml'):
- start_response('200 OK', [('Content-type', "{0}; charset=UTF-8".format(mimetype))])
- else:
- start_response('200 OK', [('Content-type', mimetype)])
- return [self.file_filter(mimetype, fd.read())]
- elif p_uri.path == '/livereload.js':
- with open(LRJS_PATH, 'rb') as fd:
- start_response('200 OK', [('Content-type', mimetype)])
- return [self.file_filter(mimetype, fd.read())]
- start_response('404 ERR', [])
- return [self.file_filter('text/html', ERROR_N.format(404).format(uri).encode('utf-8'))]
-
- def file_filter(self, mimetype, data):
- """Apply necessary changes to document before serving."""
- if mimetype == 'text/html':
- data = data.decode('utf8')
- data = self.remove_base_tag(data)
- data = self.inject_js(data)
- data = data.encode('utf8')
- return data
-
- def inject_js(self, data):
- """Inject livereload.js."""
- data = re.sub('</head>', self.snippet, data, 1, re.IGNORECASE)
- return data
-
- def remove_base_tag(self, data):
- """Comment out any <base> to allow local resolution of relative URLs."""
- data = re.sub(r'<base\s([^>]*)>', '<!--base \g<1>-->', data, re.IGNORECASE)
- return data
-
-
-pending = []
-
-
-class LRSocket(WebSocket):
- """Speak Livereload protocol."""
-
- def __init__(self, *a, **kw):
- """Initialize protocol handler."""
- refresh_signal.connect(self.notify)
- error_signal.connect(self.send_error)
- super(LRSocket, self).__init__(*a, **kw)
-
- def received_message(self, message):
- """Handle received message."""
- message = json.loads(message.data.decode('utf8'))
- self.logger.info('<--- {0}'.format(message))
- response = None
- if message['command'] == 'hello': # Handshake
- response = {
- 'command': 'hello',
- 'protocols': [
- 'http://livereload.com/protocols/official-7',
- ],
- 'serverName': 'nikola-livereload',
- }
- elif message['command'] == 'info': # Someone connected
- self.logger.info('****** Browser connected: {0}'.format(message.get('url')))
- self.logger.info('****** sending {0} pending messages'.format(len(pending)))
- while pending:
- msg = pending.pop()
- self.logger.info('---> {0}'.format(msg.data))
- self.send(msg, msg.is_binary)
- else:
- response = {
- 'command': 'alert',
- 'message': 'HEY',
- }
- if response is not None:
- response = json.dumps(response)
- self.logger.info('---> {0}'.format(response))
- response = TextMessage(response)
- self.send(response, response.is_binary)
-
- def notify(self, sender, path):
- """Send reload requests to the client."""
- p = os.path.join('/', path)
- message = {
- 'command': 'reload',
- 'liveCSS': True,
- 'path': p,
- }
- response = json.dumps(message)
- self.logger.info('---> {0}'.format(p))
- response = TextMessage(response)
- if self.stream is None: # No client connected or whatever
- pending.append(response)
+ if event:
+ event_path = event.dest_path if hasattr(event, 'dest_path') else event.src_path
else:
- self.send(response, response.is_binary)
+ event_path = self.site.config['OUTPUT_FOLDER']
+ p = os.path.relpath(event_path, os.path.abspath(self.site.config['OUTPUT_FOLDER'])).replace(os.sep, '/')
+ await self.reload_queue.put(p)
+
+ async def serve_livereload_js(self, request):
+ """Handle requests to /livereload.js and serve the JS file."""
+ return FileResponse(LRJS_PATH)
+
+ async def serve_robots_txt(self, request):
+ """Handle requests to /robots.txt."""
+ return Response(body=b'User-Agent: *\nDisallow: /\n', content_type='text/plain', charset='utf-8')
+
+ async def websocket_handler(self, request):
+ """Handle requests to /livereload and initiate WebSocket communication."""
+ ws = web.WebSocketResponse()
+ await ws.prepare(request)
+ self.sockets.append(ws)
+
+ while True:
+ msg = await ws.receive()
+
+ self.logger.debug("Received message: {0}".format(msg))
+ if msg.type == aiohttp.WSMsgType.TEXT:
+ message = msg.json()
+ if message['command'] == 'hello':
+ response = {
+ 'command': 'hello',
+ 'protocols': [
+ 'http://livereload.com/protocols/official-7',
+ ],
+ 'serverName': 'Nikola Auto (livereload)',
+ }
+ await ws.send_json(response)
+ elif message['command'] != 'info':
+ self.logger.warning("Unknown command in message: {0}".format(message))
+ elif msg.type in (aiohttp.WSMsgType.CLOSED, aiohttp.WSMsgType.CLOSING):
+ break
+ elif msg.type == aiohttp.WSMsgType.CLOSE:
+ self.logger.debug("Closing WebSocket")
+ await ws.close()
+ break
+ elif msg.type == aiohttp.WSMsgType.ERROR:
+ self.logger.error('WebSocket connection closed with exception {0}'.format(ws.exception()))
+ break
+ else:
+ self.logger.warning("Received unknown message: {0}".format(msg))
+
+ self.sockets.remove(ws)
+ self.logger.debug("WebSocket connection closed: {0}".format(ws))
+
+ return ws
+
+ async def remove_websockets(self, app) -> None:
+ """Remove all websockets."""
+ for ws in self.sockets:
+ await ws.close()
+ self.sockets.clear()
+
+ async def send_to_websockets(self, message: dict) -> None:
+ """Send a message to all open WebSockets."""
+ to_delete = []
+ for ws in self.sockets:
+ if ws.closed:
+ to_delete.append(ws)
+ continue
- def send_error(self, sender, error=None):
- """Send reload requests to the client."""
- if self.stream is None: # No client connected or whatever
- return
- message = {
- 'command': 'alert',
- 'message': error,
- }
- response = json.dumps(message)
- response = TextMessage(response)
- if self.stream is None: # No client connected or whatever
- pending.append(response)
+ try:
+ await ws.send_json(message)
+ if ws._close_code:
+ await ws.close()
+ to_delete.append(ws)
+ except RuntimeError as e:
+ if 'closed' in e.args[0]:
+ self.logger.warning("WebSocket {0} closed uncleanly".format(ws))
+ to_delete.append(ws)
+ else:
+ raise
+
+ for ws in to_delete:
+ self.sockets.remove(ws)
+
+
+async def windows_ctrlc_workaround() -> None:
+ """Work around bpo-23057."""
+ # https://bugs.python.org/issue23057
+ while True:
+ await asyncio.sleep(1)
+
+
+class IndexHtmlStaticResource(StaticResource):
+ """A StaticResource implementation that serves /index.html in directory roots."""
+
+ modify_html = True
+ snippet = "</head>"
+
+ def __init__(self, modify_html=True, snippet="</head>", *args, **kwargs):
+ """Initialize a resource."""
+ self.modify_html = modify_html
+ self.snippet = snippet
+ super().__init__(*args, **kwargs)
+
+ async def _handle(self, request: 'web.Request') -> 'web.Response':
+ """Handle incoming requests (pass to handle_file)."""
+ filename = request.match_info['filename']
+ return await self.handle_file(request, filename)
+
+ async def handle_file(self, request: 'web.Request', filename: str, from_index=None) -> 'web.Response':
+ """Handle file requests."""
+ try:
+ filepath = self._directory.joinpath(filename).resolve()
+ if not self._follow_symlinks:
+ filepath.relative_to(self._directory)
+ except (ValueError, FileNotFoundError) as error:
+ # relatively safe
+ raise HTTPNotFound() from error
+ except Exception as error:
+ # perm error or other kind!
+ request.app.logger.exception(error)
+ raise HTTPNotFound() from error
+
+ # on opening a dir, load it's contents if allowed
+ if filepath.is_dir():
+ if filename.endswith('/') or not filename:
+ ret = await self.handle_file(request, filename + 'index.html', from_index=filename)
+ else:
+ # Redirect and add trailing slash so relative links work (Issue #3140)
+ new_url = request.rel_url.path + '/'
+ if request.rel_url.query_string:
+ new_url += '?' + request.rel_url.query_string
+ raise HTTPMovedPermanently(new_url)
+ elif filepath.is_file():
+ ct, encoding = mimetypes.guess_type(str(filepath))
+ encoding = encoding or 'utf-8'
+ if ct == 'text/html' and self.modify_html:
+ if sys.version_info[0] == 3 and sys.version_info[1] <= 5:
+ # Python 3.4 and 3.5 do not accept pathlib.Path objects in calls to open()
+ filepath = str(filepath)
+ with open(filepath, 'r', encoding=encoding) as fh:
+ text = fh.read()
+ text = self.transform_html(text)
+ ret = Response(text=text, content_type=ct, charset=encoding)
+ else:
+ ret = FileResponse(filepath, chunk_size=self._chunk_size)
+ elif from_index:
+ filepath = self._directory.joinpath(from_index).resolve()
+ try:
+ return Response(text=self._directory_as_html(filepath),
+ content_type="text/html")
+ except PermissionError:
+ raise HTTPForbidden
else:
- self.send(response, response.is_binary)
+ raise HTTPNotFound
+
+ return ret
+
+ def transform_html(self, text: str) -> str:
+ """Apply some transforms to HTML content."""
+ # Inject livereload.js
+ text = text.replace('</head>', self.snippet, 1)
+ # Disable <base> tag
+ text = re.sub(r'<base\s([^>]*)>', r'<!--base \g<1>-->', text, flags=re.IGNORECASE)
+ return text
-class OurWatchHandler(FileSystemEventHandler):
- """A Nikola-specific handler for Watchdog."""
+# Based on code from the 'hachiko' library by John Biesnecker — thanks!
+# https://github.com/biesnecker/hachiko
+class NikolaEventHandler:
+ """A Nikola-specific event handler for Watchdog. Based on code from hachiko."""
- def __init__(self, function):
+ def __init__(self, function, loop):
"""Initialize the handler."""
self.function = function
- super(OurWatchHandler, self).__init__()
+ self.loop = loop
- def on_any_event(self, event):
- """Call the provided function on any event."""
- self.function(event)
+ async def on_any_event(self, event):
+ """Handle all file events."""
+ await self.function(event)
+ def dispatch(self, event):
+ """Dispatch events to handler."""
+ self.loop.call_soon_threadsafe(asyncio.ensure_future, self.on_any_event(event))
-class ConfigWatchHandler(FileSystemEventHandler):
+
+class ConfigEventHandler(NikolaEventHandler):
"""A Nikola-specific handler for Watchdog that handles the config file (as a workaround)."""
- def __init__(self, configuration_filename, function):
+ def __init__(self, configuration_filename, function, loop):
"""Initialize the handler."""
self.configuration_filename = configuration_filename
self.function = function
+ self.loop = loop
- def on_any_event(self, event):
- """Call the provided function on any event."""
+ async def on_any_event(self, event):
+ """Handle file events if they concern the configuration file."""
if event._src_path == self.configuration_filename:
- self.function(event)
-
-
-try:
- # Monkeypatch to hide Broken Pipe Errors
- f = WebSocketWSGIHandler.finish_response
-
- if sys.version_info[0] == 3:
- EX = BrokenPipeError # NOQA
- else:
- EX = IOError
-
- def finish_response(self):
- """Monkeypatched finish_response that ignores broken pipes."""
- try:
- f(self)
- except EX: # Client closed the connection, not a real error
- pass
-
- WebSocketWSGIHandler.finish_response = finish_response
-except NameError:
- # In case there is no WebSocketWSGIHandler because of a failed import.
- pass
+ await self.function(event)
diff --git a/nikola/plugins/command/auto/livereload.js b/nikola/plugins/command/auto/livereload.js
index b4cafb3..282dce5 120000
--- a/nikola/plugins/command/auto/livereload.js
+++ b/nikola/plugins/command/auto/livereload.js
@@ -1 +1 @@
-../../../../bower_components/livereload-js/dist/livereload.js
\ No newline at end of file
+../../../../npm_assets/node_modules/livereload-js/dist/livereload.js
\ No newline at end of file
diff --git a/nikola/plugins/command/bootswatch_theme.py b/nikola/plugins/command/bootswatch_theme.py
deleted file mode 100644
index 4808fdb..0000000
--- a/nikola/plugins/command/bootswatch_theme.py
+++ /dev/null
@@ -1,116 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright © 2012-2016 Roberto Alsina and others.
-
-# Permission is hereby granted, free of charge, to any
-# person obtaining a copy of this software and associated
-# documentation files (the "Software"), to deal in the
-# Software without restriction, including without limitation
-# the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the
-# Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice
-# shall be included in all copies or substantial portions of
-# the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
-# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
-# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
-# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
-# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-"""Given a swatch name from bootswatch.com and a parent theme, creates a custom theme."""
-
-from __future__ import print_function
-import os
-import requests
-
-from nikola.plugin_categories import Command
-from nikola import utils
-
-LOGGER = utils.get_logger('bootswatch_theme', utils.STDERR_HANDLER)
-
-
-def _check_for_theme(theme, themes):
- for t in themes:
- if t.endswith(os.sep + theme):
- return True
- return False
-
-
-class CommandBootswatchTheme(Command):
- """Given a swatch name from bootswatch.com and a parent theme, creates a custom theme."""
-
- name = "bootswatch_theme"
- doc_usage = "[options]"
- doc_purpose = "given a swatch name from bootswatch.com and a parent theme, creates a custom"\
- " theme"
- cmd_options = [
- {
- 'name': 'name',
- 'short': 'n',
- 'long': 'name',
- 'default': 'custom',
- 'type': str,
- 'help': 'New theme name (default: custom)',
- },
- {
- 'name': 'swatch',
- 'short': 's',
- 'default': '',
- 'type': str,
- 'help': 'Name of the swatch from bootswatch.com.'
- },
- {
- 'name': 'parent',
- 'short': 'p',
- 'long': 'parent',
- 'default': 'bootstrap3',
- 'help': 'Parent theme name (default: bootstrap3)',
- },
- ]
-
- def _execute(self, options, args):
- """Given a swatch name and a parent theme, creates a custom theme."""
- name = options['name']
- swatch = options['swatch']
- if not swatch:
- LOGGER.error('The -s option is mandatory')
- return 1
- parent = options['parent']
- version = ''
-
- # See if we need bootswatch for bootstrap v2 or v3
- themes = utils.get_theme_chain(parent, self.site.themes_dirs)
- if not _check_for_theme('bootstrap3', themes) and not _check_for_theme('bootstrap3-jinja', themes):
- version = '2'
- elif not _check_for_theme('bootstrap', themes) and not _check_for_theme('bootstrap-jinja', themes):
- LOGGER.warn('"bootswatch_theme" only makes sense for themes that use bootstrap')
- elif _check_for_theme('bootstrap3-gradients', themes) or _check_for_theme('bootstrap3-gradients-jinja', themes):
- LOGGER.warn('"bootswatch_theme" doesn\'t work well with the bootstrap3-gradients family')
-
- LOGGER.info("Creating '{0}' theme from '{1}' and '{2}'".format(name, swatch, parent))
- utils.makedirs(os.path.join('themes', name, 'assets', 'css'))
- for fname in ('bootstrap.min.css', 'bootstrap.css'):
- url = 'https://bootswatch.com'
- if version:
- url += '/' + version
- url = '/'.join((url, swatch, fname))
- LOGGER.info("Downloading: " + url)
- r = requests.get(url)
- if r.status_code > 299:
- LOGGER.error('Error {} getting {}', r.status_code, url)
- exit(1)
- data = r.text
- with open(os.path.join('themes', name, 'assets', 'css', fname),
- 'wb+') as output:
- output.write(data.encode('utf-8'))
-
- with open(os.path.join('themes', name, 'parent'), 'wb+') as output:
- output.write(parent.encode('utf-8'))
- LOGGER.notice('Theme created. Change the THEME setting to "{0}" to use it.'.format(name))
diff --git a/nikola/plugins/command/check.plugin b/nikola/plugins/command/check.plugin
index 6d2df82..bc6ede3 100644
--- a/nikola/plugins/command/check.plugin
+++ b/nikola/plugins/command/check.plugin
@@ -9,5 +9,5 @@ website = https://getnikola.com/
description = Check the generated site
[Nikola]
-plugincategory = Command
+PluginCategory = Command
diff --git a/nikola/plugins/command/check.py b/nikola/plugins/command/check.py
index 0141a6b..cac6000 100644
--- a/nikola/plugins/command/check.py
+++ b/nikola/plugins/command/check.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,25 +26,19 @@
"""Check the generated site."""
-from __future__ import print_function
-from collections import defaultdict
+import logging
import os
import re
import sys
import time
-import logbook
-try:
- from urllib import unquote
- from urlparse import urlparse, urljoin, urldefrag
-except ImportError:
- from urllib.parse import unquote, urlparse, urljoin, urldefrag # NOQA
+from collections import defaultdict
+from urllib.parse import unquote, urlparse, urljoin, urldefrag
-from doit.loader import generate_tasks
import lxml.html
import requests
+from doit.loader import generate_tasks
from nikola.plugin_categories import Command
-from nikola.utils import get_logger, STDERR_HANDLER
def _call_nikola_list(site, cache=None):
@@ -104,7 +98,6 @@ class CommandCheck(Command):
"""Check the generated site."""
name = "check"
- logger = None
doc_usage = "[-v] (-l [--find-sources] [-r] | -f [--clean-files])"
doc_purpose = "check links and files in the generated site"
@@ -159,15 +152,13 @@ class CommandCheck(Command):
def _execute(self, options, args):
"""Check the generated site."""
- self.logger = get_logger('check', STDERR_HANDLER)
-
if not options['links'] and not options['files'] and not options['clean']:
print(self.help())
- return False
+ return 1
if options['verbose']:
- self.logger.level = logbook.DEBUG
+ self.logger.level = logging.DEBUG
else:
- self.logger.level = logbook.NOTICE
+ self.logger.level = logging.WARNING
failure = False
if options['links']:
failure |= self.scan_links(options['find_sources'], options['remote'])
@@ -191,6 +182,7 @@ class CommandCheck(Command):
self.existing_targets.add(self.site.config['SITE_URL'])
self.existing_targets.add(self.site.config['BASE_URL'])
url_type = self.site.config['URL_TYPE']
+ atom_extension = self.site.config['ATOM_EXTENSION']
deps = {}
if find_sources:
@@ -205,7 +197,7 @@ class CommandCheck(Command):
# Do not look at links in the cache, which are not parsed by
# anyone and may result in false positives. Problems arise
# with galleries, for example. Full rationale: (Issue #1447)
- self.logger.notice("Ignoring {0} (in cache, links may be incorrect)".format(filename))
+ self.logger.warning("Ignoring {0} (in cache, links may be incorrect)".format(filename))
return False
if not os.path.exists(fname):
@@ -213,7 +205,8 @@ class CommandCheck(Command):
return False
if '.html' == fname[-5:]:
- d = lxml.html.fromstring(open(filename, 'rb').read())
+ with open(filename, 'rb') as inf:
+ d = lxml.html.fromstring(inf.read())
extra_objs = lxml.html.fromstring('<html/>')
# Turn elements with a srcset attribute into individual img elements with src attributes
@@ -223,7 +216,7 @@ class CommandCheck(Command):
extra_objs.append(lxml.etree.Element('img', src=srcset_item.strip().split(' ')[0]))
link_elements = list(d.iterlinks()) + list(extra_objs.iterlinks())
# Extract links from XML formats to minimal HTML, allowing those to go through the link checks
- elif '.atom' == filename[-5:]:
+ elif atom_extension == filename[-len(atom_extension):]:
d = lxml.etree.parse(filename)
link_elements = lxml.html.fromstring('<html/>')
for elm in d.findall('*//{http://www.w3.org/2005/Atom}link'):
@@ -257,13 +250,13 @@ class CommandCheck(Command):
# Warn about links from https to http (mixed-security)
if base_url.netloc == parsed.netloc and base_url.scheme == "https" and parsed.scheme == "http":
- self.logger.warn("Mixed-content security for link in {0}: {1}".format(filename, target))
+ self.logger.warning("Mixed-content security for link in {0}: {1}".format(filename, target))
# Link to an internal REDIRECTIONS page
if target in self.internal_redirects:
redir_status_code = 301
redir_target = [_dest for _target, _dest in self.site.config['REDIRECTIONS'] if urljoin('/', _target) == target][0]
- self.logger.warn("Remote link moved PERMANENTLY to \"{0}\" and should be updated in {1}: {2} [HTTP: 301]".format(redir_target, filename, target))
+ self.logger.warning("Remote link moved PERMANENTLY to \"{0}\" and should be updated in {1}: {2} [HTTP: 301]".format(redir_target, filename, target))
# Absolute links to other domains, skip
# Absolute links when using only paths, skip.
@@ -273,7 +266,7 @@ class CommandCheck(Command):
continue
if target in self.checked_remote_targets: # already checked this exact target
if self.checked_remote_targets[target] in [301, 308]:
- self.logger.warn("Remote link PERMANENTLY redirected in {0}: {1} [Error {2}]".format(filename, target, self.checked_remote_targets[target]))
+ self.logger.warning("Remote link PERMANENTLY redirected in {0}: {1} [Error {2}]".format(filename, target, self.checked_remote_targets[target]))
elif self.checked_remote_targets[target] in [302, 307]:
self.logger.debug("Remote link temporarily redirected in {0}: {1} [HTTP: {2}]".format(filename, target, self.checked_remote_targets[target]))
elif self.checked_remote_targets[target] > 399:
@@ -281,7 +274,7 @@ class CommandCheck(Command):
continue
# Skip whitelisted targets
- if any(re.search(_, target) for _ in self.whitelist):
+ if any(pattern.search(target) for pattern in self.whitelist):
continue
# Check the remote link works
@@ -301,7 +294,7 @@ class CommandCheck(Command):
resp = requests.get(target, headers=req_headers, allow_redirects=True)
# Permanent redirects should be updated
if redir_status_code in [301, 308]:
- self.logger.warn("Remote link moved PERMANENTLY to \"{0}\" and should be updated in {1}: {2} [HTTP: {3}]".format(resp.url, filename, target, redir_status_code))
+ self.logger.warning("Remote link moved PERMANENTLY to \"{0}\" and should be updated in {1}: {2} [HTTP: {3}]".format(resp.url, filename, target, redir_status_code))
if redir_status_code in [302, 307]:
self.logger.debug("Remote link temporarily redirected to \"{0}\" in {1}: {2} [HTTP: {3}]".format(resp.url, filename, target, redir_status_code))
self.checked_remote_targets[resp.url] = resp.status_code
@@ -315,7 +308,7 @@ class CommandCheck(Command):
elif resp.status_code <= 399: # The address leads *somewhere* that is not an error
self.logger.debug("Successfully checked remote link in {0}: {1} [HTTP: {2}]".format(filename, target, resp.status_code))
continue
- self.logger.warn("Could not check remote link in {0}: {1} [Unknown problem]".format(filename, target))
+ self.logger.warning("Could not check remote link in {0}: {1} [Unknown problem]".format(filename, target))
continue
if url_type == 'rel_path':
@@ -323,23 +316,44 @@ class CommandCheck(Command):
target_filename = os.path.abspath(
os.path.join(self.site.config['OUTPUT_FOLDER'], unquote(target.lstrip('/'))))
else: # Relative path
- unquoted_target = unquote(target).encode('utf-8') if sys.version_info.major >= 3 else unquote(target).decode('utf-8')
+ unquoted_target = unquote(target).encode('utf-8')
target_filename = os.path.abspath(
os.path.join(os.path.dirname(filename).encode('utf-8'), unquoted_target))
- elif url_type in ('full_path', 'absolute'):
+ else:
+ relative = False
if url_type == 'absolute':
# convert to 'full_path' case, ie url relative to root
- url_rel_path = parsed.path[len(url_netloc_to_root):]
+ if parsed.path.startswith(url_netloc_to_root):
+ url_rel_path = parsed.path[len(url_netloc_to_root):]
+ else:
+ url_rel_path = parsed.path
+ if not url_rel_path.startswith('/'):
+ relative = True
else:
# convert to relative to base path
- url_rel_path = target[len(url_netloc_to_root):]
+ if target.startswith(url_netloc_to_root):
+ url_rel_path = target[len(url_netloc_to_root):]
+ else:
+ url_rel_path = target
+ if not url_rel_path.startswith('/'):
+ relative = True
if url_rel_path == '' or url_rel_path.endswith('/'):
url_rel_path = urljoin(url_rel_path, self.site.config['INDEX_FILE'])
- fs_rel_path = fs_relpath_from_url_path(url_rel_path)
- target_filename = os.path.join(self.site.config['OUTPUT_FOLDER'], fs_rel_path)
+ if relative:
+ unquoted_target = unquote(target).encode('utf-8')
+ target_filename = os.path.abspath(
+ os.path.join(os.path.dirname(filename).encode('utf-8'), unquoted_target))
+ else:
+ fs_rel_path = fs_relpath_from_url_path(url_rel_path)
+ target_filename = os.path.join(self.site.config['OUTPUT_FOLDER'], fs_rel_path)
+
+ if isinstance(target_filename, str):
+ target_filename_str = target_filename
+ else:
+ target_filename_str = target_filename.decode("utf-8", errors="surrogateescape")
- if any(re.search(x, target_filename) for x in self.whitelist):
+ if any(pattern.search(target_filename_str) for pattern in self.whitelist):
continue
elif target_filename not in self.existing_targets:
@@ -348,11 +362,11 @@ class CommandCheck(Command):
self.existing_targets.add(target_filename)
else:
rv = True
- self.logger.warn("Broken link in {0}: {1}".format(filename, target))
+ self.logger.warning("Broken link in {0}: {1}".format(filename, target))
if find_sources:
- self.logger.warn("Possible sources:")
- self.logger.warn("\n".join(deps[filename]))
- self.logger.warn("===============================\n")
+ self.logger.warning("Possible sources:")
+ self.logger.warning("\n".join(deps[filename]))
+ self.logger.warning("===============================\n")
except Exception as exc:
self.logger.error(u"Error with: {0} {1}".format(filename, exc))
return rv
@@ -363,6 +377,7 @@ class CommandCheck(Command):
self.logger.debug("===============\n")
self.logger.debug("{0} mode".format(self.site.config['URL_TYPE']))
failure = False
+ atom_extension = self.site.config['ATOM_EXTENSION']
# Maybe we should just examine all HTML files
output_folder = self.site.config['OUTPUT_FOLDER']
@@ -374,7 +389,7 @@ class CommandCheck(Command):
if '.html' == fname[-5:]:
if self.analyze(fname, find_sources, check_remote):
failure = True
- if '.atom' == fname[-5:]:
+ if atom_extension == fname[-len(atom_extension):]:
if self.analyze(fname, find_sources, False):
failure = True
if fname.endswith('sitemap.xml') or fname.endswith('sitemapindex.xml'):
@@ -397,15 +412,15 @@ class CommandCheck(Command):
if only_on_output:
only_on_output.sort()
- self.logger.warn("Files from unknown origins (orphans):")
+ self.logger.warning("Files from unknown origins (orphans):")
for f in only_on_output:
- self.logger.warn(f)
+ self.logger.warning(f)
failure = True
if only_on_input:
only_on_input.sort()
- self.logger.warn("Files not generated:")
+ self.logger.warning("Files not generated:")
for f in only_on_input:
- self.logger.warn(f)
+ self.logger.warning(f)
if not failure:
self.logger.debug("All files checked.")
return failure
@@ -434,6 +449,7 @@ class CommandCheck(Command):
pass
if warn_flag:
- self.logger.warn('Some files or directories have been removed, your site may need rebuilding')
+ self.logger.warning('Some files or directories have been removed, your site may need rebuilding')
+ return True
- return True
+ return False
diff --git a/nikola/plugins/command/console.plugin b/nikola/plugins/command/console.plugin
index 9bcc909..35e3585 100644
--- a/nikola/plugins/command/console.plugin
+++ b/nikola/plugins/command/console.plugin
@@ -9,5 +9,5 @@ website = https://getnikola.com/
description = Start a debugging python console
[Nikola]
-plugincategory = Command
+PluginCategory = Command
diff --git a/nikola/plugins/command/console.py b/nikola/plugins/command/console.py
index c6a8376..b4342b4 100644
--- a/nikola/plugins/command/console.py
+++ b/nikola/plugins/command/console.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Chris Warrick, Roberto Alsina and others.
+# Copyright © 2012-2020 Chris Warrick, Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,15 +26,14 @@
"""Start debugging console."""
-from __future__ import print_function, unicode_literals
import os
from nikola import __version__
from nikola.plugin_categories import Command
-from nikola.utils import get_logger, STDERR_HANDLER, req_missing, Commands
+from nikola.utils import get_logger, req_missing, Commands
-LOGGER = get_logger('console', STDERR_HANDLER)
+LOGGER = get_logger('console')
class CommandConsole(Command):
@@ -44,9 +43,9 @@ class CommandConsole(Command):
shells = ['ipython', 'bpython', 'plain']
doc_purpose = "start an interactive Python console with access to your site"
doc_description = """\
-The site engine is accessible as `site`, the config file as `conf`, and commands are available as `commands`.
+The site engine is accessible as `site` and `nikola_site`, the config file as `conf`, and commands are available as `commands`.
If there is no console to use specified (as -b, -i, -p) it tries IPython, then falls back to bpython, and finally falls back to the plain Python console."""
- header = "Nikola v" + __version__ + " -- {0} Console (conf = configuration file, site = site engine, commands = nikola commands)"
+ header = "Nikola v" + __version__ + " -- {0} Console (conf = configuration file, site, nikola_site = site engine, commands = nikola commands)"
cmd_options = [
{
'name': 'bpython',
@@ -72,19 +71,35 @@ If there is no console to use specified (as -b, -i, -p) it tries IPython, then f
'default': False,
'help': 'Use the plain Python interpreter',
},
+ {
+ 'name': 'command',
+ 'short': 'c',
+ 'long': 'command',
+ 'type': str,
+ 'default': None,
+ 'help': 'Run a single command',
+ },
+ {
+ 'name': 'script',
+ 'short': 's',
+ 'long': 'script',
+ 'type': str,
+ 'default': None,
+ 'help': 'Execute a Python script in the console context',
+ },
]
def ipython(self, willful=True):
"""Run an IPython shell."""
try:
import IPython
- except ImportError as e:
+ except ImportError:
if willful:
req_missing(['IPython'], 'use the IPython console')
- raise e # That’s how _execute knows whether to try something else.
+ raise # That’s how _execute knows whether to try something else.
else:
site = self.context['site'] # NOQA
- nikola_site = self.context['site'] # NOQA
+ nikola_site = self.context['nikola_site'] # NOQA
conf = self.context['conf'] # NOQA
commands = self.context['commands'] # NOQA
IPython.embed(header=self.header.format('IPython'))
@@ -93,10 +108,10 @@ If there is no console to use specified (as -b, -i, -p) it tries IPython, then f
"""Run a bpython shell."""
try:
import bpython
- except ImportError as e:
+ except ImportError:
if willful:
req_missing(['bpython'], 'use the bpython console')
- raise e # That’s how _execute knows whether to try something else.
+ raise # That’s how _execute knows whether to try something else.
else:
bpython.embed(banner=self.header.format('bpython'), locals_=self.context)
@@ -134,7 +149,13 @@ If there is no console to use specified (as -b, -i, -p) it tries IPython, then f
'nikola_site': self.site,
'commands': self.site.commands,
}
- if options['bpython']:
+ if options['command']:
+ exec(options['command'], None, self.context)
+ elif options['script']:
+ with open(options['script']) as inf:
+ code = compile(inf.read(), options['script'], 'exec')
+ exec(code, None, self.context)
+ elif options['bpython']:
self.bpython(True)
elif options['ipython']:
self.ipython(True)
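
The new -c/--command and -s/--script options reuse the same context dict handed to the interactive shells (conf, site, nikola_site, commands) and simply exec the supplied code against it. A rough sketch of that dispatch, with a dummy context standing in for the real one:

    context = {'conf': None, 'site': None, 'nikola_site': None, 'commands': None}  # dummy context

    command = "print(sorted(locals()))"  # stand-in for the -c argument
    script_path = None                   # stand-in for the -s argument

    if command:
        exec(command, None, context)
    elif script_path:
        with open(script_path) as inf:
            code = compile(inf.read(), script_path, 'exec')
        exec(code, None, context)

On the command line this would look something like: nikola console -c "print(site.config['BLOG_TITLE'])".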
diff --git a/nikola/plugins/command/default_config.plugin b/nikola/plugins/command/default_config.plugin
new file mode 100644
index 0000000..af279f6
--- /dev/null
+++ b/nikola/plugins/command/default_config.plugin
@@ -0,0 +1,13 @@
+[Core]
+name = default_config
+module = default_config
+
+[Documentation]
+author = Roberto Alsina
+version = 1.0
+website = https://getnikola.com/
+description = Show the default configuration.
+
+[Nikola]
+PluginCategory = Command
+
diff --git a/nikola/plugins/command/default_config.py b/nikola/plugins/command/default_config.py
new file mode 100644
index 0000000..036f4d1
--- /dev/null
+++ b/nikola/plugins/command/default_config.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+
+# Copyright © 2012-2020 Roberto Alsina and others.
+
+# Permission is hereby granted, free of charge, to any
+# person obtaining a copy of this software and associated
+# documentation files (the "Software"), to deal in the
+# Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the
+# Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice
+# shall be included in all copies or substantial portions of
+# the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
+# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+"""Show the default configuration."""
+
+import sys
+
+import nikola.plugins.command.init
+from nikola.plugin_categories import Command
+from nikola.utils import get_logger
+
+
+LOGGER = get_logger('default_config')
+
+
+class CommandShowConfig(Command):
+ """Show the default configuration."""
+
+ name = "default_config"
+
+ doc_usage = ""
+ needs_config = False
+ doc_purpose = "Print the default Nikola configuration."
+ cmd_options = []
+
+ def _execute(self, options=None, args=None):
+ """Show the default configuration."""
+ try:
+ print(nikola.plugins.command.init.CommandInit.create_configuration_to_string())
+ except Exception:
+ sys.stdout.buffer.write(nikola.plugins.command.init.CommandInit.create_configuration_to_string().encode('utf-8'))
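
The try/except at the end of _execute is an encoding fallback: if print() cannot encode the generated configuration (for example on a console locked to ASCII), the UTF-8 bytes are written straight to the underlying buffer. A small sketch of the same pattern with placeholder text:

    import sys

    config_text = 'BLOG_TITLE = "Démo"\n'  # placeholder for the generated configuration
    try:
        print(config_text)
    except Exception:
        sys.stdout.buffer.write(config_text.encode('utf-8'))

Typical use would presumably be redirecting the command's output into a file to seed a new configuration.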
diff --git a/nikola/plugins/command/deploy.plugin b/nikola/plugins/command/deploy.plugin
index 8bdc0e2..7cff28d 100644
--- a/nikola/plugins/command/deploy.plugin
+++ b/nikola/plugins/command/deploy.plugin
@@ -9,5 +9,5 @@ website = https://getnikola.com/
description = Deploy the site
[Nikola]
-plugincategory = Command
+PluginCategory = Command
diff --git a/nikola/plugins/command/deploy.py b/nikola/plugins/command/deploy.py
index c2289e8..5273b58 100644
--- a/nikola/plugins/command/deploy.py
+++ b/nikola/plugins/command/deploy.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,19 +26,16 @@
"""Deploy site."""
-from __future__ import print_function
-import io
-from datetime import datetime
-from dateutil.tz import gettz
-import dateutil
-import os
import subprocess
import time
+from datetime import datetime
+import dateutil
from blinker import signal
+from dateutil.tz import gettz
from nikola.plugin_categories import Command
-from nikola.utils import get_logger, clean_before_deployment, STDERR_HANDLER
+from nikola.utils import clean_before_deployment
class CommandDeploy(Command):
@@ -49,49 +46,28 @@ class CommandDeploy(Command):
doc_usage = "[preset [preset...]]"
doc_purpose = "deploy the site"
doc_description = "Deploy the site by executing deploy commands from the presets listed on the command line. If no presets are specified, `default` is executed."
- logger = None
def _execute(self, command, args):
"""Execute the deploy command."""
- self.logger = get_logger('deploy', STDERR_HANDLER)
- # Get last successful deploy date
- timestamp_path = os.path.join(self.site.config['CACHE_FOLDER'], 'lastdeploy')
-
# Get last-deploy from persistent state
last_deploy = self.site.state.get('last_deploy')
- if last_deploy is None:
- # If there is a last-deploy saved, move it to the new state persistence thing
- # FIXME: remove in Nikola 8
- if os.path.isfile(timestamp_path):
- try:
- with io.open(timestamp_path, 'r', encoding='utf8') as inf:
- last_deploy = dateutil.parser.parse(inf.read())
- clean = False
- except (IOError, Exception) as e:
- self.logger.debug("Problem when reading `{0}`: {1}".format(timestamp_path, e))
- last_deploy = datetime(1970, 1, 1)
- clean = True
- os.unlink(timestamp_path) # Remove because from now on it's in state
- else: # Just a default
- last_deploy = datetime(1970, 1, 1)
- clean = True
- else:
+ if last_deploy is not None:
last_deploy = dateutil.parser.parse(last_deploy)
clean = False
- if self.site.config['COMMENT_SYSTEM_ID'] == 'nikolademo':
- self.logger.warn("\nWARNING WARNING WARNING WARNING\n"
- "You are deploying using the nikolademo Disqus account.\n"
- "That means you will not be able to moderate the comments in your own site.\n"
- "And is probably not what you want to do.\n"
- "Think about it for 5 seconds, I'll wait :-)\n"
- "(press Ctrl+C to abort)\n")
+ if self.site.config['COMMENT_SYSTEM'] and self.site.config['COMMENT_SYSTEM_ID'] == 'nikolademo':
+ self.logger.warning("\nWARNING WARNING WARNING WARNING\n"
+ "You are deploying using the nikolademo Disqus account.\n"
+ "That means you will not be able to moderate the comments in your own site.\n"
+ "And is probably not what you want to do.\n"
+ "Think about it for 5 seconds, I'll wait :-)\n"
+ "(press Ctrl+C to abort)\n")
time.sleep(5)
# Remove drafts and future posts if requested
undeployed_posts = clean_before_deployment(self.site)
if undeployed_posts:
- self.logger.notice("Deleted {0} posts due to DEPLOY_* settings".format(len(undeployed_posts)))
+ self.logger.warning("Deleted {0} posts due to DEPLOY_* settings".format(len(undeployed_posts)))
if args:
presets = args
@@ -102,7 +78,7 @@ class CommandDeploy(Command):
for preset in presets:
try:
self.site.config['DEPLOY_COMMANDS'][preset]
- except:
+ except KeyError:
self.logger.error('No such preset: {0}'.format(preset))
return 255
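
Preset validation now catches KeyError specifically rather than a bare except, so only a missing DEPLOY_COMMANDS entry takes the error path. A sketch of that lookup with a made-up configuration:

    deploy_commands = {'default': ['echo deploying']}  # stand-in for site.config['DEPLOY_COMMANDS']

    def validate_presets(presets):
        for preset in presets:
            try:
                deploy_commands[preset]
            except KeyError:
                print('No such preset: {0}'.format(preset))
                return 255
        return 0

    print(validate_presets(['default']))  # 0
    print(validate_presets(['staging']))  # No such preset: staging, then 255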
diff --git a/nikola/plugins/command/github_deploy.plugin b/nikola/plugins/command/github_deploy.plugin
index 21e246c..fbdd3bf 100644
--- a/nikola/plugins/command/github_deploy.plugin
+++ b/nikola/plugins/command/github_deploy.plugin
@@ -9,5 +9,5 @@ website = https://getnikola.com/
description = Deploy the site to GitHub pages.
[Nikola]
-plugincategory = Command
+PluginCategory = Command
diff --git a/nikola/plugins/command/github_deploy.py b/nikola/plugins/command/github_deploy.py
index b5ad322..d2c1f3f 100644
--- a/nikola/plugins/command/github_deploy.py
+++ b/nikola/plugins/command/github_deploy.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2014-2016 Puneeth Chaganti and others.
+# Copyright © 2014-2020 Puneeth Chaganti and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,14 +26,13 @@
"""Deploy site to GitHub Pages."""
-from __future__ import print_function
import os
import subprocess
from textwrap import dedent
from nikola.plugin_categories import Command
from nikola.plugins.command.check import real_scan_files
-from nikola.utils import get_logger, req_missing, clean_before_deployment, STDERR_HANDLER
+from nikola.utils import req_missing, clean_before_deployment
from nikola.__main__ import main
from nikola import __version__
@@ -54,6 +53,12 @@ def check_ghp_import_installed():
req_missing(['ghp-import2'], 'deploy the site to GitHub Pages')
+class DeployFailedException(Exception):
+ """An internal exception for deployment errors."""
+
+ pass
+
+
class CommandGitHubDeploy(Command):
"""Deploy site to GitHub Pages."""
@@ -63,11 +68,9 @@ class CommandGitHubDeploy(Command):
doc_purpose = 'deploy the site to GitHub Pages'
doc_description = dedent(
"""\
- This command can be used to deploy your site to GitHub Pages.
-
- It uses ghp-import to do this task.
+ This command can be used to deploy your site to GitHub Pages. It uses ghp-import to do this task. It also optionally commits to the source branch.
- """
+ Configuration help: https://getnikola.com/handbook.html#deploying-to-github"""
)
cmd_options = [
{
@@ -76,15 +79,12 @@ class CommandGitHubDeploy(Command):
'long': 'message',
'default': 'Nikola auto commit.',
'type': str,
- 'help': 'Commit message (default: Nikola auto commit.)',
+ 'help': 'Commit message',
},
]
- logger = None
def _execute(self, options, args):
"""Run the deployment."""
- self.logger = get_logger(CommandGitHubDeploy.name, STDERR_HANDLER)
-
# Check if ghp-import is installed
check_ghp_import_installed()
@@ -102,12 +102,10 @@ class CommandGitHubDeploy(Command):
# Remove drafts and future posts if requested (Issue #2406)
undeployed_posts = clean_before_deployment(self.site)
if undeployed_posts:
- self.logger.notice("Deleted {0} posts due to DEPLOY_* settings".format(len(undeployed_posts)))
+ self.logger.warning("Deleted {0} posts due to DEPLOY_* settings".format(len(undeployed_posts)))
# Commit and push
- self._commit_and_push(options['commit_message'])
-
- return
+ return self._commit_and_push(options['commit_message'])
def _run_command(self, command, xfail=False):
"""Run a command that may or may not fail."""
@@ -122,7 +120,7 @@ class CommandGitHubDeploy(Command):
'Failed GitHub deployment -- command {0} '
'returned {1}'.format(e.cmd, e.returncode)
)
- raise SystemError(e.returncode)
+ raise DeployFailedException(e.returncode)
def _commit_and_push(self, commit_first_line):
"""Commit all the files and push."""
@@ -145,9 +143,16 @@ class CommandGitHubDeploy(Command):
if e != 0:
self._run_command(['git', 'commit', '-am', commit_message])
else:
- self.logger.notice('Nothing to commit to source branch.')
+ self.logger.info('Nothing to commit to source branch.')
+
+ try:
+ source_commit = uni_check_output(['git', 'rev-parse', source])
+ except subprocess.CalledProcessError:
+ try:
+ source_commit = uni_check_output(['git', 'rev-parse', 'HEAD'])
+ except subprocess.CalledProcessError:
+ source_commit = '?'
- source_commit = uni_check_output(['git', 'rev-parse', source])
commit_message = (
'{0}\n\n'
'Source commit: {1}'
@@ -161,7 +166,7 @@ class CommandGitHubDeploy(Command):
if autocommit:
self._run_command(['git', 'push', '-u', remote, source])
- except SystemError as e:
+ except DeployFailedException as e:
return e.args[0]
self.logger.info("Successful deployment")
diff --git a/nikola/plugins/command/import_wordpress.plugin b/nikola/plugins/command/import_wordpress.plugin
index eab9d17..46df1ef 100644
--- a/nikola/plugins/command/import_wordpress.plugin
+++ b/nikola/plugins/command/import_wordpress.plugin
@@ -9,5 +9,5 @@ website = https://getnikola.com/
description = Import a wordpress site from a XML dump (requires markdown).
[Nikola]
-plugincategory = Command
+PluginCategory = Command
diff --git a/nikola/plugins/command/import_wordpress.py b/nikola/plugins/command/import_wordpress.py
index 0b48583..5e2aee6 100644
--- a/nikola/plugins/command/import_wordpress.py
+++ b/nikola/plugins/command/import_wordpress.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,46 +26,45 @@
"""Import a WordPress dump."""
-from __future__ import unicode_literals, print_function
-import os
-import re
-import sys
import datetime
import io
import json
+import os
+import re
+import sys
+from collections import defaultdict
+from urllib.parse import urlparse, unquote
+
import requests
from lxml import etree
-from collections import defaultdict
-try:
- import html2text
-except:
- html2text = None
+from nikola.plugin_categories import Command
+from nikola import utils, hierarchy_utils
+from nikola.nikola import DEFAULT_TRANSLATIONS_PATTERN
+from nikola.utils import req_missing
+from nikola.plugins.basic_import import ImportMixin, links
+from nikola.plugins.command.init import (
+ SAMPLE_CONF, prepare_config,
+ format_default_translations_config,
+ get_default_translations_dict
+)
try:
- from urlparse import urlparse
- from urllib import unquote
+ import html2text
except ImportError:
- from urllib.parse import urlparse, unquote # NOQA
+ html2text = None
try:
import phpserialize
except ImportError:
- phpserialize = None # NOQA
+ phpserialize = None
-from nikola.plugin_categories import Command
-from nikola import utils
-from nikola.utils import req_missing, unicode_str
-from nikola.plugins.basic_import import ImportMixin, links
-from nikola.nikola import DEFAULT_TRANSLATIONS_PATTERN
-from nikola.plugins.command.init import SAMPLE_CONF, prepare_config, format_default_translations_config
-
-LOGGER = utils.get_logger('import_wordpress', utils.STDERR_HANDLER)
+LOGGER = utils.get_logger('import_wordpress')
def install_plugin(site, plugin_name, output_dir=None, show_install_notes=False):
"""Install a Nikola plugin."""
- LOGGER.notice("Installing plugin '{0}'".format(plugin_name))
+ LOGGER.info("Installing plugin '{0}'".format(plugin_name))
# Get hold of the 'plugin' plugin
plugin_installer_info = site.plugin_manager.getPluginByName('plugin', 'Command')
if plugin_installer_info is None:
@@ -148,15 +147,22 @@ class CommandImportWordpress(Command, ImportMixin):
'long': 'qtranslate',
'default': False,
'type': bool,
- 'help': "Look for translations generated by qtranslate plugin",
- # WARNING: won't recover translated titles that actually
- # don't seem to be part of the wordpress XML export at the
- # time of writing :(
+ 'help': """Look for translations generated by qtranslate plugin.
+WARNING: a default wordpress export won't allow to recover title translations.
+For this to be possible consider applying the hack suggested at
+https://github.com/qtranslate/qtranslate-xt/issues/199 :
+
+In wp-admin/includes/export.php change
+`echo apply_filters( 'the_title_rss', $post->post_title );
+
+to
+`echo apply_filters( 'the_title_export', $post->post_title );
+"""
},
{
'name': 'translations_pattern',
'long': 'translations_pattern',
- 'default': None,
+ 'default': DEFAULT_TRANSLATIONS_PATTERN,
'type': str,
'help': "The pattern for translation files names",
},
@@ -259,9 +265,9 @@ class CommandImportWordpress(Command, ImportMixin):
options['output_folder'] = args.pop(0)
if args:
- LOGGER.warn('You specified additional arguments ({0}). Please consider '
- 'putting these arguments before the filename if you '
- 'are running into problems.'.format(args))
+ LOGGER.warning('You specified additional arguments ({0}). Please consider '
+ 'putting these arguments before the filename if you '
+ 'are running into problems.'.format(args))
self.onefile = options.get('one_file', False)
@@ -307,7 +313,7 @@ class CommandImportWordpress(Command, ImportMixin):
LOGGER.error("You can use at most one of the options --html2text, --transform-to-html and --transform-to-markdown.")
return False
if (self.html2text or self.transform_to_html or self.transform_to_markdown) and self.use_wordpress_compiler:
- LOGGER.warn("It does not make sense to combine --use-wordpress-compiler with any of --html2text, --transform-to-html and --transform-to-markdown, as the latter convert all posts to HTML and the first option then affects zero posts.")
+ LOGGER.warning("It does not make sense to combine --use-wordpress-compiler with any of --html2text, --transform-to-html and --transform-to-markdown, as the latter convert all posts to HTML and the first option then affects zero posts.")
if (self.html2text or self.transform_to_markdown) and not html2text:
LOGGER.error("You need to install html2text via 'pip install html2text' before you can use the --html2text and --transform-to-markdown options.")
@@ -339,14 +345,14 @@ class CommandImportWordpress(Command, ImportMixin):
# cat_id = get_text_tag(cat, '{{{0}}}term_id'.format(wordpress_namespace), None)
cat_slug = get_text_tag(cat, '{{{0}}}category_nicename'.format(wordpress_namespace), None)
cat_parent_slug = get_text_tag(cat, '{{{0}}}category_parent'.format(wordpress_namespace), None)
- cat_name = get_text_tag(cat, '{{{0}}}cat_name'.format(wordpress_namespace), None)
+ cat_name = utils.html_unescape(get_text_tag(cat, '{{{0}}}cat_name'.format(wordpress_namespace), None))
cat_path = [cat_name]
if cat_parent_slug in cat_map:
cat_path = cat_map[cat_parent_slug] + cat_path
cat_map[cat_slug] = cat_path
self._category_paths = dict()
for cat, path in cat_map.items():
- self._category_paths[cat] = utils.join_hierarchical_category_path(path)
+ self._category_paths[cat] = hierarchy_utils.join_hierarchical_category_path(path)
def _execute(self, options={}, args=[]):
"""Import a WordPress blog from an export file into a Nikola site."""
@@ -373,17 +379,12 @@ class CommandImportWordpress(Command, ImportMixin):
if phpserialize is None:
req_missing(['phpserialize'], 'import WordPress dumps without --no-downloads')
- channel = self.get_channel_from_file(self.wordpress_export_file)
+ export_file_preprocessor = modernize_qtranslate_tags if self.separate_qtranslate_content else None
+ channel = self.get_channel_from_file(self.wordpress_export_file, export_file_preprocessor)
self._prepare(channel)
conf_template = self.generate_base_site()
- # If user has specified a custom pattern for translation files we
- # need to fix the config
- if self.translations_pattern:
- self.context['TRANSLATIONS_PATTERN'] = self.translations_pattern
-
self.import_posts(channel)
-
self.context['TRANSLATIONS'] = format_default_translations_config(
self.extra_languages)
self.context['REDIRECTIONS'] = self.configure_redirections(
@@ -397,7 +398,7 @@ class CommandImportWordpress(Command, ImportMixin):
# Add tag redirects
for tag in self.all_tags:
try:
- if isinstance(tag, utils.bytes_str):
+ if isinstance(tag, bytes):
tag_str = tag.decode('utf8', 'replace')
else:
tag_str = tag
@@ -420,9 +421,9 @@ class CommandImportWordpress(Command, ImportMixin):
if not install_plugin(self.site, 'wordpress_compiler', output_dir=os.path.join(self.output_folder, 'plugins')):
return False
else:
- LOGGER.warn("Make sure to install the WordPress page compiler via")
- LOGGER.warn(" nikola plugin -i wordpress_compiler")
- LOGGER.warn("in your imported blog's folder ({0}), if you haven't installed it system-wide or user-wide. Otherwise, your newly imported blog won't compile.".format(self.output_folder))
+ LOGGER.warning("Make sure to install the WordPress page compiler via")
+ LOGGER.warning(" nikola plugin -i wordpress_compiler")
+ LOGGER.warning("in your imported blog's folder ({0}), if you haven't installed it system-wide or user-wide. Otherwise, your newly imported blog won't compile.".format(self.output_folder))
@classmethod
def read_xml_file(cls, filename):
@@ -438,9 +439,16 @@ class CommandImportWordpress(Command, ImportMixin):
return b''.join(xml)
@classmethod
- def get_channel_from_file(cls, filename):
- """Get channel from XML file."""
- tree = etree.fromstring(cls.read_xml_file(filename))
+ def get_channel_from_file(cls, filename, xml_preprocessor=None):
+ """Get channel from XML file.
+
+ An optional 'xml_preprocessor' can be used to modify the XML
+ (typically to handle variations in tags injected by some WP plugins).
+ """
+ xml_string = cls.read_xml_file(filename)
+ if xml_preprocessor:
+ xml_string = xml_preprocessor(xml_string)
+ tree = etree.fromstring(xml_string)
channel = tree.find('channel')
return channel
@@ -451,7 +459,10 @@ class CommandImportWordpress(Command, ImportMixin):
context = SAMPLE_CONF.copy()
self.lang = get_text_tag(channel, 'language', 'en')[:2]
context['DEFAULT_LANG'] = self.lang
- context['TRANSLATIONS_PATTERN'] = DEFAULT_TRANSLATIONS_PATTERN
+ # If user has specified a custom pattern for translation files we
+ # need to fix the config
+ context['TRANSLATIONS_PATTERN'] = self.translations_pattern
+
context['BLOG_TITLE'] = get_text_tag(channel, 'title',
'PUT TITLE HERE')
context['BLOG_DESCRIPTION'] = get_text_tag(
@@ -482,17 +493,17 @@ class CommandImportWordpress(Command, ImportMixin):
PAGES = '(\n'
for extension in extensions:
POSTS += ' ("posts/*.{0}", "posts", "post.tmpl"),\n'.format(extension)
- PAGES += ' ("pages/*.{0}", "pages", "story.tmpl"),\n'.format(extension)
+ PAGES += ' ("pages/*.{0}", "pages", "page.tmpl"),\n'.format(extension)
POSTS += ')\n'
PAGES += ')\n'
context['POSTS'] = POSTS
context['PAGES'] = PAGES
COMPILERS = '{\n'
- COMPILERS += ''' "rest": ('.txt', '.rst'),''' + '\n'
- COMPILERS += ''' "markdown": ('.md', '.mdown', '.markdown'),''' + '\n'
- COMPILERS += ''' "html": ('.html', '.htm'),''' + '\n'
+ COMPILERS += ''' "rest": ['.txt', '.rst'],''' + '\n'
+ COMPILERS += ''' "markdown": ['.md', '.mdown', '.markdown'],''' + '\n'
+ COMPILERS += ''' "html": ['.html', '.htm'],''' + '\n'
if self.use_wordpress_compiler:
- COMPILERS += ''' "wordpress": ('.wp'),''' + '\n'
+ COMPILERS += ''' "wordpress": ['.wp'],''' + '\n'
COMPILERS += '}'
context['COMPILERS'] = COMPILERS
@@ -503,12 +514,12 @@ class CommandImportWordpress(Command, ImportMixin):
try:
request = requests.get(url, auth=self.auth)
if request.status_code >= 400:
- LOGGER.warn("Downloading {0} to {1} failed with HTTP status code {2}".format(url, dst_path, request.status_code))
+ LOGGER.warning("Downloading {0} to {1} failed with HTTP status code {2}".format(url, dst_path, request.status_code))
return
with open(dst_path, 'wb+') as fd:
fd.write(request.content)
except requests.exceptions.ConnectionError as err:
- LOGGER.warn("Downloading {0} to {1} failed: {2}".format(url, dst_path, err))
+ LOGGER.warning("Downloading {0} to {1} failed: {2}".format(url, dst_path, err))
def import_attachment(self, item, wordpress_namespace):
"""Import an attachment to the site."""
@@ -549,14 +560,7 @@ class CommandImportWordpress(Command, ImportMixin):
# that the export should give you the power to insert
# your blogging into another site or system its not.
# Why don't they just use JSON?
- if sys.version_info[0] == 2:
- try:
- metadata = phpserialize.loads(utils.sys_encode(meta_value.text))
- except ValueError:
- # local encoding might be wrong sometimes
- metadata = phpserialize.loads(meta_value.text.encode('utf-8'))
- else:
- metadata = phpserialize.loads(meta_value.text.encode('utf-8'))
+ metadata = phpserialize.loads(meta_value.text.encode('utf-8'))
meta_key = b'image_meta'
size_key = b'sizes'
@@ -583,6 +587,9 @@ class CommandImportWordpress(Command, ImportMixin):
if ignore_zero and value == 0:
return
elif is_float:
+ # in some locales (like fr) and in old posts, the value may use a comma as the decimal separator.
+ if isinstance(value, bytes):
+ value = value.replace(b",", b".")
value = float(value)
if ignore_zero and value == 0:
return
@@ -775,7 +782,7 @@ class CommandImportWordpress(Command, ImportMixin):
elif approved == 'spam' or approved == 'trash':
pass
else:
- LOGGER.warn("Unknown comment approved status: {0}".format(approved))
+ LOGGER.warning("Unknown comment approved status: {0}".format(approved))
parent = int(get_text_tag(comment, "{{{0}}}comment_parent".format(wordpress_namespace), 0))
if parent == 0:
parent = None
@@ -796,7 +803,7 @@ class CommandImportWordpress(Command, ImportMixin):
"""Write comment header line."""
if header_content is None:
return
- header_content = unicode_str(header_content).replace('\n', ' ')
+ header_content = str(header_content).replace('\n', ' ')
line = '.. ' + header_field + ': ' + header_content + '\n'
fd.write(line.encode('utf8'))
@@ -813,6 +820,16 @@ class CommandImportWordpress(Command, ImportMixin):
write_header_line(fd, "wordpress_user_id", comment["user_id"])
fd.write(('\n' + comment['content']).encode('utf8'))
+ def _create_meta_and_content_filenames(self, slug, extension, lang, default_language, translations_config):
+ out_meta_filename = slug + '.meta'
+ out_content_filename = slug + '.' + extension
+ if lang and lang != default_language:
+ out_meta_filename = utils.get_translation_candidate(translations_config,
+ out_meta_filename, lang)
+ out_content_filename = utils.get_translation_candidate(translations_config,
+ out_content_filename, lang)
+ return out_meta_filename, out_content_filename
+
def _create_metadata(self, status, excerpt, tags, categories, post_name=None):
"""Create post metadata."""
other_meta = {'wp-status': status}
@@ -824,16 +841,16 @@ class CommandImportWordpress(Command, ImportMixin):
if text in self._category_paths:
cats.append(self._category_paths[text])
else:
- cats.append(utils.join_hierarchical_category_path([text]))
+ cats.append(hierarchy_utils.join_hierarchical_category_path([utils.html_unescape(text)]))
other_meta['categories'] = ','.join(cats)
if len(cats) > 0:
other_meta['category'] = cats[0]
if len(cats) > 1:
- LOGGER.warn(('Post "{0}" has more than one category! ' +
- 'Will only use the first one.').format(post_name))
- tags_cats = tags
+ LOGGER.warning(('Post "{0}" has more than one category! ' +
+ 'Will only use the first one.').format(post_name))
+ tags_cats = [utils.html_unescape(tag) for tag in tags]
else:
- tags_cats = tags + categories
+ tags_cats = [utils.html_unescape(tag) for tag in tags + categories]
return tags_cats, other_meta
_tag_sanitize_map = {True: {}, False: {}}
@@ -847,7 +864,7 @@ class CommandImportWordpress(Command, ImportMixin):
previous = self._tag_sanitize_map[is_category][tag.lower()]
if self.tag_saniziting_strategy == 'first':
if tag != previous[0]:
- LOGGER.warn("Changing spelling of {0} name '{1}' to {2}.".format('category' if is_category else 'tag', tag, previous[0]))
+ LOGGER.warning("Changing spelling of {0} name '{1}' to {2}.".format('category' if is_category else 'tag', tag, previous[0]))
return previous[0]
else:
LOGGER.error("Unknown tag sanitizing strategy '{0}'!".format(self.tag_saniziting_strategy))
@@ -873,7 +890,7 @@ class CommandImportWordpress(Command, ImportMixin):
path = unquote(parsed.path.strip('/'))
try:
- if isinstance(path, utils.bytes_str):
+ if isinstance(path, bytes):
path = path.decode('utf8', 'replace')
else:
path = path
@@ -925,17 +942,19 @@ class CommandImportWordpress(Command, ImportMixin):
tags = []
categories = []
+ post_status = 'published'
+ has_math = "no"
if status == 'trash':
- LOGGER.warn('Trashed post "{0}" will not be imported.'.format(title))
+ LOGGER.warning('Trashed post "{0}" will not be imported.'.format(title))
return False
elif status == 'private':
- tags.append('private')
is_draft = False
is_private = True
+ post_status = 'private'
elif status != 'publish':
- tags.append('draft')
is_draft = True
is_private = False
+ post_status = 'draft'
else:
is_draft = False
is_private = False
@@ -953,7 +972,7 @@ class CommandImportWordpress(Command, ImportMixin):
tags.append(text)
if '$latex' in content:
- tags.append('mathjax')
+ has_math = "yes"
for i, cat in enumerate(categories[:]):
cat = self._sanitize(cat, True)
@@ -974,52 +993,56 @@ class CommandImportWordpress(Command, ImportMixin):
post_format = 'wp'
if is_draft and self.exclude_drafts:
- LOGGER.notice('Draft "{0}" will not be imported.'.format(title))
+ LOGGER.warning('Draft "{0}" will not be imported.'.format(title))
return False
elif is_private and self.exclude_privates:
- LOGGER.notice('Private post "{0}" will not be imported.'.format(title))
+ LOGGER.warning('Private post "{0}" will not be imported.'.format(title))
return False
elif content.strip() or self.import_empty_items:
# If no content is found, no files are written.
self.url_map[link] = (self.context['SITE_URL'] +
out_folder.rstrip('/') + '/' + slug +
'.html').replace(os.sep, '/')
- if hasattr(self, "separate_qtranslate_content") \
- and self.separate_qtranslate_content:
- content_translations = separate_qtranslate_content(content)
+ default_language = self.context["DEFAULT_LANG"]
+ if self.separate_qtranslate_content:
+ content_translations = separate_qtranslate_tagged_langs(content)
+ title_translations = separate_qtranslate_tagged_langs(title)
else:
content_translations = {"": content}
- default_language = self.context["DEFAULT_LANG"]
+ title_translations = {"": title}
+ # in case of mismatch between the languages found in the title and in the content
+ default_title = title_translations.get(default_language, title)
+ extra_languages = [lang for lang in content_translations.keys() if lang not in ("", default_language)]
+ for extra_lang in extra_languages:
+ self.extra_languages.add(extra_lang)
+ translations_dict = get_default_translations_dict(default_language, extra_languages)
+ current_translations_config = {
+ "DEFAULT_LANG": default_language,
+ "TRANSLATIONS": translations_dict,
+ "TRANSLATIONS_PATTERN": self.context["TRANSLATIONS_PATTERN"]
+ }
for lang, content in content_translations.items():
try:
content, extension, rewrite_html = self.transform_content(content, post_format, attachments)
- except:
+ except Exception:
LOGGER.error(('Cannot interpret post "{0}" (language {1}) with post ' +
'format {2}!').format(os.path.join(out_folder, slug), lang, post_format))
return False
- if lang:
- out_meta_filename = slug + '.meta'
- if lang == default_language:
- out_content_filename = slug + '.' + extension
- else:
- out_content_filename \
- = utils.get_translation_candidate(self.context,
- slug + "." + extension, lang)
- self.extra_languages.add(lang)
- meta_slug = slug
- else:
- out_meta_filename = slug + '.meta'
- out_content_filename = slug + '.' + extension
- meta_slug = slug
+
+ out_meta_filename, out_content_filename = self._create_meta_and_content_filenames(
+ slug, extension, lang, default_language, current_translations_config)
+
tags, other_meta = self._create_metadata(status, excerpt, tags, categories,
post_name=os.path.join(out_folder, slug))
-
+ current_title = title_translations.get(lang, default_title)
meta = {
- "title": title,
- "slug": meta_slug,
+ "title": current_title,
+ "slug": slug,
"date": post_date,
"description": description,
"tags": ','.join(tags),
+ "status": post_status,
+ "has_math": has_math,
}
meta.update(other_meta)
if self.onefile:
@@ -1033,7 +1056,7 @@ class CommandImportWordpress(Command, ImportMixin):
else:
self.write_metadata(os.path.join(self.output_folder, out_folder,
out_meta_filename),
- title, meta_slug, post_date, description, tags, **other_meta)
+ current_title, slug, post_date, description, tags, **other_meta)
self.write_content(
os.path.join(self.output_folder,
out_folder, out_content_filename),
@@ -1053,8 +1076,8 @@ class CommandImportWordpress(Command, ImportMixin):
return (out_folder, slug)
else:
- LOGGER.warn(('Not going to import "{0}" because it seems to contain'
- ' no content.').format(title))
+ LOGGER.warning(('Not going to import "{0}" because it seems to contain'
+ ' no content.').format(title))
return False
def _extract_item_info(self, item):
@@ -1080,7 +1103,7 @@ class CommandImportWordpress(Command, ImportMixin):
if parent_id is not None and int(parent_id) != 0:
self.attachments[int(parent_id)][post_id] = data
else:
- LOGGER.warn("Attachment #{0} ({1}) has no parent!".format(post_id, data['files']))
+ LOGGER.warning("Attachment #{0} ({1}) has no parent!".format(post_id, data['files']))
def write_attachments_info(self, path, attachments):
"""Write attachments info file."""
@@ -1118,8 +1141,8 @@ class CommandImportWordpress(Command, ImportMixin):
self.process_item_if_post_or_page(item)
# Assign attachments to posts
for post_id in self.attachments:
- LOGGER.warn(("Found attachments for post or page #{0}, but didn't find post or page. " +
- "(Attachments: {1})").format(post_id, [e['files'][0] for e in self.attachments[post_id].values()]))
+ LOGGER.warning(("Found attachments for post or page #{0}, but didn't find post or page. " +
+ "(Attachments: {1})").format(post_id, [e['files'][0] for e in self.attachments[post_id].values()]))
def get_text_tag(tag, name, default):
@@ -1133,15 +1156,20 @@ def get_text_tag(tag, name, default):
return default
-def separate_qtranslate_content(text):
- """Parse the content of a wordpress post or page and separate qtranslate languages.
+def separate_qtranslate_tagged_langs(text):
+ """Parse the content of a wordpress post or page and separate languages.
+
+ For qtranslateX tags: [:LL]blabla[:]
- qtranslate tags: <!--:LL-->blabla<!--:-->
+ Note: qtranslate* plugins had a troubled history and used various
+ tags over time; applying the 'modernize_qtranslate_tags' function
+ first is required for this function to handle most of the legacy
+ cases.
"""
- # TODO: uniformize qtranslate tags <!--/en--> => <!--:-->
- qt_start = "<!--:"
- qt_end = "-->"
- qt_end_with_lang_len = 5
+ qt_start = "[:"
+ qt_end = "]"
+ qt_end_len = len(qt_end)
+ qt_end_with_lang_len = qt_end_len + 2
qt_chunks = text.split(qt_start)
content_by_lang = {}
common_txt_list = []
@@ -1153,9 +1181,9 @@ def separate_qtranslate_content(text):
# be some piece of common text or tags, or just nothing
lang = "" # default language
c = c.lstrip(qt_end)
- if not c:
+ if not c.strip():
continue
- elif c[2:].startswith(qt_end):
+ elif c[2:qt_end_with_lang_len].startswith(qt_end):
# a language specific section (with language code at the begining)
lang = c[:2]
c = c[qt_end_with_lang_len:]
@@ -1176,3 +1204,26 @@ def separate_qtranslate_content(text):
for l in content_by_lang.keys():
content_by_lang[l] = " ".join(content_by_lang[l])
return content_by_lang
+
+
+def modernize_qtranslate_tags(xml_bytes):
+ """
+ Uniformize the "tag" used by various version of qtranslate.
+
+ The resulting byte string will only contain one set of qtranslate tags
+ (namely [:LG] and [:]), older ones being converted to new ones.
+ """
+ old_start_lang = re.compile(b"<!--:?(\\w{2})-->")
+ new_start_lang = b"[:\\1]"
+ old_end_lang = re.compile(b"<!--(/\\w{2}|:)-->")
+ new_end_lang = b"[:]"
+ title_match = re.compile(b"<title>(.*?)</title>")
+ modern_starts = old_start_lang.sub(new_start_lang, xml_bytes)
+ modernized_bytes = old_end_lang.sub(new_end_lang, modern_starts)
+
+ def title_escape(match):
+ title = match.group(1)
+ title = title.replace(b"&", b"&amp;").replace(b"<", b"&lt;").replace(b">", b"&gt;")
+ return b"<title>" + title + b"</title>"
+ fixed_bytes = title_match.sub(title_escape, modernized_bytes)
+ return fixed_bytes
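
modernize_qtranslate_tags rewrites the older comment-style qtranslate markers into the modern square-bracket form before the XML is parsed, so separate_qtranslate_tagged_langs only ever has to deal with [:LL]…[:] sections. A worked example of the two substitutions on a tiny sample:

    import re

    old_start_lang = re.compile(b"<!--:?(\\w{2})-->")
    old_end_lang = re.compile(b"<!--(/\\w{2}|:)-->")

    sample = b"<!--:en-->Hello<!--:--><!--:fr-->Bonjour<!--:-->"
    modern = old_end_lang.sub(b"[:]", old_start_lang.sub(b"[:\\1]", sample))
    print(modern)  # b'[:en]Hello[:][:fr]Bonjour[:]'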
diff --git a/nikola/plugins/command/init.plugin b/nikola/plugins/command/init.plugin
index a8b1523..6ee27d3 100644
--- a/nikola/plugins/command/init.plugin
+++ b/nikola/plugins/command/init.plugin
@@ -9,5 +9,5 @@ website = https://getnikola.com/
description = Create a new site.
[Nikola]
-plugincategory = Command
+PluginCategory = Command
diff --git a/nikola/plugins/command/init.py b/nikola/plugins/command/init.py
index 3d6669c..0026edc 100644
--- a/nikola/plugins/command/init.py
+++ b/nikola/plugins/command/init.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,28 +26,28 @@
"""Create a new site."""
-from __future__ import print_function, unicode_literals
-import os
-import shutil
+import datetime
import io
import json
+import os
+import shutil
import textwrap
-import datetime
import unidecode
+from urllib.parse import urlsplit, urlunsplit
+
import dateutil.tz
import dateutil.zoneinfo
from mako.template import Template
from pkg_resources import resource_filename
-import tarfile
import nikola
-from nikola.nikola import DEFAULT_TRANSLATIONS_PATTERN, DEFAULT_INDEX_READ_MORE_LINK, DEFAULT_FEED_READ_MORE_LINK, LEGAL_VALUES, urlsplit, urlunsplit
+from nikola.nikola import DEFAULT_INDEX_READ_MORE_LINK, DEFAULT_FEED_READ_MORE_LINK, LEGAL_VALUES
from nikola.plugin_categories import Command
-from nikola.utils import ask, ask_yesno, get_logger, makedirs, STDERR_HANDLER, load_messages
+from nikola.utils import ask, ask_yesno, get_logger, makedirs, load_messages
from nikola.packages.tzlocal import get_localzone
-LOGGER = get_logger('init', STDERR_HANDLER)
+LOGGER = get_logger('init')
SAMPLE_CONF = {
'BLOG_AUTHOR': "Your Name",
@@ -55,50 +55,51 @@ SAMPLE_CONF = {
'SITE_URL': "https://example.com/",
'BLOG_EMAIL': "joe@demo.site",
'BLOG_DESCRIPTION': "This is a demo site for Nikola.",
- 'PRETTY_URLS': False,
- 'STRIP_INDEXES': False,
+ 'PRETTY_URLS': True,
+ 'STRIP_INDEXES': True,
'DEFAULT_LANG': "en",
'TRANSLATIONS': """{
DEFAULT_LANG: "",
# Example for another language:
# "es": "./es",
}""",
- 'THEME': 'bootstrap3',
+ 'THEME': LEGAL_VALUES['DEFAULT_THEME'],
'TIMEZONE': 'UTC',
'COMMENT_SYSTEM': 'disqus',
'COMMENT_SYSTEM_ID': 'nikolademo',
'CATEGORY_ALLOW_HIERARCHIES': False,
'CATEGORY_OUTPUT_FLAT_HIERARCHY': False,
- 'TRANSLATIONS_PATTERN': DEFAULT_TRANSLATIONS_PATTERN,
'INDEX_READ_MORE_LINK': DEFAULT_INDEX_READ_MORE_LINK,
'FEED_READ_MORE_LINK': DEFAULT_FEED_READ_MORE_LINK,
'POSTS': """(
("posts/*.rst", "posts", "post.tmpl"),
+ ("posts/*.md", "posts", "post.tmpl"),
("posts/*.txt", "posts", "post.tmpl"),
("posts/*.html", "posts", "post.tmpl"),
)""",
'PAGES': """(
- ("pages/*.rst", "pages", "story.tmpl"),
- ("pages/*.txt", "pages", "story.tmpl"),
- ("pages/*.html", "pages", "story.tmpl"),
+ ("pages/*.rst", "pages", "page.tmpl"),
+ ("pages/*.md", "pages", "page.tmpl"),
+ ("pages/*.txt", "pages", "page.tmpl"),
+ ("pages/*.html", "pages", "page.tmpl"),
)""",
'COMPILERS': """{
- "rest": ('.rst', '.txt'),
- "markdown": ('.md', '.mdown', '.markdown'),
- "textile": ('.textile',),
- "txt2tags": ('.t2t',),
- "bbcode": ('.bb',),
- "wiki": ('.wiki',),
- "ipynb": ('.ipynb',),
- "html": ('.html', '.htm'),
+ "rest": ['.rst', '.txt'],
+ "markdown": ['.md', '.mdown', '.markdown'],
+ "textile": ['.textile'],
+ "txt2tags": ['.t2t'],
+ "bbcode": ['.bb'],
+ "wiki": ['.wiki'],
+ "ipynb": ['.ipynb'],
+ "html": ['.html', '.htm'],
# PHP files are rendered the usual way (i.e. with the full templates).
# The resulting files have .php extensions, making it possible to run
# them without reconfiguring your server to recognize them.
- "php": ('.php',),
+ "php": ['.php'],
# Pandoc detects the input from the source filename
# but is disabled by default as it would conflict
# with many of the others.
- # "pandoc": ('.rst', '.md', '.txt'),
+ # "pandoc": ['.rst', '.md', '.txt'],
}""",
'NAVIGATION_LINKS': """{
DEFAULT_LANG: (
@@ -108,6 +109,7 @@ SAMPLE_CONF = {
),
}""",
'REDIRECTIONS': [],
+ '_METADATA_MAPPING_FORMATS': ', '.join(LEGAL_VALUES['METADATA_MAPPING'])
}
@@ -171,6 +173,14 @@ def format_default_translations_config(additional_languages):
return "{{\n{0}\n}}".format("\n".join(lang_paths))
+def get_default_translations_dict(default_lang, additional_languages):
+ """Generate a TRANSLATIONS dict matching the config from 'format_default_translations_config'."""
+ tr = {default_lang: ''}
+ for l in additional_languages:
+ tr[l] = './' + l
+ return tr
+
+
def format_navigation_links(additional_languages, default_lang, messages, strip_indexes=False):
"""Return the string to configure NAVIGATION_LINKS."""
f = u"""\
@@ -212,7 +222,7 @@ def prepare_config(config):
"""Parse sample config with JSON."""
p = config.copy()
p.update({k: json.dumps(v, ensure_ascii=False) for k, v in p.items()
- if k not in ('POSTS', 'PAGES', 'COMPILERS', 'TRANSLATIONS', 'NAVIGATION_LINKS', '_SUPPORTED_LANGUAGES', '_SUPPORTED_COMMENT_SYSTEMS', 'INDEX_READ_MORE_LINK', 'FEED_READ_MORE_LINK')})
+ if k not in ('POSTS', 'PAGES', 'COMPILERS', 'TRANSLATIONS', 'NAVIGATION_LINKS', '_SUPPORTED_LANGUAGES', '_SUPPORTED_COMMENT_SYSTEMS', 'INDEX_READ_MORE_LINK', 'FEED_READ_MORE_LINK', '_METADATA_MAPPING_FORMATS')})
# READ_MORE_LINKs require some special treatment.
p['INDEX_READ_MORE_LINK'] = "'" + p['INDEX_READ_MORE_LINK'].replace("'", "\\'") + "'"
p['FEED_READ_MORE_LINK'] = "'" + p['FEED_READ_MORE_LINK'].replace("'", "\\'") + "'"
@@ -285,7 +295,7 @@ class CommandInit(Command):
@classmethod
def create_empty_site(cls, target):
"""Create an empty site with directories only."""
- for folder in ('files', 'galleries', 'listings', 'posts', 'pages'):
+ for folder in ('files', 'galleries', 'images', 'listings', 'posts', 'pages'):
makedirs(os.path.join(target, folder))
@staticmethod
@@ -323,7 +333,6 @@ class CommandInit(Command):
def prettyhandler(default, toconf):
SAMPLE_CONF['PRETTY_URLS'] = ask_yesno('Enable pretty URLs (/page/ instead of /page.html) that don\'t need web server configuration?', default=True)
- SAMPLE_CONF['STRIP_INDEXES'] = SAMPLE_CONF['PRETTY_URLS']
def lhandler(default, toconf, show_header=True):
if show_header:
@@ -354,9 +363,8 @@ class CommandInit(Command):
# Get messages for navigation_links. In order to do this, we need
# to generate a throwaway TRANSLATIONS dict.
- tr = {default: ''}
- for l in langs:
- tr[l] = './' + l
+ tr = get_default_translations_dict(default, langs)
+
# Assuming that base contains all the locales, and that base does
# not inherit from anywhere.
try:
@@ -377,22 +385,22 @@ class CommandInit(Command):
while not answered:
try:
lz = get_localzone()
- except:
+ except Exception:
lz = None
answer = ask('Time zone', lz if lz else "UTC")
tz = dateutil.tz.gettz(answer)
if tz is None:
print(" WARNING: Time zone not found. Searching list of time zones for a match.")
- zonesfile = tarfile.open(fileobj=dateutil.zoneinfo.getzoneinfofile_stream())
- zonenames = [zone for zone in zonesfile.getnames() if answer.lower() in zone.lower()]
- if len(zonenames) == 1:
- tz = dateutil.tz.gettz(zonenames[0])
- answer = zonenames[0]
+ all_zones = dateutil.zoneinfo.get_zonefile_instance().zones
+ matching_zones = [zone for zone in all_zones if answer.lower() in zone.lower()]
+ if len(matching_zones) == 1:
+ tz = dateutil.tz.gettz(matching_zones[0])
+ answer = matching_zones[0]
print(" Picking '{0}'.".format(answer))
- elif len(zonenames) > 1:
+ elif len(matching_zones) > 1:
print(" The following time zones match your query:")
- print(' ' + '\n '.join(zonenames))
+ print(' ' + '\n '.join(matching_zones))
continue
if tz is not None:
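
get_default_translations_dict is the dict counterpart of format_default_translations_config: the default language maps to the site root ('') and every additional language to a './<lang>' subfolder, which is what the WordPress importer uses to build its per-post translation config. A quick sketch of its behaviour:

    def get_default_translations_dict(default_lang, additional_languages):
        tr = {default_lang: ''}
        for lang in additional_languages:
            tr[lang] = './' + lang
        return tr

    print(get_default_translations_dict('en', ['es', 'fr']))
    # {'en': '', 'es': './es', 'fr': './fr'}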
diff --git a/nikola/plugins/command/install_theme.plugin b/nikola/plugins/command/install_theme.plugin
deleted file mode 100644
index aa68773..0000000
--- a/nikola/plugins/command/install_theme.plugin
+++ /dev/null
@@ -1,13 +0,0 @@
-[Core]
-name = install_theme
-module = install_theme
-
-[Documentation]
-author = Roberto Alsina
-version = 1.0
-website = https://getnikola.com/
-description = Install a theme into the current site.
-
-[Nikola]
-plugincategory = Command
-
diff --git a/nikola/plugins/command/install_theme.py b/nikola/plugins/command/install_theme.py
deleted file mode 100644
index 28f7aa3..0000000
--- a/nikola/plugins/command/install_theme.py
+++ /dev/null
@@ -1,91 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright © 2012-2016 Roberto Alsina and others.
-
-# Permission is hereby granted, free of charge, to any
-# person obtaining a copy of this software and associated
-# documentation files (the "Software"), to deal in the
-# Software without restriction, including without limitation
-# the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the
-# Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice
-# shall be included in all copies or substantial portions of
-# the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
-# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
-# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
-# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
-# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-"""Install a theme."""
-
-from __future__ import print_function
-
-from nikola import utils
-from nikola.plugin_categories import Command
-LOGGER = utils.get_logger('install_theme', utils.STDERR_HANDLER)
-
-
-class CommandInstallTheme(Command):
- """Install a theme."""
-
- name = "install_theme"
- doc_usage = "[[-u] theme_name] | [[-u] -l]"
- doc_purpose = "install theme into current site"
- output_dir = 'themes'
- cmd_options = [
- {
- 'name': 'list',
- 'short': 'l',
- 'long': 'list',
- 'type': bool,
- 'default': False,
- 'help': 'Show list of available themes.'
- },
- {
- 'name': 'url',
- 'short': 'u',
- 'long': 'url',
- 'type': str,
- 'help': "URL for the theme repository (default: "
- "https://themes.getnikola.com/v7/themes.json)",
- 'default': 'https://themes.getnikola.com/v7/themes.json'
- },
- {
- 'name': 'getpath',
- 'short': 'g',
- 'long': 'get-path',
- 'type': bool,
- 'default': False,
- 'help': "Print the path for installed theme",
- },
- ]
-
- def _execute(self, options, args):
- """Install theme into current site."""
- p = self.site.plugin_manager.getPluginByName('theme', 'Command').plugin_object
- listing = options['list']
- url = options['url']
- if args:
- name = args[0]
- else:
- name = None
-
- if options['getpath'] and name:
- return p.get_path(name)
-
- if name is None and not listing:
- LOGGER.error("This command needs either a theme name or the -l option.")
- return False
-
- if listing:
- p.list_available(url)
- else:
- p.do_install_deps(url, name)
diff --git a/nikola/plugins/command/new_page.plugin b/nikola/plugins/command/new_page.plugin
index 3eaecb4..8734805 100644
--- a/nikola/plugins/command/new_page.plugin
+++ b/nikola/plugins/command/new_page.plugin
@@ -9,5 +9,5 @@ website = https://getnikola.com/
description = Create a new page.
[Nikola]
-plugincategory = Command
+PluginCategory = Command
diff --git a/nikola/plugins/command/new_page.py b/nikola/plugins/command/new_page.py
index c09b4be..0f7996a 100644
--- a/nikola/plugins/command/new_page.py
+++ b/nikola/plugins/command/new_page.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina, Chris Warrick and others.
+# Copyright © 2012-2020 Roberto Alsina, Chris Warrick and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,7 +26,6 @@
"""Create a new page."""
-from __future__ import unicode_literals, print_function
from nikola.plugin_categories import Command
@@ -107,6 +106,7 @@ class CommandNewPage(Command):
options['tags'] = ''
options['schedule'] = False
options['is_page'] = True
+ options['date-path'] = False
# Even though stuff was split into `new_page`, it’s easier to do it
# there not to duplicate the code.
p = self.site.plugin_manager.getPluginByName('new_post', 'Command').plugin_object
diff --git a/nikola/plugins/command/new_post.plugin b/nikola/plugins/command/new_post.plugin
index e9c3af5..efdeb58 100644
--- a/nikola/plugins/command/new_post.plugin
+++ b/nikola/plugins/command/new_post.plugin
@@ -9,5 +9,5 @@ website = https://getnikola.com/
description = Create a new post.
[Nikola]
-plugincategory = Command
+PluginCategory = Command
diff --git a/nikola/plugins/command/new_post.py b/nikola/plugins/command/new_post.py
index 36cc04f..e6eabbd 100644
--- a/nikola/plugins/command/new_post.py
+++ b/nikola/plugins/command/new_post.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,7 +26,6 @@
"""Create a new post."""
-from __future__ import unicode_literals, print_function
import io
import datetime
import operator
@@ -35,15 +34,15 @@ import shutil
import subprocess
import sys
-from blinker import signal
import dateutil.tz
+from blinker import signal
from nikola.plugin_categories import Command
from nikola import utils
COMPILERS_DOC_LINK = 'https://getnikola.com/handbook.html#configuring-other-input-formats'
-POSTLOGGER = utils.get_logger('new_post', utils.STDERR_HANDLER)
-PAGELOGGER = utils.get_logger('new_page', utils.STDERR_HANDLER)
+POSTLOGGER = utils.get_logger('new_post')
+PAGELOGGER = utils.get_logger('new_page')
LOGGER = POSTLOGGER
@@ -90,7 +89,7 @@ def get_date(schedule=False, rule=None, last_date=None, tz=None, iso8601=False):
except ImportError:
LOGGER.error('To use the --schedule switch of new_post, '
'you have to install the "dateutil" package.')
- rrule = None # NOQA
+ rrule = None
if schedule and rrule and rule:
try:
rule_ = rrule.rrulestr(rule, dtstart=last_date or date)
@@ -111,7 +110,7 @@ def get_date(schedule=False, rule=None, last_date=None, tz=None, iso8601=False):
else:
tz_str = ' UTC'
- return date.strftime('%Y-%m-%d %H:%M:%S') + tz_str
+ return (date.strftime('%Y-%m-%d %H:%M:%S') + tz_str, date)
class CommandNewPost(Command):
@@ -204,7 +203,14 @@ class CommandNewPost(Command):
'default': '',
'help': 'Import an existing file instead of creating a placeholder'
},
-
+ {
+ 'name': 'date-path',
+ 'short': 'd',
+ 'long': 'date-path',
+ 'type': bool,
+ 'default': False,
+ 'help': 'Create post with date path (e.g. year/month/day, see NEW_POST_DATE_PATH_FORMAT in config)'
+ },
]
def _execute(self, options, args):
@@ -234,6 +240,10 @@ class CommandNewPost(Command):
twofile = options['twofile']
import_file = options['import']
wants_available = options['available-formats']
+ date_path_opt = options['date-path']
+ date_path_auto = self.site.config['NEW_POST_DATE_PATH'] and content_type == 'post'
+ date_path_format = self.site.config['NEW_POST_DATE_PATH_FORMAT'].strip('/')
+ post_type = options.get('type', 'text')
if wants_available:
self.print_compilers()
@@ -255,16 +265,39 @@ class CommandNewPost(Command):
if "@" in content_format:
content_format, content_subformat = content_format.split("@")
- if not content_format: # Issue #400
+ if not content_format and path and not os.path.isdir(path):
+ # content_format not specified. If path was given, use
+ # it to guess (Issue #2798)
+ extension = os.path.splitext(path)[-1]
+ for compiler, extensions in self.site.config['COMPILERS'].items():
+ if extension in extensions:
+ content_format = compiler
+ if not content_format:
+ LOGGER.error("Unknown {0} extension {1}, maybe you need to install a plugin or enable an existing one?".format(content_type, extension))
+ return
+
+ elif not content_format and import_file:
+ # content_format not specified. If import_file was given, use
+ # it to guess (Issue #2798)
+ extension = os.path.splitext(import_file)[-1]
+ for compiler, extensions in self.site.config['COMPILERS'].items():
+ if extension in extensions:
+ content_format = compiler
+ if not content_format:
+ LOGGER.error("Unknown {0} extension {1}, maybe you need to install a plugin or enable an existing one?".format(content_type, extension))
+ return
+
+ elif not content_format: # Issue #400
content_format = get_default_compiler(
is_post,
self.site.config['COMPILERS'],
self.site.config['post_pages'])
- if content_format not in compiler_names:
- LOGGER.error("Unknown {0} format {1}, maybe you need to install a plugin?".format(content_type, content_format))
+ elif content_format not in compiler_names:
+ LOGGER.error("Unknown {0} format {1}, maybe you need to install a plugin or enable an existing one?".format(content_type, content_format))
self.print_compilers()
return
+
compiler_plugin = self.site.plugin_manager.getPluginByName(
content_format, "PageCompiler").plugin_object
@@ -286,7 +319,7 @@ class CommandNewPost(Command):
while not title:
title = utils.ask('Title')
- if isinstance(title, utils.bytes_str):
+ if isinstance(title, bytes):
try:
title = title.decode(sys.stdin.encoding)
except (AttributeError, TypeError): # for tests
@@ -296,26 +329,34 @@ class CommandNewPost(Command):
if not path:
slug = utils.slugify(title, lang=self.site.default_lang)
else:
- if isinstance(path, utils.bytes_str):
+ if isinstance(path, bytes):
try:
path = path.decode(sys.stdin.encoding)
except (AttributeError, TypeError): # for tests
path = path.decode('utf-8')
- slug = utils.slugify(os.path.splitext(os.path.basename(path))[0], lang=self.site.default_lang)
+ if os.path.isdir(path):
+ # If the user provides a directory, add the file name generated from title (Issue #2651)
+ slug = utils.slugify(title, lang=self.site.default_lang)
+ pattern = os.path.basename(entry[0])
+ suffix = pattern[1:]
+ path = os.path.join(path, slug + suffix)
+ else:
+ slug = utils.slugify(os.path.splitext(os.path.basename(path))[0], lang=self.site.default_lang)
- if isinstance(author, utils.bytes_str):
- try:
- author = author.decode(sys.stdin.encoding)
- except (AttributeError, TypeError): # for tests
- author = author.decode('utf-8')
+ if isinstance(author, bytes):
+ try:
+ author = author.decode(sys.stdin.encoding)
+ except (AttributeError, TypeError): # for tests
+ author = author.decode('utf-8')
# Calculate the date to use for the content
- schedule = options['schedule'] or self.site.config['SCHEDULE_ALL']
+ # SCHEDULE_ALL is post-only (Issue #2921)
+ schedule = options['schedule'] or (self.site.config['SCHEDULE_ALL'] and is_post)
rule = self.site.config['SCHEDULE_RULE']
self.site.scan_posts()
timeline = self.site.timeline
last_date = None if not timeline else timeline[0].date
- date = get_date(schedule, rule, last_date, self.site.tzinfo, self.site.config['FORCE_ISO8601'])
+ date, dateobj = get_date(schedule, rule, last_date, self.site.tzinfo, self.site.config['FORCE_ISO8601'])
data = {
'title': title,
'slug': slug,
@@ -323,17 +364,21 @@ class CommandNewPost(Command):
'tags': tags,
'link': '',
'description': '',
- 'type': 'text',
+ 'type': post_type,
}
if not path:
pattern = os.path.basename(entry[0])
suffix = pattern[1:]
output_path = os.path.dirname(entry[0])
+ if date_path_auto or date_path_opt:
+ output_path += os.sep + dateobj.strftime(date_path_format)
txt_path = os.path.join(output_path, slug + suffix)
meta_path = os.path.join(output_path, slug + ".meta")
else:
+ if date_path_opt:
+ LOGGER.warning("A path has been specified, ignoring -d")
txt_path = os.path.join(self.site.original_cwd, path)
meta_path = os.path.splitext(txt_path)[0] + ".meta"
@@ -360,18 +405,18 @@ class CommandNewPost(Command):
metadata.update(self.site.config['ADDITIONAL_METADATA'])
data.update(metadata)
- # ipynb plugin needs the ipython kernel info. We get the kernel name
+ # ipynb plugin needs the Jupyter kernel info. We get the kernel name
# from the content_subformat and pass it to the compiler in the metadata
if content_format == "ipynb" and content_subformat is not None:
- metadata["ipython_kernel"] = content_subformat
+ metadata["jupyter_kernel"] = content_subformat
# Override onefile if not really supported.
if not compiler_plugin.supports_onefile and onefile:
onefile = False
- LOGGER.warn('This compiler does not support one-file posts.')
+ LOGGER.warning('This compiler does not support one-file posts.')
if onefile and import_file:
- with io.open(import_file, 'r', encoding='utf-8') as fh:
+ with io.open(import_file, 'r', encoding='utf-8-sig') as fh:
content = fh.read()
elif not import_file:
if is_page:
@@ -385,13 +430,13 @@ class CommandNewPost(Command):
else:
compiler_plugin.create_post(
txt_path, content=content, onefile=onefile, title=title,
- slug=slug, date=date, tags=tags, is_page=is_page, **metadata)
+ slug=slug, date=date, tags=tags, is_page=is_page, type=post_type, **metadata)
event = dict(path=txt_path)
if not onefile: # write metadata file
with io.open(meta_path, "w+", encoding="utf8") as fd:
- fd.write(utils.write_metadata(data))
+ fd.write(utils.write_metadata(data, comment_wrap=False, site=self.site))
LOGGER.info("Your {0}'s metadata is at: {1}".format(content_type, meta_path))
event['meta_path'] = meta_path
LOGGER.info("Your {0}'s text is at: {1}".format(content_type, txt_path))
@@ -406,7 +451,7 @@ class CommandNewPost(Command):
if editor:
subprocess.call(to_run)
else:
- LOGGER.error('$EDITOR not set, cannot edit the post. Please do it manually.')
+ LOGGER.error('The $EDITOR environment variable is not set, cannot edit the post with \'-e\'. Please edit the post manually.')
def filter_post_pages(self, compiler, is_post):
"""Return the correct entry from post_pages.
@@ -523,6 +568,6 @@ class CommandNewPost(Command):
More compilers are available in the Plugins Index.
Compilers marked with ! and ~ require additional configuration:
- ! not in the PAGES/POSTS tuples (unused)
+ ! not in the POSTS/PAGES tuples or any post scanner (unused)
~ not in the COMPILERS dict (disabled)
Read more: {0}""".format(COMPILERS_DOC_LINK))
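
The two new branches above perform the same lookup: map the extension of the given path or import file back to a compiler name through the COMPILERS setting (Issue #2798). A minimal standalone sketch of that lookup, assuming a COMPILERS dict of the usual shape (compiler name mapped to a tuple of extensions); guess_compiler is an illustrative name, not Nikola API:

    import os

    def guess_compiler(path, compilers):
        """Return the compiler whose extension list matches the file, or None."""
        extension = os.path.splitext(path)[-1]
        for compiler, extensions in compilers.items():
            if extension in extensions:
                return compiler
        return None

    # Illustrative usage:
    # guess_compiler('posts/hello.md', {'markdown': ('.md', '.markdown'), 'rest': ('.rst',)})
    # -> 'markdown'
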
diff --git a/nikola/plugins/command/orphans.plugin b/nikola/plugins/command/orphans.plugin
index d20c539..5107032 100644
--- a/nikola/plugins/command/orphans.plugin
+++ b/nikola/plugins/command/orphans.plugin
@@ -9,5 +9,5 @@ website = https://getnikola.com/
description = List all orphans
[Nikola]
-plugincategory = Command
+PluginCategory = Command
diff --git a/nikola/plugins/command/orphans.py b/nikola/plugins/command/orphans.py
index 5e2574d..0cf2e63 100644
--- a/nikola/plugins/command/orphans.py
+++ b/nikola/plugins/command/orphans.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina, Chris Warrick and others.
+# Copyright © 2012-2020 Roberto Alsina, Chris Warrick and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,7 +26,6 @@
"""List all orphans."""
-from __future__ import print_function
import os
from nikola.plugin_categories import Command
diff --git a/nikola/plugins/command/plugin.plugin b/nikola/plugins/command/plugin.plugin
index 016bcaa..db99ceb 100644
--- a/nikola/plugins/command/plugin.plugin
+++ b/nikola/plugins/command/plugin.plugin
@@ -9,5 +9,5 @@ website = https://getnikola.com/
description = Manage Nikola plugins
[Nikola]
-plugincategory = Command
+PluginCategory = Command
diff --git a/nikola/plugins/command/plugin.py b/nikola/plugins/command/plugin.py
index 364f343..33dee23 100644
--- a/nikola/plugins/command/plugin.py
+++ b/nikola/plugins/command/plugin.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,8 +26,8 @@
"""Manage plugins."""
-from __future__ import print_function
import io
+import json.decoder
import os
import sys
import shutil
@@ -42,7 +42,7 @@ from pygments.formatters import TerminalFormatter
from nikola.plugin_categories import Command
from nikola import utils
-LOGGER = utils.get_logger('plugin', utils.STDERR_HANDLER)
+LOGGER = utils.get_logger('plugin')
class CommandPlugin(Command):
@@ -84,9 +84,8 @@ class CommandPlugin(Command):
'short': 'u',
'long': 'url',
'type': str,
- 'help': "URL for the plugin repository (default: "
- "https://plugins.getnikola.com/v7/plugins.json)",
- 'default': 'https://plugins.getnikola.com/v7/plugins.json'
+ 'help': "URL for the plugin repository",
+ 'default': 'https://plugins.getnikola.com/v8/plugins.json'
},
{
'name': 'user',
@@ -137,11 +136,11 @@ class CommandPlugin(Command):
self.output_dir = options.get('output_dir')
else:
if not self.site.configured and not user_mode and install:
- LOGGER.notice('No site found, assuming --user')
+ LOGGER.warning('No site found, assuming --user')
user_mode = True
if user_mode:
- self.output_dir = os.path.expanduser('~/.nikola/plugins')
+ self.output_dir = os.path.expanduser(os.path.join('~', '.nikola', 'plugins'))
else:
self.output_dir = 'plugins'
@@ -179,9 +178,18 @@ class CommandPlugin(Command):
plugins.sort()
print('Installed Plugins:')
print('------------------')
+ maxlength = max(len(i[0]) for i in plugins)
+ if self.site.colorful:
+ formatstring = '\x1b[1m{0:<{2}}\x1b[0m at {1}'
+ else:
+ formatstring = '{0:<{2}} at {1}'
for name, path in plugins:
- print('{0} at {1}'.format(name, path))
- print('\n\nAlso, you have disabled these plugins: {}'.format(self.site.config['DISABLED_PLUGINS']))
+ print(formatstring.format(name, path, maxlength))
+ dp = self.site.config['DISABLED_PLUGINS']
+ if dp:
+ print('\n\nAlso, you have disabled these plugins: {}'.format(', '.join(dp)))
+ else:
+ print('\n\nNo plugins are disabled.')
return 0
def do_upgrade(self, url):
@@ -235,43 +243,32 @@ class CommandPlugin(Command):
utils.extract_all(zip_file, self.output_dir)
dest_path = os.path.join(self.output_dir, name)
else:
- try:
- plugin_path = utils.get_plugin_path(name)
- except:
- LOGGER.error("Can't find plugin " + name)
- return 1
-
- utils.makedirs(self.output_dir)
- dest_path = os.path.join(self.output_dir, name)
- if os.path.exists(dest_path):
- LOGGER.error("{0} is already installed".format(name))
- return 1
-
- LOGGER.info('Copying {0} into plugins'.format(plugin_path))
- shutil.copytree(plugin_path, dest_path)
+ LOGGER.error("Can't find plugin " + name)
+ return 1
reqpath = os.path.join(dest_path, 'requirements.txt')
if os.path.exists(reqpath):
- LOGGER.notice('This plugin has Python dependencies.')
+ LOGGER.warning('This plugin has Python dependencies.')
LOGGER.info('Installing dependencies with pip...')
try:
subprocess.check_call((sys.executable, '-m', 'pip', 'install', '-r', reqpath))
except subprocess.CalledProcessError:
LOGGER.error('Could not install the dependencies.')
print('Contents of the requirements.txt file:\n')
- with io.open(reqpath, 'r', encoding='utf-8') as fh:
+ with io.open(reqpath, 'r', encoding='utf-8-sig') as fh:
print(utils.indent(fh.read(), 4 * ' '))
print('You have to install those yourself or through a '
'package manager.')
else:
LOGGER.info('Dependency installation succeeded.')
+
reqnpypath = os.path.join(dest_path, 'requirements-nonpy.txt')
if os.path.exists(reqnpypath):
- LOGGER.notice('This plugin has third-party '
- 'dependencies you need to install '
- 'manually.')
+ LOGGER.warning('This plugin has third-party '
+ 'dependencies you need to install '
+ 'manually.')
print('Contents of the requirements-nonpy.txt file:\n')
- with io.open(reqnpypath, 'r', encoding='utf-8') as fh:
+ with io.open(reqnpypath, 'r', encoding='utf-8-sig') as fh:
for l in fh.readlines():
i, j = l.split('::')
print(utils.indent(i.strip(), 4 * ' '))
@@ -280,17 +277,36 @@ class CommandPlugin(Command):
print('You have to install those yourself or through a package '
'manager.')
+
+ req_plug_path = os.path.join(dest_path, 'requirements-plugins.txt')
+ if os.path.exists(req_plug_path):
+ LOGGER.info('This plugin requires other Nikola plugins.')
+ LOGGER.info('Installing plugins...')
+ plugin_failure = False
+ try:
+ with io.open(req_plug_path, 'r', encoding='utf-8-sig') as inf:
+ for plugname in inf.readlines():
+ plugin_failure = self.do_install(url, plugname.strip(), show_install_notes) != 0
+ except Exception:
+ plugin_failure = True
+ if plugin_failure:
+ LOGGER.error('Could not install a plugin.')
+ print('Contents of the requirements-plugins.txt file:\n')
+ with io.open(req_plug_path, 'r', encoding='utf-8-sig') as fh:
+ print(utils.indent(fh.read(), 4 * ' '))
+ print('You have to install those yourself manually.')
+ else:
+ LOGGER.info('Dependency installation succeeded.')
+
confpypath = os.path.join(dest_path, 'conf.py.sample')
if os.path.exists(confpypath) and show_install_notes:
- LOGGER.notice('This plugin has a sample config file. Integrate it with yours in order to make this plugin work!')
+ LOGGER.warning('This plugin has a sample config file. Integrate it with yours in order to make this plugin work!')
print('Contents of the conf.py.sample file:\n')
- with io.open(confpypath, 'r', encoding='utf-8') as fh:
+ with io.open(confpypath, 'r', encoding='utf-8-sig') as fh:
if self.site.colorful:
- print(utils.indent(pygments.highlight(
- fh.read(), PythonLexer(), TerminalFormatter()),
- 4 * ' '))
+ print(pygments.highlight(fh.read(), PythonLexer(), TerminalFormatter()))
else:
- print(utils.indent(fh.read(), 4 * ' '))
+ print(fh.read())
return 0
def do_uninstall(self, name):
@@ -320,10 +336,19 @@ class CommandPlugin(Command):
"""Download the JSON file with all plugins."""
if self.json is None:
try:
- self.json = requests.get(url).json()
- except requests.exceptions.SSLError:
- LOGGER.warning("SSL error, using http instead of https (press ^C to abort)")
- time.sleep(1)
- url = url.replace('https', 'http', 1)
- self.json = requests.get(url).json()
+ try:
+ self.json = requests.get(url).json()
+ except requests.exceptions.SSLError:
+ LOGGER.warning("SSL error, using http instead of https (press ^C to abort)")
+ time.sleep(1)
+ url = url.replace('https', 'http', 1)
+ self.json = requests.get(url).json()
+ except json.decoder.JSONDecodeError as e:
+ LOGGER.error("Failed to decode JSON data in response from server.")
+ LOGGER.error("JSON error encountered: " + str(e))
+ LOGGER.error("This issue might be caused by server-side issues, or by unusual activity in your "
+ "network (as determined by CloudFlare). Please visit https://plugins.getnikola.com/ in "
+ "a browser.")
+ sys.exit(2)
+
return self.json
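
The reworked get_json keeps the HTTPS-to-HTTP retry but wraps it in an outer try so that a non-JSON response (for example a CloudFlare challenge page) produces a readable error instead of a traceback. A minimal sketch of the same pattern outside the Command class, assuming the requests library; fetch_index is an illustrative name:

    import json
    import sys
    import time

    import requests

    def fetch_index(url):
        """Fetch a JSON index, downgrading to HTTP on SSL errors (sketch)."""
        try:
            try:
                return requests.get(url).json()
            except requests.exceptions.SSLError:
                print("SSL error, retrying over http", file=sys.stderr)
                time.sleep(1)
                return requests.get(url.replace('https', 'http', 1)).json()
        except json.decoder.JSONDecodeError as e:
            print("Failed to decode JSON data in response from server: " + str(e), file=sys.stderr)
            sys.exit(2)
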
diff --git a/nikola/plugins/command/rst2html.plugin b/nikola/plugins/command/rst2html.plugin
index a095705..6f2fb25 100644
--- a/nikola/plugins/command/rst2html.plugin
+++ b/nikola/plugins/command/rst2html.plugin
@@ -9,5 +9,5 @@ website = https://getnikola.com/
description = Compile reStructuredText to HTML using the Nikola architecture
[Nikola]
-plugincategory = Command
+PluginCategory = Command
diff --git a/nikola/plugins/command/rst2html/__init__.py b/nikola/plugins/command/rst2html/__init__.py
index c877f63..5576b35 100644
--- a/nikola/plugins/command/rst2html/__init__.py
+++ b/nikola/plugins/command/rst2html/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2015-2016 Chris Warrick and others.
+# Copyright © 2015-2020 Chris Warrick and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,7 +26,6 @@
"""Compile reStructuredText to HTML, using Nikola architecture."""
-from __future__ import unicode_literals, print_function
import io
import lxml.html
@@ -50,12 +49,12 @@ class CommandRst2Html(Command):
print("This command takes only one argument (input file name).")
return 2
source = args[0]
- with io.open(source, "r", encoding="utf8") as in_file:
+ with io.open(source, "r", encoding="utf-8-sig") as in_file:
data = in_file.read()
- output, error_level, deps = compiler.compile_html_string(data, source, True)
+ output, error_level, deps, shortcode_deps = compiler.compile_string(data, source, True)
- rstcss_path = resource_filename('nikola', 'data/themes/base/assets/css/rst.css')
- with io.open(rstcss_path, "r", encoding="utf8") as fh:
+ rstcss_path = resource_filename('nikola', 'data/themes/base/assets/css/rst_base.css')
+ with io.open(rstcss_path, "r", encoding="utf-8-sig") as fh:
rstcss = fh.read()
template_path = resource_filename('nikola', 'plugins/command/rst2html/rst2html.tmpl')
diff --git a/nikola/plugins/command/serve.plugin b/nikola/plugins/command/serve.plugin
index a4a726f..aa40073 100644
--- a/nikola/plugins/command/serve.plugin
+++ b/nikola/plugins/command/serve.plugin
@@ -9,5 +9,5 @@ website = https://getnikola.com/
description = Start test server.
[Nikola]
-plugincategory = Command
+PluginCategory = Command
diff --git a/nikola/plugins/command/serve.py b/nikola/plugins/command/serve.py
index c9702d5..ede5179 100644
--- a/nikola/plugins/command/serve.py
+++ b/nikola/plugins/command/serve.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,26 +26,18 @@
"""Start test server."""
-from __future__ import print_function
import os
+import sys
import re
+import signal
import socket
import webbrowser
-try:
- from BaseHTTPServer import HTTPServer
- from SimpleHTTPServer import SimpleHTTPRequestHandler
-except ImportError:
- from http.server import HTTPServer # NOQA
- from http.server import SimpleHTTPRequestHandler # NOQA
-
-try:
- from StringIO import StringIO
-except ImportError:
- from io import BytesIO as StringIO # NOQA
-
+from http.server import HTTPServer
+from http.server import SimpleHTTPRequestHandler
+from io import BytesIO as StringIO
from nikola.plugin_categories import Command
-from nikola.utils import dns_sd, get_logger, STDERR_HANDLER
+from nikola.utils import dns_sd
class IPv6Server(HTTPServer):
@@ -60,7 +52,6 @@ class CommandServe(Command):
name = "serve"
doc_usage = "[options]"
doc_purpose = "start the test webserver"
- logger = None
dns_sd = None
cmd_options = (
@@ -70,7 +61,7 @@ class CommandServe(Command):
'long': 'port',
'default': 8000,
'type': int,
- 'help': 'Port number (default: 8000)',
+ 'help': 'Port number',
},
{
'name': 'address',
@@ -78,7 +69,7 @@ class CommandServe(Command):
'long': 'address',
'type': str,
'default': '',
- 'help': 'Address to bind (default: 0.0.0.0 -- all local IPv4 interfaces)',
+ 'help': 'Address to bind, defaults to all local IPv4 interfaces',
},
{
'name': 'detach',
@@ -106,13 +97,24 @@ class CommandServe(Command):
},
)
+ def shutdown(self, signum=None, _frame=None):
+ """Shut down the server that is running detached."""
+ if self.dns_sd:
+ self.dns_sd.Reset()
+ if os.path.exists(self.serve_pidfile):
+ os.remove(self.serve_pidfile)
+ if not self.detached:
+ self.logger.info("Server is shutting down.")
+ if signum:
+ sys.exit(0)
+
def _execute(self, options, args):
"""Start test server."""
- self.logger = get_logger('serve', STDERR_HANDLER)
out_dir = self.site.config['OUTPUT_FOLDER']
if not os.path.isdir(out_dir):
self.logger.error("Missing '{0}' folder?".format(out_dir))
else:
+ self.serve_pidfile = os.path.abspath('nikolaserve.pid')
os.chdir(out_dir)
if '[' in options['address']:
options['address'] = options['address'].strip('[').strip(']')
@@ -128,35 +130,43 @@ class CommandServe(Command):
httpd = OurHTTP((options['address'], options['port']),
OurHTTPRequestHandler)
sa = httpd.socket.getsockname()
- self.logger.info("Serving HTTP on {0} port {1}...".format(*sa))
+ if ipv6:
+ server_url = "http://[{0}]:{1}/".format(*sa)
+ else:
+ server_url = "http://{0}:{1}/".format(*sa)
+ self.logger.info("Serving on {0} ...".format(server_url))
+
if options['browser']:
- if ipv6:
- server_url = "http://[{0}]:{1}/".format(*sa)
- else:
- server_url = "http://{0}:{1}/".format(*sa)
+ # Some browsers fail to load 0.0.0.0 (Issue #2755)
+ if sa[0] == '0.0.0.0':
+ server_url = "http://127.0.0.1:{1}/".format(*sa)
self.logger.info("Opening {0} in the default web browser...".format(server_url))
webbrowser.open(server_url)
if options['detach']:
+ self.detached = True
OurHTTPRequestHandler.quiet = True
try:
pid = os.fork()
if pid == 0:
+ signal.signal(signal.SIGTERM, self.shutdown)
httpd.serve_forever()
else:
- self.logger.info("Detached with PID {0}. Run `kill {0}` to stop the server.".format(pid))
- except AttributeError as e:
+ with open(self.serve_pidfile, 'w') as fh:
+ fh.write('{0}\n'.format(pid))
+ self.logger.info("Detached with PID {0}. Run `kill {0}` or `kill $(cat nikolaserve.pid)` to stop the server.".format(pid))
+ except AttributeError:
if os.name == 'nt':
self.logger.warning("Detaching is not available on Windows, server is running in the foreground.")
else:
- raise e
+ raise
else:
+ self.detached = False
try:
self.dns_sd = dns_sd(options['port'], (options['ipv6'] or '::' in options['address']))
+ signal.signal(signal.SIGTERM, self.shutdown)
httpd.serve_forever()
except KeyboardInterrupt:
- self.logger.info("Server is shutting down.")
- if self.dns_sd:
- self.dns_sd.Reset()
+ self.shutdown()
return 130
@@ -172,8 +182,7 @@ class OurHTTPRequestHandler(SimpleHTTPRequestHandler):
if self.quiet:
return
else:
- # Old-style class in Python 2.7, cannot use super()
- return SimpleHTTPRequestHandler.log_message(self, *args)
+ return super().log_message(*args)
# NOTICE: this is a patched version of send_head() to disable all sorts of
# caching. `nikola serve` is a development server, hence caching should
@@ -185,9 +194,9 @@ class OurHTTPRequestHandler(SimpleHTTPRequestHandler):
# Note that it might break in future versions of Python, in which case we
# would need to do even more magic.
def send_head(self):
- """Common code for GET and HEAD commands.
+ """Send response code and MIME header.
- This sends the response code and MIME headers.
+ This is common code for GET and HEAD commands.
Return value is either a file object (which has to be copied
to the outputfile by the caller unless the command was HEAD,
@@ -198,10 +207,12 @@ class OurHTTPRequestHandler(SimpleHTTPRequestHandler):
path = self.translate_path(self.path)
f = None
if os.path.isdir(path):
- if not self.path.endswith('/'):
+ path_parts = list(self.path.partition('?'))
+ if not path_parts[0].endswith('/'):
# redirect browser - doing basically what apache does
+ path_parts[0] += '/'
self.send_response(301)
- self.send_header("Location", self.path + "/")
+ self.send_header("Location", ''.join(path_parts))
# begin no-cache patch
# For redirects. With redirects, caching is even worse and can
# break more. Especially with 301 Moved Permanently redirects,
@@ -227,7 +238,7 @@ class OurHTTPRequestHandler(SimpleHTTPRequestHandler):
# transmitted *less* than the content-length!
f = open(path, 'rb')
except IOError:
- self.send_error(404, "File not found")
+ self.send_error(404, "File not found: {}".format(path))
return None
filtered_bytes = None
@@ -235,7 +246,7 @@ class OurHTTPRequestHandler(SimpleHTTPRequestHandler):
# Comment out any <base> to allow local resolution of relative URLs.
data = f.read().decode('utf8')
f.close()
- data = re.sub(r'<base\s([^>]*)>', '<!--base \g<1>-->', data, re.IGNORECASE)
+ data = re.sub(r'<base\s([^>]*)>', r'<!--base \g<1>-->', data, flags=re.IGNORECASE)
data = data.encode('utf8')
f = StringIO()
f.write(data)
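
The detached server now records its PID in nikolaserve.pid and installs a SIGTERM handler, so `kill $(cat nikolaserve.pid)` cleans up the pidfile on shutdown. A minimal standalone sketch of that fork/pidfile/signal pattern (POSIX only, as the plugin's Windows warning implies); the port and paths are illustrative:

    import os
    import signal
    import sys
    from http.server import HTTPServer, SimpleHTTPRequestHandler

    PIDFILE = os.path.abspath('nikolaserve.pid')

    def shutdown(signum=None, _frame=None):
        """Remove the pidfile and exit when the detached server gets SIGTERM."""
        if os.path.exists(PIDFILE):
            os.remove(PIDFILE)
        if signum:
            sys.exit(0)

    httpd = HTTPServer(('127.0.0.1', 8000), SimpleHTTPRequestHandler)
    pid = os.fork()  # not available on Windows
    if pid == 0:
        signal.signal(signal.SIGTERM, shutdown)
        httpd.serve_forever()
    else:
        with open(PIDFILE, 'w') as fh:
            fh.write('{0}\n'.format(pid))
        print("Detached with PID {0}. Run `kill {0}` to stop the server.".format(pid))
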
diff --git a/nikola/plugins/command/status.plugin b/nikola/plugins/command/status.plugin
index 91390d2..7e2bd96 100644
--- a/nikola/plugins/command/status.plugin
+++ b/nikola/plugins/command/status.plugin
@@ -9,5 +9,5 @@ website = https://getnikola.com
description = Site status
[Nikola]
-plugincategory = Command
+PluginCategory = Command
diff --git a/nikola/plugins/command/status.py b/nikola/plugins/command/status.py
index b3ffbb4..c96d13f 100644
--- a/nikola/plugins/command/status.py
+++ b/nikola/plugins/command/status.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,7 +26,6 @@
"""Display site status."""
-from __future__ import print_function
import os
from datetime import datetime
from dateutil.tz import gettz, tzlocal
diff --git a/nikola/plugins/command/bootswatch_theme.plugin b/nikola/plugins/command/subtheme.plugin
index 51e6718..d377e22 100644
--- a/nikola/plugins/command/bootswatch_theme.plugin
+++ b/nikola/plugins/command/subtheme.plugin
@@ -1,13 +1,13 @@
[Core]
-name = bootswatch_theme
-module = bootswatch_theme
+name = subtheme
+module = subtheme
[Documentation]
author = Roberto Alsina
-version = 1.0
+version = 1.1
website = https://getnikola.com/
-description = Given a swatch name and a parent theme, creates a custom theme.
+description = Given a swatch name and a parent theme, creates a custom subtheme.
[Nikola]
-plugincategory = Command
+PluginCategory = Command
diff --git a/nikola/plugins/command/subtheme.py b/nikola/plugins/command/subtheme.py
new file mode 100644
index 0000000..554a241
--- /dev/null
+++ b/nikola/plugins/command/subtheme.py
@@ -0,0 +1,150 @@
+# -*- coding: utf-8 -*-
+
+# Copyright © 2012-2020 Roberto Alsina and others.
+
+# Permission is hereby granted, free of charge, to any
+# person obtaining a copy of this software and associated
+# documentation files (the "Software"), to deal in the
+# Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the
+# Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice
+# shall be included in all copies or substantial portions of
+# the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
+# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+"""Given a swatch name from bootswatch.com or hackerthemes.com and a parent theme, creates a custom theme."""
+
+import configparser
+import os
+
+import requests
+
+from nikola import utils
+from nikola.plugin_categories import Command
+
+LOGGER = utils.get_logger('subtheme')
+
+
+def _check_for_theme(theme, themes):
+ for t in themes:
+ if t.endswith(os.sep + theme):
+ return True
+ return False
+
+
+class CommandSubTheme(Command):
+ """Given a swatch name from bootswatch.com and a parent theme, creates a custom theme."""
+
+ name = "subtheme"
+ doc_usage = "[options]"
+ doc_purpose = "given a swatch name from bootswatch.com or hackerthemes.com and a parent theme, creates a custom"\
+ " theme"
+ cmd_options = [
+ {
+ 'name': 'name',
+ 'short': 'n',
+ 'long': 'name',
+ 'default': 'custom',
+ 'type': str,
+ 'help': 'New theme name',
+ },
+ {
+ 'name': 'swatch',
+ 'short': 's',
+ 'default': '',
+ 'type': str,
+ 'help': 'Name of the swatch from bootswatch.com.'
+ },
+ {
+ 'name': 'parent',
+ 'short': 'p',
+ 'long': 'parent',
+ 'default': 'bootstrap4',
+ 'help': 'Parent theme name',
+ },
+ ]
+
+ def _execute(self, options, args):
+ """Given a swatch name and a parent theme, creates a custom theme."""
+ name = options['name']
+ swatch = options['swatch']
+ if not swatch:
+ LOGGER.error('The -s option is mandatory')
+ return 1
+ parent = options['parent']
+ version = '4'
+
+ # Check which Bootstrap version to use
+ themes = utils.get_theme_chain(parent, self.site.themes_dirs)
+ if _check_for_theme('bootstrap', themes) or _check_for_theme('bootstrap-jinja', themes):
+ version = '2'
+ elif _check_for_theme('bootstrap3', themes) or _check_for_theme('bootstrap3-jinja', themes):
+ version = '3'
+ elif _check_for_theme('bootstrap4', themes) or _check_for_theme('bootstrap4-jinja', themes):
+ version = '4'
+ elif not _check_for_theme('bootstrap4', themes) and not _check_for_theme('bootstrap4-jinja', themes):
+ LOGGER.warning(
+ '"subtheme" only makes sense for themes that use bootstrap')
+ elif _check_for_theme('bootstrap3-gradients', themes) or _check_for_theme('bootstrap3-gradients-jinja', themes):
+ LOGGER.warning(
+ '"subtheme" doesn\'t work well with the bootstrap3-gradients family')
+
+ LOGGER.info("Creating '{0}' theme from '{1}' and '{2}'".format(
+ name, swatch, parent))
+ utils.makedirs(os.path.join('themes', name, 'assets', 'css'))
+ for fname in ('bootstrap.min.css', 'bootstrap.css'):
+ if swatch in [
+ 'bubblegum', 'business-tycoon', 'charming', 'daydream',
+ 'executive-suite', 'good-news', 'growth', 'harbor', 'hello-world',
+ 'neon-glow', 'pleasant', 'retro', 'vibrant-sea', 'wizardry']: # Hackerthemes
+ LOGGER.info(
+ 'Hackertheme-based subthemes often require you to use a custom font for full effect.')
+ if version != '4':
+ LOGGER.error(
+ 'The hackertheme subthemes are only available for Bootstrap 4.')
+ return 1
+ if fname == 'bootstrap.css':
+ url = 'https://raw.githubusercontent.com/HackerThemes/theme-machine/master/dist/{swatch}/css/bootstrap4-{swatch}.css'.format(
+ swatch=swatch)
+ else:
+ url = 'https://raw.githubusercontent.com/HackerThemes/theme-machine/master/dist/{swatch}/css/bootstrap4-{swatch}.min.css'.format(
+ swatch=swatch)
+ else: # Bootswatch
+ url = 'https://bootswatch.com'
+ if version:
+ url += '/' + version
+ url = '/'.join((url, swatch, fname))
+ LOGGER.info("Downloading: " + url)
+ r = requests.get(url)
+ if r.status_code > 299:
+ LOGGER.error('Error {} getting {}', r.status_code, url)
+ return 1
+ data = r.text
+
+ with open(os.path.join('themes', name, 'assets', 'css', fname),
+ 'w+') as output:
+ output.write(data)
+
+ with open(os.path.join('themes', name, '%s.theme' % name), 'w+') as output:
+ parent_theme_data_path = utils.get_asset_path(
+ '%s.theme' % parent, themes)
+ cp = configparser.ConfigParser()
+ cp.read(parent_theme_data_path)
+ cp['Theme']['parent'] = parent
+ cp['Family'] = {'family': cp['Family']['family']}
+ cp.write(output)
+
+ LOGGER.info(
+ 'Theme created. Change the THEME setting to "{0}" to use it.'.format(name))
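
For Bootswatch swatches the download URL is assembled from the detected Bootstrap version, the swatch name and the CSS file name; HackerThemes swatches use a fixed GitHub raw URL instead. A minimal sketch of the Bootswatch case, assuming the requests library; the swatch and theme names are illustrative:

    import os

    import requests

    version = '4'      # detected from the parent theme chain
    swatch = 'darkly'  # illustrative swatch name
    name = 'custom'    # new subtheme name

    os.makedirs(os.path.join('themes', name, 'assets', 'css'), exist_ok=True)
    for fname in ('bootstrap.min.css', 'bootstrap.css'):
        url = '/'.join(('https://bootswatch.com', version, swatch, fname))
        r = requests.get(url)
        r.raise_for_status()  # the plugin checks r.status_code > 299 instead
        with open(os.path.join('themes', name, 'assets', 'css', fname), 'w+') as output:
            output.write(r.text)
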
diff --git a/nikola/plugins/command/theme.plugin b/nikola/plugins/command/theme.plugin
index b0c1886..421d027 100644
--- a/nikola/plugins/command/theme.plugin
+++ b/nikola/plugins/command/theme.plugin
@@ -9,5 +9,5 @@ website = https://getnikola.com/
description = Manage Nikola themes
[Nikola]
-plugincategory = Command
+PluginCategory = Command
diff --git a/nikola/plugins/command/theme.py b/nikola/plugins/command/theme.py
index 7513491..6f4339a 100644
--- a/nikola/plugins/command/theme.py
+++ b/nikola/plugins/command/theme.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina, Chris Warrick and others.
+# Copyright © 2012-2020 Roberto Alsina, Chris Warrick and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,13 +26,15 @@
"""Manage themes."""
-from __future__ import print_function
-import os
+import configparser
import io
+import json.decoder
+import os
import shutil
+import sys
import time
-import requests
+import requests
import pygments
from pygments.lexers import PythonLexer
from pygments.formatters import TerminalFormatter
@@ -41,7 +43,7 @@ from pkg_resources import resource_filename
from nikola.plugin_categories import Command
from nikola import utils
-LOGGER = utils.get_logger('theme', utils.STDERR_HANDLER)
+LOGGER = utils.get_logger('theme')
class CommandTheme(Command):
@@ -89,9 +91,8 @@ class CommandTheme(Command):
'short': 'u',
'long': 'url',
'type': str,
- 'help': "URL for the theme repository (default: "
- "https://themes.getnikola.com/v7/themes.json)",
- 'default': 'https://themes.getnikola.com/v7/themes.json'
+ 'help': "URL for the theme repository",
+ 'default': 'https://themes.getnikola.com/v8/themes.json'
},
{
'name': 'getpath',
@@ -122,14 +123,21 @@ class CommandTheme(Command):
'long': 'engine',
'type': str,
'default': 'mako',
- 'help': 'Engine to use for new theme (mako or jinja -- default: mako)',
+ 'help': 'Engine to use for new theme (mako or jinja)',
},
{
'name': 'new_parent',
'long': 'parent',
'type': str,
'default': 'base',
- 'help': 'Parent to use for new theme (default: base)',
+ 'help': 'Parent to use for new theme',
+ },
+ {
+ 'name': 'new_legacy_meta',
+ 'long': 'legacy-meta',
+ 'type': bool,
+ 'default': False,
+ 'help': 'Create legacy meta files for new theme',
},
]
@@ -147,6 +155,7 @@ class CommandTheme(Command):
new = options.get('new')
new_engine = options.get('new_engine')
new_parent = options.get('new_parent')
+ new_legacy_meta = options.get('new_legacy_meta')
command_count = [bool(x) for x in (
install,
uninstall,
@@ -172,7 +181,7 @@ class CommandTheme(Command):
elif copy_template:
return self.copy_template(copy_template)
elif new:
- return self.new_theme(new, new_engine, new_parent)
+ return self.new_theme(new, new_engine, new_parent, new_legacy_meta)
def do_install_deps(self, url, name):
"""Install themes and their dependencies."""
@@ -188,11 +197,11 @@ class CommandTheme(Command):
try:
utils.get_theme_path_real(parent_name, self.site.themes_dirs)
break
- except: # Not available
+ except Exception: # Not available
self.do_install(parent_name, data)
name = parent_name
if installstatus:
- LOGGER.notice('Remember to set THEME="{0}" in conf.py to use this theme.'.format(origname))
+ LOGGER.info('Remember to set THEME="{0}" in conf.py to use this theme.'.format(origname))
def do_install(self, name, data):
"""Download and install a theme."""
@@ -225,15 +234,13 @@ class CommandTheme(Command):
confpypath = os.path.join(dest_path, 'conf.py.sample')
if os.path.exists(confpypath):
- LOGGER.notice('This theme has a sample config file. Integrate it with yours in order to make this theme work!')
+ LOGGER.warning('This theme has a sample config file. Integrate it with yours in order to make this theme work!')
print('Contents of the conf.py.sample file:\n')
- with io.open(confpypath, 'r', encoding='utf-8') as fh:
+ with io.open(confpypath, 'r', encoding='utf-8-sig') as fh:
if self.site.colorful:
- print(utils.indent(pygments.highlight(
- fh.read(), PythonLexer(), TerminalFormatter()),
- 4 * ' '))
+ print(pygments.highlight(fh.read(), PythonLexer(), TerminalFormatter()))
else:
- print(utils.indent(fh.read(), 4 * ' '))
+ print(fh.read())
return True
def do_uninstall(self, name):
@@ -282,7 +289,9 @@ class CommandTheme(Command):
themes = []
themes_dirs = self.site.themes_dirs + [resource_filename('nikola', os.path.join('data', 'themes'))]
for tdir in themes_dirs:
- themes += [(i, os.path.join(tdir, i)) for i in os.listdir(tdir)]
+ if os.path.isdir(tdir):
+ themes += [(i, os.path.join(tdir, i)) for i in os.listdir(tdir)]
+
for tname, tpath in sorted(set(themes)):
if os.path.isdir(tpath):
print("{0} at {1}".format(tname, tpath))
@@ -316,7 +325,7 @@ class CommandTheme(Command):
LOGGER.error("This file already exists in your templates directory ({0}).".format(base))
return 3
- def new_theme(self, name, engine, parent):
+ def new_theme(self, name, engine, parent, create_legacy_meta=False):
"""Create a new theme."""
base = 'themes'
themedir = os.path.join(base, name)
@@ -326,9 +335,7 @@ class CommandTheme(Command):
LOGGER.info("Created directory {0}".format(base))
# Check if engine and parent match
- engine_file = utils.get_asset_path('engine', utils.get_theme_chain(parent, self.site.themes_dirs))
- with io.open(engine_file, 'r', encoding='utf-8') as fh:
- parent_engine = fh.read().strip()
+ parent_engine = utils.get_template_engine(utils.get_theme_chain(parent, self.site.themes_dirs))
if parent_engine != engine:
LOGGER.error("Cannot use engine {0} because parent theme '{1}' uses {2}".format(engine, parent, parent_engine))
@@ -342,24 +349,45 @@ class CommandTheme(Command):
LOGGER.error("Theme already exists")
return 2
- with io.open(os.path.join(themedir, 'parent'), 'w', encoding='utf-8') as fh:
- fh.write(parent + '\n')
- LOGGER.info("Created file {0}".format(os.path.join(themedir, 'parent')))
- with io.open(os.path.join(themedir, 'engine'), 'w', encoding='utf-8') as fh:
- fh.write(engine + '\n')
- LOGGER.info("Created file {0}".format(os.path.join(themedir, 'engine')))
+ cp = configparser.ConfigParser()
+ cp['Theme'] = {
+ 'engine': engine,
+ 'parent': parent
+ }
+
+ theme_meta_path = os.path.join(themedir, name + '.theme')
+ with io.open(theme_meta_path, 'w', encoding='utf-8') as fh:
+ cp.write(fh)
+ LOGGER.info("Created file {0}".format(theme_meta_path))
+
+ if create_legacy_meta:
+ with io.open(os.path.join(themedir, 'parent'), 'w', encoding='utf-8') as fh:
+ fh.write(parent + '\n')
+ LOGGER.info("Created file {0}".format(os.path.join(themedir, 'parent')))
+ with io.open(os.path.join(themedir, 'engine'), 'w', encoding='utf-8') as fh:
+ fh.write(engine + '\n')
+ LOGGER.info("Created file {0}".format(os.path.join(themedir, 'engine')))
LOGGER.info("Theme {0} created successfully.".format(themedir))
- LOGGER.notice('Remember to set THEME="{0}" in conf.py to use this theme.'.format(name))
+ LOGGER.info('Remember to set THEME="{0}" in conf.py to use this theme.'.format(name))
def get_json(self, url):
"""Download the JSON file with all plugins."""
if self.json is None:
try:
- self.json = requests.get(url).json()
- except requests.exceptions.SSLError:
- LOGGER.warning("SSL error, using http instead of https (press ^C to abort)")
- time.sleep(1)
- url = url.replace('https', 'http', 1)
- self.json = requests.get(url).json()
+ try:
+ self.json = requests.get(url).json()
+ except requests.exceptions.SSLError:
+ LOGGER.warning("SSL error, using http instead of https (press ^C to abort)")
+ time.sleep(1)
+ url = url.replace('https', 'http', 1)
+ self.json = requests.get(url).json()
+ except json.decoder.JSONDecodeError as e:
+ LOGGER.error("Failed to decode JSON data in response from server.")
+ LOGGER.error("JSON error encountered: " + str(e))
+ LOGGER.error("This issue might be caused by server-side issues, or by unusual activity in your "
+ "network (as determined by CloudFlare). Please visit https://themes.getnikola.com/ in "
+ "a browser.")
+ sys.exit(2)
+
return self.json
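
`theme --new` now describes a theme in a single <name>.theme INI file (engine and parent in a [Theme] section) and only writes the legacy parent/engine files when --legacy-meta is passed. A minimal sketch of that metadata write, with illustrative names:

    import configparser
    import io
    import os

    name, engine, parent = 'mytheme', 'mako', 'base'  # illustrative values
    themedir = os.path.join('themes', name)
    os.makedirs(themedir, exist_ok=True)

    cp = configparser.ConfigParser()
    cp['Theme'] = {'engine': engine, 'parent': parent}
    with io.open(os.path.join(themedir, name + '.theme'), 'w', encoding='utf-8') as fh:
        cp.write(fh)
    # Resulting mytheme.theme:
    # [Theme]
    # engine = mako
    # parent = base
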
diff --git a/nikola/plugins/command/version.plugin b/nikola/plugins/command/version.plugin
index d78b79b..a172e28 100644
--- a/nikola/plugins/command/version.plugin
+++ b/nikola/plugins/command/version.plugin
@@ -9,5 +9,5 @@ website = https://getnikola.com/
description = Show nikola version
[Nikola]
-plugincategory = Command
+PluginCategory = Command
diff --git a/nikola/plugins/command/version.py b/nikola/plugins/command/version.py
index 267837e..9b81343 100644
--- a/nikola/plugins/command/version.py
+++ b/nikola/plugins/command/version.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,15 +26,13 @@
"""Print Nikola version."""
-from __future__ import print_function
-import lxml
import requests
from nikola.plugin_categories import Command
from nikola import __version__
-URL = 'https://pypi.python.org/pypi?:action=doap&name=Nikola'
+URL = 'https://pypi.org/pypi/Nikola/json'
class CommandVersion(Command):
@@ -60,10 +58,11 @@ class CommandVersion(Command):
"""Print the version number."""
print("Nikola v" + __version__)
if options.get('check'):
- data = requests.get(URL).text
- doc = lxml.etree.fromstring(data.encode('utf8'))
- revision = doc.findall('*//{http://usefulinc.com/ns/doap#}revision')[0].text
- if revision == __version__:
+ data = requests.get(URL).json()
+ pypi_version = data['info']['version']
+ if pypi_version == __version__:
print("Nikola is up-to-date")
else:
- print("The latest version of Nikola is v{0} -- please upgrade using `pip install --upgrade Nikola=={0}` or your system package manager".format(revision))
+ print("The latest version of Nikola is v{0}. Please upgrade "
+ "using `pip install --upgrade Nikola=={0}` or your "
+ "system package manager.".format(pypi_version))
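
The update check now queries PyPI's JSON API instead of the retired DOAP endpoint; the latest release is read straight from data['info']['version']. A minimal sketch, assuming the requests library:

    import requests

    URL = 'https://pypi.org/pypi/Nikola/json'

    data = requests.get(URL).json()
    pypi_version = data['info']['version']
    print("Latest Nikola release on PyPI: v" + pypi_version)
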
diff --git a/nikola/plugins/compile/__init__.py b/nikola/plugins/compile/__init__.py
index ff7e9a2..db78fce 100644
--- a/nikola/plugins/compile/__init__.py
+++ b/nikola/plugins/compile/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
diff --git a/nikola/plugins/compile/html.plugin b/nikola/plugins/compile/html.plugin
index f95bdd5..be1f876 100644
--- a/nikola/plugins/compile/html.plugin
+++ b/nikola/plugins/compile/html.plugin
@@ -9,5 +9,5 @@ website = https://getnikola.com/
description = Compile HTML into HTML (just copy)
[Nikola]
-plugincategory = Compiler
+PluginCategory = Compiler
friendlyname = HTML
diff --git a/nikola/plugins/compile/html.py b/nikola/plugins/compile/html.py
index 942d6da..80b6713 100644
--- a/nikola/plugins/compile/html.py
+++ b/nikola/plugins/compile/html.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -24,15 +24,17 @@
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-"""Implementation of compile_html for HTML source files."""
+"""Page compiler plugin for HTML source files."""
-from __future__ import unicode_literals
-import os
import io
+import os
+
+import lxml.html
+from nikola import shortcodes as sc
from nikola.plugin_categories import PageCompiler
-from nikola.utils import makedirs, write_metadata
+from nikola.utils import LocaleBorg, makedirs, map_metadata, write_metadata
class CompileHtml(PageCompiler):
@@ -40,25 +42,27 @@ class CompileHtml(PageCompiler):
name = "html"
friendly_name = "HTML"
+ supports_metadata = True
- def compile_html(self, source, dest, is_two_file=True):
- """Compile source file into HTML and save as dest."""
+ def compile_string(self, data, source_path=None, is_two_file=True, post=None, lang=None):
+ """Compile HTML into HTML strings, with shortcode support."""
+ if not is_two_file:
+ _, data = self.split_metadata(data, post, lang)
+ new_data, shortcodes = sc.extract_shortcodes(data)
+ return self.site.apply_shortcodes_uuid(new_data, shortcodes, filename=source_path, extra_context={'post': post})
+
+ def compile(self, source, dest, is_two_file=True, post=None, lang=None):
+ """Compile the source file into HTML and save as dest."""
makedirs(os.path.dirname(dest))
- try:
- post = self.site.post_per_input_file[source]
- except KeyError:
- post = None
- with io.open(dest, "w+", encoding="utf8") as out_file:
- with io.open(source, "r", encoding="utf8") as in_file:
+ with io.open(dest, "w+", encoding="utf-8") as out_file:
+ with io.open(source, "r", encoding="utf-8-sig") as in_file:
data = in_file.read()
- if not is_two_file:
- _, data = self.split_metadata(data)
- data, shortcode_deps = self.site.apply_shortcodes(data, with_dependencies=True, extra_context=dict(post=post))
+ data, shortcode_deps = self.compile_string(data, source, is_two_file, post, lang)
out_file.write(data)
if post is None:
if shortcode_deps:
self.logger.error(
- "Cannot save dependencies for post {0} due to unregistered source file name",
+ "Cannot save dependencies for post {0} (post unknown)",
source)
else:
post._depfile[dest] += shortcode_deps
@@ -76,9 +80,41 @@ class CompileHtml(PageCompiler):
makedirs(os.path.dirname(path))
if not content.endswith('\n'):
content += '\n'
- with io.open(path, "w+", encoding="utf8") as fd:
+ with io.open(path, "w+", encoding="utf-8") as fd:
if onefile:
- fd.write('<!--\n')
- fd.write(write_metadata(metadata))
- fd.write('-->\n\n')
+ fd.write(write_metadata(metadata, comment_wrap=True, site=self.site, compiler=self))
fd.write(content)
+
+ def read_metadata(self, post, file_metadata_regexp=None, unslugify_titles=False, lang=None):
+ """Read the metadata from a post's meta tags, and return a metadata dict."""
+ if lang is None:
+ lang = LocaleBorg().current_lang
+ source_path = post.translated_source_path(lang)
+
+ with io.open(source_path, 'r', encoding='utf-8-sig') as inf:
+ data = inf.read()
+
+ metadata = {}
+ try:
+ doc = lxml.html.document_fromstring(data)
+ except lxml.etree.ParserError as e:
+ # Issue #374 -> #2851
+ if str(e) == "Document is empty":
+ return {}
+ # let other errors raise
+ raise
+ title_tag = doc.find('*//title')
+ if title_tag is not None and title_tag.text:
+ metadata['title'] = title_tag.text
+ meta_tags = doc.findall('*//meta')
+ for tag in meta_tags:
+ k = tag.get('name', '').lower()
+ if not k:
+ continue
+ elif k == 'keywords':
+ k = 'tags'
+ content = tag.get('content')
+ if content:
+ metadata[k] = content
+ map_metadata(metadata, 'html_metadata', self.site.config)
+ return metadata
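
The HTML compiler's new read_metadata pulls the title from <title> and everything else from <meta name=...> tags, renaming 'keywords' to 'tags'. A minimal standalone sketch of that extraction, assuming lxml; the sample markup is illustrative:

    import lxml.html

    data = ('<html><head><title>Hello</title>'
            '<meta name="keywords" content="nikola,demo"></head><body></body></html>')

    metadata = {}
    doc = lxml.html.document_fromstring(data)
    title_tag = doc.find('*//title')
    if title_tag is not None and title_tag.text:
        metadata['title'] = title_tag.text
    for tag in doc.findall('*//meta'):
        k = tag.get('name', '').lower()
        if k == 'keywords':
            k = 'tags'
        if k and tag.get('content'):
            metadata[k] = tag.get('content')
    # metadata == {'title': 'Hello', 'tags': 'nikola,demo'}
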
diff --git a/nikola/plugins/compile/ipynb.plugin b/nikola/plugins/compile/ipynb.plugin
index c369ab2..c146172 100644
--- a/nikola/plugins/compile/ipynb.plugin
+++ b/nikola/plugins/compile/ipynb.plugin
@@ -6,8 +6,8 @@ module = ipynb
author = Damian Avila, Chris Warrick and others
version = 2.0.0
website = http://www.damian.oquanta.info/
-description = Compile IPython notebooks into Nikola posts
+description = Compile Jupyter notebooks into Nikola posts
[Nikola]
-plugincategory = Compiler
-friendlyname = Jupyter/IPython Notebook
+PluginCategory = Compiler
+friendlyname = Jupyter Notebook
diff --git a/nikola/plugins/compile/ipynb.py b/nikola/plugins/compile/ipynb.py
index f3fdeea..039604b 100644
--- a/nikola/plugins/compile/ipynb.py
+++ b/nikola/plugins/compile/ipynb.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2013-2016 Damián Avila, Chris Warrick and others.
+# Copyright © 2013-2020 Damián Avila, Chris Warrick and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -24,99 +24,95 @@
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-"""Implementation of compile_html based on nbconvert."""
+"""Page compiler plugin for nbconvert."""
-from __future__ import unicode_literals, print_function
import io
+import json
import os
-import sys
try:
+ import nbconvert
from nbconvert.exporters import HTMLExporter
import nbformat
current_nbformat = nbformat.current_nbformat
from jupyter_client import kernelspec
from traitlets.config import Config
+ NBCONVERT_VERSION_MAJOR = int(nbconvert.__version__.partition(".")[0])
flag = True
- ipy_modern = True
except ImportError:
- try:
- import IPython
- from IPython.nbconvert.exporters import HTMLExporter
- if IPython.version_info[0] >= 3: # API changed with 3.0.0
- from IPython import nbformat
- current_nbformat = nbformat.current_nbformat
- from IPython.kernel import kernelspec
- ipy_modern = True
- else:
- import IPython.nbformat.current as nbformat
- current_nbformat = 'json'
- kernelspec = None
- ipy_modern = False
-
- from IPython.config import Config
- flag = True
- except ImportError:
- flag = None
- ipy_modern = None
+ flag = None
+from nikola import shortcodes as sc
from nikola.plugin_categories import PageCompiler
-from nikola.utils import makedirs, req_missing, get_logger, STDERR_HANDLER
+from nikola.utils import makedirs, req_missing, LocaleBorg
class CompileIPynb(PageCompiler):
"""Compile IPynb into HTML."""
name = "ipynb"
- friendly_name = "Jupyter/IPython Notebook"
+ friendly_name = "Jupyter Notebook"
demote_headers = True
- default_kernel = 'python2' if sys.version_info[0] == 2 else 'python3'
+ default_kernel = 'python3'
+ supports_metadata = True
- def set_site(self, site):
- """Set Nikola site."""
- self.logger = get_logger('compile_ipynb', STDERR_HANDLER)
- super(CompileIPynb, self).set_site(site)
-
- def compile_html_string(self, source, is_two_file=True):
+ def _compile_string(self, nb_json):
"""Export notebooks as HTML strings."""
- if flag is None:
- req_missing(['ipython[notebook]>=2.0.0'], 'build this site (compile ipynb)')
- c = Config(self.site.config['IPYNB_CONFIG'])
+ self._req_missing_ipynb()
+ c = Config(get_default_jupyter_config())
+ c.merge(Config(self.site.config['IPYNB_CONFIG']))
+ if 'template_file' not in self.site.config['IPYNB_CONFIG'].get('Exporter', {}):
+ if NBCONVERT_VERSION_MAJOR >= 6:
+ c['Exporter']['template_file'] = 'classic/base.html.j2'
+ else:
+ c['Exporter']['template_file'] = 'basic.tpl' # not a typo
exportHtml = HTMLExporter(config=c)
- with io.open(source, "r", encoding="utf8") as in_file:
- nb_json = nbformat.read(in_file, current_nbformat)
- (body, resources) = exportHtml.from_notebook_node(nb_json)
+ body, _ = exportHtml.from_notebook_node(nb_json)
return body
- def compile_html(self, source, dest, is_two_file=True):
- """Compile source file into HTML and save as dest."""
+ @staticmethod
+ def _nbformat_read(in_file):
+ return nbformat.read(in_file, current_nbformat)
+
+ def _req_missing_ipynb(self):
+ if flag is None:
+ req_missing(['notebook>=4.0.0'], 'build this site (compile ipynb)')
+
+ def compile_string(self, data, source_path=None, is_two_file=True, post=None, lang=None):
+ """Compile notebooks into HTML strings."""
+ new_data, shortcodes = sc.extract_shortcodes(data)
+ output = self._compile_string(nbformat.reads(new_data, current_nbformat))
+ return self.site.apply_shortcodes_uuid(output, shortcodes, filename=source_path, extra_context={'post': post})
+
+ def compile(self, source, dest, is_two_file=False, post=None, lang=None):
+ """Compile the source file into HTML and save as dest."""
makedirs(os.path.dirname(dest))
- try:
- post = self.site.post_per_input_file[source]
- except KeyError:
- post = None
- with io.open(dest, "w+", encoding="utf8") as out_file:
- output = self.compile_html_string(source, is_two_file)
- output, shortcode_deps = self.site.apply_shortcodes(output, filename=source, with_dependencies=True, extra_context=dict(post=post))
+ with io.open(dest, "w+", encoding="utf-8") as out_file:
+ with io.open(source, "r", encoding="utf-8-sig") as in_file:
+ nb_str = in_file.read()
+ output, shortcode_deps = self.compile_string(nb_str, source,
+ is_two_file, post,
+ lang)
out_file.write(output)
if post is None:
if shortcode_deps:
self.logger.error(
- "Cannot save dependencies for post {0} due to unregistered source file name",
+ "Cannot save dependencies for post {0} (post unknown)",
source)
else:
post._depfile[dest] += shortcode_deps
- def read_metadata(self, post, file_metadata_regexp=None, unslugify_titles=False, lang=None):
+ def read_metadata(self, post, lang=None):
"""Read metadata directly from ipynb file.
- As ipynb file support arbitrary metadata as json, the metadata used by Nikola
+ As ipynb files support arbitrary metadata as json, the metadata used by Nikola
will be assumed to be in the 'nikola' subfield.
"""
- if flag is None:
- req_missing(['ipython[notebook]>=2.0.0'], 'build this site (compile ipynb)')
- source = post.source_path
- with io.open(source, "r", encoding="utf8") as in_file:
+ self._req_missing_ipynb()
+ if lang is None:
+ lang = LocaleBorg().current_lang
+ source = post.translated_source_path(lang)
+ with io.open(source, "r", encoding="utf-8-sig") as in_file:
nb_json = nbformat.read(in_file, current_nbformat)
# Metadata might not exist in two-file posts or in hand-crafted
# .ipynb files.
@@ -124,11 +120,10 @@ class CompileIPynb(PageCompiler):
def create_post(self, path, **kw):
"""Create a new post."""
- if flag is None:
- req_missing(['ipython[notebook]>=2.0.0'], 'build this site (compile ipynb)')
+ self._req_missing_ipynb()
content = kw.pop('content', None)
onefile = kw.pop('onefile', False)
- kernel = kw.pop('ipython_kernel', None)
+ kernel = kw.pop('jupyter_kernel', None)
# is_page is not needed to create the file
kw.pop('is_page', False)
@@ -142,40 +137,52 @@ class CompileIPynb(PageCompiler):
# imported .ipynb file, guaranteed to start with "{" because it’s JSON.
nb = nbformat.reads(content, current_nbformat)
else:
- if ipy_modern:
- nb = nbformat.v4.new_notebook()
- nb["cells"] = [nbformat.v4.new_markdown_cell(content)]
- else:
- nb = nbformat.new_notebook()
- nb["worksheets"] = [nbformat.new_worksheet(cells=[nbformat.new_text_cell('markdown', [content])])]
-
- if kernelspec is not None:
- if kernel is None:
- kernel = self.default_kernel
- self.logger.notice('No kernel specified, assuming "{0}".'.format(kernel))
-
- IPYNB_KERNELS = {}
- ksm = kernelspec.KernelSpecManager()
- for k in ksm.find_kernel_specs():
- IPYNB_KERNELS[k] = ksm.get_kernel_spec(k).to_dict()
- IPYNB_KERNELS[k]['name'] = k
- del IPYNB_KERNELS[k]['argv']
-
- if kernel not in IPYNB_KERNELS:
- self.logger.error('Unknown kernel "{0}". Maybe you mispelled it?'.format(kernel))
- self.logger.info("Available kernels: {0}".format(", ".join(sorted(IPYNB_KERNELS))))
- raise Exception('Unknown kernel "{0}"'.format(kernel))
-
- nb["metadata"]["kernelspec"] = IPYNB_KERNELS[kernel]
- else:
- # Older IPython versions don’t need kernelspecs.
- pass
+ nb = nbformat.v4.new_notebook()
+ nb["cells"] = [nbformat.v4.new_markdown_cell(content)]
+
+ if kernel is None:
+ kernel = self.default_kernel
+ self.logger.warning('No kernel specified, assuming "{0}".'.format(kernel))
+
+ IPYNB_KERNELS = {}
+ ksm = kernelspec.KernelSpecManager()
+ for k in ksm.find_kernel_specs():
+ IPYNB_KERNELS[k] = ksm.get_kernel_spec(k).to_dict()
+ IPYNB_KERNELS[k]['name'] = k
+ del IPYNB_KERNELS[k]['argv']
+
+ if kernel not in IPYNB_KERNELS:
+ self.logger.error('Unknown kernel "{0}". Maybe you misspelled it?'.format(kernel))
+ self.logger.info("Available kernels: {0}".format(", ".join(sorted(IPYNB_KERNELS))))
+ raise Exception('Unknown kernel "{0}"'.format(kernel))
+
+ nb["metadata"]["kernelspec"] = IPYNB_KERNELS[kernel]
if onefile:
nb["metadata"]["nikola"] = metadata
- with io.open(path, "w+", encoding="utf8") as fd:
- if ipy_modern:
- nbformat.write(nb, fd, 4)
- else:
- nbformat.write(nb, fd, 'ipynb')
+ with io.open(path, "w+", encoding="utf-8") as fd:
+ nbformat.write(nb, fd, 4)
+
+
+def get_default_jupyter_config():
+ """Search default jupyter configuration location paths.
+
+ Return dictionary from configuration json files.
+ """
+ config = {}
+ from jupyter_core.paths import jupyter_config_path
+
+ for parent in jupyter_config_path():
+ try:
+ for file in os.listdir(parent):
+ if 'nbconvert' in file and file.endswith('.json'):
+ abs_path = os.path.join(parent, file)
+ with open(abs_path) as config_file:
+ config.update(json.load(config_file))
+ except OSError:
+ # some paths jupyter uses to find configurations
+ # may not exist
+ pass
+
+ return config
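
Stripped of the shortcode handling, compiling a notebook now amounts to: read the JSON with nbformat, merge the site config over any nbconvert defaults found by get_default_jupyter_config, pick a template for the installed nbconvert version, and export with HTMLExporter. A minimal sketch of that pipeline, assuming nbformat and nbconvert are installed; the input path is illustrative:

    import nbconvert
    import nbformat
    from nbconvert.exporters import HTMLExporter
    from traitlets.config import Config

    with open('example.ipynb', encoding='utf-8-sig') as in_file:  # illustrative notebook
        nb_json = nbformat.read(in_file, nbformat.current_nbformat)

    c = Config()
    if int(nbconvert.__version__.partition('.')[0]) >= 6:
        c['Exporter']['template_file'] = 'classic/base.html.j2'
    else:
        c['Exporter']['template_file'] = 'basic.tpl'

    body, _resources = HTMLExporter(config=c).from_notebook_node(nb_json)
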
diff --git a/nikola/plugins/compile/markdown.plugin b/nikola/plugins/compile/markdown.plugin
index 2607413..85c67c3 100644
--- a/nikola/plugins/compile/markdown.plugin
+++ b/nikola/plugins/compile/markdown.plugin
@@ -9,5 +9,5 @@ website = https://getnikola.com/
description = Compile Markdown into HTML
[Nikola]
-plugincategory = Compiler
+PluginCategory = Compiler
friendlyname = Markdown
diff --git a/nikola/plugins/compile/markdown/__init__.py b/nikola/plugins/compile/markdown/__init__.py
index 2e4234c..74e8c75 100644
--- a/nikola/plugins/compile/markdown/__init__.py
+++ b/nikola/plugins/compile/markdown/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -24,23 +24,44 @@
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-"""Implementation of compile_html based on markdown."""
-
-from __future__ import unicode_literals
+"""Page compiler plugin for Markdown."""
import io
+import json
import os
+import threading
+
+from nikola import shortcodes as sc
+from nikola.plugin_categories import PageCompiler
+from nikola.utils import makedirs, req_missing, write_metadata, LocaleBorg, map_metadata
try:
- from markdown import markdown
+ from markdown import Markdown
except ImportError:
- markdown = None # NOQA
- nikola_extension = None
- gist_extension = None
- podcast_extension = None
+ Markdown = None
-from nikola.plugin_categories import PageCompiler
-from nikola.utils import makedirs, req_missing, write_metadata
+
+class ThreadLocalMarkdown(threading.local):
+ """Convert Markdown to HTML using per-thread Markdown objects.
+
+ See discussion in #2661.
+ """
+
+ def __init__(self, extensions, extension_configs):
+ """Create a Markdown instance."""
+ self.markdown = Markdown(extensions=extensions, extension_configs=extension_configs, output_format="html5")
+
+ def convert(self, data):
+ """Convert data to HTML and reset internal state."""
+ result = self.markdown.convert(data)
+ try:
+ meta = {}
+ for k in self.markdown.Meta: # This reads everything as lists
+ meta[k.lower()] = ','.join(self.markdown.Meta[k])
+ except Exception:
+ meta = {}
+ self.markdown.reset()
+ return result, meta
class CompileMarkdown(PageCompiler):
@@ -49,42 +70,61 @@ class CompileMarkdown(PageCompiler):
name = "markdown"
friendly_name = "Markdown"
demote_headers = True
- extensions = []
site = None
+ supports_metadata = False
def set_site(self, site):
"""Set Nikola site."""
- super(CompileMarkdown, self).set_site(site)
+ super().set_site(site)
self.config_dependencies = []
+ extensions = []
for plugin_info in self.get_compiler_extensions():
self.config_dependencies.append(plugin_info.name)
- self.extensions.append(plugin_info.plugin_object)
+ extensions.append(plugin_info.plugin_object)
plugin_info.plugin_object.short_help = plugin_info.description
- self.config_dependencies.append(str(sorted(site.config.get("MARKDOWN_EXTENSIONS"))))
+ site_extensions = self.site.config.get("MARKDOWN_EXTENSIONS")
+ self.config_dependencies.append(str(sorted(site_extensions)))
+ extensions.extend(site_extensions)
+
+ site_extension_configs = self.site.config.get("MARKDOWN_EXTENSION_CONFIGS")
+ if site_extension_configs:
+ self.config_dependencies.append(json.dumps(site_extension_configs.values, sort_keys=True))
+
+ if Markdown is not None:
+ self.converters = {}
+ for lang in self.site.config['TRANSLATIONS']:
+ lang_extension_configs = site_extension_configs(lang) if site_extension_configs else {}
+ self.converters[lang] = ThreadLocalMarkdown(extensions, lang_extension_configs)
+ self.supports_metadata = 'markdown.extensions.meta' in extensions
+
+ def compile_string(self, data, source_path=None, is_two_file=True, post=None, lang=None):
+ """Compile Markdown into HTML strings."""
+ if lang is None:
+ lang = LocaleBorg().current_lang
+ if Markdown is None:
+ req_missing(['markdown'], 'build this site (compile Markdown)')
+ if not is_two_file:
+ _, data = self.split_metadata(data, post, lang)
+ new_data, shortcodes = sc.extract_shortcodes(data)
+ output, _ = self.converters[lang].convert(new_data)
+ output, shortcode_deps = self.site.apply_shortcodes_uuid(output, shortcodes, filename=source_path, extra_context={'post': post})
+ return output, shortcode_deps
- def compile_html(self, source, dest, is_two_file=True):
- """Compile source file into HTML and save as dest."""
- if markdown is None:
+ def compile(self, source, dest, is_two_file=True, post=None, lang=None):
+ """Compile the source file into HTML and save as dest."""
+ if Markdown is None:
req_missing(['markdown'], 'build this site (compile Markdown)')
makedirs(os.path.dirname(dest))
- self.extensions += self.site.config.get("MARKDOWN_EXTENSIONS")
- try:
- post = self.site.post_per_input_file[source]
- except KeyError:
- post = None
- with io.open(dest, "w+", encoding="utf8") as out_file:
- with io.open(source, "r", encoding="utf8") as in_file:
+ with io.open(dest, "w+", encoding="utf-8") as out_file:
+ with io.open(source, "r", encoding="utf-8-sig") as in_file:
data = in_file.read()
- if not is_two_file:
- _, data = self.split_metadata(data)
- output = markdown(data, self.extensions, output_format="html5")
- output, shortcode_deps = self.site.apply_shortcodes(output, filename=source, with_dependencies=True, extra_context=dict(post=post))
+ output, shortcode_deps = self.compile_string(data, source, is_two_file, post, lang)
out_file.write(output)
if post is None:
if shortcode_deps:
self.logger.error(
- "Cannot save dependencies for post {0} due to unregistered source file name",
+ "Cannot save dependencies for post {0} (post unknown)",
source)
else:
post._depfile[dest] += shortcode_deps
@@ -102,9 +142,30 @@ class CompileMarkdown(PageCompiler):
makedirs(os.path.dirname(path))
if not content.endswith('\n'):
content += '\n'
- with io.open(path, "w+", encoding="utf8") as fd:
+ with io.open(path, "w+", encoding="utf-8") as fd:
if onefile:
- fd.write('<!-- \n')
- fd.write(write_metadata(metadata))
- fd.write('-->\n\n')
+ fd.write(write_metadata(metadata, comment_wrap=True, site=self.site, compiler=self))
fd.write(content)
+
+ def read_metadata(self, post, lang=None):
+ """Read the metadata from a post, and return a metadata dict."""
+ lang = lang or self.site.config['DEFAULT_LANG']
+ if not self.supports_metadata:
+ return {}
+ if Markdown is None:
+ req_missing(['markdown'], 'build this site (compile Markdown)')
+ if lang is None:
+ lang = LocaleBorg().current_lang
+ source = post.translated_source_path(lang)
+ with io.open(source, 'r', encoding='utf-8-sig') as inf:
+ # Note: markdown meta returns lowercase keys
+ data = inf.read()
+ # If the metadata starts with "---" it's actually YAML and
+ # we should not let markdown parse it, because it will do
+ # bad things like setting empty tags to "''"
+ if data.startswith('---\n'):
+ return {}
+ _, meta = self.converters[lang].convert(data)
+ # Map metadata from other platforms to names Nikola expects (Issue #2817)
+ map_metadata(meta, 'markdown_metadata', self.site.config)
+ return meta
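
A note on the ThreadLocalMarkdown helper added above: it relies on threading.local so that every rendering thread lazily builds its own Markdown instance and never shares converter state (see #2661). A minimal standalone sketch of the same pattern, assuming only the python-markdown package, looks like this:

    import threading
    from markdown import Markdown

    class PerThreadConverter(threading.local):
        """Each thread touching this object gets its own Markdown instance."""

        def __init__(self):
            self.md = Markdown(extensions=['meta'])

        def convert(self, text):
            html = self.md.convert(text)
            self.md.reset()  # clear Meta and other per-document state
            return html

    converter = PerThreadConverter()
    print(converter.convert('Title: Demo\n\nHello *world*'))
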
diff --git a/nikola/plugins/compile/markdown/mdx_gist.plugin b/nikola/plugins/compile/markdown/mdx_gist.plugin
index 85b5450..f962cb7 100644
--- a/nikola/plugins/compile/markdown/mdx_gist.plugin
+++ b/nikola/plugins/compile/markdown/mdx_gist.plugin
@@ -4,7 +4,7 @@ module = mdx_gist
[Nikola]
compiler = markdown
-plugincategory = CompilerExtension
+PluginCategory = CompilerExtension
[Documentation]
author = Roberto Alsina
diff --git a/nikola/plugins/compile/markdown/mdx_gist.py b/nikola/plugins/compile/markdown/mdx_gist.py
index 25c071f..f6ce20a 100644
--- a/nikola/plugins/compile/markdown/mdx_gist.py
+++ b/nikola/plugins/compile/markdown/mdx_gist.py
@@ -75,7 +75,10 @@ Error Case: non-existent file:
[:gist: 4747847 doesntexist.py]
"""
-from __future__ import unicode_literals, print_function
+import requests
+
+from nikola.plugin_categories import MarkdownExtension
+from nikola.utils import get_logger
try:
from markdown.extensions import Extension
@@ -87,12 +90,8 @@ except ImportError:
# the markdown compiler will fail first
Extension = Pattern = object
-from nikola.plugin_categories import MarkdownExtension
-from nikola.utils import get_logger, STDERR_HANDLER
-import requests
-
-LOGGER = get_logger('compile_markdown.mdx_gist', STDERR_HANDLER)
+LOGGER = get_logger('compile_markdown.mdx_gist')
GIST_JS_URL = "https://gist.github.com/{0}.js"
GIST_FILE_JS_URL = "https://gist.github.com/{0}.js?file={1}"
@@ -167,7 +166,7 @@ class GistPattern(Pattern):
pre_elem.text = AtomicString(raw_gist)
except GistFetchException as e:
- LOGGER.warn(e.message)
+ LOGGER.warning(e.message)
warning_comment = etree.Comment(' WARNING: {0} '.format(e.message))
noscript_elem.append(warning_comment)
@@ -186,15 +185,15 @@ class GistExtension(MarkdownExtension, Extension):
for key, value in configs:
self.setConfig(key, value)
- def extendMarkdown(self, md, md_globals):
+ def extendMarkdown(self, md, md_globals=None):
"""Extend Markdown."""
gist_md_pattern = GistPattern(GIST_MD_RE, self.getConfigs())
gist_md_pattern.md = md
- md.inlinePatterns.add('gist', gist_md_pattern, "<not_strong")
+ md.inlinePatterns.register(gist_md_pattern, 'gist', 175)
gist_rst_pattern = GistPattern(GIST_RST_RE, self.getConfigs())
gist_rst_pattern.md = md
- md.inlinePatterns.add('gist-rst', gist_rst_pattern, ">gist")
+ md.inlinePatterns.register(gist_rst_pattern, 'gist-rst', 176)
md.registerExtension(self)
@@ -203,6 +202,7 @@ def makeExtension(configs=None): # pragma: no cover
"""Make Markdown extension."""
return GistExtension(configs)
+
if __name__ == '__main__':
import doctest
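
The extendMarkdown changes in this file (and in mdx_nikola.py and mdx_podcast.py below) follow the python-markdown 3.x registry API: patterns are registered with an explicit name and a numeric priority instead of the old relative-position strings passed to inlinePatterns.add. A rough sketch of the new style, using a made-up extension and pattern name purely for illustration:

    from markdown import Markdown
    from markdown.extensions import Extension
    from markdown.inlinepatterns import SimpleTagPattern

    class DemoExtension(Extension):
        def extendMarkdown(self, md, md_globals=None):
            # Old API: md.inlinePatterns.add('strike_demo', pattern, '_end')
            # New API: register the pattern under a name with a priority.
            pattern = SimpleTagPattern(r'(~{2})(.+?)(~{2})', 'del')
            md.inlinePatterns.register(pattern, 'strike_demo', 175)

    print(Markdown(extensions=[DemoExtension()]).convert('~~gone~~'))
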
diff --git a/nikola/plugins/compile/markdown/mdx_nikola.plugin b/nikola/plugins/compile/markdown/mdx_nikola.plugin
index 3c5c638..9751598 100644
--- a/nikola/plugins/compile/markdown/mdx_nikola.plugin
+++ b/nikola/plugins/compile/markdown/mdx_nikola.plugin
@@ -4,7 +4,7 @@ module = mdx_nikola
[Nikola]
compiler = markdown
-plugincategory = CompilerExtension
+PluginCategory = CompilerExtension
[Documentation]
author = Roberto Alsina
diff --git a/nikola/plugins/compile/markdown/mdx_nikola.py b/nikola/plugins/compile/markdown/mdx_nikola.py
index 59a5d5b..06a6d9a 100644
--- a/nikola/plugins/compile/markdown/mdx_nikola.py
+++ b/nikola/plugins/compile/markdown/mdx_nikola.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -30,8 +30,10 @@
- Strikethrough inline patterns.
"""
-from __future__ import unicode_literals
import re
+
+from nikola.plugin_categories import MarkdownExtension
+
try:
from markdown.postprocessors import Postprocessor
from markdown.inlinepatterns import SimpleTagPattern
@@ -41,8 +43,6 @@ except ImportError:
# the markdown compiler will fail first
Postprocessor = SimpleTagPattern = Extension = object
-from nikola.plugin_categories import MarkdownExtension
-
CODERE = re.compile('<div class="codehilite"><pre>(.*?)</pre></div>', flags=re.MULTILINE | re.DOTALL)
STRIKE_RE = r"(~{2})(.+?)(~{2})" # ~~strike~~
@@ -68,14 +68,14 @@ class NikolaExtension(MarkdownExtension, Extension):
def _add_nikola_post_processor(self, md):
"""Extend Markdown with the postprocessor."""
pp = NikolaPostProcessor()
- md.postprocessors.add('nikola_post_processor', pp, '_end')
+ md.postprocessors.register(pp, 'nikola_post_processor', 1)
def _add_strikethrough_inline_pattern(self, md):
"""Support PHP-Markdown style strikethrough, for example: ``~~strike~~``."""
pattern = SimpleTagPattern(STRIKE_RE, 'del')
- md.inlinePatterns.add('strikethrough', pattern, '_end')
+ md.inlinePatterns.register(pattern, 'strikethrough', 175)
- def extendMarkdown(self, md, md_globals):
+ def extendMarkdown(self, md, md_globals=None):
"""Extend markdown to Nikola flavours."""
self._add_nikola_post_processor(md)
self._add_strikethrough_inline_pattern(md)
diff --git a/nikola/plugins/compile/markdown/mdx_podcast.plugin b/nikola/plugins/compile/markdown/mdx_podcast.plugin
index c4ee7e9..df5260d 100644
--- a/nikola/plugins/compile/markdown/mdx_podcast.plugin
+++ b/nikola/plugins/compile/markdown/mdx_podcast.plugin
@@ -4,7 +4,7 @@ module = mdx_podcast
[Nikola]
compiler = markdown
-plugincategory = CompilerExtension
+PluginCategory = CompilerExtension
[Documentation]
author = Roberto Alsina
diff --git a/nikola/plugins/compile/markdown/mdx_podcast.py b/nikola/plugins/compile/markdown/mdx_podcast.py
index 96a70ed..5090407 100644
--- a/nikola/plugins/compile/markdown/mdx_podcast.py
+++ b/nikola/plugins/compile/markdown/mdx_podcast.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# Copyright © 2013-2016 Michael Rabbitt, Roberto Alsina and others.
+# Copyright © 2013-2020 Michael Rabbitt, Roberto Alsina and others.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
@@ -36,7 +36,6 @@ Basic Example:
<p><audio controls=""><source src="https://archive.org/download/Rebeldes_Stereotipos/rs20120609_1.mp3" type="audio/mpeg"></source></audio></p>
"""
-from __future__ import print_function, unicode_literals
from nikola.plugin_categories import MarkdownExtension
try:
from markdown.extensions import Extension
@@ -69,7 +68,7 @@ class PodcastPattern(Pattern):
class PodcastExtension(MarkdownExtension, Extension):
- """"Podcast extension for Markdown."""
+ """Podcast extension for Markdown."""
def __init__(self, configs={}):
"""Initialize extension."""
@@ -80,11 +79,11 @@ class PodcastExtension(MarkdownExtension, Extension):
for key, value in configs:
self.setConfig(key, value)
- def extendMarkdown(self, md, md_globals):
+ def extendMarkdown(self, md, md_globals=None):
"""Extend Markdown."""
podcast_md_pattern = PodcastPattern(PODCAST_RE, self.getConfigs())
podcast_md_pattern.md = md
- md.inlinePatterns.add('podcast', podcast_md_pattern, "<not_strong")
+ md.inlinePatterns.register(podcast_md_pattern, 'podcast', 175)
md.registerExtension(self)
@@ -92,6 +91,7 @@ def makeExtension(configs=None): # pragma: no cover
"""Make Markdown extension."""
return PodcastExtension(configs)
+
if __name__ == '__main__':
import doctest
doctest.testmod(optionflags=(doctest.NORMALIZE_WHITESPACE +
diff --git a/nikola/plugins/compile/pandoc.plugin b/nikola/plugins/compile/pandoc.plugin
index 2a69095..8f339e4 100644
--- a/nikola/plugins/compile/pandoc.plugin
+++ b/nikola/plugins/compile/pandoc.plugin
@@ -9,5 +9,5 @@ website = https://getnikola.com/
description = Compile markups into HTML using pandoc
[Nikola]
-plugincategory = Compiler
+PluginCategory = Compiler
friendlyname = Pandoc
diff --git a/nikola/plugins/compile/pandoc.py b/nikola/plugins/compile/pandoc.py
index 2368ae9..af14344 100644
--- a/nikola/plugins/compile/pandoc.py
+++ b/nikola/plugins/compile/pandoc.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -24,12 +24,11 @@
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-"""Implementation of compile_html based on pandoc.
+"""Page compiler plugin for pandoc.
You will need, of course, to install pandoc
"""
-from __future__ import unicode_literals
import io
import os
@@ -48,25 +47,21 @@ class CompilePandoc(PageCompiler):
def set_site(self, site):
"""Set Nikola site."""
self.config_dependencies = [str(site.config['PANDOC_OPTIONS'])]
- super(CompilePandoc, self).set_site(site)
+ super().set_site(site)
- def compile_html(self, source, dest, is_two_file=True):
- """Compile source file into HTML and save as dest."""
+ def compile(self, source, dest, is_two_file=True, post=None, lang=None):
+ """Compile the source file into HTML and save as dest."""
makedirs(os.path.dirname(dest))
try:
- try:
- post = self.site.post_per_input_file[source]
- except KeyError:
- post = None
subprocess.check_call(['pandoc', '-o', dest, source] + self.site.config['PANDOC_OPTIONS'])
- with open(dest, 'r', encoding='utf-8') as inf:
- output, shortcode_deps = self.site.apply_shortcodes(inf.read(), with_dependencies=True)
+ with open(dest, 'r', encoding='utf-8-sig') as inf:
+ output, shortcode_deps = self.site.apply_shortcodes(inf.read())
with open(dest, 'w', encoding='utf-8') as outf:
outf.write(output)
if post is None:
if shortcode_deps:
self.logger.error(
- "Cannot save dependencies for post {0} due to unregistered source file name",
+ "Cannot save dependencies for post {0} (post unknown)",
source)
else:
post._depfile[dest] += shortcode_deps
@@ -74,6 +69,10 @@ class CompilePandoc(PageCompiler):
if e.strerror == 'No such file or directory':
req_missing(['pandoc'], 'build this site (compile with pandoc)', python=False)
+ def compile_string(self, data, source_path=None, is_two_file=True, post=None, lang=None):
+ """Compile into HTML strings."""
+ raise ValueError("Pandoc compiler does not support compile_string due to multiple output formats")
+
def create_post(self, path, **kw):
"""Create a new post."""
content = kw.pop('content', None)
@@ -88,7 +87,5 @@ class CompilePandoc(PageCompiler):
content += '\n'
with io.open(path, "w+", encoding="utf8") as fd:
if onefile:
- fd.write('<!--\n')
- fd.write(write_metadata(metadata))
- fd.write('-->\n\n')
+ fd.write(write_metadata(metadata, comment_wrap=True, site=self.site, compiler=self))
fd.write(content)
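
For context, CompilePandoc above is a thin wrapper around the pandoc command line; the subprocess call it issues is equivalent to running something like the following, where the file names and the --toc option are only illustrative and the extra arguments come from PANDOC_OPTIONS in conf.py:

    import subprocess

    # Roughly: pandoc -o output.html post.md --toc
    subprocess.check_call(['pandoc', '-o', 'output.html', 'post.md', '--toc'])
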
diff --git a/nikola/plugins/compile/php.plugin b/nikola/plugins/compile/php.plugin
index f4fb0c1..13384bd 100644
--- a/nikola/plugins/compile/php.plugin
+++ b/nikola/plugins/compile/php.plugin
@@ -9,5 +9,5 @@ website = https://getnikola.com/
description = Compile PHP into HTML (just copy and name the file .php)
[Nikola]
-plugincategory = Compiler
+PluginCategory = Compiler
friendlyname = PHP
diff --git a/nikola/plugins/compile/php.py b/nikola/plugins/compile/php.py
index d2559fd..818e10d 100644
--- a/nikola/plugins/compile/php.py
+++ b/nikola/plugins/compile/php.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -24,16 +24,14 @@
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-"""Implementation of compile_html for HTML+php."""
+"""Page compiler plugin for PHP."""
-from __future__ import unicode_literals
-
-import os
import io
+import os
+from hashlib import md5
from nikola.plugin_categories import PageCompiler
from nikola.utils import makedirs, write_metadata
-from hashlib import md5
class CompilePhp(PageCompiler):
@@ -42,8 +40,8 @@ class CompilePhp(PageCompiler):
name = "php"
friendly_name = "PHP"
- def compile_html(self, source, dest, is_two_file=True):
- """Compile source file into HTML and save as dest."""
+ def compile(self, source, dest, is_two_file=True, post=None, lang=None):
+ """Compile the source file into HTML and save as dest."""
makedirs(os.path.dirname(dest))
with io.open(dest, "w+", encoding="utf8") as out_file:
with open(source, "rb") as in_file:
@@ -51,6 +49,10 @@ class CompilePhp(PageCompiler):
out_file.write('<!-- __NIKOLA_PHP_TEMPLATE_INJECTION source:{0} checksum:{1}__ -->'.format(source, hash))
return True
+ def compile_string(self, data, source_path=None, is_two_file=True, post=None, lang=None):
+ """Compile PHP into HTML strings."""
+ return data, []
+
def create_post(self, path, **kw):
"""Create a new post."""
content = kw.pop('content', None)
@@ -76,9 +78,7 @@ class CompilePhp(PageCompiler):
content += '\n'
with io.open(path, "w+", encoding="utf8") as fd:
if onefile:
- fd.write('<!--\n')
- fd.write(write_metadata(metadata))
- fd.write('-->\n\n')
+ fd.write(write_metadata(metadata, comment_wrap=True, site=self.site, compiler=self))
fd.write(content)
def extension(self):
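
The PHP compiler copies the source through untouched and only writes a __NIKOLA_PHP_TEMPLATE_INJECTION marker carrying the source path and a checksum. Judging by the md5 import, the elided checksum line presumably hashes the raw file bytes, along these lines (the file name is illustrative):

    from hashlib import md5

    # Presumed shape of the checksum computation for the injection marker.
    with open('page.php', 'rb') as in_file:
        checksum = md5(in_file.read()).hexdigest()
    print(checksum)
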
diff --git a/nikola/plugins/compile/rest.plugin b/nikola/plugins/compile/rest.plugin
index 4d9041a..43bdf2d 100644
--- a/nikola/plugins/compile/rest.plugin
+++ b/nikola/plugins/compile/rest.plugin
@@ -6,8 +6,8 @@ module = rest
author = Roberto Alsina
version = 1.0
website = https://getnikola.com/
-description = Compile reSt into HTML
+description = Compile reST into HTML
[Nikola]
-plugincategory = Compiler
+PluginCategory = Compiler
friendlyname = reStructuredText
diff --git a/nikola/plugins/compile/rest/__init__.py b/nikola/plugins/compile/rest/__init__.py
index b75849f..44da076 100644
--- a/nikola/plugins/compile/rest/__init__.py
+++ b/nikola/plugins/compile/rest/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,28 +26,28 @@
"""reStructuredText compiler for Nikola."""
-from __future__ import unicode_literals
import io
+import logging
import os
import docutils.core
import docutils.nodes
+import docutils.transforms
import docutils.utils
import docutils.io
import docutils.readers.standalone
-import docutils.writers.html4css1
+import docutils.writers.html5_polyglot
import docutils.parsers.rst.directives
from docutils.parsers.rst import roles
from nikola.nikola import LEGAL_VALUES
+from nikola.metadata_extractors import MetaCondition
from nikola.plugin_categories import PageCompiler
from nikola.utils import (
- unicode_str,
- get_logger,
makedirs,
write_metadata,
- STDERR_HANDLER,
- LocaleBorg
+ LocaleBorg,
+ map_metadata
)
@@ -58,15 +58,57 @@ class CompileRest(PageCompiler):
friendly_name = "reStructuredText"
demote_headers = True
logger = None
-
- def compile_html_string(self, data, source_path=None, is_two_file=True):
+ supports_metadata = True
+ metadata_conditions = [(MetaCondition.config_bool, "USE_REST_DOCINFO_METADATA")]
+
+ def read_metadata(self, post, lang=None):
+ """Read the metadata from a post, and return a metadata dict."""
+ if lang is None:
+ lang = LocaleBorg().current_lang
+ source_path = post.translated_source_path(lang)
+
+ # Silence reST errors, some of which are due to a different
+ # environment. Real issues will be reported while compiling.
+ null_logger = logging.getLogger('NULL')
+ null_logger.setLevel(1000)
+ with io.open(source_path, 'r', encoding='utf-8-sig') as inf:
+ data = inf.read()
+ _, _, _, document = rst2html(data, logger=null_logger, source_path=source_path, transforms=self.site.rst_transforms)
+ meta = {}
+ if 'title' in document:
+ meta['title'] = document['title']
+ for docinfo in document.traverse(docutils.nodes.docinfo):
+ for element in docinfo.children:
+ if element.tagname == 'field': # custom fields (e.g. summary)
+ name_elem, body_elem = element.children
+ name = name_elem.astext()
+ value = body_elem.astext()
+ elif element.tagname == 'authors': # author list
+ name = element.tagname
+ value = [element.astext() for element in element.children]
+ else: # standard fields (e.g. address)
+ name = element.tagname
+ value = element.astext()
+ name = name.lower()
+
+ meta[name] = value
+
+ # Put 'authors' meta field contents in 'author', too
+ if 'authors' in meta and 'author' not in meta:
+ meta['author'] = '; '.join(meta['authors'])
+
+ # Map metadata from other platforms to names Nikola expects (Issue #2817)
+ map_metadata(meta, 'rest_docinfo', self.site.config)
+ return meta
+
+ def compile_string(self, data, source_path=None, is_two_file=True, post=None, lang=None):
"""Compile reST into HTML strings."""
# If errors occur, this will be added to the line number reported by
# docutils so the line number matches the actual line number (off by
# 7 with default metadata, could be more or less depending on the post).
add_ln = 0
if not is_two_file:
- m_data, data = self.split_metadata(data)
+ m_data, data = self.split_metadata(data, post, lang)
add_ln = len(m_data.splitlines()) + 1
default_template_path = os.path.join(os.path.dirname(__file__), 'template.txt')
@@ -76,38 +118,42 @@ class CompileRest(PageCompiler):
'stylesheet_path': None,
'link_stylesheet': True,
'syntax_highlight': 'short',
- 'math_output': 'mathjax',
+ # This path is not used by Nikola, but we need something to silence
+ # warnings about it from reST.
+ 'math_output': 'mathjax /assets/js/mathjax.js',
'template': default_template_path,
- 'language_code': LEGAL_VALUES['DOCUTILS_LOCALES'].get(LocaleBorg().current_lang, 'en')
+ 'language_code': LEGAL_VALUES['DOCUTILS_LOCALES'].get(LocaleBorg().current_lang, 'en'),
+ 'doctitle_xform': self.site.config.get('USE_REST_DOCINFO_METADATA'),
+ 'file_insertion_enabled': self.site.config.get('REST_FILE_INSERTION_ENABLED'),
}
- output, error_level, deps = rst2html(
- data, settings_overrides=settings_overrides, logger=self.logger, source_path=source_path, l_add_ln=add_ln, transforms=self.site.rst_transforms,
- no_title_transform=self.site.config.get('NO_DOCUTILS_TITLE_TRANSFORM', False))
- if not isinstance(output, unicode_str):
+ from nikola import shortcodes as sc
+ new_data, shortcodes = sc.extract_shortcodes(data)
+ if self.site.config.get('HIDE_REST_DOCINFO', False):
+ self.site.rst_transforms.append(RemoveDocinfo)
+ output, error_level, deps, _ = rst2html(
+ new_data, settings_overrides=settings_overrides, logger=self.logger, source_path=source_path, l_add_ln=add_ln, transforms=self.site.rst_transforms)
+ if not isinstance(output, str):
# To prevent some weird bugs here or there.
# Original issue: empty files. `output` became a bytestring.
output = output.decode('utf-8')
- return output, error_level, deps
- def compile_html(self, source, dest, is_two_file=True):
- """Compile source file into HTML and save as dest."""
+ output, shortcode_deps = self.site.apply_shortcodes_uuid(output, shortcodes, filename=source_path, extra_context={'post': post})
+ return output, error_level, deps, shortcode_deps
+
+ def compile(self, source, dest, is_two_file=True, post=None, lang=None):
+ """Compile the source file into HTML and save as dest."""
makedirs(os.path.dirname(dest))
error_level = 100
- with io.open(dest, "w+", encoding="utf8") as out_file:
- try:
- post = self.site.post_per_input_file[source]
- except KeyError:
- post = None
- with io.open(source, "r", encoding="utf8") as in_file:
+ with io.open(dest, "w+", encoding="utf-8") as out_file:
+ with io.open(source, "r", encoding="utf-8-sig") as in_file:
data = in_file.read()
- output, error_level, deps = self.compile_html_string(data, source, is_two_file)
- output, shortcode_deps = self.site.apply_shortcodes(output, filename=source, with_dependencies=True, extra_context=dict(post=post))
+ output, error_level, deps, shortcode_deps = self.compile_string(data, source, is_two_file, post, lang)
out_file.write(output)
if post is None:
if deps.list:
self.logger.error(
- "Cannot save dependencies for post {0} due to unregistered source file name",
+ "Cannot save dependencies for post {0} (post unknown)",
source)
else:
post._depfile[dest] += deps.list
@@ -129,23 +175,21 @@ class CompileRest(PageCompiler):
makedirs(os.path.dirname(path))
if not content.endswith('\n'):
content += '\n'
- with io.open(path, "w+", encoding="utf8") as fd:
+ with io.open(path, "w+", encoding="utf-8") as fd:
if onefile:
- fd.write(write_metadata(metadata))
- fd.write('\n')
+ fd.write(write_metadata(metadata, comment_wrap=False, site=self.site, compiler=self))
fd.write(content)
def set_site(self, site):
"""Set Nikola site."""
- super(CompileRest, self).set_site(site)
+ super().set_site(site)
self.config_dependencies = []
for plugin_info in self.get_compiler_extensions():
self.config_dependencies.append(plugin_info.name)
plugin_info.plugin_object.short_help = plugin_info.description
- self.logger = get_logger('compile_rest', STDERR_HANDLER)
if not site.debug:
- self.logger.level = 4
+ self.logger.level = logging.WARNING
def get_observer(settings):
@@ -155,19 +199,25 @@ def get_observer(settings):
Error code mapping:
- +------+---------+------+----------+
- | dNUM | dNAME | lNUM | lNAME | d = docutils, l = logbook
- +------+---------+------+----------+
- | 0 | DEBUG | 1 | DEBUG |
- | 1 | INFO | 2 | INFO |
- | 2 | WARNING | 4 | WARNING |
- | 3 | ERROR | 5 | ERROR |
- | 4 | SEVERE | 6 | CRITICAL |
- +------+---------+------+----------+
+ +----------+----------+
+ | docutils | logging |
+ +----------+----------+
+ | DEBUG | DEBUG |
+ | INFO | INFO |
+ | WARNING | WARNING |
+ | ERROR | ERROR |
+ | SEVERE | CRITICAL |
+ +----------+----------+
"""
- errormap = {0: 1, 1: 2, 2: 4, 3: 5, 4: 6}
+ errormap = {
+ docutils.utils.Reporter.DEBUG_LEVEL: logging.DEBUG,
+ docutils.utils.Reporter.INFO_LEVEL: logging.INFO,
+ docutils.utils.Reporter.WARNING_LEVEL: logging.WARNING,
+ docutils.utils.Reporter.ERROR_LEVEL: logging.ERROR,
+ docutils.utils.Reporter.SEVERE_LEVEL: logging.CRITICAL
+ }
text = docutils.nodes.Element.astext(msg)
- line = msg['line'] + settings['add_ln'] if 'line' in msg else 0
+ line = msg['line'] + settings['add_ln'] if 'line' in msg else ''
out = '[{source}{colon}{line}] {text}'.format(
source=settings['source'], colon=(':' if line else ''),
line=line, text=text)
@@ -179,32 +229,32 @@ def get_observer(settings):
class NikolaReader(docutils.readers.standalone.Reader):
"""Nikola-specific docutils reader."""
+ config_section = 'nikola'
+
def __init__(self, *args, **kwargs):
"""Initialize the reader."""
self.transforms = kwargs.pop('transforms', [])
- self.no_title_transform = kwargs.pop('no_title_transform', False)
+ self.logging_settings = kwargs.pop('nikola_logging_settings', {})
docutils.readers.standalone.Reader.__init__(self, *args, **kwargs)
def get_transforms(self):
"""Get docutils transforms."""
- transforms = docutils.readers.standalone.Reader(self).get_transforms() + self.transforms
- if self.no_title_transform:
- transforms = [t for t in transforms if str(t) != "<class 'docutils.transforms.frontmatter.DocTitle'>"]
- return transforms
+ return docutils.readers.standalone.Reader(self).get_transforms() + self.transforms
def new_document(self):
"""Create and return a new empty document tree (root node)."""
document = docutils.utils.new_document(self.source.source_path, self.settings)
document.reporter.stream = False
- document.reporter.attach_observer(get_observer(self.l_settings))
+ document.reporter.attach_observer(get_observer(self.logging_settings))
return document
def shortcode_role(name, rawtext, text, lineno, inliner,
options={}, content=[]):
- """A shortcode role that passes through raw inline HTML."""
+ """Return a shortcode role that passes through raw inline HTML."""
return [docutils.nodes.raw('', text, format='html')], []
+
roles.register_canonical_role('raw-html', shortcode_role)
roles.register_canonical_role('html', shortcode_role)
roles.register_canonical_role('sc', shortcode_role)
@@ -226,7 +276,7 @@ def add_node(node, visit_function=None, depart_function=None):
self.site = site
directives.register_directive('math', MathDirective)
add_node(MathBlock, visit_Math, depart_Math)
- return super(Plugin, self).set_site(site)
+ return super().set_site(site)
class MathDirective(Directive):
def run(self):
@@ -245,18 +295,53 @@ def add_node(node, visit_function=None, depart_function=None):
"""
docutils.nodes._add_node_class_names([node.__name__])
if visit_function:
- setattr(docutils.writers.html4css1.HTMLTranslator, 'visit_' + node.__name__, visit_function)
+ setattr(docutils.writers.html5_polyglot.HTMLTranslator, 'visit_' + node.__name__, visit_function)
if depart_function:
- setattr(docutils.writers.html4css1.HTMLTranslator, 'depart_' + node.__name__, depart_function)
+ setattr(docutils.writers.html5_polyglot.HTMLTranslator, 'depart_' + node.__name__, depart_function)
+
+
+# Output <code> for ``double backticks``. (Code and extra logic based on html4css1 translator)
+def visit_literal(self, node):
+ """Output <code> for double backticks."""
+ # special case: "code" role
+ classes = node.get('classes', [])
+ if 'code' in classes:
+ # filter 'code' from class arguments
+ node['classes'] = [cls for cls in classes if cls != 'code']
+ self.body.append(self.starttag(node, 'code', ''))
+ return
+ self.body.append(
+ self.starttag(node, 'code', '', CLASS='docutils literal'))
+ text = node.astext()
+ for token in self.words_and_spaces.findall(text):
+ if token.strip():
+ # Protect text like "--an-option" and the regular expression
+ # ``[+]?(\d+(\.\d*)?|\.\d+)`` from bad line wrapping
+ if self.in_word_wrap_point.search(token):
+ self.body.append('<span class="pre">%s</span>'
+ % self.encode(token))
+ else:
+ self.body.append(self.encode(token))
+ elif token in ('\n', ' '):
+ # Allow breaks at whitespace:
+ self.body.append(token)
+ else:
+ # Protect runs of multiple spaces; the last space can wrap:
+ self.body.append('&nbsp;' * (len(token) - 1) + ' ')
+ self.body.append('</code>')
+ # Content already processed:
+ raise docutils.nodes.SkipNode
+
+
+setattr(docutils.writers.html5_polyglot.HTMLTranslator, 'visit_literal', visit_literal)
def rst2html(source, source_path=None, source_class=docutils.io.StringInput,
destination_path=None, reader=None,
parser=None, parser_name='restructuredtext', writer=None,
- writer_name='html', settings=None, settings_spec=None,
- settings_overrides=None, config_section=None,
- enable_exit_status=None, logger=None, l_add_ln=0, transforms=None,
- no_title_transform=False):
+ writer_name='html5_polyglot', settings=None, settings_spec=None,
+ settings_overrides=None, config_section='nikola',
+ enable_exit_status=None, logger=None, l_add_ln=0, transforms=None):
"""Set up & run a ``Publisher``, and return a dictionary of document parts.
Dictionary keys are the names of parts, and values are Unicode strings;
@@ -268,20 +353,22 @@ def rst2html(source, source_path=None, source_class=docutils.io.StringInput,
publish_parts(..., settings_overrides={'input_encoding': 'unicode'})
- Parameters: see `publish_programmatically`.
+ For a description of the parameters, see `publish_programmatically`.
WARNING: `reader` should be None (or NikolaReader()) if you want Nikola to report
reStructuredText syntax errors.
"""
if reader is None:
- reader = NikolaReader(transforms=transforms, no_title_transform=no_title_transform)
# For our custom logging, we have special needs and special settings we
# specify here.
# logger a logger from Nikola
# source source filename (docutils gets a string)
- # add_ln amount of metadata lines (see comment in compile_html above)
- reader.l_settings = {'logger': logger, 'source': source_path,
- 'add_ln': l_add_ln}
+ # add_ln amount of metadata lines (see comment in CompileRest.compile above)
+ reader = NikolaReader(transforms=transforms,
+ nikola_logging_settings={
+ 'logger': logger, 'source': source_path,
+ 'add_ln': l_add_ln
+ })
pub = docutils.core.Publisher(reader, parser, writer, settings=settings,
source_class=source_class,
@@ -294,7 +381,8 @@ def rst2html(source, source_path=None, source_class=docutils.io.StringInput,
pub.set_destination(None, destination_path)
pub.publish(enable_exit_status=enable_exit_status)
- return pub.writer.parts['docinfo'] + pub.writer.parts['fragment'], pub.document.reporter.max_level, pub.settings.record_dependencies
+ return pub.writer.parts['docinfo'] + pub.writer.parts['fragment'], pub.document.reporter.max_level, pub.settings.record_dependencies, pub.document
+
# Alignment helpers for extensions
_align_options_base = ('left', 'center', 'right')
@@ -302,3 +390,14 @@ _align_options_base = ('left', 'center', 'right')
def _align_choice(argument):
return docutils.parsers.rst.directives.choice(argument, _align_options_base + ("none", ""))
+
+
+class RemoveDocinfo(docutils.transforms.Transform):
+ """Remove docinfo nodes."""
+
+ default_priority = 870
+
+ def apply(self):
+ """Remove docinfo nodes."""
+ for node in self.document.traverse(docutils.nodes.docinfo):
+ node.parent.remove(node)
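
The reST compiler now targets docutils' html5_polyglot writer instead of html4css1 and passes its options as settings_overrides. Stripped of Nikola's custom reader, logging observer and transforms, the underlying docutils call is conceptually similar to this self-contained sketch:

    import docutils.core

    parts = docutils.core.publish_parts(
        source='Hello, *docutils*!',
        writer_name='html5_polyglot',
        settings_overrides={'syntax_highlight': 'short'})
    print(parts['fragment'])
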
diff --git a/nikola/plugins/compile/rest/chart.plugin b/nikola/plugins/compile/rest/chart.plugin
index 0a7896f..4434477 100644
--- a/nikola/plugins/compile/rest/chart.plugin
+++ b/nikola/plugins/compile/rest/chart.plugin
@@ -4,7 +4,7 @@ module = chart
[Nikola]
compiler = rest
-plugincategory = CompilerExtension
+PluginCategory = CompilerExtension
[Documentation]
author = Roberto Alsina
diff --git a/nikola/plugins/compile/rest/chart.py b/nikola/plugins/compile/rest/chart.py
index 24f459b..17363cb 100644
--- a/nikola/plugins/compile/rest/chart.py
+++ b/nikola/plugins/compile/rest/chart.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -23,21 +23,17 @@
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
"""Chart directive for reSTructuredText."""
-from ast import literal_eval
-
from docutils import nodes
from docutils.parsers.rst import Directive, directives
+from nikola.plugin_categories import RestExtension
+
try:
import pygal
except ImportError:
- pygal = None # NOQA
-
-from nikola.plugin_categories import RestExtension
-from nikola.utils import req_missing
+ pygal = None
_site = None
@@ -52,8 +48,7 @@ class Plugin(RestExtension):
global _site
_site = self.site = site
directives.register_directive('chart', Chart)
- self.site.register_shortcode('chart', _gen_chart)
- return super(Plugin, self).set_site(site)
+ return super().set_site(site)
class Chart(Directive):
@@ -77,6 +72,7 @@ class Chart(Directive):
"classes": directives.unchanged,
"css": directives.unchanged,
"defs": directives.unchanged,
+ "data_file": directives.unchanged,
"disable_xml_declaration": directives.unchanged,
"dots_size": directives.unchanged,
"dynamic_print_values": directives.unchanged,
@@ -157,41 +153,9 @@ class Chart(Directive):
def run(self):
"""Run the directive."""
self.options['site'] = None
- html = _gen_chart(self.arguments[0], data='\n'.join(self.content), **self.options)
+ html = _site.plugin_manager.getPluginByName(
+ 'chart', 'ShortcodePlugin').plugin_object.handler(
+ self.arguments[0],
+ data='\n'.join(self.content),
+ **self.options)
return [nodes.raw('', html, format='html')]
-
-
-def _gen_chart(chart_type, **_options):
- if pygal is None:
- msg = req_missing(['pygal'], 'use the Chart directive', optional=True)
- return '<div class="text-error">{0}</div>'.format(msg)
- options = {}
- data = _options.pop('data')
- _options.pop('post', None)
- _options.pop('site')
- if 'style' in _options:
- style_name = _options.pop('style')
- else:
- style_name = 'BlueStyle'
- if '(' in style_name: # Parametric style
- style = eval('pygal.style.' + style_name)
- else:
- style = getattr(pygal.style, style_name)
- for k, v in _options.items():
- try:
- options[k] = literal_eval(v)
- except:
- options[k] = v
- chart = pygal
- for o in chart_type.split('.'):
- chart = getattr(chart, o)
- chart = chart(style=style)
- if _site and _site.invariant:
- chart.no_prefix = True
- chart.config(**options)
- for line in data.splitlines():
- line = line.strip()
- if line:
- label, series = literal_eval('({0})'.format(line))
- chart.add(label, series)
- return chart.render().decode('utf8')
diff --git a/nikola/plugins/compile/rest/doc.plugin b/nikola/plugins/compile/rest/doc.plugin
index e447eb2..3b5c9c7 100644
--- a/nikola/plugins/compile/rest/doc.plugin
+++ b/nikola/plugins/compile/rest/doc.plugin
@@ -4,7 +4,7 @@ module = doc
[Nikola]
compiler = rest
-plugincategory = CompilerExtension
+PluginCategory = CompilerExtension
[Documentation]
author = Manuel Kaufmann
diff --git a/nikola/plugins/compile/rest/doc.py b/nikola/plugins/compile/rest/doc.py
index 55f576d..705c0bc 100644
--- a/nikola/plugins/compile/rest/doc.py
+++ b/nikola/plugins/compile/rest/doc.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -29,7 +29,7 @@
from docutils import nodes
from docutils.parsers.rst import roles
-from nikola.utils import split_explicit_title, LOGGER
+from nikola.utils import split_explicit_title, LOGGER, slugify
from nikola.plugin_categories import RestExtension
@@ -44,14 +44,11 @@ class Plugin(RestExtension):
roles.register_canonical_role('doc', doc_role)
self.site.register_shortcode('doc', doc_shortcode)
doc_role.site = site
- return super(Plugin, self).set_site(site)
+ return super().set_site(site)
-def _doc_link(rawtext, text, options={}, content=[]):
- """Handle the doc role."""
- # split link's text and post's slug in role content
- has_explicit_title, title, slug = split_explicit_title(text)
- # check if the slug given is part of our blog posts/pages
+def _find_post(slug):
+ """Find a post with the given slug in posts or pages."""
twin_slugs = False
post = None
for p in doc_role.site.timeline:
@@ -61,10 +58,27 @@ def _doc_link(rawtext, text, options={}, content=[]):
else:
twin_slugs = True
break
+ return post, twin_slugs
+
+
+def _doc_link(rawtext, text, options={}, content=[]):
+ """Handle the doc role."""
+ # split link's text and post's slug in role content
+ has_explicit_title, title, slug = split_explicit_title(text)
+ if '#' in slug:
+ slug, fragment = slug.split('#', 1)
+ else:
+ fragment = None
+
+ # Look for the unslugified input first, then try to slugify (Issue #3450)
+ post, twin_slugs = _find_post(slug)
+ if post is None:
+ slug = slugify(slug)
+ post, twin_slugs = _find_post(slug)
try:
if post is None:
- raise ValueError
+ raise ValueError("No post with matching slug found.")
except ValueError:
return False, False, None, None, slug
@@ -72,6 +86,8 @@ def _doc_link(rawtext, text, options={}, content=[]):
# use post's title as link's text
title = post.title()
permalink = post.permalink()
+ if fragment:
+ permalink += '#' + fragment
return True, twin_slugs, title, permalink, slug
@@ -83,7 +99,7 @@ def doc_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
if twin_slugs:
inliner.reporter.warning(
'More than one post with the same slug. Using "{0}"'.format(permalink))
- LOGGER.warn(
+ LOGGER.warning(
'More than one post with the same slug. Using "{0}" for doc role'.format(permalink))
node = make_link_node(rawtext, title, permalink, options)
return [node], []
@@ -101,7 +117,7 @@ def doc_shortcode(*args, **kwargs):
success, twin_slugs, title, permalink, slug = _doc_link(text, text, LOGGER)
if success:
if twin_slugs:
- LOGGER.warn(
+ LOGGER.warning(
'More than one post with the same slug. Using "{0}" for doc shortcode'.format(permalink))
return '<a href="{0}">{1}</a>'.format(permalink, title)
else:
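
The reworked doc role above splits an optional '#fragment' off the slug, looks the post up by the raw slug first, and only then retries with a slugified version (Issue #3450), re-appending the fragment to the resolved permalink. The slug/fragment split reduces to a one-liner; a tiny sketch with an illustrative input:

    def split_slug(text):
        # Keep everything after the first '#' as a fragment, if present.
        slug, sep, fragment = text.partition('#')
        return slug, (fragment if sep else None)

    print(split_slug('creating-a-theme#sidebar'))  # ('creating-a-theme', 'sidebar')
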
diff --git a/nikola/plugins/compile/rest/gist.plugin b/nikola/plugins/compile/rest/gist.plugin
index 763c1d2..4a8a3a7 100644
--- a/nikola/plugins/compile/rest/gist.plugin
+++ b/nikola/plugins/compile/rest/gist.plugin
@@ -4,7 +4,7 @@ module = gist
[Nikola]
compiler = rest
-plugincategory = CompilerExtension
+PluginCategory = CompilerExtension
[Documentation]
author = Roberto Alsina
diff --git a/nikola/plugins/compile/rest/gist.py b/nikola/plugins/compile/rest/gist.py
index e40c3b2..08aa46d 100644
--- a/nikola/plugins/compile/rest/gist.py
+++ b/nikola/plugins/compile/rest/gist.py
@@ -19,7 +19,7 @@ class Plugin(RestExtension):
"""Set Nikola site."""
self.site = site
directives.register_directive('gist', GitHubGist)
- return super(Plugin, self).set_site(site)
+ return super().set_site(site)
class GitHubGist(Directive):
diff --git a/nikola/plugins/compile/rest/listing.plugin b/nikola/plugins/compile/rest/listing.plugin
index 3ebb296..5239f92 100644
--- a/nikola/plugins/compile/rest/listing.plugin
+++ b/nikola/plugins/compile/rest/listing.plugin
@@ -4,7 +4,7 @@ module = listing
[Nikola]
compiler = rest
-plugincategory = CompilerExtension
+PluginCategory = CompilerExtension
[Documentation]
author = Roberto Alsina
diff --git a/nikola/plugins/compile/rest/listing.py b/nikola/plugins/compile/rest/listing.py
index 4dfbedc..e5a73fa 100644
--- a/nikola/plugins/compile/rest/listing.py
+++ b/nikola/plugins/compile/rest/listing.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -28,26 +28,21 @@
"""Define and register a listing directive using the existing CodeBlock."""
-from __future__ import unicode_literals
import io
import os
import uuid
-try:
- from urlparse import urlunsplit
-except ImportError:
- from urllib.parse import urlunsplit # NOQA
+from urllib.parse import urlunsplit
import docutils.parsers.rst.directives.body
import docutils.parsers.rst.directives.misc
+import pygments
+import pygments.util
from docutils import core
from docutils import nodes
from docutils.parsers.rst import Directive, directives
from docutils.parsers.rst.roles import set_classes
from docutils.parsers.rst.directives.misc import Include
-
from pygments.lexers import get_lexer_by_name
-import pygments
-import pygments.util
from nikola import utils
from nikola.plugin_categories import RestExtension
@@ -119,6 +114,7 @@ class CodeBlock(Directive):
return [node]
+
# Monkey-patch: replace insane docutils CodeBlock with our implementation.
docutils.parsers.rst.directives.body.CodeBlock = CodeBlock
docutils.parsers.rst.directives.misc.CodeBlock = CodeBlock
@@ -142,7 +138,7 @@ class Plugin(RestExtension):
directives.register_directive('sourcecode', CodeBlock)
directives.register_directive('listing', Listing)
Listing.folders = site.config['LISTINGS_FOLDERS']
- return super(Plugin, self).set_site(site)
+ return super().set_site(site)
# Add sphinx compatibility option
@@ -186,7 +182,7 @@ class Listing(Include):
self.arguments.insert(0, fpath)
if 'linenos' in self.options:
self.options['number-lines'] = self.options['linenos']
- with io.open(fpath, 'r+', encoding='utf8') as fileobject:
+ with io.open(fpath, 'r+', encoding='utf-8-sig') as fileobject:
self.content = fileobject.read().splitlines()
self.state.document.settings.record_dependencies.add(fpath)
target = urlunsplit(("link", 'listing', fpath.replace('\\', '/'), '', ''))
@@ -200,8 +196,11 @@ class Listing(Include):
def get_code_from_file(self, data):
"""Create CodeBlock nodes from file object content."""
- return super(Listing, self).run()
+ return super().run()
def assert_has_content(self):
- """Listing has no content, override check from superclass."""
+ """Override check from superclass with nothing.
+
+ Listing has no content, override check from superclass.
+ """
pass
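
The listing and code-block machinery above relies on pygments for highlighting, as the get_lexer_by_name import suggests. A defensive lookup in that spirit, falling back to plain text for languages pygments does not know (the fallback choice here is illustrative, not Nikola's exact behaviour):

    import pygments.util
    from pygments.lexers import get_lexer_by_name

    def find_lexer(language):
        # Unknown languages raise ClassNotFound; degrade to the plain-text lexer.
        try:
            return get_lexer_by_name(language)
        except pygments.util.ClassNotFound:
            return get_lexer_by_name('text')

    print(find_lexer('python'))
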
diff --git a/nikola/plugins/compile/rest/media.plugin b/nikola/plugins/compile/rest/media.plugin
index 8dfb19c..396c2f9 100644
--- a/nikola/plugins/compile/rest/media.plugin
+++ b/nikola/plugins/compile/rest/media.plugin
@@ -4,7 +4,7 @@ module = media
[Nikola]
compiler = rest
-plugincategory = CompilerExtension
+PluginCategory = CompilerExtension
[Documentation]
author = Roberto Alsina
diff --git a/nikola/plugins/compile/rest/media.py b/nikola/plugins/compile/rest/media.py
index 8a69586..d29d0a2 100644
--- a/nikola/plugins/compile/rest/media.py
+++ b/nikola/plugins/compile/rest/media.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -29,14 +29,13 @@
from docutils import nodes
from docutils.parsers.rst import Directive, directives
+from nikola.plugin_categories import RestExtension
+from nikola.utils import req_missing
+
try:
import micawber
except ImportError:
- micawber = None # NOQA
-
-
-from nikola.plugin_categories import RestExtension
-from nikola.utils import req_missing
+ micawber = None
class Plugin(RestExtension):
@@ -49,7 +48,7 @@ class Plugin(RestExtension):
self.site = site
directives.register_directive('media', Media)
self.site.register_shortcode('media', _gen_media_embed)
- return super(Plugin, self).set_site(site)
+ return super().set_site(site)
class Media(Directive):
diff --git a/nikola/plugins/compile/rest/post_list.plugin b/nikola/plugins/compile/rest/post_list.plugin
index 1802f2b..68abaef 100644
--- a/nikola/plugins/compile/rest/post_list.plugin
+++ b/nikola/plugins/compile/rest/post_list.plugin
@@ -4,11 +4,11 @@ module = post_list
[Nikola]
compiler = rest
-plugincategory = CompilerExtension
+PluginCategory = CompilerExtension
[Documentation]
author = Udo Spallek
-version = 0.1
+version = 0.2
website = https://getnikola.com/
-description = Includes a list of posts with tag and slide based filters.
+description = Includes a list of posts with tag and slice based filters.
diff --git a/nikola/plugins/compile/rest/post_list.py b/nikola/plugins/compile/rest/post_list.py
index 8cfd5bf..f7e95ed 100644
--- a/nikola/plugins/compile/rest/post_list.py
+++ b/nikola/plugins/compile/rest/post_list.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2013-2016 Udo Spallek, Roberto Alsina and others.
+# Copyright © 2013-2020 Udo Spallek, Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -23,21 +23,13 @@
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
"""Post list directive for reStructuredText."""
-from __future__ import unicode_literals
-
-import os
-import uuid
-import natsort
-
from docutils import nodes
from docutils.parsers.rst import Directive, directives
from nikola import utils
from nikola.plugin_categories import RestExtension
-from nikola.packages.datecond import date_in_range
# WARNING: the directive name is post-list
# (with a DASH instead of an UNDERSCORE)
@@ -51,91 +43,14 @@ class Plugin(RestExtension):
def set_site(self, site):
"""Set Nikola site."""
self.site = site
- self.site.register_shortcode('post-list', _do_post_list)
- directives.register_directive('post-list', PostList)
- PostList.site = site
- return super(Plugin, self).set_site(site)
-
-
-class PostList(Directive):
- """Provide a reStructuredText directive to create a list of posts.
-
- Post List
- =========
- :Directive Arguments: None.
- :Directive Options: lang, start, stop, reverse, sort, date, tags, categories, sections, slugs, post_type, all, template, id
- :Directive Content: None.
-
- The posts appearing in the list can be filtered by options.
- *List slicing* is provided with the *start*, *stop* and *reverse* options.
-
- The following not required options are recognized:
-
- ``start`` : integer
- The index of the first post to show.
- A negative value like ``-3`` will show the *last* three posts in the
- post-list.
- Defaults to None.
-
- ``stop`` : integer
- The index of the last post to show.
- A value negative value like ``-1`` will show every post, but not the
- *last* in the post-list.
- Defaults to None.
-
- ``reverse`` : flag
- Reverse the order of the post-list.
- Defaults is to not reverse the order of posts.
-
- ``sort`` : string
- Sort post list by one of each post's attributes, usually ``title`` or a
- custom ``priority``. Defaults to None (chronological sorting).
+ directives.register_directive('post-list', PostListDirective)
+ directives.register_directive('post_list', PostListDirective)
+ PostListDirective.site = site
+ return super().set_site(site)
- ``date`` : string
- Show posts that match date range specified by this option. Format:
- * comma-separated clauses (AND)
- * clause: attribute comparison_operator value (spaces optional)
- * attribute: year, month, day, hour, month, second, weekday, isoweekday; or empty for full datetime
- * comparison_operator: == != <= >= < >
- * value: integer or dateutil-compatible date input
-
- ``tags`` : string [, string...]
- Filter posts to show only posts having at least one of the ``tags``.
- Defaults to None.
-
- ``categories`` : string [, string...]
- Filter posts to show only posts having one of the ``categories``.
- Defaults to None.
-
- ``sections`` : string [, string...]
- Filter posts to show only posts having one of the ``sections``.
- Defaults to None.
-
- ``slugs`` : string [, string...]
- Filter posts to show only posts having at least one of the ``slugs``.
- Defaults to None.
-
- ``post_type`` (or ``type``) : string
- Show only ``posts``, ``pages`` or ``all``.
- Replaces ``all``. Defaults to ``posts``.
-
- ``all`` : flag
- (deprecated, use ``post_type`` instead)
- Shows all posts and pages in the post list. Defaults to show only posts.
-
- ``lang`` : string
- The language of post *titles* and *links*.
- Defaults to default language.
-
- ``template`` : string
- The name of an alternative template to render the post-list.
- Defaults to ``post_list_directive.tmpl``
-
- ``id`` : string
- A manual id for the post list.
- Defaults to a random name composed by 'post_list_' + uuid.uuid4().hex.
- """
+class PostListDirective(Directive):
+ """Provide a reStructuredText directive to create a list of posts."""
option_spec = {
'start': int,
@@ -143,12 +58,12 @@ class PostList(Directive):
'reverse': directives.flag,
'sort': directives.unchanged,
'tags': directives.unchanged,
+ 'require_all_tags': directives.flag,
'categories': directives.unchanged,
'sections': directives.unchanged,
'slugs': directives.unchanged,
'post_type': directives.unchanged,
'type': directives.unchanged,
- 'all': directives.flag,
'lang': directives.unchanged,
'template': directives.path,
'id': directives.unchanged,
@@ -161,151 +76,42 @@ class PostList(Directive):
stop = self.options.get('stop')
reverse = self.options.get('reverse', False)
tags = self.options.get('tags')
+ require_all_tags = 'require_all_tags' in self.options
categories = self.options.get('categories')
sections = self.options.get('sections')
slugs = self.options.get('slugs')
post_type = self.options.get('post_type')
type = self.options.get('type', False)
- all = self.options.get('all', False)
lang = self.options.get('lang', utils.LocaleBorg().current_lang)
template = self.options.get('template', 'post_list_directive.tmpl')
sort = self.options.get('sort')
date = self.options.get('date')
-
- output, deps = _do_post_list(start, stop, reverse, tags, categories, sections, slugs, post_type, type,
- all, lang, template, sort, state=self.state, site=self.site, date=date)
- self.state.document.settings.record_dependencies.add("####MAGIC####TIMELINE")
+ filename = self.state.document.settings._nikola_source_path
+
+ output, deps = self.site.plugin_manager.getPluginByName(
+ 'post_list', 'ShortcodePlugin').plugin_object.handler(
+ start,
+ stop,
+ reverse,
+ tags,
+ require_all_tags,
+ categories,
+ sections,
+ slugs,
+ post_type,
+ type,
+ lang,
+ template,
+ sort,
+ state=self.state,
+ site=self.site,
+ date=date,
+ filename=filename)
+ self.state.document.settings.record_dependencies.add(
+ "####MAGIC####TIMELINE")
for d in deps:
self.state.document.settings.record_dependencies.add(d)
if output:
return [nodes.raw('', output, format='html')]
else:
return []
-
-
-def _do_post_list(start=None, stop=None, reverse=False, tags=None, categories=None,
- sections=None, slugs=None, post_type='post', type=False, all=False,
- lang=None, template='post_list_directive.tmpl', sort=None,
- id=None, data=None, state=None, site=None, date=None, filename=None, post=None):
- if lang is None:
- lang = utils.LocaleBorg().current_lang
- if site.invariant: # for testing purposes
- post_list_id = id or 'post_list_' + 'fixedvaluethatisnotauuid'
- else:
- post_list_id = id or 'post_list_' + uuid.uuid4().hex
-
- # Get post from filename if available
- if filename:
- self_post = site.post_per_input_file.get(filename)
- else:
- self_post = None
-
- if self_post:
- self_post.register_depfile("####MAGIC####TIMELINE", lang=lang)
-
- # If we get strings for start/stop, make them integers
- if start is not None:
- start = int(start)
- if stop is not None:
- stop = int(stop)
-
- # Parse tags/categories/sections/slugs (input is strings)
- tags = [t.strip().lower() for t in tags.split(',')] if tags else []
- categories = [c.strip().lower() for c in categories.split(',')] if categories else []
- sections = [s.strip().lower() for s in sections.split(',')] if sections else []
- slugs = [s.strip() for s in slugs.split(',')] if slugs else []
-
- filtered_timeline = []
- posts = []
- step = -1 if reverse is None else None
-
- if type is not False:
- post_type = type
-
- # TODO: remove in v8
- if all is not False:
- timeline = [p for p in site.timeline]
- elif post_type == 'page' or post_type == 'pages':
- timeline = [p for p in site.timeline if not p.use_in_feeds]
- elif post_type == 'all':
- timeline = [p for p in site.timeline]
- else: # post
- timeline = [p for p in site.timeline if p.use_in_feeds]
-
- # TODO: replaces all, uncomment in v8
- # if post_type == 'page' or post_type == 'pages':
- # timeline = [p for p in site.timeline if not p.use_in_feeds]
- # elif post_type == 'all':
- # timeline = [p for p in site.timeline]
- # else: # post
- # timeline = [p for p in site.timeline if p.use_in_feeds]
-
- if categories:
- timeline = [p for p in timeline if p.meta('category', lang=lang).lower() in categories]
-
- if sections:
- timeline = [p for p in timeline if p.section_name(lang).lower() in sections]
-
- for post in timeline:
- if tags:
- cont = True
- tags_lower = [t.lower() for t in post.tags]
- for tag in tags:
- if tag in tags_lower:
- cont = False
-
- if cont:
- continue
-
- filtered_timeline.append(post)
-
- if sort:
- filtered_timeline = natsort.natsorted(filtered_timeline, key=lambda post: post.meta[lang][sort], alg=natsort.ns.F | natsort.ns.IC)
-
- if date:
- filtered_timeline = [p for p in filtered_timeline if date_in_range(date, p.date)]
-
- for post in filtered_timeline[start:stop:step]:
- if slugs:
- cont = True
- for slug in slugs:
- if slug == post.meta('slug'):
- cont = False
-
- if cont:
- continue
-
- bp = post.translated_base_path(lang)
- if os.path.exists(bp) and state:
- state.document.settings.record_dependencies.add(bp)
- elif os.path.exists(bp) and self_post:
- self_post.register_depfile(bp, lang=lang)
-
- posts += [post]
-
- if not posts:
- return '', []
-
- template_deps = site.template_system.template_deps(template)
- if state:
- # Register template as a dependency (Issue #2391)
- for d in template_deps:
- state.document.settings.record_dependencies.add(d)
- elif self_post:
- for d in template_deps:
- self_post.register_depfile(d, lang=lang)
-
- template_data = {
- 'lang': lang,
- 'posts': posts,
- # Need to provide str, not TranslatableSetting (Issue #2104)
- 'date_format': site.GLOBAL_CONTEXT.get('date_format')[lang],
- 'post_list_id': post_list_id,
- 'messages': site.MESSAGES,
- }
- output = site.template_system.render_template(
- template, None, template_data)
- return output, template_deps
-
-# Request file name from shortcode (Issue #2412)
-_do_post_list.nikola_shortcode_pass_filename = True
diff --git a/nikola/plugins/compile/rest/slides.plugin b/nikola/plugins/compile/rest/slides.plugin
deleted file mode 100644
index 389da39..0000000
--- a/nikola/plugins/compile/rest/slides.plugin
+++ /dev/null
@@ -1,14 +0,0 @@
-[Core]
-name = rest_slides
-module = slides
-
-[Nikola]
-compiler = rest
-plugincategory = CompilerExtension
-
-[Documentation]
-author = Roberto Alsina
-version = 0.1
-website = https://getnikola.com/
-description = Slides directive
-
diff --git a/nikola/plugins/compile/rest/slides.py b/nikola/plugins/compile/rest/slides.py
deleted file mode 100644
index 7c5b34b..0000000
--- a/nikola/plugins/compile/rest/slides.py
+++ /dev/null
@@ -1,78 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright © 2012-2016 Roberto Alsina and others.
-
-# Permission is hereby granted, free of charge, to any
-# person obtaining a copy of this software and associated
-# documentation files (the "Software"), to deal in the
-# Software without restriction, including without limitation
-# the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the
-# Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice
-# shall be included in all copies or substantial portions of
-# the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
-# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
-# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
-# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
-# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-"""Slides directive for reStructuredText."""
-
-from __future__ import unicode_literals
-
-import uuid
-
-from docutils import nodes
-from docutils.parsers.rst import Directive, directives
-
-from nikola.plugin_categories import RestExtension
-
-
-class Plugin(RestExtension):
- """Plugin for reST slides directive."""
-
- name = "rest_slides"
-
- def set_site(self, site):
- """Set Nikola site."""
- self.site = site
- directives.register_directive('slides', Slides)
- Slides.site = site
- return super(Plugin, self).set_site(site)
-
-
-class Slides(Directive):
- """reST extension for inserting slideshows."""
-
- has_content = True
-
- def run(self):
- """Run the slides directive."""
- if len(self.content) == 0: # pragma: no cover
- return
-
- if self.site.invariant: # for testing purposes
- carousel_id = 'slides_' + 'fixedvaluethatisnotauuid'
- else:
- carousel_id = 'slides_' + uuid.uuid4().hex
-
- output = self.site.template_system.render_template(
- 'slides.tmpl',
- None,
- {
- 'slides_content': self.content,
- 'carousel_id': carousel_id,
- }
- )
- return [nodes.raw('', output, format='html')]
-
-
-directives.register_directive('slides', Slides)
diff --git a/nikola/plugins/compile/rest/soundcloud.plugin b/nikola/plugins/compile/rest/soundcloud.plugin
index 4e36ea4..f85a964 100644
--- a/nikola/plugins/compile/rest/soundcloud.plugin
+++ b/nikola/plugins/compile/rest/soundcloud.plugin
@@ -4,7 +4,7 @@ module = soundcloud
[Nikola]
compiler = rest
-plugincategory = CompilerExtension
+PluginCategory = CompilerExtension
[Documentation]
author = Roberto Alsina
diff --git a/nikola/plugins/compile/rest/soundcloud.py b/nikola/plugins/compile/rest/soundcloud.py
index 9fabe70..5dbcfc3 100644
--- a/nikola/plugins/compile/rest/soundcloud.py
+++ b/nikola/plugins/compile/rest/soundcloud.py
@@ -1,5 +1,29 @@
# -*- coding: utf-8 -*-
+# Copyright © 2012-2020 Roberto Alsina and others.
+
+# Permission is hereby granted, free of charge, to any
+# person obtaining a copy of this software and associated
+# documentation files (the "Software"), to deal in the
+# Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the
+# Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice
+# shall be included in all copies or substantial portions of
+# the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
+# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
"""SoundCloud directive for reStructuredText."""
from docutils import nodes
@@ -19,7 +43,7 @@ class Plugin(RestExtension):
self.site = site
directives.register_directive('soundcloud', SoundCloud)
directives.register_directive('soundcloud_playlist', SoundCloudPlaylist)
- return super(Plugin, self).set_site(site)
+ return super().set_site(site)
CODE = """\
diff --git a/nikola/plugins/compile/rest/thumbnail.plugin b/nikola/plugins/compile/rest/thumbnail.plugin
index 3324c31..e7b649d 100644
--- a/nikola/plugins/compile/rest/thumbnail.plugin
+++ b/nikola/plugins/compile/rest/thumbnail.plugin
@@ -4,7 +4,7 @@ module = thumbnail
[Nikola]
compiler = rest
-plugincategory = CompilerExtension
+PluginCategory = CompilerExtension
[Documentation]
author = Pelle Nilsson
diff --git a/nikola/plugins/compile/rest/thumbnail.py b/nikola/plugins/compile/rest/thumbnail.py
index 37e0973..06ca9e4 100644
--- a/nikola/plugins/compile/rest/thumbnail.py
+++ b/nikola/plugins/compile/rest/thumbnail.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2014-2016 Pelle Nilsson and others.
+# Copyright © 2014-2020 Pelle Nilsson and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -43,7 +43,7 @@ class Plugin(RestExtension):
"""Set Nikola site."""
self.site = site
directives.register_directive('thumbnail', Thumbnail)
- return super(Plugin, self).set_site(site)
+ return super().set_site(site)
class Thumbnail(Figure):
@@ -69,7 +69,7 @@ class Thumbnail(Figure):
"""Run the thumbnail directive."""
uri = directives.uri(self.arguments[0])
if uri.endswith('.svg'):
- # the ? at the end makes docutil output an <img> instead of an object for the svg, which colorbox requires
+            # the ? at the end makes docutils output an <img> instead of an object for the svg, which lightboxes may require
self.arguments[0] = '.thumbnail'.join(os.path.splitext(uri)) + '?'
else:
self.arguments[0] = '.thumbnail'.join(os.path.splitext(uri))
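The directive rewrites the image URI so it points at the generated `.thumbnail` copy, and appends `?` to SVG URIs so docutils emits an `<img>` tag instead of an `<object>`. A minimal sketch of that rewrite (the helper name is illustrative):

    import os

    def thumbnail_uri(uri):
        """Insert '.thumbnail' before the extension; append '?' for SVGs."""
        base, ext = os.path.splitext(uri)
        rewritten = base + '.thumbnail' + ext
        return rewritten + '?' if ext == '.svg' else rewritten

    # thumbnail_uri('/images/logo.svg')  -> '/images/logo.thumbnail.svg?'
    # thumbnail_uri('/images/photo.jpg') -> '/images/photo.thumbnail.jpg'
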
diff --git a/nikola/plugins/compile/rest/vimeo.plugin b/nikola/plugins/compile/rest/vimeo.plugin
index 688f981..89b171b 100644
--- a/nikola/plugins/compile/rest/vimeo.plugin
+++ b/nikola/plugins/compile/rest/vimeo.plugin
@@ -4,7 +4,7 @@ module = vimeo
[Nikola]
compiler = rest
-plugincategory = CompilerExtension
+PluginCategory = CompilerExtension
[Documentation]
description = Vimeo directive
diff --git a/nikola/plugins/compile/rest/vimeo.py b/nikola/plugins/compile/rest/vimeo.py
index f1ac6c3..7047b03 100644
--- a/nikola/plugins/compile/rest/vimeo.py
+++ b/nikola/plugins/compile/rest/vimeo.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,15 +26,14 @@
"""Vimeo directive for reStructuredText."""
-from docutils import nodes
-from docutils.parsers.rst import Directive, directives
-from nikola.plugins.compile.rest import _align_choice, _align_options_base
-
-import requests
import json
+import requests
+from docutils import nodes
+from docutils.parsers.rst import Directive, directives
from nikola.plugin_categories import RestExtension
+from nikola.plugins.compile.rest import _align_choice, _align_options_base
class Plugin(RestExtension):
@@ -46,7 +45,7 @@ class Plugin(RestExtension):
"""Set Nikola site."""
self.site = site
directives.register_directive('vimeo', Vimeo)
- return super(Plugin, self).set_site(site)
+ return super().set_site(site)
CODE = """<div class="vimeo-video{align}">
diff --git a/nikola/plugins/compile/rest/youtube.plugin b/nikola/plugins/compile/rest/youtube.plugin
index 5fbd67b..d83d0f8 100644
--- a/nikola/plugins/compile/rest/youtube.plugin
+++ b/nikola/plugins/compile/rest/youtube.plugin
@@ -4,7 +4,7 @@ module = youtube
[Nikola]
compiler = rest
-plugincategory = CompilerExtension
+PluginCategory = CompilerExtension
[Documentation]
version = 0.1
diff --git a/nikola/plugins/compile/rest/youtube.py b/nikola/plugins/compile/rest/youtube.py
index b3dde62..d52ec64 100644
--- a/nikola/plugins/compile/rest/youtube.py
+++ b/nikola/plugins/compile/rest/youtube.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -28,8 +28,8 @@
from docutils import nodes
from docutils.parsers.rst import Directive, directives
-from nikola.plugins.compile.rest import _align_choice, _align_options_base
+from nikola.plugins.compile.rest import _align_choice, _align_options_base
from nikola.plugin_categories import RestExtension
@@ -42,13 +42,14 @@ class Plugin(RestExtension):
"""Set Nikola site."""
self.site = site
directives.register_directive('youtube', Youtube)
- return super(Plugin, self).set_site(site)
+ return super().set_site(site)
CODE = """\
<div class="youtube-video{align}">
<iframe width="{width}" height="{height}"
-src="https://www.youtube.com/embed/{yid}?rel=0&amp;hd=1&amp;wmode=transparent"
+src="https://www.youtube-nocookie.com/embed/{yid}?rel=0&wmode=transparent"
+frameborder="0" allow="encrypted-media" allowfullscreen
></iframe>
</div>"""
@@ -66,8 +67,8 @@ class Youtube(Directive):
has_content = True
required_arguments = 1
option_spec = {
- "width": directives.positive_int,
- "height": directives.positive_int,
+ "width": directives.unchanged,
+ "height": directives.unchanged,
"align": _align_choice
}
@@ -76,10 +77,10 @@ class Youtube(Directive):
self.check_content()
options = {
'yid': self.arguments[0],
- 'width': 425,
- 'height': 344,
+ 'width': 560,
+ 'height': 315,
}
- options.update(self.options)
+ options.update({k: v for k, v in self.options.items() if v})
if self.options.get('align') in _align_options_base:
options['align'] = ' align-' + self.options['align']
else:
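With `width` and `height` now declared as `directives.unchanged`, values such as `100%` are accepted, and the dictionary comprehension keeps the new 560×315 defaults whenever a user-supplied option is empty. A minimal sketch of that merge (the helper name and sample video ID are illustrative):

    def merge_youtube_options(yid, user_options):
        """Start from the 16:9 defaults; only non-empty user options override them."""
        options = {'yid': yid, 'width': 560, 'height': 315}
        options.update({k: v for k, v in user_options.items() if v})
        return options

    # merge_youtube_options('dQw4w9WgXcQ', {'width': '100%', 'height': ''})
    # -> {'yid': 'dQw4w9WgXcQ', 'width': '100%', 'height': 315}
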
diff --git a/nikola/plugins/misc/__init__.py b/nikola/plugins/misc/__init__.py
index 518fac1..1e7e6e1 100644
--- a/nikola/plugins/misc/__init__.py
+++ b/nikola/plugins/misc/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
diff --git a/nikola/plugins/misc/scan_posts.py b/nikola/plugins/misc/scan_posts.py
index f584a05..8812779 100644
--- a/nikola/plugins/misc/scan_posts.py
+++ b/nikola/plugins/misc/scan_posts.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,7 +26,6 @@
"""The default post scanner."""
-from __future__ import unicode_literals, print_function
import glob
import os
import sys
@@ -35,7 +34,7 @@ from nikola.plugin_categories import PostScanner
from nikola import utils
from nikola.post import Post
-LOGGER = utils.get_logger('scan_posts', utils.STDERR_HANDLER)
+LOGGER = utils.get_logger('scan_posts')
class ScanPosts(PostScanner):
@@ -55,10 +54,10 @@ class ScanPosts(PostScanner):
self.site.config['post_pages']:
if not self.site.quiet:
print(".", end='', file=sys.stderr)
+ destination_translatable = utils.TranslatableSetting('destination', destination, self.site.config['TRANSLATIONS'])
dirname = os.path.dirname(wildcard)
for dirpath, _, _ in os.walk(dirname, followlinks=True):
- dest_dir = os.path.normpath(os.path.join(destination,
- os.path.relpath(dirpath, dirname))) # output/destination/foo/
+ rel_dest_dir = os.path.relpath(dirpath, dirname)
# Get all the untranslated paths
dir_glob = os.path.join(dirpath, os.path.basename(wildcard)) # posts/foo/*.rst
untranslated = glob.glob(dir_glob)
@@ -84,24 +83,30 @@ class ScanPosts(PostScanner):
if not any([x.startswith('.')
for x in p.split(os.sep)])]
- for base_path in full_list:
+ for base_path in sorted(full_list):
if base_path in seen:
continue
- else:
- seen.add(base_path)
try:
post = Post(
base_path,
self.site.config,
- dest_dir,
+ rel_dest_dir,
use_in_feeds,
self.site.MESSAGES,
template_name,
- self.site.get_compiler(base_path)
+ self.site.get_compiler(base_path),
+ destination_base=destination_translatable,
+ metadata_extractors_by=self.site.metadata_extractors_by
)
+ for lang in post.translated_to:
+ seen.add(post.translated_source_path(lang))
timeline.append(post)
- except Exception as err:
+ except Exception:
LOGGER.error('Error reading post {}'.format(base_path))
- raise err
+ raise
return timeline
+
+ def supported_extensions(self):
+ """Return a list of supported file extensions, or None if such a list isn't known beforehand."""
+ return list({os.path.splitext(x[0])[1] for x in self.site.config['post_pages']})
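The new `supported_extensions()` derives its answer from the wildcards in `post_pages`, so no hard-coded list is needed. A minimal sketch with an assumed typical POSTS/PAGES configuration:

    import os

    post_pages = [
        ('posts/*.rst', 'posts', 'post.tmpl', True),
        ('posts/*.md', 'posts', 'post.tmpl', True),
        ('pages/*.rst', 'pages', 'page.tmpl', False),
    ]

    # The wildcard is the first element of each entry; a set removes duplicates.
    extensions = list({os.path.splitext(entry[0])[1] for entry in post_pages})
    # -> ['.rst', '.md'] (in arbitrary set order)
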
diff --git a/nikola/plugins/misc/taxonomies_classifier.plugin b/nikola/plugins/misc/taxonomies_classifier.plugin
new file mode 100644
index 0000000..55c59af
--- /dev/null
+++ b/nikola/plugins/misc/taxonomies_classifier.plugin
@@ -0,0 +1,12 @@
+[Core]
+name = classify_taxonomies
+module = taxonomies_classifier
+
+[Documentation]
+author = Roberto Alsina
+version = 1.0
+website = https://getnikola.com/
+description = Classifies the timeline into taxonomies.
+
+[Nikola]
+PluginCategory = SignalHandler
diff --git a/nikola/plugins/misc/taxonomies_classifier.py b/nikola/plugins/misc/taxonomies_classifier.py
new file mode 100644
index 0000000..da8045b
--- /dev/null
+++ b/nikola/plugins/misc/taxonomies_classifier.py
@@ -0,0 +1,335 @@
+# -*- coding: utf-8 -*-
+
+# Copyright © 2012-2020 Roberto Alsina and others.
+
+# Permission is hereby granted, free of charge, to any
+# person obtaining a copy of this software and associated
+# documentation files (the "Software"), to deal in the
+# Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the
+# Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice
+# shall be included in all copies or substantial portions of
+# the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
+# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+"""Render the taxonomy overviews, classification pages and feeds."""
+
+import functools
+import os
+import sys
+from collections import defaultdict
+
+import blinker
+import natsort
+
+from nikola.plugin_categories import SignalHandler
+from nikola import utils, hierarchy_utils
+
+
+class TaxonomiesClassifier(SignalHandler):
+ """Classify posts and pages by taxonomies."""
+
+ name = "classify_taxonomies"
+
+ def _do_classification(self, site):
+ # Needed to avoid strange errors during tests
+ if site is not self.site:
+ return
+
+ # Get list of enabled taxonomy plugins and initialize data structures
+ taxonomies = site.taxonomy_plugins.values()
+ site.posts_per_classification = {}
+ for taxonomy in taxonomies:
+ site.posts_per_classification[taxonomy.classification_name] = {
+ lang: defaultdict(set) for lang in site.config['TRANSLATIONS'].keys()
+ }
+
+ # Classify posts
+ for post in site.timeline:
+ # Do classify pages, but don’t classify posts that are hidden
+ # (draft/private/future)
+ if post.is_post and not post.use_in_feeds:
+ continue
+ for taxonomy in taxonomies:
+ if taxonomy.apply_to_posts if post.is_post else taxonomy.apply_to_pages:
+ classifications = {}
+ for lang in site.config['TRANSLATIONS'].keys():
+ # Extract classifications for this language
+ classifications[lang] = taxonomy.classify(post, lang)
+ if not taxonomy.more_than_one_classifications_per_post and len(classifications[lang]) > 1:
+ raise ValueError("Too many {0} classifications for post {1}".format(taxonomy.classification_name, post.source_path))
+ # Add post to sets
+ for classification in classifications[lang]:
+ while True:
+ site.posts_per_classification[taxonomy.classification_name][lang][classification].add(post)
+ if not taxonomy.include_posts_from_subhierarchies or not taxonomy.has_hierarchy:
+ break
+ classification_path = taxonomy.extract_hierarchy(classification)
+ if len(classification_path) <= 1:
+ if len(classification_path) == 0 or not taxonomy.include_posts_into_hierarchy_root:
+ break
+ classification = taxonomy.recombine_classification_from_hierarchy(classification_path[:-1])
+
+ # Sort everything.
+ site.page_count_per_classification = {}
+ site.hierarchy_per_classification = {}
+ site.flat_hierarchy_per_classification = {}
+ site.hierarchy_lookup_per_classification = {}
+ for taxonomy in taxonomies:
+ site.page_count_per_classification[taxonomy.classification_name] = {}
+ # Sort post lists
+ for lang, posts_per_classification in site.posts_per_classification[taxonomy.classification_name].items():
+ # Ensure implicit classifications are inserted
+ for classification in taxonomy.get_implicit_classifications(lang):
+ if classification not in posts_per_classification:
+ posts_per_classification[classification] = []
+ site.page_count_per_classification[taxonomy.classification_name][lang] = {}
+ # Convert sets to lists and sort them
+ for classification in list(posts_per_classification.keys()):
+ posts = list(posts_per_classification[classification])
+ posts = self.site.sort_posts_chronologically(posts, lang)
+ taxonomy.sort_posts(posts, classification, lang)
+ posts_per_classification[classification] = posts
+ # Create hierarchy information
+ if taxonomy.has_hierarchy:
+ site.hierarchy_per_classification[taxonomy.classification_name] = {}
+ site.flat_hierarchy_per_classification[taxonomy.classification_name] = {}
+ site.hierarchy_lookup_per_classification[taxonomy.classification_name] = {}
+ for lang, posts_per_classification in site.posts_per_classification[taxonomy.classification_name].items():
+ # Compose hierarchy
+ hierarchy = {}
+ for classification in posts_per_classification.keys():
+ hier = taxonomy.extract_hierarchy(classification)
+ node = hierarchy
+ for he in hier:
+ if he not in node:
+ node[he] = {}
+ node = node[he]
+ hierarchy_lookup = {}
+
+ def create_hierarchy(hierarchy, parent=None, level=0):
+ """Create hierarchy."""
+ result = {}
+ for name, children in hierarchy.items():
+ node = hierarchy_utils.TreeNode(name, parent)
+ node.children = create_hierarchy(children, node, level + 1)
+ node.classification_path = [pn.name for pn in node.get_path()]
+ node.classification_name = taxonomy.recombine_classification_from_hierarchy(node.classification_path)
+ hierarchy_lookup[node.classification_name] = node
+ result[node.name] = node
+ classifications = natsort.natsorted(result.keys(), alg=natsort.ns.F | natsort.ns.IC)
+ taxonomy.sort_classifications(classifications, lang, level=level)
+ return [result[classification] for classification in classifications]
+
+ root_list = create_hierarchy(hierarchy)
+ if '' in posts_per_classification:
+ node = hierarchy_utils.TreeNode('', parent=None)
+ node.children = root_list
+ node.classification_path = []
+ node.classification_name = ''
+ hierarchy_lookup[node.name] = node
+ root_list = [node]
+ flat_hierarchy = hierarchy_utils.flatten_tree_structure(root_list)
+ # Store result
+ site.hierarchy_per_classification[taxonomy.classification_name][lang] = root_list
+ site.flat_hierarchy_per_classification[taxonomy.classification_name][lang] = flat_hierarchy
+ site.hierarchy_lookup_per_classification[taxonomy.classification_name][lang] = hierarchy_lookup
+ taxonomy.postprocess_posts_per_classification(site.posts_per_classification[taxonomy.classification_name],
+ site.flat_hierarchy_per_classification[taxonomy.classification_name],
+ site.hierarchy_lookup_per_classification[taxonomy.classification_name])
+ else:
+ taxonomy.postprocess_posts_per_classification(site.posts_per_classification[taxonomy.classification_name])
+
+ # Check for valid paths and for collisions
+ taxonomy_outputs = {lang: dict() for lang in site.config['TRANSLATIONS'].keys()}
+ quit = False
+ for taxonomy in taxonomies:
+ # Check for collisions (per language)
+ for lang in site.config['TRANSLATIONS'].keys():
+ if not taxonomy.is_enabled(lang):
+ continue
+ for classification, posts in site.posts_per_classification[taxonomy.classification_name][lang].items():
+ # Do we actually generate this classification page?
+ filtered_posts = [x for x in posts if self.site.config["SHOW_UNTRANSLATED_POSTS"] or x.is_translation_available(lang)]
+ generate_list = taxonomy.should_generate_classification_page(classification, filtered_posts, lang)
+ if not generate_list:
+ continue
+ # Obtain path as tuple
+ path = site.path_handlers[taxonomy.classification_name](classification, lang)
+ # Check that path is OK
+ for path_element in path:
+ if len(path_element) == 0:
+ utils.LOGGER.error("{0} {1} yields invalid path '{2}'!".format(taxonomy.classification_name.title(), classification, '/'.join(path)))
+ quit = True
+ # Combine path
+ path = os.path.join(*[os.path.normpath(p) for p in path if p != '.'])
+ # Determine collisions
+ if path in taxonomy_outputs[lang]:
+ other_classification_name, other_classification, other_posts = taxonomy_outputs[lang][path]
+ if other_classification_name == taxonomy.classification_name and other_classification == classification:
+ taxonomy_outputs[lang][path][2].extend(filtered_posts)
+ else:
+ utils.LOGGER.error('You have classifications that are too similar: {0} "{1}" and {2} "{3}" both result in output path {4} for language {5}.'.format(
+ taxonomy.classification_name, classification, other_classification_name, other_classification, path, lang))
+ utils.LOGGER.error('{0} "{1}" is used in: {2}'.format(
+ taxonomy.classification_name.title(), classification, ', '.join(sorted([p.source_path for p in filtered_posts]))))
+ utils.LOGGER.error('{0} "{1}" is used in: {2}'.format(
+ other_classification_name.title(), other_classification, ', '.join(sorted([p.source_path for p in other_posts]))))
+ quit = True
+ else:
+ taxonomy_outputs[lang][path] = (taxonomy.classification_name, classification, list(posts))
+ if quit:
+ sys.exit(1)
+ blinker.signal('taxonomies_classified').send(site)
+
+ def _get_filtered_list(self, taxonomy, classification, lang):
+ """Return the filtered list of posts for this classification and language."""
+ post_list = self.site.posts_per_classification[taxonomy.classification_name][lang].get(classification, [])
+ if self.site.config["SHOW_UNTRANSLATED_POSTS"]:
+ return post_list
+ else:
+ return [x for x in post_list if x.is_translation_available(lang)]
+
+ @staticmethod
+ def _compute_number_of_pages(filtered_posts, posts_count):
+ """Given a list of posts and the maximal number of posts per page, computes the number of pages needed."""
+ return min(1, (len(filtered_posts) + posts_count - 1) // posts_count)
+
+ def _postprocess_path(self, path, lang, append_index='auto', dest_type='page', page_info=None, alternative_path=False):
+ """Postprocess a generated path.
+
+ Takes the path `path` for language `lang`, and postprocesses it.
+
+ It appends `site.config['INDEX_FILE']` depending on `append_index`
+ (which can have the values `'always'`, `'never'` and `'auto'`) and
+ `site.config['PRETTY_URLS']`.
+
+ It also modifies/adds the extension of the last path element resp.
+ `site.config['INDEX_FILE']` depending on `dest_type`, which can be
+ `'feed'`, `'rss'` or `'page'`.
+
+ If `dest_type` is `'page'`, `page_info` can be `None` or a tuple
+ of two integers: the page number and the number of pages. This will
+ be used to append the correct page number by calling
+ `utils.adjust_name_for_index_path_list` and
+ `utils.get_displayed_page_number`.
+
+ If `alternative_path` is set to `True`, `utils.adjust_name_for_index_path_list`
+ is called with `force_addition=True`, resulting in an alternative path for the
+ first page of an index or Atom feed by including the page number into the path.
+ """
+ # Forcing extension for Atom feeds and RSS feeds
+ force_extension = None
+ if dest_type == 'feed':
+ force_extension = self.site.config['ATOM_EXTENSION']
+ elif dest_type == 'rss':
+ force_extension = self.site.config['RSS_EXTENSION']
+ # Determine how to extend path
+ path = [_f for _f in path if _f]
+ if force_extension is not None:
+ if len(path) == 0 and dest_type == 'rss':
+ path = [self.site.config['RSS_FILENAME_BASE'](lang)]
+ elif len(path) == 0 and dest_type == 'feed':
+ path = [self.site.config['ATOM_FILENAME_BASE'](lang)]
+ elif len(path) == 0 or append_index == 'always':
+ path = path + [os.path.splitext(self.site.config['INDEX_FILE'])[0]]
+ elif len(path) > 0 and append_index == 'never':
+ path[-1] = os.path.splitext(path[-1])[0]
+ path[-1] += force_extension
+ elif (self.site.config['PRETTY_URLS'] and append_index != 'never') or len(path) == 0 or append_index == 'always':
+ path = path + [self.site.config['INDEX_FILE']]
+ elif append_index != 'never':
+ path[-1] += '.html'
+ # Create path
+ result = [_f for _f in [self.site.config['TRANSLATIONS'][lang]] + path if _f]
+ if page_info is not None and dest_type in ('page', 'feed'):
+ result = utils.adjust_name_for_index_path_list(result,
+ page_info[0],
+ utils.get_displayed_page_number(page_info[0], page_info[1], self.site),
+ lang,
+ self.site, force_addition=alternative_path, extension=force_extension)
+ return result
+
+ @staticmethod
+ def _parse_path_result(result):
+ """Interpret the return values of taxonomy.get_path() and taxonomy.get_overview_path() as if all three return values were given."""
+ if not isinstance(result[0], (list, tuple)):
+ # The result must be a list or tuple of strings. Wrap into a tuple
+ result = (result, )
+ path = result[0]
+ append_index = result[1] if len(result) > 1 else 'auto'
+ page_info = result[2] if len(result) > 2 else None
+ return path, append_index, page_info
+
+ def _taxonomy_index_path(self, name, lang, taxonomy):
+ """Return path to the classification overview."""
+ result = taxonomy.get_overview_path(lang)
+ path, append_index, _ = self._parse_path_result(result)
+ return self._postprocess_path(path, lang, append_index=append_index, dest_type='list')
+
+ def _taxonomy_path(self, name, lang, taxonomy, dest_type='page', page=None, alternative_path=False):
+ """Return path to a classification."""
+ if taxonomy.has_hierarchy:
+ result = taxonomy.get_path(taxonomy.extract_hierarchy(name), lang, dest_type=dest_type)
+ else:
+ result = taxonomy.get_path(name, lang, dest_type=dest_type)
+ path, append_index, page_ = self._parse_path_result(result)
+
+ if page is not None:
+ page = int(page)
+ else:
+ page = page_
+
+ page_info = None
+ if taxonomy.show_list_as_index and page is not None:
+ number_of_pages = self.site.page_count_per_classification[taxonomy.classification_name][lang].get(name)
+ if number_of_pages is None:
+ number_of_pages = self._compute_number_of_pages(self._get_filtered_list(taxonomy, name, lang), self.site.config['INDEX_DISPLAY_POST_COUNT'])
+ self.site.page_count_per_classification[taxonomy.classification_name][lang][name] = number_of_pages
+ page_info = (page, number_of_pages)
+ return self._postprocess_path(path, lang, append_index=append_index, dest_type=dest_type, page_info=page_info)
+
+ def _taxonomy_atom_path(self, name, lang, taxonomy, page=None, alternative_path=False):
+ """Return path to a classification Atom feed."""
+ return self._taxonomy_path(name, lang, taxonomy, dest_type='feed', page=page, alternative_path=alternative_path)
+
+ def _taxonomy_rss_path(self, name, lang, taxonomy):
+ """Return path to a classification RSS feed."""
+ return self._taxonomy_path(name, lang, taxonomy, dest_type='rss')
+
+ def _register_path_handlers(self, taxonomy):
+ functions = (
+ ('{0}_index', self._taxonomy_index_path),
+ ('{0}', self._taxonomy_path),
+ ('{0}_atom', self._taxonomy_atom_path),
+ ('{0}_rss', self._taxonomy_rss_path),
+ )
+
+ for name, function in functions:
+ name = name.format(taxonomy.classification_name)
+ p = functools.partial(function, taxonomy=taxonomy)
+ doc = taxonomy.path_handler_docstrings[name]
+ if doc is not False:
+ p.__doc__ = doc
+ self.site.register_path_handler(name, p)
+
+ def set_site(self, site):
+ """Set site, which is a Nikola instance."""
+ super().set_site(site)
+ # Add hook for after post scanning
+ blinker.signal("scanned").connect(self._do_classification)
+ # Register path handlers
+ for taxonomy in site.taxonomy_plugins.values():
+ self._register_path_handlers(taxonomy)
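`taxonomy.get_path()` and `taxonomy.get_overview_path()` may return just a path, a `(path, append_index)` pair, or a `(path, append_index, page_info)` triple; `_parse_path_result` normalizes all three forms before `_postprocess_path` is applied. A stand-alone sketch of that normalization with illustrative inputs:

    def parse_path_result(result):
        """Fill in 'auto' and None for the values a taxonomy did not return."""
        if not isinstance(result[0], (list, tuple)):
            result = (result, )          # bare path -> wrap it into a 1-tuple
        path = result[0]
        append_index = result[1] if len(result) > 1 else 'auto'
        page_info = result[2] if len(result) > 2 else None
        return path, append_index, page_info

    # parse_path_result(['categories', 'python'])  -> (['categories', 'python'], 'auto', None)
    # parse_path_result((['tags'], 'always'))       -> (['tags'], 'always', None)
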
diff --git a/nikola/plugins/shortcode/chart.plugin b/nikola/plugins/shortcode/chart.plugin
new file mode 100644
index 0000000..edcbc13
--- /dev/null
+++ b/nikola/plugins/shortcode/chart.plugin
@@ -0,0 +1,13 @@
+[Core]
+name = chart
+module = chart
+
+[Nikola]
+PluginCategory = Shortcode
+
+[Documentation]
+author = Roberto Alsina
+version = 0.1
+website = https://getnikola.com/
+description = Chart shortcode based on Pygal
+
diff --git a/nikola/plugins/shortcode/chart.py b/nikola/plugins/shortcode/chart.py
new file mode 100644
index 0000000..64341e8
--- /dev/null
+++ b/nikola/plugins/shortcode/chart.py
@@ -0,0 +1,90 @@
+# -*- coding: utf-8 -*-
+
+# Copyright © 2012-2020 Roberto Alsina and others.
+
+# Permission is hereby granted, free of charge, to any
+# person obtaining a copy of this software and associated
+# documentation files (the "Software"), to deal in the
+# Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the
+# Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice
+# shall be included in all copies or substantial portions of
+# the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
+# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+"""Chart shortcode."""
+
+from ast import literal_eval
+
+from nikola.plugin_categories import ShortcodePlugin
+from nikola.utils import req_missing, load_data
+
+try:
+ import pygal
+except ImportError:
+ pygal = None
+
+_site = None
+
+
+class ChartShortcode(ShortcodePlugin):
+ """Plugin for chart shortcode."""
+
+ name = "chart"
+
+ def handler(self, chart_type, **_options):
+ """Generate chart using Pygal."""
+ if pygal is None:
+ msg = req_missing(
+ ['pygal'], 'use the Chart directive', optional=True)
+ return '<div class="text-error">{0}</div>'.format(msg)
+ options = {}
+ chart_data = []
+ _options.pop('post', None)
+ _options.pop('site')
+ data = _options.pop('data')
+
+ for line in data.splitlines():
+ line = line.strip()
+ if line:
+ chart_data.append(literal_eval('({0})'.format(line)))
+ if 'data_file' in _options:
+ options = load_data(_options['data_file'])
+ _options.pop('data_file')
+ if not chart_data: # If there is data in the document, it wins
+ for k, v in options.pop('data', {}).items():
+ chart_data.append((k, v))
+
+ options.update(_options)
+
+ style_name = options.pop('style', 'BlueStyle')
+ if '(' in style_name: # Parametric style
+ style = eval('pygal.style.' + style_name)
+ else:
+ style = getattr(pygal.style, style_name)
+ for k, v in options.items():
+ try:
+ options[k] = literal_eval(v)
+ except Exception:
+ options[k] = v
+ chart = pygal
+ for o in chart_type.split('.'):
+ chart = getattr(chart, o)
+ chart = chart(style=style)
+ if _site and _site.invariant:
+ chart.no_prefix = True
+ chart.config(**options)
+ for label, series in chart_data:
+ chart.add(label, series)
+ return chart.render().decode('utf8')
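Each non-empty line of the shortcode body is wrapped in parentheses and parsed with `ast.literal_eval`, yielding a `(label, series)` pair that is later fed to `chart.add()`. A minimal sketch of that parsing step with made-up sample data:

    from ast import literal_eval

    data = """
    'Firefox', [23.2, 25.1, 28.0]
    'Chrome',  [31.9, 36.4, 40.2]
    """

    chart_data = []
    for line in data.splitlines():
        line = line.strip()
        if line:
            # "'Firefox', [...]" -> ('Firefox', [23.2, 25.1, 28.0])
            chart_data.append(literal_eval('({0})'.format(line)))

    # chart_data == [('Firefox', [23.2, 25.1, 28.0]), ('Chrome', [31.9, 36.4, 40.2])]
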
diff --git a/nikola/plugins/shortcode/emoji.plugin b/nikola/plugins/shortcode/emoji.plugin
new file mode 100644
index 0000000..c9a272c
--- /dev/null
+++ b/nikola/plugins/shortcode/emoji.plugin
@@ -0,0 +1,13 @@
+[Core]
+name = emoji
+module = emoji
+
+[Nikola]
+PluginCategory = Shortcode
+
+[Documentation]
+author = Roberto Alsina
+version = 0.1
+website = https://getnikola.com/
+description = emoji shortcode
+
diff --git a/nikola/plugins/shortcode/emoji/__init__.py b/nikola/plugins/shortcode/emoji/__init__.py
new file mode 100644
index 0000000..9ae2228
--- /dev/null
+++ b/nikola/plugins/shortcode/emoji/__init__.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+# This file is public domain according to its author, Roberto Alsina
+
+"""Emoji directive for reStructuredText."""
+
+import glob
+import json
+import os
+
+from nikola.plugin_categories import ShortcodePlugin
+from nikola import utils
+
+TABLE = {}
+
+LOGGER = utils.get_logger('scan_posts')
+
+
+def _populate():
+ for fname in glob.glob(os.path.join(os.path.dirname(__file__), 'data', '*.json')):
+ with open(fname, encoding="utf-8-sig") as inf:
+ data = json.load(inf)
+ data = data[list(data.keys())[0]]
+ data = data[list(data.keys())[0]]
+ for item in data:
+ if item['key'] in TABLE:
+ LOGGER.warning('Repeated emoji {}'.format(item['key']))
+ else:
+ TABLE[item['key']] = item['value']
+
+
+class Plugin(ShortcodePlugin):
+    """Plugin for the emoji shortcode."""
+
+ name = "emoji"
+
+ def handler(self, name, filename=None, site=None, data=None, lang=None, post=None):
+ """Create HTML for emoji."""
+ if not TABLE:
+ _populate()
+ try:
+ output = u'''<span class="emoji">{}</span>'''.format(TABLE[name])
+ except KeyError:
+ LOGGER.warning('Unknown emoji {}'.format(name))
+ output = u'''<span class="emoji error">{}</span>'''.format(name)
+
+ return output, []
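The data files added below nest the emoji list two dictionary levels deep (for example {"activities": {"activity": [...]}}), which is why `_populate()` descends by taking the first key twice before building the lookup table. A minimal sketch of that unwrapping with an illustrative one-entry sample:

    import json

    sample = '{"activities": {"activity": [{"key": "trophy", "value": "\\ud83c\\udfc6"}]}}'
    data = json.loads(sample)
    data = data[list(data.keys())[0]]   # {"activity": [...]}
    data = data[list(data.keys())[0]]   # [{"key": "trophy", "value": "🏆"}]

    table = {item['key']: item['value'] for item in data}
    # table['trophy'] == '🏆'
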
diff --git a/nikola/plugins/shortcode/emoji/data/Activity.json b/nikola/plugins/shortcode/emoji/data/Activity.json
new file mode 100644
index 0000000..1461f19
--- /dev/null
+++ b/nikola/plugins/shortcode/emoji/data/Activity.json
@@ -0,0 +1,418 @@
+{
+ "activities": {
+ "activity": [
+ {
+ "key": "soccer_ball",
+ "value": "⚽"
+ },
+ {
+ "key": "basket_ball",
+ "value": "🏀"
+ },
+ {
+ "key": "american_football",
+ "value": "🏈"
+ },
+ {
+ "key": "baseball",
+ "value": "⚾"
+ },
+ {
+ "key": "tennis_racquet_ball",
+ "value": "🎾"
+ },
+ {
+ "key": "volley_ball",
+ "value": "🏐"
+ },
+ {
+ "key": "rugby_football",
+ "value": "🏉"
+ },
+ {
+ "key": "billiards",
+ "value": "🎱"
+ },
+ {
+ "key": "activity_in_hole",
+ "value": "⛳"
+ },
+ {
+ "key": "golfer",
+ "value": "🏌"
+ },
+ {
+ "key": "table_tennis_paddle_ball",
+ "value": "🏓"
+ },
+ {
+ "key": "badminton_racquet_shuttle_cock",
+ "value": "🏸"
+ },
+ {
+ "key": "ice_hockey_stick_puck",
+ "value": "🏒"
+ },
+ {
+ "key": "field_hockey_stick_ball",
+ "value": "🏑"
+ },
+ {
+ "key": "cricket_bat_ball",
+ "value": "🏏"
+ },
+ {
+ "key": "ski_and_ski_boot",
+ "value": "🎿"
+ },
+ {
+ "key": "skier",
+ "value": "⛷"
+ },
+ {
+ "key": "snow_boarder",
+ "value": "🏂"
+ },
+ {
+ "key": "ice_skate",
+ "value": "⛸"
+ },
+ {
+ "key": "bow_and_arrow",
+ "value": "🏹"
+ },
+ {
+ "key": "fishing_pole_and_fish",
+ "value": "🎣"
+ },
+ {
+ "key": "row_boat",
+ "value": "🚣"
+ },
+ {
+ "key": "row_boat_type_1_2",
+ "value": "🚣🏻"
+ },
+ {
+ "key": "row_boat_type_3",
+ "value": "🚣🏼"
+ },
+ {
+ "key": "row_boat_type_4",
+ "value": "🚣🏽"
+ },
+ {
+ "key": "row_boat_type_5",
+ "value": "🚣🏾"
+ },
+ {
+ "key": "row_boat_type_6",
+ "value": "🚣🏿"
+ },
+ {
+ "key": "swimmer",
+ "value": "🏊"
+ },
+ {
+ "key": "swimmer_type_1_2",
+ "value": "🏊🏻"
+ },
+ {
+ "key": "swimmer_type_3",
+ "value": "🏊🏼"
+ },
+ {
+ "key": "swimmer_type_4",
+ "value": "🏊🏽"
+ },
+ {
+ "key": "swimmer_type_5",
+ "value": "🏊🏾"
+ },
+ {
+ "key": "swimmer_type_6",
+ "value": "🏊🏿"
+ },
+ {
+ "key": "surfer",
+ "value": "🏄"
+ },
+ {
+ "key": "surfer_type_1_2",
+ "value": "🏄🏻"
+ },
+ {
+ "key": "surfer_type_3",
+ "value": "🏄🏼"
+ },
+ {
+ "key": "surfer_type_4",
+ "value": "🏄🏽"
+ },
+ {
+ "key": "surfer_type_5",
+ "value": "🏄🏾"
+ },
+ {
+ "key": "surfer_type_6",
+ "value": "🏄🏿"
+ },
+ {
+ "key": "bath",
+ "value": "🛀"
+ },
+ {
+ "key": "bath_type_1_2",
+ "value": "🛀🏻"
+ },
+ {
+ "key": "bath_type_3",
+ "value": "🛀🏼"
+ },
+ {
+ "key": "bath_type_4",
+ "value": "🛀🏽"
+ },
+ {
+ "key": "bath_type_5",
+ "value": "🛀🏾"
+ },
+ {
+ "key": "bath_type_6",
+ "value": "🛀🏿"
+ },
+ {
+ "key": "person_with_ball",
+ "value": "⛹"
+ },
+ {
+ "key": "person_with_ball_type_1_2",
+ "value": "⛹🏻"
+ },
+ {
+ "key": "person_with_ball_type_3",
+ "value": "⛹🏼"
+ },
+ {
+ "key": "person_with_ball_type_4",
+ "value": "⛹🏽"
+ },
+ {
+ "key": "person_with_ball_type_5",
+ "value": "⛹🏾"
+ },
+ {
+ "key": "person_with_ball_type_6",
+ "value": "⛹🏿"
+ },
+ {
+ "key": "weight_lifter",
+ "value": "🏋"
+ },
+ {
+ "key": "weight_lifter_type_1_2",
+ "value": "🏋🏻"
+ },
+ {
+ "key": "weight_lifter_type_3",
+ "value": "🏋🏼"
+ },
+ {
+ "key": "weight_lifter_type_4",
+ "value": "🏋🏽"
+ },
+ {
+ "key": "weight_lifter_type_5",
+ "value": "🏋🏾"
+ },
+ {
+ "key": "weight_lifter_type_6",
+ "value": "🏋🏿"
+ },
+ {
+ "key": "bicyclist",
+ "value": "🚴"
+ },
+ {
+ "key": "bicyclist_type_1_2",
+ "value": "🚴🏻"
+ },
+ {
+ "key": "bicyclist_type_3",
+ "value": "🚴🏼"
+ },
+ {
+ "key": "bicyclist_type_4",
+ "value": "🚴🏽"
+ },
+ {
+ "key": "bicyclist_type_5",
+ "value": "🚴🏾"
+ },
+ {
+ "key": "bicyclist_type_6",
+ "value": "🚴🏿"
+ },
+ {
+ "key": "mountain_bicyclist",
+ "value": "🚵"
+ },
+ {
+ "key": "mountain_bicyclist_type_1_2",
+ "value": "🚵🏻"
+ },
+ {
+ "key": "mountain_bicyclist_type_3",
+ "value": "🚵🏼"
+ },
+ {
+ "key": "mountain_bicyclist_type_4",
+ "value": "🚵🏽"
+ },
+ {
+ "key": "mountain_bicyclist_type_5",
+ "value": "🚵🏾"
+ },
+ {
+ "key": "mountain_bicyclist_type_6",
+ "value": "🚵🏿"
+ },
+ {
+ "key": "horse_racing",
+ "value": "🏇"
+ },
+ {
+ "key": "horse_racing_type_1_2",
+ "value": "🏇🏻"
+ },
+ {
+ "key": "horse_racing_type_3",
+ "value": "🏇🏻"
+ },
+ {
+ "key": "horse_racing_type_4",
+ "value": "🏇🏽"
+ },
+ {
+ "key": "horse_racing_type_5",
+ "value": "🏇🏾"
+ },
+ {
+ "key": "horse_racing_type_6",
+ "value": "🏇🏿"
+ },
+ {
+ "key": "main_business_suit_levitating",
+ "value": "🕴"
+ },
+ {
+ "key": "trophy",
+ "value": "🏆"
+ },
+ {
+ "key": "running_shirt_with_sash",
+ "value": "🎽"
+ },
+ {
+ "key": "sports_medal",
+ "value": "🏅"
+ },
+ {
+ "key": "military_medal",
+ "value": "🎖"
+ },
+ {
+ "key": "reminder_ribbon",
+ "value": "🎗"
+ },
+ {
+ "key": "rosette",
+ "value": "🏵"
+ },
+ {
+ "key": "ticket",
+ "value": "🎫"
+ },
+ {
+ "key": "admission_tickets",
+ "value": "🎟"
+ },
+ {
+ "key": "performing_arts",
+ "value": "🎭"
+ },
+ {
+ "key": "artist_palette",
+ "value": "🎨"
+ },
+ {
+ "key": "circus_tent",
+ "value": "🎪"
+ },
+ {
+ "key": "microphone",
+ "value": "🎤"
+ },
+ {
+ "key": "headphone",
+ "value": "🎧"
+ },
+ {
+ "key": "musical_score",
+ "value": "🎼"
+ },
+ {
+ "key": "musical_keyboard",
+ "value": "🎹"
+ },
+ {
+ "key": "saxophone",
+ "value": "🎷"
+ },
+ {
+ "key": "trumpet",
+ "value": "🎺"
+ },
+ {
+ "key": "guitar",
+ "value": "🎸"
+ },
+ {
+ "key": "violin",
+ "value": "🎻"
+ },
+ {
+ "key": "clapper_board",
+ "value": "🎬"
+ },
+ {
+ "key": "video_game",
+ "value": "🎮"
+ },
+ {
+ "key": "alien_monster",
+ "value": "👾"
+ },
+ {
+ "key": "direct_hit",
+ "value": "🎯"
+ },
+ {
+ "key": "game_die",
+ "value": "🎲"
+ },
+ {
+ "key": "slot_machine",
+ "value": "🎰"
+ },
+ {
+ "key": "bowling",
+ "value": "🎳"
+ },
+ {
+ "key": "olympic_rings",
+ "value": "◯‍◯‍◯‍◯‍◯"
+ }
+ ]
+ }
+} \ No newline at end of file
diff --git a/nikola/plugins/shortcode/emoji/data/Flags.json b/nikola/plugins/shortcode/emoji/data/Flags.json
new file mode 100644
index 0000000..d1d4bdc
--- /dev/null
+++ b/nikola/plugins/shortcode/emoji/data/Flags.json
@@ -0,0 +1,998 @@
+{
+ "flags": {
+ "flag": [
+ {
+ "key": "afghanistan",
+ "value": "🇦🇫"
+ },
+ {
+ "key": "land_island",
+ "value": "🇦🇽"
+ },
+ {
+ "key": "albania",
+ "value": "🇦🇱"
+ },
+ {
+ "key": "algeria",
+ "value": "🇩🇿"
+ },
+ {
+ "key": "american_samoa",
+ "value": "🇦🇸"
+ },
+ {
+ "key": "andorra",
+ "value": "🇦🇩"
+ },
+ {
+ "key": "angola",
+ "value": "🇦🇴"
+ },
+ {
+ "key": "anguilla",
+ "value": "🇦🇮"
+ },
+ {
+ "key": "antarctica",
+ "value": "🇦🇶"
+ },
+ {
+ "key": "antigua_and_barbuda",
+ "value": "🇦🇬"
+ },
+ {
+ "key": "argentina",
+ "value": "🇦🇷"
+ },
+ {
+ "key": "armenia",
+ "value": "🇦🇲"
+ },
+ {
+ "key": "aruba",
+ "value": "🇦🇼"
+ },
+ {
+ "key": "australia",
+ "value": "🇦🇺"
+ },
+ {
+ "key": "austria",
+ "value": "🇦🇹"
+ },
+ {
+ "key": "azerbaijan",
+ "value": "🇦🇿"
+ },
+ {
+ "key": "bahamas",
+ "value": "🇧🇸"
+ },
+ {
+ "key": "bahrain",
+ "value": "🇧🇭"
+ },
+ {
+ "key": "bangladesh",
+ "value": "🇧🇩"
+ },
+ {
+ "key": "barbados",
+ "value": "🇧🇧"
+ },
+ {
+ "key": "belarus",
+ "value": "🇧🇾"
+ },
+ {
+ "key": "belgium",
+ "value": "🇧🇪"
+ },
+ {
+ "key": "belize",
+ "value": "🇧🇿"
+ },
+ {
+ "key": "benin",
+ "value": "🇧🇯"
+ },
+ {
+ "key": "bermuda",
+ "value": "🇧🇲"
+ },
+ {
+ "key": "bhutan",
+ "value": "🇧🇹"
+ },
+ {
+ "key": "bolivia",
+ "value": "🇧🇴"
+ },
+ {
+ "key": "caribbean_netherlands",
+ "value": "🇧🇶"
+ },
+ {
+ "key": "bosnia_and_herzegovina",
+ "value": "🇧🇦"
+ },
+ {
+ "key": "botswana",
+ "value": "🇧🇼"
+ },
+ {
+ "key": "brazil",
+ "value": "🇧🇷"
+ },
+ {
+ "key": "british_indian_ocean_territory",
+ "value": "🇮🇴"
+ },
+ {
+ "key": "british_virgin_islands",
+ "value": "🇻🇬"
+ },
+ {
+ "key": "brunei",
+ "value": "🇧🇳"
+ },
+ {
+ "key": "bulgaria",
+ "value": "🇧🇬"
+ },
+ {
+ "key": "burkina_faso",
+ "value": "🇧🇫"
+ },
+ {
+ "key": "burundi",
+ "value": "🇧🇮"
+ },
+ {
+ "key": "cape_verde",
+ "value": "🇨🇻"
+ },
+ {
+ "key": "cambodia",
+ "value": "🇰🇭"
+ },
+ {
+ "key": "cameroon",
+ "value": "🇨🇲"
+ },
+ {
+ "key": "canada",
+ "value": "🇨🇦"
+ },
+ {
+ "key": "canary_islands",
+ "value": "🇮🇨"
+ },
+ {
+ "key": "cayman_islands",
+ "value": "🇰🇾"
+ },
+ {
+ "key": "central_african_republic",
+ "value": "🇨🇫"
+ },
+ {
+ "key": "chad",
+ "value": "🇹🇩"
+ },
+ {
+ "key": "chile",
+ "value": "🇨🇱"
+ },
+ {
+ "key": "china",
+ "value": "🇨🇳"
+ },
+ {
+ "key": "christmas_island",
+ "value": "🇨🇽"
+ },
+ {
+ "key": "cocos_keeling_island",
+ "value": "🇨🇨"
+ },
+ {
+ "key": "colombia",
+ "value": "🇨🇴"
+ },
+ {
+ "key": "comoros",
+ "value": "🇰🇲"
+ },
+ {
+ "key": "congo_brazzaville",
+ "value": "🇨🇬"
+ },
+ {
+ "key": "congo_kingshasa",
+ "value": "🇨🇩"
+ },
+ {
+ "key": "cook_islands",
+ "value": "🇨🇰"
+ },
+ {
+ "key": "costa_rica",
+ "value": "🇨🇷"
+ },
+ {
+ "key": "croatia",
+ "value": "🇭🇷"
+ },
+ {
+ "key": "cuba",
+ "value": "🇨🇺"
+ },
+ {
+ "key": "curaao",
+ "value": "🇨🇼"
+ },
+ {
+ "key": "cyprus",
+ "value": "🇨🇾"
+ },
+ {
+ "key": "czech_republic",
+ "value": "🇨🇿"
+ },
+ {
+ "key": "denmark",
+ "value": "🇩🇰"
+ },
+ {
+ "key": "djibouti",
+ "value": "🇩🇯"
+ },
+ {
+ "key": "dominica",
+ "value": "🇩🇲"
+ },
+ {
+ "key": "dominican_republic",
+ "value": "🇩🇴"
+ },
+ {
+ "key": "ecuador",
+ "value": "🇪🇨"
+ },
+ {
+ "key": "egypt",
+ "value": "🇪🇬"
+ },
+ {
+ "key": "el_salvador",
+ "value": "🇸🇻"
+ },
+ {
+ "key": "equatorial_guinea",
+ "value": "🇬🇶"
+ },
+ {
+ "key": "eritrea",
+ "value": "🇪🇷"
+ },
+ {
+ "key": "estonia",
+ "value": "🇪🇪"
+ },
+ {
+ "key": "ethiopia",
+ "value": "🇪🇹"
+ },
+ {
+ "key": "european_union",
+ "value": "🇪🇺"
+ },
+ {
+ "key": "falkland_islands",
+ "value": "🇫🇰"
+ },
+ {
+ "key": "faroe_islands",
+ "value": "🇫🇴"
+ },
+ {
+ "key": "fiji",
+ "value": "🇫🇯"
+ },
+ {
+ "key": "finland",
+ "value": "🇫🇮"
+ },
+ {
+ "key": "france",
+ "value": "🇫🇷"
+ },
+ {
+ "key": "french_guiana",
+ "value": "🇬🇫"
+ },
+ {
+ "key": "french_polynesia",
+ "value": "🇵🇫"
+ },
+ {
+ "key": "french_southern_territories",
+ "value": "🇹🇫"
+ },
+ {
+ "key": "gabon",
+ "value": "🇬🇦"
+ },
+ {
+ "key": "gambia",
+ "value": "🇬🇲"
+ },
+ {
+ "key": "georgia",
+ "value": "🇬🇪"
+ },
+ {
+ "key": "germany",
+ "value": "🇩🇪"
+ },
+ {
+ "key": "ghana",
+ "value": "🇬🇭"
+ },
+ {
+ "key": "gibraltar",
+ "value": "🇬🇮"
+ },
+ {
+ "key": "greece",
+ "value": "🇬🇷"
+ },
+ {
+ "key": "greenland",
+ "value": "🇬🇱"
+ },
+ {
+ "key": "grenada",
+ "value": "🇬🇩"
+ },
+ {
+ "key": "guadeloupe",
+ "value": "🇬🇵"
+ },
+ {
+ "key": "guam",
+ "value": "🇬🇺"
+ },
+ {
+ "key": "guatemala",
+ "value": "🇬🇹"
+ },
+ {
+ "key": "guernsey",
+ "value": "🇬🇬"
+ },
+ {
+ "key": "guinea",
+ "value": "🇬🇳"
+ },
+ {
+ "key": "guinea_bissau",
+ "value": "🇬🇼"
+ },
+ {
+ "key": "guyana",
+ "value": "🇬🇾"
+ },
+ {
+ "key": "haiti",
+ "value": "🇭🇹"
+ },
+ {
+ "key": "honduras",
+ "value": "🇭🇳"
+ },
+ {
+ "key": "hong_kong",
+ "value": "🇭🇰"
+ },
+ {
+ "key": "hungary",
+ "value": "🇭🇺"
+ },
+ {
+ "key": "iceland",
+ "value": "🇮🇸"
+ },
+ {
+ "key": "india",
+ "value": "🇮🇳"
+ },
+ {
+ "key": "indonesia",
+ "value": "🇮🇩"
+ },
+ {
+ "key": "iran",
+ "value": "🇮🇷"
+ },
+ {
+ "key": "iraq",
+ "value": "🇮🇶"
+ },
+ {
+ "key": "ireland",
+ "value": "🇮🇪"
+ },
+ {
+ "key": "isle_of_man",
+ "value": "🇮🇲"
+ },
+ {
+ "key": "israel",
+ "value": "🇮🇱"
+ },
+ {
+ "key": "italy",
+ "value": "🇮🇹"
+ },
+ {
+ "key": "ctedivoire",
+ "value": "🇨🇮"
+ },
+ {
+ "key": "jamaica",
+ "value": "🇯🇲"
+ },
+ {
+ "key": "japan",
+ "value": "🇯🇵"
+ },
+ {
+ "key": "jersey",
+ "value": "🇯🇪"
+ },
+ {
+ "key": "jordan",
+ "value": "🇯🇴"
+ },
+ {
+ "key": "kazakhstan",
+ "value": "🇰🇿"
+ },
+ {
+ "key": "kenya",
+ "value": "🇰🇪"
+ },
+ {
+ "key": "kiribati",
+ "value": "🇰🇮"
+ },
+ {
+ "key": "kosovo",
+ "value": "🇽🇰"
+ },
+ {
+ "key": "kuwait",
+ "value": "🇰🇼"
+ },
+ {
+ "key": "kyrgyzstan",
+ "value": "🇰🇬"
+ },
+ {
+ "key": "laos",
+ "value": "🇱🇦"
+ },
+ {
+ "key": "latvia",
+ "value": "🇱🇻"
+ },
+ {
+ "key": "lebanon",
+ "value": "🇱🇧"
+ },
+ {
+ "key": "lesotho",
+ "value": "🇱🇸"
+ },
+ {
+ "key": "liberia",
+ "value": "🇱🇷"
+ },
+ {
+ "key": "libya",
+ "value": "🇱🇾"
+ },
+ {
+ "key": "liechtenstein",
+ "value": "🇱🇮"
+ },
+ {
+ "key": "lithuania",
+ "value": "🇱🇹"
+ },
+ {
+ "key": "luxembourg",
+ "value": "🇱🇺"
+ },
+ {
+ "key": "macau",
+ "value": "🇲🇴"
+ },
+ {
+ "key": "macedonia",
+ "value": "🇲🇰"
+ },
+ {
+ "key": "madagascar",
+ "value": "🇲🇬"
+ },
+ {
+ "key": "malawi",
+ "value": "🇲🇼"
+ },
+ {
+ "key": "malaysia",
+ "value": "🇲🇾"
+ },
+ {
+ "key": "maldives",
+ "value": "🇲🇻"
+ },
+ {
+ "key": "mali",
+ "value": "🇲🇱"
+ },
+ {
+ "key": "malta",
+ "value": "🇲🇹"
+ },
+ {
+ "key": "marshall_islands",
+ "value": "🇲🇭"
+ },
+ {
+ "key": "martinique",
+ "value": "🇲🇶"
+ },
+ {
+ "key": "mauritania",
+ "value": "🇲🇷"
+ },
+ {
+ "key": "mauritius",
+ "value": "🇲🇺"
+ },
+ {
+ "key": "mayotte",
+ "value": "🇾🇹"
+ },
+ {
+ "key": "mexico",
+ "value": "🇲🇽"
+ },
+ {
+ "key": "micronesia",
+ "value": "🇫🇲"
+ },
+ {
+ "key": "moldova",
+ "value": "🇲🇩"
+ },
+ {
+ "key": "monaco",
+ "value": "🇲🇨"
+ },
+ {
+ "key": "mongolia",
+ "value": "🇲🇳"
+ },
+ {
+ "key": "montenegro",
+ "value": "🇲🇪"
+ },
+ {
+ "key": "montserrat",
+ "value": "🇲🇸"
+ },
+ {
+ "key": "morocco",
+ "value": "🇲🇦"
+ },
+ {
+ "key": "mozambique",
+ "value": "🇲🇿"
+ },
+ {
+ "key": "myanmar_burma",
+ "value": "🇲🇲"
+ },
+ {
+ "key": "namibia",
+ "value": "🇳🇦"
+ },
+ {
+ "key": "nauru",
+ "value": "🇳🇷"
+ },
+ {
+ "key": "nepal",
+ "value": "🇳🇵"
+ },
+ {
+ "key": "netherlands",
+ "value": "🇳🇱"
+ },
+ {
+ "key": "new_caledonia",
+ "value": "🇳🇨"
+ },
+ {
+ "key": "new_zealand",
+ "value": "🇳🇿"
+ },
+ {
+ "key": "nicaragua",
+ "value": "🇳🇮"
+ },
+ {
+ "key": "niger",
+ "value": "🇳🇪"
+ },
+ {
+ "key": "nigeria",
+ "value": "🇳🇬"
+ },
+ {
+ "key": "niue",
+ "value": "🇳🇺"
+ },
+ {
+ "key": "norfolk_island",
+ "value": "🇳🇫"
+ },
+ {
+ "key": "northern_mariana_islands",
+ "value": "🇲🇵"
+ },
+ {
+ "key": "north_korea",
+ "value": "🇰🇵"
+ },
+ {
+ "key": "norway",
+ "value": "🇳🇴"
+ },
+ {
+ "key": "oman",
+ "value": "🇴🇲"
+ },
+ {
+ "key": "pakistan",
+ "value": "🇵🇰"
+ },
+ {
+ "key": "palau",
+ "value": "🇵🇼"
+ },
+ {
+ "key": "palestinian_territories",
+ "value": "🇵🇸"
+ },
+ {
+ "key": "panama",
+ "value": "🇵🇦"
+ },
+ {
+ "key": "papua_new_guinea",
+ "value": "🇵🇬"
+ },
+ {
+ "key": "paraguay",
+ "value": "🇵🇾"
+ },
+ {
+ "key": "peru",
+ "value": "🇵🇪"
+ },
+ {
+ "key": "philippines",
+ "value": "🇵🇭"
+ },
+ {
+ "key": "pitcairn_islands",
+ "value": "🇵🇳"
+ },
+ {
+ "key": "poland",
+ "value": "🇵🇱"
+ },
+ {
+ "key": "portugal",
+ "value": "🇵🇹"
+ },
+ {
+ "key": "puerto_rico",
+ "value": "🇵🇷"
+ },
+ {
+ "key": "qatar",
+ "value": "🇶🇦"
+ },
+ {
+ "key": "reunion",
+ "value": "🇷🇪"
+ },
+ {
+ "key": "romania",
+ "value": "🇷🇴"
+ },
+ {
+ "key": "russia",
+ "value": "🇷🇺"
+ },
+ {
+ "key": "rwanda",
+ "value": "🇷🇼"
+ },
+ {
+ "key": "saint_barthlemy",
+ "value": "🇧🇱"
+ },
+ {
+ "key": "saint_helena",
+ "value": "🇸🇭"
+ },
+ {
+ "key": "saint_kitts_and_nevis",
+ "value": "🇰🇳"
+ },
+ {
+ "key": "saint_lucia",
+ "value": "🇱🇨"
+ },
+ {
+ "key": "saint_pierre_and_miquelon",
+ "value": "🇵🇲"
+ },
+ {
+ "key": "st_vincent_grenadines",
+ "value": "🇻🇨"
+ },
+ {
+ "key": "samoa",
+ "value": "🇼🇸"
+ },
+ {
+ "key": "san_marino",
+ "value": "🇸🇲"
+ },
+ {
+ "key": "sotom_and_prncipe",
+ "value": "🇸🇹"
+ },
+ {
+ "key": "saudi_arabia",
+ "value": "🇸🇦"
+ },
+ {
+ "key": "senegal",
+ "value": "🇸🇳"
+ },
+ {
+ "key": "serbia",
+ "value": "🇷🇸"
+ },
+ {
+ "key": "seychelles",
+ "value": "🇸🇨"
+ },
+ {
+ "key": "sierra_leone",
+ "value": "🇸🇱"
+ },
+ {
+ "key": "singapore",
+ "value": "🇸🇬"
+ },
+ {
+ "key": "sint_maarten",
+ "value": "🇸🇽"
+ },
+ {
+ "key": "slovakia",
+ "value": "🇸🇰"
+ },
+ {
+ "key": "slovenia",
+ "value": "🇸🇮"
+ },
+ {
+ "key": "solomon_islands",
+ "value": "🇸🇧"
+ },
+ {
+ "key": "somalia",
+ "value": "🇸🇴"
+ },
+ {
+ "key": "south_africa",
+ "value": "🇿🇦"
+ },
+ {
+ "key": "south_georgia_south_sandwich_islands",
+ "value": "🇬🇸"
+ },
+ {
+ "key": "south_korea",
+ "value": "🇰🇷"
+ },
+ {
+ "key": "south_sudan",
+ "value": "🇸🇸"
+ },
+ {
+ "key": "spain",
+ "value": "🇪🇸"
+ },
+ {
+ "key": "sri_lanka",
+ "value": "🇱🇰"
+ },
+ {
+ "key": "sudan",
+ "value": "🇸🇩"
+ },
+ {
+ "key": "suriname",
+ "value": "🇸🇷"
+ },
+ {
+ "key": "swaziland",
+ "value": "🇸🇿"
+ },
+ {
+ "key": "sweden",
+ "value": "🇸🇪"
+ },
+ {
+ "key": "switzerland",
+ "value": "🇨🇭"
+ },
+ {
+ "key": "syria",
+ "value": "🇸🇾"
+ },
+ {
+ "key": "taiwan",
+ "value": "🇹🇼"
+ },
+ {
+ "key": "tajikistan",
+ "value": "🇹🇯"
+ },
+ {
+ "key": "tanzania",
+ "value": "🇹🇿"
+ },
+ {
+ "key": "thailand",
+ "value": "🇹🇭"
+ },
+ {
+ "key": "timorleste",
+ "value": "🇹🇱"
+ },
+ {
+ "key": "togo",
+ "value": "🇹🇬"
+ },
+ {
+ "key": "tokelau",
+ "value": "🇹🇰"
+ },
+ {
+ "key": "tonga",
+ "value": "🇹🇴"
+ },
+ {
+ "key": "trinidad_and_tobago",
+ "value": "🇹🇹"
+ },
+ {
+ "key": "tunisia",
+ "value": "🇹🇳"
+ },
+ {
+ "key": "turkey",
+ "value": "🇹🇷"
+ },
+ {
+ "key": "turkmenistan",
+ "value": "🇹🇲"
+ },
+ {
+ "key": "turks_and_caicos_islands",
+ "value": "🇹🇨"
+ },
+ {
+ "key": "tuvalu",
+ "value": "🇹🇻"
+ },
+ {
+ "key": "uganda",
+ "value": "🇺🇬"
+ },
+ {
+ "key": "ukraine",
+ "value": "🇺🇦"
+ },
+ {
+ "key": "united_arab_emirates",
+ "value": "🇦🇪"
+ },
+ {
+ "key": "united_kingdom",
+ "value": "🇬🇧"
+ },
+ {
+ "key": "united_states",
+ "value": "🇺🇸"
+ },
+ {
+ "key": "us_virgin_islands",
+ "value": "🇻🇮"
+ },
+ {
+ "key": "uruguay",
+ "value": "🇺🇾"
+ },
+ {
+ "key": "uzbekistan",
+ "value": "🇺🇿"
+ },
+ {
+ "key": "vanuatu",
+ "value": "🇻🇺"
+ },
+ {
+ "key": "vatican_city",
+ "value": "🇻🇦"
+ },
+ {
+ "key": "venezuela",
+ "value": "🇻🇪"
+ },
+ {
+ "key": "vietnam",
+ "value": "🇻🇳"
+ },
+ {
+ "key": "wallis_and_futuna",
+ "value": "🇼🇫"
+ },
+ {
+ "key": "western_sahara",
+ "value": "🇪🇭"
+ },
+ {
+ "key": "yemen",
+ "value": "🇾🇪"
+ },
+ {
+ "key": "zambia",
+ "value": "🇿🇲"
+ },
+ {
+ "key": "zimbabwe",
+ "value": "🇿🇼"
+ },
+ {
+ "key": "england",
+ "value": "🇽🇪"
+ }
+ ]
+ }
+} \ No newline at end of file
diff --git a/nikola/plugins/shortcode/emoji/data/Food.json b/nikola/plugins/shortcode/emoji/data/Food.json
new file mode 100644
index 0000000..c755a20
--- /dev/null
+++ b/nikola/plugins/shortcode/emoji/data/Food.json
@@ -0,0 +1,274 @@
+{
+ "foods": {
+ "food": [
+ {
+ "key": "green_apple",
+ "value": "🍏"
+ },
+ {
+ "key": "red_apple",
+ "value": "🍎"
+ },
+ {
+ "key": "pear",
+ "value": "🍐"
+ },
+ {
+ "key": "tangerine",
+ "value": "🍊"
+ },
+ {
+ "key": "lemon",
+ "value": "🍋"
+ },
+ {
+ "key": "banana",
+ "value": "🍌"
+ },
+ {
+ "key": "watermelon",
+ "value": "🍉"
+ },
+ {
+ "key": "grapes",
+ "value": "🍇"
+ },
+ {
+ "key": "strawberry",
+ "value": "🍓"
+ },
+ {
+ "key": "melon",
+ "value": "🍈"
+ },
+ {
+ "key": "cherry",
+ "value": "🍒"
+ },
+ {
+ "key": "peach",
+ "value": "🍑"
+ },
+ {
+ "key": "pineapple",
+ "value": "🍍"
+ },
+ {
+ "key": "tomato",
+ "value": "🍅"
+ },
+ {
+ "key": "egg_plant",
+ "value": "🍆"
+ },
+ {
+ "key": "hot_pepper",
+ "value": "🌶"
+ },
+ {
+ "key": "ear_of_maize",
+ "value": "🌽"
+ },
+ {
+ "key": "roasted_sweet_potato",
+ "value": "🍠"
+ },
+ {
+ "key": "honey_pot",
+ "value": "🍯"
+ },
+ {
+ "key": "bread",
+ "value": "🍞"
+ },
+ {
+ "key": "cheese",
+ "value": "🧀"
+ },
+ {
+ "key": "poultry_leg",
+ "value": "🍗"
+ },
+ {
+ "key": "meat_on_bone",
+ "value": "🍖"
+ },
+ {
+ "key": "fried_shrimp",
+ "value": "🍤"
+ },
+ {
+ "key": "cooking",
+ "value": "🍳"
+ },
+ {
+ "key": "hamburger",
+ "value": "🍔"
+ },
+ {
+ "key": "french_fries",
+ "value": "🍟"
+ },
+ {
+ "key": "hot_dog",
+ "value": "🌭"
+ },
+ {
+ "key": "slice_of_pizza",
+ "value": "🍕"
+ },
+ {
+ "key": "spaghetti",
+ "value": "🍝"
+ },
+ {
+ "key": "taco",
+ "value": "🌮"
+ },
+ {
+ "key": "burrito",
+ "value": "🌯"
+ },
+ {
+ "key": "steaming_bowl",
+ "value": "🍜"
+ },
+ {
+ "key": "pot_of_food",
+ "value": "🍲"
+ },
+ {
+ "key": "fish_cake",
+ "value": "🍥"
+ },
+ {
+ "key": "sushi",
+ "value": "🍣"
+ },
+ {
+ "key": "bento_box",
+ "value": "🍱"
+ },
+ {
+ "key": "curry_and_rice",
+ "value": "🍛"
+ },
+ {
+ "key": "rice_ball",
+ "value": "🍙"
+ },
+ {
+ "key": "cooked_rice",
+ "value": "🍚"
+ },
+ {
+ "key": "rice_cracker",
+ "value": "🍘"
+ },
+ {
+ "key": "oden",
+ "value": "🍢"
+ },
+ {
+ "key": "dango",
+ "value": "🍡"
+ },
+ {
+ "key": "shaved_ice",
+ "value": "🍧"
+ },
+ {
+ "key": "ice_cream",
+ "value": "🍨"
+ },
+ {
+ "key": "soft_ice_cream",
+ "value": "🍦"
+ },
+ {
+ "key": "short_cake",
+ "value": "🍰"
+ },
+ {
+ "key": "birthday_cake",
+ "value": "🎂"
+ },
+ {
+ "key": "custard",
+ "value": "🍮"
+ },
+ {
+ "key": "candy",
+ "value": "🍬"
+ },
+ {
+ "key": "lollipop",
+ "value": "🍭"
+ },
+ {
+ "key": "chocolate_bar",
+ "value": "🍫"
+ },
+ {
+ "key": "popcorn",
+ "value": "🍿"
+ },
+ {
+ "key": "doughnut",
+ "value": "🍩"
+ },
+ {
+ "key": "cookie",
+ "value": "🍪"
+ },
+ {
+ "key": "bear_mug",
+ "value": "🍺"
+ },
+ {
+ "key": "clinking_beer_mugs",
+ "value": "🍻"
+ },
+ {
+ "key": "wine_glass",
+ "value": "🍷"
+ },
+ {
+ "key": "cocktail_glass",
+ "value": "🍸"
+ },
+ {
+ "key": "tropical_drink",
+ "value": "🍹"
+ },
+ {
+ "key": "bottle_with_popping_cork",
+ "value": "🍾"
+ },
+ {
+ "key": "sake_bottle_and_cup",
+ "value": "🍶"
+ },
+ {
+ "key": "tea_cup_without_handle",
+ "value": "🍵"
+ },
+ {
+ "key": "hot_beverage",
+ "value": "☕"
+ },
+ {
+ "key": "baby_bottle",
+ "value": "🍼"
+ },
+ {
+ "key": "fork_and_knife",
+ "value": "🍴"
+ },
+ {
+ "key": "fork_and_knife_with_plate",
+ "value": "🍽"
+ }
+ ]
+ }
+} \ No newline at end of file
diff --git a/nikola/plugins/shortcode/emoji/data/LICENSE b/nikola/plugins/shortcode/emoji/data/LICENSE
new file mode 100644
index 0000000..c7bf1f4
--- /dev/null
+++ b/nikola/plugins/shortcode/emoji/data/LICENSE
@@ -0,0 +1,25 @@
+The MIT License (MIT)
+
+Copyright (c) 2016 -2017 Shayan Rais
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+------------
+
+Copied from https://github.com/shanraisshan/EmojiCodeSheet
diff --git a/nikola/plugins/shortcode/emoji/data/Nature.json b/nikola/plugins/shortcode/emoji/data/Nature.json
new file mode 100644
index 0000000..f845a64
--- /dev/null
+++ b/nikola/plugins/shortcode/emoji/data/Nature.json
@@ -0,0 +1,594 @@
+{
+ "natures": {
+ "nature": [
+ {
+ "key": "dog_face",
+ "value": "🐶"
+ },
+ {
+ "key": "cat_face",
+ "value": "🐱"
+ },
+ {
+ "key": "mouse_face",
+ "value": "🐭"
+ },
+ {
+ "key": "hamster_face",
+ "value": "🐹"
+ },
+ {
+ "key": "rabbit_face",
+ "value": "🐰"
+ },
+ {
+ "key": "bear_face",
+ "value": "🐻"
+ },
+ {
+ "key": "panda_face",
+ "value": "🐼"
+ },
+ {
+ "key": "koala_face",
+ "value": "🐨"
+ },
+ {
+ "key": "lion_face",
+ "value": "🦁"
+ },
+ {
+ "key": "cow_face",
+ "value": "🐮"
+ },
+ {
+ "key": "pig_face",
+ "value": "🐷"
+ },
+ {
+ "key": "pig_nose",
+ "value": "🐽"
+ },
+ {
+ "key": "frog_face",
+ "value": "🐸"
+ },
+ {
+ "key": "octopus",
+ "value": "🐙"
+ },
+ {
+ "key": "monkey_face",
+ "value": "🐵"
+ },
+ {
+ "key": "tiger_face",
+ "value": "🐯"
+ },
+ {
+ "key": "see_no_evil_monkey",
+ "value": "🙈"
+ },
+ {
+ "key": "hear_no_evil_monkey",
+ "value": "🙉"
+ },
+ {
+ "key": "speak_no_evil_monkey",
+ "value": "🙊"
+ },
+ {
+ "key": "monkey",
+ "value": "🐒"
+ },
+ {
+ "key": "chicken",
+ "value": "🐔"
+ },
+ {
+ "key": "penguin",
+ "value": "🐧"
+ },
+ {
+ "key": "bird",
+ "value": "🐦"
+ },
+ {
+ "key": "baby_chick",
+ "value": "🐤"
+ },
+ {
+ "key": "hatching_chick",
+ "value": "🐣"
+ },
+ {
+ "key": "front_face_chick",
+ "value": "🐥"
+ },
+ {
+ "key": "wolf_face",
+ "value": "🐺"
+ },
+ {
+ "key": "boar",
+ "value": "🐗"
+ },
+ {
+ "key": "horse_face",
+ "value": "🐴"
+ },
+ {
+ "key": "unicorn_face",
+ "value": "🦄"
+ },
+ {
+ "key": "honey_bee",
+ "value": "🐝"
+ },
+ {
+ "key": "bug",
+ "value": "🐛"
+ },
+ {
+ "key": "snail",
+ "value": "🐌"
+ },
+ {
+ "key": "lady_beetle",
+ "value": "🐞"
+ },
+ {
+ "key": "ant",
+ "value": "🐜"
+ },
+ {
+ "key": "spider",
+ "value": "🕷"
+ },
+ {
+ "key": "scorpion",
+ "value": "🦂"
+ },
+ {
+ "key": "crab",
+ "value": "🦀"
+ },
+ {
+ "key": "snake",
+ "value": "🐍"
+ },
+ {
+ "key": "turtle",
+ "value": "🐢"
+ },
+ {
+ "key": "tropical_fish",
+ "value": "🐠"
+ },
+ {
+ "key": "fish",
+ "value": "🐟"
+ },
+ {
+ "key": "blow_fish",
+ "value": "🐡"
+ },
+ {
+ "key": "dolphin",
+ "value": "🐬"
+ },
+ {
+ "key": "spouting_whale",
+ "value": "🐳"
+ },
+ {
+ "key": "whale",
+ "value": "🐋"
+ },
+ {
+ "key": "crocodile",
+ "value": "🐊"
+ },
+ {
+ "key": "leopard",
+ "value": "🐆"
+ },
+ {
+ "key": "tiger",
+ "value": "🐅"
+ },
+ {
+ "key": "water_buffalo",
+ "value": "🐃"
+ },
+ {
+ "key": "ox",
+ "value": "🐂"
+ },
+ {
+ "key": "cow",
+ "value": "🐄"
+ },
+ {
+ "key": "dromedary_camel",
+ "value": "🐪"
+ },
+ {
+ "key": "bactrian_camel",
+ "value": "🐫"
+ },
+ {
+ "key": "elephant",
+ "value": "🐘"
+ },
+ {
+ "key": "goat",
+ "value": "🐐"
+ },
+ {
+ "key": "ram",
+ "value": "🐏"
+ },
+ {
+ "key": "sheep",
+ "value": "🐑"
+ },
+ {
+ "key": "horse",
+ "value": "🐎"
+ },
+ {
+ "key": "pig",
+ "value": "🐖"
+ },
+ {
+ "key": "rat",
+ "value": "🐀"
+ },
+ {
+ "key": "mouse",
+ "value": "🐁"
+ },
+ {
+ "key": "rooster",
+ "value": "🐓"
+ },
+ {
+ "key": "turkey",
+ "value": "🦃"
+ },
+ {
+ "key": "dove",
+ "value": "🕊"
+ },
+ {
+ "key": "dog",
+ "value": "🐕"
+ },
+ {
+ "key": "poodle",
+ "value": "🐩"
+ },
+ {
+ "key": "cat",
+ "value": "🐈"
+ },
+ {
+ "key": "rabbit",
+ "value": "🐇"
+ },
+ {
+ "key": "chipmunk",
+ "value": "🐿"
+ },
+ {
+ "key": "paw_prints",
+ "value": "🐾"
+ },
+ {
+ "key": "dragon",
+ "value": "🐉"
+ },
+ {
+ "key": "dragon_face",
+ "value": "🐲"
+ },
+ {
+ "key": "cactus",
+ "value": "🌵"
+ },
+ {
+ "key": "christmas_tree",
+ "value": "🎄"
+ },
+ {
+ "key": "ever_green_tree",
+ "value": "🌲"
+ },
+ {
+ "key": "deciduous_tree",
+ "value": "🌳"
+ },
+ {
+ "key": "palm_tree",
+ "value": "🌴"
+ },
+ {
+ "key": "seedling",
+ "value": "🌱"
+ },
+ {
+ "key": "herb",
+ "value": "🌿"
+ },
+ {
+ "key": "shamrock",
+ "value": "☘"
+ },
+ {
+ "key": "four_leaf",
+ "value": "🍀"
+ },
+ {
+ "key": "pine_decoration",
+ "value": "🎍"
+ },
+ {
+ "key": "tanabata_tree",
+ "value": "🎋"
+ },
+ {
+ "key": "leaf_wind",
+ "value": "🍃"
+ },
+ {
+ "key": "fallen_leaf",
+ "value": "🍂"
+ },
+ {
+ "key": "maple_leaf",
+ "value": "🍁"
+ },
+ {
+ "key": "ear_of_rice",
+ "value": "🌾"
+ },
+ {
+ "key": "hibiscus",
+ "value": "🌺"
+ },
+ {
+ "key": "sunflower",
+ "value": "🌻"
+ },
+ {
+ "key": "rose",
+ "value": "🌹"
+ },
+ {
+ "key": "tulip",
+ "value": "🌷"
+ },
+ {
+ "key": "blossom",
+ "value": "🌼"
+ },
+ {
+ "key": "cherry_blossom",
+ "value": "🌸"
+ },
+ {
+ "key": "bouquet",
+ "value": "💐"
+ },
+ {
+ "key": "mushroom",
+ "value": "🍄"
+ },
+ {
+ "key": "chestnut",
+ "value": "🌰"
+ },
+ {
+ "key": "jack_o_lantern",
+ "value": "🎃"
+ },
+ {
+ "key": "spiral_shell",
+ "value": "🐚"
+ },
+ {
+ "key": "spider_web",
+ "value": "🕸"
+ },
+ {
+ "key": "earth_america",
+ "value": "🌎"
+ },
+ {
+ "key": "earth_europe",
+ "value": "🌍"
+ },
+ {
+ "key": "earth_australia",
+ "value": "🌏"
+ },
+ {
+ "key": "full_moon",
+ "value": "🌕"
+ },
+ {
+ "key": "waning_gibbous_moon",
+ "value": "🌖"
+ },
+ {
+ "key": "last_quarter_moon",
+ "value": "🌗"
+ },
+ {
+ "key": "waning_crescent_moon",
+ "value": "🌘"
+ },
+ {
+ "key": "new_moon_symbol",
+ "value": "🌑"
+ },
+ {
+ "key": "waxing_crescent_moon",
+ "value": "🌒"
+ },
+ {
+ "key": "first_quarter_moon",
+ "value": "🌓"
+ },
+ {
+ "key": "waxing_gibbous_moon",
+ "value": "🌔"
+ },
+ {
+ "key": "new_moon_with_face",
+ "value": "🌚"
+ },
+ {
+ "key": "full_moon_face",
+ "value": "🌝"
+ },
+ {
+ "key": "first_quarter_moon_face",
+ "value": "🌛"
+ },
+ {
+ "key": "last_quarter_moon_face",
+ "value": "🌜"
+ },
+ {
+ "key": "sun_face",
+ "value": "🌞"
+ },
+ {
+ "key": "crescent_moon",
+ "value": "🌙"
+ },
+ {
+ "key": "white_star",
+ "value": "⭐"
+ },
+ {
+ "key": "glowing_star",
+ "value": "🌟"
+ },
+ {
+ "key": "dizzy_symbol",
+ "value": "💫"
+ },
+ {
+ "key": "sparkles",
+ "value": "✨"
+ },
+ {
+ "key": "comet",
+ "value": "☄"
+ },
+ {
+ "key": "black_sun_with_rays",
+ "value": "☀"
+ },
+ {
+ "key": "white_sun_small_cloud",
+ "value": "🌤"
+ },
+ {
+ "key": "sun_behind_cloud",
+ "value": "⛅"
+ },
+ {
+ "key": "white_sun_behind_cloud",
+ "value": "🌥"
+ },
+ {
+ "key": "white_sun_behind_cloud_rain",
+ "value": "🌦"
+ },
+ {
+ "key": "cloud",
+ "value": "☁"
+ },
+ {
+ "key": "cloud_with_rain",
+ "value": "🌧"
+ },
+ {
+ "key": "thunder_cloud_rain",
+ "value": "⛈"
+ },
+ {
+ "key": "cloud_lightening",
+ "value": "🌩"
+ },
+ {
+ "key": "high_voltage",
+ "value": "⚡"
+ },
+ {
+ "key": "fire",
+ "value": "🔥"
+ },
+ {
+ "key": "collision",
+ "value": "💥"
+ },
+ {
+ "key": "snow_flake",
+ "value": "❄"
+ },
+ {
+ "key": "cloud_with_snow",
+ "value": "🌨"
+ },
+ {
+ "key": "snowman",
+ "value": "☃"
+ },
+ {
+ "key": "snowman_without_snow",
+ "value": "⛄"
+ },
+ {
+ "key": "wind_blowing_face",
+ "value": "🌬"
+ },
+ {
+ "key": "dash_symbol",
+ "value": "💨"
+ },
+ {
+ "key": "cloud_with_tornado",
+ "value": "🌪"
+ },
+ {
+ "key": "fog",
+ "value": "🌫"
+ },
+ {
+ "key": "umbrella",
+ "value": "☂"
+ },
+ {
+ "key": "umbrella_with_rain_drops",
+ "value": "☔"
+ },
+ {
+ "key": "droplet",
+ "value": "💧"
+ },
+ {
+ "key": "splashing_sweat",
+ "value": "💦"
+ },
+ {
+ "key": "water_wave",
+ "value": "🌊"
+ }
+ ]
+ }
+} \ No newline at end of file
diff --git a/nikola/plugins/shortcode/emoji/data/Objects.json b/nikola/plugins/shortcode/emoji/data/Objects.json
new file mode 100644
index 0000000..5f13056
--- /dev/null
+++ b/nikola/plugins/shortcode/emoji/data/Objects.json
@@ -0,0 +1,718 @@
+{
+ "objects": {
+ "object": [
+ {
+ "key": "watch",
+ "value": "⌚"
+ },
+ {
+ "key": "mobile_phone",
+ "value": "📱"
+ },
+ {
+ "key": "mobile_phone_with_right_arrow",
+ "value": "📲"
+ },
+ {
+ "key": "personal_computer",
+ "value": "💻"
+ },
+ {
+ "key": "keyboard",
+ "value": "⌨"
+ },
+ {
+ "key": "desktop_computer",
+ "value": "🖥"
+ },
+ {
+ "key": "printer",
+ "value": "🖨"
+ },
+ {
+ "key": "three_button_mouse",
+ "value": "🖱"
+ },
+ {
+ "key": "track_ball",
+ "value": "🖲"
+ },
+ {
+ "key": "joystick",
+ "value": "🕹"
+ },
+ {
+ "key": "compression",
+ "value": "🗜"
+ },
+ {
+ "key": "mini_disc",
+ "value": "💽"
+ },
+ {
+ "key": "floppy_disk",
+ "value": "💾"
+ },
+ {
+ "key": "optical_disc",
+ "value": "💿"
+ },
+ {
+ "key": "dvd",
+ "value": "📀"
+ },
+ {
+ "key": "video_cassette",
+ "value": "📼"
+ },
+ {
+ "key": "camera",
+ "value": "📷"
+ },
+ {
+ "key": "camera_with_flash",
+ "value": "📸"
+ },
+ {
+ "key": "video_camera",
+ "value": "📹"
+ },
+ {
+ "key": "movie_camera",
+ "value": "🎥"
+ },
+ {
+ "key": "film_projector",
+ "value": "📽"
+ },
+ {
+ "key": "film_frames",
+ "value": "🎞"
+ },
+ {
+ "key": "telephone_receiver",
+ "value": "📞"
+ },
+ {
+ "key": "black_telephone",
+ "value": "☎"
+ },
+ {
+ "key": "pager",
+ "value": "📟"
+ },
+ {
+ "key": "fax_machine",
+ "value": "📠"
+ },
+ {
+ "key": "television",
+ "value": "📺"
+ },
+ {
+ "key": "radio",
+ "value": "📻"
+ },
+ {
+ "key": "studio_microphone",
+ "value": "🎙"
+ },
+ {
+ "key": "level_slider",
+ "value": "🎚"
+ },
+ {
+ "key": "control_knobs",
+ "value": "🎛"
+ },
+ {
+ "key": "stop_watch",
+ "value": "⏱"
+ },
+ {
+ "key": "timer_clock",
+ "value": "⏲"
+ },
+ {
+ "key": "alarm_clock",
+ "value": "⏰"
+ },
+ {
+ "key": "mantel_piece_clock",
+ "value": "🕰"
+ },
+ {
+ "key": "hour_glass_with_flowing_stand",
+ "value": "⏳"
+ },
+ {
+ "key": "hour_glass",
+ "value": "⌛"
+ },
+ {
+ "key": "satellite_antenna",
+ "value": "📡"
+ },
+ {
+ "key": "battery",
+ "value": "🔋"
+ },
+ {
+ "key": "electric_plug",
+ "value": "🔌"
+ },
+ {
+ "key": "electric_light_bulb",
+ "value": "💡"
+ },
+ {
+ "key": "electric_torch",
+ "value": "🔦"
+ },
+ {
+ "key": "candle",
+ "value": "🕯"
+ },
+ {
+ "key": "waste_basket",
+ "value": "🗑"
+ },
+ {
+ "key": "oil_drum",
+ "value": "🛢"
+ },
+ {
+ "key": "money_with_wings",
+ "value": "💸"
+ },
+ {
+ "key": "bank_note_with_dollar_sign",
+ "value": "💵"
+ },
+ {
+ "key": "bank_note_with_yen_sign",
+ "value": "💴"
+ },
+ {
+ "key": "bank_note_with_euro_sign",
+ "value": "💶"
+ },
+ {
+ "key": "bank_note_with_pounds_sign",
+ "value": "💷"
+ },
+ {
+ "key": "money_bag",
+ "value": "💰"
+ },
+ {
+ "key": "credit_card",
+ "value": "💳"
+ },
+ {
+ "key": "gem_stone",
+ "value": "💎"
+ },
+ {
+ "key": "scales",
+ "value": "⚖"
+ },
+ {
+ "key": "wrench",
+ "value": "🔧"
+ },
+ {
+ "key": "hammer",
+ "value": "🔨"
+ },
+ {
+ "key": "hammer_and_pick",
+ "value": "⚒"
+ },
+ {
+ "key": "hammer_and_wrench",
+ "value": "🛠"
+ },
+ {
+ "key": "pick",
+ "value": "⛏"
+ },
+ {
+ "key": "nut_and_bolt",
+ "value": "🔩"
+ },
+ {
+ "key": "gear",
+ "value": "⚙"
+ },
+ {
+ "key": "chains",
+ "value": "⛓"
+ },
+ {
+ "key": "pistol",
+ "value": "🔫"
+ },
+ {
+ "key": "bomb",
+ "value": "💣"
+ },
+ {
+ "key": "hocho",
+ "value": "🔪"
+ },
+ {
+ "key": "dagger_knife",
+ "value": "🗡"
+ },
+ {
+ "key": "crossed_words",
+ "value": "⚔"
+ },
+ {
+ "key": "shield",
+ "value": "🛡"
+ },
+ {
+ "key": "smoking_symbol",
+ "value": "🚬"
+ },
+ {
+ "key": "skull_and_cross_bones",
+ "value": "☠"
+ },
+ {
+ "key": "coffin",
+ "value": "⚰"
+ },
+ {
+ "key": "funeral_urn",
+ "value": "⚱"
+ },
+ {
+ "key": "amphora",
+ "value": "🏺"
+ },
+ {
+ "key": "crystal_ball",
+ "value": "🔮"
+ },
+ {
+ "key": "prayer_beads",
+ "value": "📿"
+ },
+ {
+ "key": "barber_pole",
+ "value": "💈"
+ },
+ {
+ "key": "alembic",
+ "value": "⚗"
+ },
+ {
+ "key": "telescope",
+ "value": "🔭"
+ },
+ {
+ "key": "microscope",
+ "value": "🔬"
+ },
+ {
+ "key": "hole",
+ "value": "🕳"
+ },
+ {
+ "key": "pill",
+ "value": "💊"
+ },
+ {
+ "key": "syringe",
+ "value": "💉"
+ },
+ {
+ "key": "thermometer",
+ "value": "🌡"
+ },
+ {
+ "key": "label",
+ "value": "🏷"
+ },
+ {
+ "key": "bookmark",
+ "value": "🔖"
+ },
+ {
+ "key": "toilet",
+ "value": "🚽"
+ },
+ {
+ "key": "shower",
+ "value": "🚿"
+ },
+ {
+ "key": "bath_tub",
+ "value": "🛁"
+ },
+ {
+ "key": "key",
+ "value": "🔑"
+ },
+ {
+ "key": "old_key",
+ "value": "🗝"
+ },
+ {
+ "key": "couch_and_lamp",
+ "value": "🛋"
+ },
+ {
+ "key": "sleeping_accommodation",
+ "value": "🛌"
+ },
+ {
+ "key": "bed",
+ "value": "🛏"
+ },
+ {
+ "key": "door",
+ "value": "🚪"
+ },
+ {
+ "key": "bell_hop_bell",
+ "value": "🛎"
+ },
+ {
+ "key": "frame_with_picture",
+ "value": "🖼"
+ },
+ {
+ "key": "world_map",
+ "value": "🗺"
+ },
+ {
+ "key": "umbrella_on_ground",
+ "value": "⛱"
+ },
+ {
+ "key": "moyai",
+ "value": "🗿"
+ },
+ {
+ "key": "shopping_bags",
+ "value": "🛍"
+ },
+ {
+ "key": "balloon",
+ "value": "🎈"
+ },
+ {
+ "key": "carp_streamer",
+ "value": "🎏"
+ },
+ {
+ "key": "ribbon",
+ "value": "🎀"
+ },
+ {
+ "key": "wrapped_present",
+ "value": "🎁"
+ },
+ {
+ "key": "confetti_ball",
+ "value": "🎊"
+ },
+ {
+ "key": "party_popper",
+ "value": "🎉"
+ },
+ {
+ "key": "japanese_dolls",
+ "value": "🎎"
+ },
+ {
+ "key": "wind_chime",
+ "value": "🎐"
+ },
+ {
+ "key": "crossed_flags",
+ "value": "🎌"
+ },
+ {
+ "key": "izakaya_lantern",
+ "value": "🏮"
+ },
+ {
+ "key": "envelope",
+ "value": "✉"
+ },
+ {
+ "key": "envelope_with_down_arrow",
+ "value": "📩"
+ },
+ {
+ "key": "incoming_envelope",
+ "value": "📨"
+ },
+ {
+ "key": "email_symbol",
+ "value": "📧"
+ },
+ {
+ "key": "love_letter",
+ "value": "💌"
+ },
+ {
+ "key": "post_box",
+ "value": "📮"
+ },
+ {
+ "key": "closed_mail_box_with_lowered_flag",
+ "value": "📪"
+ },
+ {
+ "key": "closed_mail_box_with_raised_flag",
+ "value": "📫"
+ },
+ {
+ "key": "open_mail_box_with_raised_flag",
+ "value": "📬"
+ },
+ {
+ "key": "open_mail_box_with_lowered_flag",
+ "value": "📭"
+ },
+ {
+ "key": "package",
+ "value": "📦"
+ },
+ {
+ "key": "postal_horn",
+ "value": "📯"
+ },
+ {
+ "key": "inbox_tray",
+ "value": "📥"
+ },
+ {
+ "key": "outbox_tray",
+ "value": "📤"
+ },
+ {
+ "key": "scroll",
+ "value": "📜"
+ },
+ {
+ "key": "page_with_curl",
+ "value": "📃"
+ },
+ {
+ "key": "bookmark_tabs",
+ "value": "📑"
+ },
+ {
+ "key": "bar_chart",
+ "value": "📊"
+ },
+ {
+ "key": "chart_with_upwards_trend",
+ "value": "📈"
+ },
+ {
+ "key": "chart_with_downwards_trend",
+ "value": "📉"
+ },
+ {
+ "key": "page_facing_up",
+ "value": "📄"
+ },
+ {
+ "key": "calender",
+ "value": "📅"
+ },
+ {
+ "key": "tear_off_calendar",
+ "value": "📆"
+ },
+ {
+ "key": "spiral_calendar_pad",
+ "value": "🗓"
+ },
+ {
+ "key": "card_index",
+ "value": "📇"
+ },
+ {
+ "key": "card_file_box",
+ "value": "🗃"
+ },
+ {
+ "key": "ballot_box_with_ballot",
+ "value": "🗳"
+ },
+ {
+ "key": "file_cabinet",
+ "value": "🗄"
+ },
+ {
+ "key": "clip_board",
+ "value": "📋"
+ },
+ {
+ "key": "spiral_notepad",
+ "value": "🗒"
+ },
+ {
+ "key": "file_folder",
+ "value": "📁"
+ },
+ {
+ "key": "open_file_folder",
+ "value": "📂"
+ },
+ {
+ "key": "card_index_dividers",
+ "value": "🗂"
+ },
+ {
+ "key": "rolled_up_newspaper",
+ "value": "🗞"
+ },
+ {
+ "key": "newspaper",
+ "value": "📰"
+ },
+ {
+ "key": "notebook",
+ "value": "📓"
+ },
+ {
+ "key": "closed_book",
+ "value": "📕"
+ },
+ {
+ "key": "green_book",
+ "value": "📗"
+ },
+ {
+ "key": "blue_book",
+ "value": "📘"
+ },
+ {
+ "key": "orange_book",
+ "value": "📙"
+ },
+ {
+ "key": "notebook_with_decorative_cover",
+ "value": "📔"
+ },
+ {
+ "key": "ledger",
+ "value": "📒"
+ },
+ {
+ "key": "books",
+ "value": "📚"
+ },
+ {
+ "key": "open_book",
+ "value": "📖"
+ },
+ {
+ "key": "link_symbol",
+ "value": "🔗"
+ },
+ {
+ "key": "paper_clip",
+ "value": "📎"
+ },
+ {
+ "key": "linked_paper_clips",
+ "value": "🖇"
+ },
+ {
+ "key": "black_scissors",
+ "value": "✂"
+ },
+ {
+ "key": "triangular_ruler",
+ "value": "📐"
+ },
+ {
+ "key": "straight_ruler",
+ "value": "📏"
+ },
+ {
+ "key": "pushpin",
+ "value": "📌"
+ },
+ {
+ "key": "round_pushpin",
+ "value": "📍"
+ },
+ {
+ "key": "triangular_flag_post",
+ "value": "🚩"
+ },
+ {
+ "key": "waving_white_flag",
+ "value": "🏳"
+ },
+ {
+ "key": "waving_black_flag",
+ "value": "🏴"
+ },
+ {
+ "key": "closed_lock_with_key",
+ "value": "🔐"
+ },
+ {
+ "key": "lock",
+ "value": "🔒"
+ },
+ {
+ "key": "open_lock",
+ "value": "🔓"
+ },
+ {
+ "key": "lock_with_ink_pen",
+ "value": "🔏"
+ },
+ {
+ "key": "lower_left_ball_point_pen",
+ "value": "🖊"
+ },
+ {
+ "key": "lower_left_fountain_pen",
+ "value": "🖋"
+ },
+ {
+ "key": "black_nib",
+ "value": "✒"
+ },
+ {
+ "key": "memo",
+ "value": "📝"
+ },
+ {
+ "key": "pencil",
+ "value": "✏"
+ },
+ {
+ "key": "lower_left_crayon",
+ "value": "🖍"
+ },
+ {
+ "key": "lower_left_paint_brush",
+ "value": "🖌"
+ },
+ {
+ "key": "left_pointing_magnifying_glass",
+ "value": "🔍"
+ },
+ {
+ "key": "right_pointing_magnifying_glass",
+ "value": "🔎"
+ }
+ ]
+ }
+} \ No newline at end of file
diff --git a/nikola/plugins/shortcode/emoji/data/People.json b/nikola/plugins/shortcode/emoji/data/People.json
new file mode 100644
index 0000000..a5fb88f
--- /dev/null
+++ b/nikola/plugins/shortcode/emoji/data/People.json
@@ -0,0 +1,1922 @@
+{
+ "peoples": {
+ "people": [
+ {
+ "key": "grinning_face",
+ "value": "😀"
+ },
+ {
+ "key": "grimacing_face",
+ "value": "😬"
+ },
+ {
+ "key": "grimacing_face_with_smile_eyes",
+ "value": "😁"
+ },
+ {
+ "key": "face_with_tear_of_joy",
+ "value": "😂"
+ },
+ {
+ "key": "smiling_face_with_open_mouth",
+ "value": "😃"
+ },
+ {
+ "key": "smiling_face_with_open_mouth_eyes",
+ "value": "😄"
+ },
+ {
+ "key": "smiling_face_with_open_mouth_cold_sweat",
+ "value": "😅"
+ },
+ {
+ "key": "smiling_face_with_open_mouth_hand_tight",
+ "value": "😆"
+ },
+ {
+ "key": "smiling_face_with_halo",
+ "value": "😇"
+ },
+ {
+ "key": "winking_face",
+ "value": "😉"
+ },
+ {
+ "key": "black_smiling_face",
+ "value": "😊"
+ },
+ {
+ "key": "slightly_smiling_face",
+ "value": "🙂"
+ },
+ {
+ "key": "upside_down_face",
+ "value": "🙃"
+ },
+ {
+ "key": "white_smiling_face",
+ "value": "☺"
+ },
+ {
+ "key": "face_savouring_delicious_food",
+ "value": "😋"
+ },
+ {
+ "key": "relieved_face",
+ "value": "😌"
+ },
+ {
+ "key": "smiling_face_heart_eyes",
+ "value": "😍"
+ },
+ {
+ "key": "face_throwing_kiss",
+ "value": "😘"
+ },
+ {
+ "key": "kissing_face",
+ "value": "😗"
+ },
+ {
+ "key": "kissing_face_with_smile_eyes",
+ "value": "😙"
+ },
+ {
+ "key": "kissing_face_with_closed_eyes",
+ "value": "😚"
+ },
+ {
+ "key": "face_with_tongue_wink_eye",
+ "value": "😜"
+ },
+ {
+ "key": "face_with_tongue_closed_eye",
+ "value": "😝"
+ },
+ {
+ "key": "face_with_stuck_out_tongue",
+ "value": "😛"
+ },
+ {
+ "key": "money_mouth_face",
+ "value": "🤑"
+ },
+ {
+ "key": "nerd_face",
+ "value": "🤓"
+ },
+ {
+ "key": "smiling_face_with_sun_glass",
+ "value": "😎"
+ },
+ {
+ "key": "hugging_face",
+ "value": "🤗"
+ },
+ {
+ "key": "smirking_face",
+ "value": "😏"
+ },
+ {
+ "key": "face_without_mouth",
+ "value": "😶"
+ },
+ {
+ "key": "neutral_face",
+ "value": "😐"
+ },
+ {
+ "key": "expressionless_face",
+ "value": "😑"
+ },
+ {
+ "key": "unamused_face",
+ "value": "😒"
+ },
+ {
+ "key": "face_with_rolling_eyes",
+ "value": "🙄"
+ },
+ {
+ "key": "thinking_face",
+ "value": "🤔"
+ },
+ {
+ "key": "flushed_face",
+ "value": "😳"
+ },
+ {
+ "key": "disappointed_face",
+ "value": "😞"
+ },
+ {
+ "key": "worried_face",
+ "value": "😟"
+ },
+ {
+ "key": "angry_face",
+ "value": "😠"
+ },
+ {
+ "key": "pouting_face",
+ "value": "😡"
+ },
+ {
+ "key": "pensive_face",
+ "value": "😔"
+ },
+ {
+ "key": "confused_face",
+ "value": "😕"
+ },
+ {
+ "key": "slightly_frowning_face",
+ "value": "🙁"
+ },
+ {
+ "key": "white_frowning_face",
+ "value": "☹"
+ },
+ {
+ "key": "persevering_face",
+ "value": "😣"
+ },
+ {
+ "key": "confounded_face",
+ "value": "😖"
+ },
+ {
+ "key": "tired_face",
+ "value": "😫"
+ },
+ {
+ "key": "weary_face",
+ "value": "😩"
+ },
+ {
+ "key": "face_with_look_of_triumph",
+ "value": "😤"
+ },
+ {
+ "key": "face_with_open_mouth",
+ "value": "😮"
+ },
+ {
+ "key": "face_screaming_in_fear",
+ "value": "😱"
+ },
+ {
+ "key": "fearful_face",
+ "value": "😨"
+ },
+ {
+ "key": "face_with_open_mouth_cold_sweat",
+ "value": "😰"
+ },
+ {
+ "key": "hushed_face",
+ "value": "😯"
+ },
+ {
+ "key": "frowning_face_with_open_mouth",
+ "value": "😦"
+ },
+ {
+ "key": "anguished_face",
+ "value": "😧"
+ },
+ {
+ "key": "crying_face",
+ "value": "😢"
+ },
+ {
+ "key": "disappointed_but_relieved_face",
+ "value": "😥"
+ },
+ {
+ "key": "sleepy_face",
+ "value": "😪"
+ },
+ {
+ "key": "face_with_cold_sweat",
+ "value": "😓"
+ },
+ {
+ "key": "loudly_crying_face",
+ "value": "😭"
+ },
+ {
+ "key": "dizzy_face",
+ "value": "😵"
+ },
+ {
+ "key": "astonished_face",
+ "value": "😲"
+ },
+ {
+ "key": "zipper_mouth_face",
+ "value": "🤐"
+ },
+ {
+ "key": "face_with_medical_mask",
+ "value": "😷"
+ },
+ {
+ "key": "face_with_thermometer",
+ "value": "🤒"
+ },
+ {
+ "key": "face_with_head_bandage",
+ "value": "🤕"
+ },
+ {
+ "key": "sleeping_face",
+ "value": "😴"
+ },
+ {
+ "key": "sleeping_symbol",
+ "value": "💤"
+ },
+ {
+ "key": "pile_of_poo",
+ "value": "💩"
+ },
+ {
+ "key": "smiling_face_with_horns",
+ "value": "😈"
+ },
+ {
+ "key": "imp",
+ "value": "👿"
+ },
+ {
+ "key": "japanese_ogre",
+ "value": "👹"
+ },
+ {
+ "key": "japanese_goblin",
+ "value": "👺"
+ },
+ {
+ "key": "skull",
+ "value": "💀"
+ },
+ {
+ "key": "ghost",
+ "value": "👻"
+ },
+ {
+ "key": "extra_terrestrial_alien",
+ "value": "👽"
+ },
+ {
+ "key": "robot_face",
+ "value": "🤖"
+ },
+ {
+ "key": "smiling_cat_face_open_mouth",
+ "value": "😺"
+ },
+ {
+ "key": "grinning_cat_face_smile_eyes",
+ "value": "😸"
+ },
+ {
+ "key": "cat_face_tears_of_joy",
+ "value": "😹"
+ },
+ {
+ "key": "smiling_cat_face_heart_shaped_eyes",
+ "value": "😻"
+ },
+ {
+ "key": "cat_face_wry_smile",
+ "value": "😼"
+ },
+ {
+ "key": "kissing_cat_face_closed_eyes",
+ "value": "😽"
+ },
+ {
+ "key": "weary_cat_face",
+ "value": "🙀"
+ },
+ {
+ "key": "crying_cat_face",
+ "value": "😿"
+ },
+ {
+ "key": "pouting_cat_face",
+ "value": "😾"
+ },
+ {
+ "key": "person_both_hand_celebration",
+ "value": "🙌"
+ },
+ {
+ "key": "person_both_hand_celebration_type_1_2",
+ "value": "🙌🏻"
+ },
+ {
+ "key": "person_both_hand_celebration_type_3",
+ "value": "🙌🏼"
+ },
+ {
+ "key": "person_both_hand_celebration_type_4",
+ "value": "🙌🏽"
+ },
+ {
+ "key": "person_both_hand_celebration_type_5",
+ "value": "🙌🏾"
+ },
+ {
+ "key": "person_both_hand_celebration_type_6",
+ "value": "🙌🏿"
+ },
+ {
+ "key": "clapping_hand",
+ "value": "👏"
+ },
+ {
+ "key": "clapping_hand_type_1_2",
+ "value": "👏🏼"
+ },
+ {
+ "key": "clapping_hand_type_3",
+ "value": "👏🏼"
+ },
+ {
+ "key": "clapping_hand_type_4",
+ "value": "👏🏽"
+ },
+ {
+ "key": "clapping_hand_type_5",
+ "value": "👏🏾"
+ },
+ {
+ "key": "clapping_hand_type_6",
+ "value": "👏🏿"
+ },
+ {
+ "key": "waving_hands",
+ "value": "👋"
+ },
+ {
+ "key": "waving_hands_type_1_2",
+ "value": "👋🏻"
+ },
+ {
+ "key": "waving_hands_type_3",
+ "value": "👋🏼"
+ },
+ {
+ "key": "waving_hands_type_4",
+ "value": "👋🏽"
+ },
+ {
+ "key": "waving_hands_type_5",
+ "value": "👋🏾"
+ },
+ {
+ "key": "waving_hands_type_6",
+ "value": "👋🏿"
+ },
+ {
+ "key": "thumbs_up",
+ "value": "👍"
+ },
+ {
+ "key": "thumbs_up_type_1_2",
+ "value": "👍🏻"
+ },
+ {
+ "key": "thumbs_up_type_3",
+ "value": "👍🏼"
+ },
+ {
+ "key": "thumbs_up_type_4",
+ "value": "👍🏽"
+ },
+ {
+ "key": "thumbs_up_type_5",
+ "value": "👍🏾"
+ },
+ {
+ "key": "thumbs_up_type_6",
+ "value": "👍🏿"
+ },
+ {
+ "key": "thumbs_down",
+ "value": "👎"
+ },
+ {
+ "key": "thumbs_down_type_1_2",
+ "value": "👎🏻"
+ },
+ {
+ "key": "thumbs_down_type_3",
+ "value": "👎🏼"
+ },
+ {
+ "key": "thumbs_down_type_4",
+ "value": "👎🏽"
+ },
+ {
+ "key": "thumbs_down_type_5",
+ "value": "👎🏾"
+ },
+ {
+ "key": "thumbs_down_type_6",
+ "value": "👎🏿"
+ },
+ {
+ "key": "fist_hand",
+ "value": "👊"
+ },
+ {
+ "key": "fist_hand_type_1_2",
+ "value": "👊🏻"
+ },
+ {
+ "key": "fist_hand_type_3",
+ "value": "👊🏼"
+ },
+ {
+ "key": "fist_hand_type_4",
+ "value": "👊🏽"
+ },
+ {
+ "key": "fist_hand_type_5",
+ "value": "👊🏾"
+ },
+ {
+ "key": "fist_hand_type_6",
+ "value": "👊🏿"
+ },
+ {
+ "key": "raised_fist",
+ "value": "✊"
+ },
+ {
+ "key": "raised_fist_type_1_2",
+ "value": "✊🏻"
+ },
+ {
+ "key": "raised_fist_type_3",
+ "value": "✊🏼"
+ },
+ {
+ "key": "raised_fist_type_4",
+ "value": "✊🏽"
+ },
+ {
+ "key": "raised_fist_type_5",
+ "value": "✊🏾"
+ },
+ {
+ "key": "raised_fist_type_6",
+ "value": "✊🏿"
+ },
+ {
+ "key": "victory_hand",
+ "value": "✌"
+ },
+ {
+ "key": "victory_hand_type_1_2",
+ "value": "✌🏻"
+ },
+ {
+ "key": "victory_hand_type_3",
+ "value": "✌🏼"
+ },
+ {
+ "key": "victory_hand_type_4",
+ "value": "✌🏽"
+ },
+ {
+ "key": "victory_hand_type_5",
+ "value": "✌🏾"
+ },
+ {
+ "key": "victory_hand_type_6",
+ "value": "✌🏿"
+ },
+ {
+ "key": "ok_hand",
+ "value": "👌"
+ },
+ {
+ "key": "ok_hand_type_1_2",
+ "value": "👌🏻"
+ },
+ {
+ "key": "ok_hand_type_3",
+ "value": "👌🏼"
+ },
+ {
+ "key": "ok_hand_type_4",
+ "value": "👌🏽"
+ },
+ {
+ "key": "ok_hand_type_5",
+ "value": "👌🏾"
+ },
+ {
+ "key": "ok_hand_type_6",
+ "value": "👌🏿"
+ },
+ {
+ "key": "raised_hand",
+ "value": "✋"
+ },
+ {
+ "key": "raised_hand_type_1_2",
+ "value": "✋🏻"
+ },
+ {
+ "key": "raised_hand_type_3",
+ "value": "✋🏼"
+ },
+ {
+ "key": "raised_hand_type_4",
+ "value": "✋🏽"
+ },
+ {
+ "key": "raised_hand_type_5",
+ "value": "✋🏾"
+ },
+ {
+ "key": "raised_hand_type_6",
+ "value": "✋🏿"
+ },
+ {
+ "key": "open_hand",
+ "value": "👐"
+ },
+ {
+ "key": "open_hand_type_1_2",
+ "value": "👐🏻"
+ },
+ {
+ "key": "open_hand_type_3",
+ "value": "👐🏼"
+ },
+ {
+ "key": "open_hand_type_4",
+ "value": "👐🏽"
+ },
+ {
+ "key": "open_hand_type_5",
+ "value": "👐🏾"
+ },
+ {
+ "key": "open_hand_type_6",
+ "value": "👐🏿"
+ },
+ {
+ "key": "flexed_biceps",
+ "value": "💪"
+ },
+ {
+ "key": "flexed_biceps_type_1_2",
+ "value": "💪🏻"
+ },
+ {
+ "key": "flexed_biceps_type_3",
+ "value": "💪🏼"
+ },
+ {
+ "key": "flexed_biceps_type_4",
+ "value": "💪🏽"
+ },
+ {
+ "key": "flexed_biceps_type_5",
+ "value": "💪🏾"
+ },
+ {
+ "key": "flexed_biceps_type_6",
+ "value": "💪🏿"
+ },
+ {
+ "key": "folded_hands",
+ "value": "🙏"
+ },
+ {
+ "key": "folded_hands_type_1_2",
+ "value": "🙏🏻"
+ },
+ {
+ "key": "folded_hands_type_3",
+ "value": "🙏🏼"
+ },
+ {
+ "key": "folded_hands_type_4",
+ "value": "🙏🏽"
+ },
+ {
+ "key": "folded_hands_type_5",
+ "value": "🙏🏾"
+ },
+ {
+ "key": "folded_hands_type_6",
+ "value": "🙏🏿"
+ },
+ {
+ "key": "up_pointing_index",
+ "value": "☝"
+ },
+ {
+ "key": "up_pointing_index_type_1_2",
+ "value": "☝🏻"
+ },
+ {
+ "key": "up_pointing_index_type_3",
+ "value": "☝🏼"
+ },
+ {
+ "key": "up_pointing_index_type_4",
+ "value": "☝🏽"
+ },
+ {
+ "key": "up_pointing_index_type_5",
+ "value": "☝🏾"
+ },
+ {
+ "key": "up_pointing_index_type_6",
+ "value": "☝🏿"
+ },
+ {
+ "key": "up_pointing_backhand_index",
+ "value": "👆"
+ },
+ {
+ "key": "up_pointing_backhand_index_type_1_2",
+ "value": "👆🏻"
+ },
+ {
+ "key": "up_pointing_backhand_index_type_3",
+ "value": "👆🏼"
+ },
+ {
+ "key": "up_pointing_backhand_index_type_4",
+ "value": "👆🏽"
+ },
+ {
+ "key": "up_pointing_backhand_index_type_5",
+ "value": "👆🏾"
+ },
+ {
+ "key": "up_pointing_backhand_index_type_6",
+ "value": "👆🏿"
+ },
+ {
+ "key": "down_pointing_backhand_index",
+ "value": "👇"
+ },
+ {
+ "key": "down_pointing_backhand_index_type_1_2",
+ "value": "👇🏻"
+ },
+ {
+ "key": "down_pointing_backhand_index_type_3",
+ "value": "👇🏼"
+ },
+ {
+ "key": "down_pointing_backhand_index_type_4",
+ "value": "👇🏽"
+ },
+ {
+ "key": "down_pointing_backhand_index_type_5",
+ "value": "👇🏾"
+ },
+ {
+ "key": "down_pointing_backhand_index_type_6",
+ "value": "👇🏿"
+ },
+ {
+ "key": "left_pointing_backhand_index",
+ "value": "👈"
+ },
+ {
+ "key": "left_pointing_backhand_index_type_1_2",
+ "value": "👈🏻"
+ },
+ {
+ "key": "left_pointing_backhand_index_type_3",
+ "value": "👈🏼"
+ },
+ {
+ "key": "left_pointing_backhand_index_type_4",
+ "value": "👈🏽"
+ },
+ {
+ "key": "left_pointing_backhand_index_type_5",
+ "value": "👈🏾"
+ },
+ {
+ "key": "left_pointing_backhand_index_type_6",
+ "value": "👈🏿"
+ },
+ {
+ "key": "right_pointing_backhand_index",
+ "value": "👉"
+ },
+ {
+ "key": "right_pointing_backhand_index_type_1_2",
+ "value": "👉🏻"
+ },
+ {
+ "key": "right_pointing_backhand_index_type_3",
+ "value": "👉🏼"
+ },
+ {
+ "key": "right_pointing_backhand_index_type_4",
+ "value": "👉🏽"
+ },
+ {
+ "key": "right_pointing_backhand_index_type_5",
+ "value": "👉🏾"
+ },
+ {
+ "key": "right_pointing_backhand_index_type_6",
+ "value": "👉🏿"
+ },
+ {
+ "key": "reverse_middle_finger",
+ "value": "🖕"
+ },
+ {
+ "key": "reverse_middle_finger_type_1_2",
+ "value": "🖕🏻"
+ },
+ {
+ "key": "reverse_middle_finger_type_3",
+ "value": "🖕🏼"
+ },
+ {
+ "key": "reverse_middle_finger_type_4",
+ "value": "🖕🏽"
+ },
+ {
+ "key": "reverse_middle_finger_type_5",
+ "value": "🖕🏾"
+ },
+ {
+ "key": "reverse_middle_finger_type_6",
+ "value": "🖕🏿"
+ },
+ {
+ "key": "raised_hand_fingers_splayed",
+ "value": "🖐"
+ },
+ {
+ "key": "raised_hand_fingers_splayed_type_1_2",
+ "value": "🖐🏻"
+ },
+ {
+ "key": "raised_hand_fingers_splayed_type_3",
+ "value": "🖐🏼"
+ },
+ {
+ "key": "raised_hand_fingers_splayed_type_4",
+ "value": "🖐🏽"
+ },
+ {
+ "key": "raised_hand_fingers_splayed_type_5",
+ "value": "🖐🏾"
+ },
+ {
+ "key": "raised_hand_fingers_splayed_type_6",
+ "value": "🖐🏿"
+ },
+ {
+ "key": "sign_of_horn",
+ "value": "🤘"
+ },
+ {
+ "key": "sign_of_horn_type_1_2",
+ "value": "🤘🏻"
+ },
+ {
+ "key": "sign_of_horn_type_3",
+ "value": "🤘🏼"
+ },
+ {
+ "key": "sign_of_horn_type_4",
+ "value": "🤘🏽"
+ },
+ {
+ "key": "sign_of_horn_type_5",
+ "value": "🤘🏾"
+ },
+ {
+ "key": "sign_of_horn_type_6",
+ "value": "🤘🏿"
+ },
+ {
+ "key": "raised_hand_part_between_middle_ring",
+ "value": "🖖"
+ },
+ {
+ "key": "raised_hand_part_between_middle_ring_type_1_2",
+ "value": "🖖🏻"
+ },
+ {
+ "key": "raised_hand_part_between_middle_ring_type_3",
+ "value": "🖖🏼"
+ },
+ {
+ "key": "raised_hand_part_between_middle_ring_type_4",
+ "value": "🖖🏽"
+ },
+ {
+ "key": "raised_hand_part_between_middle_ring_type_5",
+ "value": "🖖🏾"
+ },
+ {
+ "key": "raised_hand_part_between_middle_ring_type_6",
+ "value": "🖖🏿"
+ },
+ {
+ "key": "writing_hand",
+ "value": "✍"
+ },
+ {
+ "key": "writing_hand_type_1_2",
+ "value": "✍🏻"
+ },
+ {
+ "key": "writing_hand_type_3",
+ "value": "✍🏼"
+ },
+ {
+ "key": "writing_hand_type_4",
+ "value": "✍🏽"
+ },
+ {
+ "key": "writing_hand_type_5",
+ "value": "✍🏾"
+ },
+ {
+ "key": "writing_hand_type_6",
+ "value": "✍🏿"
+ },
+ {
+ "key": "nail_polish",
+ "value": "💅"
+ },
+ {
+ "key": "nail_polish_type_1_2",
+ "value": "💅🏻"
+ },
+ {
+ "key": "nail_polish_type_3",
+ "value": "💅🏼"
+ },
+ {
+ "key": "nail_polish_type_4",
+ "value": "💅🏽"
+ },
+ {
+ "key": "nail_polish_type_5",
+ "value": "💅🏾"
+ },
+ {
+ "key": "nail_polish_type_6",
+ "value": "💅🏿"
+ },
+ {
+ "key": "mouth",
+ "value": "👄"
+ },
+ {
+ "key": "tongue",
+ "value": "👅"
+ },
+ {
+ "key": "ear",
+ "value": "👂"
+ },
+ {
+ "key": "ear_type_1_2",
+ "value": "👂🏻"
+ },
+ {
+ "key": "ear_type_3",
+ "value": "👂🏼"
+ },
+ {
+ "key": "ear_type_4",
+ "value": "👂🏽"
+ },
+ {
+ "key": "ear_type_5",
+ "value": "👂🏾"
+ },
+ {
+ "key": "ear_type_6",
+ "value": "👂🏿"
+ },
+ {
+ "key": "nose",
+ "value": "👃"
+ },
+ {
+ "key": "nose_type_1_2",
+ "value": "👃🏻"
+ },
+ {
+ "key": "nose_type_3",
+ "value": "👃🏼"
+ },
+ {
+ "key": "nose_type_4",
+ "value": "👃🏽"
+ },
+ {
+ "key": "nose_type_5",
+ "value": "👃🏾"
+ },
+ {
+ "key": "nose_type_6",
+ "value": "👃🏿"
+ },
+ {
+ "key": "eye",
+ "value": "👁"
+ },
+ {
+ "key": "eyes",
+ "value": "👀"
+ },
+ {
+ "key": "bust_in_silhouette",
+ "value": "👤"
+ },
+ {
+ "key": "busts_in_silhouette",
+ "value": "👥"
+ },
+ {
+ "key": "speaking_head_in_silhouette",
+ "value": "🗣"
+ },
+ {
+ "key": "baby",
+ "value": "👶"
+ },
+ {
+ "key": "baby_type_1_2",
+ "value": "👶🏻"
+ },
+ {
+ "key": "baby_type_3",
+ "value": "👶🏼"
+ },
+ {
+ "key": "baby_type_4",
+ "value": "👶🏽"
+ },
+ {
+ "key": "baby_type_5",
+ "value": "👶🏾"
+ },
+ {
+ "key": "baby_type_6",
+ "value": "👶🏿"
+ },
+ {
+ "key": "boy",
+ "value": "👦"
+ },
+ {
+ "key": "boy_type_1_2",
+ "value": "👦🏻"
+ },
+ {
+ "key": "boy_type_3",
+ "value": "👦🏼"
+ },
+ {
+ "key": "boy_type_4",
+ "value": "👦🏽"
+ },
+ {
+ "key": "boy_type_5",
+ "value": "👦🏾"
+ },
+ {
+ "key": "boy_type_6",
+ "value": "👦🏿"
+ },
+ {
+ "key": "girl",
+ "value": "👧"
+ },
+ {
+ "key": "girl_type_1_2",
+ "value": "👧🏻"
+ },
+ {
+ "key": "girl_type_3",
+ "value": "👧🏼"
+ },
+ {
+ "key": "girl_type_4",
+ "value": "👧🏽"
+ },
+ {
+ "key": "girl_type_5",
+ "value": "👧🏾"
+ },
+ {
+ "key": "girl_type_6",
+ "value": "👧🏿"
+ },
+ {
+ "key": "man",
+ "value": "👨"
+ },
+ {
+ "key": "man_type_1_2",
+ "value": "👨🏻"
+ },
+ {
+ "key": "man_type_3",
+ "value": "👨🏼"
+ },
+ {
+ "key": "man_type_4",
+ "value": "👨🏽"
+ },
+ {
+ "key": "man_type_5",
+ "value": "👨🏾"
+ },
+ {
+ "key": "man_type_6",
+ "value": "👨🏿"
+ },
+ {
+ "key": "women",
+ "value": "👩"
+ },
+ {
+ "key": "women_type_1_2",
+ "value": "👩🏻"
+ },
+ {
+ "key": "women_type_3",
+ "value": "👩🏼"
+ },
+ {
+ "key": "women_type_4",
+ "value": "👩🏽"
+ },
+ {
+ "key": "women_type_5",
+ "value": "👩🏾"
+ },
+ {
+ "key": "women_type_6",
+ "value": "👩🏿"
+ },
+ {
+ "key": "person_with_blond_hair",
+ "value": "👱"
+ },
+ {
+ "key": "person_with_blond_hair_type_1_2",
+ "value": "👱🏻"
+ },
+ {
+ "key": "person_with_blond_hair_type_3",
+ "value": "👱🏼"
+ },
+ {
+ "key": "person_with_blond_hair_type_4",
+ "value": "👱🏽"
+ },
+ {
+ "key": "person_with_blond_hair_type_5",
+ "value": "👱🏾"
+ },
+ {
+ "key": "person_with_blond_hair_type_6",
+ "value": "👱🏿"
+ },
+ {
+ "key": "older_man",
+ "value": "👴"
+ },
+ {
+ "key": "older_man_type_1_2",
+ "value": "👴🏻"
+ },
+ {
+ "key": "older_man_type_3",
+ "value": "👴🏼"
+ },
+ {
+ "key": "older_man_type_4",
+ "value": "👴🏽"
+ },
+ {
+ "key": "older_man_type_5",
+ "value": "👴🏾"
+ },
+ {
+ "key": "older_man_type_6",
+ "value": "👴🏿"
+ },
+ {
+ "key": "older_women",
+ "value": "👵"
+ },
+ {
+ "key": "older_women_type_1_2",
+ "value": "👵🏻"
+ },
+ {
+ "key": "older_women_type_3",
+ "value": "👵🏼"
+ },
+ {
+ "key": "older_women_type_4",
+ "value": "👵🏽"
+ },
+ {
+ "key": "older_women_type_5",
+ "value": "👵🏾"
+ },
+ {
+ "key": "older_women_type_6",
+ "value": "👵🏿"
+ },
+ {
+ "key": "man_with_gua_pi_mao",
+ "value": "👲"
+ },
+ {
+ "key": "man_with_gua_pi_mao_type_1_2",
+ "value": "👲🏼"
+ },
+ {
+ "key": "man_with_gua_pi_mao_type_3",
+ "value": "👲🏼"
+ },
+ {
+ "key": "man_with_gua_pi_mao_type_4",
+ "value": "👲🏽"
+ },
+ {
+ "key": "man_with_gua_pi_mao_type_5",
+ "value": "👲🏾"
+ },
+ {
+ "key": "man_with_gua_pi_mao_type_6",
+ "value": "👲🏿"
+ },
+ {
+ "key": "man_with_turban",
+ "value": "👳"
+ },
+ {
+ "key": "man_with_turban_type_1_2",
+ "value": "👳🏻"
+ },
+ {
+ "key": "man_with_turban_type_3",
+ "value": "👳🏼"
+ },
+ {
+ "key": "man_with_turban_type_4",
+ "value": "👳🏽"
+ },
+ {
+ "key": "man_with_turban_type_5",
+ "value": "👳🏾"
+ },
+ {
+ "key": "man_with_turban_type_6",
+ "value": "👳🏿"
+ },
+ {
+ "key": "police_officer",
+ "value": "👮"
+ },
+ {
+ "key": "police_officer_type_1_2",
+ "value": "👮🏻"
+ },
+ {
+ "key": "police_officer_type_3",
+ "value": "👮🏼"
+ },
+ {
+ "key": "police_officer_type_4",
+ "value": "👮🏽"
+ },
+ {
+ "key": "police_officer_type_5",
+ "value": "👮🏾"
+ },
+ {
+ "key": "police_officer_type_6",
+ "value": "👮🏿"
+ },
+ {
+ "key": "construction_worker",
+ "value": "👷"
+ },
+ {
+ "key": "construction_worker_type_1_2",
+ "value": "👷🏻"
+ },
+ {
+ "key": "construction_worker_type_3",
+ "value": "👷🏼"
+ },
+ {
+ "key": "construction_worker_type_4",
+ "value": "👷🏽"
+ },
+ {
+ "key": "construction_worker_type_5",
+ "value": "👷🏾"
+ },
+ {
+ "key": "construction_worker_type_6",
+ "value": "👷🏿"
+ },
+ {
+ "key": "guards_man",
+ "value": "💂"
+ },
+ {
+ "key": "guards_man_type_1_2",
+ "value": "💂🏻"
+ },
+ {
+ "key": "guards_man_type_3",
+ "value": "💂🏼"
+ },
+ {
+ "key": "guards_man_type_4",
+ "value": "💂🏽"
+ },
+ {
+ "key": "guards_man_type_5",
+ "value": "💂🏾"
+ },
+ {
+ "key": "guards_man_type_6",
+ "value": "💂🏿"
+ },
+ {
+ "key": "spy",
+ "value": "🕵"
+ },
+ {
+ "key": "father_christmas",
+ "value": "🎅"
+ },
+ {
+ "key": "father_christmas_type_1_2",
+ "value": "🎅🏻"
+ },
+ {
+ "key": "father_christmas_type_3",
+ "value": "🎅🏼"
+ },
+ {
+ "key": "father_christmas_type_4",
+ "value": "🎅🏽"
+ },
+ {
+ "key": "father_christmas_type_5",
+ "value": "🎅🏾"
+ },
+ {
+ "key": "father_christmas_type_6",
+ "value": "🎅🏿"
+ },
+ {
+ "key": "baby_angel",
+ "value": "👼"
+ },
+ {
+ "key": "baby_angel_type_1_2",
+ "value": "👼🏻"
+ },
+ {
+ "key": "baby_angel_type_3",
+ "value": "👼🏼"
+ },
+ {
+ "key": "baby_angel_type_4",
+ "value": "👼🏽"
+ },
+ {
+ "key": "baby_angel_type_5",
+ "value": "👼🏾"
+ },
+ {
+ "key": "baby_angel_type_6",
+ "value": "👼🏿"
+ },
+ {
+ "key": "princess",
+ "value": "👸"
+ },
+ {
+ "key": "princess_type_1_2",
+ "value": "👸🏻"
+ },
+ {
+ "key": "princess_type_3",
+ "value": "👸🏼"
+ },
+ {
+ "key": "princess_type_4",
+ "value": "👸🏽"
+ },
+ {
+ "key": "princess_type_5",
+ "value": "👸🏾"
+ },
+ {
+ "key": "princess_type_6",
+ "value": "👸🏿"
+ },
+ {
+ "key": "bride_with_veil",
+ "value": "👰"
+ },
+ {
+ "key": "bride_with_veil_type_1_2",
+ "value": "👰🏻"
+ },
+ {
+ "key": "bride_with_veil_type_3",
+ "value": "👰🏼"
+ },
+ {
+ "key": "bride_with_veil_type_4",
+ "value": "👰🏽"
+ },
+ {
+ "key": "bride_with_veil_type_5",
+ "value": "👰🏾"
+ },
+ {
+ "key": "bride_with_veil_type_6",
+ "value": "👰🏿"
+ },
+ {
+ "key": "pedestrian",
+ "value": "🚶"
+ },
+ {
+ "key": "pedestrian_type_1_2",
+ "value": "🚶🏻"
+ },
+ {
+ "key": "pedestrian_type_3",
+ "value": "🚶🏼"
+ },
+ {
+ "key": "pedestrian_type_4",
+ "value": "🚶🏽"
+ },
+ {
+ "key": "pedestrian_type_5",
+ "value": "🚶🏾"
+ },
+ {
+ "key": "pedestrian_type_6",
+ "value": "🚶🏿"
+ },
+ {
+ "key": "runner",
+ "value": "🏃"
+ },
+ {
+ "key": "runner_type_1_2",
+ "value": "🏃🏻"
+ },
+ {
+ "key": "runner_type_3",
+ "value": "🏃🏼"
+ },
+ {
+ "key": "runner_type_4",
+ "value": "🏃🏽"
+ },
+ {
+ "key": "runner_type_5",
+ "value": "🏃🏾"
+ },
+ {
+ "key": "runner_type_6",
+ "value": "🏃🏿"
+ },
+ {
+ "key": "dancer",
+ "value": "💃"
+ },
+ {
+ "key": "dancer_type_1_2",
+ "value": "💃🏻"
+ },
+ {
+ "key": "dancer_type_3",
+ "value": "💃🏼"
+ },
+ {
+ "key": "dancer_type_4",
+ "value": "💃🏽"
+ },
+ {
+ "key": "dancer_type_5",
+ "value": "💃🏾"
+ },
+ {
+ "key": "dancer_type_6",
+ "value": "💃🏿"
+ },
+ {
+ "key": "women_with_bunny_years",
+ "value": "👯"
+ },
+ {
+ "key": "man_women_holding_hands",
+ "value": "👫"
+ },
+ {
+ "key": "two_man_holding_hands",
+ "value": "👬"
+ },
+ {
+ "key": "two_women_holding_hands",
+ "value": "👭"
+ },
+ {
+ "key": "person_bowing_deeply",
+ "value": "🙇"
+ },
+ {
+ "key": "person_bowing_deeply_type_1_2",
+ "value": "🙇🏻"
+ },
+ {
+ "key": "person_bowing_deeply_type_3",
+ "value": "🙇🏼"
+ },
+ {
+ "key": "person_bowing_deeply_type_4",
+ "value": "🙇🏽"
+ },
+ {
+ "key": "person_bowing_deeply_type_5",
+ "value": "🙇🏾"
+ },
+ {
+ "key": "person_bowing_deeply_type_6",
+ "value": "🙇🏿"
+ },
+ {
+ "key": "information_desk_person",
+ "value": "💁"
+ },
+ {
+ "key": "information_desk_person_type_1_2",
+ "value": "💁🏻"
+ },
+ {
+ "key": "information_desk_person_type_3",
+ "value": "💁🏼"
+ },
+ {
+ "key": "information_desk_person_type_4",
+ "value": "💁🏽"
+ },
+ {
+ "key": "information_desk_person_type_5",
+ "value": "💁🏾"
+ },
+ {
+ "key": "information_desk_person_type_6",
+ "value": "💁🏿"
+ },
+ {
+ "key": "face_with_no_good_gesture",
+ "value": "🙅"
+ },
+ {
+ "key": "face_with_no_good_gesture_type_1_2",
+ "value": "🙅🏻"
+ },
+ {
+ "key": "face_with_no_good_gesture_type_3",
+ "value": "🙅🏼"
+ },
+ {
+ "key": "face_with_no_good_gesture_type_4",
+ "value": "🙅🏽"
+ },
+ {
+ "key": "face_with_no_good_gesture_type_5",
+ "value": "🙅🏾"
+ },
+ {
+ "key": "face_with_no_good_gesture_type_6",
+ "value": "🙅🏿"
+ },
+ {
+ "key": "face_with_ok_gesture",
+ "value": "🙆"
+ },
+ {
+ "key": "face_with_ok_gesture_type_1_2",
+ "value": "🙆🏻"
+ },
+ {
+ "key": "face_with_ok_gesture_type_3",
+ "value": "🙆🏼"
+ },
+ {
+ "key": "face_with_ok_gesture_type_4",
+ "value": "🙆🏽"
+ },
+ {
+ "key": "face_with_ok_gesture_type_5",
+ "value": "🙆🏾"
+ },
+ {
+ "key": "face_with_ok_gesture_type_6",
+ "value": "🙆🏿"
+ },
+ {
+ "key": "happy_person_raise_one_hand",
+ "value": "🙋"
+ },
+ {
+ "key": "happy_person_raise_one_hand_type_1_2",
+ "value": "🙋🏻"
+ },
+ {
+ "key": "happy_person_raise_one_hand_type_3",
+ "value": "🙋🏼"
+ },
+ {
+ "key": "happy_person_raise_one_hand_type_4",
+ "value": "🙋🏽"
+ },
+ {
+ "key": "happy_person_raise_one_hand_type_5",
+ "value": "🙋🏾"
+ },
+ {
+ "key": "happy_person_raise_one_hand_type_6",
+ "value": "🙋🏿"
+ },
+ {
+ "key": "person_with_pouting_face",
+ "value": "🙎"
+ },
+ {
+ "key": "person_with_pouting_face_type_1_2",
+ "value": "🙎🏻"
+ },
+ {
+ "key": "person_with_pouting_face_type_3",
+ "value": "🙎🏼"
+ },
+ {
+ "key": "person_with_pouting_face_type_4",
+ "value": "🙎🏽"
+ },
+ {
+ "key": "person_with_pouting_face_type_5",
+ "value": "🙎🏾"
+ },
+ {
+ "key": "person_with_pouting_face_type_6",
+ "value": "🙎🏿"
+ },
+ {
+ "key": "person_frowning",
+ "value": "🙍"
+ },
+ {
+ "key": "person_frowning_type_1_2",
+ "value": "🙍🏻"
+ },
+ {
+ "key": "person_frowning_type_3",
+ "value": "🙍🏼"
+ },
+ {
+ "key": "person_frowning_type_4",
+ "value": "🙍🏽"
+ },
+ {
+ "key": "person_frowning_type_5",
+ "value": "🙍🏾"
+ },
+ {
+ "key": "person_frowning_type_6",
+ "value": "🙍🏿"
+ },
+ {
+ "key": "haircut",
+ "value": "💇"
+ },
+ {
+ "key": "haircut_type_1_2",
+ "value": "💇🏻"
+ },
+ {
+ "key": "haircut_type_3",
+ "value": "💇🏼"
+ },
+ {
+ "key": "haircut_type_4",
+ "value": "💇🏽"
+ },
+ {
+ "key": "haircut_type_5",
+ "value": "💇🏾"
+ },
+ {
+ "key": "haircut_type_6",
+ "value": "💇🏿"
+ },
+ {
+ "key": "face_massage",
+ "value": "💆"
+ },
+ {
+ "key": "face_massage_type_1_2",
+ "value": "💆🏻"
+ },
+ {
+ "key": "face_massage_type_3",
+ "value": "💆🏻"
+ },
+ {
+ "key": "face_massage_type_4",
+ "value": "💆🏽"
+ },
+ {
+ "key": "face_massage_type_5",
+ "value": "💆🏾"
+ },
+ {
+ "key": "face_massage_type_6",
+ "value": "💆🏿"
+ },
+ {
+ "key": "couple_with_heart",
+ "value": "💑"
+ },
+ {
+ "key": "couple_with_heart_woman",
+ "value": "👩‍❤️‍👩"
+ },
+ {
+ "key": "couple_with_heart_man",
+ "value": "👨‍❤️‍👨"
+ },
+ {
+ "key": "kiss",
+ "value": "💏"
+ },
+ {
+ "key": "kiss_woman",
+ "value": "👩‍❤️‍💋‍👩"
+ },
+ {
+ "key": "kiss_man",
+ "value": "👨‍❤️‍💋‍👨"
+ },
+ {
+ "key": "family",
+ "value": "👪"
+ },
+ {
+ "key": "family_man_women_girl",
+ "value": "👨‍👩‍👧"
+ },
+ {
+ "key": "family_man_women_girl_boy",
+ "value": "👨‍👩‍👧‍👦"
+ },
+ {
+ "key": "family_man_women_boy_boy",
+ "value": "👨‍👩‍👦‍👦"
+ },
+ {
+ "key": "family_man_women_girl_girl",
+ "value": "👨‍👩‍👧‍👧"
+ },
+ {
+ "key": "family_woman_women_boy",
+ "value": "👩‍👩‍👦"
+ },
+ {
+ "key": "family_woman_women_girl",
+ "value": "👩‍👩‍👧"
+ },
+ {
+ "key": "family_woman_women_girl_boy",
+ "value": "👩‍👩‍👧‍👦"
+ },
+ {
+ "key": "family_woman_women_boy_boy",
+ "value": "👩‍👩‍👦‍👦"
+ },
+ {
+ "key": "family_woman_women_girl_girl",
+ "value": "👩‍👩‍👧‍👧"
+ },
+ {
+ "key": "family_man_man_boy",
+ "value": "👨‍👨‍👦"
+ },
+ {
+ "key": "family_man_man_girl",
+ "value": "👨‍👨‍👧"
+ },
+ {
+ "key": "family_man_man_girl_boy",
+ "value": "👨‍👨‍👧‍👦"
+ },
+ {
+ "key": "family_man_man_boy_boy",
+ "value": "👨‍👨‍👦‍👦"
+ },
+ {
+ "key": "family_man_man_girl_girl",
+ "value": "👨‍👨‍👧‍👧"
+ },
+ {
+ "key": "woman_clothes",
+ "value": "👚"
+ },
+ {
+ "key": "t_shirt",
+ "value": "👕"
+ },
+ {
+ "key": "jeans",
+ "value": "👖"
+ },
+ {
+ "key": "necktie",
+ "value": "👔"
+ },
+ {
+ "key": "dress",
+ "value": "👗"
+ },
+ {
+ "key": "bikini",
+ "value": "👙"
+ },
+ {
+ "key": "kimono",
+ "value": "👘"
+ },
+ {
+ "key": "lipstick",
+ "value": "💄"
+ },
+ {
+ "key": "kiss_mark",
+ "value": "💋"
+ },
+ {
+ "key": "footprints",
+ "value": "👣"
+ },
+ {
+ "key": "high_heeled_shoe",
+ "value": "👠"
+ },
+ {
+ "key": "woman_sandal",
+ "value": "👡"
+ },
+ {
+ "key": "woman_boots",
+ "value": "👢"
+ },
+ {
+ "key": "man_shoe",
+ "value": "👞"
+ },
+ {
+ "key": "athletic_shoe",
+ "value": "👟"
+ },
+ {
+ "key": "woman_hat",
+ "value": "👒"
+ },
+ {
+ "key": "top_hat",
+ "value": "🎩"
+ },
+ {
+ "key": "graduation_cap",
+ "value": "🎓"
+ },
+ {
+ "key": "crown",
+ "value": "👑"
+ },
+ {
+ "key": "helmet_with_white_cross",
+ "value": "⛑"
+ },
+ {
+ "key": "school_satchel",
+ "value": "🎒"
+ },
+ {
+ "key": "pouch",
+ "value": "👝"
+ },
+ {
+ "key": "purse",
+ "value": "👛"
+ },
+ {
+ "key": "handbag",
+ "value": "👜"
+ },
+ {
+ "key": "briefcase",
+ "value": "💼"
+ },
+ {
+ "key": "eye_glasses",
+ "value": "👓"
+ },
+ {
+ "key": "dark_sun_glasses",
+ "value": "🕶"
+ },
+ {
+ "key": "ring",
+ "value": "💍"
+ },
+ {
+ "key": "closed_umbrella",
+ "value": "🌂"
+ }
+ ]
+ }
+} \ No newline at end of file
diff --git a/nikola/plugins/shortcode/emoji/data/Symbols.json b/nikola/plugins/shortcode/emoji/data/Symbols.json
new file mode 100644
index 0000000..2dd5454
--- /dev/null
+++ b/nikola/plugins/shortcode/emoji/data/Symbols.json
@@ -0,0 +1,1082 @@
+{
+ "symbols": {
+ "symbol": [
+ {
+ "key": "heavy_black_heart",
+ "value": "❤"
+ },
+ {
+ "key": "yellow_heart",
+ "value": "💛"
+ },
+ {
+ "key": "green_heart",
+ "value": "💚"
+ },
+ {
+ "key": "blue_heart",
+ "value": "💙"
+ },
+ {
+ "key": "purple_heart",
+ "value": "💜"
+ },
+ {
+ "key": "broken_heart",
+ "value": "💔"
+ },
+ {
+ "key": "heavy_heart_exclamation_mark_ornament",
+ "value": "❣"
+ },
+ {
+ "key": "two_hearts",
+ "value": "💕"
+ },
+ {
+ "key": "revolving_hearts",
+ "value": "💞"
+ },
+ {
+ "key": "beating_heart",
+ "value": "💓"
+ },
+ {
+ "key": "growing_heart",
+ "value": "💗"
+ },
+ {
+ "key": "sparkling_heart",
+ "value": "💖"
+ },
+ {
+ "key": "heart_with_arrow",
+ "value": "💘"
+ },
+ {
+ "key": "heart_with_ribbon",
+ "value": "💝"
+ },
+ {
+ "key": "heart_decoration",
+ "value": "💟"
+ },
+ {
+ "key": "peace_symbol",
+ "value": "☮"
+ },
+ {
+ "key": "latin_cross",
+ "value": "✝"
+ },
+ {
+ "key": "star_and_crescent",
+ "value": "☪"
+ },
+ {
+ "key": "om_symbol",
+ "value": "🕉"
+ },
+ {
+ "key": "wheel_of_dharma",
+ "value": "☸"
+ },
+ {
+ "key": "star_of_david",
+ "value": "✡"
+ },
+ {
+ "key": "six_pointed_star_with_middle_dot",
+ "value": "🔯"
+ },
+ {
+ "key": "menorah_with_nine_branches",
+ "value": "🕎"
+ },
+ {
+ "key": "yin_yang",
+ "value": "☯"
+ },
+ {
+ "key": "orthodox_cross",
+ "value": "☦"
+ },
+ {
+ "key": "place_of_worship",
+ "value": "🛐"
+ },
+ {
+ "key": "ophiuchus",
+ "value": "⛎"
+ },
+ {
+ "key": "aries",
+ "value": "♈"
+ },
+ {
+ "key": "taurus",
+ "value": "♉"
+ },
+ {
+ "key": "gemini",
+ "value": "♊"
+ },
+ {
+ "key": "cancer",
+ "value": "♋"
+ },
+ {
+ "key": "leo",
+ "value": "♌"
+ },
+ {
+ "key": "virgo",
+ "value": "♍"
+ },
+ {
+ "key": "libra",
+ "value": "♎"
+ },
+ {
+ "key": "scorpius",
+ "value": "♏"
+ },
+ {
+ "key": "sagittarius",
+ "value": "♐"
+ },
+ {
+ "key": "capricorn",
+ "value": "♑"
+ },
+ {
+ "key": "aquarius",
+ "value": "♒"
+ },
+ {
+ "key": "pisces",
+ "value": "♓"
+ },
+ {
+ "key": "squared_id",
+ "value": "🆔"
+ },
+ {
+ "key": "atom_symbol",
+ "value": "⚛"
+ },
+ {
+ "key": "squared_cjk_unified_ideograph_7a7a",
+ "value": "🈳"
+ },
+ {
+ "key": "squared_cjk_unified_ideograph_5272",
+ "value": "🈹"
+ },
+ {
+ "key": "radioactive_sign",
+ "value": "☢"
+ },
+ {
+ "key": "biohazard_sign",
+ "value": "☣"
+ },
+ {
+ "key": "mobile_phone_off",
+ "value": "📴"
+ },
+ {
+ "key": "vibration_mode",
+ "value": "📳"
+ },
+ {
+ "key": "squared_cjk_unified_ideograph_6709",
+ "value": "🈶"
+ },
+ {
+ "key": "squared_cjk_unified_ideograph_7121",
+ "value": "🈚"
+ },
+ {
+ "key": "squared_cjk_unified_ideograph_7533",
+ "value": "🈸"
+ },
+ {
+ "key": "squared_cjk_unified_ideograph_55b6",
+ "value": "🈺"
+ },
+ {
+ "key": "squared_cjk_unified_ideograph_6708",
+ "value": "🈷"
+ },
+ {
+ "key": "eight_pointed_black_star",
+ "value": "✴"
+ },
+ {
+ "key": "squared_vs",
+ "value": "🆚"
+ },
+ {
+ "key": "circled_ideograph_accept",
+ "value": "🉑"
+ },
+ {
+ "key": "white_flower",
+ "value": "💮"
+ },
+ {
+ "key": "circled_ideograph_advantage",
+ "value": "🉐"
+ },
+ {
+ "key": "circled_ideograph_secret",
+ "value": "㊙"
+ },
+ {
+ "key": "circled_ideograph_congratulation",
+ "value": "㊗"
+ },
+ {
+ "key": "squared_cjk_unified_ideograph_5408",
+ "value": "🈴"
+ },
+ {
+ "key": "squared_cjk_unified_ideograph_6e80",
+ "value": "🈵"
+ },
+ {
+ "key": "squared_cjk_unified_ideograph_7981",
+ "value": "🈲"
+ },
+ {
+ "key": "negative_squared_latin_capital_letter_a",
+ "value": "🅰"
+ },
+ {
+ "key": "negative_squared_latin_capital_letter_b",
+ "value": "🅱"
+ },
+ {
+ "key": "negative_squared_ab",
+ "value": "🆎"
+ },
+ {
+ "key": "squared_cl",
+ "value": "🆑"
+ },
+ {
+ "key": "negative_squared_latin_capital_letter_o",
+ "value": "🅾"
+ },
+ {
+ "key": "squared_sos",
+ "value": "🆘"
+ },
+ {
+ "key": "no_entry",
+ "value": "⛔"
+ },
+ {
+ "key": "name_badge",
+ "value": "📛"
+ },
+ {
+ "key": "no_entry_sign",
+ "value": "🚫"
+ },
+ {
+ "key": "cross_mark",
+ "value": "❌"
+ },
+ {
+ "key": "heavy_large_circle",
+ "value": "⭕"
+ },
+ {
+ "key": "anger_symbol",
+ "value": "💢"
+ },
+ {
+ "key": "hot_springs",
+ "value": "♨"
+ },
+ {
+ "key": "no_pedestrians",
+ "value": "🚷"
+ },
+ {
+ "key": "do_not_litter_symbol",
+ "value": "🚯"
+ },
+ {
+ "key": "no_bi_cycles",
+ "value": "🚳"
+ },
+ {
+ "key": "non_potable_water_symbol",
+ "value": "🚱"
+ },
+ {
+ "key": "no_one_under_eighteen_symbol",
+ "value": "🔞"
+ },
+ {
+ "key": "no_mobile_phones",
+ "value": "📵"
+ },
+ {
+ "key": "heavy_exclamation_mark_symbol",
+ "value": "❗"
+ },
+ {
+ "key": "white_exclamation_mark_ornament",
+ "value": "❕"
+ },
+ {
+ "key": "black_question_mark_ornament",
+ "value": "❓"
+ },
+ {
+ "key": "white_question_mark_ornament",
+ "value": "❔"
+ },
+ {
+ "key": "double_exclamation_mark",
+ "value": "‼"
+ },
+ {
+ "key": "exclamation_question_mark",
+ "value": "⁉"
+ },
+ {
+ "key": "hundred_points_symbol",
+ "value": "💯"
+ },
+ {
+ "key": "low_brightness_symbol",
+ "value": "🔅"
+ },
+ {
+ "key": "high_brightness_symbol",
+ "value": "🔆"
+ },
+ {
+ "key": "trident_emblem",
+ "value": "🔱"
+ },
+ {
+ "key": "fleur_de_lis",
+ "value": "⚜"
+ },
+ {
+ "key": "part_alternation_mark",
+ "value": "〽"
+ },
+ {
+ "key": "warning_sign",
+ "value": "⚠"
+ },
+ {
+ "key": "children_crossing",
+ "value": "🚸"
+ },
+ {
+ "key": "japanese_symbol_for_beginner",
+ "value": "🔰"
+ },
+ {
+ "key": "black_universal_recycling_symbol",
+ "value": "♻"
+ },
+ {
+ "key": "squared_cjk_unified_ideograph_6307",
+ "value": "🈯"
+ },
+ {
+ "key": "chart_with_upwards_trend_and_yen_sign",
+ "value": "💹"
+ },
+ {
+ "key": "sparkle",
+ "value": "❇"
+ },
+ {
+ "key": "eight_spoked_asterisk",
+ "value": "✳"
+ },
+ {
+ "key": "negative_squared_crossmark",
+ "value": "❎"
+ },
+ {
+ "key": "white_heavy_checkmark",
+ "value": "✅"
+ },
+ {
+ "key": "diamond_shape_with_a_dot_inside",
+ "value": "💠"
+ },
+ {
+ "key": "cyclone",
+ "value": "🌀"
+ },
+ {
+ "key": "double_curly_loop",
+ "value": "➿"
+ },
+ {
+ "key": "globe_with_meridians",
+ "value": "🌐"
+ },
+ {
+ "key": "circled_latin_capital_letter_m",
+ "value": "ⓜ"
+ },
+ {
+ "key": "automated_teller_machine",
+ "value": "🏧"
+ },
+ {
+ "key": "squared_katakanasa",
+ "value": "🈂"
+ },
+ {
+ "key": "passport_control",
+ "value": "🛂"
+ },
+ {
+ "key": "customs",
+ "value": "🛃"
+ },
+ {
+ "key": "baggage_claim",
+ "value": "🛄"
+ },
+ {
+ "key": "left_luggage",
+ "value": "🛅"
+ },
+ {
+ "key": "wheel_chair_symbol",
+ "value": "♿"
+ },
+ {
+ "key": "no_smoking_symbol",
+ "value": "🚭"
+ },
+ {
+ "key": "water_closet",
+ "value": "🚾"
+ },
+ {
+ "key": "negative_squared_letter_p",
+ "value": "🅿"
+ },
+ {
+ "key": "potable_water_symbol",
+ "value": "🚰"
+ },
+ {
+ "key": "mens_symbol",
+ "value": "🚹"
+ },
+ {
+ "key": "womens_symbol",
+ "value": "🚺"
+ },
+ {
+ "key": "baby_symbol",
+ "value": "🚼"
+ },
+ {
+ "key": "restroom",
+ "value": "🚻"
+ },
+ {
+ "key": "put_litter_in_its_place",
+ "value": "🚮"
+ },
+ {
+ "key": "cinema",
+ "value": "🎦"
+ },
+ {
+ "key": "antenna_with_bars",
+ "value": "📶"
+ },
+ {
+ "key": "squared_katakana_koko",
+ "value": "🈁"
+ },
+ {
+ "key": "squared_ng",
+ "value": "🆖"
+ },
+ {
+ "key": "squared_ok",
+ "value": "🆗"
+ },
+ {
+ "key": "squared_exclamation_mark",
+ "value": "🆙"
+ },
+ {
+ "key": "squared_cool",
+ "value": "🆒"
+ },
+ {
+ "key": "squared_new",
+ "value": "🆕"
+ },
+ {
+ "key": "squared_free",
+ "value": "🆓"
+ },
+ {
+ "key": "keycap_digit_zero",
+ "value": "0⃣"
+ },
+ {
+ "key": "keycap_digit_one",
+ "value": "1⃣"
+ },
+ {
+ "key": "keycap_digit_two",
+ "value": "2⃣"
+ },
+ {
+ "key": "keycap_digit_three",
+ "value": "3⃣"
+ },
+ {
+ "key": "keycap_digit_four",
+ "value": "4⃣"
+ },
+ {
+ "key": "keycap_digit_five",
+ "value": "5⃣"
+ },
+ {
+ "key": "keycap_digit_six",
+ "value": "6⃣"
+ },
+ {
+ "key": "keycap_digit_seven",
+ "value": "7⃣"
+ },
+ {
+ "key": "keycap_digit_eight",
+ "value": "8⃣"
+ },
+ {
+ "key": "keycap_digit_nine",
+ "value": "9⃣"
+ },
+ {
+ "key": "keycap_ten",
+ "value": "🔟"
+ },
+ {
+ "key": "input_symbol_for_numbers",
+ "value": "🔢"
+ },
+ {
+ "key": "black_right_pointing_triangle",
+ "value": "▶"
+ },
+ {
+ "key": "double_vertical_bar",
+ "value": "⏸"
+ },
+ {
+ "key": "blk_rgt_point_triangle_dbl_vertical_bar",
+ "value": "⏯"
+ },
+ {
+ "key": "black_square_for_stop",
+ "value": "⏹"
+ },
+ {
+ "key": "black_circle_for_record",
+ "value": "⏺"
+ },
+ {
+ "key": "blk_rgt_point_dbl_triangle_vertical_bar",
+ "value": "⏭"
+ },
+ {
+ "key": "blk_lft_point_dbl_triangle_vertical_bar",
+ "value": "⏮"
+ },
+ {
+ "key": "blk_rgt_point_dbl_triangle",
+ "value": "⏩"
+ },
+ {
+ "key": "blk_lft_point_dbl_triangle",
+ "value": "⏪"
+ },
+ {
+ "key": "twisted_rightwards_arrows",
+ "value": "🔀"
+ },
+ {
+ "key": "cwise_rgt_lft_open_circle_arrow",
+ "value": "🔁"
+ },
+ {
+ "key": "cwise_rgt_lft_open_circle_arrow_overlay",
+ "value": "🔂"
+ },
+ {
+ "key": "blk_lft_point_triangle",
+ "value": "◀"
+ },
+ {
+ "key": "up_point_small_red_triangle",
+ "value": "🔼"
+ },
+ {
+ "key": "down_point_small_red_triangle",
+ "value": "🔽"
+ },
+ {
+ "key": "blk_up_point_double_triangle",
+ "value": "⏫"
+ },
+ {
+ "key": "blk_down_point_double_triangle",
+ "value": "⏬"
+ },
+ {
+ "key": "black_rightwards_arrow",
+ "value": "➡"
+ },
+ {
+ "key": "leftwards_black_arrow",
+ "value": "⬅"
+ },
+ {
+ "key": "upwards_black_arrow",
+ "value": "⬆"
+ },
+ {
+ "key": "downwards_black_arrow",
+ "value": "⬇"
+ },
+ {
+ "key": "northeast_arrow",
+ "value": "↗"
+ },
+ {
+ "key": "southeast_arrow",
+ "value": "↘"
+ },
+ {
+ "key": "south_west_arrow",
+ "value": "↙"
+ },
+ {
+ "key": "north_west_arrow",
+ "value": "↖"
+ },
+ {
+ "key": "up_down_arrow",
+ "value": "↕"
+ },
+ {
+ "key": "left_right_arrow",
+ "value": "↔"
+ },
+ {
+ "key": "acwise_down_up_open_circle_arrow",
+ "value": "🔄"
+ },
+ {
+ "key": "rightwards_arrow_with_hook",
+ "value": "↪"
+ },
+ {
+ "key": "leftwards_arrow_with_hook",
+ "value": "↩"
+ },
+ {
+ "key": "arrow_point_rgt_then_curving_up",
+ "value": "⤴"
+ },
+ {
+ "key": "arrow_point_rgt_then_curving_down",
+ "value": "⤵"
+ },
+ {
+ "key": "keycap_number_sign",
+ "value": "#⃣"
+ },
+ {
+ "key": "keycap_asterisk",
+ "value": "*⃣"
+ },
+ {
+ "key": "information_source",
+ "value": "ℹ"
+ },
+ {
+ "key": "input_symbol_for_latin_letters",
+ "value": "🔤"
+ },
+ {
+ "key": "input_symbol_latin_small_letters",
+ "value": "🔡"
+ },
+ {
+ "key": "input_symbol_latin_capital_letters",
+ "value": "🔠"
+ },
+ {
+ "key": "input_symbol_symbols",
+ "value": "🔣"
+ },
+ {
+ "key": "musical_note",
+ "value": "🎵"
+ },
+ {
+ "key": "multiple_musical_notes",
+ "value": "🎶"
+ },
+ {
+ "key": "wavy_dash",
+ "value": "〰"
+ },
+ {
+ "key": "curly_loop",
+ "value": "➰"
+ },
+ {
+ "key": "heavy_check_mark",
+ "value": "✔"
+ },
+ {
+ "key": "cwise_down_up_open_circle_arrows",
+ "value": "🔃"
+ },
+ {
+ "key": "heavy_plus_sign",
+ "value": "➕"
+ },
+ {
+ "key": "heavy_minus_sign",
+ "value": "➖"
+ },
+ {
+ "key": "heavy_division_sign",
+ "value": "➗"
+ },
+ {
+ "key": "heavy_multiplication_x",
+ "value": "✖"
+ },
+ {
+ "key": "heavy_dollar_sign",
+ "value": "💲"
+ },
+ {
+ "key": "currency_exchange",
+ "value": "💱"
+ },
+ {
+ "key": "copyright_sign",
+ "value": "©"
+ },
+ {
+ "key": "registered_sign",
+ "value": "®"
+ },
+ {
+ "key": "trademark_sign",
+ "value": "™"
+ },
+ {
+ "key": "end_with_lft_arrow_above",
+ "value": "🔚"
+ },
+ {
+ "key": "back_with_lft_arrow_above",
+ "value": "🔙"
+ },
+ {
+ "key": "on_exclamation_lft_rgt_arrow",
+ "value": "🔛"
+ },
+ {
+ "key": "top_with_up_arrow_above",
+ "value": "🔝"
+ },
+ {
+ "key": "soon_right_arrow_above",
+ "value": "🔜"
+ },
+ {
+ "key": "ballot_box_with_check",
+ "value": "☑"
+ },
+ {
+ "key": "radio_button",
+ "value": "🔘"
+ },
+ {
+ "key": "medium_white_circle",
+ "value": "⚪"
+ },
+ {
+ "key": "medium_black_circle",
+ "value": "⚫"
+ },
+ {
+ "key": "large_red_circle",
+ "value": "🔴"
+ },
+ {
+ "key": "large_blue_circle",
+ "value": "🔵"
+ },
+ {
+ "key": "small_orange_diamond",
+ "value": "🔸"
+ },
+ {
+ "key": "small_blue_diamond",
+ "value": "🔹"
+ },
+ {
+ "key": "large_orange_diamond",
+ "value": "🔶"
+ },
+ {
+ "key": "large_blue_diamond",
+ "value": "🔷"
+ },
+ {
+ "key": "up_point_red_triangle",
+ "value": "🔺"
+ },
+ {
+ "key": "black_small_square",
+ "value": "▪"
+ },
+ {
+ "key": "white_small_square",
+ "value": "▫"
+ },
+ {
+ "key": "black_large_square",
+ "value": "⬛"
+ },
+ {
+ "key": "white_large_square",
+ "value": "⬜"
+ },
+ {
+ "key": "down_point_red_triangle",
+ "value": "🔻"
+ },
+ {
+ "key": "black_medium_square",
+ "value": "◼"
+ },
+ {
+ "key": "white_medium_square",
+ "value": "◻"
+ },
+ {
+ "key": "black_medium_small_square",
+ "value": "◾"
+ },
+ {
+ "key": "white_medium_small_square",
+ "value": "◽"
+ },
+ {
+ "key": "black_square_button",
+ "value": "🔲"
+ },
+ {
+ "key": "white_square_button",
+ "value": "🔳"
+ },
+ {
+ "key": "speaker",
+ "value": "🔈"
+ },
+ {
+ "key": "speaker_one_sound_wave",
+ "value": "🔉"
+ },
+ {
+ "key": "speaker_three_sound_waves",
+ "value": "🔊"
+ },
+ {
+ "key": "speaker_cancellation_stroke",
+ "value": "🔇"
+ },
+ {
+ "key": "cheering_megaphone",
+ "value": "📣"
+ },
+ {
+ "key": "public_address_loudspeaker",
+ "value": "📢"
+ },
+ {
+ "key": "bell",
+ "value": "🔔"
+ },
+ {
+ "key": "bell_with_cancellation_stroke",
+ "value": "🔕"
+ },
+ {
+ "key": "playing_card_black_joker",
+ "value": "🃏"
+ },
+ {
+ "key": "mahjong_tile_red_dragon",
+ "value": "🀄"
+ },
+ {
+ "key": "black_spade_suit",
+ "value": "♠"
+ },
+ {
+ "key": "black_club_suit",
+ "value": "♣"
+ },
+ {
+ "key": "black_heart_suit",
+ "value": "♥"
+ },
+ {
+ "key": "black_diamond_suit",
+ "value": "♦"
+ },
+ {
+ "key": "flower_playing_cards",
+ "value": "🎴"
+ },
+ {
+ "key": "eye_in_speech_bubble",
+ "value": "👁‍🗨"
+ },
+ {
+ "key": "thought_balloon",
+ "value": "💭"
+ },
+ {
+ "key": "right_anger_bubble",
+ "value": "🗯"
+ },
+ {
+ "key": "speech_balloon",
+ "value": "💬"
+ },
+ {
+ "key": "clock_face_one_o_clock",
+ "value": "🕐"
+ },
+ {
+ "key": "clock_face_two_o_clock",
+ "value": "🕑"
+ },
+ {
+ "key": "clock_face_three_o_clock",
+ "value": "🕒"
+ },
+ {
+ "key": "clock_face_four_o_clock",
+ "value": "🕓"
+ },
+ {
+ "key": "clock_face_five_o_clock",
+ "value": "🕔"
+ },
+ {
+ "key": "clock_face_six_o_clock",
+ "value": "🕕"
+ },
+ {
+ "key": "clock_face_seven_o_clock",
+ "value": "🕖"
+ },
+ {
+ "key": "clock_face_eight_o_clock",
+ "value": "🕗"
+ },
+ {
+ "key": "clock_face_nine_o_clock",
+ "value": "🕘"
+ },
+ {
+ "key": "clock_face_ten_o_clock",
+ "value": "🕙"
+ },
+ {
+ "key": "clock_face_eleven_o_clock",
+ "value": "🕚"
+ },
+ {
+ "key": "clock_face_twelve_o_clock",
+ "value": "🕛"
+ },
+ {
+ "key": "clock_face_one_thirty",
+ "value": "🕜"
+ },
+ {
+ "key": "clock_face_two_thirty",
+ "value": "🕝"
+ },
+ {
+ "key": "clock_face_three_thirty",
+ "value": "🕞"
+ },
+ {
+ "key": "clock_face_four_thirty",
+ "value": "🕟"
+ },
+ {
+ "key": "clock_face_five_thirty",
+ "value": "🕠"
+ },
+ {
+ "key": "clock_face_six_thirty",
+ "value": "🕡"
+ },
+ {
+ "key": "clock_face_seven_thirty",
+ "value": "🕢"
+ },
+ {
+ "key": "clock_face_eight_thirty",
+ "value": "🕣"
+ },
+ {
+ "key": "clock_face_nine_thirty",
+ "value": "🕤"
+ },
+ {
+ "key": "clock_face_ten_thirty",
+ "value": "🕥"
+ },
+ {
+ "key": "clock_face_eleven_thirty",
+ "value": "🕦"
+ },
+ {
+ "key": "clock_face_twelve_thirty",
+ "value": "🕧"
+ }
+ ]
+ }
+} \ No newline at end of file
diff --git a/nikola/plugins/shortcode/emoji/data/Travel.json b/nikola/plugins/shortcode/emoji/data/Travel.json
new file mode 100644
index 0000000..e38b84f
--- /dev/null
+++ b/nikola/plugins/shortcode/emoji/data/Travel.json
@@ -0,0 +1,466 @@
+{
+ "travels": {
+ "travel": [
+ {
+ "key": "automobile",
+ "value": "🚗"
+ },
+ {
+ "key": "taxi",
+ "value": "🚕"
+ },
+ {
+ "key": "recreational_vehicle",
+ "value": "🚙"
+ },
+ {
+ "key": "bus",
+ "value": "🚌"
+ },
+ {
+ "key": "trolley_bus",
+ "value": "🚎"
+ },
+ {
+ "key": "racing_car",
+ "value": "🏎"
+ },
+ {
+ "key": "police_car",
+ "value": "🚓"
+ },
+ {
+ "key": "ambulance",
+ "value": "🚑"
+ },
+ {
+ "key": "fire_engine",
+ "value": "🚒"
+ },
+ {
+ "key": "minibus",
+ "value": "🚐"
+ },
+ {
+ "key": "delivery_truck",
+ "value": "🚚"
+ },
+ {
+ "key": "articulated_lorry",
+ "value": "🚛"
+ },
+ {
+ "key": "tractor",
+ "value": "🚜"
+ },
+ {
+ "key": "racing_motorcycle",
+ "value": "🏍"
+ },
+ {
+ "key": "bicycle",
+ "value": "🚲"
+ },
+ {
+ "key": "police_light",
+ "value": "🚨"
+ },
+ {
+ "key": "on_coming_police_car",
+ "value": "🚔"
+ },
+ {
+ "key": "on_coming_bus",
+ "value": "🚍"
+ },
+ {
+ "key": "on_coming_automobile",
+ "value": "🚘"
+ },
+ {
+ "key": "on_coming_taxi",
+ "value": "🚖"
+ },
+ {
+ "key": "aerial_tramway",
+ "value": "🚡"
+ },
+ {
+ "key": "mountain_cableway",
+ "value": "🚠"
+ },
+ {
+ "key": "suspension_railway",
+ "value": "🚟"
+ },
+ {
+ "key": "railway_car",
+ "value": "🚃"
+ },
+ {
+ "key": "tramcar",
+ "value": "🚋"
+ },
+ {
+ "key": "monorail",
+ "value": "🚝"
+ },
+ {
+ "key": "high_speed_train",
+ "value": "🚄"
+ },
+ {
+ "key": "high_speed_train_bullet_nose",
+ "value": "🚅"
+ },
+ {
+ "key": "light_rail",
+ "value": "🚈"
+ },
+ {
+ "key": "mountain_railway",
+ "value": "🚞"
+ },
+ {
+ "key": "steam_locomotive",
+ "value": "🚂"
+ },
+ {
+ "key": "train",
+ "value": "🚆"
+ },
+ {
+ "key": "metro",
+ "value": "🚇"
+ },
+ {
+ "key": "tram",
+ "value": "🚊"
+ },
+ {
+ "key": "station",
+ "value": "🚉"
+ },
+ {
+ "key": "helicopter",
+ "value": "🚁"
+ },
+ {
+ "key": "small_airplane",
+ "value": "🛩"
+ },
+ {
+ "key": "airplane",
+ "value": "✈"
+ },
+ {
+ "key": "airplane_departure",
+ "value": "🛫"
+ },
+ {
+ "key": "airplane_arriving",
+ "value": "🛬"
+ },
+ {
+ "key": "sailboat",
+ "value": "⛵"
+ },
+ {
+ "key": "motorboat",
+ "value": "🛥"
+ },
+ {
+ "key": "speedboat",
+ "value": "🚤"
+ },
+ {
+ "key": "ferry",
+ "value": "⛴"
+ },
+ {
+ "key": "passenger_ship",
+ "value": "🛳"
+ },
+ {
+ "key": "rocket",
+ "value": "🚀"
+ },
+ {
+ "key": "satellite",
+ "value": "🛰"
+ },
+ {
+ "key": "seat",
+ "value": "💺"
+ },
+ {
+ "key": "anchor",
+ "value": "⚓"
+ },
+ {
+ "key": "construction_sign",
+ "value": "🚧"
+ },
+ {
+ "key": "fuel_pump",
+ "value": "⛽"
+ },
+ {
+ "key": "bus_stop",
+ "value": "🚏"
+ },
+ {
+ "key": "vertical_traffic_light",
+ "value": "🚦"
+ },
+ {
+ "key": "horizontal_traffic_light",
+ "value": "🚥"
+ },
+ {
+ "key": "chequered_flag",
+ "value": "🏁"
+ },
+ {
+ "key": "ship",
+ "value": "🚢"
+ },
+ {
+ "key": "ferris_wheel",
+ "value": "🎡"
+ },
+ {
+ "key": "roller_coaster",
+ "value": "🎢"
+ },
+ {
+ "key": "carousel_horse",
+ "value": "🎠"
+ },
+ {
+ "key": "building_construction",
+ "value": "🏗"
+ },
+ {
+ "key": "foggy",
+ "value": "🌁"
+ },
+ {
+ "key": "tokyo_tower",
+ "value": "🗼"
+ },
+ {
+ "key": "factory",
+ "value": "🏭"
+ },
+ {
+ "key": "fountain",
+ "value": "⛲"
+ },
+ {
+ "key": "moon_viewing_ceremony",
+ "value": "🎑"
+ },
+ {
+ "key": "mountain",
+ "value": "⛰"
+ },
+ {
+ "key": "snow_capped_mountain",
+ "value": "🏔"
+ },
+ {
+ "key": "mount_fuji",
+ "value": "🗻"
+ },
+ {
+ "key": "volcano",
+ "value": "🌋"
+ },
+ {
+ "key": "silhouette_of_japan",
+ "value": "🗾"
+ },
+ {
+ "key": "camping",
+ "value": "🏕"
+ },
+ {
+ "key": "tent",
+ "value": "⛺"
+ },
+ {
+ "key": "national_park",
+ "value": "🏞"
+ },
+ {
+ "key": "motorway",
+ "value": "🛣"
+ },
+ {
+ "key": "railway_track",
+ "value": "🛤"
+ },
+ {
+ "key": "sunrise",
+ "value": "🌅"
+ },
+ {
+ "key": "sunrise_over_mountain",
+ "value": "🌄"
+ },
+ {
+ "key": "desert",
+ "value": "🏜"
+ },
+ {
+ "key": "beach_with_umbrella",
+ "value": "🏖"
+ },
+ {
+ "key": "desert_island",
+ "value": "🏝"
+ },
+ {
+ "key": "sunset_over_buildings",
+ "value": "🌇"
+ },
+ {
+ "key": "city_scape_at_dusk",
+ "value": "🌆"
+ },
+ {
+ "key": "city_scape",
+ "value": "🏙"
+ },
+ {
+ "key": "night_with_stars",
+ "value": "🌃"
+ },
+ {
+ "key": "bridge_at_night",
+ "value": "🌉"
+ },
+ {
+ "key": "milky_way",
+ "value": "🌌"
+ },
+ {
+ "key": "shooting_star",
+ "value": "🌠"
+ },
+ {
+ "key": "fire_work_sparkler",
+ "value": "🎇"
+ },
+ {
+ "key": "fireworks",
+ "value": "🎆"
+ },
+ {
+ "key": "rainbow",
+ "value": "🌈"
+ },
+ {
+ "key": "house_buildings",
+ "value": "🏘"
+ },
+ {
+ "key": "european_castle",
+ "value": "🏰"
+ },
+ {
+ "key": "japanese_castle",
+ "value": "🏯"
+ },
+ {
+ "key": "stadium",
+ "value": "🏟"
+ },
+ {
+ "key": "statue_of_liberty",
+ "value": "🗽"
+ },
+ {
+ "key": "house_building",
+ "value": "🏠"
+ },
+ {
+ "key": "house_with_garden",
+ "value": "🏡"
+ },
+ {
+ "key": "derelict_house_building",
+ "value": "🏚"
+ },
+ {
+ "key": "office_building",
+ "value": "🏢"
+ },
+ {
+ "key": "department_store",
+ "value": "🏬"
+ },
+ {
+ "key": "japanese_post_office",
+ "value": "🏣"
+ },
+ {
+ "key": "european_post_office",
+ "value": "🏤"
+ },
+ {
+ "key": "hospital",
+ "value": "🏥"
+ },
+ {
+ "key": "bank",
+ "value": "🏦"
+ },
+ {
+ "key": "hotel",
+ "value": "🏨"
+ },
+ {
+ "key": "convenience_store",
+ "value": "🏪"
+ },
+ {
+ "key": "school",
+ "value": "🏫"
+ },
+ {
+ "key": "love_hotel",
+ "value": "🏩"
+ },
+ {
+ "key": "wedding",
+ "value": "💒"
+ },
+ {
+ "key": "classical_building",
+ "value": "🏛"
+ },
+ {
+ "key": "church",
+ "value": "⛪"
+ },
+ {
+ "key": "mosque",
+ "value": "🕌"
+ },
+ {
+ "key": "synagogue",
+ "value": "🕍"
+ },
+ {
+ "key": "kaaba",
+ "value": "🕋"
+ },
+ {
+ "key": "shinto_shrine",
+ "value": "⛩"
+ }
+ ]
+ }
+}
diff --git a/nikola/plugins/shortcode/gist.plugin b/nikola/plugins/shortcode/gist.plugin
index cd19a72..b610763 100644
--- a/nikola/plugins/shortcode/gist.plugin
+++ b/nikola/plugins/shortcode/gist.plugin
@@ -3,7 +3,7 @@ name = gist
module = gist
[Nikola]
-plugincategory = Shortcode
+PluginCategory = Shortcode
[Documentation]
author = Roberto Alsina
diff --git a/nikola/plugins/shortcode/gist.py b/nikola/plugins/shortcode/gist.py
index 64fd0d9..eb9e976 100644
--- a/nikola/plugins/shortcode/gist.py
+++ b/nikola/plugins/shortcode/gist.py
@@ -13,12 +13,6 @@ class Plugin(ShortcodePlugin):
name = "gist"
- def set_site(self, site):
- """Set Nikola site."""
- self.site = site
- site.register_shortcode('gist', self.handler)
- return super(Plugin, self).set_site(site)
-
def get_raw_gist_with_filename(self, gistID, filename):
"""Get raw gist text for a filename."""
url = '/'.join(("https://gist.github.com/raw", gistID, filename))
diff --git a/nikola/plugins/shortcode/listing.plugin b/nikola/plugins/shortcode/listing.plugin
new file mode 100644
index 0000000..90fb6eb
--- /dev/null
+++ b/nikola/plugins/shortcode/listing.plugin
@@ -0,0 +1,13 @@
+[Core]
+name = listing_shortcode
+module = listing
+
+[Nikola]
+PluginCategory = Shortcode
+
+[Documentation]
+author = Roberto Alsina
+version = 0.1
+website = https://getnikola.com/
+description = Listing shortcode
+
diff --git a/nikola/plugins/shortcode/listing.py b/nikola/plugins/shortcode/listing.py
new file mode 100644
index 0000000..b51365a
--- /dev/null
+++ b/nikola/plugins/shortcode/listing.py
@@ -0,0 +1,77 @@
+# -*- coding: utf-8 -*-
+
+# Copyright © 2017-2020 Roberto Alsina and others.
+
+# Permission is hereby granted, free of charge, to any
+# person obtaining a copy of this software and associated
+# documentation files (the "Software"), to deal in the
+# Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the
+# Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice
+# shall be included in all copies or substantial portions of
+# the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
+# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+"""Listing shortcode (equivalent to reST’s listing directive)."""
+
+import os
+from urllib.parse import urlunsplit
+
+import pygments
+
+from nikola.plugin_categories import ShortcodePlugin
+
+
+class Plugin(ShortcodePlugin):
+ """Plugin for listing shortcode."""
+
+ name = "listing"
+
+ def set_site(self, site):
+ """Set Nikola site."""
+ self.site = site
+ Plugin.folders = site.config['LISTINGS_FOLDERS']
+ return super().set_site(site)
+
+ def handler(self, fname, language='text', linenumbers=False, filename=None, site=None, data=None, lang=None, post=None):
+ """Create HTML for a listing."""
+ fname = fname.replace('/', os.sep)
+ if len(self.folders) == 1:
+ listings_folder = next(iter(self.folders.keys()))
+ if fname.startswith(listings_folder):
+ fpath = os.path.join(fname) # new syntax: specify folder name
+ else:
+ # old syntax: don't specify folder name
+ fpath = os.path.join(listings_folder, fname)
+ else:
+ # must be new syntax: specify folder name
+ fpath = os.path.join(fname)
+ linenumbers = 'table' if linenumbers else False
+ deps = [fpath]
+ with open(fpath, 'r') as inf:
+ target = urlunsplit(
+ ("link", 'listing', fpath.replace('\\', '/'), '', ''))
+ src_target = urlunsplit(
+ ("link", 'listing_source', fpath.replace('\\', '/'), '', ''))
+ src_label = self.site.MESSAGES('Source')
+
+ data = inf.read()
+ lexer = pygments.lexers.get_lexer_by_name(language)
+ formatter = pygments.formatters.get_formatter_by_name(
+ 'html', linenos=linenumbers)
+ output = '<a href="{1}">{0}</a> <a href="{3}">({2})</a>'.format(
+ fname, target, src_label, src_target) + pygments.highlight(data, lexer, formatter)
+
+ return output, deps
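For reference, a minimal standalone sketch (not part of the patch) of the pygments calls the listing shortcode above relies on; the source string and the 'python' lexer name are illustrative values, not taken from the diff.

import pygments
import pygments.formatters
import pygments.lexers

data = 'print("hello, listing")'
lexer = pygments.lexers.get_lexer_by_name('python')
formatter = pygments.formatters.get_formatter_by_name('html', linenos='table')
html = pygments.highlight(data, lexer, formatter)  # HTML table with line numbers
print(html)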
diff --git a/nikola/plugins/shortcode/post_list.plugin b/nikola/plugins/shortcode/post_list.plugin
new file mode 100644
index 0000000..494a1d8
--- /dev/null
+++ b/nikola/plugins/shortcode/post_list.plugin
@@ -0,0 +1,13 @@
+[Core]
+name = post_list
+module = post_list
+
+[Nikola]
+PluginCategory = Shortcode
+
+[Documentation]
+author = Udo Spallek
+version = 0.2
+website = https://getnikola.com/
+description = Includes a list of posts with tag and slice based filters.
+
diff --git a/nikola/plugins/shortcode/post_list.py b/nikola/plugins/shortcode/post_list.py
new file mode 100644
index 0000000..462984a
--- /dev/null
+++ b/nikola/plugins/shortcode/post_list.py
@@ -0,0 +1,245 @@
+# -*- coding: utf-8 -*-
+
+# Copyright © 2013-2020 Udo Spallek, Roberto Alsina and others.
+
+# Permission is hereby granted, free of charge, to any
+# person obtaining a copy of this software and associated
+# documentation files (the "Software"), to deal in the
+# Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the
+# Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice
+# shall be included in all copies or substantial portions of
+# the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
+# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+"""Post list shortcode."""
+
+
+import operator
+import os
+import uuid
+
+import natsort
+
+from nikola import utils
+from nikola.packages.datecond import date_in_range
+from nikola.plugin_categories import ShortcodePlugin
+
+
+class PostListShortcode(ShortcodePlugin):
+ """Provide a shortcode to create a list of posts.
+
+ Post List
+ =========
+ :Directive Arguments: None.
+ :Directive Options: lang, start, stop, reverse, sort, date, tags, categories, sections, slugs, post_type, template, id
+ :Directive Content: None.
+
+ The posts appearing in the list can be filtered by options.
+ *List slicing* is provided with the *start*, *stop* and *reverse* options.
+
+ The following options are recognized (none are required):
+
+ ``start`` : integer
+ The index of the first post to show.
+ A negative value like ``-3`` will show the *last* three posts in the
+ post-list.
+ Defaults to None.
+
+ ``stop`` : integer
+ The index of the last post to show.
+ A negative value like ``-1`` will show every post, but not the
+ *last* in the post-list.
+ Defaults to None.
+
+ ``reverse`` : flag
+ Reverse the order of the post-list.
+ The default is to not reverse the order of posts.
+
+ ``sort`` : string
+ Sort post list by one of each post's attributes, usually ``title`` or a
+ custom ``priority``. Defaults to None (chronological sorting).
+
+ ``date`` : string
+ Show posts that match date range specified by this option. Format:
+
+ * comma-separated clauses (AND)
+ * clause: attribute comparison_operator value (spaces optional)
+ * attribute: year, month, day, hour, minute, second, weekday, isoweekday; or empty for full datetime
+ * comparison_operator: == != <= >= < >
+ * value: integer, 'now', 'today', or dateutil-compatible date input
+
+ ``tags`` : string [, string...]
+ Filter posts to show only posts having at least one of the ``tags``.
+ Defaults to None.
+
+ ``require_all_tags`` : flag
+ Change tag filter behaviour to show only posts that have all specified ``tags``.
+ Defaults to False.
+
+ ``categories`` : string [, string...]
+ Filter posts to show only posts having one of the ``categories``.
+ Defaults to None.
+
+ ``sections`` : string [, string...]
+ Filter posts to show only posts having one of the ``sections``.
+ Defaults to None.
+
+ ``slugs`` : string [, string...]
+ Filter posts to show only posts having at least one of the ``slugs``.
+ Defaults to None.
+
+ ``post_type`` (or ``type``) : string
+ Show only ``posts``, ``pages`` or ``all``.
+ Replaces ``all``. Defaults to ``posts``.
+
+ ``lang`` : string
+ The language of post *titles* and *links*.
+ Defaults to default language.
+
+ ``template`` : string
+ The name of an alternative template to render the post-list.
+ Defaults to ``post_list_directive.tmpl``
+
+ ``id`` : string
+ A manual id for the post list.
+ Defaults to a random name composed of 'post_list_' + uuid.uuid4().hex.
+ """
+
+ name = "post_list"
+
+ def set_site(self, site):
+ """Set the site."""
+ super().set_site(site)
+ site.register_shortcode('post-list', self.handler)
+
+ def handler(self, start=None, stop=None, reverse=False, tags=None, require_all_tags=False, categories=None,
+ sections=None, slugs=None, post_type='post', type=False,
+ lang=None, template='post_list_directive.tmpl', sort=None,
+ id=None, data=None, state=None, site=None, date=None, filename=None, post=None):
+ """Generate HTML for post-list."""
+ if lang is None:
+ lang = utils.LocaleBorg().current_lang
+ if site.invariant: # for testing purposes
+ post_list_id = id or 'post_list_' + 'fixedvaluethatisnotauuid'
+ else:
+ post_list_id = id or 'post_list_' + uuid.uuid4().hex
+
+ # Get post from filename if available
+ if filename:
+ self_post = site.post_per_input_file.get(filename)
+ else:
+ self_post = None
+
+ if self_post:
+ self_post.register_depfile("####MAGIC####TIMELINE", lang=lang)
+
+ # If we get strings for start/stop, make them integers
+ if start is not None:
+ start = int(start)
+ if stop is not None:
+ stop = int(stop)
+
+ # Parse tags/categories/sections/slugs (input is strings)
+ categories = [c.strip().lower() for c in categories.split(',')] if categories else []
+ sections = [s.strip().lower() for s in sections.split(',')] if sections else []
+ slugs = [s.strip() for s in slugs.split(',')] if slugs else []
+
+ filtered_timeline = []
+ posts = []
+ step = None if reverse is False else -1
+
+ if type is not False:
+ post_type = type
+
+ if post_type == 'page' or post_type == 'pages':
+ timeline = [p for p in site.timeline if not p.use_in_feeds]
+ elif post_type == 'all':
+ timeline = [p for p in site.timeline]
+ else: # post
+ timeline = [p for p in site.timeline if p.use_in_feeds]
+
+ # self_post should be removed from timeline because this is redundant
+ timeline = [p for p in timeline if p.source_path != filename]
+
+ if categories:
+ timeline = [p for p in timeline if p.meta('category', lang=lang).lower() in categories]
+
+ if sections:
+ timeline = [p for p in timeline if p.section_name(lang).lower() in sections]
+
+ if tags:
+ tags = {t.strip().lower() for t in tags.split(',')}
+ if require_all_tags:
+ compare = set.issubset
+ else:
+ compare = operator.and_
+ for post in timeline:
+ post_tags = {t.lower() for t in post.tags}
+ if compare(tags, post_tags):
+ filtered_timeline.append(post)
+ else:
+ filtered_timeline = timeline
+
+ if sort:
+ filtered_timeline = natsort.natsorted(filtered_timeline, key=lambda post: post.meta[lang][sort], alg=natsort.ns.F | natsort.ns.IC)
+
+ if date:
+ _now = utils.current_time()
+ filtered_timeline = [p for p in filtered_timeline if date_in_range(utils.html_unescape(date), p.date, now=_now)]
+
+ for post in filtered_timeline[start:stop:step]:
+ if slugs:
+ cont = True
+ for slug in slugs:
+ if slug == post.meta('slug'):
+ cont = False
+
+ if cont:
+ continue
+
+ bp = post.translated_base_path(lang)
+ if os.path.exists(bp) and state:
+ state.document.settings.record_dependencies.add(bp)
+ elif os.path.exists(bp) and self_post:
+ self_post.register_depfile(bp, lang=lang)
+
+ posts += [post]
+
+ template_deps = site.template_system.template_deps(template)
+ if state:
+ # Register template as a dependency (Issue #2391)
+ for d in template_deps:
+ state.document.settings.record_dependencies.add(d)
+ elif self_post:
+ for d in template_deps:
+ self_post.register_depfile(d, lang=lang)
+
+ template_data = {
+ 'lang': lang,
+ 'posts': posts,
+ # Need to provide str, not TranslatableSetting (Issue #2104)
+ 'date_format': site.GLOBAL_CONTEXT.get('date_format')[lang],
+ 'post_list_id': post_list_id,
+ 'messages': site.MESSAGES,
+ '_link': site.link,
+ }
+ output = site.template_system.render_template(
+ template, None, template_data)
+ return output, template_deps
+
+
+# Request file name from shortcode (Issue #2412)
+PostListShortcode.handler.nikola_shortcode_pass_filename = True
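A minimal sketch (not part of the patch) of the tag-filter comparison used in handler() above: with require_all_tags every requested tag must be present on the post, otherwise any overlap keeps the post. The tag names here are made up.

import operator

requested = {'python', 'nikola'}
post_tags = {'python', 'blogging'}

# operator.and_ on sets is intersection; a non-empty result keeps the post.
print(bool(operator.and_(requested, post_tags)))   # True  (any requested tag matches)
# set.issubset requires every requested tag to be on the post.
print(set.issubset(requested, post_tags))          # False (require_all_tags behaviour)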
diff --git a/nikola/plugins/shortcode/thumbnail.plugin b/nikola/plugins/shortcode/thumbnail.plugin
new file mode 100644
index 0000000..e55d34f
--- /dev/null
+++ b/nikola/plugins/shortcode/thumbnail.plugin
@@ -0,0 +1,12 @@
+[Core]
+name = thumbnail
+module = thumbnail
+
+[Nikola]
+PluginCategory = Shortcode
+
+[Documentation]
+author = Chris Warrick
+version = 0.1
+website = https://getnikola.com/
+description = Thumbnail shortcode
diff --git a/nikola/plugins/shortcode/thumbnail.py b/nikola/plugins/shortcode/thumbnail.py
new file mode 100644
index 0000000..feb731b
--- /dev/null
+++ b/nikola/plugins/shortcode/thumbnail.py
@@ -0,0 +1,69 @@
+# -*- coding: utf-8 -*-
+
+# Copyright © 2017-2020 Roberto Alsina, Chris Warrick and others.
+
+# Permission is hereby granted, free of charge, to any
+# person obtaining a copy of this software and associated
+# documentation files (the "Software"), to deal in the
+# Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the
+# Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice
+# shall be included in all copies or substantial portions of
+# the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
+# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+"""Thumbnail shortcode (equivalent to reST’s thumbnail directive)."""
+
+import os.path
+
+from nikola.plugin_categories import ShortcodePlugin
+
+
+class ThumbnailShortcode(ShortcodePlugin):
+ """Plugin for thumbnail directive."""
+
+ name = "thumbnail"
+
+ def handler(self, uri, alt=None, align=None, linktitle=None, title=None, imgclass=None, figclass=None, site=None, data=None, lang=None, post=None):
+ """Create HTML for thumbnail."""
+ if uri.endswith('.svg'):
+ # the ? at the end makes docutils output an <img> instead of an object for the svg, which lightboxes may require
+ src = '.thumbnail'.join(os.path.splitext(uri)) + '?'
+ else:
+ src = '.thumbnail'.join(os.path.splitext(uri))
+
+ if imgclass is None:
+ imgclass = ''
+ if figclass is None:
+ figclass = ''
+
+ if align and data:
+ figclass += ' align-{0}'.format(align)
+ elif align:
+ imgclass += ' align-{0}'.format(align)
+
+ output = '<a href="{0}" class="image-reference"'.format(uri)
+ if linktitle:
+ output += ' title="{0}"'.format(linktitle)
+ output += '><img src="{0}"'.format(src)
+ for item, name in ((alt, 'alt'), (title, 'title'), (imgclass, 'class')):
+ if item:
+ output += ' {0}="{1}"'.format(name, item)
+ output += '></a>'
+
+ if data:
+ output = '<div class="figure {0}">{1}{2}</div>'.format(figclass, output, data)
+
+ return output, []
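A minimal sketch (not part of the patch) of how the src attribute is derived in the handler above; the image paths are illustrative.

import os.path

uri = 'images/cat.png'
print('.thumbnail'.join(os.path.splitext(uri)))        # images/cat.thumbnail.png

# SVGs get a trailing '?' so the output is rendered as an <img> rather than an object.
svg = 'images/logo.svg'
print('.thumbnail'.join(os.path.splitext(svg)) + '?')  # images/logo.thumbnail.svg?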
diff --git a/nikola/plugins/task/__init__.py b/nikola/plugins/task/__init__.py
index 4eeae62..3e18cd5 100644
--- a/nikola/plugins/task/__init__.py
+++ b/nikola/plugins/task/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
diff --git a/nikola/plugins/task/archive.plugin b/nikola/plugins/task/archive.plugin
index eb079da..62e5fd9 100644
--- a/nikola/plugins/task/archive.plugin
+++ b/nikola/plugins/task/archive.plugin
@@ -1,5 +1,5 @@
[Core]
-name = render_archive
+name = classify_archive
module = archive
[Documentation]
@@ -9,5 +9,5 @@ website = https://getnikola.com/
description = Generates the blog's archive pages.
[Nikola]
-plugincategory = Task
+PluginCategory = Taxonomy
diff --git a/nikola/plugins/task/archive.py b/nikola/plugins/task/archive.py
index 303d349..4cbf215 100644
--- a/nikola/plugins/task/archive.py
+++ b/nikola/plugins/task/archive.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -24,231 +24,216 @@
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-"""Render the post archives."""
+"""Classify the posts in archives."""
-import copy
-import os
-
-# for tearDown with _reload we cannot use 'import from' to access LocaleBorg
-import nikola.utils
import datetime
-from nikola.plugin_categories import Task
-from nikola.utils import config_changed, adjust_name_for_index_path, adjust_name_for_index_link
+from collections import defaultdict
+
+import natsort
+import nikola.utils
+from nikola.plugin_categories import Taxonomy
+
+
+class Archive(Taxonomy):
+ """Classify the post archives."""
+
+ name = "classify_archive"
+
+ classification_name = "archive"
+ overview_page_variable_name = "archive"
+ more_than_one_classifications_per_post = False
+ has_hierarchy = True
+ include_posts_from_subhierarchies = True
+ include_posts_into_hierarchy_root = True
+ subcategories_list_template = "list.tmpl"
+ template_for_classification_overview = None
+ always_disable_rss = True
+ always_disable_atom = True
+ apply_to_posts = True
+ apply_to_pages = False
+ minimum_post_count_per_classification_in_overview = 1
+ omit_empty_classifications = False
+ add_other_languages_variable = True
+ path_handler_docstrings = {
+ 'archive_index': False,
+ 'archive': """Link to archive path, name is the year.
-class Archive(Task):
- """Render the post archives."""
+ Example:
- name = "render_archive"
+ link://archive/2013 => /archives/2013/index.html""",
+ 'archive_atom': False,
+ 'archive_rss': False,
+ }
def set_site(self, site):
"""Set Nikola site."""
- site.register_path_handler('archive', self.archive_path)
- site.register_path_handler('archive_atom', self.archive_atom_path)
- return super(Archive, self).set_site(site)
-
- def _prepare_task(self, kw, name, lang, posts, items, template_name,
- title, deps_translatable=None):
- """Prepare an archive task."""
- # name: used to build permalink and destination
- # posts, items: posts or items; only one of them should be used,
- # the other should be None
- # template_name: name of the template to use
- # title: the (translated) title for the generated page
- # deps_translatable: dependencies (None if not added)
- assert posts is not None or items is not None
- task_cfg = [copy.copy(kw)]
- context = {}
- context["lang"] = lang
- context["title"] = title
- context["permalink"] = self.site.link("archive", name, lang)
- context["pagekind"] = ["list", "archive_page"]
- if posts is not None:
- context["posts"] = posts
- # Depend on all post metadata because it can be used in templates (Issue #1931)
- task_cfg.append([repr(p) for p in posts])
+ # Sanity checks
+ if (site.config['CREATE_MONTHLY_ARCHIVE'] and site.config['CREATE_SINGLE_ARCHIVE']) and not site.config['CREATE_FULL_ARCHIVES']:
+ raise Exception('Cannot create monthly and single archives at the same time.')
+ # Finish setup
+ self.show_list_as_subcategories_list = not site.config['CREATE_FULL_ARCHIVES']
+ self.show_list_as_index = site.config['ARCHIVES_ARE_INDEXES']
+ self.template_for_single_list = "archiveindex.tmpl" if site.config['ARCHIVES_ARE_INDEXES'] else "archive.tmpl"
+ # Determine maximum hierarchy height
+ if site.config['CREATE_DAILY_ARCHIVE'] or site.config['CREATE_FULL_ARCHIVES']:
+ self.max_levels = 3
+ elif site.config['CREATE_MONTHLY_ARCHIVE']:
+ self.max_levels = 2
+ elif site.config['CREATE_SINGLE_ARCHIVE']:
+ self.max_levels = 0
else:
- # Depend on the content of items, to rebuild if links change (Issue #1931)
- context["items"] = items
- task_cfg.append(items)
- task = self.site.generic_post_list_renderer(
- lang,
- [],
- os.path.join(kw['output_folder'], self.site.path("archive", name, lang)),
- template_name,
- kw['filters'],
- context,
- )
-
- task_cfg = {i: x for i, x in enumerate(task_cfg)}
- if deps_translatable is not None:
- task_cfg[3] = deps_translatable
- task['uptodate'] = task['uptodate'] + [config_changed(task_cfg, 'nikola.plugins.task.archive')]
- task['basename'] = self.name
- return task
-
- def _generate_posts_task(self, kw, name, lang, posts, title, deps_translatable=None):
- """Genereate a task for an archive with posts."""
- posts = sorted(posts, key=lambda a: a.date)
- posts.reverse()
- if kw['archives_are_indexes']:
- def page_link(i, displayed_i, num_pages, force_addition, extension=None):
- feed = "_atom" if extension == ".atom" else ""
- return adjust_name_for_index_link(self.site.link("archive" + feed, name, lang), i, displayed_i,
- lang, self.site, force_addition, extension)
-
- def page_path(i, displayed_i, num_pages, force_addition, extension=None):
- feed = "_atom" if extension == ".atom" else ""
- return adjust_name_for_index_path(self.site.path("archive" + feed, name, lang), i, displayed_i,
- lang, self.site, force_addition, extension)
-
- uptodate = []
- if deps_translatable is not None:
- uptodate += [config_changed(deps_translatable, 'nikola.plugins.task.archive')]
- context = {"archive_name": name,
- "is_feed_stale": kw["is_feed_stale"],
- "pagekind": ["index", "archive_page"]}
- yield self.site.generic_index_renderer(
- lang,
- posts,
- title,
- "archiveindex.tmpl",
- context,
- kw,
- str(self.name),
- page_link,
- page_path,
- uptodate)
+ self.max_levels = 1
+ return super().set_site(site)
+
+ def get_implicit_classifications(self, lang):
+ """Return a list of classification strings which should always appear in posts_per_classification."""
+ return ['']
+
+ def classify(self, post, lang):
+ """Classify the given post for the given language."""
+ levels = [str(post.date.year).zfill(4), str(post.date.month).zfill(2), str(post.date.day).zfill(2)]
+ return ['/'.join(levels[:self.max_levels])]
+
+ def sort_classifications(self, classifications, lang, level=None):
+ """Sort the given list of classification strings."""
+ if level in (0, 1):
+ # Years or months: sort descending
+ classifications.sort()
+ classifications.reverse()
+
+ def get_classification_friendly_name(self, classification, lang, only_last_component=False):
+ """Extract a friendly name from the classification."""
+ classification = self.extract_hierarchy(classification)
+ if len(classification) == 0:
+ return self.site.MESSAGES[lang]['Archive']
+ elif len(classification) == 1:
+ return classification[0]
+ elif len(classification) == 2:
+ if only_last_component:
+ date_str = "{month}"
+ else:
+ date_str = "{month_year}"
+ return nikola.utils.LocaleBorg().format_date_in_string(
+ date_str,
+ datetime.date(int(classification[0]), int(classification[1]), 1),
+ lang)
+ else:
+ if only_last_component:
+ return str(classification[2])
+ return nikola.utils.LocaleBorg().format_date_in_string(
+ "{month_day_year}",
+ datetime.date(int(classification[0]), int(classification[1]), int(classification[2])),
+ lang)
+
+ def get_path(self, classification, lang, dest_type='page'):
+ """Return a path for the given classification."""
+ components = [self.site.config['ARCHIVE_PATH'](lang)]
+ if classification:
+ components.extend(classification)
+ add_index = 'always'
else:
- yield self._prepare_task(kw, name, lang, posts, None, "list_post.tmpl", title, deps_translatable)
+ components.append(self.site.config['ARCHIVE_FILENAME'](lang))
+ add_index = 'never'
+ return [_f for _f in components if _f], add_index
+
+ def extract_hierarchy(self, classification):
+ """Given a classification, return a list of parts in the hierarchy."""
+ return classification.split('/') if classification else []
- def gen_tasks(self):
- """Generate archive tasks."""
+ def recombine_classification_from_hierarchy(self, hierarchy):
+ """Given a list of parts in the hierarchy, return the classification string."""
+ return '/'.join(hierarchy)
+
+ def provide_context_and_uptodate(self, classification, lang, node=None):
+ """Provide data for the context and the uptodate list for the list of the given classifiation."""
+ hierarchy = self.extract_hierarchy(classification)
kw = {
"messages": self.site.MESSAGES,
- "translations": self.site.config['TRANSLATIONS'],
- "output_folder": self.site.config['OUTPUT_FOLDER'],
- "filters": self.site.config['FILTERS'],
- "archives_are_indexes": self.site.config['ARCHIVES_ARE_INDEXES'],
- "create_monthly_archive": self.site.config['CREATE_MONTHLY_ARCHIVE'],
- "create_single_archive": self.site.config['CREATE_SINGLE_ARCHIVE'],
- "show_untranslated_posts": self.site.config['SHOW_UNTRANSLATED_POSTS'],
- "create_full_archives": self.site.config['CREATE_FULL_ARCHIVES'],
- "create_daily_archive": self.site.config['CREATE_DAILY_ARCHIVE'],
- "pretty_urls": self.site.config['PRETTY_URLS'],
- "strip_indexes": self.site.config['STRIP_INDEXES'],
- "index_file": self.site.config['INDEX_FILE'],
- "generate_atom": self.site.config["GENERATE_ATOM"],
}
- self.site.scan_posts()
- yield self.group_task()
- # TODO add next/prev links for years
- if (kw['create_monthly_archive'] and kw['create_single_archive']) and not kw['create_full_archives']:
- raise Exception('Cannot create monthly and single archives at the same time.')
- for lang in kw["translations"]:
- if kw['create_single_archive'] and not kw['create_full_archives']:
- # if we are creating one single archive
- archdata = {}
- else:
- # if we are not creating one single archive, start with all years
- archdata = self.site.posts_per_year.copy()
- if kw['create_single_archive'] or kw['create_full_archives']:
- # if we are creating one single archive, or full archives
- archdata[None] = self.site.posts # for create_single_archive
-
- for year, posts in archdata.items():
- # Filter untranslated posts (Issue #1360)
- if not kw["show_untranslated_posts"]:
- posts = [p for p in posts if lang in p.translated_to]
-
- # Add archive per year or total archive
- if year:
- title = kw["messages"][lang]["Posts for year %s"] % year
- kw["is_feed_stale"] = (datetime.datetime.utcnow().strftime("%Y") != year)
- else:
- title = kw["messages"][lang]["Archive"]
- kw["is_feed_stale"] = False
- deps_translatable = {}
- for k in self.site._GLOBAL_CONTEXT_TRANSLATABLE:
- deps_translatable[k] = self.site.GLOBAL_CONTEXT[k](lang)
- if not kw["create_monthly_archive"] or kw["create_full_archives"]:
- yield self._generate_posts_task(kw, year, lang, posts, title, deps_translatable)
- else:
- months = set([(m.split('/')[1], self.site.link("archive", m, lang), len(self.site.posts_per_month[m])) for m in self.site.posts_per_month.keys() if m.startswith(str(year))])
- months = sorted(list(months))
- months.reverse()
- items = [[nikola.utils.LocaleBorg().get_month_name(int(month), lang), link, count] for month, link, count in months]
- yield self._prepare_task(kw, year, lang, None, items, "list.tmpl", title, deps_translatable)
-
- if not kw["create_monthly_archive"] and not kw["create_full_archives"] and not kw["create_daily_archive"]:
- continue # Just to avoid nesting the other loop in this if
- for yearmonth, posts in self.site.posts_per_month.items():
- # Add archive per month
- year, month = yearmonth.split('/')
-
- kw["is_feed_stale"] = (datetime.datetime.utcnow().strftime("%Y/%m") != yearmonth)
-
- # Filter untranslated posts (via Issue #1360)
- if not kw["show_untranslated_posts"]:
- posts = [p for p in posts if lang in p.translated_to]
-
- if kw["create_monthly_archive"] or kw["create_full_archives"]:
- title = kw["messages"][lang]["Posts for {month} {year}"].format(
- year=year, month=nikola.utils.LocaleBorg().get_month_name(int(month), lang))
- yield self._generate_posts_task(kw, yearmonth, lang, posts, title)
-
- if not kw["create_full_archives"] and not kw["create_daily_archive"]:
- continue # Just to avoid nesting the other loop in this if
- # Add archive per day
- days = dict()
- for p in posts:
- if p.date.day not in days:
- days[p.date.day] = list()
- days[p.date.day].append(p)
- for day, posts in days.items():
- title = kw["messages"][lang]["Posts for {month} {day}, {year}"].format(
- year=year, month=nikola.utils.LocaleBorg().get_month_name(int(month), lang), day=day)
- yield self._generate_posts_task(kw, yearmonth + '/{0:02d}'.format(day), lang, posts, title)
-
- if not kw['create_single_archive'] and not kw['create_full_archives']:
- # And an "all your years" page for yearly and monthly archives
- if "is_feed_stale" in kw:
- del kw["is_feed_stale"]
- years = list(self.site.posts_per_year.keys())
- years.sort(reverse=True)
- kw['years'] = years
- for lang in kw["translations"]:
- items = [(y, self.site.link("archive", y, lang), len(self.site.posts_per_year[y])) for y in years]
- yield self._prepare_task(kw, None, lang, None, items, "list.tmpl", kw["messages"][lang]["Archive"])
-
- def archive_path(self, name, lang, is_feed=False):
- """Link to archive path, name is the year.
-
- Example:
-
- link://archive/2013 => /archives/2013/index.html
- """
- if is_feed:
- extension = ".atom"
- archive_file = os.path.splitext(self.site.config['ARCHIVE_FILENAME'])[0] + extension
- index_file = os.path.splitext(self.site.config['INDEX_FILE'])[0] + extension
- else:
- archive_file = self.site.config['ARCHIVE_FILENAME']
- index_file = self.site.config['INDEX_FILE']
- if name:
- return [_f for _f in [self.site.config['TRANSLATIONS'][lang],
- self.site.config['ARCHIVE_PATH'], name,
- index_file] if _f]
+ page_kind = "list"
+ if self.show_list_as_index:
+ if not self.show_list_as_subcategories_list or len(hierarchy) == self.max_levels:
+ page_kind = "index"
+ if len(hierarchy) == 0:
+ title = kw["messages"][lang]["Archive"]
+ elif len(hierarchy) == 1:
+ title = kw["messages"][lang]["Posts for year %s"] % hierarchy[0]
+ elif len(hierarchy) == 2:
+ title = nikola.utils.LocaleBorg().format_date_in_string(
+ kw["messages"][lang]["Posts for {month_year}"],
+ datetime.date(int(hierarchy[0]), int(hierarchy[1]), 1),
+ lang)
+ elif len(hierarchy) == 3:
+ title = nikola.utils.LocaleBorg().format_date_in_string(
+ kw["messages"][lang]["Posts for {month_day_year}"],
+ datetime.date(int(hierarchy[0]), int(hierarchy[1]), int(hierarchy[2])),
+ lang)
else:
- return [_f for _f in [self.site.config['TRANSLATIONS'][lang],
- self.site.config['ARCHIVE_PATH'],
- archive_file] if _f]
+ raise Exception("Cannot interpret classification {}!".format(repr(classification)))
- def archive_atom_path(self, name, lang):
- """Link to atom archive path, name is the year.
-
- Example:
+ context = {
+ "title": title,
+ "pagekind": [page_kind, "archive_page"],
+ "create_archive_navigation": self.site.config["CREATE_ARCHIVE_NAVIGATION"],
+ "archive_name": classification
+ }
- link://archive_atom/2013 => /archives/2013/index.atom
- """
- return self.archive_path(name, lang, is_feed=True)
+ # Generate links for hierarchies
+ if context["create_archive_navigation"]:
+ if hierarchy:
+ # Up level link makes sense only if this is not the top-level
+ # page (hierarchy is empty)
+ parent = '/'.join(hierarchy[:-1])
+ context["up_archive"] = self.site.link('archive', parent, lang)
+ context["up_archive_name"] = self.get_classification_friendly_name(parent, lang)
+ else:
+ context["up_archive"] = None
+ context["up_archive_name"] = None
+
+ nodelevel = len(hierarchy)
+ flat_samelevel = self.archive_navigation[lang][nodelevel]
+ idx = flat_samelevel.index(classification)
+ if idx == -1:
+ raise Exception("Cannot find classification {0} in flat hierarchy!".format(classification))
+ previdx, nextidx = idx - 1, idx + 1
+ # If the previous index is -1, or the next index equals len(flat_samelevel), the previous/next archive does not exist.
+ context["previous_archive"] = self.site.link('archive', flat_samelevel[previdx], lang) if previdx != -1 else None
+ context["previous_archive_name"] = self.get_classification_friendly_name(flat_samelevel[previdx], lang) if previdx != -1 else None
+ context["next_archive"] = self.site.link('archive', flat_samelevel[nextidx], lang) if nextidx != len(flat_samelevel) else None
+ context["next_archive_name"] = self.get_classification_friendly_name(flat_samelevel[nextidx], lang) if nextidx != len(flat_samelevel) else None
+ context["archive_nodelevel"] = nodelevel
+ context["has_archive_navigation"] = bool(context["previous_archive"] or context["up_archive"] or context["next_archive"])
+ else:
+ context["has_archive_navigation"] = False
+ kw.update(context)
+ return context, kw
+
+ def postprocess_posts_per_classification(self, posts_per_classification_per_language, flat_hierarchy_per_lang=None, hierarchy_lookup_per_lang=None):
+ """Rearrange, modify or otherwise use the list of posts per classification and per language."""
+ # Build a lookup table for archive navigation, if we’ll need one.
+ if self.site.config['CREATE_ARCHIVE_NAVIGATION']:
+ if flat_hierarchy_per_lang is None:
+ raise ValueError('Archives need flat_hierarchy_per_lang')
+ self.archive_navigation = {}
+ for lang, flat_hierarchy in flat_hierarchy_per_lang.items():
+ self.archive_navigation[lang] = defaultdict(list)
+ for node in flat_hierarchy:
+ if not self.site.config["SHOW_UNTRANSLATED_POSTS"]:
+ if not [x for x in posts_per_classification_per_language[lang][node.classification_name] if x.is_translation_available(lang)]:
+ continue
+ self.archive_navigation[lang][len(node.classification_path)].append(node.classification_name)
+
+ # We need to sort it. Natsort means it’s year 10000 compatible!
+ for k, v in self.archive_navigation[lang].items():
+ self.archive_navigation[lang][k] = natsort.natsorted(v, alg=natsort.ns.F | natsort.ns.IC)
+
+ return super().postprocess_posts_per_classification(posts_per_classification_per_language, flat_hierarchy_per_lang, hierarchy_lookup_per_lang)
+
+ def should_generate_classification_page(self, classification, post_list, lang):
+ """Only generates list of posts for classification if this function returns True."""
+ return classification == '' or len(post_list) > 0
+
+ def get_other_language_variants(self, classification, lang, classifications_per_language):
+ """Return a list of variants of the same classification in other languages."""
+ return [(other_lang, classification) for other_lang, lookup in classifications_per_language.items() if classification in lookup and other_lang != lang]
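A minimal sketch (not part of the patch) of Archive.classify() above: the post date is split into zero-padded year/month/day components and truncated to max_levels, so the same post lands in a different archive node depending on the configured archive granularity.

import datetime

date = datetime.date(2020, 5, 3)
levels = [str(date.year).zfill(4), str(date.month).zfill(2), str(date.day).zfill(2)]
for max_levels in (0, 1, 2, 3):
    print(max_levels, repr('/'.join(levels[:max_levels])))
# 0 ''            (single archive)
# 1 '2020'        (yearly archives)
# 2 '2020/05'     (monthly archives)
# 3 '2020/05/03'  (daily / full archives)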
diff --git a/nikola/plugins/task/authors.plugin b/nikola/plugins/task/authors.plugin
index 3fc4ef2..19e687c 100644
--- a/nikola/plugins/task/authors.plugin
+++ b/nikola/plugins/task/authors.plugin
@@ -1,5 +1,5 @@
[Core]
-Name = render_authors
+Name = classify_authors
Module = authors
[Documentation]
@@ -8,3 +8,5 @@ Version = 0.1
Website = http://getnikola.com
Description = Render the author pages and feeds.
+[Nikola]
+PluginCategory = Taxonomy
diff --git a/nikola/plugins/task/authors.py b/nikola/plugins/task/authors.py
index ec61800..24fe650 100644
--- a/nikola/plugins/task/authors.py
+++ b/nikola/plugins/task/authors.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2015-2016 Juanjo Conti and others.
+# Copyright © 2015-2020 Juanjo Conti and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,301 +26,134 @@
"""Render the author pages and feeds."""
-from __future__ import unicode_literals
-import os
-import natsort
-try:
- from urlparse import urljoin
-except ImportError:
- from urllib.parse import urljoin # NOQA
-from collections import defaultdict
-from blinker import signal
-
-from nikola.plugin_categories import Task
+from nikola.plugin_categories import Taxonomy
from nikola import utils
-class RenderAuthors(Task):
- """Render the author pages and feeds."""
-
- name = "render_authors"
- posts_per_author = None
-
- def set_site(self, site):
- """Set Nikola site."""
- self.generate_author_pages = False
- if site.config["ENABLE_AUTHOR_PAGES"]:
- site.register_path_handler('author_index', self.author_index_path)
- site.register_path_handler('author', self.author_path)
- site.register_path_handler('author_atom', self.author_atom_path)
- site.register_path_handler('author_rss', self.author_rss_path)
- signal('scanned').connect(self.posts_scanned)
- return super(RenderAuthors, self).set_site(site)
-
- def posts_scanned(self, event):
- """Called after posts are scanned via signal."""
- self.generate_author_pages = self.site.config["ENABLE_AUTHOR_PAGES"] and len(self._posts_per_author()) > 1
- self.site.GLOBAL_CONTEXT["author_pages_generated"] = self.generate_author_pages
-
- def gen_tasks(self):
- """Render the author pages and feeds."""
- kw = {
- "translations": self.site.config["TRANSLATIONS"],
- "blog_title": self.site.config["BLOG_TITLE"],
- "site_url": self.site.config["SITE_URL"],
- "base_url": self.site.config["BASE_URL"],
- "messages": self.site.MESSAGES,
- "output_folder": self.site.config['OUTPUT_FOLDER'],
- "filters": self.site.config['FILTERS'],
- 'author_path': self.site.config['AUTHOR_PATH'],
- "author_pages_are_indexes": self.site.config['AUTHOR_PAGES_ARE_INDEXES'],
- "generate_rss": self.site.config['GENERATE_RSS'],
- "feed_teasers": self.site.config["FEED_TEASERS"],
- "feed_plain": self.site.config["FEED_PLAIN"],
- "feed_link_append_query": self.site.config["FEED_LINKS_APPEND_QUERY"],
- "show_untranslated_posts": self.site.config['SHOW_UNTRANSLATED_POSTS'],
- "feed_length": self.site.config['FEED_LENGTH'],
- "tzinfo": self.site.tzinfo,
- "pretty_urls": self.site.config['PRETTY_URLS'],
- "strip_indexes": self.site.config['STRIP_INDEXES'],
- "index_file": self.site.config['INDEX_FILE'],
- }
-
- self.site.scan_posts()
- yield self.group_task()
-
- if self.generate_author_pages:
- yield self.list_authors_page(kw)
-
- if not self._posts_per_author(): # this may be self.site.posts_per_author
- return
-
- author_list = list(self._posts_per_author().items())
+class ClassifyAuthors(Taxonomy):
+ """Classify the posts by authors."""
- def render_lists(author, posts):
- """Render author pages as RSS files and lists/indexes."""
- post_list = sorted(posts, key=lambda a: a.date)
- post_list.reverse()
- for lang in kw["translations"]:
- if kw["show_untranslated_posts"]:
- filtered_posts = post_list
- else:
- filtered_posts = [x for x in post_list if x.is_translation_available(lang)]
- if kw["generate_rss"]:
- yield self.author_rss(author, lang, filtered_posts, kw)
- # Render HTML
- if kw['author_pages_are_indexes']:
- yield self.author_page_as_index(author, lang, filtered_posts, kw)
- else:
- yield self.author_page_as_list(author, lang, filtered_posts, kw)
+ name = "classify_authors"
- for author, posts in author_list:
- for task in render_lists(author, posts):
- yield task
+ classification_name = "author"
+ overview_page_variable_name = "authors"
+ more_than_one_classifications_per_post = False
+ has_hierarchy = False
+ template_for_classification_overview = "authors.tmpl"
+ apply_to_posts = True
+ apply_to_pages = False
+ minimum_post_count_per_classification_in_overview = 1
+ omit_empty_classifications = False
+ add_other_languages_variable = True
+ path_handler_docstrings = {
+ 'author_index': """ Link to the authors index.
- def _create_authors_page(self, kw):
- """Create a global "all authors" page for each language."""
- template_name = "authors.tmpl"
- kw = kw.copy()
- for lang in kw["translations"]:
- authors = natsort.natsorted([author for author in self._posts_per_author().keys()],
- alg=natsort.ns.F | natsort.ns.IC)
- has_authors = (authors != [])
- kw['authors'] = authors
- output_name = os.path.join(
- kw['output_folder'], self.site.path('author_index', None, lang))
- context = {}
- if has_authors:
- context["title"] = kw["messages"][lang]["Authors"]
- context["items"] = [(author, self.site.link("author", author, lang)) for author
- in authors]
- context["description"] = context["title"]
- else:
- context["items"] = None
- context["permalink"] = self.site.link("author_index", None, lang)
- context["pagekind"] = ["list", "authors_page"]
- task = self.site.generic_post_list_renderer(
- lang,
- [],
- output_name,
- template_name,
- kw['filters'],
- context,
- )
- task['uptodate'] = task['uptodate'] + [utils.config_changed(kw, 'nikola.plugins.task.authors:page')]
- task['basename'] = str(self.name)
- yield task
+ Example:
- def list_authors_page(self, kw):
- """Create a global "all authors" page for each language."""
- yield self._create_authors_page(kw)
+ link://authors/ => /authors/index.html""",
+ 'author': """Link to an author's page.
- def _get_title(self, author):
- return author
+ Example:
- def _get_description(self, author, lang):
- descriptions = self.site.config['AUTHOR_PAGES_DESCRIPTIONS']
- return descriptions[lang][author] if lang in descriptions and author in descriptions[lang] else None
+ link://author/joe => /authors/joe.html""",
+ 'author_atom': """Link to an author's Atom feed.
- def author_page_as_index(self, author, lang, post_list, kw):
- """Render a sort of index page collection using only this author's posts."""
- kind = "author"
+Example:
- def page_link(i, displayed_i, num_pages, force_addition, extension=None):
- feed = "_atom" if extension == ".atom" else ""
- return utils.adjust_name_for_index_link(self.site.link(kind + feed, author, lang), i, displayed_i, lang, self.site, force_addition, extension)
+link://author_atom/joe => /authors/joe.atom""",
+ 'author_rss': """Link to an author's RSS feed.
- def page_path(i, displayed_i, num_pages, force_addition, extension=None):
- feed = "_atom" if extension == ".atom" else ""
- return utils.adjust_name_for_index_path(self.site.path(kind + feed, author, lang), i, displayed_i, lang, self.site, force_addition, extension)
+Example:
- context_source = {}
- title = self._get_title(author)
- if kw["generate_rss"]:
- # On a author page, the feeds include the author's feeds
- rss_link = ("""<link rel="alternate" type="application/rss+xml" """
- """title="RSS for author """
- """{0} ({1})" href="{2}">""".format(
- title, lang, self.site.link(kind + "_rss", author, lang)))
- context_source['rss_link'] = rss_link
- context_source["author"] = title
- indexes_title = kw["messages"][lang]["Posts by %s"] % title
- context_source["description"] = self._get_description(author, lang)
- context_source["pagekind"] = ["index", "author_page"]
- template_name = "authorindex.tmpl"
+link://author_rss/joe => /authors/joe.xml""",
+ }
- yield self.site.generic_index_renderer(lang, post_list, indexes_title, template_name, context_source, kw, str(self.name), page_link, page_path)
+ def set_site(self, site):
+ """Set Nikola site."""
+ super().set_site(site)
+ self.show_list_as_index = site.config['AUTHOR_PAGES_ARE_INDEXES']
+ self.more_than_one_classifications_per_post = site.config.get('MULTIPLE_AUTHORS_PER_POST', False)
+ self.template_for_single_list = "authorindex.tmpl" if self.show_list_as_index else "author.tmpl"
+ self.translation_manager = utils.ClassificationTranslationManager()
+
+ def is_enabled(self, lang=None):
+ """Return True if this taxonomy is enabled, or False otherwise."""
+ if not self.site.config["ENABLE_AUTHOR_PAGES"]:
+ return False
+ if lang is not None:
+ return self.generate_author_pages
+ return True
+
+ def classify(self, post, lang):
+ """Classify the given post for the given language."""
+ if self.more_than_one_classifications_per_post:
+ return post.authors(lang=lang)
+ else:
+ return [post.author(lang=lang)]
- def author_page_as_list(self, author, lang, post_list, kw):
- """Render a single flat link list with this author's posts."""
- kind = "author"
- template_name = "author.tmpl"
- output_name = os.path.join(kw['output_folder'], self.site.path(
- kind, author, lang))
- context = {}
- context["lang"] = lang
- title = self._get_title(author)
- context["author"] = title
- context["title"] = kw["messages"][lang]["Posts by %s"] % title
- context["posts"] = post_list
- context["permalink"] = self.site.link(kind, author, lang)
- context["kind"] = kind
- context["description"] = self._get_description(author, lang)
- context["pagekind"] = ["list", "author_page"]
- task = self.site.generic_post_list_renderer(
- lang,
- post_list,
- output_name,
- template_name,
- kw['filters'],
- context,
- )
- task['uptodate'] = task['uptodate'] + [utils.config_changed(kw, 'nikola.plugins.task.authors:list')]
- task['basename'] = str(self.name)
- yield task
+ def get_classification_friendly_name(self, classification, lang, only_last_component=False):
+ """Extract a friendly name from the classification."""
+ return classification
- def author_rss(self, author, lang, posts, kw):
- """Create a RSS feed for a single author in a given language."""
- kind = "author"
- # Render RSS
- output_name = os.path.normpath(
- os.path.join(kw['output_folder'],
- self.site.path(kind + "_rss", author, lang)))
- feed_url = urljoin(self.site.config['BASE_URL'], self.site.link(kind + "_rss", author, lang).lstrip('/'))
- deps = []
- deps_uptodate = []
- post_list = sorted(posts, key=lambda a: a.date)
- post_list.reverse()
- for post in post_list:
- deps += post.deps(lang)
- deps_uptodate += post.deps_uptodate(lang)
- task = {
- 'basename': str(self.name),
- 'name': output_name,
- 'file_dep': deps,
- 'targets': [output_name],
- 'actions': [(utils.generic_rss_renderer,
- (lang, "{0} ({1})".format(kw["blog_title"](lang), self._get_title(author)),
- kw["site_url"], None, post_list,
- output_name, kw["feed_teasers"], kw["feed_plain"], kw['feed_length'],
- feed_url, None, kw["feed_link_append_query"]))],
- 'clean': True,
- 'uptodate': [utils.config_changed(kw, 'nikola.plugins.task.authors:rss')] + deps_uptodate,
- 'task_dep': ['render_posts'],
- }
- return utils.apply_filters(task, kw['filters'])
+ def get_overview_path(self, lang, dest_type='page'):
+ """Return a path for the list of all classifications."""
+ path = self.site.config['AUTHOR_PATH'](lang)
+ return [component for component in path.split('/') if component], 'always'
- def slugify_author_name(self, name, lang=None):
- """Slugify an author name."""
- if lang is None: # TODO: remove in v8
- utils.LOGGER.warn("RenderAuthors.slugify_author_name() called without language!")
- lang = ''
+ def get_path(self, classification, lang, dest_type='page'):
+ """Return a path for the given classification."""
if self.site.config['SLUG_AUTHOR_PATH']:
- name = utils.slugify(name, lang)
- return name
-
- def author_index_path(self, name, lang):
- """Link to the author's index.
-
- Example:
-
- link://authors/ => /authors/index.html
- """
- return [_f for _f in [self.site.config['TRANSLATIONS'][lang],
- self.site.config['AUTHOR_PATH'],
- self.site.config['INDEX_FILE']] if _f]
-
- def author_path(self, name, lang):
- """Link to an author's page.
-
- Example:
-
- link://author/joe => /authors/joe.html
- """
- if self.site.config['PRETTY_URLS']:
- return [_f for _f in [
- self.site.config['TRANSLATIONS'][lang],
- self.site.config['AUTHOR_PATH'],
- self.slugify_author_name(name, lang),
- self.site.config['INDEX_FILE']] if _f]
+ slug = utils.slugify(classification, lang)
else:
- return [_f for _f in [
- self.site.config['TRANSLATIONS'][lang],
- self.site.config['AUTHOR_PATH'],
- self.slugify_author_name(name, lang) + ".html"] if _f]
-
- def author_atom_path(self, name, lang):
- """Link to an author's Atom feed.
-
- Example:
-
- link://author_atom/joe => /authors/joe.atom
- """
- return [_f for _f in [self.site.config['TRANSLATIONS'][lang],
- self.site.config['AUTHOR_PATH'], self.slugify_author_name(name, lang) + ".atom"] if
- _f]
-
- def author_rss_path(self, name, lang):
- """Link to an author's RSS feed.
-
- Example:
-
- link://author_rss/joe => /authors/joe.rss
- """
- return [_f for _f in [self.site.config['TRANSLATIONS'][lang],
- self.site.config['AUTHOR_PATH'], self.slugify_author_name(name, lang) + ".xml"] if
- _f]
+ slug = classification
+ return [self.site.config['AUTHOR_PATH'](lang), slug], 'auto'
- def _add_extension(self, path, extension):
- path[-1] += extension
- return path
+ def provide_overview_context_and_uptodate(self, lang):
+ """Provide data for the context and the uptodate list for the list of all classifiations."""
+ kw = {
+ "messages": self.site.MESSAGES,
+ }
+ context = {
+ "title": kw["messages"][lang]["Authors"],
+ "description": kw["messages"][lang]["Authors"],
+ "permalink": self.site.link("author_index", None, lang),
+ "pagekind": ["list", "authors_page"],
+ }
+ kw.update(context)
+ return context, kw
- def _posts_per_author(self):
- """Return a dict of posts per author."""
- if self.posts_per_author is None:
- self.posts_per_author = defaultdict(list)
- for post in self.site.timeline:
- if post.is_post:
- self.posts_per_author[post.author()].append(post)
- return self.posts_per_author
+ def provide_context_and_uptodate(self, classification, lang, node=None):
+ """Provide data for the context and the uptodate list for the list of the given classifiation."""
+ descriptions = self.site.config['AUTHOR_PAGES_DESCRIPTIONS']
+ kw = {
+ "messages": self.site.MESSAGES,
+ }
+ context = {
+ "author": classification,
+ "title": kw["messages"][lang]["Posts by %s"] % classification,
+ "description": descriptions[lang][classification] if lang in descriptions and classification in descriptions[lang] else None,
+ "pagekind": ["index" if self.show_list_as_index else "list", "author_page"],
+ }
+ kw.update(context)
+ return context, kw
+
+ def get_other_language_variants(self, classification, lang, classifications_per_language):
+ """Return a list of variants of the same author in other languages."""
+ return self.translation_manager.get_translations_as_list(classification, lang, classifications_per_language)
+
+ def postprocess_posts_per_classification(self, posts_per_classification_per_language, flat_hierarchy_per_lang=None, hierarchy_lookup_per_lang=None):
+ """Rearrange, modify or otherwise use the list of posts per classification and per language."""
+ more_than_one = False
+ for lang, posts_per_author in posts_per_classification_per_language.items():
+ authors = set()
+ for author, posts in posts_per_author.items():
+ for post in posts:
+ if not self.site.config["SHOW_UNTRANSLATED_POSTS"] and not post.is_translation_available(lang):
+ continue
+ authors.add(author)
+ if len(authors) > 1:
+ more_than_one = True
+ self.generate_author_pages = self.site.config["ENABLE_AUTHOR_PAGES"] and more_than_one
+ self.site.GLOBAL_CONTEXT["author_pages_generated"] = self.generate_author_pages
+ self.translation_manager.add_defaults(posts_per_classification_per_language)
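The rewritten authors plugin builds an author's page path from AUTHOR_PATH plus either the raw name or its slug, depending on SLUG_AUTHOR_PATH. A minimal standalone sketch of that branch follows; the simple_slugify helper is a simplified stand-in for nikola.utils.slugify, and the config values are hypothetical.

import re

def simple_slugify(name):
    # Simplified stand-in for nikola.utils.slugify: lowercase, dashes for
    # non-alphanumeric runs. The real helper is Unicode- and language-aware.
    return re.sub(r'[^a-z0-9]+', '-', name.lower()).strip('-')

def author_page_path(config, author):
    # Mirrors ClassifyAuthors.get_path: slugify only when SLUG_AUTHOR_PATH is set.
    slug = simple_slugify(author) if config['SLUG_AUTHOR_PATH'] else author
    return [config['AUTHOR_PATH'], slug]

config = {'AUTHOR_PATH': 'authors', 'SLUG_AUTHOR_PATH': True}  # hypothetical values
print(author_page_path(config, 'Roberto Alsina'))  # ['authors', 'roberto-alsina']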
diff --git a/nikola/plugins/task/bundles.plugin b/nikola/plugins/task/bundles.plugin
index b5bf6e4..939065b 100644
--- a/nikola/plugins/task/bundles.plugin
+++ b/nikola/plugins/task/bundles.plugin
@@ -6,8 +6,8 @@ module = bundles
author = Roberto Alsina
version = 1.0
website = https://getnikola.com/
-description = Theme bundles using WebAssets
+description = Bundle assets
[Nikola]
-plugincategory = Task
+PluginCategory = Task
diff --git a/nikola/plugins/task/bundles.py b/nikola/plugins/task/bundles.py
index b33d8e0..aa4ce78 100644
--- a/nikola/plugins/task/bundles.py
+++ b/nikola/plugins/task/bundles.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -24,38 +24,26 @@
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-"""Bundle assets using WebAssets."""
+"""Bundle assets."""
-from __future__ import unicode_literals
+import configparser
+import io
+import itertools
import os
-
-try:
- import webassets
-except ImportError:
- webassets = None # NOQA
+import shutil
from nikola.plugin_categories import LateTask
from nikola import utils
class BuildBundles(LateTask):
- """Bundle assets using WebAssets."""
+ """Bundle assets."""
name = "create_bundles"
- def set_site(self, site):
- """Set Nikola site."""
- self.logger = utils.get_logger('bundles', utils.STDERR_HANDLER)
- if webassets is None and site.config['USE_BUNDLES']:
- utils.req_missing(['webassets'], 'USE_BUNDLES', optional=True)
- self.logger.warn('Setting USE_BUNDLES to False.')
- site.config['USE_BUNDLES'] = False
- site._GLOBAL_CONTEXT['use_bundles'] = False
- super(BuildBundles, self).set_site(site)
-
def gen_tasks(self):
- """Bundle assets using WebAssets."""
+ """Bundle assets."""
kw = {
'filters': self.site.config['FILTERS'],
'output_folder': self.site.config['OUTPUT_FOLDER'],
@@ -69,28 +57,21 @@ class BuildBundles(LateTask):
def build_bundle(output, inputs):
out_dir = os.path.join(kw['output_folder'],
os.path.dirname(output))
- inputs = [os.path.relpath(i, out_dir) for i in inputs if os.path.isfile(i)]
- cache_dir = os.path.join(kw['cache_folder'], 'webassets')
- utils.makedirs(cache_dir)
- env = webassets.Environment(out_dir, os.path.dirname(output),
- cache=cache_dir)
- if inputs:
- bundle = webassets.Bundle(*inputs, output=os.path.basename(output))
- env.register(output, bundle)
- # This generates the file
- try:
- env[output].urls()
- except Exception as e:
- self.logger.error("Failed to build bundles.")
- self.logger.exception(e)
- self.logger.notice("Try running ``nikola clean`` and building again.")
- else:
- with open(os.path.join(out_dir, os.path.basename(output)), 'wb+'):
- pass # Create empty file
+ inputs = [
+ os.path.join(
+ out_dir,
+ os.path.relpath(i, out_dir))
+ for i in inputs if os.path.isfile(i)
+ ]
+ with open(os.path.join(out_dir, os.path.basename(output)), 'wb+') as out_fh:
+ for i in inputs:
+ with open(i, 'rb') as in_fh:
+ shutil.copyfileobj(in_fh, out_fh)
+ out_fh.write(b'\n')
yield self.group_task()
- if (webassets is not None and self.site.config['USE_BUNDLES'] is not
- False):
+
+ if self.site.config['USE_BUNDLES']:
for name, _files in kw['theme_bundles'].items():
output_path = os.path.join(kw['output_folder'], name)
dname = os.path.dirname(name)
@@ -127,19 +108,17 @@ class BuildBundles(LateTask):
def get_theme_bundles(themes):
"""Given a theme chain, return the bundle definitions."""
- bundles = {}
for theme_name in themes:
bundles_path = os.path.join(
utils.get_theme_path(theme_name), 'bundles')
if os.path.isfile(bundles_path):
- with open(bundles_path) as fd:
- for line in fd:
- try:
- name, files = line.split('=')
- files = [f.strip() for f in files.split(',')]
- bundles[name.strip().replace('/', os.sep)] = files
- except ValueError:
- # for empty lines
- pass
- break
- return bundles
+ config = configparser.ConfigParser()
+ header = io.StringIO('[bundles]\n')
+ with open(bundles_path, 'rt') as fd:
+ config.read_file(itertools.chain(header, fd))
+ bundles = {}
+ for name, files in config['bundles'].items():
+ name = name.strip().replace('/', os.sep)
+ files = [f.strip() for f in files.split(',') if f.strip()]
+ bundles[name] = files
+ return bundles
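The new get_theme_bundles parses the theme's plain "name=file1, file2" bundles file by prepending a synthetic [bundles] section header so configparser can read it. A self-contained sketch of that trick, using an in-memory string in place of the theme file and an invented bundle definition:

import configparser
import io
import itertools
import os

# Hypothetical contents of a theme's "bundles" file.
bundles_text = "assets/css/all.css=bootstrap.min.css, rst_base.css, theme.css\n"

config = configparser.ConfigParser()
header = io.StringIO('[bundles]\n')
# chain() lets configparser see the injected header followed by the real lines.
config.read_file(itertools.chain(header, io.StringIO(bundles_text)))

bundles = {}
for name, files in config['bundles'].items():
    name = name.strip().replace('/', os.sep)
    bundles[name] = [f.strip() for f in files.split(',') if f.strip()]
print(bundles)  # {'assets/css/all.css': ['bootstrap.min.css', 'rst_base.css', 'theme.css']}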
diff --git a/nikola/plugins/task/categories.plugin b/nikola/plugins/task/categories.plugin
new file mode 100644
index 0000000..be2bb79
--- /dev/null
+++ b/nikola/plugins/task/categories.plugin
@@ -0,0 +1,12 @@
+[Core]
+name = classify_categories
+module = categories
+
+[Documentation]
+author = Roberto Alsina
+version = 1.0
+website = https://getnikola.com/
+description = Render the category pages and feeds.
+
+[Nikola]
+PluginCategory = Taxonomy
diff --git a/nikola/plugins/task/categories.py b/nikola/plugins/task/categories.py
new file mode 100644
index 0000000..68f9caa
--- /dev/null
+++ b/nikola/plugins/task/categories.py
@@ -0,0 +1,248 @@
+# -*- coding: utf-8 -*-
+
+# Copyright © 2012-2020 Roberto Alsina and others.
+
+# Permission is hereby granted, free of charge, to any
+# person obtaining a copy of this software and associated
+# documentation files (the "Software"), to deal in the
+# Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the
+# Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice
+# shall be included in all copies or substantial portions of
+# the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
+# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+"""Render the category pages and feeds."""
+
+import os
+
+from nikola.plugin_categories import Taxonomy
+from nikola import utils, hierarchy_utils
+
+
+class ClassifyCategories(Taxonomy):
+ """Classify the posts by categories."""
+
+ name = "classify_categories"
+
+ classification_name = "category"
+ overview_page_variable_name = "categories"
+ overview_page_items_variable_name = "cat_items"
+ overview_page_hierarchy_variable_name = "cat_hierarchy"
+ more_than_one_classifications_per_post = False
+ has_hierarchy = True
+ include_posts_from_subhierarchies = True
+ include_posts_into_hierarchy_root = False
+ show_list_as_subcategories_list = False
+ template_for_classification_overview = "tags.tmpl"
+ always_disable_rss = False
+ always_disable_atom = False
+ apply_to_posts = True
+ apply_to_pages = False
+ minimum_post_count_per_classification_in_overview = 1
+ omit_empty_classifications = True
+ add_other_languages_variable = True
+ path_handler_docstrings = {
+ 'category_index': """A link to the category index.
+
+Example:
+
+link://category_index => /categories/index.html""",
+ 'category': """A link to a category. Takes page number as optional keyword argument.
+
+Example:
+
+link://category/dogs => /categories/dogs.html""",
+ 'category_atom': """A link to a category's Atom feed.
+
+Example:
+
+link://category_atom/dogs => /categories/dogs.atom""",
+ 'category_rss': """A link to a category's RSS feed.
+
+Example:
+
+link://category_rss/dogs => /categories/dogs.xml""",
+ }
+
+ def set_site(self, site):
+ """Set site, which is a Nikola instance."""
+ super().set_site(site)
+ self.show_list_as_index = self.site.config['CATEGORY_PAGES_ARE_INDEXES']
+ self.template_for_single_list = "tagindex.tmpl" if self.show_list_as_index else "tag.tmpl"
+ self.translation_manager = utils.ClassificationTranslationManager()
+
+ # Needed to undo names for CATEGORY_PAGES_FOLLOW_DESTPATH
+ self.destpath_names_reverse = {}
+ for lang in self.site.config['TRANSLATIONS']:
+ self.destpath_names_reverse[lang] = {}
+ for k, v in self.site.config['CATEGORY_DESTPATH_NAMES'](lang).items():
+ self.destpath_names_reverse[lang][v] = k
+ self.destpath_names_reverse = utils.TranslatableSetting(
+ '_CATEGORY_DESTPATH_NAMES_REVERSE', self.destpath_names_reverse,
+ self.site.config['TRANSLATIONS'])
+
+ def is_enabled(self, lang=None):
+ """Return True if this taxonomy is enabled, or False otherwise."""
+ return True
+
+ def classify(self, post, lang):
+ """Classify the given post for the given language."""
+ cat = post.meta('category', lang=lang).strip()
+ return [cat] if cat else []
+
+ def get_classification_friendly_name(self, classification, lang, only_last_component=False):
+ """Extract a friendly name from the classification."""
+ classification = self.extract_hierarchy(classification)
+ return classification[-1] if classification else ''
+
+ def get_overview_path(self, lang, dest_type='page'):
+ """Return a path for the list of all classifications."""
+ if self.site.config['CATEGORIES_INDEX_PATH'](lang):
+ path = self.site.config['CATEGORIES_INDEX_PATH'](lang)
+ append_index = 'never'
+ else:
+ path = self.site.config['CATEGORY_PATH'](lang)
+ append_index = 'always'
+ return [component for component in path.split('/') if component], append_index
+
+ def slugify_tag_name(self, name, lang):
+ """Slugify a tag name."""
+ if self.site.config['SLUG_TAG_PATH']:
+ name = utils.slugify(name, lang)
+ return name
+
+ def slugify_category_name(self, path, lang):
+ """Slugify a category name."""
+ if self.site.config['CATEGORY_OUTPUT_FLAT_HIERARCHY']:
+ path = path[-1:] # only the leaf
+ result = [self.slugify_tag_name(part, lang) for part in path]
+ result[0] = self.site.config['CATEGORY_PREFIX'] + result[0]
+ if not self.site.config['PRETTY_URLS']:
+ result = ['-'.join(result)]
+ return result
+
+ def get_path(self, classification, lang, dest_type='page'):
+ """Return a path for the given classification."""
+ cat_string = '/'.join(classification)
+ classification_raw = classification # needed to undo CATEGORY_DESTPATH_NAMES
+ destpath_names_reverse = self.destpath_names_reverse(lang)
+ if self.site.config['CATEGORY_PAGES_FOLLOW_DESTPATH']:
+ base_dir = None
+ for post in self.site.posts_per_category[cat_string]:
+ if post.category_from_destpath:
+ base_dir = post.folder_base(lang)
+ # Handle CATEGORY_DESTPATH_NAMES
+ if cat_string in destpath_names_reverse:
+ cat_string = destpath_names_reverse[cat_string]
+ classification_raw = cat_string.split('/')
+ break
+
+ if not self.site.config['CATEGORY_DESTPATH_TRIM_PREFIX']:
+ # If prefixes are not trimmed, we'll already have the base_dir in classification_raw
+ base_dir = ''
+
+ if base_dir is None:
+ # fallback: first POSTS entry + classification
+ base_dir = self.site.config['POSTS'][0][1]
+ base_dir_list = base_dir.split(os.sep)
+ sub_dir = [self.slugify_tag_name(part, lang) for part in classification_raw]
+ return [_f for _f in (base_dir_list + sub_dir) if _f], 'auto'
+ else:
+ return [_f for _f in [self.site.config['CATEGORY_PATH'](lang)] if _f] + self.slugify_category_name(
+ classification, lang), 'auto'
+
+ def extract_hierarchy(self, classification):
+ """Given a classification, return a list of parts in the hierarchy."""
+ return hierarchy_utils.parse_escaped_hierarchical_category_name(classification)
+
+ def recombine_classification_from_hierarchy(self, hierarchy):
+ """Given a list of parts in the hierarchy, return the classification string."""
+ return hierarchy_utils.join_hierarchical_category_path(hierarchy)
+
+ def provide_overview_context_and_uptodate(self, lang):
+ """Provide data for the context and the uptodate list for the list of all classifiations."""
+ kw = {
+ 'category_path': self.site.config['CATEGORY_PATH'],
+ 'category_prefix': self.site.config['CATEGORY_PREFIX'],
+ "category_pages_are_indexes": self.site.config['CATEGORY_PAGES_ARE_INDEXES'],
+ "tzinfo": self.site.tzinfo,
+ "category_descriptions": self.site.config['CATEGORY_DESCRIPTIONS'],
+ "category_titles": self.site.config['CATEGORY_TITLES'],
+ }
+ context = {
+ "title": self.site.MESSAGES[lang]["Categories"],
+ "description": self.site.MESSAGES[lang]["Categories"],
+ "pagekind": ["list", "tags_page"],
+ }
+ kw.update(context)
+ return context, kw
+
+ def provide_context_and_uptodate(self, classification, lang, node=None):
+ """Provide data for the context and the uptodate list for the list of the given classifiation."""
+ cat_path = self.extract_hierarchy(classification)
+ kw = {
+ 'category_path': self.site.config['CATEGORY_PATH'],
+ 'category_prefix': self.site.config['CATEGORY_PREFIX'],
+ "category_pages_are_indexes": self.site.config['CATEGORY_PAGES_ARE_INDEXES'],
+ "tzinfo": self.site.tzinfo,
+ "category_descriptions": self.site.config['CATEGORY_DESCRIPTIONS'],
+ "category_titles": self.site.config['CATEGORY_TITLES'],
+ }
+ posts = self.site.posts_per_classification[self.classification_name][lang]
+ if node is None:
+ children = []
+ else:
+ children = [child for child in node.children if len([post for post in posts.get(child.classification_name, []) if self.site.config['SHOW_UNTRANSLATED_POSTS'] or post.is_translation_available(lang)]) > 0]
+ subcats = [(child.name, self.site.link(self.classification_name, child.classification_name, lang)) for child in children]
+ friendly_name = self.get_classification_friendly_name(classification, lang)
+ context = {
+ "title": self.site.config['CATEGORY_TITLES'].get(lang, {}).get(classification, self.site.MESSAGES[lang]["Posts about %s"] % friendly_name),
+ "description": self.site.config['CATEGORY_DESCRIPTIONS'].get(lang, {}).get(classification),
+ "pagekind": ["tag_page", "index" if self.show_list_as_index else "list"],
+ "tag": friendly_name,
+ "category": classification,
+ "category_path": cat_path,
+ "subcategories": subcats,
+ }
+ kw.update(context)
+ return context, kw
+
+ def get_other_language_variants(self, classification, lang, classifications_per_language):
+ """Return a list of variants of the same category in other languages."""
+ return self.translation_manager.get_translations_as_list(classification, lang, classifications_per_language)
+
+ def postprocess_posts_per_classification(self, posts_per_classification_per_language, flat_hierarchy_per_lang=None, hierarchy_lookup_per_lang=None):
+ """Rearrange, modify or otherwise use the list of posts per classification and per language."""
+ self.translation_manager.read_from_config(self.site, 'CATEGORY', posts_per_classification_per_language, False)
+
+ def should_generate_classification_page(self, classification, post_list, lang):
+ """Only generates list of posts for classification if this function returns True."""
+ if self.site.config["CATEGORY_PAGES_FOLLOW_DESTPATH"]:
+ # In destpath mode, allow users to replace the default category index with a custom page.
+ classification_hierarchy = self.extract_hierarchy(classification)
+ dest_list, _ = self.get_path(classification_hierarchy, lang)
+ short_destination = os.sep.join(dest_list + [self.site.config["INDEX_FILE"]])
+ if short_destination in self.site.post_per_file:
+ return False
+ return True
+
+ def should_generate_atom_for_classification_page(self, classification, post_list, lang):
+ """Only generates Atom feed for list of posts for classification if this function returns True."""
+ return True
+
+ def should_generate_rss_for_classification_page(self, classification, post_list, lang):
+ """Only generates RSS feed for list of posts for classification if this function returns True."""
+ return True
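Categories are hierarchical: "Software/Python" is split into parts, each part is slugified, the first component gets CATEGORY_PREFIX, and without PRETTY_URLS the parts collapse into a single dash-joined name. A standalone sketch of slugify_category_name under made-up settings, again with a simplified slugify stand-in:

import re

def simple_slugify(part):
    # Simplified stand-in for nikola.utils.slugify.
    return re.sub(r'[^a-z0-9]+', '-', part.lower()).strip('-')

def category_slug(path, prefix='cat_', flat=False, pretty_urls=True):
    # Mirrors ClassifyCategories.slugify_category_name with SLUG_TAG_PATH enabled.
    if flat:
        path = path[-1:]            # CATEGORY_OUTPUT_FLAT_HIERARCHY: keep only the leaf
    result = [simple_slugify(p) for p in path]
    result[0] = prefix + result[0]  # CATEGORY_PREFIX on the first component
    if not pretty_urls:
        result = ['-'.join(result)]  # collapse to one file name
    return result

print(category_slug(['Software', 'Python']))                     # ['cat_software', 'python']
print(category_slug(['Software', 'Python'], pretty_urls=False))  # ['cat_software-python']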
diff --git a/nikola/plugins/task/copy_assets.plugin b/nikola/plugins/task/copy_assets.plugin
index ddd38df..b63581d 100644
--- a/nikola/plugins/task/copy_assets.plugin
+++ b/nikola/plugins/task/copy_assets.plugin
@@ -9,5 +9,5 @@ website = https://getnikola.com/
description = Copy theme assets into output.
[Nikola]
-plugincategory = Task
+PluginCategory = Task
diff --git a/nikola/plugins/task/copy_assets.py b/nikola/plugins/task/copy_assets.py
index 4ed7414..c6d32c7 100644
--- a/nikola/plugins/task/copy_assets.py
+++ b/nikola/plugins/task/copy_assets.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,11 +26,11 @@
"""Copy theme assets into output."""
-from __future__ import unicode_literals
import io
import os
+from nikola.packages.pygments_better_html import BetterHtmlFormatter
from nikola.plugin_categories import Task
from nikola import utils
@@ -48,13 +48,19 @@ class CopyAssets(Task):
"""
kw = {
"themes": self.site.THEMES,
+ "translations": self.site.translations,
"files_folders": self.site.config['FILES_FOLDERS'],
"output_folder": self.site.config['OUTPUT_FOLDER'],
"filters": self.site.config['FILTERS'],
"code_color_scheme": self.site.config['CODE_COLOR_SCHEME'],
- "code.css_selectors": 'pre.code',
+ "code.css_selectors": ['pre.code', '.code .codetable', '.highlight pre'],
+ "code.css_wrappers": ['.highlight', '.code'],
"code.css_head": '/* code.css file generated by Nikola */\n',
- "code.css_close": "\ntable.codetable { width: 100%;} td.linenos {text-align: right; width: 4em;}\n",
+ "code.css_close": (
+ "\ntable.codetable, table.highlighttable { width: 100%;}\n"
+ ".codetable td.linenos, td.linenos { text-align: right; width: 3.5em; "
+ "padding-right: 0.5em; background: rgba(127, 127, 127, 0.2) }\n"
+ ".codetable td.code, td.code { padding-left: 0.5em; }\n"),
}
tasks = {}
code_css_path = os.path.join(kw['output_folder'], 'assets', 'css', 'code.css')
@@ -63,11 +69,20 @@ class CopyAssets(Task):
files_folders=kw['files_folders'], output_dir=None)
yield self.group_task()
+ main_theme = utils.get_theme_path(kw['themes'][0])
+ theme_ini = utils.parse_theme_meta(main_theme)
+ if theme_ini:
+ ignored_assets = theme_ini.get("Nikola", "ignored_assets", fallback='').split(',')
+ ignored_assets = [os.path.normpath(asset_name.strip()) for asset_name in ignored_assets]
+ else:
+ ignored_assets = []
+
for theme_name in kw['themes']:
src = os.path.join(utils.get_theme_path(theme_name), 'assets')
dst = os.path.join(kw['output_folder'], 'assets')
for task in utils.copy_tree(src, dst):
- if task['name'] in tasks:
+ asset_name = os.path.relpath(task['name'], dst)
+ if task['name'] in tasks or asset_name in ignored_assets:
continue
tasks[task['name']] = task
task['uptodate'] = [utils.config_changed(kw, 'nikola.plugins.task.copy_assets')]
@@ -79,18 +94,18 @@ class CopyAssets(Task):
yield utils.apply_filters(task, kw['filters'])
# Check whether or not there is a code.css file around.
- if not code_css_input:
+ if not code_css_input and kw['code_color_scheme']:
def create_code_css():
- from pygments.formatters import get_formatter_by_name
- formatter = get_formatter_by_name('html', style=kw["code_color_scheme"])
+ formatter = BetterHtmlFormatter(style=kw["code_color_scheme"])
utils.makedirs(os.path.dirname(code_css_path))
- with io.open(code_css_path, 'w+', encoding='utf8') as outf:
+ with io.open(code_css_path, 'w+', encoding='utf-8') as outf:
outf.write(kw["code.css_head"])
- outf.write(formatter.get_style_defs(kw["code.css_selectors"]))
+ outf.write(formatter.get_style_defs(
+ kw["code.css_selectors"], kw["code.css_wrappers"]))
outf.write(kw["code.css_close"])
if os.path.exists(code_css_path):
- with io.open(code_css_path, 'r', encoding='utf-8') as fh:
+ with io.open(code_css_path, 'r', encoding='utf-8-sig') as fh:
testcontents = fh.read(len(kw["code.css_head"])) == kw["code.css_head"]
else:
testcontents = False
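Themes can now opt assets out of copying via an ignored_assets key in the theme meta file; the plugin reads it with configparser and normalizes each entry. A minimal sketch of that parsing, with invented sample meta contents:

import configparser
import os

# Hypothetical [Nikola] section of a theme's meta file.
theme_meta = """
[Nikola]
ignored_assets = assets/css/theme.css, assets/js/unused.js
"""

theme_ini = configparser.ConfigParser()
theme_ini.read_string(theme_meta)
ignored = theme_ini.get("Nikola", "ignored_assets", fallback='').split(',')
ignored = [os.path.normpath(name.strip()) for name in ignored if name.strip()]
print(ignored)  # ['assets/css/theme.css', 'assets/js/unused.js'] on POSIX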
diff --git a/nikola/plugins/task/copy_files.plugin b/nikola/plugins/task/copy_files.plugin
index e4bb1cf..45c2253 100644
--- a/nikola/plugins/task/copy_files.plugin
+++ b/nikola/plugins/task/copy_files.plugin
@@ -9,5 +9,5 @@ website = https://getnikola.com/
description = Copy static files into the output.
[Nikola]
-plugincategory = Task
+PluginCategory = Task
diff --git a/nikola/plugins/task/copy_files.py b/nikola/plugins/task/copy_files.py
index 6f6cfb8..26364d4 100644
--- a/nikola/plugins/task/copy_files.py
+++ b/nikola/plugins/task/copy_files.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
diff --git a/nikola/plugins/task/galleries.plugin b/nikola/plugins/task/galleries.plugin
index 2064e68..d06e117 100644
--- a/nikola/plugins/task/galleries.plugin
+++ b/nikola/plugins/task/galleries.plugin
@@ -9,5 +9,5 @@ website = https://getnikola.com/
description = Create image galleries automatically.
[Nikola]
-plugincategory = Task
+PluginCategory = Task
diff --git a/nikola/plugins/task/galleries.py b/nikola/plugins/task/galleries.py
index edfd33d..b8ac9ee 100644
--- a/nikola/plugins/task/galleries.py
+++ b/nikola/plugins/task/galleries.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,32 +26,29 @@
"""Render image galleries."""
-from __future__ import unicode_literals
import datetime
import glob
import io
import json
import mimetypes
import os
-try:
- from urlparse import urljoin
-except ImportError:
- from urllib.parse import urljoin # NOQA
+from collections import OrderedDict
+from urllib.parse import urljoin
import natsort
-try:
- from PIL import Image # NOQA
-except ImportError:
- import Image as _Image
- Image = _Image
-
import PyRSS2Gen as rss
+from PIL import Image
from nikola.plugin_categories import Task
from nikola import utils
from nikola.image_processing import ImageProcessor
from nikola.post import Post
+try:
+ from ruamel.yaml import YAML
+except ImportError:
+ YAML = None
+
_image_size_cache = {}
@@ -63,12 +60,11 @@ class Galleries(Task, ImageProcessor):
def set_site(self, site):
"""Set Nikola site."""
+ super().set_site(site)
site.register_path_handler('gallery', self.gallery_path)
site.register_path_handler('gallery_global', self.gallery_global_path)
site.register_path_handler('gallery_rss', self.gallery_rss_path)
- self.logger = utils.get_logger('render_galleries', utils.STDERR_HANDLER)
-
self.kw = {
'thumbnail_size': site.config['THUMBNAIL_SIZE'],
'max_image_size': site.config['MAX_IMAGE_SIZE'],
@@ -87,6 +83,11 @@ class Galleries(Task, ImageProcessor):
'generate_rss': site.config['GENERATE_RSS'],
'preserve_exif_data': site.config['PRESERVE_EXIF_DATA'],
'exif_whitelist': site.config['EXIF_WHITELIST'],
+ 'preserve_icc_profiles': site.config['PRESERVE_ICC_PROFILES'],
+ 'index_path': site.config['INDEX_PATH'],
+ 'disable_indexes': site.config['DISABLE_INDEXES'],
+ 'galleries_use_thumbnail': site.config['GALLERIES_USE_THUMBNAIL'],
+ 'galleries_default_thumbnail': site.config['GALLERIES_DEFAULT_THUMBNAIL'],
}
# Verify that no folder in GALLERY_FOLDERS appears twice
@@ -104,8 +105,6 @@ class Galleries(Task, ImageProcessor):
# Create self.gallery_links
self.create_galleries_paths()
- return super(Galleries, self).set_site(site)
-
def _find_gallery_path(self, name):
# The system using self.proper_gallery_links and self.improper_gallery_links
# is similar as in listings.py.
@@ -165,7 +164,7 @@ class Galleries(Task, ImageProcessor):
gallery_path = self._find_gallery_path(name)
return [_f for _f in [self.site.config['TRANSLATIONS'][lang]] +
gallery_path.split(os.sep) +
- ['rss.xml'] if _f]
+ [self.site.config['RSS_FILENAME_BASE'](lang) + self.site.config['RSS_EXTENSION']] if _f]
def gen_tasks(self):
"""Render image galleries."""
@@ -173,7 +172,7 @@ class Galleries(Task, ImageProcessor):
self.image_ext_list.extend(self.site.config.get('EXTRA_IMAGE_EXTENSIONS', []))
for k, v in self.site.GLOBAL_CONTEXT['template_hooks'].items():
- self.kw['||template_hooks|{0}||'.format(k)] = v._items
+ self.kw['||template_hooks|{0}||'.format(k)] = v.calculate_deps()
self.site.scan_posts()
yield self.group_task()
@@ -223,6 +222,12 @@ class Galleries(Task, ImageProcessor):
self.kw[k] = self.site.GLOBAL_CONTEXT[k](lang)
context = {}
+
+ # Do we have a metadata file?
+ meta_path, order, captions, img_metadata = self.find_metadata(gallery, lang)
+ context['meta_path'] = meta_path
+ context['order'] = order
+ context['captions'] = captions
context["lang"] = lang
if post:
context["title"] = post.title(lang)
@@ -232,7 +237,20 @@ class Galleries(Task, ImageProcessor):
image_name_list = [os.path.basename(p) for p in image_list]
- if self.kw['use_filename_as_title']:
+ if captions:
+ img_titles = []
+ for fn in image_name_list:
+ if fn in captions:
+ img_titles.append(captions[fn])
+ else:
+ if self.kw['use_filename_as_title']:
+ img_titles.append(fn)
+ else:
+ img_titles.append('')
+ self.logger.debug(
+ "Image {0} found in gallery but not listed in {1}".
+ format(fn, context['meta_path']))
+ elif self.kw['use_filename_as_title']:
img_titles = []
for fn in image_name_list:
name_without_ext = os.path.splitext(os.path.basename(fn))[0]
@@ -248,6 +266,7 @@ class Galleries(Task, ImageProcessor):
folders = []
# Generate friendly gallery names
+ fpost_list = []
for path, folder in folder_list:
fpost = self.parse_index(path, input_folder, output_folder)
if fpost:
@@ -256,8 +275,17 @@ class Galleries(Task, ImageProcessor):
ft = folder
if not folder.endswith('/'):
folder += '/'
- folders.append((folder, ft))
+ # TODO: This is to keep compatibility with user's custom gallery.tmpl
+ # To be removed in v9 someday
+ if self.kw['galleries_use_thumbnail']:
+ folders.append((folder, ft, fpost))
+ if fpost:
+ fpost_list.append(fpost.source_path)
+ else:
+ folders.append((folder, ft))
+
+ context["gallery_path"] = gallery
context["folders"] = natsort.natsorted(
folders, alg=natsort.ns.F | natsort.ns.IC)
context["crumbs"] = utils.get_crumbs(gallery, index_folder=self, lang=lang)
@@ -265,6 +293,7 @@ class Galleries(Task, ImageProcessor):
context["enable_comments"] = self.kw['comments_in_galleries']
context["thumbnail_size"] = self.kw["thumbnail_size"]
context["pagekind"] = ["gallery_front"]
+ context["galleries_use_thumbnail"] = self.kw['galleries_use_thumbnail']
if post:
yield {
@@ -291,7 +320,7 @@ class Galleries(Task, ImageProcessor):
yield utils.apply_filters({
'basename': self.name,
'name': dst,
- 'file_dep': file_dep,
+ 'file_dep': file_dep + dest_img_list + fpost_list,
'targets': [dst],
'actions': [
(self.render_gallery_index, (
@@ -301,7 +330,7 @@ class Galleries(Task, ImageProcessor):
dest_img_list,
img_titles,
thumbs,
- file_dep))],
+ img_metadata))],
'clean': True,
'uptodate': [utils.config_changed({
1: self.kw.copy(),
@@ -343,7 +372,14 @@ class Galleries(Task, ImageProcessor):
self.gallery_list = []
for input_folder, output_folder in self.kw['gallery_folders'].items():
for root, dirs, files in os.walk(input_folder, followlinks=True):
- self.gallery_list.append((root, input_folder, output_folder))
+ # If output folder is empty, the top-level gallery
+ # index will collide with the main page for the site.
+ # Don't generate the top-level gallery index in that
+ # case.
+ # FIXME: also ignore pages named index
+ if (output_folder or root != input_folder and
+ (not self.kw['disable_indexes'] and self.kw['index_path'] == '')):
+ self.gallery_list.append((root, input_folder, output_folder))
def create_galleries_paths(self):
"""Given a list of galleries, put their paths into self.gallery_links."""
@@ -395,12 +431,73 @@ class Galleries(Task, ImageProcessor):
'uptodate': [utils.config_changed(self.kw.copy(), 'nikola.plugins.task.galleries:mkdir')],
}
+ def find_metadata(self, gallery, lang):
+ """Search for a gallery metadata file.
+
+ If there is a metadata file for the gallery, use that to determine
+ captions and the order in which images shall be displayed in the
+ gallery. You only need to list the images if a specific ordering or
+ caption is required. The metadata file is YAML-formatted, with field
+ names of
+ #
+ name:
+ caption:
+ order:
+ #
+ If a numeric order value is specified, we use that directly, otherwise
+ we depend on how the library returns the information - which may or may not
+ be in the same order as in the file itself. Non-numeric ordering is not
+ supported. If no caption is specified, then we return an empty string.
+ Returns a string (localized filename), list (ordering), dict (captions),
+ dict (image metadata).
+ """
+ base_meta_path = os.path.join(gallery, "metadata.yml")
+ localized_meta_path = utils.get_translation_candidate(self.site.config,
+ base_meta_path, lang)
+ order = []
+ captions = {}
+ custom_metadata = {}
+ used_path = ""
+
+ if os.path.isfile(localized_meta_path):
+ used_path = localized_meta_path
+ elif os.path.isfile(base_meta_path):
+ used_path = base_meta_path
+ else:
+ return "", [], {}, {}
+
+ self.logger.debug("Using {0} for gallery {1}".format(
+ used_path, gallery))
+ with open(used_path, "r", encoding='utf-8-sig') as meta_file:
+ if YAML is None:
+ utils.req_missing(['ruamel.yaml'], 'use metadata.yml files for galleries')
+ yaml = YAML(typ='safe')
+ meta = yaml.load_all(meta_file)
+ for img in meta:
+ # load_all and safe_load_all both return None as their
+ # final element, so skip it
+ if not img:
+ continue
+ if 'name' in img:
+ img_name = img.pop('name')
+ if 'caption' in img and img['caption']:
+ captions[img_name] = img.pop('caption')
+
+ if 'order' in img and img['order'] is not None:
+ order.insert(img.pop('order'), img_name)
+ else:
+ order.append(img_name)
+ custom_metadata[img_name] = img
+ else:
+ self.logger.error("no 'name:' for ({0}) in {1}".format(
+ img, used_path))
+ return used_path, order, captions, custom_metadata
+
def parse_index(self, gallery, input_folder, output_folder):
"""Return a Post object if there is an index.txt."""
index_path = os.path.join(gallery, "index.txt")
- destination = os.path.join(
- self.kw["output_folder"], output_folder,
- os.path.relpath(gallery, input_folder))
+ destination = os.path.join(output_folder,
+ os.path.relpath(gallery, input_folder))
if os.path.isfile(index_path):
post = Post(
index_path,
@@ -408,15 +505,18 @@ class Galleries(Task, ImageProcessor):
destination,
False,
self.site.MESSAGES,
- 'story.tmpl',
- self.site.get_compiler(index_path)
+ 'page.tmpl',
+ self.site.get_compiler(index_path),
+ None,
+ self.site.metadata_extractors_by
)
# If this did not exist, galleries without a title in the
# index.txt file would be erroneously named `index`
# (warning: galleries titled index and filenamed differently
# may break)
- if post.title == 'index':
- post.title = os.path.split(gallery)[1]
+ if post.title() == 'index':
+ for lang in post.meta.keys():
+ post.meta[lang]['title'] = os.path.split(gallery)[1]
# Register the post (via #2417)
self.site.post_per_input_file[index_path] = post
else:
@@ -428,8 +528,8 @@ class Galleries(Task, ImageProcessor):
exclude_path = os.path.join(gallery_path, "exclude.meta")
try:
- f = open(exclude_path, 'r')
- excluded_image_name_list = f.read().split()
+ with open(exclude_path, 'r') as f:
+ excluded_image_name_list = f.read().split()
except IOError:
excluded_image_name_list = []
@@ -473,34 +573,26 @@ class Galleries(Task, ImageProcessor):
orig_dest_path = os.path.join(output_gallery, img_name)
yield utils.apply_filters({
'basename': self.name,
- 'name': thumb_path,
- 'file_dep': [img],
- 'targets': [thumb_path],
- 'actions': [
- (self.resize_image,
- (img, thumb_path, self.kw['thumbnail_size'], False, self.kw['preserve_exif_data'],
- self.kw['exif_whitelist']))
- ],
- 'clean': True,
- 'uptodate': [utils.config_changed({
- 1: self.kw['thumbnail_size']
- }, 'nikola.plugins.task.galleries:resize_thumb')],
- }, self.kw['filters'])
-
- yield utils.apply_filters({
- 'basename': self.name,
'name': orig_dest_path,
'file_dep': [img],
- 'targets': [orig_dest_path],
+ 'targets': [thumb_path, orig_dest_path],
'actions': [
(self.resize_image,
- (img, orig_dest_path, self.kw['max_image_size'], False, self.kw['preserve_exif_data'],
- self.kw['exif_whitelist']))
- ],
+ [img], {
+ 'dst_paths': [thumb_path, orig_dest_path],
+ 'max_sizes': [self.kw['thumbnail_size'], self.kw['max_image_size']],
+ 'bigger_panoramas': True,
+ 'preserve_exif_data': self.kw['preserve_exif_data'],
+ 'exif_whitelist': self.kw['exif_whitelist'],
+ 'preserve_icc_profiles': self.kw['preserve_icc_profiles']})],
'clean': True,
'uptodate': [utils.config_changed({
- 1: self.kw['max_image_size']
- }, 'nikola.plugins.task.galleries:resize_max')],
+ 1: self.kw['thumbnail_size'],
+ 2: self.kw['max_image_size'],
+ 3: self.kw['preserve_exif_data'],
+ 4: self.kw['exif_whitelist'],
+ 5: self.kw['preserve_icc_profiles'],
+ }, 'nikola.plugins.task.galleries:resize_thumb')],
}, self.kw['filters'])
def remove_excluded_image(self, img, input_folder):
@@ -546,7 +638,7 @@ class Galleries(Task, ImageProcessor):
img_list,
img_titles,
thumbs,
- file_dep):
+ img_metadata):
"""Build the gallery index."""
# The photo array needs to be created here, because
# it relies on thumbnails already being created on
@@ -568,7 +660,7 @@ class Galleries(Task, ImageProcessor):
else:
img_list, thumbs, img_titles = [], [], []
- photo_array = []
+ photo_info = OrderedDict()
for img, thumb, title in zip(img_list, thumbs, img_titles):
w, h = _image_size_cache.get(thumb, (None, None))
if w is None:
@@ -578,8 +670,11 @@ class Galleries(Task, ImageProcessor):
im = Image.open(thumb)
w, h = im.size
_image_size_cache[thumb] = w, h
- # Thumbs are files in output, we need URLs
- photo_array.append({
+ im.close()
+ # Use basename to avoid issues with multilingual sites (Issue #3078)
+ img_basename = os.path.basename(img)
+ photo_info[img_basename] = {
+ # Thumbs are files in output, we need URLs
'url': url_from_path(img),
'url_thumb': url_from_path(thumb),
'title': title,
@@ -587,9 +682,27 @@ class Galleries(Task, ImageProcessor):
'w': w,
'h': h
},
- })
+ 'width': w,
+ 'height': h
+ }
+ if img_basename in img_metadata:
+ photo_info[img_basename].update(img_metadata[img_basename])
+ photo_array = []
+ if context['order']:
+ for entry in context['order']:
+ photo_array.append(photo_info.pop(entry))
+ # Do we have any orphan entries from metadata.yml, or
+ # are the files from the gallery not listed in metadata.yml?
+ if photo_info:
+ for entry in photo_info:
+ photo_array.append(photo_info[entry])
+ else:
+ for entry in photo_info:
+ photo_array.append(photo_info[entry])
+
context['photo_array'] = photo_array
context['photo_array_json'] = json.dumps(photo_array, sort_keys=True)
+
self.site.render_template(template_name, output_name, context)
def gallery_rss(self, img_list, dest_img_list, img_titles, lang, permalink, output_path, title):
@@ -647,6 +760,6 @@ class Galleries(Task, ImageProcessor):
utils.makedirs(dst_dir)
with io.open(output_path, "w+", encoding="utf-8") as rss_file:
data = rss_obj.to_xml(encoding='utf-8')
- if isinstance(data, utils.bytes_str):
+ if isinstance(data, bytes):
data = data.decode('utf-8')
rss_file.write(data)
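The new find_metadata reads an optional per-gallery metadata.yml: a stream of YAML documents, each with a name plus optional caption and order. A small sketch of how such a file is consumed with ruamel.yaml, as the plugin does; the sample documents are made up, and ruamel.yaml must be installed:

import io
from ruamel.yaml import YAML

# Hypothetical metadata.yml for a gallery: one YAML document per image.
metadata_yml = """\
---
name: beach.jpg
caption: Sunset at the beach
order: 0
---
name: dunes.jpg
caption: Dunes
"""

yaml = YAML(typ='safe')
order, captions = [], {}
for img in yaml.load_all(io.StringIO(metadata_yml)):
    if not img:          # guard against an empty trailing document, as the plugin does
        continue
    name = img.pop('name')
    if img.get('caption'):
        captions[name] = img.pop('caption')
    if img.get('order') is not None:
        order.insert(img.pop('order'), name)
    else:
        order.append(name)
print(order, captions)
# ['beach.jpg', 'dunes.jpg'] {'beach.jpg': 'Sunset at the beach', 'dunes.jpg': 'Dunes'}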
diff --git a/nikola/plugins/task/gzip.plugin b/nikola/plugins/task/gzip.plugin
index d3a34ee..cc078b7 100644
--- a/nikola/plugins/task/gzip.plugin
+++ b/nikola/plugins/task/gzip.plugin
@@ -9,5 +9,5 @@ website = https://getnikola.com/
description = Create gzipped copies of files
[Nikola]
-plugincategory = Task
+PluginCategory = Task
diff --git a/nikola/plugins/task/gzip.py b/nikola/plugins/task/gzip.py
index 79a11dc..ebd427f 100644
--- a/nikola/plugins/task/gzip.py
+++ b/nikola/plugins/task/gzip.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
diff --git a/nikola/plugins/task/indexes.plugin b/nikola/plugins/task/indexes.plugin
index 553b5ad..f4a8f05 100644
--- a/nikola/plugins/task/indexes.plugin
+++ b/nikola/plugins/task/indexes.plugin
@@ -1,5 +1,5 @@
[Core]
-name = render_indexes
+name = classify_indexes
module = indexes
[Documentation]
@@ -9,5 +9,4 @@ website = https://getnikola.com/
description = Generates the blog's index pages.
[Nikola]
-plugincategory = Task
-
+PluginCategory = Taxonomy
diff --git a/nikola/plugins/task/indexes.py b/nikola/plugins/task/indexes.py
index 8ecd1de..20491fb 100644
--- a/nikola/plugins/task/indexes.py
+++ b/nikola/plugins/task/indexes.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -24,323 +24,114 @@
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-"""Render the blog indexes."""
+"""Render the blog's main index."""
-from __future__ import unicode_literals
-from collections import defaultdict
-import os
-try:
- from urlparse import urljoin
-except ImportError:
- from urllib.parse import urljoin # NOQA
-from nikola.plugin_categories import Task
-from nikola import utils
-from nikola.nikola import _enclosure
+from nikola.plugin_categories import Taxonomy
-class Indexes(Task):
- """Render the blog indexes."""
+class Indexes(Taxonomy):
+ """Classify for the blog's main index."""
- name = "render_indexes"
+ name = "classify_indexes"
- def set_site(self, site):
- """Set Nikola site."""
- self.number_of_pages = dict()
- self.number_of_pages_section = {lang: dict() for lang in site.config['TRANSLATIONS']}
- site.register_path_handler('index', self.index_path)
- site.register_path_handler('index_atom', self.index_atom_path)
- site.register_path_handler('section_index', self.index_section_path)
- site.register_path_handler('section_index_atom', self.index_section_atom_path)
- site.register_path_handler('section_index_rss', self.index_section_rss_path)
- return super(Indexes, self).set_site(site)
-
- def _get_filtered_posts(self, lang, show_untranslated_posts):
- """Return a filtered list of all posts for the given language.
-
- If show_untranslated_posts is True, will only include posts which
- are translated to the given language. Otherwise, returns all posts.
- """
- if show_untranslated_posts:
- return self.site.posts
- else:
- return [x for x in self.site.posts if x.is_translation_available(lang)]
-
- def _compute_number_of_pages(self, filtered_posts, posts_count):
- """Given a list of posts and the maximal number of posts per page, computes the number of pages needed."""
- return min(1, (len(filtered_posts) + posts_count - 1) // posts_count)
-
- def gen_tasks(self):
- """Render the blog indexes."""
- self.site.scan_posts()
- yield self.group_task()
-
- kw = {
- "translations": self.site.config['TRANSLATIONS'],
- "messages": self.site.MESSAGES,
- "output_folder": self.site.config['OUTPUT_FOLDER'],
- "feed_length": self.site.config['FEED_LENGTH'],
- "feed_links_append_query": self.site.config["FEED_LINKS_APPEND_QUERY"],
- "feed_teasers": self.site.config["FEED_TEASERS"],
- "feed_plain": self.site.config["FEED_PLAIN"],
- "filters": self.site.config['FILTERS'],
- "index_file": self.site.config['INDEX_FILE'],
- "show_untranslated_posts": self.site.config['SHOW_UNTRANSLATED_POSTS'],
- "index_display_post_count": self.site.config['INDEX_DISPLAY_POST_COUNT'],
- "indexes_title": self.site.config['INDEXES_TITLE'],
- "strip_indexes": self.site.config['STRIP_INDEXES'],
- "blog_title": self.site.config["BLOG_TITLE"],
- "generate_atom": self.site.config["GENERATE_ATOM"],
- "site_url": self.site.config["SITE_URL"],
- }
-
- template_name = "index.tmpl"
- for lang in kw["translations"]:
- def page_link(i, displayed_i, num_pages, force_addition, extension=None):
- feed = "_atom" if extension == ".atom" else ""
- return utils.adjust_name_for_index_link(self.site.link("index" + feed, None, lang), i, displayed_i,
- lang, self.site, force_addition, extension)
-
- def page_path(i, displayed_i, num_pages, force_addition, extension=None):
- feed = "_atom" if extension == ".atom" else ""
- return utils.adjust_name_for_index_path(self.site.path("index" + feed, None, lang), i, displayed_i,
- lang, self.site, force_addition, extension)
-
- filtered_posts = self._get_filtered_posts(lang, kw["show_untranslated_posts"])
-
- indexes_title = kw['indexes_title'](lang) or kw['blog_title'](lang)
- self.number_of_pages[lang] = self._compute_number_of_pages(filtered_posts, kw['index_display_post_count'])
-
- context = {}
- context["pagekind"] = ["main_index", "index"]
-
- yield self.site.generic_index_renderer(lang, filtered_posts, indexes_title, template_name, context, kw, 'render_indexes', page_link, page_path)
-
- if self.site.config['POSTS_SECTIONS']:
- index_len = len(kw['index_file'])
-
- groups = defaultdict(list)
- for p in filtered_posts:
- groups[p.section_slug(lang)].append(p)
-
- # don't build sections when there is only one, aka. default setups
- if not len(groups.items()) > 1:
- continue
-
- for section_slug, post_list in groups.items():
- self.number_of_pages_section[lang][section_slug] = self._compute_number_of_pages(post_list, kw['index_display_post_count'])
-
- def cat_link(i, displayed_i, num_pages, force_addition, extension=None):
- feed = "_atom" if extension == ".atom" else ""
- return utils.adjust_name_for_index_link(self.site.link("section_index" + feed, section_slug, lang), i, displayed_i,
- lang, self.site, force_addition, extension)
-
- def cat_path(i, displayed_i, num_pages, force_addition, extension=None):
- feed = "_atom" if extension == ".atom" else ""
- return utils.adjust_name_for_index_path(self.site.path("section_index" + feed, section_slug, lang), i, displayed_i,
- lang, self.site, force_addition, extension)
+ classification_name = "index"
+ overview_page_variable_name = None
+ more_than_one_classifications_per_post = False
+ has_hierarchy = False
+ show_list_as_index = True
+ template_for_single_list = "index.tmpl"
+ template_for_classification_overview = None
+ apply_to_posts = True
+ apply_to_pages = False
+ omit_empty_classifications = False
+ path_handler_docstrings = {
+ 'index_index': False,
+ 'index': """Link to a numbered index.
- context = {}
+Example:
- short_destination = os.path.join(section_slug, kw['index_file'])
- link = short_destination.replace('\\', '/')
- if kw['strip_indexes'] and link[-(1 + index_len):] == '/' + kw['index_file']:
- link = link[:-index_len]
- context["permalink"] = link
- context["pagekind"] = ["section_page"]
- context["description"] = self.site.config['POSTS_SECTION_DESCRIPTIONS'](lang)[section_slug] if section_slug in self.site.config['POSTS_SECTION_DESCRIPTIONS'](lang) else ""
+link://index/3 => /index-3.html""",
+ 'index_atom': """Link to a numbered Atom index.
- if self.site.config["POSTS_SECTION_ARE_INDEXES"]:
- context["pagekind"].append("index")
- posts_section_title = self.site.config['POSTS_SECTION_TITLE'](lang)
+Example:
- section_title = None
- if type(posts_section_title) is dict:
- if section_slug in posts_section_title:
- section_title = posts_section_title[section_slug]
- elif type(posts_section_title) is str:
- section_title = posts_section_title
- if not section_title:
- section_title = post_list[0].section_name(lang)
- section_title = section_title.format(name=post_list[0].section_name(lang))
+link://index_atom/3 => /index-3.atom""",
+ 'index_rss': """A link to the RSS feed path.
- task = self.site.generic_index_renderer(lang, post_list, section_title, "sectionindex.tmpl", context, kw, self.name, cat_link, cat_path)
- else:
- context["pagekind"].append("list")
- output_name = os.path.join(kw['output_folder'], section_slug, kw['index_file'])
- task = self.site.generic_post_list_renderer(lang, post_list, output_name, "list.tmpl", kw['filters'], context)
- task['uptodate'] = [utils.config_changed(kw, 'nikola.plugins.task.indexes')]
- task['basename'] = self.name
- yield task
+Example:
- # RSS feed for section
- deps = []
- deps_uptodate = []
- if kw["show_untranslated_posts"]:
- posts = post_list[:kw['feed_length']]
- else:
- posts = [x for x in post_list if x.is_translation_available(lang)][:kw['feed_length']]
- for post in posts:
- deps += post.deps(lang)
- deps_uptodate += post.deps_uptodate(lang)
+link://rss => /blog/rss.xml""",
+ }
- feed_url = urljoin(self.site.config['BASE_URL'], self.site.link('section_index_rss', section_slug, lang).lstrip('/'))
- output_name = os.path.join(kw['output_folder'], self.site.path('section_index_rss', section_slug, lang).lstrip(os.sep))
- task = {
- 'basename': self.name,
- 'name': os.path.normpath(output_name),
- 'file_dep': deps,
- 'targets': [output_name],
- 'actions': [(utils.generic_rss_renderer,
- (lang, kw["blog_title"](lang), kw["site_url"],
- context["description"], posts, output_name,
- kw["feed_teasers"], kw["feed_plain"], kw['feed_length'], feed_url,
- _enclosure, kw["feed_links_append_query"]))],
-
- 'task_dep': ['render_posts'],
- 'clean': True,
- 'uptodate': [utils.config_changed(kw, 'nikola.plugins.indexes')] + deps_uptodate,
- }
- yield task
-
- if not self.site.config["PAGE_INDEX"]:
- return
+ def set_site(self, site):
+ """Set Nikola site."""
+ # Redirect automatically generated 'index_rss' path handler to 'rss' for compatibility with old rss plugin
+ site.register_path_handler('rss', lambda name, lang: site.path_handlers['index_rss'](name, lang))
+ site.path_handlers['rss'].__doc__ = """A link to the RSS feed path.
+
+Example:
+
+ link://rss => /blog/rss.xml
+ """.strip()
+ return super().set_site(site)
+
+ def get_implicit_classifications(self, lang):
+ """Return a list of classification strings which should always appear in posts_per_classification."""
+ return [""]
+
+ def classify(self, post, lang):
+ """Classify the given post for the given language."""
+ return [""]
+
+ def get_classification_friendly_name(self, classification, lang, only_last_component=False):
+ """Extract a friendly name from the classification."""
+ return self.site.config["BLOG_TITLE"](lang)
+
+ def get_path(self, classification, lang, dest_type='page'):
+ """Return a path for the given classification."""
+ if dest_type == 'rss':
+ return [
+ self.site.config['RSS_PATH'](lang),
+ self.site.config['RSS_FILENAME_BASE'](lang)
+ ], 'auto'
+ if dest_type == 'feed':
+ return [
+ self.site.config['ATOM_PATH'](lang),
+ self.site.config['ATOM_FILENAME_BASE'](lang)
+ ], 'auto'
+ page_number = None
+ if dest_type == 'page':
+ # Interpret argument as page number
+ try:
+ page_number = int(classification)
+ except (ValueError, TypeError):
+ pass
+ return [self.site.config['INDEX_PATH'](lang)], 'always', page_number
+
+ def provide_context_and_uptodate(self, classification, lang, node=None):
+ """Provide data for the context and the uptodate list for the list of the given classifiation."""
kw = {
- "translations": self.site.config['TRANSLATIONS'],
- "post_pages": self.site.config["post_pages"],
- "output_folder": self.site.config['OUTPUT_FOLDER'],
- "filters": self.site.config['FILTERS'],
- "index_file": self.site.config['INDEX_FILE'],
- "strip_indexes": self.site.config['STRIP_INDEXES'],
+ "show_untranslated_posts": self.site.config["SHOW_UNTRANSLATED_POSTS"],
}
- template_name = "list.tmpl"
- index_len = len(kw['index_file'])
- for lang in kw["translations"]:
- # Need to group by folder to avoid duplicated tasks (Issue #758)
- # Group all pages by path prefix
- groups = defaultdict(list)
- for p in self.site.timeline:
- if not p.is_post:
- destpath = p.destination_path(lang)
- if destpath[-(1 + index_len):] == '/' + kw['index_file']:
- destpath = destpath[:-(1 + index_len)]
- dirname = os.path.dirname(destpath)
- groups[dirname].append(p)
- for dirname, post_list in groups.items():
- context = {}
- context["items"] = []
- should_render = True
- output_name = os.path.join(kw['output_folder'], dirname, kw['index_file'])
- short_destination = os.path.join(dirname, kw['index_file'])
- link = short_destination.replace('\\', '/')
- if kw['strip_indexes'] and link[-(1 + index_len):] == '/' + kw['index_file']:
- link = link[:-index_len]
- context["permalink"] = link
- context["pagekind"] = ["list"]
- if dirname == "/":
- context["pagekind"].append("front_page")
-
- for post in post_list:
- # If there is an index.html pending to be created from
- # a page, do not generate the PAGE_INDEX
- if post.destination_path(lang) == short_destination:
- should_render = False
- else:
- context["items"].append((post.title(lang),
- post.permalink(lang),
- None))
-
- if should_render:
- task = self.site.generic_post_list_renderer(lang, post_list,
- output_name,
- template_name,
- kw['filters'],
- context)
- task['uptodate'] = task['uptodate'] + [utils.config_changed(kw, 'nikola.plugins.task.indexes')]
- task['basename'] = self.name
- yield task
-
- def index_path(self, name, lang, is_feed=False):
- """Link to a numbered index.
-
- Example:
-
- link://index/3 => /index-3.html
- """
- extension = None
- if is_feed:
- extension = ".atom"
- index_file = os.path.splitext(self.site.config['INDEX_FILE'])[0] + extension
- else:
- index_file = self.site.config['INDEX_FILE']
- if lang in self.number_of_pages:
- number_of_pages = self.number_of_pages[lang]
- else:
- number_of_pages = self._compute_number_of_pages(self._get_filtered_posts(lang, self.site.config['SHOW_UNTRANSLATED_POSTS']), self.site.config['INDEX_DISPLAY_POST_COUNT'])
- self.number_of_pages[lang] = number_of_pages
- return utils.adjust_name_for_index_path_list([_f for _f in [self.site.config['TRANSLATIONS'][lang],
- self.site.config['INDEX_PATH'],
- index_file] if _f],
- name,
- utils.get_displayed_page_number(name, number_of_pages, self.site),
- lang,
- self.site,
- extension=extension)
-
- def index_section_path(self, name, lang, is_feed=False, is_rss=False):
- """Link to the index for a section.
-
- Example:
-
- link://section_index/cars => /cars/index.html
- """
- extension = None
-
- if is_feed:
- extension = ".atom"
- index_file = os.path.splitext(self.site.config['INDEX_FILE'])[0] + extension
- elif is_rss:
- index_file = 'rss.xml'
- else:
- index_file = self.site.config['INDEX_FILE']
- if name in self.number_of_pages_section[lang]:
- number_of_pages = self.number_of_pages_section[lang][name]
- else:
- posts = [post for post in self._get_filtered_posts(lang, self.site.config['SHOW_UNTRANSLATED_POSTS']) if post.section_slug(lang) == name]
- number_of_pages = self._compute_number_of_pages(posts, self.site.config['INDEX_DISPLAY_POST_COUNT'])
- self.number_of_pages_section[lang][name] = number_of_pages
- return utils.adjust_name_for_index_path_list([_f for _f in [self.site.config['TRANSLATIONS'][lang],
- name,
- index_file] if _f],
- None,
- utils.get_displayed_page_number(None, number_of_pages, self.site),
- lang,
- self.site,
- extension=extension)
-
- def index_atom_path(self, name, lang):
- """Link to a numbered Atom index.
-
- Example:
-
- link://index_atom/3 => /index-3.atom
- """
- return self.index_path(name, lang, is_feed=True)
-
- def index_section_atom_path(self, name, lang):
- """Link to the Atom index for a section.
-
- Example:
-
- link://section_index_atom/cars => /cars/index.atom
- """
- return self.index_section_path(name, lang, is_feed=True)
+ context = {
+ "title": self.site.config["INDEXES_TITLE"](lang) or self.site.config["BLOG_TITLE"](lang),
+ "description": self.site.config["BLOG_DESCRIPTION"](lang),
+ "pagekind": ["main_index", "index"],
+ "featured": [p for p in self.site.posts if p.post_status == 'featured' and
+ (lang in p.translated_to or kw["show_untranslated_posts"])],
+ }
+ kw.update(context)
+ return context, kw
- def index_section_rss_path(self, name, lang):
- """Link to the RSS feed for a section.
+ def should_generate_classification_page(self, classification, post_list, lang):
+ """Only generates list of posts for classification if this function returns True."""
+ return not self.site.config["DISABLE_INDEXES"]
- Example:
+ def should_generate_atom_for_classification_page(self, classification, post_list, lang):
+ """Only generates Atom feed for list of posts for classification if this function returns True."""
+ return not self.site.config["DISABLE_MAIN_ATOM_FEED"]
- link://section_index_rss/cars => /cars/rss.xml
- """
- return self.index_section_path(name, lang, is_rss=True)
+ def should_generate_rss_for_classification_page(self, classification, post_list, lang):
+ """Only generates RSS feed for list of posts for classification if this function returns True."""
+ return not self.site.config["DISABLE_MAIN_RSS_FEED"]
diff --git a/nikola/plugins/task/listings.plugin b/nikola/plugins/task/listings.plugin
index 8fc2e2d..03b67d2 100644
--- a/nikola/plugins/task/listings.plugin
+++ b/nikola/plugins/task/listings.plugin
@@ -9,5 +9,5 @@ website = https://getnikola.com/
description = Render code listings into output
[Nikola]
-plugincategory = Task
+PluginCategory = Task
diff --git a/nikola/plugins/task/listings.py b/nikola/plugins/task/listings.py
index e694aa5..c946313 100644
--- a/nikola/plugins/task/listings.py
+++ b/nikola/plugins/task/listings.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,15 +26,12 @@
"""Render code listings."""
-from __future__ import unicode_literals, print_function
-
-from collections import defaultdict
import os
-import lxml.html
+from collections import defaultdict
+import natsort
from pygments import highlight
from pygments.lexers import get_lexer_for_filename, guess_lexer, TextLexer
-import natsort
from nikola.plugin_categories import Task
from nikola import utils
@@ -92,7 +89,7 @@ class Listings(Task):
self.proper_input_file_mapping = {}
for input_folder, output_folder in self.kw['listings_folders'].items():
- for root, dirs, files in os.walk(input_folder, followlinks=True):
+ for root, _, files in os.walk(input_folder, followlinks=True):
# Compute relative path; can't use os.path.relpath() here as it returns "." instead of ""
rel_path = root[len(input_folder):]
if rel_path[:1] == os.sep:
@@ -104,7 +101,7 @@ class Listings(Task):
# Register file names in the mapping.
self.register_output_name(input_folder, rel_name, rel_output_name)
- return super(Listings, self).set_site(site)
+ return super().set_site(site)
def gen_tasks(self):
"""Render pretty code listings."""
@@ -115,24 +112,31 @@ class Listings(Task):
needs_ipython_css = False
if in_name and in_name.endswith('.ipynb'):
# Special handling: render ipynbs in listings (Issue #1900)
- ipynb_compiler = self.site.plugin_manager.getPluginByName("ipynb", "PageCompiler").plugin_object
- ipynb_raw = ipynb_compiler.compile_html_string(in_name, True)
- ipynb_html = lxml.html.fromstring(ipynb_raw)
- # The raw HTML contains garbage (scripts and styles), we can’t leave it in
- code = lxml.html.tostring(ipynb_html.xpath('//*[@id="notebook"]')[0], encoding='unicode')
+ ipynb_plugin = self.site.plugin_manager.getPluginByName("ipynb", "PageCompiler")
+ if ipynb_plugin is None:
+ msg = "To use .ipynb files as listings, you must set up the Jupyter compiler in COMPILERS and POSTS/PAGES."
+ utils.LOGGER.error(msg)
+ raise ValueError(msg)
+
+ ipynb_compiler = ipynb_plugin.plugin_object
+ with open(in_name, "r", encoding="utf-8-sig") as in_file:
+ nb_json = ipynb_compiler._nbformat_read(in_file)
+ code = ipynb_compiler._compile_string(nb_json)
title = os.path.basename(in_name)
needs_ipython_css = True
elif in_name:
- with open(in_name, 'r') as fd:
+ with open(in_name, 'r', encoding='utf-8-sig') as fd:
try:
lexer = get_lexer_for_filename(in_name)
- except:
+ except Exception:
try:
lexer = guess_lexer(fd.read())
- except:
+ except Exception:
lexer = TextLexer()
fd.seek(0)
- code = highlight(fd.read(), lexer, utils.NikolaPygmentsHTML(in_name))
+ code = highlight(
+ fd.read(), lexer,
+ utils.NikolaPygmentsHTML(in_name, linenos='table'))
title = os.path.basename(in_name)
else:
code = ''
@@ -184,7 +188,7 @@ class Listings(Task):
uptodate = {'c': self.site.GLOBAL_CONTEXT}
for k, v in self.site.GLOBAL_CONTEXT['template_hooks'].items():
- uptodate['||template_hooks|{0}||'.format(k)] = v._items
+ uptodate['||template_hooks|{0}||'.format(k)] = v.calculate_deps()
for k in self.site._GLOBAL_CONTEXT_TRANSLATABLE:
uptodate[k] = self.site.GLOBAL_CONTEXT[k](self.kw['default_lang'])
@@ -220,6 +224,8 @@ class Listings(Task):
'clean': True,
}, self.kw["filters"])
for f in files:
+ if f == '.DS_Store':
+ continue
ext = os.path.splitext(f)[-1]
if ext in ignored_extensions:
continue
@@ -257,7 +263,7 @@ class Listings(Task):
}, self.kw["filters"])
def listing_source_path(self, name, lang):
- """A link to the source code for a listing.
+ """Return a link to the source code for a listing.
It will try to use the file name if it's not ambiguous, or the file path.
@@ -273,7 +279,7 @@ class Listings(Task):
return result
def listing_path(self, namep, lang):
- """A link to a listing.
+ """Return a link to a listing.
It will try to use the file name if it's not ambiguous, or the file path.
@@ -297,7 +303,7 @@ class Listings(Task):
utils.LOGGER.error("Using non-unique listing name '{0}', which maps to more than one listing name ({1})!".format(name, str(self.improper_input_file_mapping[name])))
return ["ERROR"]
if len(self.site.config['LISTINGS_FOLDERS']) > 1:
- utils.LOGGER.notice("Using listings names in site.link() without input directory prefix while configuration's LISTINGS_FOLDERS has more than one entry.")
+ utils.LOGGER.warning("Using listings names in site.link() without input directory prefix while configuration's LISTINGS_FOLDERS has more than one entry.")
name = list(self.improper_input_file_mapping[name])[0]
break
else:
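Two behavioural notes on the listings hunk: sources are now read with the utf-8-sig codec (so a BOM does not leak into the rendered output), and Pygments renders line numbers in table mode. A standalone sketch of the same lexer fallback chain, using plain Pygments rather than Nikola's NikolaPygmentsHTML formatter:

    # Standalone sketch of the highlighting fallback used above (plain Pygments).
    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from pygments.lexers import TextLexer, get_lexer_for_filename, guess_lexer

    def render_listing(path):
        with open(path, 'r', encoding='utf-8-sig') as fd:
            source = fd.read()
        try:
            lexer = get_lexer_for_filename(path)
        except Exception:
            try:
                lexer = guess_lexer(source)
            except Exception:
                lexer = TextLexer()
        # linenos='table' puts the line numbers in a separate table column.
        return highlight(source, lexer, HtmlFormatter(linenos='table'))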
diff --git a/nikola/plugins/task/page_index.plugin b/nikola/plugins/task/page_index.plugin
new file mode 100644
index 0000000..42c9288
--- /dev/null
+++ b/nikola/plugins/task/page_index.plugin
@@ -0,0 +1,12 @@
+[Core]
+name = classify_page_index
+module = page_index
+
+[Documentation]
+author = Roberto Alsina
+version = 1.0
+website = https://getnikola.com/
+description = Generates the page index.
+
+[Nikola]
+PluginCategory = Taxonomy
diff --git a/nikola/plugins/task/page_index.py b/nikola/plugins/task/page_index.py
new file mode 100644
index 0000000..e7b33cf
--- /dev/null
+++ b/nikola/plugins/task/page_index.py
@@ -0,0 +1,111 @@
+# -*- coding: utf-8 -*-
+
+# Copyright © 2012-2020 Roberto Alsina and others.
+
+# Permission is hereby granted, free of charge, to any
+# person obtaining a copy of this software and associated
+# documentation files (the "Software"), to deal in the
+# Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the
+# Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice
+# shall be included in all copies or substantial portions of
+# the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
+# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+"""Render the page index."""
+
+
+from nikola.plugin_categories import Taxonomy
+
+
+class PageIndex(Taxonomy):
+ """Classify for the page index."""
+
+ name = "classify_page_index"
+
+ classification_name = "page_index_folder"
+ overview_page_variable_name = "page_folder"
+ more_than_one_classifications_per_post = False
+ has_hierarchy = True
+ include_posts_from_subhierarchies = False
+ show_list_as_index = False
+ template_for_single_list = "list.tmpl"
+ template_for_classification_overview = None
+ always_disable_rss = True
+ always_disable_atom = True
+ apply_to_posts = False
+ apply_to_pages = True
+ omit_empty_classifications = True
+ path_handler_docstrings = {
+ 'page_index_folder_index': None,
+ 'page_index_folder': None,
+ 'page_index_folder_atom': None,
+ 'page_index_folder_rss': None,
+ }
+
+ def is_enabled(self, lang=None):
+ """Return True if this taxonomy is enabled, or False otherwise."""
+ return self.site.config["PAGE_INDEX"]
+
+ def classify(self, post, lang):
+ """Classify the given post for the given language."""
+ destpath = post.destination_path(lang, sep='/')
+ if post.has_pretty_url(lang):
+ idx = '/index.html'
+ if destpath.endswith(idx):
+ destpath = destpath[:-len(idx)]
+ i = destpath.rfind('/')
+ return [destpath[:i] if i >= 0 else '']
+
+ def get_classification_friendly_name(self, dirname, lang, only_last_component=False):
+ """Extract a friendly name from the classification."""
+ return dirname
+
+ def get_path(self, hierarchy, lang, dest_type='page'):
+ """Return a path for the given classification."""
+ return hierarchy, 'always'
+
+ def extract_hierarchy(self, dirname):
+ """Given a classification, return a list of parts in the hierarchy."""
+ return dirname.split('/') if dirname else []
+
+ def recombine_classification_from_hierarchy(self, hierarchy):
+ """Given a list of parts in the hierarchy, return the classification string."""
+ return '/'.join(hierarchy)
+
+ def provide_context_and_uptodate(self, dirname, lang, node=None):
+ """Provide data for the context and the uptodate list for the list of the given classifiation."""
+ kw = {
+ "translations": self.site.config['TRANSLATIONS'],
+ "filters": self.site.config['FILTERS'],
+ }
+ context = {
+ "title": self.site.config['BLOG_TITLE'](lang),
+ "pagekind": ["list", "front_page", "page_index"] if dirname == '' else ["list", "page_index"],
+ "kind": "page_index_folder",
+ "classification": dirname,
+ "has_no_feeds": True,
+ }
+ kw.update(context)
+ return context, kw
+
+ def should_generate_classification_page(self, dirname, post_list, lang):
+ """Only generates list of posts for classification if this function returns True."""
+ short_destination = dirname + '/' + self.site.config['INDEX_FILE']
+ for post in post_list:
+ # If there is an index.html pending to be created from a page, do not generate the page index.
+ if post.destination_path(lang, sep='/') == short_destination:
+ return False
+ return True
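classify() above reduces a page's destination path to its parent folder, first trimming a trailing /index.html when the page uses a pretty URL. The same string handling in isolation (the sample paths are made up):

    # The path arithmetic from classify(), extracted for illustration.
    def folder_for(destpath, pretty_url):
        if pretty_url and destpath.endswith('/index.html'):
            destpath = destpath[:-len('/index.html')]
        i = destpath.rfind('/')
        return destpath[:i] if i >= 0 else ''

    assert folder_for('docs/handbook/index.html', True) == 'docs'
    assert folder_for('docs/handbook.html', False) == 'docs'
    assert folder_for('about.html', False) == ''   # top-level pages map to ''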
diff --git a/nikola/plugins/task/pages.plugin b/nikola/plugins/task/pages.plugin
index 1bdc7f4..a04cd05 100644
--- a/nikola/plugins/task/pages.plugin
+++ b/nikola/plugins/task/pages.plugin
@@ -9,5 +9,5 @@ website = https://getnikola.com/
description = Create pages in the output.
[Nikola]
-plugincategory = Task
+PluginCategory = Task
diff --git a/nikola/plugins/task/pages.py b/nikola/plugins/task/pages.py
index 7d8287b..0c0bdd2 100644
--- a/nikola/plugins/task/pages.py
+++ b/nikola/plugins/task/pages.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,9 +26,10 @@
"""Render pages into output."""
-from __future__ import unicode_literals
+import os
+
from nikola.plugin_categories import Task
-from nikola.utils import config_changed
+from nikola.utils import config_changed, LOGGER
class RenderPages(Task):
@@ -47,6 +48,13 @@ class RenderPages(Task):
}
self.site.scan_posts()
yield self.group_task()
+ index_paths = {}
+ for lang in kw["translations"]:
+ index_paths[lang] = False
+ if not self.site.config["DISABLE_INDEXES"]:
+ index_paths[lang] = os.path.normpath(os.path.join(self.site.config['OUTPUT_FOLDER'],
+ self.site.path('index', '', lang=lang)))
+
for lang in kw["translations"]:
for post in self.site.timeline:
if not kw["show_untranslated_posts"] and not post.is_translation_available(lang):
@@ -56,6 +64,12 @@ class RenderPages(Task):
else:
context = {'pagekind': ['story_page', 'page_page']}
for task in self.site.generic_page_renderer(lang, post, kw["filters"], context):
+ if task['name'] == index_paths[lang]:
+ # Issue 3022
+ LOGGER.error(
+ "Post {0!r}: output path ({1}) conflicts with the blog index ({2}). "
+ "Please change INDEX_PATH or disable index generation.".format(
+ post.source_path, task['name'], index_paths[lang]))
task['uptodate'] = task['uptodate'] + [config_changed(kw, 'nikola.plugins.task.pages')]
task['basename'] = self.name
task['task_dep'] = ['render_posts']
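The new check in pages.py flags the Issue 3022 situation where a page renders to the same file as the blog index. A hypothetical conf.py fragment showing how the conflict typically arises and the two fixes the error message suggests (paths are examples only):

    # Hypothetical conf.py fragment.
    PAGES = (
        ("pages/*.rst", "", "page.tmpl"),   # pages/index.rst -> output/index.html
    )
    # Either move the generated blog index out of the way ...
    INDEX_PATH = "blog"
    # ... or turn index generation off entirely:
    # DISABLE_INDEXES = True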
diff --git a/nikola/plugins/task/posts.plugin b/nikola/plugins/task/posts.plugin
index c9578bc..6893472 100644
--- a/nikola/plugins/task/posts.plugin
+++ b/nikola/plugins/task/posts.plugin
@@ -9,5 +9,5 @@ website = https://getnikola.com/
description = Create HTML fragments out of posts.
[Nikola]
-plugincategory = Task
+PluginCategory = Task
diff --git a/nikola/plugins/task/posts.py b/nikola/plugins/task/posts.py
index fe10c5f..5f48165 100644
--- a/nikola/plugins/task/posts.py
+++ b/nikola/plugins/task/posts.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,11 +26,11 @@
"""Build HTML fragments from metadata and text."""
-from copy import copy
import os
+from copy import copy
from nikola.plugin_categories import Task
-from nikola import filters, utils
+from nikola import utils
def update_deps(post, lang, task):
@@ -85,11 +85,12 @@ class RenderPosts(Task):
deps_dict[k] = self.site.config.get(k)
dest = post.translated_base_path(lang)
file_dep = [p for p in post.fragment_deps(lang) if not p.startswith("####MAGIC####")]
+ extra_targets = post.compiler.get_extra_targets(post, lang, dest)
task = {
'basename': self.name,
'name': dest,
'file_dep': file_dep,
- 'targets': [dest],
+ 'targets': [dest] + extra_targets,
'actions': [(post.compile, (lang, )),
(update_deps, (post, lang, )),
],
@@ -107,12 +108,9 @@ class RenderPosts(Task):
for i, f in enumerate(ff):
if not f:
continue
- if f.startswith('filters.'): # A function from the filters module
- f = f[8:]
- try:
- flist.append(getattr(filters, f))
- except AttributeError:
- pass
+ _f = self.site.filters.get(f)
+ if _f is not None: # A registered filter
+ flist.append(_f)
else:
flist.append(f)
yield utils.apply_filters(task, {os.path.splitext(dest)[-1]: flist})
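The filter lookup in posts.py now resolves names through self.site.filters, the registry of filters known to the site, instead of poking attributes on the filters module; strings that are not registered names are still passed through untouched. A sketch of the FILTERS setting this code consumes, with commonly shown example values:

    # Hypothetical conf.py fragment: values may be registered filter names
    # (resolved via site.filters) or external commands (left as-is).
    FILTERS = {
        ".html": ["filters.typogrify"],   # registered filter, looked up by name
        ".js": ["yui-compressor %s"],     # shell command, passed through unchanged
    }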
diff --git a/nikola/plugins/task/py3_switch.plugin b/nikola/plugins/task/py3_switch.plugin
deleted file mode 100644
index b0014e1..0000000
--- a/nikola/plugins/task/py3_switch.plugin
+++ /dev/null
@@ -1,13 +0,0 @@
-[Core]
-name = py3_switch
-module = py3_switch
-
-[Documentation]
-author = Roberto Alsina
-version = 1.0
-website = https://getnikola.com/
-description = Beg the user to switch to Python 3
-
-[Nikola]
-plugincategory = Task
-
diff --git a/nikola/plugins/task/py3_switch.py b/nikola/plugins/task/py3_switch.py
deleted file mode 100644
index 2ff4e2d..0000000
--- a/nikola/plugins/task/py3_switch.py
+++ /dev/null
@@ -1,103 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright © 2012-2016 Roberto Alsina and others.
-
-# Permission is hereby granted, free of charge, to any
-# person obtaining a copy of this software and associated
-# documentation files (the "Software"), to deal in the
-# Software without restriction, including without limitation
-# the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the
-# Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice
-# shall be included in all copies or substantial portions of
-# the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
-# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
-# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
-# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
-# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-"""Beg the user to switch to python 3."""
-
-import datetime
-import os
-import random
-import sys
-
-import doit.tools
-
-from nikola.utils import get_logger, STDERR_HANDLER
-from nikola.plugin_categories import LateTask
-
-PY2_AND_NO_PY3_WARNING = """Nikola is going to deprecate Python 2 support in 2016. Your current
-version will continue to work, but please consider upgrading to Python 3.
-
-Please check http://bit.ly/1FKEsiX for details.
-"""
-PY2_WARNING = """Nikola is going to deprecate Python 2 support in 2016. You already have Python 3
-available in your system. Why not switch?
-
-Please check http://bit.ly/1FKEsiX for details.
-"""
-PY2_BARBS = [
- "Python 2 has been deprecated for years. Stop clinging to your long gone youth and switch to Python3.",
- "Python 2 is the safety blanket of languages. Be a big kid and switch to Python 3",
- "Python 2 is old and busted. Python 3 is the new hotness.",
- "Nice unicode you have there, would be a shame something happened to it.. switch to python 3!.",
- "Don't get in the way of progress! Upgrade to Python 3 and save a developer's mind today!",
- "Winners don't use Python 2 -- Signed: The FBI",
- "Python 2? What year is it?",
- "I just wanna tell you how I'm feeling\n"
- "Gotta make you understand\n"
- "Never gonna give you up [But Python 2 has to go]",
- "The year 2009 called, and they want their Python 2.7 back.",
-]
-
-
-LOGGER = get_logger('Nikola', STDERR_HANDLER)
-
-
-def has_python_3():
- """Check if python 3 is available."""
- if 'win' in sys.platform:
- py_bin = 'py.exe'
- else:
- py_bin = 'python3'
- for path in os.environ["PATH"].split(os.pathsep):
- if os.access(os.path.join(path, py_bin), os.X_OK):
- return True
- return False
-
-
-class Py3Switch(LateTask):
- """Beg the user to switch to python 3."""
-
- name = "_switch to py3"
-
- def gen_tasks(self):
- """Beg the user to switch to python 3."""
- def give_warning():
- if sys.version_info[0] == 3:
- return
- if has_python_3():
- LOGGER.warn(random.choice(PY2_BARBS))
- LOGGER.warn(PY2_WARNING)
- else:
- LOGGER.warn(PY2_AND_NO_PY3_WARNING)
-
- task = {
- 'basename': self.name,
- 'name': 'please!',
- 'actions': [give_warning],
- 'clean': True,
- 'uptodate': [doit.tools.timeout(datetime.timedelta(days=3))]
- }
-
- return task
diff --git a/nikola/plugins/task/redirect.plugin b/nikola/plugins/task/redirect.plugin
index c5a3042..57bd0c0 100644
--- a/nikola/plugins/task/redirect.plugin
+++ b/nikola/plugins/task/redirect.plugin
@@ -9,5 +9,5 @@ website = https://getnikola.com/
description = Create redirect pages.
[Nikola]
-plugincategory = Task
+PluginCategory = Task
diff --git a/nikola/plugins/task/redirect.py b/nikola/plugins/task/redirect.py
index b170b81..a89fbd0 100644
--- a/nikola/plugins/task/redirect.py
+++ b/nikola/plugins/task/redirect.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,7 +26,6 @@
"""Generate redirections."""
-from __future__ import unicode_literals
import os
@@ -45,12 +44,15 @@ class Redirect(Task):
'redirections': self.site.config['REDIRECTIONS'],
'output_folder': self.site.config['OUTPUT_FOLDER'],
'filters': self.site.config['FILTERS'],
+ 'index_file': self.site.config['INDEX_FILE'],
}
yield self.group_task()
if kw['redirections']:
for src, dst in kw["redirections"]:
src_path = os.path.join(kw["output_folder"], src.lstrip('/'))
+ if src_path.endswith("/"):
+ src_path += kw['index_file']
yield utils.apply_filters({
'basename': self.name,
'name': src_path,
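redirect.py now appends INDEX_FILE to any redirect source that ends in a slash, so directory-style sources produce a real index file on disk. A hypothetical conf.py fragment (the URLs are examples):

    # Hypothetical conf.py fragment.
    REDIRECTIONS = [
        ("old/section/", "/new/section/"),    # written as output/old/section/index.html
        ("old-page.html", "/new-page.html"),  # unchanged behaviour
    ]
    INDEX_FILE = "index.html"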
diff --git a/nikola/plugins/task/robots.plugin b/nikola/plugins/task/robots.plugin
index 7ae56c6..51f7781 100644
--- a/nikola/plugins/task/robots.plugin
+++ b/nikola/plugins/task/robots.plugin
@@ -9,5 +9,5 @@ website = https://getnikola.com/
description = Generate /robots.txt exclusion file and promote sitemap.
[Nikola]
-plugincategory = Task
+PluginCategory = Task
diff --git a/nikola/plugins/task/robots.py b/nikola/plugins/task/robots.py
index 8537fc8..627d436 100644
--- a/nikola/plugins/task/robots.py
+++ b/nikola/plugins/task/robots.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,13 +26,9 @@
"""Generate a robots.txt file."""
-from __future__ import print_function, absolute_import, unicode_literals
import io
import os
-try:
- from urlparse import urljoin, urlparse
-except ImportError:
- from urllib.parse import urljoin, urlparse # NOQA
+from urllib.parse import urljoin, urlparse
from nikola.plugin_categories import LateTask
from nikola import utils
@@ -59,7 +55,8 @@ class RobotsFile(LateTask):
def write_robots():
if kw["site_url"] != urljoin(kw["site_url"], "/"):
- utils.LOGGER.warn('robots.txt not ending up in server root, will be useless')
+ utils.LOGGER.warning('robots.txt not ending up in server root, will be useless')
+ utils.LOGGER.info('Add "robots" to DISABLED_PLUGINS to disable this warning and robots.txt generation.')
with io.open(robots_path, 'w+', encoding='utf8') as outf:
outf.write("Sitemap: {0}\n\n".format(sitemapindex_url))
@@ -82,6 +79,6 @@ class RobotsFile(LateTask):
"task_dep": ["sitemap"]
}, kw["filters"])
elif kw["robots_exclusions"]:
- utils.LOGGER.warn('Did not generate robots.txt as one already exists in FILES_FOLDERS. ROBOTS_EXCLUSIONS will not have any affect on the copied file.')
+ utils.LOGGER.warning('Did not generate robots.txt as one already exists in FILES_FOLDERS. ROBOTS_EXCLUSIONS will not have any effect on the copied file.')
else:
utils.LOGGER.debug('Did not generate robots.txt as one already exists in FILES_FOLDERS.')
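The extra info message points at DISABLED_PLUGINS as the supported way to silence the warning and skip robots.txt generation altogether:

    # conf.py: disable the robots plugin entirely (plugin name as used above).
    DISABLED_PLUGINS = ["robots"]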
diff --git a/nikola/plugins/task/rss.plugin b/nikola/plugins/task/rss.plugin
deleted file mode 100644
index 4dd8aba..0000000
--- a/nikola/plugins/task/rss.plugin
+++ /dev/null
@@ -1,13 +0,0 @@
-[Core]
-name = generate_rss
-module = rss
-
-[Documentation]
-author = Roberto Alsina
-version = 1.0
-website = https://getnikola.com/
-description = Generate RSS feeds.
-
-[Nikola]
-plugincategory = Task
-
diff --git a/nikola/plugins/task/rss.py b/nikola/plugins/task/rss.py
deleted file mode 100644
index 780559b..0000000
--- a/nikola/plugins/task/rss.py
+++ /dev/null
@@ -1,117 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright © 2012-2016 Roberto Alsina and others.
-
-# Permission is hereby granted, free of charge, to any
-# person obtaining a copy of this software and associated
-# documentation files (the "Software"), to deal in the
-# Software without restriction, including without limitation
-# the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the
-# Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice
-# shall be included in all copies or substantial portions of
-# the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
-# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
-# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
-# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
-# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-"""Generate RSS feeds."""
-
-from __future__ import unicode_literals, print_function
-import os
-try:
- from urlparse import urljoin
-except ImportError:
- from urllib.parse import urljoin # NOQA
-
-from nikola import utils
-from nikola.nikola import _enclosure
-from nikola.plugin_categories import Task
-
-
-class GenerateRSS(Task):
- """Generate RSS feeds."""
-
- name = "generate_rss"
-
- def set_site(self, site):
- """Set Nikola site."""
- site.register_path_handler('rss', self.rss_path)
- return super(GenerateRSS, self).set_site(site)
-
- def gen_tasks(self):
- """Generate RSS feeds."""
- kw = {
- "translations": self.site.config["TRANSLATIONS"],
- "filters": self.site.config["FILTERS"],
- "blog_title": self.site.config["BLOG_TITLE"],
- "site_url": self.site.config["SITE_URL"],
- "base_url": self.site.config["BASE_URL"],
- "blog_description": self.site.config["BLOG_DESCRIPTION"],
- "output_folder": self.site.config["OUTPUT_FOLDER"],
- "feed_teasers": self.site.config["FEED_TEASERS"],
- "feed_plain": self.site.config["FEED_PLAIN"],
- "show_untranslated_posts": self.site.config['SHOW_UNTRANSLATED_POSTS'],
- "feed_length": self.site.config['FEED_LENGTH'],
- "feed_previewimage": self.site.config["FEED_PREVIEWIMAGE"],
- "tzinfo": self.site.tzinfo,
- "feed_read_more_link": self.site.config["FEED_READ_MORE_LINK"],
- "feed_links_append_query": self.site.config["FEED_LINKS_APPEND_QUERY"],
- }
- self.site.scan_posts()
- # Check for any changes in the state of use_in_feeds for any post.
- # Issue #934
- kw['use_in_feeds_status'] = ''.join(
- ['T' if x.use_in_feeds else 'F' for x in self.site.timeline]
- )
- yield self.group_task()
- for lang in kw["translations"]:
- output_name = os.path.join(kw['output_folder'],
- self.site.path("rss", None, lang))
- deps = []
- deps_uptodate = []
- if kw["show_untranslated_posts"]:
- posts = self.site.posts[:kw['feed_length']]
- else:
- posts = [x for x in self.site.posts if x.is_translation_available(lang)][:kw['feed_length']]
- for post in posts:
- deps += post.deps(lang)
- deps_uptodate += post.deps_uptodate(lang)
-
- feed_url = urljoin(self.site.config['BASE_URL'], self.site.link("rss", None, lang).lstrip('/'))
-
- task = {
- 'basename': 'generate_rss',
- 'name': os.path.normpath(output_name),
- 'file_dep': deps,
- 'targets': [output_name],
- 'actions': [(utils.generic_rss_renderer,
- (lang, kw["blog_title"](lang), kw["site_url"],
- kw["blog_description"](lang), posts, output_name,
- kw["feed_teasers"], kw["feed_plain"], kw['feed_length'], feed_url,
- _enclosure, kw["feed_links_append_query"]))],
-
- 'task_dep': ['render_posts'],
- 'clean': True,
- 'uptodate': [utils.config_changed(kw, 'nikola.plugins.task.rss')] + deps_uptodate,
- }
- yield utils.apply_filters(task, kw['filters'])
-
- def rss_path(self, name, lang):
- """A link to the RSS feed path.
-
- Example:
-
- link://rss => /blog/rss.xml
- """
- return [_f for _f in [self.site.config['TRANSLATIONS'][lang],
- self.site.config['RSS_PATH'], 'rss.xml'] if _f]
diff --git a/nikola/plugins/task/scale_images.plugin b/nikola/plugins/task/scale_images.plugin
index 3edd0c6..332f583 100644
--- a/nikola/plugins/task/scale_images.plugin
+++ b/nikola/plugins/task/scale_images.plugin
@@ -9,5 +9,5 @@ website = https://getnikola.com/
description = Create down-scaled images and thumbnails.
[Nikola]
-plugincategory = Task
+PluginCategory = Task
diff --git a/nikola/plugins/task/scale_images.py b/nikola/plugins/task/scale_images.py
index 2b483ae..fa3a67b 100644
--- a/nikola/plugins/task/scale_images.py
+++ b/nikola/plugins/task/scale_images.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2014-2016 Pelle Nilsson and others.
+# Copyright © 2014-2020 Pelle Nilsson and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -38,29 +38,24 @@ class ScaleImage(Task, ImageProcessor):
name = "scale_images"
- def set_site(self, site):
- """Set Nikola site."""
- self.logger = utils.get_logger('scale_images', utils.STDERR_HANDLER)
- return super(ScaleImage, self).set_site(site)
-
def process_tree(self, src, dst):
"""Process all images in a src tree and put the (possibly) rescaled images in the dst folder."""
- ignore = set(['.svn'])
+ thumb_fmt = self.kw['image_thumbnail_format']
base_len = len(src.split(os.sep))
for root, dirs, files in os.walk(src, followlinks=True):
root_parts = root.split(os.sep)
- if set(root_parts) & ignore:
- continue
dst_dir = os.path.join(dst, *root_parts[base_len:])
utils.makedirs(dst_dir)
for src_name in files:
- if src_name in ('.DS_Store', 'Thumbs.db'):
- continue
if (not src_name.lower().endswith(tuple(self.image_ext_list)) and not src_name.upper().endswith(tuple(self.image_ext_list))):
continue
dst_file = os.path.join(dst_dir, src_name)
src_file = os.path.join(root, src_name)
- thumb_file = '.thumbnail'.join(os.path.splitext(dst_file))
+ thumb_name, thumb_ext = os.path.splitext(src_name)
+ thumb_file = os.path.join(dst_dir, thumb_fmt.format(
+ name=thumb_name,
+ ext=thumb_ext,
+ ))
yield {
'name': dst_file,
'file_dep': [src_file],
@@ -71,19 +66,28 @@ class ScaleImage(Task, ImageProcessor):
def process_image(self, src, dst, thumb):
"""Resize an image."""
- self.resize_image(src, dst, self.kw['max_image_size'], False, preserve_exif_data=self.kw['preserve_exif_data'], exif_whitelist=self.kw['exif_whitelist'])
- self.resize_image(src, thumb, self.kw['image_thumbnail_size'], False, preserve_exif_data=self.kw['preserve_exif_data'], exif_whitelist=self.kw['exif_whitelist'])
+ self.resize_image(
+ src,
+ dst_paths=[dst, thumb],
+ max_sizes=[self.kw['max_image_size'], self.kw['image_thumbnail_size']],
+ bigger_panoramas=True,
+ preserve_exif_data=self.kw['preserve_exif_data'],
+ exif_whitelist=self.kw['exif_whitelist'],
+ preserve_icc_profiles=self.kw['preserve_icc_profiles']
+ )
def gen_tasks(self):
"""Copy static files into the output folder."""
self.kw = {
'image_thumbnail_size': self.site.config['IMAGE_THUMBNAIL_SIZE'],
+ 'image_thumbnail_format': self.site.config['IMAGE_THUMBNAIL_FORMAT'],
'max_image_size': self.site.config['MAX_IMAGE_SIZE'],
'image_folders': self.site.config['IMAGE_FOLDERS'],
'output_folder': self.site.config['OUTPUT_FOLDER'],
'filters': self.site.config['FILTERS'],
'preserve_exif_data': self.site.config['PRESERVE_EXIF_DATA'],
'exif_whitelist': self.site.config['EXIF_WHITELIST'],
+ 'preserve_icc_profiles': self.site.config['PRESERVE_ICC_PROFILES'],
}
self.image_ext_list = self.image_ext_list_builtin
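Thumbnail file names are now derived from the configurable IMAGE_THUMBNAIL_FORMAT string (with {name} and {ext} fields) instead of a hard-coded '.thumbnail' infix, and both the scaled image and the thumbnail come out of a single resize_image() call. The naming logic in isolation (the format value shown is only an example):

    # How the new thumbnail naming behaves, in isolation.
    import os

    IMAGE_THUMBNAIL_FORMAT = "{name}.thumbnail{ext}"   # example pattern

    src_name = "cat.jpg"
    thumb_name, thumb_ext = os.path.splitext(src_name)
    thumb_file = IMAGE_THUMBNAIL_FORMAT.format(name=thumb_name, ext=thumb_ext)
    print(thumb_file)   # -> cat.thumbnail.jpg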
diff --git a/nikola/plugins/task/sitemap.plugin b/nikola/plugins/task/sitemap.plugin
index 83e72c4..c8aa832 100644
--- a/nikola/plugins/task/sitemap.plugin
+++ b/nikola/plugins/task/sitemap.plugin
@@ -9,5 +9,5 @@ website = https://getnikola.com/
description = Generate google sitemap.
[Nikola]
-plugincategory = Task
+PluginCategory = Task
diff --git a/nikola/plugins/task/sitemap/__init__.py b/nikola/plugins/task/sitemap.py
index 64fcb45..8bbaa63 100644
--- a/nikola/plugins/task/sitemap/__init__.py
+++ b/nikola/plugins/task/sitemap.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,18 +26,13 @@
"""Generate a sitemap."""
-from __future__ import print_function, absolute_import, unicode_literals
-import io
import datetime
-import dateutil.tz
+import io
import os
-import sys
-try:
- from urlparse import urljoin, urlparse
- import robotparser as robotparser
-except ImportError:
- from urllib.parse import urljoin, urlparse # NOQA
- import urllib.robotparser as robotparser # NOQA
+import urllib.robotparser as robotparser
+from urllib.parse import urljoin, urlparse
+
+import dateutil.tz
from nikola.plugin_categories import LateTask
from nikola.utils import apply_filters, config_changed, encodelink
@@ -119,7 +114,6 @@ class Sitemap(LateTask):
"output_folder": self.site.config["OUTPUT_FOLDER"],
"strip_indexes": self.site.config["STRIP_INDEXES"],
"index_file": self.site.config["INDEX_FILE"],
- "sitemap_include_fileless_dirs": self.site.config["SITEMAP_INCLUDE_FILELESS_DIRS"],
"mapped_extensions": self.site.config.get('MAPPED_EXTENSIONS', ['.atom', '.html', '.htm', '.php', '.xml', '.rss']),
"robots_exclusions": self.site.config["ROBOTS_EXCLUSIONS"],
"filters": self.site.config["FILTERS"],
@@ -142,18 +136,19 @@ class Sitemap(LateTask):
def scan_locs():
"""Scan site locations."""
for root, dirs, files in os.walk(output, followlinks=True):
- if not dirs and not files and not kw['sitemap_include_fileless_dirs']:
+ if not dirs and not files:
continue # Totally empty, not on sitemap
path = os.path.relpath(root, output)
# ignore the current directory.
if path == '.':
- path = ''
+ path = syspath = ''
else:
+ syspath = path + os.sep
path = path.replace(os.sep, '/') + '/'
lastmod = self.get_lastmod(root)
loc = urljoin(base_url, base_path + path)
if kw['index_file'] in files and kw['strip_indexes']: # ignore folders when not stripping urls
- post = self.site.post_per_file.get(path + kw['index_file'])
+ post = self.site.post_per_file.get(syspath + kw['index_file'])
if post and (post.is_draft or post.is_private or post.publish_later):
continue
alternates = []
@@ -169,7 +164,7 @@ class Sitemap(LateTask):
continue # We already mapped the folder
if os.path.splitext(fname)[-1] in mapped_exts:
real_path = os.path.join(root, fname)
- path = os.path.relpath(real_path, output)
+ path = syspath = os.path.relpath(real_path, output)
if path.endswith(kw['index_file']) and kw['strip_indexes']:
# ignore index files when stripping urls
continue
@@ -177,16 +172,15 @@ class Sitemap(LateTask):
continue
# read in binary mode to make ancient files work
- fh = open(real_path, 'rb')
- filehead = fh.read(1024)
- fh.close()
+ with open(real_path, 'rb') as fh:
+ filehead = fh.read(1024)
if path.endswith('.html') or path.endswith('.htm') or path.endswith('.php'):
- """ ignores "html" files without doctype """
+ # Ignores "html" files without doctype
if b'<!doctype html' not in filehead.lower():
continue
- """ ignores "html" files with noindex robot directives """
+ # Ignores "html" files with noindex robot directives
robots_directives = [b'<meta content=noindex name=robots',
b'<meta content=none name=robots',
b'<meta name=robots content=noindex',
@@ -207,7 +201,7 @@ class Sitemap(LateTask):
continue
else:
continue # ignores all XML files except those presumed to be RSS
- post = self.site.post_per_file.get(path)
+ post = self.site.post_per_file.get(syspath)
if post and (post.is_draft or post.is_private or post.publish_later):
continue
path = path.replace(os.sep, '/')
@@ -227,12 +221,8 @@ class Sitemap(LateTask):
for rule in kw["robots_exclusions"]:
robot = robotparser.RobotFileParser()
robot.parse(["User-Agent: *", "Disallow: {0}".format(rule)])
- if sys.version_info[0] == 3:
- if not robot.can_fetch("*", '/' + path):
- return False # not robot food
- else:
- if not robot.can_fetch("*", ('/' + path).encode('utf-8')):
- return False # not robot food
+ if not robot.can_fetch("*", '/' + path):
+ return False # not robot food
return True
def write_sitemap():
@@ -322,6 +312,7 @@ class Sitemap(LateTask):
lastmod = datetime.datetime.utcfromtimestamp(os.stat(p).st_mtime).replace(tzinfo=dateutil.tz.gettz('UTC'), second=0, microsecond=0).isoformat().replace('+00:00', 'Z')
return lastmod
+
if __name__ == '__main__':
import doctest
doctest.testmod()
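With the Python 2 branch gone, the robots-exclusion check calls urllib.robotparser directly. The same test as a standalone helper (the function name and sample paths are illustrative):

    # Standalone sketch of the exclusion test used above.
    import urllib.robotparser as robotparser

    def allowed(path, exclusions):
        """Return False if any ROBOTS_EXCLUSIONS rule forbids the path."""
        for rule in exclusions:
            robot = robotparser.RobotFileParser()
            robot.parse(["User-Agent: *", "Disallow: {0}".format(rule)])
            if not robot.can_fetch("*", '/' + path):
                return False
        return True

    print(allowed("secret/page.html", ["/secret/"]))   # -> False
    print(allowed("posts/hello.html", ["/secret/"]))   # -> True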
diff --git a/nikola/plugins/task/sources.plugin b/nikola/plugins/task/sources.plugin
index 66856f1..1ab1a3c 100644
--- a/nikola/plugins/task/sources.plugin
+++ b/nikola/plugins/task/sources.plugin
@@ -9,5 +9,5 @@ website = https://getnikola.com/
description = Copy page sources into the output.
[Nikola]
-plugincategory = Task
+PluginCategory = Task
diff --git a/nikola/plugins/task/sources.py b/nikola/plugins/task/sources.py
index 0d77aba..1d36429 100644
--- a/nikola/plugins/task/sources.py
+++ b/nikola/plugins/task/sources.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -61,12 +61,8 @@ class Sources(Task):
# do not publish PHP sources
if post.source_ext(True) == post.compiler.extension():
continue
- source = post.source_path
- if lang != kw["default_lang"]:
- source_lang = utils.get_translation_candidate(self.site.config, source, lang)
- if os.path.exists(source_lang):
- source = source_lang
- if os.path.isfile(source):
+ source = post.translated_source_path(lang)
+ if source is not None and os.path.isfile(source):
yield {
'basename': 'render_sources',
'name': os.path.normpath(output_name),
diff --git a/nikola/plugins/task/tags.plugin b/nikola/plugins/task/tags.plugin
index c3a5be3..c17b7b3 100644
--- a/nikola/plugins/task/tags.plugin
+++ b/nikola/plugins/task/tags.plugin
@@ -1,5 +1,5 @@
[Core]
-name = render_tags
+name = classify_tags
module = tags
[Documentation]
@@ -9,5 +9,4 @@ website = https://getnikola.com/
description = Render the tag pages and feeds.
[Nikola]
-plugincategory = Task
-
+PluginCategory = Taxonomy
diff --git a/nikola/plugins/task/tags.py b/nikola/plugins/task/tags.py
index 8b4683e..aecf8f5 100644
--- a/nikola/plugins/task/tags.py
+++ b/nikola/plugins/task/tags.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -24,497 +24,137 @@
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-"""Render the tag/category pages and feeds."""
+"""Render the tag pages and feeds."""
-from __future__ import unicode_literals
-import json
-import os
-import natsort
-try:
- from urlparse import urljoin
-except ImportError:
- from urllib.parse import urljoin # NOQA
-from nikola.plugin_categories import Task
+from nikola.plugin_categories import Taxonomy
from nikola import utils
-from nikola.nikola import _enclosure
-class RenderTags(Task):
- """Render the tag/category pages and feeds."""
+class ClassifyTags(Taxonomy):
+ """Classify the posts by tags."""
- name = "render_tags"
+ name = "classify_tags"
- def set_site(self, site):
- """Set Nikola site."""
- site.register_path_handler('tag_index', self.tag_index_path)
- site.register_path_handler('category_index', self.category_index_path)
- site.register_path_handler('tag', self.tag_path)
- site.register_path_handler('tag_atom', self.tag_atom_path)
- site.register_path_handler('tag_rss', self.tag_rss_path)
- site.register_path_handler('category', self.category_path)
- site.register_path_handler('category_atom', self.category_atom_path)
- site.register_path_handler('category_rss', self.category_rss_path)
- return super(RenderTags, self).set_site(site)
-
- def gen_tasks(self):
- """Render the tag pages and feeds."""
- kw = {
- "translations": self.site.config["TRANSLATIONS"],
- "blog_title": self.site.config["BLOG_TITLE"],
- "site_url": self.site.config["SITE_URL"],
- "base_url": self.site.config["BASE_URL"],
- "messages": self.site.MESSAGES,
- "output_folder": self.site.config['OUTPUT_FOLDER'],
- "filters": self.site.config['FILTERS'],
- 'tag_path': self.site.config['TAG_PATH'],
- "tag_pages_are_indexes": self.site.config['TAG_PAGES_ARE_INDEXES'],
- 'category_path': self.site.config['CATEGORY_PATH'],
- 'category_prefix': self.site.config['CATEGORY_PREFIX'],
- "category_pages_are_indexes": self.site.config['CATEGORY_PAGES_ARE_INDEXES'],
- "generate_rss": self.site.config['GENERATE_RSS'],
- "feed_teasers": self.site.config["FEED_TEASERS"],
- "feed_plain": self.site.config["FEED_PLAIN"],
- "feed_link_append_query": self.site.config["FEED_LINKS_APPEND_QUERY"],
- "show_untranslated_posts": self.site.config['SHOW_UNTRANSLATED_POSTS'],
- "feed_length": self.site.config['FEED_LENGTH'],
- "taglist_minimum_post_count": self.site.config['TAGLIST_MINIMUM_POSTS'],
- "tzinfo": self.site.tzinfo,
- "pretty_urls": self.site.config['PRETTY_URLS'],
- "strip_indexes": self.site.config['STRIP_INDEXES'],
- "index_file": self.site.config['INDEX_FILE'],
- "category_pages_descriptions": self.site.config['CATEGORY_PAGES_DESCRIPTIONS'],
- "category_pages_titles": self.site.config['CATEGORY_PAGES_TITLES'],
- "tag_pages_descriptions": self.site.config['TAG_PAGES_DESCRIPTIONS'],
- "tag_pages_titles": self.site.config['TAG_PAGES_TITLES'],
- }
-
- self.site.scan_posts()
- yield self.group_task()
-
- yield self.list_tags_page(kw)
-
- if not self.site.posts_per_tag and not self.site.posts_per_category:
- return
-
- for lang in kw["translations"]:
- if kw['category_path'][lang] == kw['tag_path'][lang]:
- tags = {self.slugify_tag_name(tag, lang): tag for tag in self.site.tags_per_language[lang]}
- cats = {tuple(self.slugify_category_name(category, lang)): category for category in self.site.posts_per_category.keys()}
- categories = {k[0]: v for k, v in cats.items() if len(k) == 1}
- intersect = set(tags.keys()) & set(categories.keys())
- if len(intersect) > 0:
- for slug in intersect:
- utils.LOGGER.error("Category '{0}' and tag '{1}' both have the same slug '{2}' for language {3}!".format('/'.join(categories[slug]), tags[slug], slug, lang))
-
- # Test for category slug clashes
- categories = {}
- for category in self.site.posts_per_category.keys():
- slug = tuple(self.slugify_category_name(category, lang))
- for part in slug:
- if len(part) == 0:
- utils.LOGGER.error("Category '{0}' yields invalid slug '{1}'!".format(category, '/'.join(slug)))
- raise RuntimeError("Category '{0}' yields invalid slug '{1}'!".format(category, '/'.join(slug)))
- if slug in categories:
- other_category = categories[slug]
- utils.LOGGER.error('You have categories that are too similar: {0} and {1} (language {2})'.format(category, other_category, lang))
- utils.LOGGER.error('Category {0} is used in: {1}'.format(category, ', '.join([p.source_path for p in self.site.posts_per_category[category]])))
- utils.LOGGER.error('Category {0} is used in: {1}'.format(other_category, ', '.join([p.source_path for p in self.site.posts_per_category[other_category]])))
- raise RuntimeError("Category '{0}' yields invalid slug '{1}'!".format(category, '/'.join(slug)))
- categories[slug] = category
-
- tag_list = list(self.site.posts_per_tag.items())
- cat_list = list(self.site.posts_per_category.items())
-
- def render_lists(tag, posts, is_category=True):
- """Render tag pages as RSS files and lists/indexes."""
- post_list = sorted(posts, key=lambda a: a.date)
- post_list.reverse()
- for lang in kw["translations"]:
- if kw["show_untranslated_posts"]:
- filtered_posts = post_list
- else:
- filtered_posts = [x for x in post_list if x.is_translation_available(lang)]
- if kw["generate_rss"]:
- yield self.tag_rss(tag, lang, filtered_posts, kw, is_category)
- # Render HTML
- if kw['category_pages_are_indexes'] if is_category else kw['tag_pages_are_indexes']:
- yield self.tag_page_as_index(tag, lang, filtered_posts, kw, is_category)
- else:
- yield self.tag_page_as_list(tag, lang, filtered_posts, kw, is_category)
-
- for tag, posts in tag_list:
- for task in render_lists(tag, posts, False):
- yield task
-
- for path, posts in cat_list:
- for task in render_lists(path, posts, True):
- yield task
-
- # Tag cloud json file
- tag_cloud_data = {}
- for tag, posts in self.site.posts_per_tag.items():
- if tag in self.site.config['HIDDEN_TAGS']:
- continue
- tag_posts = dict(posts=[{'title': post.meta[post.default_lang]['title'],
- 'date': post.date.strftime('%m/%d/%Y'),
- 'isodate': post.date.isoformat(),
- 'url': post.permalink(post.default_lang)}
- for post in reversed(sorted(self.site.timeline, key=lambda post: post.date))
- if tag in post.alltags])
- tag_cloud_data[tag] = [len(posts), self.site.link(
- 'tag', tag, self.site.config['DEFAULT_LANG']), tag_posts]
- output_name = os.path.join(kw['output_folder'],
- 'assets', 'js', 'tag_cloud_data.json')
+ classification_name = "tag"
+ overview_page_variable_name = "tags"
+ overview_page_items_variable_name = "items"
+ more_than_one_classifications_per_post = True
+ has_hierarchy = False
+ show_list_as_subcategories_list = False
+ template_for_classification_overview = "tags.tmpl"
+ always_disable_rss = False
+ always_disable_atom = False
+ apply_to_posts = True
+ apply_to_pages = False
+ omit_empty_classifications = True
+ add_other_languages_variable = True
+ path_handler_docstrings = {
+ 'tag_index': """A link to the tag index.
- def write_tag_data(data):
- """Write tag data into JSON file, for use in tag clouds."""
- utils.makedirs(os.path.dirname(output_name))
- with open(output_name, 'w+') as fd:
- json.dump(data, fd, sort_keys=True)
+Example:
- if self.site.config['WRITE_TAG_CLOUD']:
- task = {
- 'basename': str(self.name),
- 'name': str(output_name)
- }
+link://tag_index => /tags/index.html""",
+ 'tag': """A link to a tag's page. Takes page number as optional keyword argument.
- task['uptodate'] = [utils.config_changed(tag_cloud_data, 'nikola.plugins.task.tags:tagdata')]
- task['targets'] = [output_name]
- task['actions'] = [(write_tag_data, [tag_cloud_data])]
- task['clean'] = True
- yield utils.apply_filters(task, kw['filters'])
+Example:
- def _create_tags_page(self, kw, lang, include_tags=True, include_categories=True):
- """Create a global "all your tags/categories" page for each language."""
- categories = [cat.category_name for cat in self.site.category_hierarchy]
- has_categories = (categories != []) and include_categories
- template_name = "tags.tmpl"
- kw = kw.copy()
- if include_categories:
- kw['categories'] = categories
- tags = natsort.natsorted([tag for tag in self.site.tags_per_language[lang]
- if len(self.site.posts_per_tag[tag]) >= kw["taglist_minimum_post_count"]],
- alg=natsort.ns.F | natsort.ns.IC)
- has_tags = (tags != []) and include_tags
- if include_tags:
- kw['tags'] = tags
- output_name = os.path.join(
- kw['output_folder'], self.site.path('tag_index' if has_tags else 'category_index', None, lang))
- context = {}
- if has_categories and has_tags:
- context["title"] = kw["messages"][lang]["Tags and Categories"]
- elif has_categories:
- context["title"] = kw["messages"][lang]["Categories"]
- else:
- context["title"] = kw["messages"][lang]["Tags"]
- if has_tags:
- context["items"] = [(tag, self.site.link("tag", tag, lang)) for tag
- in tags]
- else:
- context["items"] = None
- if has_categories:
- context["cat_items"] = [(tag, self.site.link("category", tag, lang)) for tag
- in categories]
- context['cat_hierarchy'] = [(node.name, node.category_name, node.category_path,
- self.site.link("category", node.category_name),
- node.indent_levels, node.indent_change_before,
- node.indent_change_after)
- for node in self.site.category_hierarchy]
- else:
- context["cat_items"] = None
- context["permalink"] = self.site.link("tag_index" if has_tags else "category_index", None, lang)
- context["description"] = context["title"]
- context["pagekind"] = ["list", "tags_page"]
- task = self.site.generic_post_list_renderer(
- lang,
- [],
- output_name,
- template_name,
- kw['filters'],
- context,
- )
- task['uptodate'] = task['uptodate'] + [utils.config_changed(kw, 'nikola.plugins.task.tags:page')]
- task['basename'] = str(self.name)
- yield task
-
- def list_tags_page(self, kw):
- """Create a global "all your tags/categories" page for each language."""
- for lang in kw["translations"]:
- if self.site.config['TAG_PATH'][lang] == self.site.config['CATEGORY_PATH'][lang]:
- yield self._create_tags_page(kw, lang, True, True)
- else:
- yield self._create_tags_page(kw, lang, False, True)
- yield self._create_tags_page(kw, lang, True, False)
-
- def _get_title(self, tag, is_category):
- if is_category:
- return self.site.parse_category_name(tag)[-1]
- else:
- return tag
-
- def _get_indexes_title(self, tag, nice_tag, is_category, lang, messages):
- titles = self.site.config['CATEGORY_PAGES_TITLES'] if is_category else self.site.config['TAG_PAGES_TITLES']
- return titles[lang][tag] if lang in titles and tag in titles[lang] else messages[lang]["Posts about %s"] % nice_tag
-
- def _get_description(self, tag, is_category, lang):
- descriptions = self.site.config['CATEGORY_PAGES_DESCRIPTIONS'] if is_category else self.site.config['TAG_PAGES_DESCRIPTIONS']
- return descriptions[lang][tag] if lang in descriptions and tag in descriptions[lang] else None
-
- def _get_subcategories(self, category):
- node = self.site.category_hierarchy_lookup[category]
- return [(child.name, self.site.link("category", child.category_name)) for child in node.children]
+link://tag/cats => /tags/cats.html""",
+ 'tag_atom': """A link to a tag's Atom feed.
- def tag_page_as_index(self, tag, lang, post_list, kw, is_category):
- """Render a sort of index page collection using only this tag's posts."""
- kind = "category" if is_category else "tag"
+Example:
- def page_link(i, displayed_i, num_pages, force_addition, extension=None):
- feed = "_atom" if extension == ".atom" else ""
- return utils.adjust_name_for_index_link(self.site.link(kind + feed, tag, lang), i, displayed_i, lang, self.site, force_addition, extension)
+link://tag_atom/cats => /tags/cats.atom""",
+ 'tag_rss': """A link to a tag's RSS feed.
- def page_path(i, displayed_i, num_pages, force_addition, extension=None):
- feed = "_atom" if extension == ".atom" else ""
- return utils.adjust_name_for_index_path(self.site.path(kind + feed, tag, lang), i, displayed_i, lang, self.site, force_addition, extension)
+Example:
- context_source = {}
- title = self._get_title(tag, is_category)
- if kw["generate_rss"]:
- # On a tag page, the feeds include the tag's feeds
- rss_link = ("""<link rel="alternate" type="application/rss+xml" """
- """title="RSS for tag """
- """{0} ({1})" href="{2}">""".format(
- title, lang, self.site.link(kind + "_rss", tag, lang)))
- context_source['rss_link'] = rss_link
- if is_category:
- context_source["category"] = tag
- context_source["category_path"] = self.site.parse_category_name(tag)
- context_source["tag"] = title
- indexes_title = self._get_indexes_title(tag, title, is_category, lang, kw["messages"])
- context_source["description"] = self._get_description(tag, is_category, lang)
- if is_category:
- context_source["subcategories"] = self._get_subcategories(tag)
- context_source["pagekind"] = ["index", "tag_page"]
- template_name = "tagindex.tmpl"
+link://tag_rss/cats => /tags/cats.xml""",
+ }
- yield self.site.generic_index_renderer(lang, post_list, indexes_title, template_name, context_source, kw, str(self.name), page_link, page_path)
-
- def tag_page_as_list(self, tag, lang, post_list, kw, is_category):
- """Render a single flat link list with this tag's posts."""
- kind = "category" if is_category else "tag"
- template_name = "tag.tmpl"
- output_name = os.path.join(kw['output_folder'], self.site.path(
- kind, tag, lang))
- context = {}
- context["lang"] = lang
- title = self._get_title(tag, is_category)
- if is_category:
- context["category"] = tag
- context["category_path"] = self.site.parse_category_name(tag)
- context["tag"] = title
- context["title"] = self._get_indexes_title(tag, title, is_category, lang, kw["messages"])
- context["posts"] = post_list
- context["permalink"] = self.site.link(kind, tag, lang)
- context["kind"] = kind
- context["description"] = self._get_description(tag, is_category, lang)
- if is_category:
- context["subcategories"] = self._get_subcategories(tag)
- context["pagekind"] = ["list", "tag_page"]
- task = self.site.generic_post_list_renderer(
- lang,
- post_list,
- output_name,
- template_name,
- kw['filters'],
- context,
- )
- task['uptodate'] = task['uptodate'] + [utils.config_changed(kw, 'nikola.plugins.task.tags:list')]
- task['basename'] = str(self.name)
- yield task
-
- if self.site.config['GENERATE_ATOM']:
- yield self.atom_feed_list(kind, tag, lang, post_list, context, kw)
+ def set_site(self, site):
+ """Set site, which is a Nikola instance."""
+ super().set_site(site)
+ self.show_list_as_index = self.site.config['TAG_PAGES_ARE_INDEXES']
+ self.template_for_single_list = "tagindex.tmpl" if self.show_list_as_index else "tag.tmpl"
+ self.minimum_post_count_per_classification_in_overview = self.site.config['TAGLIST_MINIMUM_POSTS']
+ self.translation_manager = utils.ClassificationTranslationManager()
- def atom_feed_list(self, kind, tag, lang, post_list, context, kw):
- """Generate atom feeds for tag lists."""
- if kind == 'tag':
- context['feedlink'] = self.site.abs_link(self.site.path('tag_atom', tag, lang))
- feed_path = os.path.join(kw['output_folder'], self.site.path('tag_atom', tag, lang))
- elif kind == 'category':
- context['feedlink'] = self.site.abs_link(self.site.path('category_atom', tag, lang))
- feed_path = os.path.join(kw['output_folder'], self.site.path('category_atom', tag, lang))
+ def is_enabled(self, lang=None):
+ """Return True if this taxonomy is enabled, or False otherwise."""
+ return True
- task = {
- 'basename': str(self.name),
- 'name': feed_path,
- 'targets': [feed_path],
- 'actions': [(self.site.atom_feed_renderer, (lang, post_list, feed_path, kw['filters'], context))],
- 'clean': True,
- 'uptodate': [utils.config_changed(kw, 'nikola.plugins.task.tags:atom')],
- 'task_dep': ['render_posts'],
- }
- return task
+ def classify(self, post, lang):
+ """Classify the given post for the given language."""
+ return post.tags_for_language(lang)
- def tag_rss(self, tag, lang, posts, kw, is_category):
- """Create a RSS feed for a single tag in a given language."""
- kind = "category" if is_category else "tag"
- # Render RSS
- output_name = os.path.normpath(
- os.path.join(kw['output_folder'],
- self.site.path(kind + "_rss", tag, lang)))
- feed_url = urljoin(self.site.config['BASE_URL'], self.site.link(kind + "_rss", tag, lang).lstrip('/'))
- deps = []
- deps_uptodate = []
- post_list = sorted(posts, key=lambda a: a.date)
- post_list.reverse()
- for post in post_list:
- deps += post.deps(lang)
- deps_uptodate += post.deps_uptodate(lang)
- task = {
- 'basename': str(self.name),
- 'name': output_name,
- 'file_dep': deps,
- 'targets': [output_name],
- 'actions': [(utils.generic_rss_renderer,
- (lang, "{0} ({1})".format(kw["blog_title"](lang), self._get_title(tag, is_category)),
- kw["site_url"], None, post_list,
- output_name, kw["feed_teasers"], kw["feed_plain"], kw['feed_length'],
- feed_url, _enclosure, kw["feed_link_append_query"]))],
- 'clean': True,
- 'uptodate': [utils.config_changed(kw, 'nikola.plugins.task.tags:rss')] + deps_uptodate,
- 'task_dep': ['render_posts'],
- }
- return utils.apply_filters(task, kw['filters'])
+ def get_classification_friendly_name(self, classification, lang, only_last_component=False):
+ """Extract a friendly name from the classification."""
+ return classification
def slugify_tag_name(self, name, lang):
"""Slugify a tag name."""
- if lang is None: # TODO: remove in v8
- utils.LOGGER.warn("RenderTags.slugify_tag_name() called without language!")
- lang = ''
if self.site.config['SLUG_TAG_PATH']:
name = utils.slugify(name, lang)
return name
- def tag_index_path(self, name, lang):
- """A link to the tag index.
-
- Example:
-
- link://tag_index => /tags/index.html
- """
- if self.site.config['TAGS_INDEX_PATH'][lang]:
- paths = [_f for _f in [self.site.config['TRANSLATIONS'][lang],
- self.site.config['TAGS_INDEX_PATH'][lang]] if _f]
+ def get_overview_path(self, lang, dest_type='page'):
+ """Return a path for the list of all classifications."""
+ if self.site.config['TAGS_INDEX_PATH'](lang):
+ path = self.site.config['TAGS_INDEX_PATH'](lang)
+ append_index = 'never'
else:
- paths = [_f for _f in [self.site.config['TRANSLATIONS'][lang],
- self.site.config['TAG_PATH'][lang],
- self.site.config['INDEX_FILE']] if _f]
- return paths
-
- def category_index_path(self, name, lang):
- """A link to the category index.
-
- Example:
-
- link://category_index => /categories/index.html
- """
- return [_f for _f in [self.site.config['TRANSLATIONS'][lang],
- self.site.config['CATEGORY_PATH'][lang],
- self.site.config['INDEX_FILE']] if _f]
-
- def tag_path(self, name, lang):
- """A link to a tag's page.
-
- Example:
-
- link://tag/cats => /tags/cats.html
- """
- if self.site.config['PRETTY_URLS']:
- return [_f for _f in [
- self.site.config['TRANSLATIONS'][lang],
- self.site.config['TAG_PATH'][lang],
- self.slugify_tag_name(name, lang),
- self.site.config['INDEX_FILE']] if _f]
- else:
- return [_f for _f in [
- self.site.config['TRANSLATIONS'][lang],
- self.site.config['TAG_PATH'][lang],
- self.slugify_tag_name(name, lang) + ".html"] if _f]
-
- def tag_atom_path(self, name, lang):
- """A link to a tag's Atom feed.
-
- Example:
-
- link://tag_atom/cats => /tags/cats.atom
- """
- return [_f for _f in [self.site.config['TRANSLATIONS'][lang],
- self.site.config['TAG_PATH'][lang], self.slugify_tag_name(name, lang) + ".atom"] if
- _f]
-
- def tag_rss_path(self, name, lang):
- """A link to a tag's RSS feed.
-
- Example:
-
- link://tag_rss/cats => /tags/cats.xml
- """
- return [_f for _f in [self.site.config['TRANSLATIONS'][lang],
- self.site.config['TAG_PATH'][lang], self.slugify_tag_name(name, lang) + ".xml"] if
- _f]
-
- def slugify_category_name(self, name, lang):
- """Slugify a category name."""
- if lang is None: # TODO: remove in v8
- utils.LOGGER.warn("RenderTags.slugify_category_name() called without language!")
- lang = ''
- path = self.site.parse_category_name(name)
- if self.site.config['CATEGORY_OUTPUT_FLAT_HIERARCHY']:
- path = path[-1:] # only the leaf
- result = [self.slugify_tag_name(part, lang) for part in path]
- result[0] = self.site.config['CATEGORY_PREFIX'] + result[0]
- if not self.site.config['PRETTY_URLS']:
- result = ['-'.join(result)]
- return result
-
- def _add_extension(self, path, extension):
- path[-1] += extension
- return path
-
- def category_path(self, name, lang):
- """A link to a category.
-
- Example:
-
- link://category/dogs => /categories/dogs.html
- """
- if self.site.config['PRETTY_URLS']:
- return [_f for _f in [self.site.config['TRANSLATIONS'][lang],
- self.site.config['CATEGORY_PATH'][lang]] if
- _f] + self.slugify_category_name(name, lang) + [self.site.config['INDEX_FILE']]
- else:
- return [_f for _f in [self.site.config['TRANSLATIONS'][lang],
- self.site.config['CATEGORY_PATH'][lang]] if
- _f] + self._add_extension(self.slugify_category_name(name, lang), ".html")
-
- def category_atom_path(self, name, lang):
- """A link to a category's Atom feed.
-
- Example:
-
- link://category_atom/dogs => /categories/dogs.atom
- """
- return [_f for _f in [self.site.config['TRANSLATIONS'][lang],
- self.site.config['CATEGORY_PATH'][lang]] if
- _f] + self._add_extension(self.slugify_category_name(name, lang), ".atom")
+ path = self.site.config['TAG_PATH'](lang)
+ append_index = 'always'
+ return [component for component in path.split('/') if component], append_index
+
+ def get_path(self, classification, lang, dest_type='page'):
+ """Return a path for the given classification."""
+ return [_f for _f in [
+ self.site.config['TAG_PATH'](lang),
+ self.slugify_tag_name(classification, lang)] if _f], 'auto'
+
+ def provide_overview_context_and_uptodate(self, lang):
+ """Provide data for the context and the uptodate list for the list of all classifiations."""
+ kw = {
+ "tag_path": self.site.config['TAG_PATH'],
+ "tag_pages_are_indexes": self.site.config['TAG_PAGES_ARE_INDEXES'],
+ "taglist_minimum_post_count": self.site.config['TAGLIST_MINIMUM_POSTS'],
+ "tzinfo": self.site.tzinfo,
+ "tag_descriptions": self.site.config['TAG_DESCRIPTIONS'],
+ "tag_titles": self.site.config['TAG_TITLES'],
+ }
+ context = {
+ "title": self.site.MESSAGES[lang]["Tags"],
+ "description": self.site.MESSAGES[lang]["Tags"],
+ "pagekind": ["list", "tags_page"],
+ }
+ kw.update(context)
+ return context, kw
- def category_rss_path(self, name, lang):
- """A link to a category's RSS feed.
+ def provide_context_and_uptodate(self, classification, lang, node=None):
+ """Provide data for the context and the uptodate list for the list of the given classifiation."""
+ kw = {
+ "tag_path": self.site.config['TAG_PATH'],
+ "tag_pages_are_indexes": self.site.config['TAG_PAGES_ARE_INDEXES'],
+ "taglist_minimum_post_count": self.site.config['TAGLIST_MINIMUM_POSTS'],
+ "tzinfo": self.site.tzinfo,
+ "tag_descriptions": self.site.config['TAG_DESCRIPTIONS'],
+ "tag_titles": self.site.config['TAG_TITLES'],
+ }
+ context = {
+ "title": self.site.config['TAG_TITLES'].get(lang, {}).get(classification, self.site.MESSAGES[lang]["Posts about %s"] % classification),
+ "description": self.site.config['TAG_DESCRIPTIONS'].get(lang, {}).get(classification),
+ "pagekind": ["tag_page", "index" if self.show_list_as_index else "list"],
+ "tag": classification,
+ }
+ kw.update(context)
+ return context, kw
- Example:
+ def get_other_language_variants(self, classification, lang, classifications_per_language):
+ """Return a list of variants of the same tag in other languages."""
+ return self.translation_manager.get_translations_as_list(classification, lang, classifications_per_language)
- link://category_rss/dogs => /categories/dogs.xml
- """
- return [_f for _f in [self.site.config['TRANSLATIONS'][lang],
- self.site.config['CATEGORY_PATH'][lang]] if
- _f] + self._add_extension(self.slugify_category_name(name, lang), ".xml")
+ def postprocess_posts_per_classification(self, posts_per_classification_per_language, flat_hierarchy_per_lang=None, hierarchy_lookup_per_lang=None):
+ """Rearrange, modify or otherwise use the list of posts per classification and per language."""
+ self.translation_manager.read_from_config(self.site, 'TAG', posts_per_classification_per_language, False)
diff --git a/nikola/plugins/task/taxonomies.plugin b/nikola/plugins/task/taxonomies.plugin
new file mode 100644
index 0000000..5bda812
--- /dev/null
+++ b/nikola/plugins/task/taxonomies.plugin
@@ -0,0 +1,12 @@
+[Core]
+name = render_taxonomies
+module = taxonomies
+
+[Documentation]
+author = Roberto Alsina
+version = 1.0
+website = https://getnikola.com/
+description = Render the taxonomy overviews, classification pages and feeds.
+
+[Nikola]
+PluginCategory = Task
diff --git a/nikola/plugins/task/taxonomies.py b/nikola/plugins/task/taxonomies.py
new file mode 100644
index 0000000..7dcf6ed
--- /dev/null
+++ b/nikola/plugins/task/taxonomies.py
@@ -0,0 +1,459 @@
+# -*- coding: utf-8 -*-
+
+# Copyright © 2012-2020 Roberto Alsina and others.
+
+# Permission is hereby granted, free of charge, to any
+# person obtaining a copy of this software and associated
+# documentation files (the "Software"), to deal in the
+# Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the
+# Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice
+# shall be included in all copies or substantial portions of
+# the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
+# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+"""Render the taxonomy overviews, classification pages and feeds."""
+
+import os
+from collections import defaultdict
+from copy import copy
+from urllib.parse import urljoin
+
+import blinker
+import natsort
+
+from nikola import utils, hierarchy_utils
+from nikola.nikola import _enclosure
+from nikola.plugin_categories import Task
+
+
+class RenderTaxonomies(Task):
+ """Render taxonomy pages and feeds."""
+
+ name = "render_taxonomies"
+
+ def _generate_classification_overview_kw_context(self, taxonomy, lang):
+ """Create context and kw for a classification overview page."""
+ context, kw = taxonomy.provide_overview_context_and_uptodate(lang)
+
+ context = copy(context)
+ context["kind"] = "{}_index".format(taxonomy.classification_name)
+ sorted_links = []
+ for other_lang in sorted(self.site.config['TRANSLATIONS'].keys()):
+ if other_lang != lang:
+ sorted_links.append((other_lang, None, None))
+ # Put the current language in front, so that it appears first in links
+ # (Issue #3248)
+ sorted_links_all = [(lang, None, None)] + sorted_links
+ context['has_other_languages'] = True
+ context['other_languages'] = sorted_links
+ context['all_languages'] = sorted_links_all
+
+ kw = copy(kw)
+ kw["messages"] = self.site.MESSAGES
+ kw["translations"] = self.site.config['TRANSLATIONS']
+ kw["filters"] = self.site.config['FILTERS']
+ kw["minimum_post_count"] = taxonomy.minimum_post_count_per_classification_in_overview
+ kw["output_folder"] = self.site.config['OUTPUT_FOLDER']
+ kw["pretty_urls"] = self.site.config['PRETTY_URLS']
+ kw["strip_indexes"] = self.site.config['STRIP_INDEXES']
+ kw["index_file"] = self.site.config['INDEX_FILE']
+
+ # Collect all relevant classifications
+ if taxonomy.has_hierarchy:
+ def acceptor(node):
+ return len(self._filter_list(self.site.posts_per_classification[taxonomy.classification_name][lang][node.classification_name], lang)) >= kw["minimum_post_count"]
+
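+            # Clip the hierarchy with the acceptor above, then flatten the
+            # surviving trees into a single ordered list for the overview.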
+ clipped_root_list = [hierarchy_utils.clone_treenode(node, parent=None, acceptor=acceptor) for node in self.site.hierarchy_per_classification[taxonomy.classification_name][lang]]
+ clipped_root_list = [node for node in clipped_root_list if node]
+ clipped_flat_hierarchy = hierarchy_utils.flatten_tree_structure(clipped_root_list)
+
+ classifications = [cat.classification_name for cat in clipped_flat_hierarchy]
+ else:
+ classifications = natsort.natsorted([tag for tag, posts in self.site.posts_per_classification[taxonomy.classification_name][lang].items()
+ if len(self._filter_list(posts, lang)) >= kw["minimum_post_count"]],
+ alg=natsort.ns.F | natsort.ns.IC)
+ taxonomy.sort_classifications(classifications, lang)
+
+ # Set up classifications in context
+ context[taxonomy.overview_page_variable_name] = classifications
+ context["has_hierarchy"] = taxonomy.has_hierarchy
+ if taxonomy.overview_page_items_variable_name:
+ items = [(classification,
+ self.site.link(taxonomy.classification_name, classification, lang))
+ for classification in classifications]
+ items_with_postcount = [
+ (classification,
+ self.site.link(taxonomy.classification_name, classification, lang),
+ len(self._filter_list(self.site.posts_per_classification[taxonomy.classification_name][lang][classification], lang)))
+ for classification in classifications
+ ]
+ context[taxonomy.overview_page_items_variable_name] = items
+ context[taxonomy.overview_page_items_variable_name + "_with_postcount"] = items_with_postcount
+ if taxonomy.has_hierarchy and taxonomy.overview_page_hierarchy_variable_name:
+ hier_items = [
+ (node.name, node.classification_name, node.classification_path,
+ self.site.link(taxonomy.classification_name, node.classification_name, lang),
+ node.indent_levels, node.indent_change_before,
+ node.indent_change_after)
+ for node in clipped_flat_hierarchy
+ ]
+ hier_items_with_postcount = [
+ (node.name, node.classification_name, node.classification_path,
+ self.site.link(taxonomy.classification_name, node.classification_name, lang),
+ node.indent_levels, node.indent_change_before,
+ node.indent_change_after,
+ len(node.children),
+ len(self._filter_list(self.site.posts_per_classification[taxonomy.classification_name][lang][node.classification_name], lang)))
+ for node in clipped_flat_hierarchy
+ ]
+ context[taxonomy.overview_page_hierarchy_variable_name] = hier_items
+ context[taxonomy.overview_page_hierarchy_variable_name + '_with_postcount'] = hier_items_with_postcount
+ return context, kw
+
+ def _render_classification_overview(self, classification_name, template, lang, context, kw):
+ # Prepare rendering
+ context["permalink"] = self.site.link("{}_index".format(classification_name), None, lang)
+ if "pagekind" not in context:
+ context["pagekind"] = ["list", "tags_page"]
+ output_name = os.path.join(self.site.config['OUTPUT_FOLDER'], self.site.path('{}_index'.format(classification_name), None, lang))
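+        # Allow other plugins to modify the overview context before rendering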
+ blinker.signal('generate_classification_overview').send({
+ 'site': self.site,
+ 'classification_name': classification_name,
+ 'lang': lang,
+ 'context': context,
+ 'kw': kw,
+ 'output_name': output_name,
+ })
+ task = self.site.generic_post_list_renderer(
+ lang,
+ [],
+ output_name,
+ template,
+ kw['filters'],
+ context,
+ )
+ task['uptodate'] = task['uptodate'] + [utils.config_changed(kw, 'nikola.plugins.task.taxonomies:page')]
+ task['basename'] = str(self.name)
+ yield task
+
+ def _generate_classification_overview(self, taxonomy, lang):
+ """Create a global "all your tags/categories" page for a given language."""
+ context, kw = self._generate_classification_overview_kw_context(taxonomy, lang)
+ for task in self._render_classification_overview(taxonomy.classification_name, taxonomy.template_for_classification_overview, lang, context, kw):
+ yield task
+
+ def _generate_tag_and_category_overview(self, tag_taxonomy, category_taxonomy, lang):
+ """Create a global "all your tags/categories" page for a given language."""
+ # Create individual contexts and kw dicts
+ tag_context, tag_kw = self._generate_classification_overview_kw_context(tag_taxonomy, lang)
+ cat_context, cat_kw = self._generate_classification_overview_kw_context(category_taxonomy, lang)
+
+        # Combine the two dicts, or select whichever one applies
+ if tag_context['items'] and cat_context['cat_items']:
+ # Combine contexts. We must merge the tag context into the category context
+ # so that tag_context['items'] makes it into the result.
+ context = cat_context
+ context.update(tag_context)
+ kw = cat_kw
+ kw.update(tag_kw)
+
+ # Update title
+ title = self.site.MESSAGES[lang]["Tags and Categories"]
+ context['title'] = title
+ context['description'] = title
+ kw['title'] = title
+ kw['description'] = title
+ elif cat_context['cat_items']:
+ # Use category overview page
+ context = cat_context
+ kw = cat_kw
+ else:
+ # Use tag overview page
+ context = tag_context
+ kw = tag_kw
+
+ # Render result
+ for task in self._render_classification_overview('tag', tag_taxonomy.template_for_classification_overview, lang, context, kw):
+ yield task
+
+ def _generate_classification_page_as_rss(self, taxonomy, classification, filtered_posts, title, description, kw, lang):
+ """Create a RSS feed for a single classification in a given language."""
+ kind = taxonomy.classification_name
+ # Render RSS
+ output_name = os.path.normpath(os.path.join(self.site.config['OUTPUT_FOLDER'], self.site.path(kind + "_rss", classification, lang)))
+ feed_url = urljoin(self.site.config['BASE_URL'], self.site.link(kind + "_rss", classification, lang).lstrip('/'))
+ deps = []
+ deps_uptodate = []
+ for post in filtered_posts:
+ deps += post.deps(lang)
+ deps_uptodate += post.deps_uptodate(lang)
+ blog_title = kw["blog_title"](lang)
+ task = {
+ 'basename': str(self.name),
+ 'name': output_name,
+ 'file_dep': deps,
+ 'targets': [output_name],
+ 'actions': [(utils.generic_rss_renderer,
+ (lang, "{0} ({1})".format(blog_title, title) if blog_title != title else blog_title,
+ kw["site_url"], description, filtered_posts,
+ output_name, kw["feed_teasers"], kw["feed_plain"], kw['feed_length'],
+ feed_url, _enclosure, kw["feed_links_append_query"]))],
+ 'clean': True,
+ 'uptodate': [utils.config_changed(kw, 'nikola.plugins.task.taxonomies:rss')] + deps_uptodate,
+ 'task_dep': ['render_posts'],
+ }
+ return utils.apply_filters(task, kw['filters'])
+
+ def _generate_classification_page_as_index(self, taxonomy, classification, filtered_posts, context, kw, lang):
+ """Render an index page collection using only this classification's posts."""
+ kind = taxonomy.classification_name
+
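+        # Helpers handed to generic_index_renderer to compute the link and the
+        # output path for every page of this classification's index.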
+ def page_link(i, displayed_i, num_pages, force_addition, extension=None):
+ return self.site.link(kind, classification, lang, alternative_path=force_addition, page=i)
+
+ def page_path(i, displayed_i, num_pages, force_addition, extension=None):
+ return self.site.path(kind, classification, lang, alternative_path=force_addition, page=i)
+
+ context = copy(context)
+ context["kind"] = kind
+ if "pagekind" not in context:
+ context["pagekind"] = ["index", "tag_page"]
+ template_name = taxonomy.template_for_single_list
+
+ yield self.site.generic_index_renderer(lang, filtered_posts, context['title'], template_name, context, kw, str(self.name), page_link, page_path)
+
+ def _generate_classification_page_as_atom(self, taxonomy, classification, filtered_posts, context, kw, lang):
+ """Generate atom feeds for classification lists."""
+ kind = taxonomy.classification_name
+
+ context = copy(context)
+ context["kind"] = kind
+
+ yield self.site.generic_atom_renderer(lang, filtered_posts, context, kw, str(self.name), classification, kind)
+
+ def _generate_classification_page_as_list(self, taxonomy, classification, filtered_posts, context, kw, lang):
+ """Render a single flat link list with this classification's posts."""
+ kind = taxonomy.classification_name
+ template_name = taxonomy.template_for_single_list
+ output_name = os.path.join(self.site.config['OUTPUT_FOLDER'], self.site.path(kind, classification, lang))
+ context["lang"] = lang
+ # list.tmpl expects a different format than list_post.tmpl (Issue #2701)
+ if template_name == 'list.tmpl':
+ context["items"] = [(post.title(lang), post.permalink(lang), None) for post in filtered_posts]
+ else:
+ context["posts"] = filtered_posts
+ if "pagekind" not in context:
+ context["pagekind"] = ["list", "tag_page"]
+ task = self.site.generic_post_list_renderer(lang, filtered_posts, output_name, template_name, kw['filters'], context)
+ task['uptodate'] = task['uptodate'] + [utils.config_changed(kw, 'nikola.plugins.task.taxonomies:list')]
+ task['basename'] = str(self.name)
+ yield task
+
+ def _filter_list(self, post_list, lang):
+ """Return only the posts which should be shown for this language."""
+ if self.site.config["SHOW_UNTRANSLATED_POSTS"]:
+ return post_list
+ else:
+ return [x for x in post_list if x.is_translation_available(lang)]
+
+ def _generate_subclassification_page(self, taxonomy, node, context, kw, lang):
+ """Render a list of subclassifications."""
+ def get_subnode_data(subnode):
+ return [
+ taxonomy.get_classification_friendly_name(subnode.classification_name, lang, only_last_component=True),
+ self.site.link(taxonomy.classification_name, subnode.classification_name, lang),
+ len(self._filter_list(self.site.posts_per_classification[taxonomy.classification_name][lang][subnode.classification_name], lang))
+ ]
+
+ items = [get_subnode_data(subnode) for subnode in node.children]
+ context = copy(context)
+ context["lang"] = lang
+ context["permalink"] = self.site.link(taxonomy.classification_name, node.classification_name, lang)
+ if "pagekind" not in context:
+ context["pagekind"] = ["list", "archive_page"]
+ context["items"] = items
+ task = self.site.generic_post_list_renderer(
+ lang,
+ [],
+ os.path.join(kw['output_folder'], self.site.path(taxonomy.classification_name, node.classification_name, lang)),
+ taxonomy.subcategories_list_template,
+ kw['filters'],
+ context,
+ )
+ task_cfg = {1: kw, 2: items}
+ task['uptodate'] = task['uptodate'] + [utils.config_changed(task_cfg, 'nikola.plugins.task.taxonomy')]
+ task['basename'] = self.name
+ return task
+
+ def _generate_classification_page(self, taxonomy, classification, filtered_posts, generate_list, generate_rss, generate_atom, lang, post_lists_per_lang, classification_set_per_lang=None):
+ """Render index or post list and associated feeds per classification."""
+ # Should we create this list?
+ if not any((generate_list, generate_rss, generate_atom)):
+ return
+ # Get data
+ node = None
+ if taxonomy.has_hierarchy:
+ node = self.site.hierarchy_lookup_per_classification[taxonomy.classification_name][lang].get(classification)
+ context, kw = taxonomy.provide_context_and_uptodate(classification, lang, node)
+ kw = copy(kw)
+ kw["messages"] = self.site.MESSAGES
+ kw["translations"] = self.site.config['TRANSLATIONS']
+ kw["filters"] = self.site.config['FILTERS']
+ kw["site_url"] = self.site.config['SITE_URL']
+ kw["blog_title"] = self.site.config['BLOG_TITLE']
+ kw["generate_rss"] = self.site.config['GENERATE_RSS']
+ kw["generate_atom"] = self.site.config['GENERATE_ATOM']
+ kw["feed_teasers"] = self.site.config["FEED_TEASERS"]
+ kw["feed_plain"] = self.site.config["FEED_PLAIN"]
+ kw["feed_links_append_query"] = self.site.config["FEED_LINKS_APPEND_QUERY"]
+ kw["feed_length"] = self.site.config['FEED_LENGTH']
+ kw["output_folder"] = self.site.config['OUTPUT_FOLDER']
+ kw["pretty_urls"] = self.site.config['PRETTY_URLS']
+ kw["strip_indexes"] = self.site.config['STRIP_INDEXES']
+ kw["index_file"] = self.site.config['INDEX_FILE']
+ context = copy(context)
+ context["permalink"] = self.site.link(taxonomy.classification_name, classification, lang)
+ context["kind"] = taxonomy.classification_name
+ # Get links to other language versions of this classification
+ if classification_set_per_lang is not None:
+ other_lang_links = taxonomy.get_other_language_variants(classification, lang, classification_set_per_lang)
+ # Collect by language
+ links_per_lang = defaultdict(list)
+ for other_lang, link in other_lang_links:
+ # Make sure we ignore the current language (in case the
+ # plugin accidentally returns links for it as well)
+ if other_lang != lang:
+ links_per_lang[other_lang].append(link)
+ # Sort first by language, then by classification
+ sorted_links = []
+ sorted_links_all = []
+ for other_lang in sorted(list(links_per_lang.keys()) + [lang]):
+ if other_lang == lang:
+ sorted_links_all.append((lang, classification, taxonomy.get_classification_friendly_name(classification, lang)))
+ else:
+ links = hierarchy_utils.sort_classifications(taxonomy, links_per_lang[other_lang], other_lang)
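+                    # Keep only classifications that actually get a page in the
+                    # other language (the second tuple element is the generate_list flag).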
+ links = [(other_lang, other_classification,
+ taxonomy.get_classification_friendly_name(other_classification, other_lang))
+ for other_classification in links if post_lists_per_lang[other_lang].get(other_classification, ('', False, False))[1]]
+ sorted_links.extend(links)
+ sorted_links_all.extend(links)
+ # Store result in context and kw
+ context['has_other_languages'] = True
+ context['other_languages'] = sorted_links
+ context['all_languages'] = sorted_links_all
+ kw['other_languages'] = sorted_links
+ kw['all_languages'] = sorted_links_all
+ else:
+ context['has_other_languages'] = False
+ # Allow other plugins to modify the result
+ blinker.signal('generate_classification_page').send({
+ 'site': self.site,
+ 'taxonomy': taxonomy,
+ 'classification': classification,
+ 'lang': lang,
+ 'posts': filtered_posts,
+ 'context': context,
+ 'kw': kw,
+ })
+ # Decide what to do
+ if taxonomy.has_hierarchy and taxonomy.show_list_as_subcategories_list:
+ # Determine whether there are subcategories
+ node = self.site.hierarchy_lookup_per_classification[taxonomy.classification_name][lang][classification]
+ # Are there subclassifications?
+ if len(node.children) > 0:
+ # Yes: create list with subclassifications instead of list of posts
+ if generate_list:
+ yield self._generate_subclassification_page(taxonomy, node, context, kw, lang)
+ return
+ # Generate RSS feed
+ if generate_rss and kw["generate_rss"] and not taxonomy.always_disable_rss:
+ yield self._generate_classification_page_as_rss(taxonomy, classification, filtered_posts, context['title'], context.get("description"), kw, lang)
+
+ # Generate Atom feed
+ if generate_atom and kw["generate_atom"] and not taxonomy.always_disable_atom:
+ yield self._generate_classification_page_as_atom(taxonomy, classification, filtered_posts, context, kw, lang)
+
+ # Render HTML
+ if generate_list and taxonomy.show_list_as_index:
+ yield self._generate_classification_page_as_index(taxonomy, classification, filtered_posts, context, kw, lang)
+ elif generate_list:
+ yield self._generate_classification_page_as_list(taxonomy, classification, filtered_posts, context, kw, lang)
+
+ def gen_tasks(self):
+ """Render the tag pages and feeds."""
+ self.site.scan_posts()
+ yield self.group_task()
+
+ # Cache classification sets per language for taxonomies where
+ # add_other_languages_variable is True.
+ classification_set_per_lang = {}
+ for taxonomy in self.site.taxonomy_plugins.values():
+ if taxonomy.add_other_languages_variable:
+ lookup = self.site.posts_per_classification[taxonomy.classification_name]
+ cspl = {lang: set(lookup[lang].keys()) for lang in lookup}
+ classification_set_per_lang[taxonomy.classification_name] = cspl
+
+ # Collect post lists for classification pages and determine whether
+ # they should be generated.
+ post_lists_per_lang = {}
+ for taxonomy in self.site.taxonomy_plugins.values():
+ plpl = {}
+ for lang in self.site.config["TRANSLATIONS"]:
+ result = {}
+ for classification, posts in self.site.posts_per_classification[taxonomy.classification_name][lang].items():
+ # Filter list
+ filtered_posts = self._filter_list(posts, lang)
+ if len(filtered_posts) == 0 and taxonomy.omit_empty_classifications:
+ generate_list = generate_rss = generate_atom = False
+ else:
+ # Should we create this list?
+ generate_list = taxonomy.should_generate_classification_page(classification, filtered_posts, lang)
+ generate_rss = taxonomy.should_generate_rss_for_classification_page(classification, filtered_posts, lang)
+ generate_atom = taxonomy.should_generate_atom_for_classification_page(classification, filtered_posts, lang)
+ result[classification] = (filtered_posts, generate_list, generate_rss, generate_atom)
+ plpl[lang] = result
+ post_lists_per_lang[taxonomy.classification_name] = plpl
+
+ # Now generate pages
+ for lang in self.site.config["TRANSLATIONS"]:
+            # Tag and category classifications may share the same overview page;
+            # detect that case explicitly:
+ ignore_plugins_for_overview = set()
+ if 'tag' in self.site.taxonomy_plugins and 'category' in self.site.taxonomy_plugins and self.site.link("tag_index", None, lang) == self.site.link("category_index", None, lang):
+ # Block both plugins from creating overviews
+ ignore_plugins_for_overview.add(self.site.taxonomy_plugins['tag'])
+ ignore_plugins_for_overview.add(self.site.taxonomy_plugins['category'])
+ for taxonomy in self.site.taxonomy_plugins.values():
+ if not taxonomy.is_enabled(lang):
+ continue
+ # Generate list of classifications (i.e. classification overview)
+ if taxonomy not in ignore_plugins_for_overview:
+ if taxonomy.template_for_classification_overview is not None:
+ for task in self._generate_classification_overview(taxonomy, lang):
+ yield task
+
+ # Process classifications
+ for classification, (filtered_posts, generate_list, generate_rss, generate_atom) in post_lists_per_lang[taxonomy.classification_name][lang].items():
+ for task in self._generate_classification_page(taxonomy, classification, filtered_posts,
+ generate_list, generate_rss, generate_atom, lang,
+ post_lists_per_lang[taxonomy.classification_name],
+ classification_set_per_lang.get(taxonomy.classification_name)):
+ yield task
+ # In case we are ignoring plugins for overview, we must have a collision for
+ # tags and categories. Handle this special case with extra code.
+ if ignore_plugins_for_overview:
+ for task in self._generate_tag_and_category_overview(self.site.taxonomy_plugins['tag'], self.site.taxonomy_plugins['category'], lang):
+ yield task
diff --git a/nikola/plugins/template/__init__.py b/nikola/plugins/template/__init__.py
index d5efd61..a530db4 100644
--- a/nikola/plugins/template/__init__.py
+++ b/nikola/plugins/template/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
diff --git a/nikola/plugins/template/jinja.plugin b/nikola/plugins/template/jinja.plugin
index 78fd41b..629b20e 100644
--- a/nikola/plugins/template/jinja.plugin
+++ b/nikola/plugins/template/jinja.plugin
@@ -9,5 +9,5 @@ website = https://getnikola.com/
description = Support for Jinja2 templates.
[Nikola]
-plugincategory = Template
+PluginCategory = Template
diff --git a/nikola/plugins/template/jinja.py b/nikola/plugins/template/jinja.py
index 5a2135f..7795739 100644
--- a/nikola/plugins/template/jinja.py
+++ b/nikola/plugins/template/jinja.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -24,21 +24,20 @@
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
"""Jinja template handler."""
-from __future__ import unicode_literals
-import os
import io
import json
+import os
+
+from nikola.plugin_categories import TemplateSystem
+from nikola.utils import makedirs, req_missing, sort_posts, _smartjoin_filter
+
try:
import jinja2
from jinja2 import meta
except ImportError:
- jinja2 = None # NOQA
-
-from nikola.plugin_categories import TemplateSystem
-from nikola.utils import makedirs, req_missing
+ jinja2 = None
class JinjaTemplates(TemplateSystem):
@@ -65,6 +64,8 @@ class JinjaTemplates(TemplateSystem):
self.lookup.trim_blocks = True
self.lookup.lstrip_blocks = True
self.lookup.filters['tojson'] = json.dumps
+ self.lookup.filters['sort_posts'] = sort_posts
+ self.lookup.filters['smartjoin'] = _smartjoin_filter
self.lookup.globals['enumerate'] = enumerate
self.lookup.globals['isinstance'] = isinstance
self.lookup.globals['tuple'] = tuple
@@ -107,7 +108,7 @@ class JinjaTemplates(TemplateSystem):
"""Find dependencies for a template string."""
deps = set([])
ast = self.lookup.parse(text)
- dep_names = meta.find_referenced_templates(ast)
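+        # meta.find_referenced_templates() yields None for templates it cannot
+        # resolve statically; drop those entries.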
+ dep_names = [d for d in meta.find_referenced_templates(ast) if d]
for dep_name in dep_names:
filename = self.lookup.loader.get_source(self.lookup, dep_name)[1]
sub_deps = [filename] + self.get_deps(filename)
@@ -117,7 +118,7 @@ class JinjaTemplates(TemplateSystem):
def get_deps(self, filename):
"""Return paths to dependencies for the template loaded from filename."""
- with io.open(filename, 'r', encoding='utf-8') as fd:
+ with io.open(filename, 'r', encoding='utf-8-sig') as fd:
text = fd.read()
return self.get_string_deps(text)
diff --git a/nikola/plugins/template/mako.plugin b/nikola/plugins/template/mako.plugin
index 308d291..2d353bf 100644
--- a/nikola/plugins/template/mako.plugin
+++ b/nikola/plugins/template/mako.plugin
@@ -9,5 +9,5 @@ website = https://getnikola.com/
description = Support for Mako templates.
[Nikola]
-plugincategory = Template
+PluginCategory = Template
diff --git a/nikola/plugins/template/mako.py b/nikola/plugins/template/mako.py
index 0c9bb64..30e2041 100644
--- a/nikola/plugins/template/mako.py
+++ b/nikola/plugins/template/mako.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2016 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,12 +26,9 @@
"""Mako template handler."""
-from __future__ import unicode_literals, print_function, absolute_import
import io
import os
import shutil
-import sys
-import tempfile
from mako import exceptions, util, lexer, parsetree
from mako.lookup import TemplateLookup
@@ -39,9 +36,9 @@ from mako.template import Template
from markupsafe import Markup # It's ok, Mako requires it
from nikola.plugin_categories import TemplateSystem
-from nikola.utils import makedirs, get_logger, STDERR_HANDLER
+from nikola.utils import makedirs, get_logger
-LOGGER = get_logger('mako', STDERR_HANDLER)
+LOGGER = get_logger('mako')
class MakoTemplates(TemplateSystem):
@@ -57,7 +54,7 @@ class MakoTemplates(TemplateSystem):
def get_string_deps(self, text, filename=None):
"""Find dependencies for a template string."""
- lex = lexer.Lexer(text=text, filename=filename)
+ lex = lexer.Lexer(text=text, filename=filename, input_encoding='utf-8')
lex.parse()
deps = []
@@ -68,7 +65,12 @@ class MakoTemplates(TemplateSystem):
# Some templates will include "foo.tmpl" and we need paths, so normalize them
# using the template lookup
for i, d in enumerate(deps):
- deps[i] = self.get_template_path(d)
+ dep = self.get_template_path(d)
+ if dep:
+ deps[i] = dep
+ else:
+ LOGGER.error("Cannot find template {0} referenced in {1}",
+ d, filename)
return deps
def get_deps(self, filename):
@@ -79,13 +81,6 @@ class MakoTemplates(TemplateSystem):
def set_directories(self, directories, cache_folder):
"""Create a new template lookup with set directories."""
cache_dir = os.path.join(cache_folder, '.mako.tmp')
- # Workaround for a Mako bug, Issue #825
- if sys.version_info[0] == 2:
- try:
- os.path.abspath(cache_dir).decode('ascii')
- except UnicodeEncodeError:
- cache_dir = tempfile.mkdtemp()
- LOGGER.warning('Because of a Mako bug, setting cache_dir to {0}'.format(cache_dir))
if os.path.exists(cache_dir):
shutil.rmtree(cache_dir)
self.directories = directories
@@ -103,6 +98,7 @@ class MakoTemplates(TemplateSystem):
self.lookup = TemplateLookup(
directories=self.directories,
module_directory=self.cache_dir,
+ input_encoding='utf-8',
output_encoding='utf-8')
def set_site(self, site):
@@ -135,9 +131,10 @@ class MakoTemplates(TemplateSystem):
dep_filenames = self.get_deps(template.filename)
deps = [template.filename]
for fname in dep_filenames:
- deps += [fname] + self.get_deps(fname)
- self.cache[template_name] = deps
- return list(self.cache[template_name])
+            # dependency paths use forward slashes here, even on Windows
+ deps += self.template_deps(fname.split('/')[-1])
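+        # Deduplicate: the same template can be reached through several includes.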
+ self.cache[template_name] = list(set(deps))
+ return self.cache[template_name]
def get_template_path(self, template_name):
"""Get the path to a template or return None."""