path: root/nikola/plugins/command
author     Unit 193 <unit193@unit193.net>  2021-02-03 19:17:00 -0500
committer  Unit 193 <unit193@unit193.net>  2021-02-03 19:17:00 -0500
commit     3a0d66f07b112b6d2bdc2b57bbf717a89a351ce6 (patch)
tree       a7cf56282e54f05785243bc1e903d6594f2c06ba /nikola/plugins/command
parent     787b97a4cb24330b36f11297c6d3a7a473a907d0 (diff)
New upstream version 8.1.2. (upstream/8.1.2)
Diffstat (limited to 'nikola/plugins/command')
-rw-r--r--  nikola/plugins/command/__init__.py | 2
-rw-r--r--  nikola/plugins/command/auto.plugin | 4
-rw-r--r--  nikola/plugins/command/auto/__init__.py | 695
l---------  nikola/plugins/command/auto/livereload.js | 2
-rw-r--r--  nikola/plugins/command/bootswatch_theme.plugin | 13
-rw-r--r--  nikola/plugins/command/bootswatch_theme.py | 106
-rw-r--r--  nikola/plugins/command/check.plugin | 4
-rw-r--r--  nikola/plugins/command/check.py | 228
-rw-r--r--  nikola/plugins/command/console.plugin | 4
-rw-r--r--  nikola/plugins/command/console.py | 52
-rw-r--r--  nikola/plugins/command/default_config.plugin | 13
-rw-r--r--  nikola/plugins/command/default_config.py | 54
-rw-r--r--  nikola/plugins/command/deploy.plugin | 4
-rw-r--r--  nikola/plugins/command/deploy.py | 76
-rw-r--r--  nikola/plugins/command/github_deploy.plugin | 4
-rw-r--r--  nikola/plugins/command/github_deploy.py | 129
-rw-r--r--  nikola/plugins/command/import_wordpress.plugin | 4
-rw-r--r--  nikola/plugins/command/import_wordpress.py | 480
-rw-r--r--  nikola/plugins/command/init.plugin | 4
-rw-r--r--  nikola/plugins/command/init.py | 124
-rw-r--r--  nikola/plugins/command/install_theme.plugin | 13
-rw-r--r--  nikola/plugins/command/install_theme.py | 172
-rw-r--r--  nikola/plugins/command/new_page.plugin | 4
-rw-r--r--  nikola/plugins/command/new_page.py | 5
-rw-r--r--  nikola/plugins/command/new_post.plugin | 4
-rw-r--r--  nikola/plugins/command/new_post.py | 140
-rw-r--r--  nikola/plugins/command/orphans.plugin | 4
-rw-r--r--  nikola/plugins/command/orphans.py | 4
-rw-r--r--  nikola/plugins/command/plugin.plugin | 4
-rw-r--r--  nikola/plugins/command/plugin.py | 129
-rw-r--r--  nikola/plugins/command/rst2html.plugin | 4
-rw-r--r--  nikola/plugins/command/rst2html/__init__.py | 14
-rw-r--r--  nikola/plugins/command/serve.plugin | 4
-rw-r--r--  nikola/plugins/command/serve.py | 95
-rw-r--r--  nikola/plugins/command/status.plugin | 2
-rw-r--r--  nikola/plugins/command/status.py | 60
-rw-r--r--  nikola/plugins/command/subtheme.plugin | 13
-rw-r--r--  nikola/plugins/command/subtheme.py | 150
-rw-r--r--  nikola/plugins/command/theme.plugin | 13
-rw-r--r--  nikola/plugins/command/theme.py | 393
-rw-r--r--  nikola/plugins/command/version.plugin | 4
-rw-r--r--  nikola/plugins/command/version.py | 18
42 files changed, 2089 insertions, 1162 deletions
diff --git a/nikola/plugins/command/__init__.py b/nikola/plugins/command/__init__.py
index 2aa5267..cdd1560 100644
--- a/nikola/plugins/command/__init__.py
+++ b/nikola/plugins/command/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2015 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
diff --git a/nikola/plugins/command/auto.plugin b/nikola/plugins/command/auto.plugin
index 3e2b17d..a847e14 100644
--- a/nikola/plugins/command/auto.plugin
+++ b/nikola/plugins/command/auto.plugin
@@ -5,9 +5,9 @@ module = auto
[Documentation]
author = Roberto Alsina
version = 2.1.0
-website = http://getnikola.com
+website = https://getnikola.com/
description = Automatically detect site changes, rebuild and optionally refresh a browser.
[Nikola]
-plugincategory = Command
+PluginCategory = Command
diff --git a/nikola/plugins/command/auto/__init__.py b/nikola/plugins/command/auto/__init__.py
index 71f9624..6bedcac 100644
--- a/nikola/plugins/command/auto/__init__.py
+++ b/nikola/plugins/command/auto/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2015 Roberto Alsina and others.
+# Copyright © 2012-2020 Chris Warrick, Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,66 +26,56 @@
"""Automatic rebuilds for Nikola."""
-from __future__ import print_function
-
-import json
+import asyncio
+import datetime
import mimetypes
import os
import re
+import stat
import subprocess
import sys
-import time
-try:
- from urlparse import urlparse
- from urllib2 import unquote
-except ImportError:
- from urllib.parse import urlparse, unquote # NOQA
+import typing
import webbrowser
-from wsgiref.simple_server import make_server
-import wsgiref.util
-from blinker import signal
+import pkg_resources
+
+from nikola.plugin_categories import Command
+from nikola.utils import dns_sd, req_missing, get_theme_path, makedirs
+
try:
- from ws4py.websocket import WebSocket
- from ws4py.server.wsgirefserver import WSGIServer, WebSocketWSGIRequestHandler, WebSocketWSGIHandler
- from ws4py.server.wsgiutils import WebSocketWSGIApplication
- from ws4py.messaging import TextMessage
+ import aiohttp
+ from aiohttp import web
+ from aiohttp.web_urldispatcher import StaticResource
+ from aiohttp.web_exceptions import HTTPNotFound, HTTPForbidden, HTTPMovedPermanently
+ from aiohttp.web_response import Response
+ from aiohttp.web_fileresponse import FileResponse
except ImportError:
- WebSocket = object
+ aiohttp = web = None
+ StaticResource = HTTPNotFound = HTTPForbidden = Response = FileResponse = object
+
try:
- import watchdog
from watchdog.observers import Observer
- from watchdog.events import FileSystemEventHandler, PatternMatchingEventHandler
except ImportError:
- watchdog = None
- FileSystemEventHandler = object
- PatternMatchingEventHandler = object
-
+ Observer = None
-from nikola.plugin_categories import Command
-from nikola.utils import req_missing, get_logger, get_theme_path, STDERR_HANDLER
LRJS_PATH = os.path.join(os.path.dirname(__file__), 'livereload.js')
-error_signal = signal('error')
-refresh_signal = signal('refresh')
+REBUILDING_REFRESH_DELAY = 0.35
+IDLE_REFRESH_DELAY = 0.05
-ERROR_N = '''<html>
-<head>
-</head>
-<boody>
-ERROR {}
-</body>
-</html>
-'''
+if sys.platform == 'win32':
+ asyncio.set_event_loop(asyncio.ProactorEventLoop())
class CommandAuto(Command):
-
"""Automatic rebuilds for Nikola."""
name = "auto"
- logger = None
has_server = True
doc_purpose = "builds and serves a site; automatically detects site changes, rebuilds, and optionally refreshes a browser"
+ dns_sd = None
+ delta_last_rebuild = datetime.timedelta(milliseconds=100)
+ web_runner = None # type: web.AppRunner
+
cmd_options = [
{
'name': 'port',
@@ -93,7 +83,7 @@ class CommandAuto(Command):
'long': 'port',
'default': 8000,
'type': int,
- 'help': 'Port nummber (default: 8000)',
+ 'help': 'Port number',
},
{
'name': 'address',
@@ -101,7 +91,7 @@ class CommandAuto(Command):
'long': 'address',
'type': str,
'default': '127.0.0.1',
- 'help': 'Address to bind (default: 127.0.0.1 – localhost)',
+ 'help': 'Address to bind',
},
{
'name': 'browser',
@@ -126,26 +116,50 @@ class CommandAuto(Command):
'type': bool,
'help': 'Disable the server, automate rebuilds only'
},
+ {
+ 'name': 'process',
+ 'short': 'n',
+ 'long': 'process',
+ 'default': 0,
+ 'type': int,
+ 'help': 'Number of subprocesses (nikola build argument)'
+ },
+ {
+ 'name': 'parallel-type',
+ 'short': 'P',
+ 'long': 'parallel-type',
+ 'default': 'process',
+ 'type': str,
+ 'help': "Parallelization mode ('process' or 'thread', nikola build argument)"
+ },
]
def _execute(self, options, args):
"""Start the watcher."""
- self.logger = get_logger('auto', STDERR_HANDLER)
- LRSocket.logger = self.logger
-
- if WebSocket is object and watchdog is None:
- req_missing(['ws4py', 'watchdog'], 'use the "auto" command')
- elif WebSocket is object:
- req_missing(['ws4py'], 'use the "auto" command')
- elif watchdog is None:
+ self.sockets = []
+ self.rebuild_queue = asyncio.Queue()
+ self.reload_queue = asyncio.Queue()
+ self.last_rebuild = datetime.datetime.now()
+ self.is_rebuilding = False
+
+ if aiohttp is None and Observer is None:
+ req_missing(['aiohttp', 'watchdog'], 'use the "auto" command')
+ elif aiohttp is None:
+ req_missing(['aiohttp'], 'use the "auto" command')
+ elif Observer is None:
req_missing(['watchdog'], 'use the "auto" command')
- self.cmd_arguments = ['nikola', 'build']
+ if sys.argv[0].endswith('__main__.py'):
+ self.nikola_cmd = [sys.executable, '-m', 'nikola', 'build']
+ else:
+ self.nikola_cmd = [sys.argv[0], 'build']
+
if self.site.configuration_filename != 'conf.py':
- self.cmd_arguments = ['--conf=' + self.site.configuration_filename] + self.cmd_arguments
+ self.nikola_cmd.append('--conf=' + self.site.configuration_filename)
- # Run an initial build so we are up-to-date
- subprocess.call(self.cmd_arguments)
+ if options and options.get('process'):
+ self.nikola_cmd += ['--process={}'.format(options['process']),
+ '--parallel-type={}'.format(options['parallel-type'])]
port = options and options.get('port')
self.snippet = '''<script>document.write('<script src="http://'
@@ -154,9 +168,9 @@ class CommandAuto(Command):
+ 'script>')</script>
</head>'''.format(port)
- # Do not duplicate entries -- otherwise, multiple rebuilds are triggered
+ # Deduplicate entries by using a set -- otherwise, multiple rebuilds are triggered
watched = set([
- 'templates/',
+ 'templates/'
] + [get_theme_path(name) for name in self.site.THEMES])
for item in self.site.config['post_pages']:
watched.add(os.path.dirname(item[0]))
@@ -166,8 +180,17 @@ class CommandAuto(Command):
watched.add(item)
for item in self.site.config['LISTINGS_FOLDERS']:
watched.add(item)
+ for item in self.site.config['IMAGE_FOLDERS']:
+ watched.add(item)
+ for item in self.site._plugin_places:
+ watched.add(item)
+ # Nikola itself (useful for developers)
+ watched.add(pkg_resources.resource_filename('nikola', ''))
out_folder = self.site.config['OUTPUT_FOLDER']
+ if not os.path.exists(out_folder):
+ makedirs(out_folder)
+
if options and options.get('browser'):
browser = True
else:
@@ -176,285 +199,387 @@ class CommandAuto(Command):
if options['ipv6']:
dhost = '::'
else:
- dhost = None
+ dhost = '0.0.0.0'
host = options['address'].strip('[').strip(']') or dhost
+ # Prepare asyncio event loop
+ # Required for subprocessing to work
+ loop = asyncio.get_event_loop()
+
+ # Set debug setting
+ loop.set_debug(self.site.debug)
+
# Server can be disabled (Issue #1883)
self.has_server = not options['no-server']
- # Instantiate global observer
- observer = Observer()
if self.has_server:
- # Watch output folders and trigger reloads
- observer.schedule(OurWatchHandler(self.do_refresh), out_folder, recursive=True)
+ loop.run_until_complete(self.set_up_server(host, port, out_folder))
+
+ # Run an initial build so we are up-to-date. The server is running, but we are not watching yet.
+ loop.run_until_complete(self.run_initial_rebuild())
+
+ self.wd_observer = Observer()
+ # Watch output folders and trigger reloads
+ if self.has_server:
+ self.wd_observer.schedule(NikolaEventHandler(self.reload_page, loop), out_folder, recursive=True)
# Watch input folders and trigger rebuilds
for p in watched:
if os.path.exists(p):
- observer.schedule(OurWatchHandler(self.do_rebuild), p, recursive=True)
+ self.wd_observer.schedule(NikolaEventHandler(self.queue_rebuild, loop), p, recursive=True)
# Watch config file (a bit of a hack, but we need a directory)
_conf_fn = os.path.abspath(self.site.configuration_filename or 'conf.py')
_conf_dn = os.path.dirname(_conf_fn)
- observer.schedule(ConfigWatchHandler(_conf_fn, self.do_rebuild), _conf_dn, recursive=False)
+ self.wd_observer.schedule(ConfigEventHandler(_conf_fn, self.queue_rebuild, loop), _conf_dn, recursive=False)
+ self.wd_observer.start()
- try:
- self.logger.info("Watching files for changes...")
- observer.start()
- except KeyboardInterrupt:
- pass
+ win_sleeper = None
+ # https://bugs.python.org/issue23057 (fixed in Python 3.8)
+ if sys.platform == 'win32' and sys.version_info < (3, 8):
+ win_sleeper = asyncio.ensure_future(windows_ctrlc_workaround())
- parent = self
-
- class Mixed(WebSocketWSGIApplication):
-
- """A class that supports WS and HTTP protocols on the same port."""
+ if not self.has_server:
+ self.logger.info("Watching for changes...")
+ # Run the event loop forever (no server mode).
+ try:
+ # Run rebuild queue
+ loop.run_until_complete(self.run_rebuild_queue())
- def __call__(self, environ, start_response):
- if environ.get('HTTP_UPGRADE') is None:
- return parent.serve_static(environ, start_response)
- return super(Mixed, self).__call__(environ, start_response)
+ loop.run_forever()
+ except KeyboardInterrupt:
+ pass
+ finally:
+ if win_sleeper:
+ win_sleeper.cancel()
+ self.wd_observer.stop()
+ self.wd_observer.join()
+ loop.close()
+ return
- if self.has_server:
- ws = make_server(
- host, port, server_class=WSGIServer,
- handler_class=WebSocketWSGIRequestHandler,
- app=Mixed(handler_cls=LRSocket)
- )
- ws.initialize_websockets_manager()
- self.logger.info("Serving HTTP on {0} port {1}...".format(host, port))
- if browser:
- if options['ipv6'] or '::' in host:
- server_url = "http://[{0}]:{1}/".format(host, port)
- else:
- server_url = "http://{0}:{1}/".format(host, port)
+ if options['ipv6'] or '::' in host:
+ server_url = "http://[{0}]:{1}/".format(host, port)
+ else:
+ server_url = "http://{0}:{1}/".format(host, port)
+ self.logger.info("Serving on {0} ...".format(server_url))
- self.logger.info("Opening {0} in the default web browser...".format(server_url))
- # Yes, this is racy
- webbrowser.open('http://{0}:{1}'.format(host, port))
+ if browser:
+ # Some browsers fail to load 0.0.0.0 (Issue #2755)
+ if host == '0.0.0.0':
+ server_url = "http://127.0.0.1:{0}/".format(port)
+ self.logger.info("Opening {0} in the default web browser...".format(server_url))
+ webbrowser.open(server_url)
- try:
- ws.serve_forever()
- except KeyboardInterrupt:
- self.logger.info("Server is shutting down.")
- # This is a hack, but something is locking up in a futex
- # and exit() doesn't work.
- os.kill(os.getpid(), 15)
- else:
- # Workaround: can’t have nothing running (instant exit)
- # but also can’t join threads (no way to exit)
- # The joys of threading.
- try:
- while True:
- time.sleep(1)
- except KeyboardInterrupt:
- self.logger.info("Shutting down.")
- # This is a hack, but something is locking up in a futex
- # and exit() doesn't work.
- os.kill(os.getpid(), 15)
+ # Run the event loop forever and handle shutdowns.
+ try:
+ # Run rebuild queue
+ rebuild_queue_fut = asyncio.ensure_future(self.run_rebuild_queue())
+ reload_queue_fut = asyncio.ensure_future(self.run_reload_queue())
- def do_rebuild(self, event):
+ self.dns_sd = dns_sd(port, (options['ipv6'] or '::' in host))
+ loop.run_forever()
+ except KeyboardInterrupt:
+ pass
+ finally:
+ self.logger.info("Server is shutting down.")
+ if win_sleeper:
+ win_sleeper.cancel()
+ if self.dns_sd:
+ self.dns_sd.Reset()
+ rebuild_queue_fut.cancel()
+ reload_queue_fut.cancel()
+ loop.run_until_complete(self.web_runner.cleanup())
+ self.wd_observer.stop()
+ self.wd_observer.join()
+ loop.close()
+
+ async def set_up_server(self, host: str, port: int, out_folder: str) -> None:
+ """Set up aiohttp server and start it."""
+ webapp = web.Application()
+ webapp.router.add_get('/livereload.js', self.serve_livereload_js)
+ webapp.router.add_get('/robots.txt', self.serve_robots_txt)
+ webapp.router.add_route('*', '/livereload', self.websocket_handler)
+ resource = IndexHtmlStaticResource(True, self.snippet, '', out_folder)
+ webapp.router.register_resource(resource)
+ webapp.on_shutdown.append(self.remove_websockets)
+
+ self.web_runner = web.AppRunner(webapp)
+ await self.web_runner.setup()
+ website = web.TCPSite(self.web_runner, host, port)
+ await website.start()
+
+ async def run_initial_rebuild(self) -> None:
+ """Run an initial rebuild."""
+ await self._rebuild_site()
+ # If there are any clients, have them reload the root.
+ await self._send_reload_command(self.site.config['INDEX_FILE'])
+
+ async def queue_rebuild(self, event) -> None:
"""Rebuild the site."""
# Move events have a dest_path, some editors like gedit use a
# move on larger save operations for write protection
event_path = event.dest_path if hasattr(event, 'dest_path') else event.src_path
- fname = os.path.basename(event_path)
- if (fname.endswith('~') or
- fname.startswith('.') or
- os.path.isdir(event_path)): # Skip on folders, these are usually duplicates
+ if sys.platform == 'win32':
+ # Windows hidden files support
+ is_hidden = os.stat(event_path).st_file_attributes & stat.FILE_ATTRIBUTE_HIDDEN
+ else:
+ is_hidden = False
+ has_hidden_component = any(p.startswith('.') for p in event_path.split(os.sep))
+ if (is_hidden or has_hidden_component or
+ '__pycache__' in event_path or
+ event_path.endswith(('.pyc', '.pyo', '.pyd', '_bak', '~')) or
+ event.is_directory): # Skip on folders, these are usually duplicates
return
- self.logger.info('REBUILDING SITE (from {0})'.format(event_path))
- p = subprocess.Popen(self.cmd_arguments, stderr=subprocess.PIPE)
- error = p.stderr.read()
- errord = error.decode('utf-8')
- if p.wait() != 0:
- self.logger.error(errord)
- error_signal.send(error=errord)
+
+ self.logger.debug('Queuing rebuild from {0}'.format(event_path))
+ await self.rebuild_queue.put((datetime.datetime.now(), event_path))
+
+ async def run_rebuild_queue(self) -> None:
+ """Run rebuilds from a queue (Nikola can only build in a single instance)."""
+ while True:
+ date, event_path = await self.rebuild_queue.get()
+ if date < (self.last_rebuild + self.delta_last_rebuild):
+ self.logger.debug("Skipping rebuild from {0} (within delta)".format(event_path))
+ continue
+ await self._rebuild_site(event_path)
+
+ async def _rebuild_site(self, event_path: typing.Optional[str] = None) -> None:
+ """Rebuild the site."""
+ self.is_rebuilding = True
+ self.last_rebuild = datetime.datetime.now()
+ if event_path:
+ self.logger.info('REBUILDING SITE (from {0})'.format(event_path))
else:
- print(errord)
+ self.logger.info('REBUILDING SITE')
+
+ p = await asyncio.create_subprocess_exec(*self.nikola_cmd, stderr=subprocess.PIPE)
+ exit_code = await p.wait()
+ out = (await p.stderr.read()).decode('utf-8')
- def do_refresh(self, event):
- """Refresh the page."""
+ if exit_code != 0:
+ self.logger.error("Rebuild failed\n" + out)
+ await self.send_to_websockets({'command': 'alert', 'message': out})
+ else:
+ self.logger.info("Rebuild successful\n" + out)
+
+ self.is_rebuilding = False
+
+ async def run_reload_queue(self) -> None:
+ """Send reloads from a queue to limit CPU usage."""
+ while True:
+ p = await self.reload_queue.get()
+ self.logger.info('REFRESHING: {0}'.format(p))
+ await self._send_reload_command(p)
+ if self.is_rebuilding:
+ await asyncio.sleep(REBUILDING_REFRESH_DELAY)
+ else:
+ await asyncio.sleep(IDLE_REFRESH_DELAY)
+
+ async def _send_reload_command(self, path: str) -> None:
+ """Send a reload command."""
+ await self.send_to_websockets({'command': 'reload', 'path': path, 'liveCSS': True})
+
+ async def reload_page(self, event) -> None:
+ """Reload the page."""
# Move events have a dest_path, some editors like gedit use a
# move on larger save operations for write protection
- event_path = event.dest_path if hasattr(event, 'dest_path') else event.src_path
- self.logger.info('REFRESHING: {0}'.format(event_path))
- p = os.path.relpath(event_path, os.path.abspath(self.site.config['OUTPUT_FOLDER']))
- refresh_signal.send(path=p)
-
- def serve_static(self, environ, start_response):
- """Trivial static file server."""
- uri = wsgiref.util.request_uri(environ)
- p_uri = urlparse(uri)
- f_path = os.path.join(self.site.config['OUTPUT_FOLDER'], *[unquote(x) for x in p_uri.path.split('/')])
-
- # ‘Pretty’ URIs and root are assumed to be HTML
- mimetype = 'text/html' if uri.endswith('/') else mimetypes.guess_type(uri)[0] or 'application/octet-stream'
-
- if os.path.isdir(f_path):
- if not f_path.endswith('/'): # Redirect to avoid breakage
- start_response('301 Redirect', [('Location', p_uri.path + '/')])
- return []
- f_path = os.path.join(f_path, self.site.config['INDEX_FILE'])
- mimetype = 'text/html'
-
- if p_uri.path == '/robots.txt':
- start_response('200 OK', [('Content-type', 'text/plain')])
- return ['User-Agent: *\nDisallow: /\n'.encode('utf-8')]
- elif os.path.isfile(f_path):
- with open(f_path, 'rb') as fd:
- start_response('200 OK', [('Content-type', mimetype)])
- return [self.file_filter(mimetype, fd.read())]
- elif p_uri.path == '/livereload.js':
- with open(LRJS_PATH, 'rb') as fd:
- start_response('200 OK', [('Content-type', mimetype)])
- return [self.file_filter(mimetype, fd.read())]
- start_response('404 ERR', [])
- return [self.file_filter('text/html', ERROR_N.format(404).format(uri).encode('utf-8'))]
-
- def file_filter(self, mimetype, data):
- """Apply necessary changes to document before serving."""
- if mimetype == 'text/html':
- data = data.decode('utf8')
- data = self.remove_base_tag(data)
- data = self.inject_js(data)
- data = data.encode('utf8')
- return data
-
- def inject_js(self, data):
- """Inject livereload.js."""
- data = re.sub('</head>', self.snippet, data, 1, re.IGNORECASE)
- return data
-
- def remove_base_tag(self, data):
- """Comment out any <base> to allow local resolution of relative URLs."""
- data = re.sub(r'<base\s([^>]*)>', '<!--base \g<1>-->', data, re.IGNORECASE)
- return data
-
-
-pending = []
-
-
-class LRSocket(WebSocket):
-
- """Speak Livereload protocol."""
-
- def __init__(self, *a, **kw):
- """Initialize protocol handler."""
- refresh_signal.connect(self.notify)
- error_signal.connect(self.send_error)
- super(LRSocket, self).__init__(*a, **kw)
-
- def received_message(self, message):
- """Handle received message."""
- message = json.loads(message.data.decode('utf8'))
- self.logger.info('<--- {0}'.format(message))
- response = None
- if message['command'] == 'hello': # Handshake
- response = {
- 'command': 'hello',
- 'protocols': [
- 'http://livereload.com/protocols/official-7',
- ],
- 'serverName': 'nikola-livereload',
- }
- elif message['command'] == 'info': # Someone connected
- self.logger.info('****** Browser connected: {0}'.format(message.get('url')))
- self.logger.info('****** sending {0} pending messages'.format(len(pending)))
- while pending:
- msg = pending.pop()
- self.logger.info('---> {0}'.format(msg.data))
- self.send(msg, msg.is_binary)
+ if event:
+ event_path = event.dest_path if hasattr(event, 'dest_path') else event.src_path
else:
- response = {
- 'command': 'alert',
- 'message': 'HEY',
- }
- if response is not None:
- response = json.dumps(response)
- self.logger.info('---> {0}'.format(response))
- response = TextMessage(response)
- self.send(response, response.is_binary)
-
- def notify(self, sender, path):
- """Send reload requests to the client."""
- p = os.path.join('/', path)
- message = {
- 'command': 'reload',
- 'liveCSS': True,
- 'path': p,
- }
- response = json.dumps(message)
- self.logger.info('---> {0}'.format(p))
- response = TextMessage(response)
- if self.stream is None: # No client connected or whatever
- pending.append(response)
- else:
- self.send(response, response.is_binary)
+ event_path = self.site.config['OUTPUT_FOLDER']
+ p = os.path.relpath(event_path, os.path.abspath(self.site.config['OUTPUT_FOLDER'])).replace(os.sep, '/')
+ await self.reload_queue.put(p)
+
+ async def serve_livereload_js(self, request):
+ """Handle requests to /livereload.js and serve the JS file."""
+ return FileResponse(LRJS_PATH)
+
+ async def serve_robots_txt(self, request):
+ """Handle requests to /robots.txt."""
+ return Response(body=b'User-Agent: *\nDisallow: /\n', content_type='text/plain', charset='utf-8')
+
+ async def websocket_handler(self, request):
+ """Handle requests to /livereload and initiate WebSocket communication."""
+ ws = web.WebSocketResponse()
+ await ws.prepare(request)
+ self.sockets.append(ws)
+
+ while True:
+ msg = await ws.receive()
+
+ self.logger.debug("Received message: {0}".format(msg))
+ if msg.type == aiohttp.WSMsgType.TEXT:
+ message = msg.json()
+ if message['command'] == 'hello':
+ response = {
+ 'command': 'hello',
+ 'protocols': [
+ 'http://livereload.com/protocols/official-7',
+ ],
+ 'serverName': 'Nikola Auto (livereload)',
+ }
+ await ws.send_json(response)
+ elif message['command'] != 'info':
+ self.logger.warning("Unknown command in message: {0}".format(message))
+ elif msg.type in (aiohttp.WSMsgType.CLOSED, aiohttp.WSMsgType.CLOSING):
+ break
+ elif msg.type == aiohttp.WSMsgType.CLOSE:
+ self.logger.debug("Closing WebSocket")
+ await ws.close()
+ break
+ elif msg.type == aiohttp.WSMsgType.ERROR:
+ self.logger.error('WebSocket connection closed with exception {0}'.format(ws.exception()))
+ break
+ else:
+ self.logger.warning("Received unknown message: {0}".format(msg))
+
+ self.sockets.remove(ws)
+ self.logger.debug("WebSocket connection closed: {0}".format(ws))
+
+ return ws
+
+ async def remove_websockets(self, app) -> None:
+ """Remove all websockets."""
+ for ws in self.sockets:
+ await ws.close()
+ self.sockets.clear()
+
+ async def send_to_websockets(self, message: dict) -> None:
+ """Send a message to all open WebSockets."""
+ to_delete = []
+ for ws in self.sockets:
+ if ws.closed:
+ to_delete.append(ws)
+ continue
- def send_error(self, sender, error=None):
- """Send reload requests to the client."""
- if self.stream is None: # No client connected or whatever
- return
- message = {
- 'command': 'alert',
- 'message': error,
- }
- response = json.dumps(message)
- response = TextMessage(response)
- if self.stream is None: # No client connected or whatever
- pending.append(response)
+ try:
+ await ws.send_json(message)
+ if ws._close_code:
+ await ws.close()
+ to_delete.append(ws)
+ except RuntimeError as e:
+ if 'closed' in e.args[0]:
+ self.logger.warning("WebSocket {0} closed uncleanly".format(ws))
+ to_delete.append(ws)
+ else:
+ raise
+
+ for ws in to_delete:
+ self.sockets.remove(ws)
+
+
+async def windows_ctrlc_workaround() -> None:
+ """Work around bpo-23057."""
+ # https://bugs.python.org/issue23057
+ while True:
+ await asyncio.sleep(1)
+
+
+class IndexHtmlStaticResource(StaticResource):
+ """A StaticResource implementation that serves /index.html in directory roots."""
+
+ modify_html = True
+ snippet = "</head>"
+
+ def __init__(self, modify_html=True, snippet="</head>", *args, **kwargs):
+ """Initialize a resource."""
+ self.modify_html = modify_html
+ self.snippet = snippet
+ super().__init__(*args, **kwargs)
+
+ async def _handle(self, request: 'web.Request') -> 'web.Response':
+ """Handle incoming requests (pass to handle_file)."""
+ filename = request.match_info['filename']
+ return await self.handle_file(request, filename)
+
+ async def handle_file(self, request: 'web.Request', filename: str, from_index=None) -> 'web.Response':
+ """Handle file requests."""
+ try:
+ filepath = self._directory.joinpath(filename).resolve()
+ if not self._follow_symlinks:
+ filepath.relative_to(self._directory)
+ except (ValueError, FileNotFoundError) as error:
+ # relatively safe
+ raise HTTPNotFound() from error
+ except Exception as error:
+ # perm error or other kind!
+ request.app.logger.exception(error)
+ raise HTTPNotFound() from error
+
+ # on opening a dir, load it's contents if allowed
+ if filepath.is_dir():
+ if filename.endswith('/') or not filename:
+ ret = await self.handle_file(request, filename + 'index.html', from_index=filename)
+ else:
+ # Redirect and add trailing slash so relative links work (Issue #3140)
+ new_url = request.rel_url.path + '/'
+ if request.rel_url.query_string:
+ new_url += '?' + request.rel_url.query_string
+ raise HTTPMovedPermanently(new_url)
+ elif filepath.is_file():
+ ct, encoding = mimetypes.guess_type(str(filepath))
+ encoding = encoding or 'utf-8'
+ if ct == 'text/html' and self.modify_html:
+ if sys.version_info[0] == 3 and sys.version_info[1] <= 5:
+ # Python 3.4 and 3.5 do not accept pathlib.Path objects in calls to open()
+ filepath = str(filepath)
+ with open(filepath, 'r', encoding=encoding) as fh:
+ text = fh.read()
+ text = self.transform_html(text)
+ ret = Response(text=text, content_type=ct, charset=encoding)
+ else:
+ ret = FileResponse(filepath, chunk_size=self._chunk_size)
+ elif from_index:
+ filepath = self._directory.joinpath(from_index).resolve()
+ try:
+ return Response(text=self._directory_as_html(filepath),
+ content_type="text/html")
+ except PermissionError:
+ raise HTTPForbidden
else:
- self.send(response, response.is_binary)
+ raise HTTPNotFound
+ return ret
-class OurWatchHandler(FileSystemEventHandler):
+ def transform_html(self, text: str) -> str:
+ """Apply some transforms to HTML content."""
+ # Inject livereload.js
+ text = text.replace('</head>', self.snippet, 1)
+ # Disable <base> tag
+ text = re.sub(r'<base\s([^>]*)>', r'<!--base \g<1>-->', text, flags=re.IGNORECASE)
+ return text
- """A Nikola-specific handler for Watchdog."""
- def __init__(self, function):
+# Based on code from the 'hachiko' library by John Biesnecker — thanks!
+# https://github.com/biesnecker/hachiko
+class NikolaEventHandler:
+ """A Nikola-specific event handler for Watchdog. Based on code from hachiko."""
+
+ def __init__(self, function, loop):
"""Initialize the handler."""
self.function = function
- super(OurWatchHandler, self).__init__()
+ self.loop = loop
- def on_any_event(self, event):
- """Call the provided function on any event."""
- self.function(event)
+ async def on_any_event(self, event):
+ """Handle all file events."""
+ await self.function(event)
+ def dispatch(self, event):
+ """Dispatch events to handler."""
+ self.loop.call_soon_threadsafe(asyncio.ensure_future, self.on_any_event(event))
-class ConfigWatchHandler(FileSystemEventHandler):
+class ConfigEventHandler(NikolaEventHandler):
"""A Nikola-specific handler for Watchdog that handles the config file (as a workaround)."""
- def __init__(self, configuration_filename, function):
+ def __init__(self, configuration_filename, function, loop):
"""Initialize the handler."""
self.configuration_filename = configuration_filename
self.function = function
+ self.loop = loop
- def on_any_event(self, event):
- """Call the provided function on any event."""
+ async def on_any_event(self, event):
+ """Handle file events if they concern the configuration file."""
if event._src_path == self.configuration_filename:
- self.function(event)
-
-
-try:
- # Monkeypatch to hide Broken Pipe Errors
- f = WebSocketWSGIHandler.finish_response
-
- if sys.version_info[0] == 3:
- EX = BrokenPipeError # NOQA
- else:
- EX = IOError
-
- def finish_response(self):
- """Monkeypatched finish_response that ignores broken pipes."""
- try:
- f(self)
- except EX: # Client closed the connection, not a real error
- pass
-
- WebSocketWSGIHandler.finish_response = finish_response
-except NameError:
- # In case there is no WebSocketWSGIHandler because of a failed import.
- pass
+ await self.function(event)
diff --git a/nikola/plugins/command/auto/livereload.js b/nikola/plugins/command/auto/livereload.js
index b4cafb3..282dce5 120000
--- a/nikola/plugins/command/auto/livereload.js
+++ b/nikola/plugins/command/auto/livereload.js
@@ -1 +1 @@
-../../../../bower_components/livereload-js/dist/livereload.js
\ No newline at end of file
+../../../../npm_assets/node_modules/livereload-js/dist/livereload.js
\ No newline at end of file
diff --git a/nikola/plugins/command/bootswatch_theme.plugin b/nikola/plugins/command/bootswatch_theme.plugin
deleted file mode 100644
index fc25045..0000000
--- a/nikola/plugins/command/bootswatch_theme.plugin
+++ /dev/null
@@ -1,13 +0,0 @@
-[Core]
-name = bootswatch_theme
-module = bootswatch_theme
-
-[Documentation]
-author = Roberto Alsina
-version = 1.0
-website = http://getnikola.com
-description = Given a swatch name and a parent theme, creates a custom theme.
-
-[Nikola]
-plugincategory = Command
-
diff --git a/nikola/plugins/command/bootswatch_theme.py b/nikola/plugins/command/bootswatch_theme.py
deleted file mode 100644
index b5644a1..0000000
--- a/nikola/plugins/command/bootswatch_theme.py
+++ /dev/null
@@ -1,106 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright © 2012-2015 Roberto Alsina and others.
-
-# Permission is hereby granted, free of charge, to any
-# person obtaining a copy of this software and associated
-# documentation files (the "Software"), to deal in the
-# Software without restriction, including without limitation
-# the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the
-# Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice
-# shall be included in all copies or substantial portions of
-# the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
-# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
-# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
-# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
-# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-"""Given a swatch name from bootswatch.com and a parent theme, creates a custom theme."""
-
-from __future__ import print_function
-import os
-import requests
-
-from nikola.plugin_categories import Command
-from nikola import utils
-
-LOGGER = utils.get_logger('bootswatch_theme', utils.STDERR_HANDLER)
-
-
-class CommandBootswatchTheme(Command):
-
- """Given a swatch name from bootswatch.com and a parent theme, creates a custom theme."""
-
- name = "bootswatch_theme"
- doc_usage = "[options]"
- doc_purpose = "given a swatch name from bootswatch.com and a parent theme, creates a custom"\
- " theme"
- cmd_options = [
- {
- 'name': 'name',
- 'short': 'n',
- 'long': 'name',
- 'default': 'custom',
- 'type': str,
- 'help': 'New theme name (default: custom)',
- },
- {
- 'name': 'swatch',
- 'short': 's',
- 'default': '',
- 'type': str,
- 'help': 'Name of the swatch from bootswatch.com.'
- },
- {
- 'name': 'parent',
- 'short': 'p',
- 'long': 'parent',
- 'default': 'bootstrap3',
- 'help': 'Parent theme name (default: bootstrap3)',
- },
- ]
-
- def _execute(self, options, args):
- """Given a swatch name and a parent theme, creates a custom theme."""
- name = options['name']
- swatch = options['swatch']
- if not swatch:
- LOGGER.error('The -s option is mandatory')
- return 1
- parent = options['parent']
- version = ''
-
- # See if we need bootswatch for bootstrap v2 or v3
- themes = utils.get_theme_chain(parent)
- if 'bootstrap3' not in themes and 'bootstrap3-jinja' not in themes:
- version = '2'
- elif 'bootstrap' not in themes and 'bootstrap-jinja' not in themes:
- LOGGER.warn('"bootswatch_theme" only makes sense for themes that use bootstrap')
- elif 'bootstrap3-gradients' in themes or 'bootstrap3-gradients-jinja' in themes:
- LOGGER.warn('"bootswatch_theme" doesn\'t work well with the bootstrap3-gradients family')
-
- LOGGER.info("Creating '{0}' theme from '{1}' and '{2}'".format(name, swatch, parent))
- utils.makedirs(os.path.join('themes', name, 'assets', 'css'))
- for fname in ('bootstrap.min.css', 'bootstrap.css'):
- url = 'http://bootswatch.com'
- if version:
- url += '/' + version
- url = '/'.join((url, swatch, fname))
- LOGGER.info("Downloading: " + url)
- data = requests.get(url).text
- with open(os.path.join('themes', name, 'assets', 'css', fname),
- 'wb+') as output:
- output.write(data.encode('utf-8'))
-
- with open(os.path.join('themes', name, 'parent'), 'wb+') as output:
- output.write(parent.encode('utf-8'))
- LOGGER.notice('Theme created. Change the THEME setting to "{0}" to use it.'.format(name))
diff --git a/nikola/plugins/command/check.plugin b/nikola/plugins/command/check.plugin
index e380e64..bc6ede3 100644
--- a/nikola/plugins/command/check.plugin
+++ b/nikola/plugins/command/check.plugin
@@ -5,9 +5,9 @@ module = check
[Documentation]
author = Roberto Alsina
version = 1.0
-website = http://getnikola.com
+website = https://getnikola.com/
description = Check the generated site
[Nikola]
-plugincategory = Command
+PluginCategory = Command
diff --git a/nikola/plugins/command/check.py b/nikola/plugins/command/check.py
index abf183e..cac6000 100644
--- a/nikola/plugins/command/check.py
+++ b/nikola/plugins/command/check.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2015 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,27 +26,25 @@
"""Check the generated site."""
-from __future__ import print_function
-from collections import defaultdict
+import logging
import os
import re
import sys
import time
-try:
- from urllib import unquote
- from urlparse import urlparse, urljoin, urldefrag
-except ImportError:
- from urllib.parse import unquote, urlparse, urljoin, urldefrag # NOQA
+from collections import defaultdict
+from urllib.parse import unquote, urlparse, urljoin, urldefrag
-from doit.loader import generate_tasks
import lxml.html
import requests
+from doit.loader import generate_tasks
from nikola.plugin_categories import Command
-from nikola.utils import get_logger, STDERR_HANDLER
-def _call_nikola_list(site):
+def _call_nikola_list(site, cache=None):
+ if cache is not None:
+ if 'files' in cache and 'deps' in cache:
+ return cache['files'], cache['deps']
files = []
deps = defaultdict(list)
for task in generate_tasks('render_site', site.gen_tasks('render_site', "Task", '')):
@@ -57,16 +55,19 @@ def _call_nikola_list(site):
files.extend(task.targets)
for target in task.targets:
deps[target].extend(task.file_dep)
+ if cache is not None:
+ cache['files'] = files
+ cache['deps'] = deps
return files, deps
-def real_scan_files(site):
+def real_scan_files(site, cache=None):
"""Scan for files."""
task_fnames = set([])
real_fnames = set([])
output_folder = site.config['OUTPUT_FOLDER']
# First check that all targets are generated in the right places
- for fname in _call_nikola_list(site)[0]:
+ for fname in _call_nikola_list(site, cache)[0]:
fname = fname.strip()
if fname.startswith(output_folder):
task_fnames.add(fname)
@@ -94,11 +95,9 @@ def fs_relpath_from_url_path(url_path):
class CommandCheck(Command):
-
"""Check the generated site."""
name = "check"
- logger = None
doc_usage = "[-v] (-l [--find-sources] [-r] | -f [--clean-files])"
doc_purpose = "check links and files in the generated site"
@@ -153,39 +152,41 @@ class CommandCheck(Command):
def _execute(self, options, args):
"""Check the generated site."""
- self.logger = get_logger('check', STDERR_HANDLER)
-
if not options['links'] and not options['files'] and not options['clean']:
print(self.help())
- return False
+ return 1
if options['verbose']:
- self.logger.level = 1
+ self.logger.level = logging.DEBUG
else:
- self.logger.level = 4
+ self.logger.level = logging.WARNING
+ failure = False
if options['links']:
- failure = self.scan_links(options['find_sources'], options['remote'])
+ failure |= self.scan_links(options['find_sources'], options['remote'])
if options['files']:
- failure = self.scan_files()
+ failure |= self.scan_files()
if options['clean']:
- failure = self.clean_files()
+ failure |= self.clean_files()
if failure:
return 1
existing_targets = set([])
checked_remote_targets = {}
+ cache = {}
def analyze(self, fname, find_sources=False, check_remote=False):
"""Analyze links on a page."""
rv = False
self.whitelist = [re.compile(x) for x in self.site.config['LINK_CHECK_WHITELIST']]
+ self.internal_redirects = [urljoin('/', _[0]) for _ in self.site.config['REDIRECTIONS']]
base_url = urlparse(self.site.config['BASE_URL'])
self.existing_targets.add(self.site.config['SITE_URL'])
self.existing_targets.add(self.site.config['BASE_URL'])
url_type = self.site.config['URL_TYPE']
+ atom_extension = self.site.config['ATOM_EXTENSION']
deps = {}
if find_sources:
- deps = _call_nikola_list(self.site)[1]
+ deps = _call_nikola_list(self.site, self.cache)[1]
if url_type in ('absolute', 'full_path'):
url_netloc_to_root = urlparse(self.site.config['BASE_URL']).path
@@ -196,24 +197,66 @@ class CommandCheck(Command):
# Do not look at links in the cache, which are not parsed by
# anyone and may result in false positives. Problems arise
# with galleries, for example. Full rationale: (Issue #1447)
- self.logger.notice("Ignoring {0} (in cache, links may be incorrect)".format(filename))
+ self.logger.warning("Ignoring {0} (in cache, links may be incorrect)".format(filename))
return False
if not os.path.exists(fname):
# Quietly ignore files that don’t exist; use `nikola check -f` instead (Issue #1831)
return False
- d = lxml.html.fromstring(open(filename, 'rb').read())
- for l in d.iterlinks():
+ if '.html' == fname[-5:]:
+ with open(filename, 'rb') as inf:
+ d = lxml.html.fromstring(inf.read())
+ extra_objs = lxml.html.fromstring('<html/>')
+
+ # Turn elements with a srcset attribute into individual img elements with src attributes
+ for obj in list(d.xpath('(*//img|*//source)')):
+ if 'srcset' in obj.attrib:
+ for srcset_item in obj.attrib['srcset'].split(','):
+ extra_objs.append(lxml.etree.Element('img', src=srcset_item.strip().split(' ')[0]))
+ link_elements = list(d.iterlinks()) + list(extra_objs.iterlinks())
+ # Extract links from XML formats to minimal HTML, allowing those to go through the link checks
+ elif atom_extension == filename[-len(atom_extension):]:
+ d = lxml.etree.parse(filename)
+ link_elements = lxml.html.fromstring('<html/>')
+ for elm in d.findall('*//{http://www.w3.org/2005/Atom}link'):
+ feed_link = elm.attrib['href'].split('?')[0].strip() # strip FEED_LINKS_APPEND_QUERY
+ link_elements.append(lxml.etree.Element('a', href=feed_link))
+ link_elements = list(link_elements.iterlinks())
+ elif filename.endswith('sitemap.xml') or filename.endswith('sitemapindex.xml'):
+ d = lxml.etree.parse(filename)
+ link_elements = lxml.html.fromstring('<html/>')
+ for elm in d.getroot().findall("*//{http://www.sitemaps.org/schemas/sitemap/0.9}loc"):
+ link_elements.append(lxml.etree.Element('a', href=elm.text.strip()))
+ link_elements = list(link_elements.iterlinks())
+ else: # unsupported file type
+ return False
+
+ for l in link_elements:
target = l[2]
if target == "#":
continue
- target, _ = urldefrag(target)
+ target = urldefrag(target)[0]
+
+ if any([urlparse(target).netloc.endswith(_) for _ in ['example.com', 'example.net', 'example.org']]):
+ self.logger.debug("Not testing example address \"{0}\".".format(target))
+ continue
+
+ # absolute URL to root-relative
+ if target.startswith(base_url.geturl()):
+ target = target.replace(base_url.geturl(), '/')
+
parsed = urlparse(target)
# Warn about links from https to http (mixed-security)
if base_url.netloc == parsed.netloc and base_url.scheme == "https" and parsed.scheme == "http":
- self.logger.warn("Mixed-content security for link in {0}: {1}".format(filename, target))
+ self.logger.warning("Mixed-content security for link in {0}: {1}".format(filename, target))
+
+ # Link to an internal REDIRECTIONS page
+ if target in self.internal_redirects:
+ redir_status_code = 301
+ redir_target = [_dest for _target, _dest in self.site.config['REDIRECTIONS'] if urljoin('/', _target) == target][0]
+ self.logger.warning("Remote link moved PERMANENTLY to \"{0}\" and should be updated in {1}: {2} [HTTP: 301]".format(redir_target, filename, target))
# Absolute links to other domains, skip
# Absolute links when using only paths, skip.
@@ -221,19 +264,17 @@ class CommandCheck(Command):
((parsed.scheme or target.startswith('//')) and url_type in ('rel_path', 'full_path')):
if not check_remote or parsed.scheme not in ["http", "https"]:
continue
- if parsed.netloc == base_url.netloc: # absolute URL to self.site
- continue
if target in self.checked_remote_targets: # already checked this exact target
- if self.checked_remote_targets[target] in [301, 307]:
- self.logger.warn("Remote link PERMANENTLY redirected in {0}: {1} [Error {2}]".format(filename, target, self.checked_remote_targets[target]))
- elif self.checked_remote_targets[target] in [302, 308]:
- self.logger.info("Remote link temporarily redirected in {1}: {2} [HTTP: {3}]".format(filename, target, self.checked_remote_targets[target]))
+ if self.checked_remote_targets[target] in [301, 308]:
+ self.logger.warning("Remote link PERMANENTLY redirected in {0}: {1} [Error {2}]".format(filename, target, self.checked_remote_targets[target]))
+ elif self.checked_remote_targets[target] in [302, 307]:
+ self.logger.debug("Remote link temporarily redirected in {0}: {1} [HTTP: {2}]".format(filename, target, self.checked_remote_targets[target]))
elif self.checked_remote_targets[target] > 399:
self.logger.error("Broken link in {0}: {1} [Error {2}]".format(filename, target, self.checked_remote_targets[target]))
continue
# Skip whitelisted targets
- if any(re.search(_, target) for _ in self.whitelist):
+ if any(pattern.search(target) for pattern in self.whitelist):
continue
# Check the remote link works
@@ -253,9 +294,9 @@ class CommandCheck(Command):
resp = requests.get(target, headers=req_headers, allow_redirects=True)
# Permanent redirects should be updated
if redir_status_code in [301, 308]:
- self.logger.warn("Remote link moved PERMANENTLY to \"{0}\" and should be updated in {1}: {2} [HTTP: {3}]".format(resp.url, filename, target, redir_status_code))
+ self.logger.warning("Remote link moved PERMANENTLY to \"{0}\" and should be updated in {1}: {2} [HTTP: {3}]".format(resp.url, filename, target, redir_status_code))
if redir_status_code in [302, 307]:
- self.logger.info("Remote link temporarily redirected to \"{0}\" in {1}: {2} [HTTP: {3}]".format(resp.url, filename, target, redir_status_code))
+ self.logger.debug("Remote link temporarily redirected to \"{0}\" in {1}: {2} [HTTP: {3}]".format(resp.url, filename, target, redir_status_code))
self.checked_remote_targets[resp.url] = resp.status_code
self.checked_remote_targets[target] = redir_status_code
else:
@@ -267,7 +308,7 @@ class CommandCheck(Command):
elif resp.status_code <= 399: # The address leads *somewhere* that is not an error
self.logger.debug("Successfully checked remote link in {0}: {1} [HTTP: {2}]".format(filename, target, resp.status_code))
continue
- self.logger.warn("Could not check remote link in {0}: {1} [Unknown problem]".format(filename, target))
+ self.logger.warning("Could not check remote link in {0}: {1} [Unknown problem]".format(filename, target))
continue
if url_type == 'rel_path':
@@ -275,60 +316,95 @@ class CommandCheck(Command):
target_filename = os.path.abspath(
os.path.join(self.site.config['OUTPUT_FOLDER'], unquote(target.lstrip('/'))))
else: # Relative path
+ unquoted_target = unquote(target).encode('utf-8')
target_filename = os.path.abspath(
- os.path.join(os.path.dirname(filename), unquote(target)))
+ os.path.join(os.path.dirname(filename).encode('utf-8'), unquoted_target))
- elif url_type in ('full_path', 'absolute'):
+ else:
+ relative = False
if url_type == 'absolute':
# convert to 'full_path' case, ie url relative to root
- url_rel_path = parsed.path[len(url_netloc_to_root):]
+ if parsed.path.startswith(url_netloc_to_root):
+ url_rel_path = parsed.path[len(url_netloc_to_root):]
+ else:
+ url_rel_path = parsed.path
+ if not url_rel_path.startswith('/'):
+ relative = True
else:
# convert to relative to base path
- url_rel_path = target[len(url_netloc_to_root):]
+ if target.startswith(url_netloc_to_root):
+ url_rel_path = target[len(url_netloc_to_root):]
+ else:
+ url_rel_path = target
+ if not url_rel_path.startswith('/'):
+ relative = True
if url_rel_path == '' or url_rel_path.endswith('/'):
url_rel_path = urljoin(url_rel_path, self.site.config['INDEX_FILE'])
- fs_rel_path = fs_relpath_from_url_path(url_rel_path)
- target_filename = os.path.join(self.site.config['OUTPUT_FOLDER'], fs_rel_path)
+ if relative:
+ unquoted_target = unquote(target).encode('utf-8')
+ target_filename = os.path.abspath(
+ os.path.join(os.path.dirname(filename).encode('utf-8'), unquoted_target))
+ else:
+ fs_rel_path = fs_relpath_from_url_path(url_rel_path)
+ target_filename = os.path.join(self.site.config['OUTPUT_FOLDER'], fs_rel_path)
- if any(re.search(x, target_filename) for x in self.whitelist):
+ if isinstance(target_filename, str):
+ target_filename_str = target_filename
+ else:
+ target_filename_str = target_filename.decode("utf-8", errors="surrogateescape")
+
+ if any(pattern.search(target_filename_str) for pattern in self.whitelist):
continue
+
elif target_filename not in self.existing_targets:
if os.path.exists(target_filename):
- self.logger.notice("Good link {0} => {1}".format(target, target_filename))
+ self.logger.info("Good link {0} => {1}".format(target, target_filename))
self.existing_targets.add(target_filename)
else:
rv = True
- self.logger.warn("Broken link in {0}: {1}".format(filename, target))
+ self.logger.warning("Broken link in {0}: {1}".format(filename, target))
if find_sources:
- self.logger.warn("Possible sources:")
- self.logger.warn("\n".join(deps[filename]))
- self.logger.warn("===============================\n")
+ self.logger.warning("Possible sources:")
+ self.logger.warning("\n".join(deps[filename]))
+ self.logger.warning("===============================\n")
except Exception as exc:
- self.logger.error("Error with: {0} {1}".format(filename, exc))
+ self.logger.error(u"Error with: {0} {1}".format(filename, exc))
return rv
def scan_links(self, find_sources=False, check_remote=False):
"""Check links on the site."""
- self.logger.info("Checking Links:")
- self.logger.info("===============\n")
- self.logger.notice("{0} mode".format(self.site.config['URL_TYPE']))
+ self.logger.debug("Checking Links:")
+ self.logger.debug("===============\n")
+ self.logger.debug("{0} mode".format(self.site.config['URL_TYPE']))
failure = False
+ atom_extension = self.site.config['ATOM_EXTENSION']
# Maybe we should just examine all HTML files
output_folder = self.site.config['OUTPUT_FOLDER']
- for fname in _call_nikola_list(self.site)[0]:
- if fname.startswith(output_folder) and '.html' == fname[-5:]:
- if self.analyze(fname, find_sources, check_remote):
- failure = True
+
+ if urlparse(self.site.config['BASE_URL']).netloc == 'example.com':
+ self.logger.error("You've not changed the SITE_URL (or BASE_URL) setting from \"example.com\"!")
+
+ for fname in _call_nikola_list(self.site, self.cache)[0]:
+ if fname.startswith(output_folder):
+ if '.html' == fname[-5:]:
+ if self.analyze(fname, find_sources, check_remote):
+ failure = True
+ if atom_extension == fname[-len(atom_extension):]:
+ if self.analyze(fname, find_sources, False):
+ failure = True
+ if fname.endswith('sitemap.xml') or fname.endswith('sitemapindex.xml'):
+ if self.analyze(fname, find_sources, False):
+ failure = True
if not failure:
- self.logger.info("All links checked.")
+ self.logger.debug("All links checked.")
return failure
def scan_files(self):
"""Check files in the site, find missing and orphaned files."""
failure = False
- self.logger.info("Checking Files:")
- self.logger.info("===============\n")
- only_on_output, only_on_input = real_scan_files(self.site)
+ self.logger.debug("Checking Files:")
+ self.logger.debug("===============\n")
+ only_on_output, only_on_input = real_scan_files(self.site, self.cache)
# Ignore folders
only_on_output = [p for p in only_on_output if not os.path.isdir(p)]
@@ -336,26 +412,28 @@ class CommandCheck(Command):
if only_on_output:
only_on_output.sort()
- self.logger.warn("Files from unknown origins (orphans):")
+ self.logger.warning("Files from unknown origins (orphans):")
for f in only_on_output:
- self.logger.warn(f)
+ self.logger.warning(f)
failure = True
if only_on_input:
only_on_input.sort()
- self.logger.warn("Files not generated:")
+ self.logger.warning("Files not generated:")
for f in only_on_input:
- self.logger.warn(f)
+ self.logger.warning(f)
if not failure:
- self.logger.info("All files checked.")
+ self.logger.debug("All files checked.")
return failure
def clean_files(self):
"""Remove orphaned files."""
- only_on_output, _ = real_scan_files(self.site)
+ only_on_output, _ = real_scan_files(self.site, self.cache)
for f in only_on_output:
- self.logger.info('removed: {0}'.format(f))
+ self.logger.debug('removed: {0}'.format(f))
os.unlink(f)
+ warn_flag = bool(only_on_output)
+
# Find empty directories and remove them
output_folder = self.site.config['OUTPUT_FOLDER']
all_dirs = []
@@ -365,7 +443,13 @@ class CommandCheck(Command):
for d in all_dirs:
try:
os.rmdir(d)
- self.logger.info('removed: {0}/'.format(d))
+ self.logger.debug('removed: {0}/'.format(d))
+ warn_flag = True
except OSError:
pass
- return True
+
+ if warn_flag:
+ self.logger.warning('Some files or directories have been removed, your site may need rebuilding')
+ return True
+
+ return False
diff --git a/nikola/plugins/command/console.plugin b/nikola/plugins/command/console.plugin
index 333762c..35e3585 100644
--- a/nikola/plugins/command/console.plugin
+++ b/nikola/plugins/command/console.plugin
@@ -5,9 +5,9 @@ module = console
[Documentation]
author = Chris Warrick, Roberto Alsina
version = 1.0
-website = http://getnikola.com
+website = https://getnikola.com/
description = Start a debugging python console
[Nikola]
-plugincategory = Command
+PluginCategory = Command
diff --git a/nikola/plugins/command/console.py b/nikola/plugins/command/console.py
index 539fa08..b4342b4 100644
--- a/nikola/plugins/command/console.py
+++ b/nikola/plugins/command/console.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2015 Chris Warrick, Roberto Alsina and others.
+# Copyright © 2012-2020 Chris Warrick, Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,28 +26,26 @@
"""Start debugging console."""
-from __future__ import print_function, unicode_literals
import os
from nikola import __version__
from nikola.plugin_categories import Command
-from nikola.utils import get_logger, STDERR_HANDLER, req_missing, Commands
+from nikola.utils import get_logger, req_missing, Commands
-LOGGER = get_logger('console', STDERR_HANDLER)
+LOGGER = get_logger('console')
class CommandConsole(Command):
-
"""Start debugging console."""
name = "console"
shells = ['ipython', 'bpython', 'plain']
doc_purpose = "start an interactive Python console with access to your site"
doc_description = """\
-The site engine is accessible as `site`, the config file as `conf`, and commands are available as `commands`.
+The site engine is accessible as `site` and `nikola_site`, the config file as `conf`, and commands are available as `commands`.
If there is no console to use specified (as -b, -i, -p) it tries IPython, then falls back to bpython, and finally falls back to the plain Python console."""
- header = "Nikola v" + __version__ + " -- {0} Console (conf = configuration file, site = site engine, commands = nikola commands)"
+ header = "Nikola v" + __version__ + " -- {0} Console (conf = configuration file, site, nikola_site = site engine, commands = nikola commands)"
cmd_options = [
{
'name': 'bpython',
@@ -73,35 +71,52 @@ If there is no console to use specified (as -b, -i, -p) it tries IPython, then f
'default': False,
'help': 'Use the plain Python interpreter',
},
+ {
+ 'name': 'command',
+ 'short': 'c',
+ 'long': 'command',
+ 'type': str,
+ 'default': None,
+ 'help': 'Run a single command',
+ },
+ {
+ 'name': 'script',
+ 'short': 's',
+ 'long': 'script',
+ 'type': str,
+ 'default': None,
+ 'help': 'Execute a python script in the console context',
+ },
]
def ipython(self, willful=True):
- """IPython shell."""
+ """Run an IPython shell."""
try:
import IPython
- except ImportError as e:
+ except ImportError:
if willful:
req_missing(['IPython'], 'use the IPython console')
- raise e # That’s how _execute knows whether to try something else.
+ raise # That’s how _execute knows whether to try something else.
else:
site = self.context['site'] # NOQA
+ nikola_site = self.context['nikola_site'] # NOQA
conf = self.context['conf'] # NOQA
commands = self.context['commands'] # NOQA
IPython.embed(header=self.header.format('IPython'))
def bpython(self, willful=True):
- """bpython shell."""
+ """Run a bpython shell."""
try:
import bpython
- except ImportError as e:
+ except ImportError:
if willful:
req_missing(['bpython'], 'use the bpython console')
- raise e # That’s how _execute knows whether to try something else.
+ raise # That’s how _execute knows whether to try something else.
else:
bpython.embed(banner=self.header.format('bpython'), locals_=self.context)
def plain(self, willful=True):
- """Plain Python shell."""
+ """Run a plain Python shell."""
import code
try:
import readline
@@ -131,9 +146,16 @@ If there is no console to use specified (as -b, -i, -p) it tries IPython, then f
self.context = {
'conf': self.site.config,
'site': self.site,
+ 'nikola_site': self.site,
'commands': self.site.commands,
}
- if options['bpython']:
+ if options['command']:
+ exec(options['command'], None, self.context)
+ elif options['script']:
+ with open(options['script']) as inf:
+ code = compile(inf.read(), options['script'], 'exec')
+ exec(code, None, self.context)
+ elif options['bpython']:
self.bpython(True)
elif options['ipython']:
self.ipython(True)
diff --git a/nikola/plugins/command/default_config.plugin b/nikola/plugins/command/default_config.plugin
new file mode 100644
index 0000000..af279f6
--- /dev/null
+++ b/nikola/plugins/command/default_config.plugin
@@ -0,0 +1,13 @@
+[Core]
+name = default_config
+module = default_config
+
+[Documentation]
+author = Roberto Alsina
+version = 1.0
+website = https://getnikola.com/
+description = Show the default configuration.
+
+[Nikola]
+PluginCategory = Command
+
diff --git a/nikola/plugins/command/default_config.py b/nikola/plugins/command/default_config.py
new file mode 100644
index 0000000..036f4d1
--- /dev/null
+++ b/nikola/plugins/command/default_config.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+
+# Copyright © 2012-2020 Roberto Alsina and others.
+
+# Permission is hereby granted, free of charge, to any
+# person obtaining a copy of this software and associated
+# documentation files (the "Software"), to deal in the
+# Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the
+# Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice
+# shall be included in all copies or substantial portions of
+# the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
+# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+"""Show the default configuration."""
+
+import sys
+
+import nikola.plugins.command.init
+from nikola.plugin_categories import Command
+from nikola.utils import get_logger
+
+
+LOGGER = get_logger('default_config')
+
+
+class CommandShowConfig(Command):
+ """Show the default configuration."""
+
+ name = "default_config"
+
+ doc_usage = ""
+ needs_config = False
+ doc_purpose = "Print the default Nikola configuration."
+ cmd_options = []
+
+ def _execute(self, options=None, args=None):
+ """Show the default configuration."""
+ try:
+ print(nikola.plugins.command.init.CommandInit.create_configuration_to_string())
+ except Exception:
+ sys.stdout.buffer.write(nikola.plugins.command.init.CommandInit.create_configuration_to_string().encode('utf-8'))
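
A short sketch, assuming nothing Nikola-specific, of the fallback used in _execute() above: print() can raise UnicodeEncodeError on consoles with a narrow encoding, so the UTF-8 bytes are written directly to the underlying stream buffer instead.

import sys

text = "BLOG_AUTHOR = 'Your Name'  # may contain non-ASCII such as ©"
try:
    print(text)
except UnicodeEncodeError:
    # Console encoding cannot represent the text; emit raw UTF-8 bytes.
    sys.stdout.buffer.write(text.encode('utf-8') + b'\n')
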
diff --git a/nikola/plugins/command/deploy.plugin b/nikola/plugins/command/deploy.plugin
index 4743ca2..7cff28d 100644
--- a/nikola/plugins/command/deploy.plugin
+++ b/nikola/plugins/command/deploy.plugin
@@ -5,9 +5,9 @@ module = deploy
[Documentation]
author = Roberto Alsina
version = 1.0
-website = http://getnikola.com
+website = https://getnikola.com/
description = Deploy the site
[Nikola]
-plugincategory = Command
+PluginCategory = Command
diff --git a/nikola/plugins/command/deploy.py b/nikola/plugins/command/deploy.py
index 821ea11..5273b58 100644
--- a/nikola/plugins/command/deploy.py
+++ b/nikola/plugins/command/deploy.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2015 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,57 +26,48 @@
"""Deploy site."""
-from __future__ import print_function
-import io
-from datetime import datetime
-from dateutil.tz import gettz
-import os
import subprocess
import time
+from datetime import datetime
+import dateutil
from blinker import signal
+from dateutil.tz import gettz
from nikola.plugin_categories import Command
-from nikola.utils import get_logger, remove_file, unicode_str, makedirs, STDERR_HANDLER
+from nikola.utils import clean_before_deployment
class CommandDeploy(Command):
-
"""Deploy site."""
name = "deploy"
- doc_usage = "[[preset [preset...]]"
+ doc_usage = "[preset [preset...]]"
doc_purpose = "deploy the site"
doc_description = "Deploy the site by executing deploy commands from the presets listed on the command line. If no presets are specified, `default` is executed."
- logger = None
def _execute(self, command, args):
"""Execute the deploy command."""
- self.logger = get_logger('deploy', STDERR_HANDLER)
- # Get last successful deploy date
- timestamp_path = os.path.join(self.site.config['CACHE_FOLDER'], 'lastdeploy')
- if self.site.config['COMMENT_SYSTEM_ID'] == 'nikolademo':
- self.logger.warn("\nWARNING WARNING WARNING WARNING\n"
- "You are deploying using the nikolademo Disqus account.\n"
- "That means you will not be able to moderate the comments in your own site.\n"
- "And is probably not what you want to do.\n"
- "Think about it for 5 seconds, I'll wait :-)\n\n")
+ # Get last-deploy from persistent state
+ last_deploy = self.site.state.get('last_deploy')
+ if last_deploy is not None:
+ last_deploy = dateutil.parser.parse(last_deploy)
+ clean = False
+
+ if self.site.config['COMMENT_SYSTEM'] and self.site.config['COMMENT_SYSTEM_ID'] == 'nikolademo':
+ self.logger.warning("\nWARNING WARNING WARNING WARNING\n"
+ "You are deploying using the nikolademo Disqus account.\n"
+ "That means you will not be able to moderate the comments in your own site.\n"
+ "And is probably not what you want to do.\n"
+ "Think about it for 5 seconds, I'll wait :-)\n"
+ "(press Ctrl+C to abort)\n")
time.sleep(5)
- deploy_drafts = self.site.config.get('DEPLOY_DRAFTS', True)
- deploy_future = self.site.config.get('DEPLOY_FUTURE', False)
- undeployed_posts = []
- if not (deploy_drafts and deploy_future):
- # Remove drafts and future posts
- out_dir = self.site.config['OUTPUT_FOLDER']
- self.site.scan_posts()
- for post in self.site.timeline:
- if (not deploy_drafts and post.is_draft) or \
- (not deploy_future and post.publish_later):
- remove_file(os.path.join(out_dir, post.destination_path()))
- remove_file(os.path.join(out_dir, post.source_path))
- undeployed_posts.append(post)
+ # Remove drafts and future posts if requested
+ undeployed_posts = clean_before_deployment(self.site)
+ if undeployed_posts:
+ self.logger.warning("Deleted {0} posts due to DEPLOY_* settings".format(len(undeployed_posts)))
if args:
presets = args
@@ -87,7 +78,7 @@ class CommandDeploy(Command):
for preset in presets:
try:
self.site.config['DEPLOY_COMMANDS'][preset]
- except:
+ except KeyError:
self.logger.error('No such preset: {0}'.format(preset))
return 255
@@ -98,27 +89,22 @@ class CommandDeploy(Command):
try:
subprocess.check_call(command, shell=True)
except subprocess.CalledProcessError as e:
- self.logger.error('Failed deployment — command {0} '
+ self.logger.error('Failed deployment -- command {0} '
'returned {1}'.format(e.cmd, e.returncode))
return e.returncode
self.logger.info("Successful deployment")
- try:
- with io.open(timestamp_path, 'r', encoding='utf8') as inf:
- last_deploy = datetime.strptime(inf.read().strip(), "%Y-%m-%dT%H:%M:%S.%f")
- clean = False
- except (IOError, Exception) as e:
- self.logger.debug("Problem when reading `{0}`: {1}".format(timestamp_path, e))
- last_deploy = datetime(1970, 1, 1)
- clean = True
new_deploy = datetime.utcnow()
self._emit_deploy_event(last_deploy, new_deploy, clean, undeployed_posts)
- makedirs(self.site.config['CACHE_FOLDER'])
# Store timestamp of successful deployment
- with io.open(timestamp_path, 'w+', encoding='utf8') as outf:
- outf.write(unicode_str(new_deploy.isoformat()))
+ self.site.state.set('last_deploy', new_deploy.isoformat())
+ if clean:
+ self.logger.info(
+ 'Looks like this is the first time you deployed this site. '
+ 'Let us know you are using Nikola '
+ 'at <https://users.getnikola.com/add/> if you want!')
def _emit_deploy_event(self, last_deploy, new_deploy, clean=False, undeployed=None):
"""Emit events for all timeline entries newer than last deploy.
diff --git a/nikola/plugins/command/github_deploy.plugin b/nikola/plugins/command/github_deploy.plugin
index e793548..fbdd3bf 100644
--- a/nikola/plugins/command/github_deploy.plugin
+++ b/nikola/plugins/command/github_deploy.plugin
@@ -5,9 +5,9 @@ module = github_deploy
[Documentation]
author = Puneeth Chaganti
version = 1.0
-website = http://getnikola.com
+website = https://getnikola.com/
description = Deploy the site to GitHub pages.
[Nikola]
-plugincategory = Command
+PluginCategory = Command
diff --git a/nikola/plugins/command/github_deploy.py b/nikola/plugins/command/github_deploy.py
index 0ab9332..d2c1f3f 100644
--- a/nikola/plugins/command/github_deploy.py
+++ b/nikola/plugins/command/github_deploy.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2014-2015 Puneeth Chaganti and others.
+# Copyright © 2014-2020 Puneeth Chaganti and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,16 +26,13 @@
"""Deploy site to GitHub Pages."""
-from __future__ import print_function
-from datetime import datetime
-import io
import os
import subprocess
from textwrap import dedent
from nikola.plugin_categories import Command
from nikola.plugins.command.check import real_scan_files
-from nikola.utils import get_logger, req_missing, makedirs, unicode_str, STDERR_HANDLER
+from nikola.utils import req_missing, clean_before_deployment
from nikola.__main__ import main
from nikola import __version__
@@ -53,32 +50,41 @@ def check_ghp_import_installed():
except OSError:
# req_missing defaults to `python=True` — and it’s meant to be like this.
# `ghp-import` is installed via pip, but the only way to use it is by executing the script it installs.
- req_missing(['ghp-import'], 'deploy the site to GitHub Pages')
+ req_missing(['ghp-import2'], 'deploy the site to GitHub Pages')
-class CommandGitHubDeploy(Command):
+class DeployFailedException(Exception):
+ """An internal exception for deployment errors."""
+
+ pass
+
+class CommandGitHubDeploy(Command):
"""Deploy site to GitHub Pages."""
name = 'github_deploy'
- doc_usage = ''
+ doc_usage = '[-m COMMIT_MESSAGE]'
doc_purpose = 'deploy the site to GitHub Pages'
doc_description = dedent(
"""\
- This command can be used to deploy your site to GitHub Pages.
+ This command can be used to deploy your site to GitHub Pages. It uses ghp-import to do this task. It also optionally commits to the source branch.
- It uses ghp-import to do this task.
-
- """
+ Configuration help: https://getnikola.com/handbook.html#deploying-to-github"""
)
-
- logger = None
-
- def _execute(self, command, args):
+ cmd_options = [
+ {
+ 'name': 'commit_message',
+ 'short': 'm',
+ 'long': 'message',
+ 'default': 'Nikola auto commit.',
+ 'type': str,
+ 'help': 'Commit message',
+ },
+ ]
+
+ def _execute(self, options, args):
"""Run the deployment."""
- self.logger = get_logger(CommandGitHubDeploy.name, STDERR_HANDLER)
-
# Check if ghp-import is installed
check_ghp_import_installed()
@@ -93,41 +99,74 @@ class CommandGitHubDeploy(Command):
for f in only_on_output:
os.unlink(f)
- # Commit and push
- self._commit_and_push()
-
- return
+ # Remove drafts and future posts if requested (Issue #2406)
+ undeployed_posts = clean_before_deployment(self.site)
+ if undeployed_posts:
+ self.logger.warning("Deleted {0} posts due to DEPLOY_* settings".format(len(undeployed_posts)))
- def _commit_and_push(self):
- """Commit all the files and push."""
- source = self.site.config['GITHUB_SOURCE_BRANCH']
- deploy = self.site.config['GITHUB_DEPLOY_BRANCH']
- remote = self.site.config['GITHUB_REMOTE_NAME']
- source_commit = uni_check_output(['git', 'rev-parse', source])
- commit_message = (
- 'Nikola auto commit.\n\n'
- 'Source commit: %s'
- 'Nikola version: %s' % (source_commit, __version__)
- )
- output_folder = self.site.config['OUTPUT_FOLDER']
-
- command = ['ghp-import', '-n', '-m', commit_message, '-p', '-r', remote, '-b', deploy, output_folder]
+ # Commit and push
+ return self._commit_and_push(options['commit_message'])
+ def _run_command(self, command, xfail=False):
+ """Run a command that may or may not fail."""
self.logger.info("==> {0}".format(command))
try:
subprocess.check_call(command)
+ return 0
except subprocess.CalledProcessError as e:
+ if xfail:
+ return e.returncode
self.logger.error(
- 'Failed GitHub deployment — command {0} '
+ 'Failed GitHub deployment -- command {0} '
'returned {1}'.format(e.cmd, e.returncode)
)
- return e.returncode
+ raise DeployFailedException(e.returncode)
- self.logger.info("Successful deployment")
+ def _commit_and_push(self, commit_first_line):
+ """Commit all the files and push."""
+ source = self.site.config['GITHUB_SOURCE_BRANCH']
+ deploy = self.site.config['GITHUB_DEPLOY_BRANCH']
+ remote = self.site.config['GITHUB_REMOTE_NAME']
+ autocommit = self.site.config['GITHUB_COMMIT_SOURCE']
+ try:
+ if autocommit:
+ commit_message = (
+ '{0}\n\n'
+ 'Nikola version: {1}'.format(commit_first_line, __version__)
+ )
+ e = self._run_command(['git', 'checkout', source], True)
+ if e != 0:
+ self._run_command(['git', 'checkout', '-b', source])
+ self._run_command(['git', 'add', '.'])
+ # Figure out if there is anything to commit
+ e = self._run_command(['git', 'diff-index', '--quiet', 'HEAD'], True)
+ if e != 0:
+ self._run_command(['git', 'commit', '-am', commit_message])
+ else:
+ self.logger.info('Nothing to commit to source branch.')
+
+ try:
+ source_commit = uni_check_output(['git', 'rev-parse', source])
+ except subprocess.CalledProcessError:
+ try:
+ source_commit = uni_check_output(['git', 'rev-parse', 'HEAD'])
+ except subprocess.CalledProcessError:
+ source_commit = '?'
+
+ commit_message = (
+ '{0}\n\n'
+ 'Source commit: {1}'
+ 'Nikola version: {2}'.format(commit_first_line, source_commit, __version__)
+ )
+ output_folder = self.site.config['OUTPUT_FOLDER']
+
+ command = ['ghp-import', '-n', '-m', commit_message, '-p', '-r', remote, '-b', deploy, output_folder]
- # Store timestamp of successful deployment
- timestamp_path = os.path.join(self.site.config["CACHE_FOLDER"], "lastdeploy")
- new_deploy = datetime.utcnow()
- makedirs(self.site.config["CACHE_FOLDER"])
- with io.open(timestamp_path, "w+", encoding="utf8") as outf:
- outf.write(unicode_str(new_deploy.isoformat()))
+ self._run_command(command)
+
+ if autocommit:
+ self._run_command(['git', 'push', '-u', remote, source])
+ except DeployFailedException as e:
+ return e.args[0]
+
+ self.logger.info("Successful deployment")
diff --git a/nikola/plugins/command/import_wordpress.plugin b/nikola/plugins/command/import_wordpress.plugin
index 6c4384e..46df1ef 100644
--- a/nikola/plugins/command/import_wordpress.plugin
+++ b/nikola/plugins/command/import_wordpress.plugin
@@ -5,9 +5,9 @@ module = import_wordpress
[Documentation]
author = Roberto Alsina
version = 1.0
-website = http://getnikola.com
+website = https://getnikola.com/
description = Import a wordpress site from a XML dump (requires markdown).
[Nikola]
-plugincategory = Command
+PluginCategory = Command
diff --git a/nikola/plugins/command/import_wordpress.py b/nikola/plugins/command/import_wordpress.py
index a652ec8..5e2aee6 100644
--- a/nikola/plugins/command/import_wordpress.py
+++ b/nikola/plugins/command/import_wordpress.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2015 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,41 +26,45 @@
"""Import a WordPress dump."""
-from __future__ import unicode_literals, print_function
-import os
-import re
-import sys
import datetime
import io
import json
+import os
+import re
+import sys
+from collections import defaultdict
+from urllib.parse import urlparse, unquote
+
import requests
from lxml import etree
-from collections import defaultdict
+
+from nikola.plugin_categories import Command
+from nikola import utils, hierarchy_utils
+from nikola.nikola import DEFAULT_TRANSLATIONS_PATTERN
+from nikola.utils import req_missing
+from nikola.plugins.basic_import import ImportMixin, links
+from nikola.plugins.command.init import (
+ SAMPLE_CONF, prepare_config,
+ format_default_translations_config,
+ get_default_translations_dict
+)
try:
- from urlparse import urlparse
- from urllib import unquote
+ import html2text
except ImportError:
- from urllib.parse import urlparse, unquote # NOQA
+ html2text = None
try:
import phpserialize
except ImportError:
- phpserialize = None # NOQA
+ phpserialize = None
-from nikola.plugin_categories import Command
-from nikola import utils
-from nikola.utils import req_missing
-from nikola.plugins.basic_import import ImportMixin, links
-from nikola.nikola import DEFAULT_TRANSLATIONS_PATTERN
-from nikola.plugins.command.init import SAMPLE_CONF, prepare_config, format_default_translations_config
-
-LOGGER = utils.get_logger('import_wordpress', utils.STDERR_HANDLER)
+LOGGER = utils.get_logger('import_wordpress')
def install_plugin(site, plugin_name, output_dir=None, show_install_notes=False):
"""Install a Nikola plugin."""
- LOGGER.notice("Installing plugin '{0}'".format(plugin_name))
+ LOGGER.info("Installing plugin '{0}'".format(plugin_name))
# Get hold of the 'plugin' plugin
plugin_installer_info = site.plugin_manager.getPluginByName('plugin', 'Command')
if plugin_installer_info is None:
@@ -88,7 +92,6 @@ def install_plugin(site, plugin_name, output_dir=None, show_install_notes=False)
class CommandImportWordpress(Command, ImportMixin):
-
"""Import a WordPress dump."""
name = "import_wordpress"
@@ -144,15 +147,22 @@ class CommandImportWordpress(Command, ImportMixin):
'long': 'qtranslate',
'default': False,
'type': bool,
- 'help': "Look for translations generated by qtranslate plugin",
- # WARNING: won't recover translated titles that actually
- # don't seem to be part of the wordpress XML export at the
- # time of writing :(
+ 'help': """Look for translations generated by qtranslate plugin.
+WARNING: a default wordpress export won't allow to recover title translations.
+For this to be possible consider applying the hack suggested at
+https://github.com/qtranslate/qtranslate-xt/issues/199 :
+
+In wp-admin/includes/export.php change
+`echo apply_filters( 'the_title_rss', $post->post_title );
+
+to
+`echo apply_filters( 'the_title_export', $post->post_title );
+"""
},
{
'name': 'translations_pattern',
'long': 'translations_pattern',
- 'default': None,
+ 'default': DEFAULT_TRANSLATIONS_PATTERN,
'type': str,
'help': "The pattern for translation files names",
},
@@ -171,6 +181,20 @@ class CommandImportWordpress(Command, ImportMixin):
'help': "Export comments as .wpcomment files",
},
{
+ 'name': 'html2text',
+ 'long': 'html2text',
+ 'default': False,
+ 'type': bool,
+ 'help': "Uses html2text (needs to be installed with pip) to transform WordPress posts to MarkDown during import",
+ },
+ {
+ 'name': 'transform_to_markdown',
+ 'long': 'transform-to-markdown',
+ 'default': False,
+ 'type': bool,
+ 'help': "Uses WordPress page compiler to transform WordPress posts to HTML and then use html2text to transform them to MarkDown during import",
+ },
+ {
'name': 'transform_to_html',
'long': 'transform-to-html',
'default': False,
@@ -191,9 +215,36 @@ class CommandImportWordpress(Command, ImportMixin):
'type': bool,
'help': "Automatically installs the WordPress page compiler (either locally or in the new site) if required by other options.\nWarning: the compiler is GPL software!",
},
+ {
+ 'name': 'tag_sanitizing_strategy',
+ 'long': 'tag-sanitizing-strategy',
+ 'default': 'first',
+ 'help': 'lower: Convert all tag and category names to lower case\nfirst: Keep first spelling of tag or category name',
+ },
+ {
+ 'name': 'one_file',
+ 'long': 'one-file',
+ 'default': False,
+ 'type': bool,
+ 'help': "Save imported posts in the more modern one-file format.",
+ },
]
all_tags = set([])
+ def _get_compiler(self):
+ """Return whatever compiler we will use."""
+ self._find_wordpress_compiler()
+ if self.wordpress_page_compiler is not None:
+ return self.wordpress_page_compiler
+ plugin_info = self.site.plugin_manager.getPluginByName('markdown', 'PageCompiler')
+ if plugin_info is not None:
+ if not plugin_info.is_activated:
+ self.site.plugin_manager.activatePluginByName(plugin_info.name)
+ plugin_info.plugin_object.set_site(self.site)
+ return plugin_info.plugin_object
+ else:
+ LOGGER.error("Can't find markdown post compiler.")
+
def _find_wordpress_compiler(self):
"""Find WordPress compiler plugin."""
if self.wordpress_page_compiler is not None:
@@ -214,9 +265,11 @@ class CommandImportWordpress(Command, ImportMixin):
options['output_folder'] = args.pop(0)
if args:
- LOGGER.warn('You specified additional arguments ({0}). Please consider '
- 'putting these arguments before the filename if you '
- 'are running into problems.'.format(args))
+ LOGGER.warning('You specified additional arguments ({0}). Please consider '
+ 'putting these arguments before the filename if you '
+ 'are running into problems.'.format(args))
+
+ self.onefile = options.get('one_file', False)
self.import_into_existing_site = False
self.url_map = {}
@@ -234,11 +287,16 @@ class CommandImportWordpress(Command, ImportMixin):
self.export_categories_as_categories = options.get('export_categories_as_categories', False)
self.export_comments = options.get('export_comments', False)
+ self.html2text = options.get('html2text', False)
+ self.transform_to_markdown = options.get('transform_to_markdown', False)
+
self.transform_to_html = options.get('transform_to_html', False)
self.use_wordpress_compiler = options.get('use_wordpress_compiler', False)
self.install_wordpress_compiler = options.get('install_wordpress_compiler', False)
self.wordpress_page_compiler = None
+        self.tag_saniziting_strategy = options.get('tag_sanitizing_strategy', 'first')
+
self.auth = None
if options.get('download_auth') is not None:
username_password = options.get('download_auth')
@@ -250,10 +308,18 @@ class CommandImportWordpress(Command, ImportMixin):
self.separate_qtranslate_content = options.get('separate_qtranslate_content')
self.translations_pattern = options.get('translations_pattern')
- if self.transform_to_html and self.use_wordpress_compiler:
- LOGGER.warn("It does not make sense to combine --transform-to-html with --use-wordpress-compiler, as the first converts all posts to HTML and the latter option affects zero posts.")
+ count = (1 if self.html2text else 0) + (1 if self.transform_to_html else 0) + (1 if self.transform_to_markdown else 0)
+ if count > 1:
+ LOGGER.error("You can use at most one of the options --html2text, --transform-to-html and --transform-to-markdown.")
+ return False
+ if (self.html2text or self.transform_to_html or self.transform_to_markdown) and self.use_wordpress_compiler:
+ LOGGER.warning("It does not make sense to combine --use-wordpress-compiler with any of --html2text, --transform-to-html and --transform-to-markdown, as the latter convert all posts to HTML and the first option then affects zero posts.")
+
+ if (self.html2text or self.transform_to_markdown) and not html2text:
+ LOGGER.error("You need to install html2text via 'pip install html2text' before you can use the --html2text and --transform-to-markdown options.")
+ return False
- if self.transform_to_html:
+ if self.transform_to_html or self.transform_to_markdown:
self._find_wordpress_compiler()
if not self.wordpress_page_compiler and self.install_wordpress_compiler:
if not install_plugin(self.site, 'wordpress_compiler', output_dir='plugins'): # local install
@@ -279,14 +345,14 @@ class CommandImportWordpress(Command, ImportMixin):
# cat_id = get_text_tag(cat, '{{{0}}}term_id'.format(wordpress_namespace), None)
cat_slug = get_text_tag(cat, '{{{0}}}category_nicename'.format(wordpress_namespace), None)
cat_parent_slug = get_text_tag(cat, '{{{0}}}category_parent'.format(wordpress_namespace), None)
- cat_name = get_text_tag(cat, '{{{0}}}cat_name'.format(wordpress_namespace), None)
+ cat_name = utils.html_unescape(get_text_tag(cat, '{{{0}}}cat_name'.format(wordpress_namespace), None))
cat_path = [cat_name]
if cat_parent_slug in cat_map:
cat_path = cat_map[cat_parent_slug] + cat_path
cat_map[cat_slug] = cat_path
self._category_paths = dict()
for cat, path in cat_map.items():
- self._category_paths[cat] = utils.join_hierarchical_category_path(path)
+ self._category_paths[cat] = hierarchy_utils.join_hierarchical_category_path(path)
def _execute(self, options={}, args=[]):
"""Import a WordPress blog from an export file into a Nikola site."""
@@ -313,21 +379,16 @@ class CommandImportWordpress(Command, ImportMixin):
if phpserialize is None:
req_missing(['phpserialize'], 'import WordPress dumps without --no-downloads')
- channel = self.get_channel_from_file(self.wordpress_export_file)
+ export_file_preprocessor = modernize_qtranslate_tags if self.separate_qtranslate_content else None
+ channel = self.get_channel_from_file(self.wordpress_export_file, export_file_preprocessor)
self._prepare(channel)
conf_template = self.generate_base_site()
- # If user has specified a custom pattern for translation files we
- # need to fix the config
- if self.translations_pattern:
- self.context['TRANSLATIONS_PATTERN'] = self.translations_pattern
-
self.import_posts(channel)
-
self.context['TRANSLATIONS'] = format_default_translations_config(
self.extra_languages)
self.context['REDIRECTIONS'] = self.configure_redirections(
- self.url_map)
+ self.url_map, self.base_dir)
if self.timezone:
self.context['TIMEZONE'] = self.timezone
if self.export_categories_as_categories:
@@ -337,10 +398,13 @@ class CommandImportWordpress(Command, ImportMixin):
# Add tag redirects
for tag in self.all_tags:
try:
- tag_str = tag.decode('utf8')
+ if isinstance(tag, bytes):
+ tag_str = tag.decode('utf8', 'replace')
+ else:
+ tag_str = tag
except AttributeError:
tag_str = tag
- tag = utils.slugify(tag_str)
+ tag = utils.slugify(tag_str, self.lang)
src_url = '{}tag/{}'.format(self.context['SITE_URL'], tag)
dst_url = self.site.link('tag', tag)
if src_url != dst_url:
@@ -357,9 +421,9 @@ class CommandImportWordpress(Command, ImportMixin):
if not install_plugin(self.site, 'wordpress_compiler', output_dir=os.path.join(self.output_folder, 'plugins')):
return False
else:
- LOGGER.warn("Make sure to install the WordPress page compiler via")
- LOGGER.warn(" nikola plugin -i wordpress_compiler")
- LOGGER.warn("in your imported blog's folder ({0}), if you haven't installed it system-wide or user-wide. Otherwise, your newly imported blog won't compile.".format(self.output_folder))
+ LOGGER.warning("Make sure to install the WordPress page compiler via")
+ LOGGER.warning(" nikola plugin -i wordpress_compiler")
+ LOGGER.warning("in your imported blog's folder ({0}), if you haven't installed it system-wide or user-wide. Otherwise, your newly imported blog won't compile.".format(self.output_folder))
@classmethod
def read_xml_file(cls, filename):
@@ -372,12 +436,19 @@ class CommandImportWordpress(Command, ImportMixin):
if b'<atom:link rel=' in line:
continue
xml.append(line)
- return b'\n'.join(xml)
+ return b''.join(xml)
@classmethod
- def get_channel_from_file(cls, filename):
- """Get channel from XML file."""
- tree = etree.fromstring(cls.read_xml_file(filename))
+ def get_channel_from_file(cls, filename, xml_preprocessor=None):
+ """Get channel from XML file.
+
+    An optional 'xml_preprocessor' function can modify the XML string
+    (typically to deal with variations in tags injected by some WP plugins).
+ """
+ xml_string = cls.read_xml_file(filename)
+ if xml_preprocessor:
+ xml_string = xml_preprocessor(xml_string)
+ tree = etree.fromstring(xml_string)
channel = tree.find('channel')
return channel
@@ -386,8 +457,12 @@ class CommandImportWordpress(Command, ImportMixin):
wordpress_namespace = channel.nsmap['wp']
context = SAMPLE_CONF.copy()
- context['DEFAULT_LANG'] = get_text_tag(channel, 'language', 'en')[:2]
- context['TRANSLATIONS_PATTERN'] = DEFAULT_TRANSLATIONS_PATTERN
+ self.lang = get_text_tag(channel, 'language', 'en')[:2]
+ context['DEFAULT_LANG'] = self.lang
+ # If user has specified a custom pattern for translation files we
+ # need to fix the config
+ context['TRANSLATIONS_PATTERN'] = self.translations_pattern
+
context['BLOG_TITLE'] = get_text_tag(channel, 'title',
'PUT TITLE HERE')
context['BLOG_DESCRIPTION'] = get_text_tag(
@@ -418,17 +493,17 @@ class CommandImportWordpress(Command, ImportMixin):
PAGES = '(\n'
for extension in extensions:
POSTS += ' ("posts/*.{0}", "posts", "post.tmpl"),\n'.format(extension)
- PAGES += ' ("stories/*.{0}", "stories", "story.tmpl"),\n'.format(extension)
+ PAGES += ' ("pages/*.{0}", "pages", "page.tmpl"),\n'.format(extension)
POSTS += ')\n'
PAGES += ')\n'
context['POSTS'] = POSTS
context['PAGES'] = PAGES
COMPILERS = '{\n'
- COMPILERS += ''' "rest": ('.txt', '.rst'),''' + '\n'
- COMPILERS += ''' "markdown": ('.md', '.mdown', '.markdown'),''' + '\n'
- COMPILERS += ''' "html": ('.html', '.htm'),''' + '\n'
+ COMPILERS += ''' "rest": ['.txt', '.rst'],''' + '\n'
+ COMPILERS += ''' "markdown": ['.md', '.mdown', '.markdown'],''' + '\n'
+ COMPILERS += ''' "html": ['.html', '.htm'],''' + '\n'
if self.use_wordpress_compiler:
- COMPILERS += ''' "wordpress": ('.wp'),''' + '\n'
+ COMPILERS += ''' "wordpress": ['.wp'],''' + '\n'
COMPILERS += '}'
context['COMPILERS'] = COMPILERS
@@ -436,18 +511,15 @@ class CommandImportWordpress(Command, ImportMixin):
def download_url_content_to_file(self, url, dst_path):
"""Download some content (attachments) to a file."""
- if self.no_downloads:
- return
-
try:
request = requests.get(url, auth=self.auth)
if request.status_code >= 400:
- LOGGER.warn("Downloading {0} to {1} failed with HTTP status code {2}".format(url, dst_path, request.status_code))
+ LOGGER.warning("Downloading {0} to {1} failed with HTTP status code {2}".format(url, dst_path, request.status_code))
return
with open(dst_path, 'wb+') as fd:
fd.write(request.content)
except requests.exceptions.ConnectionError as err:
- LOGGER.warn("Downloading {0} to {1} failed: {2}".format(url, dst_path, err))
+ LOGGER.warning("Downloading {0} to {1} failed: {2}".format(url, dst_path, err))
def import_attachment(self, item, wordpress_namespace):
"""Import an attachment to the site."""
@@ -458,10 +530,13 @@ class CommandImportWordpress(Command, ImportMixin):
'foo')
path = urlparse(url).path
dst_path = os.path.join(*([self.output_folder, 'files'] + list(path.split('/'))))
- dst_dir = os.path.dirname(dst_path)
- utils.makedirs(dst_dir)
- LOGGER.info("Downloading {0} => {1}".format(url, dst_path))
- self.download_url_content_to_file(url, dst_path)
+ if self.no_downloads:
+ LOGGER.info("Skipping downloading {0} => {1}".format(url, dst_path))
+ else:
+ dst_dir = os.path.dirname(dst_path)
+ utils.makedirs(dst_dir)
+ LOGGER.info("Downloading {0} => {1}".format(url, dst_path))
+ self.download_url_content_to_file(url, dst_path)
dst_url = '/'.join(dst_path.split(os.sep)[2:])
links[link] = '/' + dst_url
links[url] = '/' + dst_url
@@ -485,14 +560,7 @@ class CommandImportWordpress(Command, ImportMixin):
# that the export should give you the power to insert
# your blogging into another site or system its not.
# Why don't they just use JSON?
- if sys.version_info[0] == 2:
- try:
- metadata = phpserialize.loads(utils.sys_encode(meta_value.text))
- except ValueError:
- # local encoding might be wrong sometimes
- metadata = phpserialize.loads(meta_value.text.encode('utf-8'))
- else:
- metadata = phpserialize.loads(meta_value.text.encode('utf-8'))
+ metadata = phpserialize.loads(meta_value.text.encode('utf-8'))
meta_key = b'image_meta'
size_key = b'sizes'
@@ -507,6 +575,8 @@ class CommandImportWordpress(Command, ImportMixin):
if meta_key in metadata:
image_meta = metadata[meta_key]
+ if not image_meta:
+ continue
dst_meta = {}
def add(our_key, wp_key, is_int=False, ignore_zero=False, is_float=False):
@@ -517,6 +587,9 @@ class CommandImportWordpress(Command, ImportMixin):
if ignore_zero and value == 0:
return
elif is_float:
+ # in some locales (like fr) and for old posts there may be a comma here.
+ if isinstance(value, bytes):
+ value = value.replace(b",", b".")
value = float(value)
if ignore_zero and value == 0:
return
@@ -552,15 +625,18 @@ class CommandImportWordpress(Command, ImportMixin):
meta = {}
meta['size'] = size.decode('utf-8')
if width_key in metadata[size_key][size] and height_key in metadata[size_key][size]:
- meta['width'] = metadata[size_key][size][width_key]
- meta['height'] = metadata[size_key][size][height_key]
+ meta['width'] = int(metadata[size_key][size][width_key])
+ meta['height'] = int(metadata[size_key][size][height_key])
path = urlparse(url).path
dst_path = os.path.join(*([self.output_folder, 'files'] + list(path.split('/'))))
- dst_dir = os.path.dirname(dst_path)
- utils.makedirs(dst_dir)
- LOGGER.info("Downloading {0} => {1}".format(url, dst_path))
- self.download_url_content_to_file(url, dst_path)
+ if self.no_downloads:
+ LOGGER.info("Skipping downloading {0} => {1}".format(url, dst_path))
+ else:
+ dst_dir = os.path.dirname(dst_path)
+ utils.makedirs(dst_dir)
+ LOGGER.info("Downloading {0} => {1}".format(url, dst_path))
+ self.download_url_content_to_file(url, dst_path)
dst_url = '/'.join(dst_path.split(os.sep)[2:])
links[url] = '/' + dst_url
@@ -604,7 +680,7 @@ class CommandImportWordpress(Command, ImportMixin):
def transform_code(self, content):
"""Transform code blocks."""
- # http://en.support.wordpress.com/code/posting-source-code/. There are
+ # https://en.support.wordpress.com/code/posting-source-code/. There are
# a ton of things not supported here. We only do a basic [code
# lang="x"] -> ```x translation, and remove quoted html entities (<,
# >, &, and ").
@@ -628,10 +704,10 @@ class CommandImportWordpress(Command, ImportMixin):
return content
@staticmethod
- def transform_caption(content):
+ def transform_caption(content, use_html=False):
"""Transform captions."""
- new_caption = re.sub(r'\[/caption\]', '', content)
- new_caption = re.sub(r'\[caption.*\]', '', new_caption)
+ new_caption = re.sub(r'\[/caption\]', '</h1>' if use_html else '', content)
+ new_caption = re.sub(r'\[caption.*\]', '<h1>' if use_html else '', new_caption)
return new_caption
@@ -654,6 +730,26 @@ class CommandImportWordpress(Command, ImportMixin):
except TypeError: # old versions of the plugin don't support the additional argument
content = self.wordpress_page_compiler.compile_to_string(content)
return content, 'html', True
+ elif self.transform_to_markdown:
+ # First convert to HTML with WordPress plugin
+ additional_data = {}
+ if attachments is not None:
+ additional_data['attachments'] = attachments
+ try:
+ content = self.wordpress_page_compiler.compile_to_string(content, additional_data=additional_data)
+ except TypeError: # old versions of the plugin don't support the additional argument
+ content = self.wordpress_page_compiler.compile_to_string(content)
+ # Now convert to MarkDown with html2text
+ h = html2text.HTML2Text()
+ content = h.handle(content)
+ return content, 'md', False
+ elif self.html2text:
+ # TODO: what to do with [code] blocks?
+ # content = self.transform_code(content)
+ content = self.transform_caption(content, use_html=True)
+ h = html2text.HTML2Text()
+ content = h.handle(content)
+ return content, 'md', False
elif self.use_wordpress_compiler:
return content, 'wp', False
else:
@@ -686,7 +782,7 @@ class CommandImportWordpress(Command, ImportMixin):
elif approved == 'spam' or approved == 'trash':
pass
else:
- LOGGER.warn("Unknown comment approved status: " + str(approved))
+ LOGGER.warning("Unknown comment approved status: {0}".format(approved))
parent = int(get_text_tag(comment, "{{{0}}}comment_parent".format(wordpress_namespace), 0))
if parent == 0:
parent = None
@@ -724,6 +820,16 @@ class CommandImportWordpress(Command, ImportMixin):
write_header_line(fd, "wordpress_user_id", comment["user_id"])
fd.write(('\n' + comment['content']).encode('utf8'))
+ def _create_meta_and_content_filenames(self, slug, extension, lang, default_language, translations_config):
+ out_meta_filename = slug + '.meta'
+ out_content_filename = slug + '.' + extension
+ if lang and lang != default_language:
+ out_meta_filename = utils.get_translation_candidate(translations_config,
+ out_meta_filename, lang)
+ out_content_filename = utils.get_translation_candidate(translations_config,
+ out_content_filename, lang)
+ return out_meta_filename, out_content_filename
+
def _create_metadata(self, status, excerpt, tags, categories, post_name=None):
"""Create post metadata."""
other_meta = {'wp-status': status}
@@ -735,24 +841,48 @@ class CommandImportWordpress(Command, ImportMixin):
if text in self._category_paths:
cats.append(self._category_paths[text])
else:
- cats.append(utils.join_hierarchical_category_path([text]))
+ cats.append(hierarchy_utils.join_hierarchical_category_path([utils.html_unescape(text)]))
other_meta['categories'] = ','.join(cats)
if len(cats) > 0:
other_meta['category'] = cats[0]
if len(cats) > 1:
- LOGGER.warn(('Post "{0}" has more than one category! ' +
- 'Will only use the first one.').format(post_name))
- tags_cats = tags
+ LOGGER.warning(('Post "{0}" has more than one category! ' +
+ 'Will only use the first one.').format(post_name))
+ tags_cats = [utils.html_unescape(tag) for tag in tags]
else:
- tags_cats = tags + categories
+ tags_cats = [utils.html_unescape(tag) for tag in tags + categories]
return tags_cats, other_meta
+ _tag_sanitize_map = {True: {}, False: {}}
+
+ def _sanitize(self, tag, is_category):
+ if self.tag_saniziting_strategy == 'lower':
+ return tag.lower()
+ if tag.lower() not in self._tag_sanitize_map[is_category]:
+ self._tag_sanitize_map[is_category][tag.lower()] = [tag]
+ return tag
+ previous = self._tag_sanitize_map[is_category][tag.lower()]
+ if self.tag_saniziting_strategy == 'first':
+ if tag != previous[0]:
+ LOGGER.warning("Changing spelling of {0} name '{1}' to {2}.".format('category' if is_category else 'tag', tag, previous[0]))
+ return previous[0]
+ else:
+ LOGGER.error("Unknown tag sanitizing strategy '{0}'!".format(self.tag_saniziting_strategy))
+ sys.exit(1)
+ return tag
+
def import_postpage_item(self, item, wordpress_namespace, out_folder=None, attachments=None):
"""Take an item from the feed and creates a post file."""
if out_folder is None:
out_folder = 'posts'
title = get_text_tag(item, 'title', 'NO TITLE')
+
+        # Titles can have line breaks in them, particularly when they are
+        # created by third-party tools that post to WordPress.
+        # Handle Windows-style and Unix-style line endings.
+ title = title.replace('\r\n', ' ').replace('\n', ' ')
+
# link is something like http://foo.com/2012/09/01/hello-world/
# So, take the path, utils.slugify it, and that's our slug
link = get_text_tag(item, 'link', None)
@@ -760,7 +890,10 @@ class CommandImportWordpress(Command, ImportMixin):
path = unquote(parsed.path.strip('/'))
try:
- path = path.decode('utf8')
+ if isinstance(path, bytes):
+ path = path.decode('utf8', 'replace')
+ else:
+ path = path
except AttributeError:
pass
@@ -782,7 +915,7 @@ class CommandImportWordpress(Command, ImportMixin):
else:
if len(pathlist) > 1:
out_folder = os.path.join(*([out_folder] + pathlist[:-1]))
- slug = utils.slugify(pathlist[-1])
+ slug = utils.slugify(pathlist[-1], self.lang)
description = get_text_tag(item, 'description', '')
post_date = get_text_tag(
@@ -809,17 +942,19 @@ class CommandImportWordpress(Command, ImportMixin):
tags = []
categories = []
+ post_status = 'published'
+ has_math = "no"
if status == 'trash':
- LOGGER.warn('Trashed post "{0}" will not be imported.'.format(title))
+ LOGGER.warning('Trashed post "{0}" will not be imported.'.format(title))
return False
elif status == 'private':
- tags.append('private')
is_draft = False
is_private = True
+ post_status = 'private'
elif status != 'publish':
- tags.append('draft')
is_draft = True
is_private = False
+ post_status = 'draft'
else:
is_draft = False
is_private = False
@@ -831,14 +966,23 @@ class CommandImportWordpress(Command, ImportMixin):
type = tag.attrib['domain']
if text == 'Uncategorized' and type == 'category':
continue
- self.all_tags.add(text)
if type == 'category':
- categories.append(type)
+ categories.append(text)
else:
tags.append(text)
if '$latex' in content:
- tags.append('mathjax')
+ has_math = "yes"
+
+ for i, cat in enumerate(categories[:]):
+ cat = self._sanitize(cat, True)
+ categories[i] = cat
+ self.all_tags.add(cat)
+
+ for i, tag in enumerate(tags[:]):
+ tag = self._sanitize(tag, False)
+ tags[i] = tag
+ self.all_tags.add(tag)
# Find post format if it's there
post_format = 'wp'
@@ -849,53 +993,75 @@ class CommandImportWordpress(Command, ImportMixin):
post_format = 'wp'
if is_draft and self.exclude_drafts:
- LOGGER.notice('Draft "{0}" will not be imported.'.format(title))
+ LOGGER.warning('Draft "{0}" will not be imported.'.format(title))
return False
elif is_private and self.exclude_privates:
- LOGGER.notice('Private post "{0}" will not be imported.'.format(title))
+ LOGGER.warning('Private post "{0}" will not be imported.'.format(title))
return False
elif content.strip() or self.import_empty_items:
# If no content is found, no files are written.
self.url_map[link] = (self.context['SITE_URL'] +
out_folder.rstrip('/') + '/' + slug +
'.html').replace(os.sep, '/')
- if hasattr(self, "separate_qtranslate_content") \
- and self.separate_qtranslate_content:
- content_translations = separate_qtranslate_content(content)
+ default_language = self.context["DEFAULT_LANG"]
+ if self.separate_qtranslate_content:
+ content_translations = separate_qtranslate_tagged_langs(content)
+ title_translations = separate_qtranslate_tagged_langs(title)
else:
content_translations = {"": content}
- default_language = self.context["DEFAULT_LANG"]
+ title_translations = {"": title}
+            # In case of a mismatch between the languages found in the title and in the content.
+ default_title = title_translations.get(default_language, title)
+ extra_languages = [lang for lang in content_translations.keys() if lang not in ("", default_language)]
+ for extra_lang in extra_languages:
+ self.extra_languages.add(extra_lang)
+ translations_dict = get_default_translations_dict(default_language, extra_languages)
+ current_translations_config = {
+ "DEFAULT_LANG": default_language,
+ "TRANSLATIONS": translations_dict,
+ "TRANSLATIONS_PATTERN": self.context["TRANSLATIONS_PATTERN"]
+ }
for lang, content in content_translations.items():
try:
content, extension, rewrite_html = self.transform_content(content, post_format, attachments)
- except:
+ except Exception:
LOGGER.error(('Cannot interpret post "{0}" (language {1}) with post ' +
'format {2}!').format(os.path.join(out_folder, slug), lang, post_format))
return False
- if lang:
- out_meta_filename = slug + '.meta'
- if lang == default_language:
- out_content_filename = slug + '.' + extension
- else:
- out_content_filename \
- = utils.get_translation_candidate(self.context,
- slug + "." + extension, lang)
- self.extra_languages.add(lang)
- meta_slug = slug
- else:
- out_meta_filename = slug + '.meta'
- out_content_filename = slug + '.' + extension
- meta_slug = slug
+
+ out_meta_filename, out_content_filename = self._create_meta_and_content_filenames(
+ slug, extension, lang, default_language, current_translations_config)
+
tags, other_meta = self._create_metadata(status, excerpt, tags, categories,
post_name=os.path.join(out_folder, slug))
- self.write_metadata(os.path.join(self.output_folder, out_folder,
- out_meta_filename),
- title, meta_slug, post_date, description, tags, **other_meta)
- self.write_content(
- os.path.join(self.output_folder,
- out_folder, out_content_filename),
- content,
- rewrite_html)
+ current_title = title_translations.get(lang, default_title)
+ meta = {
+ "title": current_title,
+ "slug": slug,
+ "date": post_date,
+ "description": description,
+ "tags": ','.join(tags),
+ "status": post_status,
+ "has_math": has_math,
+ }
+ meta.update(other_meta)
+ if self.onefile:
+ self.write_post(
+ os.path.join(self.output_folder,
+ out_folder, out_content_filename),
+ content,
+ meta,
+ self._get_compiler(),
+ rewrite_html)
+ else:
+ self.write_metadata(os.path.join(self.output_folder, out_folder,
+ out_meta_filename),
+ current_title, slug, post_date, description, tags, **other_meta)
+ self.write_content(
+ os.path.join(self.output_folder,
+ out_folder, out_content_filename),
+ content,
+ rewrite_html)
if self.export_comments:
comments = []
@@ -905,13 +1071,13 @@ class CommandImportWordpress(Command, ImportMixin):
comments.append(comment)
for comment in comments:
- comment_filename = slug + "." + str(comment['id']) + ".wpcomment"
+ comment_filename = "{0}.{1}.wpcomment".format(slug, comment['id'])
self._write_comment(os.path.join(self.output_folder, out_folder, comment_filename), comment)
return (out_folder, slug)
else:
- LOGGER.warn(('Not going to import "{0}" because it seems to contain'
- ' no content.').format(title))
+ LOGGER.warning(('Not going to import "{0}" because it seems to contain'
+ ' no content.').format(title))
return False
def _extract_item_info(self, item):
@@ -937,7 +1103,7 @@ class CommandImportWordpress(Command, ImportMixin):
if parent_id is not None and int(parent_id) != 0:
self.attachments[int(parent_id)][post_id] = data
else:
- LOGGER.warn("Attachment #{0} ({1}) has no parent!".format(post_id, data['files']))
+ LOGGER.warning("Attachment #{0} ({1}) has no parent!".format(post_id, data['files']))
def write_attachments_info(self, path, attachments):
"""Write attachments info file."""
@@ -955,7 +1121,7 @@ class CommandImportWordpress(Command, ImportMixin):
if post_type == 'post':
out_folder_slug = self.import_postpage_item(item, wordpress_namespace, 'posts', attachments)
else:
- out_folder_slug = self.import_postpage_item(item, wordpress_namespace, 'stories', attachments)
+ out_folder_slug = self.import_postpage_item(item, wordpress_namespace, 'pages', attachments)
# Process attachment data
if attachments is not None:
# If post was exported, store data
@@ -975,8 +1141,8 @@ class CommandImportWordpress(Command, ImportMixin):
self.process_item_if_post_or_page(item)
# Assign attachments to posts
for post_id in self.attachments:
- LOGGER.warn(("Found attachments for post or page #{0}, but didn't find post or page. " +
- "(Attachments: {1})").format(post_id, [e['files'][0] for e in self.attachments[post_id].values()]))
+ LOGGER.warning(("Found attachments for post or page #{0}, but didn't find post or page. " +
+ "(Attachments: {1})").format(post_id, [e['files'][0] for e in self.attachments[post_id].values()]))
def get_text_tag(tag, name, default):
@@ -990,15 +1156,20 @@ def get_text_tag(tag, name, default):
return default
-def separate_qtranslate_content(text):
- """Parse the content of a wordpress post or page and separate qtranslate languages.
+def separate_qtranslate_tagged_langs(text):
+ """Parse the content of a wordpress post or page and separate languages.
+
+ For qtranslateX tags: [:LL]blabla[:]
- qtranslate tags: <!--:LL-->blabla<!--:-->
+    Note: qtranslate* plugins had a troubled history and used various
+    tags over time; applying the 'modernize_qtranslate_tags' function
+    first is required for this function to handle most of the legacy
+    cases.
"""
- # TODO: uniformize qtranslate tags <!--/en--> => <!--:-->
- qt_start = "<!--:"
- qt_end = "-->"
- qt_end_with_lang_len = 5
+ qt_start = "[:"
+ qt_end = "]"
+ qt_end_len = len(qt_end)
+ qt_end_with_lang_len = qt_end_len + 2
qt_chunks = text.split(qt_start)
content_by_lang = {}
common_txt_list = []
@@ -1010,9 +1181,9 @@ def separate_qtranslate_content(text):
# be some piece of common text or tags, or just nothing
lang = "" # default language
c = c.lstrip(qt_end)
- if not c:
+ if not c.strip():
continue
- elif c[2:].startswith(qt_end):
+ elif c[2:qt_end_with_lang_len].startswith(qt_end):
# a language specific section (with language code at the begining)
lang = c[:2]
c = c[qt_end_with_lang_len:]
@@ -1033,3 +1204,26 @@ def separate_qtranslate_content(text):
for l in content_by_lang.keys():
content_by_lang[l] = " ".join(content_by_lang[l])
return content_by_lang
+
+
+def modernize_qtranslate_tags(xml_bytes):
+ """
+ Uniformize the "tag" used by various version of qtranslate.
+
+ The resulting byte string will only contain one set of qtranslate tags
+ (namely [:LG] and [:]), older ones being converted to new ones.
+ """
+ old_start_lang = re.compile(b"<!--:?(\\w{2})-->")
+ new_start_lang = b"[:\\1]"
+ old_end_lang = re.compile(b"<!--(/\\w{2}|:)-->")
+ new_end_lang = b"[:]"
+ title_match = re.compile(b"<title>(.*?)</title>")
+ modern_starts = old_start_lang.sub(new_start_lang, xml_bytes)
+ modernized_bytes = old_end_lang.sub(new_end_lang, modern_starts)
+
+ def title_escape(match):
+ title = match.group(1)
+ title = title.replace(b"&", b"&amp;").replace(b"<", b"&lt;").replace(b">", b"&gt;")
+ return b"<title>" + title + b"</title>"
+ fixed_bytes = title_match.sub(title_escape, modernized_bytes)
+ return fixed_bytes
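
A toy example of the tag normalization performed by modernize_qtranslate_tags above, using standalone regexes equivalent to the ones in the function and a made-up export fragment: legacy qtranslate comment tags are rewritten to the modern [:LL] / [:] form before separate_qtranslate_tagged_langs splits the languages.

import re

xml_bytes = b"<!--:en-->Hello<!--:--><!--:fr-->Bonjour<!--/fr-->"
modern = re.sub(rb"<!--:?(\w{2})-->", rb"[:\1]", xml_bytes)  # old start tags -> [:LL]
modern = re.sub(rb"<!--(/\w{2}|:)-->", rb"[:]", modern)      # old end tags   -> [:]
print(modern)  # b'[:en]Hello[:][:fr]Bonjour[:]'
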
diff --git a/nikola/plugins/command/init.plugin b/nikola/plugins/command/init.plugin
index a5404c4..6ee27d3 100644
--- a/nikola/plugins/command/init.plugin
+++ b/nikola/plugins/command/init.plugin
@@ -5,9 +5,9 @@ module = init
[Documentation]
author = Roberto Alsina
version = 1.0
-website = http://getnikola.com
+website = https://getnikola.com/
description = Create a new site.
[Nikola]
-plugincategory = Command
+PluginCategory = Command
diff --git a/nikola/plugins/command/init.py b/nikola/plugins/command/init.py
index 91ccdb4..0026edc 100644
--- a/nikola/plugins/command/init.py
+++ b/nikola/plugins/command/init.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2015 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,28 +26,28 @@
"""Create a new site."""
-from __future__ import print_function, unicode_literals
-import os
-import shutil
+import datetime
import io
import json
+import os
+import shutil
import textwrap
-import datetime
import unidecode
+from urllib.parse import urlsplit, urlunsplit
+
import dateutil.tz
import dateutil.zoneinfo
from mako.template import Template
from pkg_resources import resource_filename
-import tarfile
import nikola
-from nikola.nikola import DEFAULT_TRANSLATIONS_PATTERN, DEFAULT_INDEX_READ_MORE_LINK, DEFAULT_RSS_READ_MORE_LINK, LEGAL_VALUES, urlsplit, urlunsplit
+from nikola.nikola import DEFAULT_INDEX_READ_MORE_LINK, DEFAULT_FEED_READ_MORE_LINK, LEGAL_VALUES
from nikola.plugin_categories import Command
-from nikola.utils import ask, ask_yesno, get_logger, makedirs, STDERR_HANDLER, load_messages
+from nikola.utils import ask, ask_yesno, get_logger, makedirs, load_messages
from nikola.packages.tzlocal import get_localzone
-LOGGER = get_logger('init', STDERR_HANDLER)
+LOGGER = get_logger('init')
SAMPLE_CONF = {
'BLOG_AUTHOR': "Your Name",
@@ -55,48 +55,51 @@ SAMPLE_CONF = {
'SITE_URL': "https://example.com/",
'BLOG_EMAIL': "joe@demo.site",
'BLOG_DESCRIPTION': "This is a demo site for Nikola.",
- 'PRETTY_URLS': False,
- 'STRIP_INDEXES': False,
+ 'PRETTY_URLS': True,
+ 'STRIP_INDEXES': True,
'DEFAULT_LANG': "en",
'TRANSLATIONS': """{
DEFAULT_LANG: "",
# Example for another language:
# "es": "./es",
}""",
- 'THEME': 'bootstrap3',
+ 'THEME': LEGAL_VALUES['DEFAULT_THEME'],
'TIMEZONE': 'UTC',
'COMMENT_SYSTEM': 'disqus',
'COMMENT_SYSTEM_ID': 'nikolademo',
'CATEGORY_ALLOW_HIERARCHIES': False,
'CATEGORY_OUTPUT_FLAT_HIERARCHY': False,
- 'TRANSLATIONS_PATTERN': DEFAULT_TRANSLATIONS_PATTERN,
'INDEX_READ_MORE_LINK': DEFAULT_INDEX_READ_MORE_LINK,
- 'RSS_READ_MORE_LINK': DEFAULT_RSS_READ_MORE_LINK,
+ 'FEED_READ_MORE_LINK': DEFAULT_FEED_READ_MORE_LINK,
'POSTS': """(
("posts/*.rst", "posts", "post.tmpl"),
+ ("posts/*.md", "posts", "post.tmpl"),
("posts/*.txt", "posts", "post.tmpl"),
+ ("posts/*.html", "posts", "post.tmpl"),
)""",
'PAGES': """(
- ("stories/*.rst", "stories", "story.tmpl"),
- ("stories/*.txt", "stories", "story.tmpl"),
+ ("pages/*.rst", "pages", "page.tmpl"),
+ ("pages/*.md", "pages", "page.tmpl"),
+ ("pages/*.txt", "pages", "page.tmpl"),
+ ("pages/*.html", "pages", "page.tmpl"),
)""",
'COMPILERS': """{
- "rest": ('.rst', '.txt'),
- "markdown": ('.md', '.mdown', '.markdown'),
- "textile": ('.textile',),
- "txt2tags": ('.t2t',),
- "bbcode": ('.bb',),
- "wiki": ('.wiki',),
- "ipynb": ('.ipynb',),
- "html": ('.html', '.htm'),
+ "rest": ['.rst', '.txt'],
+ "markdown": ['.md', '.mdown', '.markdown'],
+ "textile": ['.textile'],
+ "txt2tags": ['.t2t'],
+ "bbcode": ['.bb'],
+ "wiki": ['.wiki'],
+ "ipynb": ['.ipynb'],
+ "html": ['.html', '.htm'],
# PHP files are rendered the usual way (i.e. with the full templates).
# The resulting files have .php extensions, making it possible to run
# them without reconfiguring your server to recognize them.
- "php": ('.php',),
+ "php": ['.php'],
# Pandoc detects the input from the source filename
# but is disabled by default as it would conflict
# with many of the others.
- # "pandoc": ('.rst', '.md', '.txt'),
+ # "pandoc": ['.rst', '.md', '.txt'],
}""",
'NAVIGATION_LINKS': """{
DEFAULT_LANG: (
@@ -106,6 +109,7 @@ SAMPLE_CONF = {
),
}""",
'REDIRECTIONS': [],
+ '_METADATA_MAPPING_FORMATS': ', '.join(LEGAL_VALUES['METADATA_MAPPING'])
}
@@ -169,6 +173,14 @@ def format_default_translations_config(additional_languages):
return "{{\n{0}\n}}".format("\n".join(lang_paths))
+def get_default_translations_dict(default_lang, additional_languages):
+ """Generate a TRANSLATIONS dict matching the config from 'format_default_translations_config'."""
+ tr = {default_lang: ''}
+ for l in additional_languages:
+ tr[l] = './' + l
+ return tr
+
+
def format_navigation_links(additional_languages, default_lang, messages, strip_indexes=False):
"""Return the string to configure NAVIGATION_LINKS."""
f = u"""\
@@ -210,17 +222,28 @@ def prepare_config(config):
"""Parse sample config with JSON."""
p = config.copy()
p.update({k: json.dumps(v, ensure_ascii=False) for k, v in p.items()
- if k not in ('POSTS', 'PAGES', 'COMPILERS', 'TRANSLATIONS', 'NAVIGATION_LINKS', '_SUPPORTED_LANGUAGES', '_SUPPORTED_COMMENT_SYSTEMS', 'INDEX_READ_MORE_LINK', 'RSS_READ_MORE_LINK')})
+ if k not in ('POSTS', 'PAGES', 'COMPILERS', 'TRANSLATIONS', 'NAVIGATION_LINKS', '_SUPPORTED_LANGUAGES', '_SUPPORTED_COMMENT_SYSTEMS', 'INDEX_READ_MORE_LINK', 'FEED_READ_MORE_LINK', '_METADATA_MAPPING_FORMATS')})
# READ_MORE_LINKs require some special treatment.
p['INDEX_READ_MORE_LINK'] = "'" + p['INDEX_READ_MORE_LINK'].replace("'", "\\'") + "'"
- p['RSS_READ_MORE_LINK'] = "'" + p['RSS_READ_MORE_LINK'].replace("'", "\\'") + "'"
+ p['FEED_READ_MORE_LINK'] = "'" + p['FEED_READ_MORE_LINK'].replace("'", "\\'") + "'"
# fix booleans and None
p.update({k: str(v) for k, v in config.items() if isinstance(v, bool) or v is None})
return p
-class CommandInit(Command):
+def test_destination(destination, demo=False):
+ """Check if the destination already exists, which can break demo site creation."""
+ # Issue #2214
+ if demo and os.path.exists(destination):
+ LOGGER.warning("The directory {0} already exists, and a new demo site cannot be initialized in an existing directory.".format(destination))
+ LOGGER.warning("Please remove the directory and try again, or use another directory.")
+ LOGGER.info("Hint: If you want to initialize a git repository in this directory, run `git init` in the directory after creating a Nikola site.")
+ return False
+ else:
+ return True
+
+class CommandInit(Command):
"""Create a new site."""
name = "init"
@@ -272,11 +295,11 @@ class CommandInit(Command):
@classmethod
def create_empty_site(cls, target):
"""Create an empty site with directories only."""
- for folder in ('files', 'galleries', 'listings', 'posts', 'stories'):
+ for folder in ('files', 'galleries', 'images', 'listings', 'posts', 'pages'):
makedirs(os.path.join(target, folder))
@staticmethod
- def ask_questions(target):
+ def ask_questions(target, demo=False):
"""Ask some questions about Nikola."""
def urlhandler(default, toconf):
answer = ask('Site URL', 'https://example.com/')
@@ -310,7 +333,6 @@ class CommandInit(Command):
def prettyhandler(default, toconf):
SAMPLE_CONF['PRETTY_URLS'] = ask_yesno('Enable pretty URLs (/page/ instead of /page.html) that don\'t need web server configuration?', default=True)
- SAMPLE_CONF['STRIP_INDEXES'] = SAMPLE_CONF['PRETTY_URLS']
def lhandler(default, toconf, show_header=True):
if show_header:
@@ -341,13 +363,12 @@ class CommandInit(Command):
# Get messages for navigation_links. In order to do this, we need
# to generate a throwaway TRANSLATIONS dict.
- tr = {default: ''}
- for l in langs:
- tr[l] = './' + l
+ tr = get_default_translations_dict(default, langs)
+
# Assuming that base contains all the locales, and that base does
# not inherit from anywhere.
try:
- messages = load_messages(['base'], tr, default)
+ messages = load_messages(['base'], tr, default, themes_dirs=['themes'])
SAMPLE_CONF['NAVIGATION_LINKS'] = format_navigation_links(langs, default, messages, SAMPLE_CONF['STRIP_INDEXES'])
except nikola.utils.LanguageNotFoundError as e:
print(" ERROR: the language '{0}' is not supported.".format(e.lang))
@@ -358,28 +379,28 @@ class CommandInit(Command):
def tzhandler(default, toconf):
print("\nPlease choose the correct time zone for your blog. Nikola uses the tz database.")
print("You can find your time zone here:")
- print("http://en.wikipedia.org/wiki/List_of_tz_database_time_zones")
+ print("https://en.wikipedia.org/wiki/List_of_tz_database_time_zones")
print("")
answered = False
while not answered:
try:
lz = get_localzone()
- except:
+ except Exception:
lz = None
answer = ask('Time zone', lz if lz else "UTC")
tz = dateutil.tz.gettz(answer)
if tz is None:
print(" WARNING: Time zone not found. Searching list of time zones for a match.")
- zonesfile = tarfile.open(fileobj=dateutil.zoneinfo.getzoneinfofile_stream())
- zonenames = [zone for zone in zonesfile.getnames() if answer.lower() in zone.lower()]
- if len(zonenames) == 1:
- tz = dateutil.tz.gettz(zonenames[0])
- answer = zonenames[0]
+ all_zones = dateutil.zoneinfo.get_zonefile_instance().zones
+ matching_zones = [zone for zone in all_zones if answer.lower() in zone.lower()]
+ if len(matching_zones) == 1:
+ tz = dateutil.tz.gettz(matching_zones[0])
+ answer = matching_zones[0]
print(" Picking '{0}'.".format(answer))
- elif len(zonenames) > 1:
+ elif len(matching_zones) > 1:
print(" The following time zones match your query:")
- print(' ' + '\n '.join(zonenames))
+ print(' ' + '\n '.join(matching_zones))
continue
if tz is not None:
@@ -441,7 +462,7 @@ class CommandInit(Command):
print("If you do not want to answer and want to go with the defaults instead, simply restart with the `-q` parameter.")
for query, default, toconf, destination in questions:
- if target and destination == '!target':
+ if target and destination == '!target' and test_destination(target, demo):
# Skip the destination question if we know it already
pass
else:
@@ -458,8 +479,9 @@ class CommandInit(Command):
if toconf:
SAMPLE_CONF[destination] = answer
if destination == '!target':
- while not answer:
- print(' ERROR: you need to specify a target directory.\n')
+ while not answer or not test_destination(answer, demo):
+ if not answer:
+ print(' ERROR: you need to specify a target directory.\n')
answer = ask(query, default)
STORAGE['target'] = answer
@@ -475,7 +497,7 @@ class CommandInit(Command):
except IndexError:
target = None
if not options.get('quiet'):
- st = self.ask_questions(target=target)
+ st = self.ask_questions(target=target, demo=options.get('demo'))
try:
if not target:
target = st['target']
@@ -488,11 +510,13 @@ class CommandInit(Command):
Options:
-q, --quiet Do not ask questions about config.
-d, --demo Create a site filled with example data.""")
- return False
+ return 1
if not options.get('demo'):
self.create_empty_site(target)
LOGGER.info('Created empty site at {0}.'.format(target))
else:
+ if not test_destination(target, True):
+ return 2
self.copy_sample_site(target)
LOGGER.info("A new site with example data has been created at "
"{0}.".format(target))
diff --git a/nikola/plugins/command/install_theme.plugin b/nikola/plugins/command/install_theme.plugin
deleted file mode 100644
index 8434f2e..0000000
--- a/nikola/plugins/command/install_theme.plugin
+++ /dev/null
@@ -1,13 +0,0 @@
-[Core]
-name = install_theme
-module = install_theme
-
-[Documentation]
-author = Roberto Alsina
-version = 1.0
-website = http://getnikola.com
-description = Install a theme into the current site.
-
-[Nikola]
-plugincategory = Command
-
diff --git a/nikola/plugins/command/install_theme.py b/nikola/plugins/command/install_theme.py
deleted file mode 100644
index f02252e..0000000
--- a/nikola/plugins/command/install_theme.py
+++ /dev/null
@@ -1,172 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright © 2012-2015 Roberto Alsina and others.
-
-# Permission is hereby granted, free of charge, to any
-# person obtaining a copy of this software and associated
-# documentation files (the "Software"), to deal in the
-# Software without restriction, including without limitation
-# the rights to use, copy, modify, merge, publish,
-# distribute, sublicense, and/or sell copies of the
-# Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice
-# shall be included in all copies or substantial portions of
-# the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
-# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
-# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
-# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
-# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
-# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-"""Install a theme."""
-
-from __future__ import print_function
-import os
-import io
-import time
-import requests
-
-import pygments
-from pygments.lexers import PythonLexer
-from pygments.formatters import TerminalFormatter
-
-from nikola.plugin_categories import Command
-from nikola import utils
-
-LOGGER = utils.get_logger('install_theme', utils.STDERR_HANDLER)
-
-
-class CommandInstallTheme(Command):
-
- """Install a theme."""
-
- name = "install_theme"
- doc_usage = "[[-u] theme_name] | [[-u] -l]"
- doc_purpose = "install theme into current site"
- output_dir = 'themes'
- cmd_options = [
- {
- 'name': 'list',
- 'short': 'l',
- 'long': 'list',
- 'type': bool,
- 'default': False,
- 'help': 'Show list of available themes.'
- },
- {
- 'name': 'url',
- 'short': 'u',
- 'long': 'url',
- 'type': str,
- 'help': "URL for the theme repository (default: "
- "https://themes.getnikola.com/v7/themes.json)",
- 'default': 'https://themes.getnikola.com/v7/themes.json'
- },
- {
- 'name': 'getpath',
- 'short': 'g',
- 'long': 'get-path',
- 'type': bool,
- 'default': False,
- 'help': "Print the path for installed theme",
- },
- ]
-
- def _execute(self, options, args):
- """Install theme into current site."""
- listing = options['list']
- url = options['url']
- if args:
- name = args[0]
- else:
- name = None
-
- if options['getpath'] and name:
- path = utils.get_theme_path(name)
- if path:
- print(path)
- else:
- print('not installed')
- return 0
-
- if name is None and not listing:
- LOGGER.error("This command needs either a theme name or the -l option.")
- return False
- try:
- data = requests.get(url).json()
- except requests.exceptions.SSLError:
- LOGGER.warning("SSL error, using http instead of https (press ^C to abort)")
- time.sleep(1)
- url = url.replace('https', 'http', 1)
- data = requests.get(url).json()
- if listing:
- print("Themes:")
- print("-------")
- for theme in sorted(data.keys()):
- print(theme)
- return True
- else:
- # `name` may be modified by the while loop.
- origname = name
- installstatus = self.do_install(name, data)
- # See if the theme's parent is available. If not, install it
- while True:
- parent_name = utils.get_parent_theme_name(name)
- if parent_name is None:
- break
- try:
- utils.get_theme_path(parent_name)
- break
- except: # Not available
- self.do_install(parent_name, data)
- name = parent_name
- if installstatus:
- LOGGER.notice('Remember to set THEME="{0}" in conf.py to use this theme.'.format(origname))
-
- def do_install(self, name, data):
- """Download and install a theme."""
- if name in data:
- utils.makedirs(self.output_dir)
- url = data[name]
- LOGGER.info("Downloading '{0}'".format(url))
- try:
- zip_data = requests.get(url).content
- except requests.exceptions.SSLError:
- LOGGER.warning("SSL error, using http instead of https (press ^C to abort)")
- time.sleep(1)
- url = url.replace('https', 'http', 1)
- zip_data = requests.get(url).content
-
- zip_file = io.BytesIO()
- zip_file.write(zip_data)
- LOGGER.info("Extracting '{0}' into themes/".format(name))
- utils.extract_all(zip_file)
- dest_path = os.path.join(self.output_dir, name)
- else:
- dest_path = os.path.join(self.output_dir, name)
- try:
- theme_path = utils.get_theme_path(name)
- LOGGER.error("Theme '{0}' is already installed in {1}".format(name, theme_path))
- except Exception:
- LOGGER.error("Can't find theme {0}".format(name))
-
- return False
-
- confpypath = os.path.join(dest_path, 'conf.py.sample')
- if os.path.exists(confpypath):
- LOGGER.notice('This theme has a sample config file. Integrate it with yours in order to make this theme work!')
- print('Contents of the conf.py.sample file:\n')
- with io.open(confpypath, 'r', encoding='utf-8') as fh:
- if self.site.colorful:
- print(utils.indent(pygments.highlight(
- fh.read(), PythonLexer(), TerminalFormatter()),
- 4 * ' '))
- else:
- print(utils.indent(fh.read(), 4 * ' '))
- return True
diff --git a/nikola/plugins/command/new_page.plugin b/nikola/plugins/command/new_page.plugin
index 145a419..8734805 100644
--- a/nikola/plugins/command/new_page.plugin
+++ b/nikola/plugins/command/new_page.plugin
@@ -5,9 +5,9 @@ module = new_page
[Documentation]
author = Roberto Alsina, Chris Warrick
version = 1.0
-website = http://getnikola.com
+website = https://getnikola.com/
description = Create a new page.
[Nikola]
-plugincategory = Command
+PluginCategory = Command
diff --git a/nikola/plugins/command/new_page.py b/nikola/plugins/command/new_page.py
index 811e28b..0f7996a 100644
--- a/nikola/plugins/command/new_page.py
+++ b/nikola/plugins/command/new_page.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2015 Roberto Alsina, Chris Warrick and others.
+# Copyright © 2012-2020 Roberto Alsina, Chris Warrick and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,13 +26,11 @@
"""Create a new page."""
-from __future__ import unicode_literals, print_function
from nikola.plugin_categories import Command
class CommandNewPage(Command):
-
"""Create a new page."""
name = "new_page"
@@ -108,6 +106,7 @@ class CommandNewPage(Command):
options['tags'] = ''
options['schedule'] = False
options['is_page'] = True
+ options['date-path'] = False
# Even though stuff was split into `new_page`, it’s easier to do it
# there not to duplicate the code.
p = self.site.plugin_manager.getPluginByName('new_post', 'Command').plugin_object
diff --git a/nikola/plugins/command/new_post.plugin b/nikola/plugins/command/new_post.plugin
index d88469f..efdeb58 100644
--- a/nikola/plugins/command/new_post.plugin
+++ b/nikola/plugins/command/new_post.plugin
@@ -5,9 +5,9 @@ module = new_post
[Documentation]
author = Roberto Alsina
version = 1.0
-website = http://getnikola.com
+website = https://getnikola.com/
description = Create a new post.
[Nikola]
-plugincategory = Command
+PluginCategory = Command
diff --git a/nikola/plugins/command/new_post.py b/nikola/plugins/command/new_post.py
index f9fe3ff..e6eabbd 100644
--- a/nikola/plugins/command/new_post.py
+++ b/nikola/plugins/command/new_post.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2015 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,23 +26,23 @@
"""Create a new post."""
-from __future__ import unicode_literals, print_function
import io
import datetime
+import operator
import os
-import sys
+import shutil
import subprocess
-import operator
+import sys
-from blinker import signal
import dateutil.tz
+from blinker import signal
from nikola.plugin_categories import Command
from nikola import utils
COMPILERS_DOC_LINK = 'https://getnikola.com/handbook.html#configuring-other-input-formats'
-POSTLOGGER = utils.get_logger('new_post', utils.STDERR_HANDLER)
-PAGELOGGER = utils.get_logger('new_page', utils.STDERR_HANDLER)
+POSTLOGGER = utils.get_logger('new_post')
+PAGELOGGER = utils.get_logger('new_page')
LOGGER = POSTLOGGER
@@ -89,7 +89,7 @@ def get_date(schedule=False, rule=None, last_date=None, tz=None, iso8601=False):
except ImportError:
LOGGER.error('To use the --schedule switch of new_post, '
'you have to install the "dateutil" package.')
- rrule = None # NOQA
+ rrule = None
if schedule and rrule and rule:
try:
rule_ = rrule.rrulestr(rule, dtstart=last_date or date)
@@ -110,11 +110,10 @@ def get_date(schedule=False, rule=None, last_date=None, tz=None, iso8601=False):
else:
tz_str = ' UTC'
- return date.strftime('%Y-%m-%d %H:%M:%S') + tz_str
+ return (date.strftime('%Y-%m-%d %H:%M:%S') + tz_str, date)
class CommandNewPost(Command):
-
"""Create a new post."""
name = "new_post"
@@ -204,7 +203,14 @@ class CommandNewPost(Command):
'default': '',
'help': 'Import an existing file instead of creating a placeholder'
},
-
+ {
+ 'name': 'date-path',
+ 'short': 'd',
+ 'long': 'date-path',
+ 'type': bool,
+ 'default': False,
+        'help': 'Create post with date path (e.g. year/month/day, see NEW_POST_DATE_PATH_FORMAT in config)'
+ },
]
def _execute(self, options, args):
@@ -234,6 +240,10 @@ class CommandNewPost(Command):
twofile = options['twofile']
import_file = options['import']
wants_available = options['available-formats']
+ date_path_opt = options['date-path']
+ date_path_auto = self.site.config['NEW_POST_DATE_PATH'] and content_type == 'post'
+ date_path_format = self.site.config['NEW_POST_DATE_PATH_FORMAT'].strip('/')
+ post_type = options.get('type', 'text')
if wants_available:
self.print_compilers()
@@ -255,16 +265,39 @@ class CommandNewPost(Command):
if "@" in content_format:
content_format, content_subformat = content_format.split("@")
- if not content_format: # Issue #400
+ if not content_format and path and not os.path.isdir(path):
+ # content_format not specified. If path was given, use
+ # it to guess (Issue #2798)
+ extension = os.path.splitext(path)[-1]
+ for compiler, extensions in self.site.config['COMPILERS'].items():
+ if extension in extensions:
+ content_format = compiler
+ if not content_format:
+ LOGGER.error("Unknown {0} extension {1}, maybe you need to install a plugin or enable an existing one?".format(content_type, extension))
+ return
+
+ elif not content_format and import_file:
+ # content_format not specified. If import_file was given, use
+ # it to guess (Issue #2798)
+ extension = os.path.splitext(import_file)[-1]
+ for compiler, extensions in self.site.config['COMPILERS'].items():
+ if extension in extensions:
+ content_format = compiler
+ if not content_format:
+ LOGGER.error("Unknown {0} extension {1}, maybe you need to install a plugin or enable an existing one?".format(content_type, extension))
+ return
+
+ elif not content_format: # Issue #400
content_format = get_default_compiler(
is_post,
self.site.config['COMPILERS'],
self.site.config['post_pages'])
- if content_format not in compiler_names:
- LOGGER.error("Unknown {0} format {1}, maybe you need to install a plugin?".format(content_type, content_format))
+ elif content_format not in compiler_names:
+ LOGGER.error("Unknown {0} format {1}, maybe you need to install a plugin or enable an existing one?".format(content_type, content_format))
self.print_compilers()
return
+
compiler_plugin = self.site.plugin_manager.getPluginByName(
content_format, "PageCompiler").plugin_object
@@ -286,7 +319,7 @@ class CommandNewPost(Command):
while not title:
title = utils.ask('Title')
- if isinstance(title, utils.bytes_str):
+ if isinstance(title, bytes):
try:
title = title.decode(sys.stdin.encoding)
except (AttributeError, TypeError): # for tests
@@ -294,28 +327,36 @@ class CommandNewPost(Command):
title = title.strip()
if not path:
- slug = utils.slugify(title)
+ slug = utils.slugify(title, lang=self.site.default_lang)
else:
- if isinstance(path, utils.bytes_str):
+ if isinstance(path, bytes):
try:
path = path.decode(sys.stdin.encoding)
except (AttributeError, TypeError): # for tests
path = path.decode('utf-8')
- slug = utils.slugify(os.path.splitext(os.path.basename(path))[0])
+ if os.path.isdir(path):
+ # If the user provides a directory, add the file name generated from title (Issue #2651)
+ slug = utils.slugify(title, lang=self.site.default_lang)
+ pattern = os.path.basename(entry[0])
+ suffix = pattern[1:]
+ path = os.path.join(path, slug + suffix)
+ else:
+ slug = utils.slugify(os.path.splitext(os.path.basename(path))[0], lang=self.site.default_lang)
- if isinstance(author, utils.bytes_str):
- try:
- author = author.decode(sys.stdin.encoding)
- except (AttributeError, TypeError): # for tests
- author = author.decode('utf-8')
+ if isinstance(author, bytes):
+ try:
+ author = author.decode(sys.stdin.encoding)
+ except (AttributeError, TypeError): # for tests
+ author = author.decode('utf-8')
# Calculate the date to use for the content
- schedule = options['schedule'] or self.site.config['SCHEDULE_ALL']
+ # SCHEDULE_ALL is post-only (Issue #2921)
+ schedule = options['schedule'] or (self.site.config['SCHEDULE_ALL'] and is_post)
rule = self.site.config['SCHEDULE_RULE']
self.site.scan_posts()
timeline = self.site.timeline
last_date = None if not timeline else timeline[0].date
- date = get_date(schedule, rule, last_date, self.site.tzinfo, self.site.config['FORCE_ISO8601'])
+ date, dateobj = get_date(schedule, rule, last_date, self.site.tzinfo, self.site.config['FORCE_ISO8601'])
data = {
'title': title,
'slug': slug,
@@ -323,16 +364,23 @@ class CommandNewPost(Command):
'tags': tags,
'link': '',
'description': '',
- 'type': 'text',
+ 'type': post_type,
}
- output_path = os.path.dirname(entry[0])
- meta_path = os.path.join(output_path, slug + ".meta")
- pattern = os.path.basename(entry[0])
- suffix = pattern[1:]
+
if not path:
+ pattern = os.path.basename(entry[0])
+ suffix = pattern[1:]
+ output_path = os.path.dirname(entry[0])
+ if date_path_auto or date_path_opt:
+ output_path += os.sep + dateobj.strftime(date_path_format)
+
txt_path = os.path.join(output_path, slug + suffix)
+ meta_path = os.path.join(output_path, slug + ".meta")
else:
+ if date_path_opt:
+ LOGGER.warning("A path has been specified, ignoring -d")
txt_path = os.path.join(self.site.original_cwd, path)
+ meta_path = os.path.splitext(txt_path)[0] + ".meta"
if (not onefile and os.path.isfile(meta_path)) or \
os.path.isfile(txt_path):
@@ -344,6 +392,9 @@ class CommandNewPost(Command):
signal('existing_' + content_type).send(self, **event)
LOGGER.error("The title already exists!")
+ LOGGER.info("Existing {0}'s text is at: {1}".format(content_type, txt_path))
+ if not onefile:
+ LOGGER.info("Existing {0}'s metadata is at: {1}".format(content_type, meta_path))
return 8
d_name = os.path.dirname(txt_path)
@@ -354,33 +405,38 @@ class CommandNewPost(Command):
metadata.update(self.site.config['ADDITIONAL_METADATA'])
data.update(metadata)
- # ipynb plugin needs the ipython kernel info. We get the kernel name
+ # ipynb plugin needs the Jupyter kernel info. We get the kernel name
# from the content_subformat and pass it to the compiler in the metadata
if content_format == "ipynb" and content_subformat is not None:
- metadata["ipython_kernel"] = content_subformat
+ metadata["jupyter_kernel"] = content_subformat
# Override onefile if not really supported.
if not compiler_plugin.supports_onefile and onefile:
onefile = False
- LOGGER.warn('This compiler does not support one-file posts.')
+ LOGGER.warning('This compiler does not support one-file posts.')
- if import_file:
- with io.open(import_file, 'r', encoding='utf-8') as fh:
+ if onefile and import_file:
+ with io.open(import_file, 'r', encoding='utf-8-sig') as fh:
content = fh.read()
- else:
+ elif not import_file:
if is_page:
content = self.site.MESSAGES[self.site.default_lang]["Write your page here."]
else:
content = self.site.MESSAGES[self.site.default_lang]["Write your post here."]
- compiler_plugin.create_post(
- txt_path, content=content, onefile=onefile, title=title,
- slug=slug, date=date, tags=tags, is_page=is_page, **metadata)
+
+ if (not onefile) and import_file:
+ # Two-file posts are copied on import (Issue #2380)
+ shutil.copy(import_file, txt_path)
+ else:
+ compiler_plugin.create_post(
+ txt_path, content=content, onefile=onefile, title=title,
+ slug=slug, date=date, tags=tags, is_page=is_page, type=post_type, **metadata)
event = dict(path=txt_path)
if not onefile: # write metadata file
with io.open(meta_path, "w+", encoding="utf8") as fd:
- fd.write(utils.write_metadata(data))
+ fd.write(utils.write_metadata(data, comment_wrap=False, site=self.site))
LOGGER.info("Your {0}'s metadata is at: {1}".format(content_type, meta_path))
event['meta_path'] = meta_path
LOGGER.info("Your {0}'s text is at: {1}".format(content_type, txt_path))
@@ -395,7 +451,7 @@ class CommandNewPost(Command):
if editor:
subprocess.call(to_run)
else:
- LOGGER.error('$EDITOR not set, cannot edit the post. Please do it manually.')
+ LOGGER.error('The $EDITOR environment variable is not set, cannot edit the post with \'-e\'. Please edit the post manually.')
def filter_post_pages(self, compiler, is_post):
"""Return the correct entry from post_pages.
@@ -512,6 +568,6 @@ class CommandNewPost(Command):
More compilers are available in the Plugins Index.
Compilers marked with ! and ~ require additional configuration:
- ! not in the PAGES/POSTS tuples (unused)
+ ! not in the POSTS/PAGES tuples and any post scanners (unused)
~ not in the COMPILERS dict (disabled)
Read more: {0}""".format(COMPILERS_DOC_LINK))
diff --git a/nikola/plugins/command/orphans.plugin b/nikola/plugins/command/orphans.plugin
index 669429d..5107032 100644
--- a/nikola/plugins/command/orphans.plugin
+++ b/nikola/plugins/command/orphans.plugin
@@ -5,9 +5,9 @@ module = orphans
[Documentation]
author = Roberto Alsina, Chris Warrick
version = 1.0
-website = http://getnikola.com
+website = https://getnikola.com/
description = List all orphans
[Nikola]
-plugincategory = Command
+PluginCategory = Command
diff --git a/nikola/plugins/command/orphans.py b/nikola/plugins/command/orphans.py
index b12cc67..0cf2e63 100644
--- a/nikola/plugins/command/orphans.py
+++ b/nikola/plugins/command/orphans.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2015 Roberto Alsina, Chris Warrick and others.
+# Copyright © 2012-2020 Roberto Alsina, Chris Warrick and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,7 +26,6 @@
"""List all orphans."""
-from __future__ import print_function
import os
from nikola.plugin_categories import Command
@@ -34,7 +33,6 @@ from nikola.plugins.command.check import real_scan_files
class CommandOrphans(Command):
-
"""List all orphans."""
name = "orphans"
diff --git a/nikola/plugins/command/plugin.plugin b/nikola/plugins/command/plugin.plugin
index d44dcf3..db99ceb 100644
--- a/nikola/plugins/command/plugin.plugin
+++ b/nikola/plugins/command/plugin.plugin
@@ -5,9 +5,9 @@ module = plugin
[Documentation]
author = Roberto Alsina and Chris Warrick
version = 1.0
-website = http://getnikola.com
+website = https://getnikola.com/
description = Manage Nikola plugins
[Nikola]
-plugincategory = Command
+PluginCategory = Command
diff --git a/nikola/plugins/command/plugin.py b/nikola/plugins/command/plugin.py
index f892ee9..33dee23 100644
--- a/nikola/plugins/command/plugin.py
+++ b/nikola/plugins/command/plugin.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2015 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,9 +26,10 @@
"""Manage plugins."""
-from __future__ import print_function
import io
+import json.decoder
import os
+import sys
import shutil
import subprocess
import time
@@ -41,16 +42,15 @@ from pygments.formatters import TerminalFormatter
from nikola.plugin_categories import Command
from nikola import utils
-LOGGER = utils.get_logger('plugin', utils.STDERR_HANDLER)
+LOGGER = utils.get_logger('plugin')
class CommandPlugin(Command):
-
"""Manage plugins."""
json = None
name = "plugin"
- doc_usage = "[[-u][--user] --install name] | [[-u] [-l |--upgrade|--list-installed] | [--uninstall name]]"
+ doc_usage = "[-u url] [--user] [-i name] [-r name] [--upgrade] [-l] [--list-installed]"
doc_purpose = "manage plugins"
output_dir = None
needs_config = False
@@ -84,9 +84,8 @@ class CommandPlugin(Command):
'short': 'u',
'long': 'url',
'type': str,
- 'help': "URL for the plugin repository (default: "
- "https://plugins.getnikola.com/v7/plugins.json)",
- 'default': 'https://plugins.getnikola.com/v7/plugins.json'
+ 'help': "URL for the plugin repository",
+ 'default': 'https://plugins.getnikola.com/v8/plugins.json'
},
{
'name': 'user',
@@ -137,11 +136,11 @@ class CommandPlugin(Command):
self.output_dir = options.get('output_dir')
else:
if not self.site.configured and not user_mode and install:
- LOGGER.notice('No site found, assuming --user')
+ LOGGER.warning('No site found, assuming --user')
user_mode = True
if user_mode:
- self.output_dir = os.path.expanduser('~/.nikola/plugins')
+ self.output_dir = os.path.expanduser(os.path.join('~', '.nikola', 'plugins'))
else:
self.output_dir = 'plugins'
@@ -177,8 +176,20 @@ class CommandPlugin(Command):
plugins.append([plugin.name, p])
plugins.sort()
+ print('Installed Plugins:')
+ print('------------------')
+ maxlength = max(len(i[0]) for i in plugins)
+ if self.site.colorful:
+ formatstring = '\x1b[1m{0:<{2}}\x1b[0m at {1}'
+ else:
+ formatstring = '{0:<{2}} at {1}'
for name, path in plugins:
- print('{0} at {1}'.format(name, path))
+ print(formatstring.format(name, path, maxlength))
+ dp = self.site.config['DISABLED_PLUGINS']
+ if dp:
+ print('\n\nAlso, you have disabled these plugins: {}'.format(', '.join(dp)))
+ else:
+ print('\n\nNo plugins are disabled.')
return 0
def do_upgrade(self, url):
@@ -232,43 +243,32 @@ class CommandPlugin(Command):
utils.extract_all(zip_file, self.output_dir)
dest_path = os.path.join(self.output_dir, name)
else:
- try:
- plugin_path = utils.get_plugin_path(name)
- except:
- LOGGER.error("Can't find plugin " + name)
- return 1
-
- utils.makedirs(self.output_dir)
- dest_path = os.path.join(self.output_dir, name)
- if os.path.exists(dest_path):
- LOGGER.error("{0} is already installed".format(name))
- return 1
-
- LOGGER.info('Copying {0} into plugins'.format(plugin_path))
- shutil.copytree(plugin_path, dest_path)
+ LOGGER.error("Can't find plugin " + name)
+ return 1
reqpath = os.path.join(dest_path, 'requirements.txt')
if os.path.exists(reqpath):
- LOGGER.notice('This plugin has Python dependencies.')
+ LOGGER.warning('This plugin has Python dependencies.')
LOGGER.info('Installing dependencies with pip...')
try:
- subprocess.check_call(('pip', 'install', '-r', reqpath))
+ subprocess.check_call((sys.executable, '-m', 'pip', 'install', '-r', reqpath))
except subprocess.CalledProcessError:
LOGGER.error('Could not install the dependencies.')
print('Contents of the requirements.txt file:\n')
- with io.open(reqpath, 'r', encoding='utf-8') as fh:
+ with io.open(reqpath, 'r', encoding='utf-8-sig') as fh:
print(utils.indent(fh.read(), 4 * ' '))
print('You have to install those yourself or through a '
'package manager.')
else:
LOGGER.info('Dependency installation succeeded.')
+
reqnpypath = os.path.join(dest_path, 'requirements-nonpy.txt')
if os.path.exists(reqnpypath):
- LOGGER.notice('This plugin has third-party '
- 'dependencies you need to install '
- 'manually.')
+ LOGGER.warning('This plugin has third-party '
+ 'dependencies you need to install '
+ 'manually.')
print('Contents of the requirements-nonpy.txt file:\n')
- with io.open(reqnpypath, 'r', encoding='utf-8') as fh:
+ with io.open(reqnpypath, 'r', encoding='utf-8-sig') as fh:
for l in fh.readlines():
i, j = l.split('::')
print(utils.indent(i.strip(), 4 * ' '))
@@ -277,28 +277,50 @@ class CommandPlugin(Command):
print('You have to install those yourself or through a package '
'manager.')
+
+ req_plug_path = os.path.join(dest_path, 'requirements-plugins.txt')
+ if os.path.exists(req_plug_path):
+ LOGGER.info('This plugin requires other Nikola plugins.')
+ LOGGER.info('Installing plugins...')
+ plugin_failure = False
+ try:
+ with io.open(req_plug_path, 'r', encoding='utf-8-sig') as inf:
+ for plugname in inf.readlines():
+ plugin_failure = self.do_install(url, plugname.strip(), show_install_notes) != 0
+ except Exception:
+ plugin_failure = True
+ if plugin_failure:
+ LOGGER.error('Could not install a plugin.')
+ print('Contents of the requirements-plugins.txt file:\n')
+ with io.open(req_plug_path, 'r', encoding='utf-8-sig') as fh:
+ print(utils.indent(fh.read(), 4 * ' '))
+ print('You have to install those yourself manually.')
+ else:
+ LOGGER.info('Dependency installation succeeded.')
+
confpypath = os.path.join(dest_path, 'conf.py.sample')
if os.path.exists(confpypath) and show_install_notes:
- LOGGER.notice('This plugin has a sample config file. Integrate it with yours in order to make this plugin work!')
+ LOGGER.warning('This plugin has a sample config file. Integrate it with yours in order to make this plugin work!')
print('Contents of the conf.py.sample file:\n')
- with io.open(confpypath, 'r', encoding='utf-8') as fh:
+ with io.open(confpypath, 'r', encoding='utf-8-sig') as fh:
if self.site.colorful:
- print(utils.indent(pygments.highlight(
- fh.read(), PythonLexer(), TerminalFormatter()),
- 4 * ' '))
+ print(pygments.highlight(fh.read(), PythonLexer(), TerminalFormatter()))
else:
- print(utils.indent(fh.read(), 4 * ' '))
+ print(fh.read())
return 0
def do_uninstall(self, name):
"""Uninstall a plugin."""
for plugin in self.site.plugin_manager.getAllPlugins(): # FIXME: this is repeated thrice
- p = plugin.path
- if os.path.isdir(p):
- p = p + os.sep
- else:
- p = os.path.dirname(p)
if name == plugin.name: # Uninstall this one
+ p = plugin.path
+ if os.path.isdir(p):
+ # Plugins that have a package in them need to delete parent
+ # Issue #2356
+ p = p + os.sep
+ p = os.path.abspath(os.path.join(p, os.pardir))
+ else:
+ p = os.path.dirname(p)
LOGGER.warning('About to uninstall plugin: {0}'.format(name))
LOGGER.warning('This will delete {0}'.format(p))
sure = utils.ask_yesno('Are you sure?')
@@ -314,10 +336,19 @@ class CommandPlugin(Command):
"""Download the JSON file with all plugins."""
if self.json is None:
try:
- self.json = requests.get(url).json()
- except requests.exceptions.SSLError:
- LOGGER.warning("SSL error, using http instead of https (press ^C to abort)")
- time.sleep(1)
- url = url.replace('https', 'http', 1)
- self.json = requests.get(url).json()
+ try:
+ self.json = requests.get(url).json()
+ except requests.exceptions.SSLError:
+ LOGGER.warning("SSL error, using http instead of https (press ^C to abort)")
+ time.sleep(1)
+ url = url.replace('https', 'http', 1)
+ self.json = requests.get(url).json()
+ except json.decoder.JSONDecodeError as e:
+ LOGGER.error("Failed to decode JSON data in response from server.")
+ LOGGER.error("JSON error encountered: " + str(e))
+            LOGGER.error("This issue might be caused by server-side issues, or by unusual activity in your "
+ "network (as determined by CloudFlare). Please visit https://plugins.getnikola.com/ in "
+ "a browser.")
+ sys.exit(2)
+
return self.json
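
The plugin dependency installation above now invokes pip as `python -m pip` via sys.executable, so requirements land in the same interpreter that is running Nikola, including inside a virtualenv. A minimal sketch of that pattern (the requirements path below is a placeholder):

    import subprocess
    import sys

    def pip_install_requirements(reqpath):
        """Install a requirements file into the current interpreter; return True on success."""
        try:
            subprocess.check_call((sys.executable, '-m', 'pip', 'install', '-r', reqpath))
            return True
        except subprocess.CalledProcessError:
            return False

    # pip_install_requirements('plugins/example_plugin/requirements.txt')  # path is illustrative
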
diff --git a/nikola/plugins/command/rst2html.plugin b/nikola/plugins/command/rst2html.plugin
index 02c9276..6f2fb25 100644
--- a/nikola/plugins/command/rst2html.plugin
+++ b/nikola/plugins/command/rst2html.plugin
@@ -5,9 +5,9 @@ module = rst2html
[Documentation]
author = Chris Warrick
version = 1.0
-website = http://getnikola.com
+website = https://getnikola.com/
description = Compile reStructuredText to HTML using the Nikola architecture
[Nikola]
-plugincategory = Command
+PluginCategory = Command
diff --git a/nikola/plugins/command/rst2html/__init__.py b/nikola/plugins/command/rst2html/__init__.py
index 06afffd..5576b35 100644
--- a/nikola/plugins/command/rst2html/__init__.py
+++ b/nikola/plugins/command/rst2html/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2015 Chris Warrick and others.
+# Copyright © 2015-2020 Chris Warrick and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,7 +26,6 @@
"""Compile reStructuredText to HTML, using Nikola architecture."""
-from __future__ import unicode_literals, print_function
import io
import lxml.html
@@ -36,7 +35,6 @@ from nikola.plugin_categories import Command
class CommandRst2Html(Command):
-
"""Compile reStructuredText to HTML, using Nikola architecture."""
name = "rst2html"
@@ -51,12 +49,12 @@ class CommandRst2Html(Command):
print("This command takes only one argument (input file name).")
return 2
source = args[0]
- with io.open(source, "r", encoding="utf8") as in_file:
+ with io.open(source, "r", encoding="utf-8-sig") as in_file:
data = in_file.read()
- output, error_level, deps = compiler.compile_html_string(data, source, True)
+ output, error_level, deps, shortcode_deps = compiler.compile_string(data, source, True)
- rstcss_path = resource_filename('nikola', 'data/themes/base/assets/css/rst.css')
- with io.open(rstcss_path, "r", encoding="utf8") as fh:
+ rstcss_path = resource_filename('nikola', 'data/themes/base/assets/css/rst_base.css')
+ with io.open(rstcss_path, "r", encoding="utf-8-sig") as fh:
rstcss = fh.read()
template_path = resource_filename('nikola', 'plugins/command/rst2html/rst2html.tmpl')
@@ -65,7 +63,7 @@ class CommandRst2Html(Command):
parser = lxml.html.HTMLParser(remove_blank_text=True)
doc = lxml.html.document_fromstring(template_output, parser)
html = b'<!DOCTYPE html>\n' + lxml.html.tostring(doc, encoding='utf8', method='html', pretty_print=True)
- print(html)
+ print(html.decode('utf-8'))
if error_level < 3:
return 0
else:
diff --git a/nikola/plugins/command/serve.plugin b/nikola/plugins/command/serve.plugin
index aca71ec..aa40073 100644
--- a/nikola/plugins/command/serve.plugin
+++ b/nikola/plugins/command/serve.plugin
@@ -5,9 +5,9 @@ module = serve
[Documentation]
author = Roberto Alsina
version = 1.0
-website = http://getnikola.com
+website = https://getnikola.com/
description = Start test server.
[Nikola]
-plugincategory = Command
+PluginCategory = Command
diff --git a/nikola/plugins/command/serve.py b/nikola/plugins/command/serve.py
index 0441c93..ede5179 100644
--- a/nikola/plugins/command/serve.py
+++ b/nikola/plugins/command/serve.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2015 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,43 +26,33 @@
"""Start test server."""
-from __future__ import print_function
import os
+import sys
import re
+import signal
import socket
import webbrowser
-try:
- from BaseHTTPServer import HTTPServer
- from SimpleHTTPServer import SimpleHTTPRequestHandler
-except ImportError:
- from http.server import HTTPServer # NOQA
- from http.server import SimpleHTTPRequestHandler # NOQA
-
-try:
- from StringIO import StringIO
-except ImportError:
- from io import BytesIO as StringIO # NOQA
-
+from http.server import HTTPServer
+from http.server import SimpleHTTPRequestHandler
+from io import BytesIO as StringIO
from nikola.plugin_categories import Command
-from nikola.utils import get_logger, STDERR_HANDLER
+from nikola.utils import dns_sd
class IPv6Server(HTTPServer):
-
"""An IPv6 HTTPServer."""
address_family = socket.AF_INET6
class CommandServe(Command):
-
"""Start test server."""
name = "serve"
doc_usage = "[options]"
doc_purpose = "start the test webserver"
- logger = None
+ dns_sd = None
cmd_options = (
{
@@ -71,7 +61,7 @@ class CommandServe(Command):
'long': 'port',
'default': 8000,
'type': int,
- 'help': 'Port number (default: 8000)',
+ 'help': 'Port number',
},
{
'name': 'address',
@@ -79,7 +69,7 @@ class CommandServe(Command):
'long': 'address',
'type': str,
'default': '',
- 'help': 'Address to bind (default: 0.0.0.0 – all local IPv4 interfaces)',
+ 'help': 'Address to bind, defaults to all local IPv4 interfaces',
},
{
'name': 'detach',
@@ -107,13 +97,24 @@ class CommandServe(Command):
},
)
+ def shutdown(self, signum=None, _frame=None):
+ """Shut down the server that is running detached."""
+ if self.dns_sd:
+ self.dns_sd.Reset()
+ if os.path.exists(self.serve_pidfile):
+ os.remove(self.serve_pidfile)
+ if not self.detached:
+ self.logger.info("Server is shutting down.")
+ if signum:
+ sys.exit(0)
+
def _execute(self, options, args):
"""Start test server."""
- self.logger = get_logger('serve', STDERR_HANDLER)
out_dir = self.site.config['OUTPUT_FOLDER']
if not os.path.isdir(out_dir):
self.logger.error("Missing '{0}' folder?".format(out_dir))
else:
+ self.serve_pidfile = os.path.abspath('nikolaserve.pid')
os.chdir(out_dir)
if '[' in options['address']:
options['address'] = options['address'].strip('[').strip(']')
@@ -129,37 +130,47 @@ class CommandServe(Command):
httpd = OurHTTP((options['address'], options['port']),
OurHTTPRequestHandler)
sa = httpd.socket.getsockname()
- self.logger.info("Serving HTTP on {0} port {1}...".format(*sa))
+ if ipv6:
+ server_url = "http://[{0}]:{1}/".format(*sa)
+ else:
+ server_url = "http://{0}:{1}/".format(*sa)
+ self.logger.info("Serving on {0} ...".format(server_url))
+
if options['browser']:
- if ipv6:
- server_url = "http://[{0}]:{1}/".format(*sa)
- else:
- server_url = "http://{0}:{1}/".format(*sa)
+ # Some browsers fail to load 0.0.0.0 (Issue #2755)
+ if sa[0] == '0.0.0.0':
+ server_url = "http://127.0.0.1:{1}/".format(*sa)
self.logger.info("Opening {0} in the default web browser...".format(server_url))
webbrowser.open(server_url)
if options['detach']:
+ self.detached = True
OurHTTPRequestHandler.quiet = True
try:
pid = os.fork()
if pid == 0:
+ signal.signal(signal.SIGTERM, self.shutdown)
httpd.serve_forever()
else:
- self.logger.info("Detached with PID {0}. Run `kill {0}` to stop the server.".format(pid))
- except AttributeError as e:
+ with open(self.serve_pidfile, 'w') as fh:
+ fh.write('{0}\n'.format(pid))
+ self.logger.info("Detached with PID {0}. Run `kill {0}` or `kill $(cat nikolaserve.pid)` to stop the server.".format(pid))
+ except AttributeError:
if os.name == 'nt':
self.logger.warning("Detaching is not available on Windows, server is running in the foreground.")
else:
- raise e
+ raise
else:
+ self.detached = False
try:
+ self.dns_sd = dns_sd(options['port'], (options['ipv6'] or '::' in options['address']))
+ signal.signal(signal.SIGTERM, self.shutdown)
httpd.serve_forever()
except KeyboardInterrupt:
- self.logger.info("Server is shutting down.")
+ self.shutdown()
return 130
class OurHTTPRequestHandler(SimpleHTTPRequestHandler):
-
"""A request handler, modified for Nikola."""
extensions_map = dict(SimpleHTTPRequestHandler.extensions_map)
@@ -171,8 +182,7 @@ class OurHTTPRequestHandler(SimpleHTTPRequestHandler):
if self.quiet:
return
else:
- # Old-style class in Python 2.7, cannot use super()
- return SimpleHTTPRequestHandler.log_message(self, *args)
+ return super().log_message(*args)
# NOTICE: this is a patched version of send_head() to disable all sorts of
# caching. `nikola serve` is a development server, hence caching should
@@ -184,9 +194,9 @@ class OurHTTPRequestHandler(SimpleHTTPRequestHandler):
# Note that it might break in future versions of Python, in which case we
# would need to do even more magic.
def send_head(self):
- """Common code for GET and HEAD commands.
+ """Send response code and MIME header.
- This sends the response code and MIME headers.
+ This is common code for GET and HEAD commands.
Return value is either a file object (which has to be copied
to the outputfile by the caller unless the command was HEAD,
@@ -197,10 +207,12 @@ class OurHTTPRequestHandler(SimpleHTTPRequestHandler):
path = self.translate_path(self.path)
f = None
if os.path.isdir(path):
- if not self.path.endswith('/'):
+ path_parts = list(self.path.partition('?'))
+ if not path_parts[0].endswith('/'):
# redirect browser - doing basically what apache does
+ path_parts[0] += '/'
self.send_response(301)
- self.send_header("Location", self.path + "/")
+ self.send_header("Location", ''.join(path_parts))
# begin no-cache patch
# For redirects. With redirects, caching is even worse and can
# break more. Especially with 301 Moved Permanently redirects,
@@ -226,7 +238,7 @@ class OurHTTPRequestHandler(SimpleHTTPRequestHandler):
# transmitted *less* than the content-length!
f = open(path, 'rb')
except IOError:
- self.send_error(404, "File not found")
+ self.send_error(404, "File not found: {}".format(path))
return None
filtered_bytes = None
@@ -234,7 +246,7 @@ class OurHTTPRequestHandler(SimpleHTTPRequestHandler):
# Comment out any <base> to allow local resolution of relative URLs.
data = f.read().decode('utf8')
f.close()
- data = re.sub(r'<base\s([^>]*)>', '<!--base \g<1>-->', data, re.IGNORECASE)
+ data = re.sub(r'<base\s([^>]*)>', r'<!--base \g<1>-->', data, flags=re.IGNORECASE)
data = data.encode('utf8')
f = StringIO()
f.write(data)
@@ -242,7 +254,10 @@ class OurHTTPRequestHandler(SimpleHTTPRequestHandler):
f.seek(0)
self.send_response(200)
- self.send_header("Content-type", ctype)
+ if ctype.startswith('text/') or ctype.endswith('+xml'):
+ self.send_header("Content-Type", "{0}; charset=UTF-8".format(ctype))
+ else:
+ self.send_header("Content-Type", ctype)
if os.path.splitext(path)[1] == '.svgz':
# Special handling for svgz to make it work nice with browsers.
self.send_header("Content-Encoding", 'gzip')
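
The redirect change in serve.py above keeps any query string intact when the development server appends a trailing slash to a directory URL. The same idea reduced to a helper (the example paths are made up):

    def add_trailing_slash(request_path):
        """Append '/' to the path component only, leaving any query string untouched."""
        path_parts = list(request_path.partition('?'))
        if not path_parts[0].endswith('/'):
            path_parts[0] += '/'
        return ''.join(path_parts)

    print(add_trailing_slash('/blog?page=2'))   # -> /blog/?page=2
    print(add_trailing_slash('/blog/?page=2'))  # unchanged
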
diff --git a/nikola/plugins/command/status.plugin b/nikola/plugins/command/status.plugin
index 91390d2..7e2bd96 100644
--- a/nikola/plugins/command/status.plugin
+++ b/nikola/plugins/command/status.plugin
@@ -9,5 +9,5 @@ website = https://getnikola.com
description = Site status
[Nikola]
-plugincategory = Command
+PluginCategory = Command
diff --git a/nikola/plugins/command/status.py b/nikola/plugins/command/status.py
index 55e7f95..c96d13f 100644
--- a/nikola/plugins/command/status.py
+++ b/nikola/plugins/command/status.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2015 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,8 +26,6 @@
"""Display site status."""
-from __future__ import print_function
-import io
import os
from datetime import datetime
from dateutil.tz import gettz, tzlocal
@@ -36,14 +34,13 @@ from nikola.plugin_categories import Command
class CommandStatus(Command):
-
"""Display site status."""
name = "status"
doc_purpose = "display site status"
doc_description = "Show information about the posts and site deployment."
- doc_usage = '[-l|--list-drafts] [-m|--list-modified] [-s|--list-scheduled]'
+ doc_usage = '[-d|--list-drafts] [-m|--list-modified] [-p|--list-private] [-P|--list-published] [-s|--list-scheduled]'
logger = None
cmd_options = [
{
@@ -63,6 +60,22 @@ class CommandStatus(Command):
'help': 'List all modified files since last deployment',
},
{
+ 'name': 'list_private',
+ 'short': 'p',
+ 'long': 'list-private',
+ 'type': bool,
+ 'default': False,
+ 'help': 'List all private posts',
+ },
+ {
+ 'name': 'list_published',
+ 'short': 'P',
+ 'long': 'list-published',
+ 'type': bool,
+ 'default': False,
+ 'help': 'List all published posts',
+ },
+ {
'name': 'list_scheduled',
'short': 's',
'long': 'list-scheduled',
@@ -76,16 +89,12 @@ class CommandStatus(Command):
"""Display site status."""
self.site.scan_posts()
- timestamp_path = os.path.join(self.site.config["CACHE_FOLDER"], "lastdeploy")
-
- last_deploy = None
-
- try:
- with io.open(timestamp_path, "r", encoding="utf8") as inf:
- last_deploy = datetime.strptime(inf.read().strip(), "%Y-%m-%dT%H:%M:%S.%f")
- last_deploy_offset = datetime.utcnow() - last_deploy
- except (IOError, Exception):
- print("It does not seem like you’ve ever deployed the site (or cache missing).")
+ last_deploy = self.site.state.get('last_deploy')
+ if last_deploy is not None:
+ last_deploy = datetime.strptime(last_deploy, "%Y-%m-%dT%H:%M:%S.%f")
+ last_deploy_offset = datetime.utcnow() - last_deploy
+ else:
+ print("It does not seem like you've ever deployed the site (or cache missing).")
if last_deploy:
@@ -111,12 +120,23 @@ class CommandStatus(Command):
posts_count = len(self.site.all_posts)
+ # find all published posts
+ posts_published = [post for post in self.site.all_posts if post.use_in_feeds]
+ posts_published = sorted(posts_published, key=lambda post: post.source_path)
+
+ # find all private posts
+ posts_private = [post for post in self.site.all_posts if post.is_private]
+ posts_private = sorted(posts_private, key=lambda post: post.source_path)
+
# find all drafts
posts_drafts = [post for post in self.site.all_posts if post.is_draft]
posts_drafts = sorted(posts_drafts, key=lambda post: post.source_path)
# find all scheduled posts with offset from now until publishing time
- posts_scheduled = [(post.date - now, post) for post in self.site.all_posts if post.publish_later]
+ posts_scheduled = [
+ (post.date - now, post) for post in self.site.all_posts
+ if post.publish_later and not (post.is_draft or post.is_private)
+ ]
posts_scheduled = sorted(posts_scheduled, key=lambda offset_post: (offset_post[0], offset_post[1].source_path))
if len(posts_scheduled) > 0:
@@ -129,7 +149,13 @@ class CommandStatus(Command):
if options['list_drafts']:
for post in posts_drafts:
print("Draft: '{0}' ({1}; source: {2})".format(post.meta('title'), post.permalink(), post.source_path))
- print("{0} posts in total, {1} scheduled, and {2} drafts.".format(posts_count, len(posts_scheduled), len(posts_drafts)))
+ if options['list_private']:
+ for post in posts_private:
+ print("Private: '{0}' ({1}; source: {2})".format(post.meta('title'), post.permalink(), post.source_path))
+ if options['list_published']:
+ for post in posts_published:
+ print("Published: '{0}' ({1}; source: {2})".format(post.meta('title'), post.permalink(), post.source_path))
+ print("{0} posts in total, {1} scheduled, {2} drafts, {3} private and {4} published.".format(posts_count, len(posts_scheduled), len(posts_drafts), len(posts_private), len(posts_published)))
def human_time(self, dt):
"""Translate time into a human-friendly representation."""
diff --git a/nikola/plugins/command/subtheme.plugin b/nikola/plugins/command/subtheme.plugin
new file mode 100644
index 0000000..d377e22
--- /dev/null
+++ b/nikola/plugins/command/subtheme.plugin
@@ -0,0 +1,13 @@
+[Core]
+name = subtheme
+module = subtheme
+
+[Documentation]
+author = Roberto Alsina
+version = 1.1
+website = https://getnikola.com/
+description = Given a swatch name and a parent theme, creates a custom subtheme.
+
+[Nikola]
+PluginCategory = Command
+
diff --git a/nikola/plugins/command/subtheme.py b/nikola/plugins/command/subtheme.py
new file mode 100644
index 0000000..554a241
--- /dev/null
+++ b/nikola/plugins/command/subtheme.py
@@ -0,0 +1,150 @@
+# -*- coding: utf-8 -*-
+
+# Copyright © 2012-2020 Roberto Alsina and others.
+
+# Permission is hereby granted, free of charge, to any
+# person obtaining a copy of this software and associated
+# documentation files (the "Software"), to deal in the
+# Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the
+# Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice
+# shall be included in all copies or substantial portions of
+# the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
+# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+"""Given a swatch name from bootswatch.com or hackerthemes.com and a parent theme, creates a custom theme."""
+
+import configparser
+import os
+
+import requests
+
+from nikola import utils
+from nikola.plugin_categories import Command
+
+LOGGER = utils.get_logger('subtheme')
+
+
+def _check_for_theme(theme, themes):
+ for t in themes:
+ if t.endswith(os.sep + theme):
+ return True
+ return False
+
+
+class CommandSubTheme(Command):
+ """Given a swatch name from bootswatch.com and a parent theme, creates a custom theme."""
+
+ name = "subtheme"
+ doc_usage = "[options]"
+ doc_purpose = "given a swatch name from bootswatch.com or hackerthemes.com and a parent theme, creates a custom"\
+ " theme"
+ cmd_options = [
+ {
+ 'name': 'name',
+ 'short': 'n',
+ 'long': 'name',
+ 'default': 'custom',
+ 'type': str,
+ 'help': 'New theme name',
+ },
+ {
+ 'name': 'swatch',
+ 'short': 's',
+ 'default': '',
+ 'type': str,
+ 'help': 'Name of the swatch from bootswatch.com.'
+ },
+ {
+ 'name': 'parent',
+ 'short': 'p',
+ 'long': 'parent',
+ 'default': 'bootstrap4',
+ 'help': 'Parent theme name',
+ },
+ ]
+
+ def _execute(self, options, args):
+ """Given a swatch name and a parent theme, creates a custom theme."""
+ name = options['name']
+ swatch = options['swatch']
+ if not swatch:
+ LOGGER.error('The -s option is mandatory')
+ return 1
+ parent = options['parent']
+ version = '4'
+
+ # Check which Bootstrap version to use
+ themes = utils.get_theme_chain(parent, self.site.themes_dirs)
+ if _check_for_theme('bootstrap', themes) or _check_for_theme('bootstrap-jinja', themes):
+ version = '2'
+ elif _check_for_theme('bootstrap3', themes) or _check_for_theme('bootstrap3-jinja', themes):
+ version = '3'
+ elif _check_for_theme('bootstrap4', themes) or _check_for_theme('bootstrap4-jinja', themes):
+ version = '4'
+ elif not _check_for_theme('bootstrap4', themes) and not _check_for_theme('bootstrap4-jinja', themes):
+ LOGGER.warning(
+ '"subtheme" only makes sense for themes that use bootstrap')
+ elif _check_for_theme('bootstrap3-gradients', themes) or _check_for_theme('bootstrap3-gradients-jinja', themes):
+ LOGGER.warning(
+ '"subtheme" doesn\'t work well with the bootstrap3-gradients family')
+
+ LOGGER.info("Creating '{0}' theme from '{1}' and '{2}'".format(
+ name, swatch, parent))
+ utils.makedirs(os.path.join('themes', name, 'assets', 'css'))
+ for fname in ('bootstrap.min.css', 'bootstrap.css'):
+ if swatch in [
+ 'bubblegum', 'business-tycoon', 'charming', 'daydream',
+ 'executive-suite', 'good-news', 'growth', 'harbor', 'hello-world',
+ 'neon-glow', 'pleasant', 'retro', 'vibrant-sea', 'wizardry']: # Hackerthemes
+ LOGGER.info(
+ 'Hackertheme-based subthemes often require you use a custom font for full effect.')
+ if version != '4':
+ LOGGER.error(
+ 'The hackertheme subthemes are only available for Bootstrap 4.')
+ return 1
+ if fname == 'bootstrap.css':
+ url = 'https://raw.githubusercontent.com/HackerThemes/theme-machine/master/dist/{swatch}/css/bootstrap4-{swatch}.css'.format(
+ swatch=swatch)
+ else:
+ url = 'https://raw.githubusercontent.com/HackerThemes/theme-machine/master/dist/{swatch}/css/bootstrap4-{swatch}.min.css'.format(
+ swatch=swatch)
+ else: # Bootswatch
+ url = 'https://bootswatch.com'
+ if version:
+ url += '/' + version
+ url = '/'.join((url, swatch, fname))
+ LOGGER.info("Downloading: " + url)
+ r = requests.get(url)
+ if r.status_code > 299:
+ LOGGER.error('Error {} getting {}', r.status_code, url)
+ return 1
+ data = r.text
+
+ with open(os.path.join('themes', name, 'assets', 'css', fname),
+ 'w+') as output:
+ output.write(data)
+
+ with open(os.path.join('themes', name, '%s.theme' % name), 'w+') as output:
+ parent_theme_data_path = utils.get_asset_path(
+ '%s.theme' % parent, themes)
+ cp = configparser.ConfigParser()
+ cp.read(parent_theme_data_path)
+ cp['Theme']['parent'] = parent
+ cp['Family'] = {'family': cp['Family']['family']}
+ cp.write(output)
+
+ LOGGER.info(
+ 'Theme created. Change the THEME setting to "{0}" to use it.'.format(name))
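
For the Bootswatch branch of the new subtheme command above, the download URL is simply the Bootstrap major version, swatch name and file name joined onto the site root. A sketch of just that URL construction (no download is performed; the swatch name is an example):

    def bootswatch_url(swatch, fname, version='4'):
        """Build the bootswatch.com URL the subtheme command fetches."""
        url = 'https://bootswatch.com'
        if version:
            url += '/' + version
        return '/'.join((url, swatch, fname))

    print(bootswatch_url('flatly', 'bootstrap.min.css'))
    # -> https://bootswatch.com/4/flatly/bootstrap.min.css
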
diff --git a/nikola/plugins/command/theme.plugin b/nikola/plugins/command/theme.plugin
new file mode 100644
index 0000000..421d027
--- /dev/null
+++ b/nikola/plugins/command/theme.plugin
@@ -0,0 +1,13 @@
+[Core]
+name = theme
+module = theme
+
+[Documentation]
+author = Roberto Alsina and Chris Warrick
+version = 1.0
+website = https://getnikola.com/
+description = Manage Nikola themes
+
+[Nikola]
+PluginCategory = Command
+
diff --git a/nikola/plugins/command/theme.py b/nikola/plugins/command/theme.py
new file mode 100644
index 0000000..6f4339a
--- /dev/null
+++ b/nikola/plugins/command/theme.py
@@ -0,0 +1,393 @@
+# -*- coding: utf-8 -*-
+
+# Copyright © 2012-2020 Roberto Alsina, Chris Warrick and others.
+
+# Permission is hereby granted, free of charge, to any
+# person obtaining a copy of this software and associated
+# documentation files (the "Software"), to deal in the
+# Software without restriction, including without limitation
+# the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the
+# Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice
+# shall be included in all copies or substantial portions of
+# the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
+# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
+# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
+# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+"""Manage themes."""
+
+import configparser
+import io
+import json.decoder
+import os
+import shutil
+import sys
+import time
+
+import requests
+import pygments
+from pygments.lexers import PythonLexer
+from pygments.formatters import TerminalFormatter
+from pkg_resources import resource_filename
+
+from nikola.plugin_categories import Command
+from nikola import utils
+
+LOGGER = utils.get_logger('theme')
+
+
+class CommandTheme(Command):
+ """Manage themes."""
+
+ json = None
+ name = "theme"
+ doc_usage = "[-u url] [-i theme_name] [-r theme_name] [-l] [--list-installed] [-g] [-n theme_name] [-c template_name]"
+ doc_purpose = "manage themes"
+ output_dir = 'themes'
+ cmd_options = [
+ {
+ 'name': 'install',
+ 'short': 'i',
+ 'long': 'install',
+ 'type': str,
+ 'default': '',
+ 'help': 'Install a theme.'
+ },
+ {
+ 'name': 'uninstall',
+ 'long': 'uninstall',
+ 'short': 'r',
+ 'type': str,
+ 'default': '',
+ 'help': 'Uninstall a theme.'
+ },
+ {
+ 'name': 'list',
+ 'short': 'l',
+ 'long': 'list',
+ 'type': bool,
+ 'default': False,
+ 'help': 'Show list of available themes.'
+ },
+ {
+ 'name': 'list_installed',
+ 'long': 'list-installed',
+ 'type': bool,
+ 'help': "List the installed themes with their location.",
+ 'default': False
+ },
+ {
+ 'name': 'url',
+ 'short': 'u',
+ 'long': 'url',
+ 'type': str,
+ 'help': "URL for the theme repository",
+ 'default': 'https://themes.getnikola.com/v8/themes.json'
+ },
+ {
+ 'name': 'getpath',
+ 'short': 'g',
+ 'long': 'get-path',
+ 'type': str,
+ 'default': '',
+ 'help': "Print the path for installed theme",
+ },
+ {
+ 'name': 'copy-template',
+ 'short': 'c',
+ 'long': 'copy-template',
+ 'type': str,
+ 'default': '',
+ 'help': 'Copy a built-in template into templates/ or your theme',
+ },
+ {
+ 'name': 'new',
+ 'short': 'n',
+ 'long': 'new',
+ 'type': str,
+ 'default': '',
+ 'help': 'Create a new theme',
+ },
+ {
+ 'name': 'new_engine',
+ 'long': 'engine',
+ 'type': str,
+ 'default': 'mako',
+ 'help': 'Engine to use for new theme (mako or jinja)',
+ },
+ {
+ 'name': 'new_parent',
+ 'long': 'parent',
+ 'type': str,
+ 'default': 'base',
+ 'help': 'Parent to use for new theme',
+ },
+ {
+ 'name': 'new_legacy_meta',
+ 'long': 'legacy-meta',
+ 'type': bool,
+ 'default': False,
+            'help': 'Create legacy meta files for the new theme.',
+ },
+ ]
+
+ def _execute(self, options, args):
+ """Install theme into current site."""
+ url = options['url']
+
+ # See the "mode" we need to operate in
+ install = options.get('install')
+ uninstall = options.get('uninstall')
+ list_available = options.get('list')
+ list_installed = options.get('list_installed')
+ get_path = options.get('getpath')
+ copy_template = options.get('copy-template')
+ new = options.get('new')
+ new_engine = options.get('new_engine')
+ new_parent = options.get('new_parent')
+ new_legacy_meta = options.get('new_legacy_meta')
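+        # Exactly one of these modes must be selected; otherwise, print the help text and fail.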
+ command_count = [bool(x) for x in (
+ install,
+ uninstall,
+ list_available,
+ list_installed,
+ get_path,
+ copy_template,
+ new)].count(True)
+ if command_count > 1 or command_count == 0:
+ print(self.help())
+ return 2
+
+ if list_available:
+ return self.list_available(url)
+ elif list_installed:
+ return self.list_installed()
+ elif install:
+ return self.do_install_deps(url, install)
+ elif uninstall:
+ return self.do_uninstall(uninstall)
+ elif get_path:
+ return self.get_path(get_path)
+ elif copy_template:
+ return self.copy_template(copy_template)
+ elif new:
+ return self.new_theme(new, new_engine, new_parent, new_legacy_meta)
+
+ def do_install_deps(self, url, name):
+ """Install themes and their dependencies."""
+ data = self.get_json(url)
+ # `name` may be modified by the while loop.
+ origname = name
+ installstatus = self.do_install(name, data)
+        # Walk up the parent chain and install any parent theme that is not yet available.
+ while True:
+ parent_name = utils.get_parent_theme_name(utils.get_theme_path_real(name, self.site.themes_dirs))
+ if parent_name is None:
+ break
+ try:
+ utils.get_theme_path_real(parent_name, self.site.themes_dirs)
+ break
+ except Exception: # Not available
+ self.do_install(parent_name, data)
+ name = parent_name
+ if installstatus:
+ LOGGER.info('Remember to set THEME="{0}" in conf.py to use this theme.'.format(origname))
+
+ def do_install(self, name, data):
+ """Download and install a theme."""
+ if name in data:
+ utils.makedirs(self.output_dir)
+ url = data[name]
+ LOGGER.info("Downloading '{0}'".format(url))
+ try:
+ zip_data = requests.get(url).content
+ except requests.exceptions.SSLError:
+ LOGGER.warning("SSL error, using http instead of https (press ^C to abort)")
+ time.sleep(1)
+ url = url.replace('https', 'http', 1)
+ zip_data = requests.get(url).content
+
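+            # Unpack the downloaded archive (kept in memory) into the themes/ directory.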
+ zip_file = io.BytesIO()
+ zip_file.write(zip_data)
+ LOGGER.info("Extracting '{0}' into themes/".format(name))
+ utils.extract_all(zip_file)
+ dest_path = os.path.join(self.output_dir, name)
+ else:
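+            # `name` is not in the repository index: report whether it is
+            # already installed locally or simply unknown, then give up.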
+ dest_path = os.path.join(self.output_dir, name)
+ try:
+ theme_path = utils.get_theme_path_real(name, self.site.themes_dirs)
+ LOGGER.error("Theme '{0}' is already installed in {1}".format(name, theme_path))
+ except Exception:
+ LOGGER.error("Can't find theme {0}".format(name))
+
+ return False
+
+ confpypath = os.path.join(dest_path, 'conf.py.sample')
+ if os.path.exists(confpypath):
+ LOGGER.warning('This theme has a sample config file. Integrate it with yours in order to make this theme work!')
+ print('Contents of the conf.py.sample file:\n')
+ with io.open(confpypath, 'r', encoding='utf-8-sig') as fh:
+ if self.site.colorful:
+ print(pygments.highlight(fh.read(), PythonLexer(), TerminalFormatter()))
+ else:
+ print(fh.read())
+ return True
+
+ def do_uninstall(self, name):
+ """Uninstall a theme."""
+ try:
+ path = utils.get_theme_path_real(name, self.site.themes_dirs)
+ except Exception:
+ LOGGER.error('Unknown theme: {0}'.format(name))
+ return 1
+ # Don't uninstall builtin themes (Issue #2510)
+ blocked = os.path.dirname(utils.__file__)
+ if path.startswith(blocked):
+ LOGGER.error("Can't delete builtin theme: {0}".format(name))
+ return 1
+ LOGGER.warning('About to uninstall theme: {0}'.format(name))
+ LOGGER.warning('This will delete {0}'.format(path))
+ sure = utils.ask_yesno('Are you sure?')
+ if sure:
+ LOGGER.warning('Removing {0}'.format(path))
+ shutil.rmtree(path)
+ return 0
+ return 1
+
+ def get_path(self, name):
+ """Get path for an installed theme."""
+ try:
+ path = utils.get_theme_path_real(name, self.site.themes_dirs)
+ print(path)
+ except Exception:
+ print("not installed")
+ return 0
+
+ def list_available(self, url):
+ """List all available themes."""
+ data = self.get_json(url)
+ print("Available Themes:")
+ print("-----------------")
+ for theme in sorted(data.keys()):
+ print(theme)
+ return 0
+
+ def list_installed(self):
+ """List all installed themes."""
+ print("Installed Themes:")
+ print("-----------------")
+ themes = []
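+        # Search the site's theme directories plus the themes bundled with Nikola itself.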
+ themes_dirs = self.site.themes_dirs + [resource_filename('nikola', os.path.join('data', 'themes'))]
+ for tdir in themes_dirs:
+ if os.path.isdir(tdir):
+ themes += [(i, os.path.join(tdir, i)) for i in os.listdir(tdir)]
+
+ for tname, tpath in sorted(set(themes)):
+ if os.path.isdir(tpath):
+ print("{0} at {1}".format(tname, tpath))
+
+ def copy_template(self, template):
+ """Copy the named template file from the parent to a local theme or to templates/."""
+ # Find template
+ t = self.site.template_system.get_template_path(template)
+ if t is None:
+ LOGGER.error("Cannot find template {0} in the lookup.".format(template))
+ return 2
+
+ # Figure out where to put it.
+ # Check if a local theme exists.
+ theme_path = utils.get_theme_path(self.site.THEMES[0])
+ if theme_path.startswith('themes' + os.sep):
+ # Theme in local themes/ directory
+ base = os.path.join(theme_path, 'templates')
+ else:
+ # Put it in templates/
+ base = 'templates'
+
+ if not os.path.exists(base):
+ os.mkdir(base)
+ LOGGER.info("Created directory {0}".format(base))
+
+ try:
+ out = shutil.copy(t, base)
+ LOGGER.info("Copied template from {0} to {1}".format(t, out))
+ except shutil.SameFileError:
+ LOGGER.error("This file already exists in your templates directory ({0}).".format(base))
+ return 3
+
+ def new_theme(self, name, engine, parent, create_legacy_meta=False):
+ """Create a new theme."""
+ base = 'themes'
+ themedir = os.path.join(base, name)
+ LOGGER.info("Creating theme {0} with parent {1} and engine {2} in {3}".format(name, parent, engine, themedir))
+ if not os.path.exists(base):
+ os.mkdir(base)
+ LOGGER.info("Created directory {0}".format(base))
+
+ # Check if engine and parent match
+ parent_engine = utils.get_template_engine(utils.get_theme_chain(parent, self.site.themes_dirs))
+
+ if parent_engine != engine:
+ LOGGER.error("Cannot use engine {0} because parent theme '{1}' uses {2}".format(engine, parent, parent_engine))
+ return 2
+
+ # Create theme
+ if not os.path.exists(themedir):
+ os.mkdir(themedir)
+ LOGGER.info("Created directory {0}".format(themedir))
+ else:
+ LOGGER.error("Theme already exists")
+ return 2
+
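+        # Record the theme metadata (engine and parent) in a <name>.theme file.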
+ cp = configparser.ConfigParser()
+ cp['Theme'] = {
+ 'engine': engine,
+ 'parent': parent
+ }
+
+ theme_meta_path = os.path.join(themedir, name + '.theme')
+ with io.open(theme_meta_path, 'w', encoding='utf-8') as fh:
+ cp.write(fh)
+ LOGGER.info("Created file {0}".format(theme_meta_path))
+
+ if create_legacy_meta:
+ with io.open(os.path.join(themedir, 'parent'), 'w', encoding='utf-8') as fh:
+ fh.write(parent + '\n')
+ LOGGER.info("Created file {0}".format(os.path.join(themedir, 'parent')))
+ with io.open(os.path.join(themedir, 'engine'), 'w', encoding='utf-8') as fh:
+ fh.write(engine + '\n')
+ LOGGER.info("Created file {0}".format(os.path.join(themedir, 'engine')))
+
+ LOGGER.info("Theme {0} created successfully.".format(themedir))
+ LOGGER.info('Remember to set THEME="{0}" in conf.py to use this theme.'.format(name))
+
+ def get_json(self, url):
+ """Download the JSON file with all plugins."""
+ if self.json is None:
+ try:
+ try:
+ self.json = requests.get(url).json()
+ except requests.exceptions.SSLError:
+ LOGGER.warning("SSL error, using http instead of https (press ^C to abort)")
+ time.sleep(1)
+ url = url.replace('https', 'http', 1)
+ self.json = requests.get(url).json()
+ except json.decoder.JSONDecodeError as e:
+ LOGGER.error("Failed to decode JSON data in response from server.")
+ LOGGER.error("JSON error encountered:" + str(e))
+ LOGGER.error("This issue might be caused by server-side issues, or by to unusual activity in your "
+ "network (as determined by CloudFlare). Please visit https://themes.getnikola.com/ in "
+ "a browser.")
+ sys.exit(2)
+
+ return self.json
diff --git a/nikola/plugins/command/version.plugin b/nikola/plugins/command/version.plugin
index 4708bdb..a172e28 100644
--- a/nikola/plugins/command/version.plugin
+++ b/nikola/plugins/command/version.plugin
@@ -5,9 +5,9 @@ module = version
[Documentation]
author = Roberto Alsina
version = 1.0
-website = http://getnikola.com
+website = https://getnikola.com/
description = Show nikola version
[Nikola]
-plugincategory = Command
+PluginCategory = Command
diff --git a/nikola/plugins/command/version.py b/nikola/plugins/command/version.py
index ad08f64..9b81343 100644
--- a/nikola/plugins/command/version.py
+++ b/nikola/plugins/command/version.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright © 2012-2015 Roberto Alsina and others.
+# Copyright © 2012-2020 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
@@ -26,19 +26,16 @@
"""Print Nikola version."""
-from __future__ import print_function
-import lxml
import requests
from nikola.plugin_categories import Command
from nikola import __version__
-URL = 'https://pypi.python.org/pypi?:action=doap&name=Nikola'
+URL = 'https://pypi.org/pypi/Nikola/json'
class CommandVersion(Command):
-
"""Print Nikola version."""
name = "version"
@@ -61,10 +58,11 @@ class CommandVersion(Command):
"""Print the version number."""
print("Nikola v" + __version__)
if options.get('check'):
- data = requests.get(URL).text
- doc = lxml.etree.fromstring(data.encode('utf8'))
- revision = doc.findall('*//{http://usefulinc.com/ns/doap#}revision')[0].text
- if revision == __version__:
+ data = requests.get(URL).json()
+ pypi_version = data['info']['version']
+ if pypi_version == __version__:
print("Nikola is up-to-date")
else:
- print("The latest version of Nikola is v{0} -- please upgrade using `pip install --upgrade Nikola=={0}` or your system package manager".format(revision))
+ print("The latest version of Nikola is v{0}. Please upgrade "
+ "using `pip install --upgrade Nikola=={0}` or your "
+ "system package manager.".format(pypi_version))