author     Agustin Henze <tin@sluc.org.ar>    2015-08-26 07:57:23 -0300
committer  Agustin Henze <tin@sluc.org.ar>    2015-08-26 07:57:23 -0300
commit     70ceb871117ca811d63cb02671dc0fefc2700883 (patch)
tree       846133ea39797d2cd1101cff2ac0818167353490 /nikola/plugins/command
parent     8559119e2f45b7f6508282962c0430423bfab051 (diff)
parent     787b97a4cb24330b36f11297c6d3a7a473a907d0 (diff)
Merge tag 'upstream/7.6.4'
Upstream version 7.6.4
Diffstat (limited to 'nikola/plugins/command')
-rw-r--r--  nikola/plugins/command/__init__.py              |    2
-rw-r--r--  nikola/plugins/command/auto.plugin              |   16
-rw-r--r--  nikola/plugins/command/auto/__init__.py         |  196
-rw-r--r--  nikola/plugins/command/bootswatch_theme.plugin  |   15
-rw-r--r--  nikola/plugins/command/bootswatch_theme.py      |    3
-rw-r--r--  nikola/plugins/command/check.plugin             |   15
-rw-r--r--  nikola/plugins/command/check.py                 |   70
-rw-r--r--  nikola/plugins/command/console.plugin           |   16
-rw-r--r--  nikola/plugins/command/console.py               |    4
-rw-r--r--  nikola/plugins/command/deploy.plugin            |   16
-rw-r--r--  nikola/plugins/command/deploy.py                |   12
-rw-r--r--  nikola/plugins/command/github_deploy.plugin     |   16
-rw-r--r--  nikola/plugins/command/github_deploy.py         |   19
-rw-r--r--  nikola/plugins/command/import_wordpress.plugin  |   15
-rw-r--r--  nikola/plugins/command/import_wordpress.py      |  666
-rw-r--r--  nikola/plugins/command/init.plugin              |   16
-rw-r--r--  nikola/plugins/command/init.py                  |   43
-rw-r--r--  nikola/plugins/command/install_theme.plugin     |   15
-rw-r--r--  nikola/plugins/command/install_theme.py         |   28
-rw-r--r--  nikola/plugins/command/new_page.plugin          |   16
-rw-r--r--  nikola/plugins/command/new_page.py              |    3
-rw-r--r--  nikola/plugins/command/new_post.plugin          |   15
-rw-r--r--  nikola/plugins/command/new_post.py              |  236
-rw-r--r--  nikola/plugins/command/orphans.plugin           |   15
-rw-r--r--  nikola/plugins/command/orphans.py               |    6
-rw-r--r--  nikola/plugins/command/plugin.plugin            |   15
-rw-r--r--  nikola/plugins/command/plugin.py                |   88
-rw-r--r--  nikola/plugins/command/rst2html.plugin          |   16
-rw-r--r--  nikola/plugins/command/rst2html/__init__.py     |    3
-rw-r--r--  nikola/plugins/command/serve.plugin             |   15
-rw-r--r--  nikola/plugins/command/serve.py                 |   83
-rw-r--r--  nikola/plugins/command/status.plugin            |   16
-rw-r--r--  nikola/plugins/command/status.py                |   11
-rw-r--r--  nikola/plugins/command/version.plugin           |   16
-rw-r--r--  nikola/plugins/command/version.py               |    5
35 files changed, 1281 insertions, 461 deletions
diff --git a/nikola/plugins/command/__init__.py b/nikola/plugins/command/__init__.py
index a1d17a6..2aa5267 100644
--- a/nikola/plugins/command/__init__.py
+++ b/nikola/plugins/command/__init__.py
@@ -23,3 +23,5 @@
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+"""Commands for Nikola."""
diff --git a/nikola/plugins/command/auto.plugin b/nikola/plugins/command/auto.plugin
index a1c6820..3e2b17d 100644
--- a/nikola/plugins/command/auto.plugin
+++ b/nikola/plugins/command/auto.plugin
@@ -1,9 +1,13 @@
[Core]
-Name = auto
-Module = auto
+name = auto
+module = auto
[Documentation]
-Author = Roberto Alsina
-Version = 2.1.0
-Website = http://getnikola.com
-Description = Automatically detect site changes, rebuild and optionally refresh a browser.
+author = Roberto Alsina
+version = 2.1.0
+website = http://getnikola.com
+description = Automatically detect site changes, rebuild and optionally refresh a browser.
+
+[Nikola]
+plugincategory = Command
+
diff --git a/nikola/plugins/command/auto/__init__.py b/nikola/plugins/command/auto/__init__.py
index c25ef8a..71f9624 100644
--- a/nikola/plugins/command/auto/__init__.py
+++ b/nikola/plugins/command/auto/__init__.py
@@ -24,6 +24,8 @@
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+"""Automatic rebuilds for Nikola."""
+
from __future__ import print_function
import json
@@ -31,10 +33,13 @@ import mimetypes
import os
import re
import subprocess
+import sys
+import time
try:
from urlparse import urlparse
+ from urllib2 import unquote
except ImportError:
- from urllib.parse import urlparse # NOQA
+ from urllib.parse import urlparse, unquote # NOQA
import webbrowser
from wsgiref.simple_server import make_server
import wsgiref.util
@@ -42,7 +47,7 @@ import wsgiref.util
from blinker import signal
try:
from ws4py.websocket import WebSocket
- from ws4py.server.wsgirefserver import WSGIServer, WebSocketWSGIRequestHandler
+ from ws4py.server.wsgirefserver import WSGIServer, WebSocketWSGIRequestHandler, WebSocketWSGIHandler
from ws4py.server.wsgiutils import WebSocketWSGIApplication
from ws4py.messaging import TextMessage
except ImportError:
@@ -58,7 +63,7 @@ except ImportError:
from nikola.plugin_categories import Command
-from nikola.utils import req_missing, get_logger, get_theme_path
+from nikola.utils import req_missing, get_logger, get_theme_path, STDERR_HANDLER
LRJS_PATH = os.path.join(os.path.dirname(__file__), 'livereload.js')
error_signal = signal('error')
refresh_signal = signal('refresh')
@@ -74,9 +79,12 @@ ERROR {}
class CommandAuto(Command):
- """Start debugging console."""
+
+ """Automatic rebuilds for Nikola."""
+
name = "auto"
logger = None
+ has_server = True
doc_purpose = "builds and serves a site; automatically detects site changes, rebuilds, and optionally refreshes a browser"
cmd_options = [
{
@@ -100,7 +108,7 @@ class CommandAuto(Command):
'short': 'b',
'long': 'browser',
'type': bool,
- 'help': 'Start a web browser.',
+ 'help': 'Start a web browser',
'default': False,
},
{
@@ -111,12 +119,18 @@ class CommandAuto(Command):
'type': bool,
'help': 'Use IPv6',
},
+ {
+ 'name': 'no-server',
+ 'long': 'no-server',
+ 'default': False,
+ 'type': bool,
+ 'help': 'Disable the server, automate rebuilds only'
+ },
]
def _execute(self, options, args):
"""Start the watcher."""
-
- self.logger = get_logger('auto', self.site.loghandlers)
+ self.logger = get_logger('auto', STDERR_HANDLER)
LRSocket.logger = self.logger
if WebSocket is object and watchdog is None:
@@ -166,10 +180,14 @@ class CommandAuto(Command):
host = options['address'].strip('[').strip(']') or dhost
+ # Server can be disabled (Issue #1883)
+ self.has_server = not options['no-server']
+
# Instantiate global observer
observer = Observer()
- # Watch output folders and trigger reloads
- observer.schedule(OurWatchHandler(self.do_refresh), out_folder, recursive=True)
+ if self.has_server:
+ # Watch output folders and trigger reloads
+ observer.schedule(OurWatchHandler(self.do_refresh), out_folder, recursive=True)
# Watch input folders and trigger rebuilds
for p in watched:
@@ -181,101 +199,155 @@ class CommandAuto(Command):
_conf_dn = os.path.dirname(_conf_fn)
observer.schedule(ConfigWatchHandler(_conf_fn, self.do_rebuild), _conf_dn, recursive=False)
- observer.start()
+ try:
+ self.logger.info("Watching files for changes...")
+ observer.start()
+ except KeyboardInterrupt:
+ pass
parent = self
class Mixed(WebSocketWSGIApplication):
- """A class that supports WS and HTTP protocols in the same port."""
+
+ """A class that supports WS and HTTP protocols on the same port."""
+
def __call__(self, environ, start_response):
if environ.get('HTTP_UPGRADE') is None:
return parent.serve_static(environ, start_response)
return super(Mixed, self).__call__(environ, start_response)
- ws = make_server(
- host, port, server_class=WSGIServer,
- handler_class=WebSocketWSGIRequestHandler,
- app=Mixed(handler_cls=LRSocket)
- )
- ws.initialize_websockets_manager()
- self.logger.info("Serving HTTP on {0} port {1}...".format(host, port))
- if browser:
- if options['ipv6'] or '::' in host:
- server_url = "http://[{0}]:{1}/".format(host, port)
- else:
- server_url = "http://{0}:{1}/".format(host, port)
-
- self.logger.info("Opening {0} in the default web browser...".format(server_url))
- # Yes, this is racy
- webbrowser.open('http://{0}:{1}'.format(host, port))
-
- try:
- ws.serve_forever()
- except KeyboardInterrupt:
- self.logger.info("Server is shutting down.")
- observer.stop()
- observer.join()
+ if self.has_server:
+ ws = make_server(
+ host, port, server_class=WSGIServer,
+ handler_class=WebSocketWSGIRequestHandler,
+ app=Mixed(handler_cls=LRSocket)
+ )
+ ws.initialize_websockets_manager()
+ self.logger.info("Serving HTTP on {0} port {1}...".format(host, port))
+ if browser:
+ if options['ipv6'] or '::' in host:
+ server_url = "http://[{0}]:{1}/".format(host, port)
+ else:
+ server_url = "http://{0}:{1}/".format(host, port)
+
+ self.logger.info("Opening {0} in the default web browser...".format(server_url))
+ # Yes, this is racy
+ webbrowser.open('http://{0}:{1}'.format(host, port))
+
+ try:
+ ws.serve_forever()
+ except KeyboardInterrupt:
+ self.logger.info("Server is shutting down.")
+ # This is a hack, but something is locking up in a futex
+ # and exit() doesn't work.
+ os.kill(os.getpid(), 15)
+ else:
+ # Workaround: can’t have nothing running (instant exit)
+ # but also can’t join threads (no way to exit)
+ # The joys of threading.
+ try:
+ while True:
+ time.sleep(1)
+ except KeyboardInterrupt:
+ self.logger.info("Shutting down.")
+ # This is a hack, but something is locking up in a futex
+ # and exit() doesn't work.
+ os.kill(os.getpid(), 15)
def do_rebuild(self, event):
- self.logger.info('REBUILDING SITE (from {0})'.format(event.src_path))
+ """Rebuild the site."""
+ # Move events have a dest_path, some editors like gedit use a
+ # move on larger save operations for write protection
+ event_path = event.dest_path if hasattr(event, 'dest_path') else event.src_path
+ fname = os.path.basename(event_path)
+ if (fname.endswith('~') or
+ fname.startswith('.') or
+ os.path.isdir(event_path)): # Skip on folders, these are usually duplicates
+ return
+ self.logger.info('REBUILDING SITE (from {0})'.format(event_path))
p = subprocess.Popen(self.cmd_arguments, stderr=subprocess.PIPE)
+ error = p.stderr.read()
+ errord = error.decode('utf-8')
if p.wait() != 0:
- error = p.stderr.read()
- self.logger.error(error)
- error_signal.send(error=error)
+ self.logger.error(errord)
+ error_signal.send(error=errord)
else:
- error = p.stderr.read()
- print(error)
+ print(errord)
def do_refresh(self, event):
- self.logger.info('REFRESHING: {0}'.format(event.src_path))
- p = os.path.relpath(event.src_path, os.path.abspath(self.site.config['OUTPUT_FOLDER']))
+ """Refresh the page."""
+ # Move events have a dest_path, some editors like gedit use a
+ # move on larger save operations for write protection
+ event_path = event.dest_path if hasattr(event, 'dest_path') else event.src_path
+ self.logger.info('REFRESHING: {0}'.format(event_path))
+ p = os.path.relpath(event_path, os.path.abspath(self.site.config['OUTPUT_FOLDER']))
refresh_signal.send(path=p)
def serve_static(self, environ, start_response):
"""Trivial static file server."""
uri = wsgiref.util.request_uri(environ)
p_uri = urlparse(uri)
- f_path = os.path.join(self.site.config['OUTPUT_FOLDER'], *p_uri.path.split('/'))
- mimetype = mimetypes.guess_type(uri)[0] or 'text/html'
+ f_path = os.path.join(self.site.config['OUTPUT_FOLDER'], *[unquote(x) for x in p_uri.path.split('/')])
+
+ # ‘Pretty’ URIs and root are assumed to be HTML
+ mimetype = 'text/html' if uri.endswith('/') else mimetypes.guess_type(uri)[0] or 'application/octet-stream'
if os.path.isdir(f_path):
+ if not f_path.endswith('/'): # Redirect to avoid breakage
+ start_response('301 Redirect', [('Location', p_uri.path + '/')])
+ return []
f_path = os.path.join(f_path, self.site.config['INDEX_FILE'])
+ mimetype = 'text/html'
if p_uri.path == '/robots.txt':
start_response('200 OK', [('Content-type', 'text/plain')])
- return ['User-Agent: *\nDisallow: /\n']
+ return ['User-Agent: *\nDisallow: /\n'.encode('utf-8')]
elif os.path.isfile(f_path):
with open(f_path, 'rb') as fd:
start_response('200 OK', [('Content-type', mimetype)])
- return [self.inject_js(mimetype, fd.read())]
+ return [self.file_filter(mimetype, fd.read())]
elif p_uri.path == '/livereload.js':
with open(LRJS_PATH, 'rb') as fd:
start_response('200 OK', [('Content-type', mimetype)])
- return [self.inject_js(mimetype, fd.read())]
+ return [self.file_filter(mimetype, fd.read())]
start_response('404 ERR', [])
- return [self.inject_js('text/html', ERROR_N.format(404).format(uri))]
+ return [self.file_filter('text/html', ERROR_N.format(404).format(uri).encode('utf-8'))]
- def inject_js(self, mimetype, data):
- """Inject livereload.js in HTML files."""
+ def file_filter(self, mimetype, data):
+ """Apply necessary changes to document before serving."""
if mimetype == 'text/html':
- data = re.sub('</head>', self.snippet, data.decode('utf8'), 1, re.IGNORECASE)
+ data = data.decode('utf8')
+ data = self.remove_base_tag(data)
+ data = self.inject_js(data)
data = data.encode('utf8')
return data
+ def inject_js(self, data):
+ """Inject livereload.js."""
+ data = re.sub('</head>', self.snippet, data, 1, re.IGNORECASE)
+ return data
+
+ def remove_base_tag(self, data):
+ """Comment out any <base> to allow local resolution of relative URLs."""
+ data = re.sub(r'<base\s([^>]*)>', '<!--base \g<1>-->', data, re.IGNORECASE)
+ return data
+
pending = []
class LRSocket(WebSocket):
+
"""Speak Livereload protocol."""
def __init__(self, *a, **kw):
+ """Initialize protocol handler."""
refresh_signal.connect(self.notify)
error_signal.connect(self.send_error)
super(LRSocket, self).__init__(*a, **kw)
def received_message(self, message):
+ """Handle received message."""
message = json.loads(message.data.decode('utf8'))
self.logger.info('<--- {0}'.format(message))
response = None
@@ -364,3 +436,25 @@ class ConfigWatchHandler(FileSystemEventHandler):
"""Call the provided function on any event."""
if event._src_path == self.configuration_filename:
self.function(event)
+
+
+try:
+ # Monkeypatch to hide Broken Pipe Errors
+ f = WebSocketWSGIHandler.finish_response
+
+ if sys.version_info[0] == 3:
+ EX = BrokenPipeError # NOQA
+ else:
+ EX = IOError
+
+ def finish_response(self):
+ """Monkeypatched finish_response that ignores broken pipes."""
+ try:
+ f(self)
+ except EX: # Client closed the connection, not a real error
+ pass
+
+ WebSocketWSGIHandler.finish_response = finish_response
+except NameError:
+ # In case there is no WebSocketWSGIHandler because of a failed import.
+ pass
diff --git a/nikola/plugins/command/bootswatch_theme.plugin b/nikola/plugins/command/bootswatch_theme.plugin
index b428da3..fc25045 100644
--- a/nikola/plugins/command/bootswatch_theme.plugin
+++ b/nikola/plugins/command/bootswatch_theme.plugin
@@ -1,10 +1,13 @@
[Core]
-Name = bootswatch_theme
-Module = bootswatch_theme
+name = bootswatch_theme
+module = bootswatch_theme
[Documentation]
-Author = Roberto Alsina
-Version = 1.0
-Website = http://getnikola.com
-Description = Given a swatch name and a parent theme, creates a custom theme.
+author = Roberto Alsina
+version = 1.0
+website = http://getnikola.com
+description = Given a swatch name and a parent theme, creates a custom theme.
+
+[Nikola]
+plugincategory = Command
diff --git a/nikola/plugins/command/bootswatch_theme.py b/nikola/plugins/command/bootswatch_theme.py
index e19c937..b5644a1 100644
--- a/nikola/plugins/command/bootswatch_theme.py
+++ b/nikola/plugins/command/bootswatch_theme.py
@@ -24,6 +24,8 @@
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+"""Given a swatch name from bootswatch.com and a parent theme, creates a custom theme."""
+
from __future__ import print_function
import os
import requests
@@ -35,6 +37,7 @@ LOGGER = utils.get_logger('bootswatch_theme', utils.STDERR_HANDLER)
class CommandBootswatchTheme(Command):
+
"""Given a swatch name from bootswatch.com and a parent theme, creates a custom theme."""
name = "bootswatch_theme"
diff --git a/nikola/plugins/command/check.plugin b/nikola/plugins/command/check.plugin
index dd0980e..e380e64 100644
--- a/nikola/plugins/command/check.plugin
+++ b/nikola/plugins/command/check.plugin
@@ -1,10 +1,13 @@
[Core]
-Name = check
-Module = check
+name = check
+module = check
[Documentation]
-Author = Roberto Alsina
-Version = 1.0
-Website = http://getnikola.com
-Description = Check the generated site
+author = Roberto Alsina
+version = 1.0
+website = http://getnikola.com
+description = Check the generated site
+
+[Nikola]
+plugincategory = Command
diff --git a/nikola/plugins/command/check.py b/nikola/plugins/command/check.py
index a9bc44a..abf183e 100644
--- a/nikola/plugins/command/check.py
+++ b/nikola/plugins/command/check.py
@@ -24,11 +24,14 @@
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+"""Check the generated site."""
+
from __future__ import print_function
from collections import defaultdict
import os
import re
import sys
+import time
try:
from urllib import unquote
from urlparse import urlparse, urljoin, urldefrag
@@ -40,7 +43,7 @@ import lxml.html
import requests
from nikola.plugin_categories import Command
-from nikola.utils import get_logger
+from nikola.utils import get_logger, STDERR_HANDLER
def _call_nikola_list(site):
@@ -58,6 +61,7 @@ def _call_nikola_list(site):
def real_scan_files(site):
+ """Scan for files."""
task_fnames = set([])
real_fnames = set([])
output_folder = site.config['OUTPUT_FOLDER']
@@ -80,7 +84,8 @@ def real_scan_files(site):
def fs_relpath_from_url_path(url_path):
- """Expects as input an urlparse(s).path"""
+ """Create a filesystem relative path from an URL path."""
+ # Expects as input an urlparse(s).path
url_path = unquote(url_path)
# in windows relative paths don't begin with os.sep
if sys.platform == 'win32' and len(url_path):
@@ -89,6 +94,7 @@ def fs_relpath_from_url_path(url_path):
class CommandCheck(Command):
+
"""Check the generated site."""
name = "check"
@@ -147,7 +153,7 @@ class CommandCheck(Command):
def _execute(self, options, args):
"""Check the generated site."""
- self.logger = get_logger('check', self.site.loghandlers)
+ self.logger = get_logger('check', STDERR_HANDLER)
if not options['links'] and not options['files'] and not options['clean']:
print(self.help())
@@ -169,6 +175,7 @@ class CommandCheck(Command):
checked_remote_targets = {}
def analyze(self, fname, find_sources=False, check_remote=False):
+ """Analyze links on a page."""
rv = False
self.whitelist = [re.compile(x) for x in self.site.config['LINK_CHECK_WHITELIST']]
base_url = urlparse(self.site.config['BASE_URL'])
@@ -217,15 +224,45 @@ class CommandCheck(Command):
if parsed.netloc == base_url.netloc: # absolute URL to self.site
continue
if target in self.checked_remote_targets: # already checked this exact target
- if self.checked_remote_targets[target] > 399:
- self.logger.warn("Broken link in {0}: {1} [Error {2}]".format(filename, target, self.checked_remote_targets[target]))
+ if self.checked_remote_targets[target] in [301, 307]:
+ self.logger.warn("Remote link PERMANENTLY redirected in {0}: {1} [Error {2}]".format(filename, target, self.checked_remote_targets[target]))
+ elif self.checked_remote_targets[target] in [302, 308]:
+ self.logger.info("Remote link temporarily redirected in {1}: {2} [HTTP: {3}]".format(filename, target, self.checked_remote_targets[target]))
+ elif self.checked_remote_targets[target] > 399:
+ self.logger.error("Broken link in {0}: {1} [Error {2}]".format(filename, target, self.checked_remote_targets[target]))
continue
+
+ # Skip whitelisted targets
+ if any(re.search(_, target) for _ in self.whitelist):
+ continue
+
# Check the remote link works
req_headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:45.0) Gecko/20100101 Firefox/45.0 (Nikola)'} # I’m a real boy!
- resp = requests.head(target, headers=req_headers)
- self.checked_remote_targets[target] = resp.status_code
+ resp = requests.head(target, headers=req_headers, allow_redirects=False)
+
+ # Retry client errors (4xx) as GET requests because many servers are broken
+ if resp.status_code >= 400 and resp.status_code <= 499:
+ time.sleep(0.5)
+ resp = requests.get(target, headers=req_headers, allow_redirects=False)
+
+ # Follow redirects and see where they lead, redirects to errors will be reported twice
+ if resp.status_code in [301, 302, 307, 308]:
+ redir_status_code = resp.status_code
+ time.sleep(0.5)
+ # Known redirects are retested using GET because IIS servers otherwise get HEADaches
+ resp = requests.get(target, headers=req_headers, allow_redirects=True)
+ # Permanent redirects should be updated
+ if redir_status_code in [301, 308]:
+ self.logger.warn("Remote link moved PERMANENTLY to \"{0}\" and should be updated in {1}: {2} [HTTP: {3}]".format(resp.url, filename, target, redir_status_code))
+ if redir_status_code in [302, 307]:
+ self.logger.info("Remote link temporarily redirected to \"{0}\" in {1}: {2} [HTTP: {3}]".format(resp.url, filename, target, redir_status_code))
+ self.checked_remote_targets[resp.url] = resp.status_code
+ self.checked_remote_targets[target] = redir_status_code
+ else:
+ self.checked_remote_targets[target] = resp.status_code
+
if resp.status_code > 399: # Error
- self.logger.warn("Broken link in {0}: {1} [Error {2}]".format(filename, target, resp.status_code))
+ self.logger.error("Broken link in {0}: {1} [Error {2}]".format(filename, target, resp.status_code))
continue
elif resp.status_code <= 399: # The address leads *somewhere* that is not an error
self.logger.debug("Successfully checked remote link in {0}: {1} [HTTP: {2}]".format(filename, target, resp.status_code))
@@ -271,6 +308,7 @@ class CommandCheck(Command):
return rv
def scan_links(self, find_sources=False, check_remote=False):
+ """Check links on the site."""
self.logger.info("Checking Links:")
self.logger.info("===============\n")
self.logger.notice("{0} mode".format(self.site.config['URL_TYPE']))
@@ -286,6 +324,7 @@ class CommandCheck(Command):
return failure
def scan_files(self):
+ """Check files in the site, find missing and orphaned files."""
failure = False
self.logger.info("Checking Files:")
self.logger.info("===============\n")
@@ -311,7 +350,22 @@ class CommandCheck(Command):
return failure
def clean_files(self):
+ """Remove orphaned files."""
only_on_output, _ = real_scan_files(self.site)
for f in only_on_output:
+ self.logger.info('removed: {0}'.format(f))
os.unlink(f)
+
+ # Find empty directories and remove them
+ output_folder = self.site.config['OUTPUT_FOLDER']
+ all_dirs = []
+ for root, dirs, files in os.walk(output_folder, followlinks=True):
+ all_dirs.append(root)
+ all_dirs.sort(key=len, reverse=True)
+ for d in all_dirs:
+ try:
+ os.rmdir(d)
+ self.logger.info('removed: {0}/'.format(d))
+ except OSError:
+ pass
return True
diff --git a/nikola/plugins/command/console.plugin b/nikola/plugins/command/console.plugin
index 3aef2e7..333762c 100644
--- a/nikola/plugins/command/console.plugin
+++ b/nikola/plugins/command/console.plugin
@@ -1,9 +1,13 @@
[Core]
-Name = console
-Module = console
+name = console
+module = console
[Documentation]
-Author = Chris Warrick, Roberto Alsina
-Version = 1.0
-Website = http://getnikola.com
-Description = Start a debugging python console
+author = Chris Warrick, Roberto Alsina
+version = 1.0
+website = http://getnikola.com
+description = Start a debugging python console
+
+[Nikola]
+plugincategory = Command
+
diff --git a/nikola/plugins/command/console.py b/nikola/plugins/command/console.py
index b8e7825..539fa08 100644
--- a/nikola/plugins/command/console.py
+++ b/nikola/plugins/command/console.py
@@ -24,6 +24,8 @@
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+"""Start debugging console."""
+
from __future__ import print_function, unicode_literals
import os
@@ -36,7 +38,9 @@ LOGGER = get_logger('console', STDERR_HANDLER)
class CommandConsole(Command):
+
"""Start debugging console."""
+
name = "console"
shells = ['ipython', 'bpython', 'plain']
doc_purpose = "start an interactive Python console with access to your site"
diff --git a/nikola/plugins/command/deploy.plugin b/nikola/plugins/command/deploy.plugin
index 14fd53f..4743ca2 100644
--- a/nikola/plugins/command/deploy.plugin
+++ b/nikola/plugins/command/deploy.plugin
@@ -1,9 +1,13 @@
[Core]
-Name = deploy
-Module = deploy
+name = deploy
+module = deploy
[Documentation]
-Author = Roberto Alsina
-Version = 1.0
-Website = http://getnikola.com
-Description = Deploy the site
+author = Roberto Alsina
+version = 1.0
+website = http://getnikola.com
+description = Deploy the site
+
+[Nikola]
+plugincategory = Command
+
diff --git a/nikola/plugins/command/deploy.py b/nikola/plugins/command/deploy.py
index 2c44e87..821ea11 100644
--- a/nikola/plugins/command/deploy.py
+++ b/nikola/plugins/command/deploy.py
@@ -24,6 +24,8 @@
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+"""Deploy site."""
+
from __future__ import print_function
import io
from datetime import datetime
@@ -35,11 +37,13 @@ import time
from blinker import signal
from nikola.plugin_categories import Command
-from nikola.utils import get_logger, remove_file, unicode_str, makedirs
+from nikola.utils import get_logger, remove_file, unicode_str, makedirs, STDERR_HANDLER
class CommandDeploy(Command):
+
"""Deploy site."""
+
name = "deploy"
doc_usage = "[[preset [preset...]]"
@@ -48,7 +52,8 @@ class CommandDeploy(Command):
logger = None
def _execute(self, command, args):
- self.logger = get_logger('deploy', self.site.loghandlers)
+ """Execute the deploy command."""
+ self.logger = get_logger('deploy', STDERR_HANDLER)
# Get last successful deploy date
timestamp_path = os.path.join(self.site.config['CACHE_FOLDER'], 'lastdeploy')
if self.site.config['COMMENT_SYSTEM_ID'] == 'nikolademo':
@@ -116,7 +121,7 @@ class CommandDeploy(Command):
outf.write(unicode_str(new_deploy.isoformat()))
def _emit_deploy_event(self, last_deploy, new_deploy, clean=False, undeployed=None):
- """ Emit events for all timeline entries newer than last deploy.
+ """Emit events for all timeline entries newer than last deploy.
last_deploy: datetime
Time stamp of the last successful deployment.
@@ -128,7 +133,6 @@ class CommandDeploy(Command):
True when it appears like deploy is being run after a clean.
"""
-
event = {
'last_deploy': last_deploy,
'new_deploy': new_deploy,
diff --git a/nikola/plugins/command/github_deploy.plugin b/nikola/plugins/command/github_deploy.plugin
index 74e7902..e793548 100644
--- a/nikola/plugins/command/github_deploy.plugin
+++ b/nikola/plugins/command/github_deploy.plugin
@@ -1,9 +1,13 @@
[Core]
-Name = github_deploy
-Module = github_deploy
+name = github_deploy
+module = github_deploy
[Documentation]
-Author = Puneeth Chaganti
-Version = 1,0
-Website = http://getnikola.com
-Description = Deploy the site to GitHub pages.
+author = Puneeth Chaganti
+version = 1,0
+website = http://getnikola.com
+description = Deploy the site to GitHub pages.
+
+[Nikola]
+plugincategory = Command
+
diff --git a/nikola/plugins/command/github_deploy.py b/nikola/plugins/command/github_deploy.py
index 888a4f9..0ab9332 100644
--- a/nikola/plugins/command/github_deploy.py
+++ b/nikola/plugins/command/github_deploy.py
@@ -24,6 +24,8 @@
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+"""Deploy site to GitHub Pages."""
+
from __future__ import print_function
from datetime import datetime
import io
@@ -33,17 +35,19 @@ from textwrap import dedent
from nikola.plugin_categories import Command
from nikola.plugins.command.check import real_scan_files
-from nikola.utils import get_logger, req_missing, makedirs, unicode_str
+from nikola.utils import get_logger, req_missing, makedirs, unicode_str, STDERR_HANDLER
from nikola.__main__ import main
from nikola import __version__
def uni_check_output(*args, **kwargs):
+ """Run command and return output as Unicode (UTf-8)."""
o = subprocess.check_output(*args, **kwargs)
return o.decode('utf-8')
def check_ghp_import_installed():
+ """Check if ghp-import is installed."""
try:
subprocess.check_output(['ghp-import', '-h'])
except OSError:
@@ -53,7 +57,9 @@ def check_ghp_import_installed():
class CommandGitHubDeploy(Command):
- """ Deploy site to GitHub Pages. """
+
+ """Deploy site to GitHub Pages."""
+
name = 'github_deploy'
doc_usage = ''
@@ -70,10 +76,8 @@ class CommandGitHubDeploy(Command):
logger = None
def _execute(self, command, args):
-
- self.logger = get_logger(
- CommandGitHubDeploy.name, self.site.loghandlers
- )
+ """Run the deployment."""
+ self.logger = get_logger(CommandGitHubDeploy.name, STDERR_HANDLER)
# Check if ghp-import is installed
check_ghp_import_installed()
@@ -95,8 +99,7 @@ class CommandGitHubDeploy(Command):
return
def _commit_and_push(self):
- """ Commit all the files and push. """
-
+ """Commit all the files and push."""
source = self.site.config['GITHUB_SOURCE_BRANCH']
deploy = self.site.config['GITHUB_DEPLOY_BRANCH']
remote = self.site.config['GITHUB_REMOTE_NAME']
diff --git a/nikola/plugins/command/import_wordpress.plugin b/nikola/plugins/command/import_wordpress.plugin
index e072224..6c4384e 100644
--- a/nikola/plugins/command/import_wordpress.plugin
+++ b/nikola/plugins/command/import_wordpress.plugin
@@ -1,10 +1,13 @@
[Core]
-Name = import_wordpress
-Module = import_wordpress
+name = import_wordpress
+module = import_wordpress
[Documentation]
-Author = Roberto Alsina
-Version = 1.0
-Website = http://getnikola.com
-Description = Import a wordpress site from a XML dump (requires markdown).
+author = Roberto Alsina
+version = 1.0
+website = http://getnikola.com
+description = Import a wordpress site from a XML dump (requires markdown).
+
+[Nikola]
+plugincategory = Command
diff --git a/nikola/plugins/command/import_wordpress.py b/nikola/plugins/command/import_wordpress.py
index 674fc2a..a652ec8 100644
--- a/nikola/plugins/command/import_wordpress.py
+++ b/nikola/plugins/command/import_wordpress.py
@@ -24,13 +24,18 @@
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+"""Import a WordPress dump."""
+
from __future__ import unicode_literals, print_function
import os
import re
import sys
import datetime
+import io
+import json
import requests
from lxml import etree
+from collections import defaultdict
try:
from urlparse import urlparse
@@ -53,7 +58,37 @@ from nikola.plugins.command.init import SAMPLE_CONF, prepare_config, format_defa
LOGGER = utils.get_logger('import_wordpress', utils.STDERR_HANDLER)
+def install_plugin(site, plugin_name, output_dir=None, show_install_notes=False):
+ """Install a Nikola plugin."""
+ LOGGER.notice("Installing plugin '{0}'".format(plugin_name))
+ # Get hold of the 'plugin' plugin
+ plugin_installer_info = site.plugin_manager.getPluginByName('plugin', 'Command')
+ if plugin_installer_info is None:
+ LOGGER.error('Internal error: cannot find the "plugin" plugin which is supposed to come with Nikola!')
+ return False
+ if not plugin_installer_info.is_activated:
+ # Someone might have disabled the plugin in the `conf.py` used
+ site.plugin_manager.activatePluginByName(plugin_installer_info.name)
+ plugin_installer_info.plugin_object.set_site(site)
+ plugin_installer = plugin_installer_info.plugin_object
+ # Try to install the requested plugin
+ options = {}
+ for option in plugin_installer.cmd_options:
+ options[option['name']] = option['default']
+ options['install'] = plugin_name
+ options['output_dir'] = output_dir
+ options['show_install_notes'] = show_install_notes
+ if plugin_installer.execute(options=options) > 0:
+ return False
+ # Let the plugin manager find newly installed plugins
+ site.plugin_manager.collectPlugins()
+ # Re-scan for compiler extensions
+ site.compiler_extensions = site._activate_plugins_of_category("CompilerExtension")
+ return True
+
+
class CommandImportWordpress(Command, ImportMixin):
+
"""Import a WordPress dump."""
name = "import_wordpress"
@@ -70,6 +105,20 @@ class CommandImportWordpress(Command, ImportMixin):
'help': "Don't import drafts",
},
{
+ 'name': 'exclude_privates',
+ 'long': 'exclude-privates',
+ 'default': False,
+ 'type': bool,
+ 'help': "Don't import private posts",
+ },
+ {
+ 'name': 'include_empty_items',
+ 'long': 'include-empty-items',
+ 'default': False,
+ 'type': bool,
+ 'help': "Include empty posts and pages",
+ },
+ {
'name': 'squash_newlines',
'long': 'squash-newlines',
'default': False,
@@ -107,15 +156,57 @@ class CommandImportWordpress(Command, ImportMixin):
'type': str,
'help': "The pattern for translation files names",
},
+ {
+ 'name': 'export_categories_as_categories',
+ 'long': 'export-categories-as-categories',
+ 'default': False,
+ 'type': bool,
+ 'help': "Export categories as categories, instead of treating them as tags",
+ },
+ {
+ 'name': 'export_comments',
+ 'long': 'export-comments',
+ 'default': False,
+ 'type': bool,
+ 'help': "Export comments as .wpcomment files",
+ },
+ {
+ 'name': 'transform_to_html',
+ 'long': 'transform-to-html',
+ 'default': False,
+ 'type': bool,
+ 'help': "Uses WordPress page compiler to transform WordPress posts directly to HTML during import",
+ },
+ {
+ 'name': 'use_wordpress_compiler',
+ 'long': 'use-wordpress-compiler',
+ 'default': False,
+ 'type': bool,
+ 'help': "Instead of converting posts to markdown, leave them as is and use the WordPress page compiler",
+ },
+ {
+ 'name': 'install_wordpress_compiler',
+ 'long': 'install-wordpress-compiler',
+ 'default': False,
+ 'type': bool,
+ 'help': "Automatically installs the WordPress page compiler (either locally or in the new site) if required by other options.\nWarning: the compiler is GPL software!",
+ },
]
all_tags = set([])
- def _execute(self, options={}, args=[]):
- """Import a WordPress blog from an export file into a Nikola site."""
- if not args:
- print(self.help())
+ def _find_wordpress_compiler(self):
+ """Find WordPress compiler plugin."""
+ if self.wordpress_page_compiler is not None:
return
-
+ plugin_info = self.site.plugin_manager.getPluginByName('wordpress', 'PageCompiler')
+ if plugin_info is not None:
+ if not plugin_info.is_activated:
+ self.site.plugin_manager.activatePluginByName(plugin_info.name)
+ plugin_info.plugin_object.set_site(self.site)
+ self.wordpress_page_compiler = plugin_info.plugin_object
+
+ def _read_options(self, options, args):
+ """Read command-line options."""
options['filename'] = args.pop(0)
if args and ('output_folder' not in args or
@@ -136,19 +227,76 @@ class CommandImportWordpress(Command, ImportMixin):
self.output_folder = options.get('output_folder', 'new_site')
self.exclude_drafts = options.get('exclude_drafts', False)
+ self.exclude_privates = options.get('exclude_privates', False)
self.no_downloads = options.get('no_downloads', False)
+ self.import_empty_items = options.get('include_empty_items', False)
+
+ self.export_categories_as_categories = options.get('export_categories_as_categories', False)
+ self.export_comments = options.get('export_comments', False)
+
+ self.transform_to_html = options.get('transform_to_html', False)
+ self.use_wordpress_compiler = options.get('use_wordpress_compiler', False)
+ self.install_wordpress_compiler = options.get('install_wordpress_compiler', False)
+ self.wordpress_page_compiler = None
self.auth = None
if options.get('download_auth') is not None:
username_password = options.get('download_auth')
self.auth = tuple(username_password.split(':', 1))
if len(self.auth) < 2:
- print("Please specify HTTP authentication credentials in the form username:password.")
+ LOGGER.error("Please specify HTTP authentication credentials in the form username:password.")
return False
self.separate_qtranslate_content = options.get('separate_qtranslate_content')
self.translations_pattern = options.get('translations_pattern')
+ if self.transform_to_html and self.use_wordpress_compiler:
+ LOGGER.warn("It does not make sense to combine --transform-to-html with --use-wordpress-compiler, as the first converts all posts to HTML and the latter option affects zero posts.")
+
+ if self.transform_to_html:
+ self._find_wordpress_compiler()
+ if not self.wordpress_page_compiler and self.install_wordpress_compiler:
+ if not install_plugin(self.site, 'wordpress_compiler', output_dir='plugins'): # local install
+ return False
+ self._find_wordpress_compiler()
+ if not self.wordpress_page_compiler:
+ LOGGER.error("To compile WordPress posts to HTML, the WordPress post compiler is needed. You can install it via:")
+ LOGGER.error(" nikola plugin -i wordpress_compiler")
+ LOGGER.error("Please note that the WordPress post compiler is licensed under the GPL v2.")
+ return False
+
+ return True
+
+ def _prepare(self, channel):
+ """Prepare context and category hierarchy."""
+ self.context = self.populate_context(channel)
+ self.base_dir = urlparse(self.context['BASE_URL']).path
+
+ if self.export_categories_as_categories:
+ wordpress_namespace = channel.nsmap['wp']
+ cat_map = dict()
+ for cat in channel.findall('{{{0}}}category'.format(wordpress_namespace)):
+ # cat_id = get_text_tag(cat, '{{{0}}}term_id'.format(wordpress_namespace), None)
+ cat_slug = get_text_tag(cat, '{{{0}}}category_nicename'.format(wordpress_namespace), None)
+ cat_parent_slug = get_text_tag(cat, '{{{0}}}category_parent'.format(wordpress_namespace), None)
+ cat_name = get_text_tag(cat, '{{{0}}}cat_name'.format(wordpress_namespace), None)
+ cat_path = [cat_name]
+ if cat_parent_slug in cat_map:
+ cat_path = cat_map[cat_parent_slug] + cat_path
+ cat_map[cat_slug] = cat_path
+ self._category_paths = dict()
+ for cat, path in cat_map.items():
+ self._category_paths[cat] = utils.join_hierarchical_category_path(path)
+
+ def _execute(self, options={}, args=[]):
+ """Import a WordPress blog from an export file into a Nikola site."""
+ if not args:
+ print(self.help())
+ return False
+
+ if not self._read_options(options, args):
+ return False
+
# A place holder where extra language (if detected) will be stored
self.extra_languages = set()
@@ -166,8 +314,7 @@ class CommandImportWordpress(Command, ImportMixin):
req_missing(['phpserialize'], 'import WordPress dumps without --no-downloads')
channel = self.get_channel_from_file(self.wordpress_export_file)
- self.context = self.populate_context(channel)
- self.base_dir = urlparse(self.context['BASE_URL']).path
+ self._prepare(channel)
conf_template = self.generate_base_site()
# If user has specified a custom pattern for translation files we
@@ -181,6 +328,11 @@ class CommandImportWordpress(Command, ImportMixin):
self.extra_languages)
self.context['REDIRECTIONS'] = self.configure_redirections(
self.url_map)
+ if self.timezone:
+ self.context['TIMEZONE'] = self.timezone
+ if self.export_categories_as_categories:
+ self.context['CATEGORY_ALLOW_HIERARCHIES'] = True
+ self.context['CATEGORY_OUTPUT_FLAT_HIERARCHY'] = True
# Add tag redirects
for tag in self.all_tags:
@@ -197,18 +349,21 @@ class CommandImportWordpress(Command, ImportMixin):
self.write_urlmap_csv(
os.path.join(self.output_folder, 'url_map.csv'), self.url_map)
rendered_template = conf_template.render(**prepare_config(self.context))
- rendered_template = re.sub('# REDIRECTIONS = ', 'REDIRECTIONS = ',
- rendered_template)
-
- if self.timezone:
- rendered_template = re.sub('# TIMEZONE = \'UTC\'',
- 'TIMEZONE = \'' + self.timezone + '\'',
- rendered_template)
self.write_configuration(self.get_configuration_output_path(),
rendered_template)
+ if self.use_wordpress_compiler:
+ if self.install_wordpress_compiler:
+ if not install_plugin(self.site, 'wordpress_compiler', output_dir=os.path.join(self.output_folder, 'plugins')):
+ return False
+ else:
+ LOGGER.warn("Make sure to install the WordPress page compiler via")
+ LOGGER.warn(" nikola plugin -i wordpress_compiler")
+ LOGGER.warn("in your imported blog's folder ({0}), if you haven't installed it system-wide or user-wide. Otherwise, your newly imported blog won't compile.".format(self.output_folder))
+
@classmethod
def read_xml_file(cls, filename):
+ """Read XML file into memory."""
xml = []
with open(filename, 'rb') as fd:
@@ -221,12 +376,13 @@ class CommandImportWordpress(Command, ImportMixin):
@classmethod
def get_channel_from_file(cls, filename):
+ """Get channel from XML file."""
tree = etree.fromstring(cls.read_xml_file(filename))
channel = tree.find('channel')
return channel
- @staticmethod
- def populate_context(channel):
+ def populate_context(self, channel):
+ """Populate context with config for the site."""
wordpress_namespace = channel.nsmap['wp']
context = SAMPLE_CONF.copy()
@@ -255,28 +411,31 @@ class CommandImportWordpress(Command, ImportMixin):
author,
'{{{0}}}author_display_name'.format(wordpress_namespace),
"Joe Example")
- context['POSTS'] = '''(
- ("posts/*.rst", "posts", "post.tmpl"),
- ("posts/*.txt", "posts", "post.tmpl"),
- ("posts/*.md", "posts", "post.tmpl"),
- ("posts/*.wp", "posts", "post.tmpl"),
- )'''
- context['PAGES'] = '''(
- ("stories/*.rst", "stories", "story.tmpl"),
- ("stories/*.txt", "stories", "story.tmpl"),
- ("stories/*.md", "stories", "story.tmpl"),
- ("stories/*.wp", "stories", "story.tmpl"),
- )'''
- context['COMPILERS'] = '''{
- "rest": ('.txt', '.rst'),
- "markdown": ('.md', '.mdown', '.markdown', '.wp'),
- "html": ('.html', '.htm')
- }
- '''
+ extensions = ['rst', 'txt', 'md', 'html']
+ if self.use_wordpress_compiler:
+ extensions.append('wp')
+ POSTS = '(\n'
+ PAGES = '(\n'
+ for extension in extensions:
+ POSTS += ' ("posts/*.{0}", "posts", "post.tmpl"),\n'.format(extension)
+ PAGES += ' ("stories/*.{0}", "stories", "story.tmpl"),\n'.format(extension)
+ POSTS += ')\n'
+ PAGES += ')\n'
+ context['POSTS'] = POSTS
+ context['PAGES'] = PAGES
+ COMPILERS = '{\n'
+ COMPILERS += ''' "rest": ('.txt', '.rst'),''' + '\n'
+ COMPILERS += ''' "markdown": ('.md', '.mdown', '.markdown'),''' + '\n'
+ COMPILERS += ''' "html": ('.html', '.htm'),''' + '\n'
+ if self.use_wordpress_compiler:
+ COMPILERS += ''' "wordpress": ('.wp'),''' + '\n'
+ COMPILERS += '}'
+ context['COMPILERS'] = COMPILERS
return context
def download_url_content_to_file(self, url, dst_path):
+ """Download some content (attachments) to a file."""
if self.no_downloads:
return
@@ -291,6 +450,8 @@ class CommandImportWordpress(Command, ImportMixin):
LOGGER.warn("Downloading {0} to {1} failed: {2}".format(url, dst_path, err))
def import_attachment(self, item, wordpress_namespace):
+ """Import an attachment to the site."""
+ # Download main image
url = get_text_tag(
item, '{{{0}}}attachment_url'.format(wordpress_namespace), 'foo')
link = get_text_tag(item, '{{{0}}}link'.format(wordpress_namespace),
@@ -305,59 +466,136 @@ class CommandImportWordpress(Command, ImportMixin):
links[link] = '/' + dst_url
links[url] = '/' + dst_url
- self.download_additional_image_sizes(
- item,
- wordpress_namespace,
- os.path.dirname(url)
- )
-
- def download_additional_image_sizes(self, item, wordpress_namespace, source_path):
- if phpserialize is None:
- return
+ files = [path]
+ files_meta = [{}]
additional_metadata = item.findall('{{{0}}}postmeta'.format(wordpress_namespace))
- if additional_metadata is None:
- return
-
- for element in additional_metadata:
- meta_key = element.find('{{{0}}}meta_key'.format(wordpress_namespace))
- if meta_key is not None and meta_key.text == '_wp_attachment_metadata':
- meta_value = element.find('{{{0}}}meta_value'.format(wordpress_namespace))
-
- if meta_value is None:
- continue
-
- # Someone from Wordpress thought it was a good idea
- # serialize PHP objects into that metadata field. Given
- # that the export should give you the power to insert
- # your blogging into another site or system its not.
- # Why don't they just use JSON?
- if sys.version_info[0] == 2:
- try:
- metadata = phpserialize.loads(utils.sys_encode(meta_value.text))
- except ValueError:
- # local encoding might be wrong sometimes
+ if phpserialize and additional_metadata:
+ source_path = os.path.dirname(url)
+ for element in additional_metadata:
+ meta_key = element.find('{{{0}}}meta_key'.format(wordpress_namespace))
+ if meta_key is not None and meta_key.text == '_wp_attachment_metadata':
+ meta_value = element.find('{{{0}}}meta_value'.format(wordpress_namespace))
+
+ if meta_value is None:
+ continue
+
+ # Someone from Wordpress thought it was a good idea
+ # serialize PHP objects into that metadata field. Given
+ # that the export should give you the power to insert
+ # your blogging into another site or system its not.
+ # Why don't they just use JSON?
+ if sys.version_info[0] == 2:
+ try:
+ metadata = phpserialize.loads(utils.sys_encode(meta_value.text))
+ except ValueError:
+ # local encoding might be wrong sometimes
+ metadata = phpserialize.loads(meta_value.text.encode('utf-8'))
+ else:
metadata = phpserialize.loads(meta_value.text.encode('utf-8'))
- else:
- metadata = phpserialize.loads(meta_value.text.encode('utf-8'))
- size_key = b'sizes'
- file_key = b'file'
-
- if size_key not in metadata:
- continue
-
- for filename in [metadata[size_key][size][file_key] for size in metadata[size_key]]:
- url = '/'.join([source_path, filename.decode('utf-8')])
- path = urlparse(url).path
- dst_path = os.path.join(*([self.output_folder, 'files'] + list(path.split('/'))))
- dst_dir = os.path.dirname(dst_path)
- utils.makedirs(dst_dir)
- LOGGER.info("Downloading {0} => {1}".format(url, dst_path))
- self.download_url_content_to_file(url, dst_path)
- dst_url = '/'.join(dst_path.split(os.sep)[2:])
- links[url] = '/' + dst_url
- links[url] = '/' + dst_url
+ meta_key = b'image_meta'
+ size_key = b'sizes'
+ file_key = b'file'
+ width_key = b'width'
+ height_key = b'height'
+
+ # Extract metadata
+ if width_key in metadata and height_key in metadata:
+ files_meta[0]['width'] = int(metadata[width_key])
+ files_meta[0]['height'] = int(metadata[height_key])
+
+ if meta_key in metadata:
+ image_meta = metadata[meta_key]
+ dst_meta = {}
+
+ def add(our_key, wp_key, is_int=False, ignore_zero=False, is_float=False):
+ if wp_key in image_meta:
+ value = image_meta[wp_key]
+ if is_int:
+ value = int(value)
+ if ignore_zero and value == 0:
+ return
+ elif is_float:
+ value = float(value)
+ if ignore_zero and value == 0:
+ return
+ else:
+ value = value.decode('utf-8') # assume UTF-8
+ if value == '': # skip empty values
+ return
+ dst_meta[our_key] = value
+
+ add('aperture', b'aperture', is_float=True, ignore_zero=True)
+ add('credit', b'credit')
+ add('camera', b'camera')
+ add('caption', b'caption')
+ add('created_timestamp', b'created_timestamp', is_float=True, ignore_zero=True)
+ add('copyright', b'copyright')
+ add('focal_length', b'focal_length', is_float=True, ignore_zero=True)
+ add('iso', b'iso', is_float=True, ignore_zero=True)
+ add('shutter_speed', b'shutter_speed', ignore_zero=True, is_float=True)
+ add('title', b'title')
+
+ if len(dst_meta) > 0:
+ files_meta[0]['meta'] = dst_meta
+
+ # Find other sizes of image
+ if size_key not in metadata:
+ continue
+
+ for size in metadata[size_key]:
+ filename = metadata[size_key][size][file_key]
+ url = '/'.join([source_path, filename.decode('utf-8')])
+
+ # Construct metadata
+ meta = {}
+ meta['size'] = size.decode('utf-8')
+ if width_key in metadata[size_key][size] and height_key in metadata[size_key][size]:
+ meta['width'] = metadata[size_key][size][width_key]
+ meta['height'] = metadata[size_key][size][height_key]
+
+ path = urlparse(url).path
+ dst_path = os.path.join(*([self.output_folder, 'files'] + list(path.split('/'))))
+ dst_dir = os.path.dirname(dst_path)
+ utils.makedirs(dst_dir)
+ LOGGER.info("Downloading {0} => {1}".format(url, dst_path))
+ self.download_url_content_to_file(url, dst_path)
+ dst_url = '/'.join(dst_path.split(os.sep)[2:])
+ links[url] = '/' + dst_url
+
+ files.append(path)
+ files_meta.append(meta)
+
+ # Prepare result
+ result = {}
+ result['files'] = files
+ result['files_meta'] = files_meta
+
+ # Prepare extraction of more information
+ dc_namespace = item.nsmap['dc']
+ content_namespace = item.nsmap['content']
+ excerpt_namespace = item.nsmap['excerpt']
+
+ def add(result_key, key, namespace=None, filter=None, store_empty=False):
+ if namespace is not None:
+ value = get_text_tag(item, '{{{0}}}{1}'.format(namespace, key), None)
+ else:
+ value = get_text_tag(item, key, None)
+ if value is not None:
+ if filter:
+ value = filter(value)
+ if value or store_empty:
+ result[result_key] = value
+
+ add('title', 'title')
+ add('date_utc', 'post_date_gmt', namespace=wordpress_namespace)
+ add('wordpress_user_name', 'creator', namespace=dc_namespace)
+ add('content', 'encoded', namespace=content_namespace)
+ add('excerpt', 'encoded', namespace=excerpt_namespace)
+ add('description', 'description')
+
+ return result
code_re1 = re.compile(r'\[code.* lang.*?="(.*?)?".*\](.*?)\[/code\]', re.DOTALL | re.MULTILINE)
code_re2 = re.compile(r'\[sourcecode.* lang.*?="(.*?)?".*\](.*?)\[/sourcecode\]', re.DOTALL | re.MULTILINE)
@@ -365,6 +603,7 @@ class CommandImportWordpress(Command, ImportMixin):
code_re4 = re.compile(r'\[sourcecode.*?\](.*?)\[/sourcecode\]', re.DOTALL | re.MULTILINE)
def transform_code(self, content):
+ """Transform code blocks."""
# http://en.support.wordpress.com/code/posting-source-code/. There are
# a ton of things not supported here. We only do a basic [code
# lang="x"] -> ```x translation, and remove quoted html entities (<,
@@ -390,26 +629,126 @@ class CommandImportWordpress(Command, ImportMixin):
@staticmethod
def transform_caption(content):
+ """Transform captions."""
new_caption = re.sub(r'\[/caption\]', '', content)
new_caption = re.sub(r'\[caption.*\]', '', new_caption)
return new_caption
def transform_multiple_newlines(self, content):
- """Replaces multiple newlines with only two."""
+ """Replace multiple newlines with only two."""
if self.squash_newlines:
return re.sub(r'\n{3,}', r'\n\n', content)
else:
return content
- def transform_content(self, content):
- content = self.transform_code(content)
- content = self.transform_caption(content)
- content = self.transform_multiple_newlines(content)
- return content
+ def transform_content(self, content, post_format, attachments):
+ """Transform content into appropriate format."""
+ if post_format == 'wp':
+ if self.transform_to_html:
+ additional_data = {}
+ if attachments is not None:
+ additional_data['attachments'] = attachments
+ try:
+ content = self.wordpress_page_compiler.compile_to_string(content, additional_data=additional_data)
+ except TypeError: # old versions of the plugin don't support the additional argument
+ content = self.wordpress_page_compiler.compile_to_string(content)
+ return content, 'html', True
+ elif self.use_wordpress_compiler:
+ return content, 'wp', False
+ else:
+ content = self.transform_code(content)
+ content = self.transform_caption(content)
+ content = self.transform_multiple_newlines(content)
+ return content, 'md', True
+ elif post_format == 'markdown':
+ return content, 'md', True
+ elif post_format == 'none':
+ return content, 'html', True
+ else:
+ return None
+
+ def _extract_comment(self, comment, wordpress_namespace):
+ """Extract comment from dump."""
+ id = int(get_text_tag(comment, "{{{0}}}comment_id".format(wordpress_namespace), None))
+ author = get_text_tag(comment, "{{{0}}}comment_author".format(wordpress_namespace), None)
+ author_email = get_text_tag(comment, "{{{0}}}comment_author_email".format(wordpress_namespace), None)
+ author_url = get_text_tag(comment, "{{{0}}}comment_author_url".format(wordpress_namespace), None)
+ author_IP = get_text_tag(comment, "{{{0}}}comment_author_IP".format(wordpress_namespace), None)
+ # date = get_text_tag(comment, "{{{0}}}comment_date".format(wordpress_namespace), None)
+ date_gmt = get_text_tag(comment, "{{{0}}}comment_date_gmt".format(wordpress_namespace), None)
+ content = get_text_tag(comment, "{{{0}}}comment_content".format(wordpress_namespace), None)
+ approved = get_text_tag(comment, "{{{0}}}comment_approved".format(wordpress_namespace), '0')
+ if approved == '0':
+ approved = 'hold'
+ elif approved == '1':
+ approved = 'approved'
+ elif approved == 'spam' or approved == 'trash':
+ pass
+ else:
+ LOGGER.warn("Unknown comment approved status: " + str(approved))
+ parent = int(get_text_tag(comment, "{{{0}}}comment_parent".format(wordpress_namespace), 0))
+ if parent == 0:
+ parent = None
+ user_id = int(get_text_tag(comment, "{{{0}}}comment_user_id".format(wordpress_namespace), 0))
+ if user_id == 0:
+ user_id = None
+
+ if approved == 'trash' or approved == 'spam':
+ return None
+
+ return {"id": id, "status": str(approved), "approved": approved == "approved",
+ "author": author, "email": author_email, "url": author_url, "ip": author_IP,
+ "date": date_gmt, "content": content, "parent": parent, "user_id": user_id}
+
+ def _write_comment(self, filename, comment):
+ """Write comment to file."""
+ def write_header_line(fd, header_field, header_content):
+ """Write comment header line."""
+ if header_content is None:
+ return
+ header_content = str(header_content).replace('\n', ' ')
+ line = '.. ' + header_field + ': ' + header_content + '\n'
+ fd.write(line.encode('utf8'))
+
+ with open(filename, "wb+") as fd:
+ write_header_line(fd, "id", comment["id"])
+ write_header_line(fd, "status", comment["status"])
+ write_header_line(fd, "approved", comment["approved"])
+ write_header_line(fd, "author", comment["author"])
+ write_header_line(fd, "author_email", comment["email"])
+ write_header_line(fd, "author_url", comment["url"])
+ write_header_line(fd, "author_IP", comment["ip"])
+ write_header_line(fd, "date_utc", comment["date"])
+ write_header_line(fd, "parent_id", comment["parent"])
+ write_header_line(fd, "wordpress_user_id", comment["user_id"])
+ fd.write(('\n' + comment['content']).encode('utf8'))
+
+ def _create_metadata(self, status, excerpt, tags, categories, post_name=None):
+ """Create post metadata."""
+ other_meta = {'wp-status': status}
+ if excerpt is not None:
+ other_meta['excerpt'] = excerpt
+ if self.export_categories_as_categories:
+ cats = []
+ for text in categories:
+ if text in self._category_paths:
+ cats.append(self._category_paths[text])
+ else:
+ cats.append(utils.join_hierarchical_category_path([text]))
+ other_meta['categories'] = ','.join(cats)
+ if len(cats) > 0:
+ other_meta['category'] = cats[0]
+ if len(cats) > 1:
+ LOGGER.warn(('Post "{0}" has more than one category! ' +
+ 'Will only use the first one.').format(post_name))
+ tags_cats = tags
+ else:
+ tags_cats = tags + categories
+ return tags_cats, other_meta
- def import_item(self, item, wordpress_namespace, out_folder=None):
- """Takes an item from the feed and creates a post file."""
+ def import_postpage_item(self, item, wordpress_namespace, out_folder=None, attachments=None):
+ """Take an item from the feed and creates a post file."""
if out_folder is None:
out_folder = 'posts'
@@ -439,7 +778,7 @@ class CommandImportWordpress(Command, ImportMixin):
item, '{{{0}}}post_id'.format(wordpress_namespace), None)
if not slug: # should never happen
LOGGER.error("Error converting post:", title)
- return
+ return False
else:
if len(pathlist) > 1:
out_folder = os.path.join(*([out_folder] + pathlist[:-1]))
@@ -461,23 +800,42 @@ class CommandImportWordpress(Command, ImportMixin):
item, '{{{0}}}status'.format(wordpress_namespace), 'publish')
content = get_text_tag(
item, '{http://purl.org/rss/1.0/modules/content/}encoded', '')
+ excerpt = get_text_tag(
+ item, '{http://wordpress.org/export/1.2/excerpt/}encoded', None)
+
+ if excerpt is not None:
+ if len(excerpt) == 0:
+ excerpt = None
tags = []
+ categories = []
if status == 'trash':
LOGGER.warn('Trashed post "{0}" will not be imported.'.format(title))
- return
+ return False
+ elif status == 'private':
+ tags.append('private')
+ is_draft = False
+ is_private = True
elif status != 'publish':
tags.append('draft')
is_draft = True
+ is_private = False
else:
is_draft = False
+ is_private = False
for tag in item.findall('category'):
text = tag.text
- if text == 'Uncategorized':
+ type = 'category'
+ if 'domain' in tag.attrib:
+ type = tag.attrib['domain']
+ if text == 'Uncategorized' and type == 'category':
continue
- tags.append(text)
self.all_tags.add(text)
+ if type == 'category':
+ categories.append(type)
+ else:
+ tags.append(text)
if '$latex' in content:
tags.append('mathjax')
@@ -487,11 +845,16 @@ class CommandImportWordpress(Command, ImportMixin):
format_tag = [x for x in item.findall('*//{%s}meta_key' % wordpress_namespace) if x.text == '_tc_post_format']
if format_tag:
post_format = format_tag[0].getparent().find('{%s}meta_value' % wordpress_namespace).text
+ if post_format == 'wpautop':
+ post_format = 'wp'
if is_draft and self.exclude_drafts:
LOGGER.notice('Draft "{0}" will not be imported.'.format(title))
-
- elif content.strip():
+ return False
+ elif is_private and self.exclude_privates:
+ LOGGER.notice('Private post "{0}" will not be imported.'.format(title))
+ return False
+ elif content.strip() or self.import_empty_items:
# If no content is found, no files are written.
self.url_map[link] = (self.context['SITE_URL'] +
out_folder.rstrip('/') + '/' + slug +
@@ -503,53 +866,121 @@ class CommandImportWordpress(Command, ImportMixin):
content_translations = {"": content}
default_language = self.context["DEFAULT_LANG"]
for lang, content in content_translations.items():
+ try:
+ content, extension, rewrite_html = self.transform_content(content, post_format, attachments)
+ except:
+ LOGGER.error(('Cannot interpret post "{0}" (language {1}) with post ' +
+ 'format {2}!').format(os.path.join(out_folder, slug), lang, post_format))
+ return False
if lang:
out_meta_filename = slug + '.meta'
if lang == default_language:
- out_content_filename = slug + '.wp'
+ out_content_filename = slug + '.' + extension
else:
out_content_filename \
= utils.get_translation_candidate(self.context,
- slug + ".wp", lang)
+ slug + "." + extension, lang)
self.extra_languages.add(lang)
meta_slug = slug
else:
out_meta_filename = slug + '.meta'
- out_content_filename = slug + '.wp'
+ out_content_filename = slug + '.' + extension
meta_slug = slug
- if post_format == 'wp':
- content = self.transform_content(content)
+ tags, other_meta = self._create_metadata(status, excerpt, tags, categories,
+ post_name=os.path.join(out_folder, slug))
self.write_metadata(os.path.join(self.output_folder, out_folder,
out_meta_filename),
- title, meta_slug, post_date, description, tags)
+ title, meta_slug, post_date, description, tags, **other_meta)
self.write_content(
os.path.join(self.output_folder,
out_folder, out_content_filename),
- content)
+ content,
+ rewrite_html)
+
+ if self.export_comments:
+ comments = []
+ for tag in item.findall('{{{0}}}comment'.format(wordpress_namespace)):
+ comment = self._extract_comment(tag, wordpress_namespace)
+ if comment is not None:
+ comments.append(comment)
+
+ for comment in comments:
+ comment_filename = slug + "." + str(comment['id']) + ".wpcomment"
+ self._write_comment(os.path.join(self.output_folder, out_folder, comment_filename), comment)
+
+ return (out_folder, slug)
else:
- LOGGER.warn('Not going to import "{0}" because it seems to contain'
- ' no content.'.format(title))
+ LOGGER.warn(('Not going to import "{0}" because it seems to contain'
+ ' no content.').format(title))
+ return False
- def process_item(self, item):
+ def _extract_item_info(self, item):
+ """Extract information about an item."""
# The namespace usually is something like:
# http://wordpress.org/export/1.2/
wordpress_namespace = item.nsmap['wp']
post_type = get_text_tag(
item, '{{{0}}}post_type'.format(wordpress_namespace), 'post')
+ post_id = int(get_text_tag(
+ item, '{{{0}}}post_id'.format(wordpress_namespace), "0"))
+ parent_id = get_text_tag(
+ item, '{{{0}}}post_parent'.format(wordpress_namespace), None)
+ return wordpress_namespace, post_type, post_id, parent_id
+
+ def process_item_if_attachment(self, item):
+ """Process attachments."""
+ wordpress_namespace, post_type, post_id, parent_id = self._extract_item_info(item)
if post_type == 'attachment':
- self.import_attachment(item, wordpress_namespace)
- elif post_type == 'post':
- self.import_item(item, wordpress_namespace, 'posts')
- else:
- self.import_item(item, wordpress_namespace, 'stories')
+ data = self.import_attachment(item, wordpress_namespace)
+ # If parent was found, store relation with imported files
+ if parent_id is not None and int(parent_id) != 0:
+ self.attachments[int(parent_id)][post_id] = data
+ else:
+ LOGGER.warn("Attachment #{0} ({1}) has no parent!".format(post_id, data['files']))
+
+ def write_attachments_info(self, path, attachments):
+ """Write attachments info file."""
+ with io.open(path, "wb") as file:
+ file.write(json.dumps(attachments).encode('utf-8'))
+
+ def process_item_if_post_or_page(self, item):
+ """Process posts and pages."""
+ wordpress_namespace, post_type, post_id, parent_id = self._extract_item_info(item)
+
+ if post_type != 'attachment':
+ # Get attachments for post
+ attachments = self.attachments.pop(post_id, None)
+ # Import item
+ if post_type == 'post':
+ out_folder_slug = self.import_postpage_item(item, wordpress_namespace, 'posts', attachments)
+ else:
+ out_folder_slug = self.import_postpage_item(item, wordpress_namespace, 'stories', attachments)
+ # Process attachment data
+ if attachments is not None:
+ # If post was exported, store data
+ if out_folder_slug:
+ destination = os.path.join(self.output_folder, out_folder_slug[0],
+ out_folder_slug[1] + ".attachments.json")
+ self.write_attachments_info(destination, attachments)
def import_posts(self, channel):
+ """Import posts into the site."""
+ self.attachments = defaultdict(dict)
+ # First process attachments
+ for item in channel.findall('item'):
+ self.process_item_if_attachment(item)
+ # Next process posts
for item in channel.findall('item'):
- self.process_item(item)
+ self.process_item_if_post_or_page(item)
+ # Assign attachments to posts
+ for post_id in self.attachments:
+ LOGGER.warn(("Found attachments for post or page #{0}, but didn't find post or page. " +
+ "(Attachments: {1})").format(post_id, [e['files'][0] for e in self.attachments[post_id].values()]))
def get_text_tag(tag, name, default):
+ """Get the text of an XML tag."""
if tag is None:
return default
t = tag.find(name)
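
[Editor's note] The reorganized importer splits the old process_item into an attachment pass (process_item_if_attachment) and a post/page pass (process_item_if_post_or_page), so every attachment is known before its parent post is written and leftovers can be reported. A minimal sketch of that two-pass association follows; the `items` list and its dict keys are stand-ins for the data the importer extracts from the feed, not Nikola's actual structures.

from collections import defaultdict

def associate_attachments(items):
    """Two-pass association: collect attachments first, then hand them to posts."""
    attachments = defaultdict(dict)

    # Pass 1: remember every attachment under its parent's id.
    for item in items:
        if item['type'] == 'attachment':
            if item['parent']:
                attachments[item['parent']][item['id']] = item['files']
            else:
                print("Attachment #{0} has no parent!".format(item['id']))

    # Pass 2: posts and pages pop "their" attachments; leftovers are orphans.
    for item in items:
        if item['type'] != 'attachment':
            own = attachments.pop(item['id'], None)
            if own is not None:
                print("Post #{0} gets attachments {1}".format(item['id'], own))

    # Anything still in the dict belongs to a post that was never imported.
    for parent_id in attachments:
        print("Found attachments for post #{0}, but no post.".format(parent_id))

associate_attachments([
    {'id': 1, 'type': 'post', 'parent': None, 'files': None},
    {'id': 2, 'type': 'attachment', 'parent': 1, 'files': ['img/cat.png']},
    {'id': 3, 'type': 'attachment', 'parent': 9, 'files': ['img/dog.png']},
])
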
@@ -560,9 +991,10 @@ def get_text_tag(tag, name, default):
def separate_qtranslate_content(text):
- """Parse the content of a wordpress post or page and separate
- the various language specific contents when they are delimited
- with qtranslate tags: <!--:LL-->blabla<!--:-->"""
+ """Parse the content of a wordpress post or page and separate qtranslate languages.
+
+ qtranslate tags: <!--:LL-->blabla<!--:-->
+ """
# TODO: uniformize qtranslate tags <!--/en--> => <!--:-->
qt_start = "<!--:"
qt_end = "-->"
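
[Editor's note] separate_qtranslate_content splits the <!--:LL-->content<!--:--> blocks that the qtranslate WordPress plugin leaves in post bodies. The real implementation walks the string using the qt_start/qt_end markers shown above; the regex-based sketch below only illustrates the expected input and output and is not the code Nikola ships.

import re

def split_qtranslate(text):
    """Return {language: content} for qtranslate-tagged text (sketch only)."""
    # <!--:en-->Hello<!--:--> : a two-letter code, the content, then a closer.
    pattern = re.compile(r'<!--:([a-z]{2})-->(.*?)<!--:-->', re.DOTALL)
    result = {}
    for lang, content in pattern.findall(text):
        result[lang] = result.get(lang, '') + content
    return result

print(split_qtranslate('<!--:en-->Hello<!--:--><!--:fr-->Bonjour<!--:-->'))
# {'en': 'Hello', 'fr': 'Bonjour'}
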
diff --git a/nikola/plugins/command/init.plugin b/nikola/plugins/command/init.plugin
index 850dba9..a5404c4 100644
--- a/nikola/plugins/command/init.plugin
+++ b/nikola/plugins/command/init.plugin
@@ -1,9 +1,13 @@
[Core]
-Name = init
-Module = init
+name = init
+module = init
[Documentation]
-Author = Roberto Alsina
-Version = 1.0
-Website = http://getnikola.com
-Description = Create a new site.
+author = Roberto Alsina
+version = 1.0
+website = http://getnikola.com
+description = Create a new site.
+
+[Nikola]
+plugincategory = Command
+
diff --git a/nikola/plugins/command/init.py b/nikola/plugins/command/init.py
index 7a36894..91ccdb4 100644
--- a/nikola/plugins/command/init.py
+++ b/nikola/plugins/command/init.py
@@ -24,6 +24,8 @@
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+"""Create a new site."""
+
from __future__ import print_function, unicode_literals
import os
import shutil
@@ -54,6 +56,7 @@ SAMPLE_CONF = {
'BLOG_EMAIL': "joe@demo.site",
'BLOG_DESCRIPTION': "This is a demo site for Nikola.",
'PRETTY_URLS': False,
+ 'STRIP_INDEXES': False,
'DEFAULT_LANG': "en",
'TRANSLATIONS': """{
DEFAULT_LANG: "",
@@ -64,6 +67,8 @@ SAMPLE_CONF = {
'TIMEZONE': 'UTC',
'COMMENT_SYSTEM': 'disqus',
'COMMENT_SYSTEM_ID': 'nikolademo',
+ 'CATEGORY_ALLOW_HIERARCHIES': False,
+ 'CATEGORY_OUTPUT_FLAT_HIERARCHY': False,
'TRANSLATIONS_PATTERN': DEFAULT_TRANSLATIONS_PATTERN,
'INDEX_READ_MORE_LINK': DEFAULT_INDEX_READ_MORE_LINK,
'RSS_READ_MORE_LINK': DEFAULT_RSS_READ_MORE_LINK,
@@ -103,6 +108,7 @@ SAMPLE_CONF = {
'REDIRECTIONS': [],
}
+
# Generate a list of supported languages here.
# Ugly code follows.
_suplang = {}
@@ -154,8 +160,7 @@ SAMPLE_CONF['_SUPPORTED_COMMENT_SYSTEMS'] = '\n'.join(textwrap.wrap(
def format_default_translations_config(additional_languages):
- """Return the string to configure the TRANSLATIONS config variable to
- make each additional language visible on the generated site."""
+ """Adapt TRANSLATIONS setting for all additional languages."""
if not additional_languages:
return SAMPLE_CONF["TRANSLATIONS"]
lang_paths = [' DEFAULT_LANG: "",']
@@ -164,12 +169,12 @@ def format_default_translations_config(additional_languages):
return "{{\n{0}\n}}".format("\n".join(lang_paths))
-def format_navigation_links(additional_languages, default_lang, messages):
+def format_navigation_links(additional_languages, default_lang, messages, strip_indexes=False):
"""Return the string to configure NAVIGATION_LINKS."""
f = u"""\
{0}: (
("{1}/archive.html", "{2[Archive]}"),
- ("{1}/categories/index.html", "{2[Tags]}"),
+ ("{1}/categories/{3}", "{2[Tags]}"),
("{1}/rss.xml", "{2[RSS feed]}"),
),"""
@@ -185,27 +190,32 @@ def format_navigation_links(additional_languages, default_lang, messages):
fmsg[i] = i
return fmsg
+ if strip_indexes:
+ index_html = ''
+ else:
+ index_html = 'index.html'
+
# handle the default language
- pairs.append(f.format('DEFAULT_LANG', '', get_msg(default_lang)))
+ pairs.append(f.format('DEFAULT_LANG', '', get_msg(default_lang), index_html))
for l in additional_languages:
- pairs.append(f.format(json.dumps(l, ensure_ascii=False), '/' + l, get_msg(l)))
+ pairs.append(f.format(json.dumps(l, ensure_ascii=False), '/' + l, get_msg(l), index_html))
return u'{{\n{0}\n}}'.format('\n\n'.join(pairs))
-# In order to ensure proper escaping, all variables but the three
-# pre-formatted ones are handled by json.dumps().
+# In order to ensure proper escaping, all variables but the pre-formatted ones
+# are handled by json.dumps().
def prepare_config(config):
"""Parse sample config with JSON."""
p = config.copy()
- p.update(dict((k, json.dumps(v, ensure_ascii=False)) for k, v in p.items()
- if k not in ('POSTS', 'PAGES', 'COMPILERS', 'TRANSLATIONS', 'NAVIGATION_LINKS', '_SUPPORTED_LANGUAGES', '_SUPPORTED_COMMENT_SYSTEMS', 'INDEX_READ_MORE_LINK', 'RSS_READ_MORE_LINK', 'PRETTY_URLS')))
+ p.update({k: json.dumps(v, ensure_ascii=False) for k, v in p.items()
+ if k not in ('POSTS', 'PAGES', 'COMPILERS', 'TRANSLATIONS', 'NAVIGATION_LINKS', '_SUPPORTED_LANGUAGES', '_SUPPORTED_COMMENT_SYSTEMS', 'INDEX_READ_MORE_LINK', 'RSS_READ_MORE_LINK')})
# READ_MORE_LINKs require some special treatment.
p['INDEX_READ_MORE_LINK'] = "'" + p['INDEX_READ_MORE_LINK'].replace("'", "\\'") + "'"
p['RSS_READ_MORE_LINK'] = "'" + p['RSS_READ_MORE_LINK'].replace("'", "\\'") + "'"
- # json would make that `true` instead of `True`
- p['PRETTY_URLS'] = str(p['PRETTY_URLS'])
+ # fix booleans and None
+ p.update({k: str(v) for k, v in config.items() if isinstance(v, bool) or v is None})
return p
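
[Editor's note] prepare_config now stringifies every boolean (and None) from the sample configuration instead of special-casing PRETTY_URLS, because json.dumps would otherwise emit true/false/null, which is not valid Python for conf.py. A standalone sketch of that conversion; the sample dict below is a made-up stand-in for SAMPLE_CONF.

import json

sample = {'PRETTY_URLS': True, 'STRIP_INDEXES': False,
          'BLOG_TITLE': 'Demo "Site"', 'EXTRA_SETTING': None}

# json.dumps gives 'true', 'false' and 'null' for these values...
prepared = {k: json.dumps(v, ensure_ascii=False) for k, v in sample.items()}
# ...so booleans and None are re-stringified to valid Python literals.
prepared.update({k: str(v) for k, v in sample.items()
                 if isinstance(v, bool) or v is None})

print(prepared['PRETTY_URLS'])    # True
print(prepared['STRIP_INDEXES'])  # False
print(prepared['EXTRA_SETTING'])  # None
print(prepared['BLOG_TITLE'])     # "Demo \"Site\"" (escaped by json.dumps)
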
@@ -239,11 +249,13 @@ class CommandInit(Command):
@classmethod
def copy_sample_site(cls, target):
+ """Copy sample site data to target directory."""
src = resource_filename('nikola', os.path.join('data', 'samplesite'))
shutil.copytree(src, target)
@staticmethod
def create_configuration(target):
+ """Create configuration file."""
template_path = resource_filename('nikola', 'conf.py.in')
conf_template = Template(filename=template_path)
conf_path = os.path.join(target, 'conf.py')
@@ -252,12 +264,14 @@ class CommandInit(Command):
@staticmethod
def create_configuration_to_string():
+ """Return configuration file as a string."""
template_path = resource_filename('nikola', 'conf.py.in')
conf_template = Template(filename=template_path)
return conf_template.render(**prepare_config(SAMPLE_CONF))
@classmethod
def create_empty_site(cls, target):
+ """Create an empty site with directories only."""
for folder in ('files', 'galleries', 'listings', 'posts', 'stories'):
makedirs(os.path.join(target, folder))
@@ -295,7 +309,8 @@ class CommandInit(Command):
SAMPLE_CONF['SITE_URL'] = answer
def prettyhandler(default, toconf):
- SAMPLE_CONF['PRETTY_URLS'] = ask_yesno('Enable pretty URLs (/page/ instead of /page.html) that don’t need web server configuration?', default=True)
+ SAMPLE_CONF['PRETTY_URLS'] = ask_yesno('Enable pretty URLs (/page/ instead of /page.html) that don\'t need web server configuration?', default=True)
+ SAMPLE_CONF['STRIP_INDEXES'] = SAMPLE_CONF['PRETTY_URLS']
def lhandler(default, toconf, show_header=True):
if show_header:
@@ -333,7 +348,7 @@ class CommandInit(Command):
# not inherit from anywhere.
try:
messages = load_messages(['base'], tr, default)
- SAMPLE_CONF['NAVIGATION_LINKS'] = format_navigation_links(langs, default, messages)
+ SAMPLE_CONF['NAVIGATION_LINKS'] = format_navigation_links(langs, default, messages, SAMPLE_CONF['STRIP_INDEXES'])
except nikola.utils.LanguageNotFoundError as e:
print(" ERROR: the language '{0}' is not supported.".format(e.lang))
print(" Are you sure you spelled the name correctly? Names are case-sensitive and need to be reproduced as-is (complete with the country specifier, if any).")
diff --git a/nikola/plugins/command/install_theme.plugin b/nikola/plugins/command/install_theme.plugin
index 54a91ff..8434f2e 100644
--- a/nikola/plugins/command/install_theme.plugin
+++ b/nikola/plugins/command/install_theme.plugin
@@ -1,10 +1,13 @@
[Core]
-Name = install_theme
-Module = install_theme
+name = install_theme
+module = install_theme
[Documentation]
-Author = Roberto Alsina
-Version = 1.0
-Website = http://getnikola.com
-Description = Install a theme into the current site.
+author = Roberto Alsina
+version = 1.0
+website = http://getnikola.com
+description = Install a theme into the current site.
+
+[Nikola]
+plugincategory = Command
diff --git a/nikola/plugins/command/install_theme.py b/nikola/plugins/command/install_theme.py
index 4937509..f02252e 100644
--- a/nikola/plugins/command/install_theme.py
+++ b/nikola/plugins/command/install_theme.py
@@ -24,10 +24,12 @@
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+"""Install a theme."""
+
from __future__ import print_function
import os
import io
-import json
+import time
import requests
import pygments
@@ -41,6 +43,7 @@ LOGGER = utils.get_logger('install_theme', utils.STDERR_HANDLER)
class CommandInstallTheme(Command):
+
"""Install a theme."""
name = "install_theme"
@@ -95,8 +98,13 @@ class CommandInstallTheme(Command):
if name is None and not listing:
LOGGER.error("This command needs either a theme name or the -l option.")
return False
- data = requests.get(url).text
- data = json.loads(data)
+ try:
+ data = requests.get(url).json()
+ except requests.exceptions.SSLError:
+ LOGGER.warning("SSL error, using http instead of https (press ^C to abort)")
+ time.sleep(1)
+ url = url.replace('https', 'http', 1)
+ data = requests.get(url).json()
if listing:
print("Themes:")
print("-------")
@@ -122,11 +130,21 @@ class CommandInstallTheme(Command):
LOGGER.notice('Remember to set THEME="{0}" in conf.py to use this theme.'.format(origname))
def do_install(self, name, data):
+ """Download and install a theme."""
if name in data:
utils.makedirs(self.output_dir)
- LOGGER.info("Downloading '{0}'".format(data[name]))
+ url = data[name]
+ LOGGER.info("Downloading '{0}'".format(url))
+ try:
+ zip_data = requests.get(url).content
+ except requests.exceptions.SSLError:
+ LOGGER.warning("SSL error, using http instead of https (press ^C to abort)")
+ time.sleep(1)
+ url = url.replace('https', 'http', 1)
+ zip_data = requests.get(url).content
+
zip_file = io.BytesIO()
- zip_file.write(requests.get(data[name]).content)
+ zip_file.write(zip_data)
LOGGER.info("Extracting '{0}' into themes/".format(name))
utils.extract_all(zip_file)
dest_path = os.path.join(self.output_dir, name)
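
[Editor's note] Both install_theme and, further below, the plugin command now retry failed HTTPS downloads over plain HTTP after a short pause instead of crashing on requests.exceptions.SSLError (common on old Python builds without SNI support). The pattern, extracted into a hedged helper; the function name is illustrative and not part of Nikola.

import time
import requests

def fetch_with_http_fallback(url):
    """Return response content, falling back from https to http on SSL errors."""
    try:
        return requests.get(url).content
    except requests.exceptions.SSLError:
        # Mirror the command's behaviour: warn, wait a second so ^C can abort,
        # then retry the same URL over plain http.
        print("SSL error, using http instead of https (press ^C to abort)")
        time.sleep(1)
        return requests.get(url.replace('https', 'http', 1)).content
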
diff --git a/nikola/plugins/command/new_page.plugin b/nikola/plugins/command/new_page.plugin
index f078dd6..145a419 100644
--- a/nikola/plugins/command/new_page.plugin
+++ b/nikola/plugins/command/new_page.plugin
@@ -1,9 +1,13 @@
[Core]
-Name = new_page
-Module = new_page
+name = new_page
+module = new_page
[Documentation]
-Author = Roberto Alsina, Chris Warrick
-Version = 1.0
-Website = http://getnikola.com
-Description = Create a new page.
+author = Roberto Alsina, Chris Warrick
+version = 1.0
+website = http://getnikola.com
+description = Create a new page.
+
+[Nikola]
+plugincategory = Command
+
diff --git a/nikola/plugins/command/new_page.py b/nikola/plugins/command/new_page.py
index 39a85bd..811e28b 100644
--- a/nikola/plugins/command/new_page.py
+++ b/nikola/plugins/command/new_page.py
@@ -24,12 +24,15 @@
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+"""Create a new page."""
+
from __future__ import unicode_literals, print_function
from nikola.plugin_categories import Command
class CommandNewPage(Command):
+
"""Create a new page."""
name = "new_page"
diff --git a/nikola/plugins/command/new_post.plugin b/nikola/plugins/command/new_post.plugin
index fec4b1d..d88469f 100644
--- a/nikola/plugins/command/new_post.plugin
+++ b/nikola/plugins/command/new_post.plugin
@@ -1,10 +1,13 @@
[Core]
-Name = new_post
-Module = new_post
+name = new_post
+module = new_post
[Documentation]
-Author = Roberto Alsina
-Version = 1.0
-Website = http://getnikola.com
-Description = Create a new post.
+author = Roberto Alsina
+version = 1.0
+website = http://getnikola.com
+description = Create a new post.
+
+[Nikola]
+plugincategory = Command
diff --git a/nikola/plugins/command/new_post.py b/nikola/plugins/command/new_post.py
index 5141c7e..f9fe3ff 100644
--- a/nikola/plugins/command/new_post.py
+++ b/nikola/plugins/command/new_post.py
@@ -24,6 +24,8 @@
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+"""Create a new post."""
+
from __future__ import unicode_literals, print_function
import io
import datetime
@@ -44,107 +46,8 @@ PAGELOGGER = utils.get_logger('new_page', utils.STDERR_HANDLER)
LOGGER = POSTLOGGER
-def filter_post_pages(compiler, is_post, compilers, post_pages, compiler_objs, compilers_raw):
- """Given a compiler ("markdown", "rest"), and whether it's meant for
- a post or a page, and compilers, return the correct entry from
- post_pages."""
-
- # First throw away all the post_pages with the wrong is_post
- filtered = [entry for entry in post_pages if entry[3] == is_post]
-
- # These are the extensions supported by the required format
- extensions = compilers.get(compiler)
- if extensions is None:
- if compiler in compiler_objs:
- LOGGER.error("There is a {0} compiler available, but it's not set in your COMPILERS option.".format(compiler))
- LOGGER.info("Read more: {0}".format(COMPILERS_DOC_LINK))
- else:
- LOGGER.error('Unknown format {0}'.format(compiler))
- print_compilers(compilers_raw, post_pages, compiler_objs)
- return False
-
- # Throw away the post_pages with the wrong extensions
- filtered = [entry for entry in filtered if any([ext in entry[0] for ext in
- extensions])]
-
- if not filtered:
- type_name = "post" if is_post else "page"
- LOGGER.error("Can't find a way, using your configuration, to create "
- "a {0} in format {1}. You may want to tweak "
- "COMPILERS or {2}S in conf.py".format(
- type_name, compiler, type_name.upper()))
- LOGGER.info("Read more: {0}".format(COMPILERS_DOC_LINK))
-
- return False
- return filtered[0]
-
-
-def print_compilers(compilers_raw, post_pages, compiler_objs):
- """
- List all available compilers in a human-friendly format.
-
- :param compilers_raw: The compilers dict, mapping compiler names to tuples of extensions
- :param post_pages: The post_pages structure
- :param compilers_objs: Compiler objects
- """
-
- # We use compilers_raw, because the normal dict can contain
- # garbage coming from the translation candidate implementation.
- # Entries are in format: (name, extensions, used_in_post_pages)
- parsed_compilers = {'used': [], 'unused': [], 'disabled': []}
-
- for compiler_name, compiler_obj in compiler_objs.items():
- fname = compiler_obj.friendly_name or compiler_name
- if compiler_name not in compilers_raw:
- parsed_compilers['disabled'].append((compiler_name, fname, (), False))
- else:
- # stolen from filter_post_pages
- extensions = compilers_raw[compiler_name]
- filtered = [entry for entry in post_pages if any(
- [ext in entry[0] for ext in extensions])]
- if filtered:
- parsed_compilers['used'].append((compiler_name, fname, extensions, True))
- else:
- parsed_compilers['unused'].append((compiler_name, fname, extensions, False))
-
- # Sort compilers alphabetically by name, just so it’s prettier (and
- # deterministic)
- parsed_compilers['used'].sort(key=operator.itemgetter(0))
- parsed_compilers['unused'].sort(key=operator.itemgetter(0))
- parsed_compilers['disabled'].sort(key=operator.itemgetter(0))
-
- # We also group the compilers by status for readability.
- parsed_list = parsed_compilers['used'] + parsed_compilers['unused'] + parsed_compilers['disabled']
-
- print("Available input formats:\n")
-
- name_width = max([len(i[0]) for i in parsed_list] + [4]) # 4 == len('NAME')
- fname_width = max([len(i[1]) for i in parsed_list] + [11]) # 11 == len('DESCRIPTION')
-
- print((' {0:<' + str(name_width) + '} {1:<' + str(fname_width) + '} EXTENSIONS\n').format('NAME', 'DESCRIPTION'))
-
- for name, fname, extensions, used in parsed_list:
- flag = ' ' if used else '!'
- flag = flag if extensions else '~'
-
- extensions = ', '.join(extensions) if extensions else '(disabled: not in COMPILERS)'
-
- print(('{flag}{name:<' + str(name_width) + '} {fname:<' + str(fname_width) + '} {extensions}').format(flag=flag, name=name, fname=fname, extensions=extensions))
-
- print("""
-More compilers are available in the Plugins Index.
-
-Compilers marked with ! and ~ require additional configuration:
- ! not in the PAGES/POSTS tuples (unused)
- ~ not in the COMPILERS dict (disabled)
-Read more: {0}""".format(COMPILERS_DOC_LINK))
-
-
def get_default_compiler(is_post, compilers, post_pages):
- """Given compilers and post_pages, return a reasonable
- default compiler for this kind of post/page.
- """
-
+ """Given compilers and post_pages, return a reasonable default compiler for this kind of post/page."""
# First throw away all the post_pages with the wrong is_post
filtered = [entry for entry in post_pages if entry[3] == is_post]
@@ -159,7 +62,7 @@ def get_default_compiler(is_post, compilers, post_pages):
def get_date(schedule=False, rule=None, last_date=None, tz=None, iso8601=False):
- """Returns a date stamp, given a recurrence rule.
+ """Return a date stamp, given a recurrence rule.
schedule - bool:
whether to use the recurrence rule or not
@@ -177,7 +80,6 @@ def get_date(schedule=False, rule=None, last_date=None, tz=None, iso8601=False):
whether to force ISO 8601 dates (instead of locale-specific ones)
"""
-
if tz is None:
tz = dateutil.tz.tzlocal()
date = now = datetime.datetime.now(tz)
@@ -212,6 +114,7 @@ def get_date(schedule=False, rule=None, last_date=None, tz=None, iso8601=False):
class CommandNewPost(Command):
+
"""Create a new post."""
name = "new_post"
@@ -333,7 +236,7 @@ class CommandNewPost(Command):
wants_available = options['available-formats']
if wants_available:
- print_compilers(self.site.config['_COMPILERS_RAW'], self.site.config['post_pages'], self.site.compilers)
+ self.print_compilers()
return
if is_page:
@@ -360,17 +263,13 @@ class CommandNewPost(Command):
if content_format not in compiler_names:
LOGGER.error("Unknown {0} format {1}, maybe you need to install a plugin?".format(content_type, content_format))
- print_compilers(self.site.config['_COMPILERS_RAW'], self.site.config['post_pages'], self.site.compilers)
+ self.print_compilers()
return
compiler_plugin = self.site.plugin_manager.getPluginByName(
content_format, "PageCompiler").plugin_object
# Guess where we should put this
- entry = filter_post_pages(content_format, is_post,
- self.site.config['COMPILERS'],
- self.site.config['post_pages'],
- self.site.compilers,
- self.site.config['_COMPILERS_RAW'])
+ entry = self.filter_post_pages(content_format, is_post)
if entry is False:
return 1
@@ -497,3 +396,122 @@ class CommandNewPost(Command):
subprocess.call(to_run)
else:
LOGGER.error('$EDITOR not set, cannot edit the post. Please do it manually.')
+
+ def filter_post_pages(self, compiler, is_post):
+ """Return the correct entry from post_pages.
+
+ Information based on:
+ * selected compilers
+ * available compilers
+ * post/page status
+ """
+ compilers = self.site.config['COMPILERS']
+ post_pages = self.site.config['post_pages']
+ compiler_objs = self.site.compilers
+
+ # First throw away all the post_pages with the wrong is_post
+ filtered = [entry for entry in post_pages if entry[3] == is_post]
+
+ # These are the extensions supported by the required format
+ extensions = compilers.get(compiler)
+ if extensions is None:
+ if compiler in compiler_objs:
+ LOGGER.error("There is a {0} compiler available, but it's not set in your COMPILERS option.".format(compiler))
+ LOGGER.info("Read more: {0}".format(COMPILERS_DOC_LINK))
+ else:
+ LOGGER.error('Unknown format {0}'.format(compiler))
+ self.print_compilers()
+ return False
+
+ # Throw away the post_pages with the wrong extensions
+ filtered = [entry for entry in filtered if any([ext in entry[0] for ext in
+ extensions])]
+
+ if not filtered:
+ type_name = "post" if is_post else "page"
+ LOGGER.error("Can't find a way, using your configuration, to create "
+ "a {0} in format {1}. You may want to tweak "
+ "COMPILERS or {2}S in conf.py".format(
+ type_name, compiler, type_name.upper()))
+ LOGGER.info("Read more: {0}".format(COMPILERS_DOC_LINK))
+
+ return False
+ return filtered[0]
+
+ def print_compilers(self):
+ """List all available compilers in a human-friendly format."""
+ # We use compilers_raw, because the normal dict can contain
+ # garbage coming from the translation candidate implementation.
+ # Entries are in format: (name, extensions, used_in_post_pages)
+
+ compilers_raw = self.site.config['_COMPILERS_RAW']
+
+ used_compilers = []
+ unused_compilers = []
+ disabled_compilers = []
+
+ for name, plugin in self.site.compilers.items():
+ if name in compilers_raw:
+ used_compilers.append([
+ name,
+ plugin.friendly_name or name,
+ compilers_raw[name],
+ True
+ ])
+ else:
+ disabled_compilers.append([
+ name,
+ plugin.friendly_name or name,
+ (),
+ False
+ ])
+
+ for name, (_, _, pi) in self.site.disabled_compilers.items():
+ if pi.details.has_option('Nikola', 'Friendlyname'):
+ f_name = pi.details.get('Nikola', 'Friendlyname')
+ else:
+ f_name = name
+ if name in compilers_raw:
+ unused_compilers.append([
+ name,
+ f_name,
+ compilers_raw[name],
+ False
+ ])
+ else:
+ disabled_compilers.append([
+ name,
+ f_name,
+ (),
+ False
+ ])
+
+ used_compilers.sort(key=operator.itemgetter(0))
+ unused_compilers.sort(key=operator.itemgetter(0))
+ disabled_compilers.sort(key=operator.itemgetter(0))
+
+ # We also group the compilers by status for readability.
+ parsed_list = used_compilers + unused_compilers + disabled_compilers
+
+ print("Available input formats:\n")
+
+ name_width = max([len(i[0]) for i in parsed_list] + [4]) # 4 == len('NAME')
+ fname_width = max([len(i[1]) for i in parsed_list] + [11]) # 11 == len('DESCRIPTION')
+
+ print((' {0:<' + str(name_width) + '} {1:<' + str(fname_width) + '} EXTENSIONS\n').format('NAME', 'DESCRIPTION'))
+
+ for name, fname, extensions, used in parsed_list:
+ flag = ' ' if used else '!'
+ flag = flag if extensions else '~'
+
+ extensions = ', '.join(extensions) if extensions else '(disabled: not in COMPILERS)'
+
+ print(('{flag}{name:<' + str(name_width) + '} {fname:<' + str(fname_width) + '} {extensions}').format(flag=flag, name=name, fname=fname, extensions=extensions))
+
+ print("""
+ More compilers are available in the Plugins Index.
+
+ Compilers marked with ! and ~ require additional configuration:
+ ! not in the PAGES/POSTS tuples (unused)
+ ~ not in the COMPILERS dict (disabled)
+ Read more: {0}""".format(COMPILERS_DOC_LINK))
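
[Editor's note] filter_post_pages (now a method, so it can reuse self.print_compilers() for error output) keeps only the post_pages entries whose wildcard contains one of the extensions registered for the requested compiler, and returns the first match. A self-contained sketch of that extension filter with made-up configuration values; it is illustrative, not the method above.

def pick_entry(compiler, is_post, compilers, post_pages):
    """Pick the first post_pages entry matching the compiler (sketch only)."""
    # Keep entries of the right kind: entry[3] is True for posts, False for pages.
    filtered = [entry for entry in post_pages if entry[3] == is_post]
    extensions = compilers.get(compiler, ())
    # Keep entries whose source wildcard mentions one of the extensions.
    filtered = [entry for entry in filtered
                if any(ext in entry[0] for ext in extensions)]
    return filtered[0] if filtered else None

compilers = {'rest': ('.rst', '.txt'), 'markdown': ('.md', '.mdown')}
post_pages = [
    ('posts/*.rst', 'posts', 'post.tmpl', True),
    ('stories/*.rst', 'stories', 'story.tmpl', False),
    ('posts/*.md', 'posts', 'post.tmpl', True),
]
print(pick_entry('markdown', True, compilers, post_pages))
# ('posts/*.md', 'posts', 'post.tmpl', True)
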
diff --git a/nikola/plugins/command/orphans.plugin b/nikola/plugins/command/orphans.plugin
index f491eaf..669429d 100644
--- a/nikola/plugins/command/orphans.plugin
+++ b/nikola/plugins/command/orphans.plugin
@@ -1,10 +1,13 @@
[Core]
-Name = orphans
-Module = orphans
+name = orphans
+module = orphans
[Documentation]
-Author = Roberto Alsina, Chris Warrick
-Version = 1.0
-Website = http://getnikola.com
-Description = List all orphans
+author = Roberto Alsina, Chris Warrick
+version = 1.0
+website = http://getnikola.com
+description = List all orphans
+
+[Nikola]
+plugincategory = Command
diff --git a/nikola/plugins/command/orphans.py b/nikola/plugins/command/orphans.py
index f550e17..b12cc67 100644
--- a/nikola/plugins/command/orphans.py
+++ b/nikola/plugins/command/orphans.py
@@ -24,6 +24,8 @@
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+"""List all orphans."""
+
from __future__ import print_function
import os
@@ -32,6 +34,9 @@ from nikola.plugins.command.check import real_scan_files
class CommandOrphans(Command):
+
+ """List all orphans."""
+
name = "orphans"
doc_purpose = "list all orphans"
doc_description = """\
@@ -41,5 +46,6 @@ but are not generated by Nikola.
Output contains filenames only (it is passable to `xargs rm` or the like)."""
def _execute(self, options, args):
+ """Run the orphans command."""
orphans = real_scan_files(self.site)[0]
print('\n'.join([p for p in orphans if not os.path.isdir(p)]))
diff --git a/nikola/plugins/command/plugin.plugin b/nikola/plugins/command/plugin.plugin
index 2815caa..d44dcf3 100644
--- a/nikola/plugins/command/plugin.plugin
+++ b/nikola/plugins/command/plugin.plugin
@@ -1,10 +1,13 @@
[Core]
-Name = plugin
-Module = plugin
+name = plugin
+module = plugin
[Documentation]
-Author = Roberto Alsina and Chris Warrick
-Version = 1.0
-Website = http://getnikola.com
-Description = Manage Nikola plugins
+author = Roberto Alsina and Chris Warrick
+version = 1.0
+website = http://getnikola.com
+description = Manage Nikola plugins
+
+[Nikola]
+plugincategory = Command
diff --git a/nikola/plugins/command/plugin.py b/nikola/plugins/command/plugin.py
index 56eb1d7..f892ee9 100644
--- a/nikola/plugins/command/plugin.py
+++ b/nikola/plugins/command/plugin.py
@@ -24,12 +24,14 @@
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+"""Manage plugins."""
+
from __future__ import print_function
import io
import os
import shutil
import subprocess
-import sys
+import time
import requests
import pygments
@@ -43,6 +45,7 @@ LOGGER = utils.get_logger('plugin', utils.STDERR_HANDLER)
class CommandPlugin(Command):
+
"""Manage plugins."""
json = None
@@ -119,6 +122,7 @@ class CommandPlugin(Command):
upgrade = options.get('upgrade')
list_available = options.get('list')
list_installed = options.get('list_installed')
+ show_install_notes = options.get('show_install_notes', True)
command_count = [bool(x) for x in (
install,
uninstall,
@@ -127,37 +131,42 @@ class CommandPlugin(Command):
list_installed)].count(True)
if command_count > 1 or command_count == 0:
print(self.help())
- return
+ return 2
- if not self.site.configured and not user_mode and install:
- LOGGER.notice('No site found, assuming --user')
- user_mode = True
-
- if user_mode:
- self.output_dir = os.path.expanduser('~/.nikola/plugins')
+ if options.get('output_dir') is not None:
+ self.output_dir = options.get('output_dir')
else:
- self.output_dir = 'plugins'
+ if not self.site.configured and not user_mode and install:
+ LOGGER.notice('No site found, assuming --user')
+ user_mode = True
+
+ if user_mode:
+ self.output_dir = os.path.expanduser('~/.nikola/plugins')
+ else:
+ self.output_dir = 'plugins'
if list_available:
- self.list_available(url)
+ return self.list_available(url)
elif list_installed:
- self.list_installed()
+ return self.list_installed()
elif upgrade:
- self.do_upgrade(url)
+ return self.do_upgrade(url)
elif uninstall:
- self.do_uninstall(uninstall)
+ return self.do_uninstall(uninstall)
elif install:
- self.do_install(url, install)
+ return self.do_install(url, install, show_install_notes)
def list_available(self, url):
+ """List all available plugins."""
data = self.get_json(url)
print("Available Plugins:")
print("------------------")
for plugin in sorted(data.keys()):
print(plugin)
- return True
+ return 0
def list_installed(self):
+ """List installed plugins."""
plugins = []
for plugin in self.site.plugin_manager.getAllPlugins():
p = plugin.path
@@ -170,8 +179,10 @@ class CommandPlugin(Command):
plugins.sort()
for name, path in plugins:
print('{0} at {1}'.format(name, path))
+ return 0
def do_upgrade(self, url):
+ """Upgrade all installed plugins."""
LOGGER.warning('This is not very smart, it just reinstalls some plugins and hopes for the best')
data = self.get_json(url)
plugins = []
@@ -194,18 +205,29 @@ class CommandPlugin(Command):
break
elif tail == '':
LOGGER.error("Can't find the plugins folder for path: {0}".format(p))
- return False
+ return 1
else:
path = tail
self.do_install(url, name)
+ return 0
- def do_install(self, url, name):
+ def do_install(self, url, name, show_install_notes=True):
+ """Download and install a plugin."""
data = self.get_json(url)
if name in data:
utils.makedirs(self.output_dir)
- LOGGER.info('Downloading: ' + data[name])
+ url = data[name]
+ LOGGER.info("Downloading '{0}'".format(url))
+ try:
+ zip_data = requests.get(url).content
+ except requests.exceptions.SSLError:
+ LOGGER.warning("SSL error, using http instead of https (press ^C to abort)")
+ time.sleep(1)
+ url = url.replace('https', 'http', 1)
+ zip_data = requests.get(url).content
+
zip_file = io.BytesIO()
- zip_file.write(requests.get(data[name]).content)
+ zip_file.write(zip_data)
LOGGER.info('Extracting: {0} into {1}/'.format(name, self.output_dir))
utils.extract_all(zip_file, self.output_dir)
dest_path = os.path.join(self.output_dir, name)
@@ -214,13 +236,13 @@ class CommandPlugin(Command):
plugin_path = utils.get_plugin_path(name)
except:
LOGGER.error("Can't find plugin " + name)
- return False
+ return 1
utils.makedirs(self.output_dir)
dest_path = os.path.join(self.output_dir, name)
if os.path.exists(dest_path):
LOGGER.error("{0} is already installed".format(name))
- return False
+ return 1
LOGGER.info('Copying {0} into plugins'.format(plugin_path))
shutil.copytree(plugin_path, dest_path)
@@ -256,7 +278,7 @@ class CommandPlugin(Command):
print('You have to install those yourself or through a package '
'manager.')
confpypath = os.path.join(dest_path, 'conf.py.sample')
- if os.path.exists(confpypath):
+ if os.path.exists(confpypath) and show_install_notes:
LOGGER.notice('This plugin has a sample config file. Integrate it with yours in order to make this plugin work!')
print('Contents of the conf.py.sample file:\n')
with io.open(confpypath, 'r', encoding='utf-8') as fh:
@@ -266,9 +288,10 @@ class CommandPlugin(Command):
4 * ' '))
else:
print(utils.indent(fh.read(), 4 * ' '))
- return True
+ return 0
def do_uninstall(self, name):
+ """Uninstall a plugin."""
for plugin in self.site.plugin_manager.getAllPlugins(): # FIXME: this is repeated thrice
p = plugin.path
if os.path.isdir(p):
@@ -278,16 +301,23 @@ class CommandPlugin(Command):
if name == plugin.name: # Uninstall this one
LOGGER.warning('About to uninstall plugin: {0}'.format(name))
LOGGER.warning('This will delete {0}'.format(p))
- inpf = raw_input if sys.version_info[0] == 2 else input
- sure = inpf('Are you sure? [y/n] ')
- if sure.lower().startswith('y'):
+ sure = utils.ask_yesno('Are you sure?')
+ if sure:
LOGGER.warning('Removing {0}'.format(p))
shutil.rmtree(p)
- return True
+ return 0
+ return 1
LOGGER.error('Unknown plugin: {0}'.format(name))
- return False
+ return 1
def get_json(self, url):
+ """Download the JSON file with all plugins."""
if self.json is None:
- self.json = requests.get(url).json()
+ try:
+ self.json = requests.get(url).json()
+ except requests.exceptions.SSLError:
+ LOGGER.warning("SSL error, using http instead of https (press ^C to abort)")
+ time.sleep(1)
+ url = url.replace('https', 'http', 1)
+ self.json = requests.get(url).json()
return self.json
diff --git a/nikola/plugins/command/rst2html.plugin b/nikola/plugins/command/rst2html.plugin
index 0d0d3b0..02c9276 100644
--- a/nikola/plugins/command/rst2html.plugin
+++ b/nikola/plugins/command/rst2html.plugin
@@ -1,9 +1,13 @@
[Core]
-Name = rst2html
-Module = rst2html
+name = rst2html
+module = rst2html
[Documentation]
-Author = Chris Warrick
-Version = 1.0
-Website = http://getnikola.com
-Description = Compile reStructuredText to HTML using the Nikola architecture
+author = Chris Warrick
+version = 1.0
+website = http://getnikola.com
+description = Compile reStructuredText to HTML using the Nikola architecture
+
+[Nikola]
+plugincategory = Command
+
diff --git a/nikola/plugins/command/rst2html/__init__.py b/nikola/plugins/command/rst2html/__init__.py
index 342aaeb..06afffd 100644
--- a/nikola/plugins/command/rst2html/__init__.py
+++ b/nikola/plugins/command/rst2html/__init__.py
@@ -24,6 +24,8 @@
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+"""Compile reStructuredText to HTML, using Nikola architecture."""
+
from __future__ import unicode_literals, print_function
import io
@@ -34,6 +36,7 @@ from nikola.plugin_categories import Command
class CommandRst2Html(Command):
+
"""Compile reStructuredText to HTML, using Nikola architecture."""
name = "rst2html"
diff --git a/nikola/plugins/command/serve.plugin b/nikola/plugins/command/serve.plugin
index 0c1176d..aca71ec 100644
--- a/nikola/plugins/command/serve.plugin
+++ b/nikola/plugins/command/serve.plugin
@@ -1,10 +1,13 @@
[Core]
-Name = serve
-Module = serve
+name = serve
+module = serve
[Documentation]
-Author = Roberto Alsina
-Version = 1.0
-Website = http://getnikola.com
-Description = Start test server.
+author = Roberto Alsina
+version = 1.0
+website = http://getnikola.com
+description = Start test server.
+
+[Nikola]
+plugincategory = Command
diff --git a/nikola/plugins/command/serve.py b/nikola/plugins/command/serve.py
index 0e4d01f..0441c93 100644
--- a/nikola/plugins/command/serve.py
+++ b/nikola/plugins/command/serve.py
@@ -24,8 +24,11 @@
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+"""Start test server."""
+
from __future__ import print_function
import os
+import re
import socket
import webbrowser
try:
@@ -35,16 +38,25 @@ except ImportError:
from http.server import HTTPServer # NOQA
from http.server import SimpleHTTPRequestHandler # NOQA
+try:
+ from StringIO import StringIO
+except ImportError:
+ from io import BytesIO as StringIO # NOQA
+
+
from nikola.plugin_categories import Command
-from nikola.utils import get_logger
+from nikola.utils import get_logger, STDERR_HANDLER
class IPv6Server(HTTPServer):
+
"""An IPv6 HTTPServer."""
+
address_family = socket.AF_INET6
class CommandServe(Command):
+
"""Start test server."""
name = "serve"
@@ -70,6 +82,14 @@ class CommandServe(Command):
'help': 'Address to bind (default: 0.0.0.0 – all local IPv4 interfaces)',
},
{
+ 'name': 'detach',
+ 'short': 'd',
+ 'long': 'detach',
+ 'type': bool,
+ 'default': False,
+ 'help': 'Detach from TTY (work in the background)',
+ },
+ {
'name': 'browser',
'short': 'b',
'long': 'browser',
@@ -89,7 +109,7 @@ class CommandServe(Command):
def _execute(self, options, args):
"""Start test server."""
- self.logger = get_logger('serve', self.site.loghandlers)
+ self.logger = get_logger('serve', STDERR_HANDLER)
out_dir = self.site.config['OUTPUT_FOLDER']
if not os.path.isdir(out_dir):
self.logger.error("Missing '{0}' folder?".format(out_dir))
@@ -117,16 +137,42 @@ class CommandServe(Command):
server_url = "http://{0}:{1}/".format(*sa)
self.logger.info("Opening {0} in the default web browser...".format(server_url))
webbrowser.open(server_url)
- try:
- httpd.serve_forever()
- except KeyboardInterrupt:
- self.logger.info("Server is shutting down.")
- return 130
+ if options['detach']:
+ OurHTTPRequestHandler.quiet = True
+ try:
+ pid = os.fork()
+ if pid == 0:
+ httpd.serve_forever()
+ else:
+ self.logger.info("Detached with PID {0}. Run `kill {0}` to stop the server.".format(pid))
+ except AttributeError as e:
+ if os.name == 'nt':
+ self.logger.warning("Detaching is not available on Windows, server is running in the foreground.")
+ else:
+ raise e
+ else:
+ try:
+ httpd.serve_forever()
+ except KeyboardInterrupt:
+ self.logger.info("Server is shutting down.")
+ return 130
class OurHTTPRequestHandler(SimpleHTTPRequestHandler):
+
+ """A request handler, modified for Nikola."""
+
extensions_map = dict(SimpleHTTPRequestHandler.extensions_map)
extensions_map[""] = "text/plain"
+ quiet = False
+
+ def log_message(self, *args):
+ """Log messages. Or not, depending on a setting."""
+ if self.quiet:
+ return
+ else:
+ # Old-style class in Python 2.7, cannot use super()
+ return SimpleHTTPRequestHandler.log_message(self, *args)
# NOTICE: this is a patched version of send_head() to disable all sorts of
# caching. `nikola serve` is a development server, hence caching should
@@ -182,14 +228,31 @@ class OurHTTPRequestHandler(SimpleHTTPRequestHandler):
except IOError:
self.send_error(404, "File not found")
return None
+
+ filtered_bytes = None
+ if ctype == 'text/html':
+ # Comment out any <base> to allow local resolution of relative URLs.
+ data = f.read().decode('utf8')
+ f.close()
+ data = re.sub(r'<base\s([^>]*)>', '<!--base \g<1>-->', data, re.IGNORECASE)
+ data = data.encode('utf8')
+ f = StringIO()
+ f.write(data)
+ filtered_bytes = len(data)
+ f.seek(0)
+
self.send_response(200)
self.send_header("Content-type", ctype)
if os.path.splitext(path)[1] == '.svgz':
# Special handling for svgz to make it work nice with browsers.
self.send_header("Content-Encoding", 'gzip')
- fs = os.fstat(f.fileno())
- self.send_header("Content-Length", str(fs[6]))
- self.send_header("Last-Modified", self.date_time_string(fs.st_mtime))
+
+ if filtered_bytes is None:
+ fs = os.fstat(f.fileno())
+ self.send_header('Content-Length', str(fs[6]))
+ else:
+ self.send_header('Content-Length', filtered_bytes)
+
# begin no-cache patch
# For standard requests.
self.send_header("Cache-Control", "no-cache, no-store, "
diff --git a/nikola/plugins/command/status.plugin b/nikola/plugins/command/status.plugin
index e02da8b..91390d2 100644
--- a/nikola/plugins/command/status.plugin
+++ b/nikola/plugins/command/status.plugin
@@ -1,9 +1,13 @@
[Core]
-Name = status
-Module = status
+name = status
+module = status
[Documentation]
-Author = Daniel Aleksandersen
-Version = 1.0
-Website = https://getnikola.com
-Description = Site status
+author = Daniel Aleksandersen
+version = 1.0
+website = https://getnikola.com
+description = Site status
+
+[Nikola]
+plugincategory = Command
+
diff --git a/nikola/plugins/command/status.py b/nikola/plugins/command/status.py
index b8a6a60..55e7f95 100644
--- a/nikola/plugins/command/status.py
+++ b/nikola/plugins/command/status.py
@@ -24,6 +24,8 @@
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+"""Display site status."""
+
from __future__ import print_function
import io
import os
@@ -33,8 +35,10 @@ from dateutil.tz import gettz, tzlocal
from nikola.plugin_categories import Command
-class CommandDeploy(Command):
- """ Site status. """
+class CommandStatus(Command):
+
+ """Display site status."""
+
name = "status"
doc_purpose = "display site status"
@@ -69,7 +73,7 @@ class CommandDeploy(Command):
]
def _execute(self, options, args):
-
+ """Display site status."""
self.site.scan_posts()
timestamp_path = os.path.join(self.site.config["CACHE_FOLDER"], "lastdeploy")
@@ -128,6 +132,7 @@ class CommandDeploy(Command):
print("{0} posts in total, {1} scheduled, and {2} drafts.".format(posts_count, len(posts_scheduled), len(posts_drafts)))
def human_time(self, dt):
+ """Translate time into a human-friendly representation."""
days = dt.days
hours = dt.seconds / 60 // 60
minutes = dt.seconds / 60 - (hours * 60)
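
[Editor's note] human_time turns the timedelta between "now" and the last deployment into days, hours and minutes for display. A hedged sketch of the same arithmetic using divmod; the output format is illustrative and does not reproduce the exact strings the status command prints.

import datetime

def human_time(dt):
    """Render a timedelta as a rough 'Xd Yh Zm' string (illustrative sketch)."""
    hours, remainder = divmod(dt.seconds, 3600)
    minutes = remainder // 60
    if dt.days:
        return '{0}d {1}h {2}m'.format(dt.days, hours, minutes)
    return '{0}h {1}m'.format(hours, minutes)

print(human_time(datetime.timedelta(days=2, hours=3, minutes=20)))  # 2d 3h 20m
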
diff --git a/nikola/plugins/command/version.plugin b/nikola/plugins/command/version.plugin
index a3f58e8..4708bdb 100644
--- a/nikola/plugins/command/version.plugin
+++ b/nikola/plugins/command/version.plugin
@@ -1,9 +1,13 @@
[Core]
-Name = version
-Module = version
+name = version
+module = version
[Documentation]
-Author = Roberto Alsina
-Version = 1.0
-Website = http://getnikola.com
-Description = Show nikola version
+author = Roberto Alsina
+version = 1.0
+website = http://getnikola.com
+description = Show nikola version
+
+[Nikola]
+plugincategory = Command
+
diff --git a/nikola/plugins/command/version.py b/nikola/plugins/command/version.py
index b6520d7..ad08f64 100644
--- a/nikola/plugins/command/version.py
+++ b/nikola/plugins/command/version.py
@@ -24,6 +24,8 @@
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+"""Print Nikola version."""
+
from __future__ import print_function
import lxml
@@ -36,7 +38,8 @@ URL = 'https://pypi.python.org/pypi?:action=doap&name=Nikola'
class CommandVersion(Command):
- """Print the version."""
+
+ """Print Nikola version."""
name = "version"