aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorLibravatar Krytarik Raido <krytarik@tuxgarage.com>2018-04-03 06:50:04 +0200
committerLibravatar Krytarik Raido <krytarik@tuxgarage.com>2018-04-03 06:50:04 +0200
commitdc580be8f9ef38a1c0903820b04e1b5c7217da16 (patch)
tree4a214d88d3e094efdb9e4ff70920537a4d33ae9b
parent23ac25c0b388b5ffebf66154b12a3950b89b977a (diff)
downloadmini-dinstall-dc580be8f9ef38a1c0903820b04e1b5c7217da16.tar.bz2
mini-dinstall-dc580be8f9ef38a1c0903820b04e1b5c7217da16.tar.xz
mini-dinstall-dc580be8f9ef38a1c0903820b04e1b5c7217da16.tar.zst
Various improvements in coding style.
-rwxr-xr-xmini-dinstall742
-rw-r--r--minidinstall/ChangeFile.py77
-rw-r--r--minidinstall/DebianSigVerifier.py13
-rw-r--r--minidinstall/Dnotify.py49
-rwxr-xr-xminidinstall/DpkgControl.py124
-rw-r--r--minidinstall/DpkgDatalist.py31
-rw-r--r--minidinstall/GPGSigVerifier.py25
-rw-r--r--minidinstall/OrderedDict.py11
-rwxr-xr-xminidinstall/SafeWriteFile.py12
-rwxr-xr-xminidinstall/SignedFile.py32
-rw-r--r--minidinstall/mail.py24
-rw-r--r--minidinstall/misc.py30
-rw-r--r--minidinstall/tweet.py17
13 files changed, 559 insertions, 628 deletions
diff --git a/mini-dinstall b/mini-dinstall
index da881ab..92db848 100755
--- a/mini-dinstall
+++ b/mini-dinstall
@@ -1,8 +1,10 @@
#!/usr/bin/python3
# -*- mode: python; coding: utf-8 -*-
-# Miniature version of "dinstall", for installing .changes into an
-# archive
-# Copyright © 2002,2003 Colin Walters <walters@gnu.org>
+
+# Miniature version of "dinstall", for installing .changes
+# into an archive
+
+# Copyright (c) 2002, 2003 Colin Walters <walters@gnu.org>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
@@ -19,7 +21,7 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import os, sys, re, glob, getopt, time, traceback, lzma, getpass, socket
-import shutil, signal, threading, select, queue, socketserver, datetime
+import shutil, threading, select, queue, socketserver, datetime
import logging, logging.handlers
#logging.basicConfig()
import apt_pkg
@@ -31,9 +33,7 @@ from minidinstall.Dnotify import *
from minidinstall.DebianSigVerifier import *
from minidinstall.GPGSigVerifier import *
from minidinstall.version import *
-import minidinstall.misc
-import minidinstall.mail
-import minidinstall.tweet
+from minidinstall import misc, mail, tweet
debchanges_re = re.compile('([-a-z0-9+.]+)_(.+?)_([-a-zA-Z0-9]+)\.changes$')
debpackage_re = re.compile('([-a-z0-9+.]+)_(.+?)_([-a-zA-Z0-9]+)\.(d|u)?deb$')
@@ -42,8 +42,7 @@ debsrc_dsc_re = re.compile('([-a-z0-9+.]+)_(.+?)\.dsc$')
debsrc_diff_re = re.compile('([-a-z0-9+.]+)_(.+?)\.diff\.gz$')
debsrc_orig_re = re.compile('([-a-z0-9+.]+)_(.+?)\.orig[-a-z0-9]*\.tar\.(gz|bz2|lzma|xz)(\.asc)?$')
debsrc_native_re = re.compile('([-a-z0-9+.]+)_(.+?)\.tar\.(gz|bz2|lzma|xz)$')
-
-native_version_re = re.compile('\s*.*-');
+native_version_re = re.compile('\s*.*-')
toplevel_directory = None
tmp_new_suffix = '.dinstall-new'
@@ -53,13 +52,13 @@ incoming_subdir = 'incoming'
socket_name = 'master'
logfile_name = 'mini-dinstall.log'
configfile_names = ['/etc/mini-dinstall.conf', '~/.mini-dinstall.conf']
-use_dnotify = 0
-mail_on_success = 1
-tweet_on_success = 0
+use_dnotify = False
+mail_on_success = True
+tweet_on_success = False
default_poll_time = 30
-default_max_retry_time = 2 * 24 * 60 * 60
+default_max_retry_time = 60 * 60 * 24 * 2
default_mail_log_level = logging.ERROR
-trigger_reindex = 1
+trigger_reindex = True
mail_log_flush_level = logging.ERROR
mail_log_flush_count = 10
mail_to = getpass.getuser()
@@ -70,74 +69,78 @@ tweet_user = None
tweet_password = None
default_architectures = ["all", "i386"]
-default_distributions = ("unstable",)
+default_distributions = ["unstable"]
distributions = {}
+hashes = ['sha256']
scantime = 60
mail_subject_template = "mini-dinstall: Successfully installed %(source)s %(version)s to %(distribution)s"
-mail_body_template = """Package: %(source)s
+mail_body_template = """
+Package: %(source)s
Maintainer: %(maintainer)s
Changed-By: %(changed-by)s
Changes:
%(changes_without_dot)s
-"""
-
+""".lstrip()
tweet_template = "Installed %(source)s %(version)s to %(distribution)s"
-def usage(ecode, ver_only=None):
+def usage(ecode, ver_only=False):
print("mini-dinstall", pkg_version)
if ver_only:
sys.exit(ecode)
- print("Copyright (C) 2002 Colin Walters <walters@gnu.org>")
- print("Licensed under the GNU GPL.")
- print("Usage: mini-dinstall [OPTIONS...] [DIRECTORY]")
- print("Options:")
- print(" -v, --verbose\t\tDisplay extra information")
- print(" -q, --quiet\t\tDisplay less information")
- print(" -c, --config=FILE\tParse configuration info from FILE")
- print(" -d, --debug\t\tOutput information to stdout as well as log")
- print(" --no-log\t\tDon't write information to log file")
- print(" -n, --no-act\t\tDon't actually perform changes")
- print(" -b, --batch\t\tDon't daemonize; run once, then exit")
- print(" -r, --run\t\tProcess queue immediately")
- print(" -k, --kill\t\tKill the running mini-dinstall")
- print(" --no-db\t\tDisable lookups on package database")
- print(" --help\t\tWhat you're looking at")
- print(" --version\t\tPrint the software version and exit")
+ print("""
+Copyright (c) 2002 Colin Walters <walters@gnu.org>
+Licensed under the GNU GPL.
+Usage: mini-dinstall [OPTIONS...] [DIRECTORY]
+Options:
+ -v, --verbose Display extra information
+ -q, --quiet Display less information
+ -c, --config=FILE Parse configuration info from FILE
+ -d, --debug Output information to stdout as well as log
+ --no-log Don't write information to log file
+ -n, --no-act Don't actually perform changes
+ -b, --batch Don't daemonize; run once, then exit
+ -r, --run Process queue immediately
+ -k, --kill Kill the running mini-dinstall
+ --no-db Disable lookups on package database
+ --help What you're looking at
+ --version Print the software version and exit
+""".strip())
sys.exit(ecode)
try:
- opts, args = getopt.getopt(sys.argv[1:], 'vqc:dnbrk',
- ['verbose', 'quiet', 'config=', 'debug', 'no-log',
- 'no-act', 'batch', 'run', 'kill', 'no-db', 'help', 'version', ])
+ (opts, args) = getopt.getopt(sys.argv[1:], 'vqc:dnbrk',
+ ['verbose', 'quiet', 'config=', 'debug', 'no-log',
+ 'no-act', 'batch', 'run', 'kill', 'no-db', 'help', 'version'])
except getopt.GetoptError as e:
sys.stderr.write("Error reading arguments: %s\n" % e)
usage(1)
-for (key, val) in opts:
+for (key, value) in opts:
if key == '--help':
usage(0)
elif key == '--version':
- usage(0, ver_only=1)
+ usage(0, ver_only=True)
if len(args) > 1:
sys.stderr.write("Unknown arguments: %s\n" % args[1:])
usage(1)
# don't propagate exceptions that happen while logging
-logging.raiseExceptions = 0
+logging.raiseExceptions = False
logger = logging.getLogger("mini-dinstall")
loglevel = logging.WARN
-no_act = 0
-debug_mode = 0
-run_mode = 0
-kill_mode = 0
-nodb_mode = 0
-no_log = 0
-batch_mode = 0
-custom_config_files = 0
-for key, val in opts:
+no_act = False
+debug_mode = False
+run_mode = False
+kill_mode = False
+nodb_mode = False
+no_log = False
+batch_mode = False
+custom_config_files = False
+
+for (key, value) in opts:
if key in ('-v', '--verbose'):
if loglevel == logging.INFO:
loglevel = logging.DEBUG
@@ -150,23 +153,23 @@ for key, val in opts:
loglevel = logging.CRITICAL
elif key in ('-c', '--config'):
if not custom_config_files:
- custom_config_files = 1
+ custom_config_files = True
configfile_names = []
configfile_names.append(os.path.abspath(os.path.expanduser(val)))
elif key in ('-n', '--no-act'):
- no_act = 1
+ no_act = True
elif key in ('-d', '--debug'):
- debug_mode = 1
+ debug_mode = True
elif key in ('--no-log',):
- no_log = 1
+ no_log = True
elif key in ('-b', '--batch'):
- batch_mode = 1
+ batch_mode = True
elif key in ('-r', '--run'):
- run_mode = 1
+ run_mode = True
elif key in ('-k', '--kill'):
- kill_mode = 1
- elif key in ('--no-db'):
- nodb_mode = 1
+ kill_mode = True
+ elif key in ('--no-db',):
+ nodb_mode = True
def do_and_log(msg, function, *args):
try:
@@ -180,7 +183,7 @@ def do_mkdir(name):
if os.access(name, os.X_OK):
return
try:
- do_and_log('Creating directory "%s"' % (name), os.mkdir, name)
+ do_and_log('Creating directory "%s"' % name, os.mkdir, name)
except OSError as e:
print(e)
exit(1)
@@ -202,7 +205,7 @@ stderr_handler.setFormatter(logging.Formatter(fmt="%(name)s [%(thread)d] %(level
configp = ConfigParser()
configfile_names = [os.path.abspath(os.path.expanduser(x)) for x in configfile_names]
-logger.debug("Reading config files: %s" % (configfile_names,))
+logger.debug("Reading config files: %s" % configfile_names)
configp.read(configfile_names)
class SubjectSpecifyingLoggingSMTPHandler(logging.handlers.SMTPHandler):
@@ -228,23 +231,22 @@ if not (configp.has_option('DEFAULT', 'mail_log_level') and configp.get('DEFAULT
mail_log_flush_count = configp.getint('DEFAULT', 'mail_log_flush_count')
if configp.has_option('DEFAULT', 'mail_log_flush_level'):
mail_log_flush_level = logging.__dict__[configp.get('DEFAULT', 'mail_log_flush_level')]
- mail_smtp_handler = SubjectSpecifyingLoggingSMTPHandler(mail_server, 'Mini-Dinstall <%s@%s>' % (getpass.getuser(),socket.getfqdn()), [mail_to])
+ mail_smtp_handler = SubjectSpecifyingLoggingSMTPHandler(mail_server, 'Mini-Dinstall <%s@%s>' % (getpass.getuser(), socket.getfqdn()), [mail_to])
mail_smtp_handler.setSubject('mini-dinstall log notice (%l)')
mail_handler = logging.handlers.MemoryHandler(mail_log_flush_count, flushLevel=mail_log_flush_level, target=mail_smtp_handler)
-
mail_handler.setLevel(mail_log_level)
logger.addHandler(mail_handler)
if configp.has_option('DEFAULT', 'archivedir'):
toplevel_directory = os.path.expanduser(configp.get('DEFAULT', 'archivedir'))
-elif len(args) > 0:
+elif args:
toplevel_directory = args[0]
else:
logger.error("No archivedir specified on command line or in config files.")
sys.exit(1)
if configp.has_option('DEFAULT', 'incoming_permissions'):
- incoming_permissions = int(configp.get('DEFAULT', 'incoming_permissions'), 8)
+ incoming_permissions = configp.getint('DEFAULT', 'incoming_permissions')
do_mkdir(toplevel_directory)
dinstall_subdir = os.path.join(toplevel_directory, dinstall_subdir)
@@ -256,8 +258,8 @@ def process_exists(pid):
try:
os.kill(pid, 0)
except OSError as e:
- return 0
- return 1
+ return False
+ return True
if os.access(lockfilename, os.R_OK):
pid = int(open(lockfilename).read())
@@ -265,7 +267,7 @@ if os.access(lockfilename, os.R_OK):
if run_mode:
logger.error("No process running at %d; use mini-dinstall -k to remove lockfile")
sys.exit(1)
- logger.warn("No process running at %d, removing lockfile" % (pid,))
+ logger.warn("No process running at %d, removing lockfile" % pid)
os.unlink(lockfilename)
if kill_mode:
sys.exit(0)
@@ -294,7 +296,7 @@ if configp.has_option('DEFAULT', 'logfile'):
if not no_log:
if not os.path.isabs(logfile_name):
logfile_name = os.path.join(dinstall_subdir, logfile_name)
- logger.debug("Adding log file: %s" % (logfile_name,))
+ logger.debug("Adding log file: %s" % logfile_name)
filehandler = logging.FileHandler(logfile_name)
if loglevel == logging.WARN:
filehandler.setLevel(logging.INFO)
@@ -303,7 +305,7 @@ if not no_log:
logger.addHandler(filehandler)
filehandler.setFormatter(logging.Formatter(fmt="%(asctime)s %(name)s [%(thread)d] %(levelname)s: %(message)s", datefmt="%b %d %H:%M:%S"))
-logger.info('Booting mini-dinstall ' + pkg_version)
+logger.info('Booting mini-dinstall %s' % pkg_version)
class DinstallException(Exception):
def __init__(self, value):
@@ -317,11 +319,11 @@ if not configp.has_option('DEFAULT', 'archive_style'):
sys.exit(1)
default_verify_sigs = os.access('/usr/share/keyrings/debian-keyring.gpg', os.R_OK)
+default_keyrings = []
default_extra_keyrings = []
-default_keyrings = None
if configp.has_option('DEFAULT', 'architectures'):
- default_architectures = configp.get('DEFAULT', 'architectures').split(', ')
+ default_architectures = re.split(', *', configp.get('DEFAULT', 'architectures'))
if configp.has_option('DEFAULT', 'verify_sigs'):
default_verify_sigs = configp.getboolean('DEFAULT', 'verify_sigs')
if configp.has_option('DEFAULT', 'trigger_reindex'):
@@ -332,36 +334,35 @@ if configp.has_option('DEFAULT', 'max_retry_time'):
default_max_retry_time = configp.getint('DEFAULT', 'max_retry_time')
if configp.has_option('DEFAULT', 'expire_release_files'):
expire_release_files = configp.getboolean('DEFAULT', 'expire_release_files')
-if configp.has_option('DEFAULT', 'extra_keyrings'):
- default_extra_keyrings = re.split(', ?', configp.get('DEFAULT', 'extra_keyrings'))
if configp.has_option('DEFAULT', 'keyids'):
- keyids = configp.get('DEFAULT', 'keyids').split(', ')
+ keyids = re.split(', *', configp.get('DEFAULT', 'keyids'))
if configp.has_option('DEFAULT', 'keyrings'):
- default_keyrings = re.split(', ?', configp.get('DEFAULT', 'keyrings'))
+ default_keyrings = re.split(', *', configp.get('DEFAULT', 'keyrings'))
+if configp.has_option('DEFAULT', 'extra_keyrings'):
+ default_extra_keyrings = re.split(', *', configp.get('DEFAULT', 'extra_keyrings'))
if configp.has_option('DEFAULT', 'use_byhash'):
use_byhash = configp.getboolean('DEFAULT', 'use_byhash')
if configp.has_option('DEFAULT', 'use_dnotify'):
use_dnotify = configp.getboolean('DEFAULT', 'use_dnotify')
if configp.has_option('DEFAULT', 'mail_subject_template'):
- mail_subject_template = configp.get('DEFAULT', 'mail_subject_template', 1)
+ mail_subject_template = configp.get('DEFAULT', 'mail_subject_template')
if configp.has_option('DEFAULT', 'mail_body_template'):
- mail_body_template = configp.get('DEFAULT', 'mail_body_template', 1)
+ mail_body_template = configp.get('DEFAULT', 'mail_body_template')
if configp.has_option('DEFAULT', 'tweet_template'):
- tweet_template = configp.get('DEFAULT', 'tweet_template', 1)
-
+ tweet_template = configp.get('DEFAULT', 'tweet_template')
if configp.has_option('DEFAULT', 'tweet_server'):
- tweet_server = configp.get('DEFAULT', 'tweet_server', 1)
+ tweet_server = configp.get('DEFAULT', 'tweet_server')
if configp.has_option('DEFAULT', 'tweet_user'):
- tweet_user = configp.get('DEFAULT', 'tweet_user', 1)
+ tweet_user = configp.get('DEFAULT', 'tweet_user')
if configp.has_option('DEFAULT', 'tweet_password'):
- tweet_password = configp.get('DEFAULT', 'tweet_password', 1)
+ tweet_password = configp.get('DEFAULT', 'tweet_password')
sects = configp.sections()
-if not len(sects) == 0:
+if sects:
for sect in sects:
distributions[sect] = {}
if configp.has_option(sect, "architectures"):
- distributions[sect]["arches"] = configp.get(sect, "architectures").split(', ')
+ distributions[sect]["arches"] = re.split(', *', configp.get(sect, "architectures"))
else:
distributions[sect]["arches"] = default_architectures
else:
@@ -372,67 +373,67 @@ class DistOptionHandler:
def __init__(self, distributions, configp):
self._configp = configp
self._distributions = distributions
- self._optionmap = {}
- self._optionmap['alias'] = ['str', None]
- self._optionmap['poll_time'] = ['int', default_poll_time]
- # two days
- self._optionmap['max_retry_time'] = ['int', default_max_retry_time]
- self._optionmap['post_install_script'] = ['str', None]
- self._optionmap['pre_install_script'] = ['str', None]
- self._optionmap['dynamic_reindex'] = ['bool', 1]
- self._optionmap['chown_changes_files'] = ['bool', 1]
- self._optionmap['keep_old'] = ['bool', None]
- self._optionmap['mail_on_success'] = ['bool', 1]
- self._optionmap['tweet_on_success'] = ['bool', 0]
- self._optionmap['archive_style'] = ['str', None]
- # Release file stuff
- self._optionmap['generate_release'] = ['bool', 0]
- self._optionmap['release_origin'] = ['str', getpass.getuser()]
- self._optionmap['release_label'] = ['str', self._optionmap['release_origin'][1]]
- self._optionmap['release_suite'] = ['str', None]
- self._optionmap['release_codename'] = ['str', None]
- self._optionmap['experimental_release'] = ['bool', 0]
- self._optionmap['backport_release'] = ['bool', 0]
- self._optionmap['release_description'] = ['str', None]
- self._optionmap['release_signscript'] = ['str', None]
- self._optionmap['keyids'] = ['list', None]
- self._optionmap['keyrings'] = ['list', None]
- self._optionmap['expire_release_files'] = ['bool', 0]
- self._optionmap['extra_keyrings'] = ['list', None]
- self._optionmap['verify_sigs'] = ['bool', 0]
- self._optionmap['use_byhash'] = ['bool', 1]
+ user = getpass.getuser()
+ self._optionmap = {
+ 'alias': ['str', None],
+ 'poll_time': ['int', default_poll_time],
+ 'max_retry_time': ['int', default_max_retry_time],
+ 'post_install_script': ['str', None],
+ 'pre_install_script': ['str', None],
+ 'dynamic_reindex': ['bool', True],
+ 'chown_changes_files': ['bool', True],
+ 'keep_old': ['bool', False],
+ 'mail_on_success': ['bool', True],
+ 'tweet_on_success': ['bool', False],
+ 'archive_style': ['str', None],
+ # Release file stuff
+ 'generate_release': ['bool', False],
+ 'release_origin': ['str', user],
+ 'release_label': ['str', user],
+ 'release_suite': ['str', None],
+ 'release_codename': ['str', None],
+ 'experimental_release': ['bool', False],
+ 'backport_release': ['bool', False],
+ 'release_description': ['str', None],
+ 'release_signscript': ['str', None],
+ 'keyids': ['list', None],
+ 'keyrings': ['list', None],
+ 'extra_keyrings': ['list', None],
+ 'expire_release_files': ['bool', False],
+ 'verify_sigs': ['bool', False],
+ 'use_byhash': ['bool', True]
+ }
def get_option_map(self, dist):
ret = self._distributions[dist]
- for key in list(self._optionmap.keys()):
- type = self._optionmap[key][0]
- ret[key] = self._optionmap[key][1]
- if self._configp.has_option ('DEFAULT', key):
- ret[key] = self.get_option (type, 'DEFAULT', key)
- if self._configp.has_option (dist, key):
- ret[key] = self.get_option (type, dist, key)
+ for (key, value) in list(self._optionmap.items()):
+ if self._configp.has_option(dist, key):
+ ret[key] = self.get_option(value[0], dist, key)
+ elif self._configp.has_option('DEFAULT', key):
+ ret[key] = self.get_option(value[0], 'DEFAULT', key)
+ else:
+ ret[key] = value[1]
return ret
- def get_option (self, type, dist, key):
+ def get_option(self, type, dist, key):
if type == 'int':
return self._configp.getint(dist, key)
elif type == 'str':
return self._configp.get(dist, key)
elif type == 'list':
- return re.split(', ?', self._configp.get(dist, key))
+ return re.split(', *', self._configp.get(dist, key))
elif type == 'bool':
return self._configp.getboolean(dist, key)
- assert(None)
-
+ assert None
distoptionhandler = DistOptionHandler(distributions, configp)
for dist in list(distributions.keys()):
distributions[dist] = distoptionhandler.get_option_map(dist)
- if not distributions[dist]['archive_style'] in ('simple-subdir', 'flat'):
- raise DinstallException("Unknown archive style \"%s\"" % (distributions[dist]['archive_style'],))
+ if distributions[dist]['archive_style'] not in ('simple-subdir', 'flat'):
+ raise DinstallException('Unknown archive style "%s"' % distributions[dist]['archive_style'])
-logger.debug("Distributions: %s" % (distributions,))
+logger.debug("Distributions: %s" % distributions)
# class DinstallTransaction:
# def __init__(self, dir):
@@ -459,9 +460,9 @@ logger.debug("Distributions: %s" % (distributions,))
# def renameFile(self, source, dst):
# self._start_op('rename',
-
# def _sync():
# os.system("sync")
+
os.chdir(toplevel_directory)
do_mkdir(dinstall_subdir)
rejectdir = os.path.join(dinstall_subdir, 'REJECT')
@@ -480,9 +481,10 @@ reprocess_needed = threading.Event()
reprocess_finished = threading.Event()
reprocess_lock = threading.Lock()
+
class IncomingDirRequestHandler(socketserver.StreamRequestHandler, socketserver.BaseRequestHandler):
def handle(self):
- logger.debug('Got request from %s' % (self.client_address,))
+ logger.debug('Got request from %s' % self.client_address)
req = self.rfile.readline().strip().decode('utf-8')
if req == 'RUN':
logger.debug('Doing RUN command')
@@ -498,7 +500,7 @@ class IncomingDirRequestHandler(socketserver.StreamRequestHandler, socketserver.
self.wfile.write('200 Beginning shutdown'.encode('utf-8'))
die_event.set()
else:
- logger.debug('Got unknown command %s' % (req,))
+ logger.debug('Got unknown command %s' % req)
self.wfile.write('500 Unknown request'.encode('utf-8'))
class ExceptionThrowingThreadedUnixStreamServer(socketserver.ThreadingUnixStreamServer):
@@ -507,7 +509,7 @@ class ExceptionThrowingThreadedUnixStreamServer(socketserver.ThreadingUnixStream
die_event.set()
class IncomingDir(threading.Thread):
- def __init__(self, dir, archivemap, logger, trigger_reindex=1, poll_time=30, max_retry_time=172800, batch_mode=0):
+ def __init__(self, dir, archivemap, logger, trigger_reindex=True, poll_time=30, max_retry_time=172800, batch_mode=False):
threading.Thread.__init__(self, name="incoming")
self._dir = dir
self._archivemap = archivemap
@@ -523,7 +525,7 @@ class IncomingDir(threading.Thread):
self._reprocess_queue = {}
def run(self):
- self._logger.info('Created new installer thread (%s)' % (self.getName(),))
+ self._logger.info('Created new installer thread (%s)' % self.getName())
self._logger.info('Entering batch mode...')
initial_reprocess_queue = []
initial_fucked_list = []
@@ -533,10 +535,10 @@ class IncomingDir(threading.Thread):
try:
self._install_changefile(changefilename, changefile, 0)
except Exception:
- logger.exception("Unable to install \"%s\"; adding to screwed list" % (changefilename,))
+ logger.exception('Unable to install "%s"; adding to screwed list' % changefilename)
initial_fucked_list.append(changefilename)
else:
- self._logger.warn('Skipping "%s"; upload incomplete' % (changefilename,))
+ self._logger.warn('Skipping "%s"; upload incomplete' % changefilename)
initial_reprocess_queue.append(changefilename)
if not self._batch_mode:
self._daemonize(initial_reprocess_queue, initial_fucked_list)
@@ -546,7 +548,7 @@ class IncomingDir(threading.Thread):
self._logger.exception("Unhandled exception; shutting down")
die_event.set()
self._done_event.set()
- return 0
+ return False
def _abspath(self, *args):
return os.path.abspath(os.path.join(*[self._dir] + list(args)))
@@ -554,76 +556,75 @@ class IncomingDir(threading.Thread):
def _get_changefiles(self):
ret = []
globpath = self._abspath("*.changes")
- self._logger.debug("glob: " + globpath)
+ self._logger.debug("glob: %s" % globpath)
changefilenames = glob.glob(globpath)
for changefilename in changefilenames:
if changefilename not in self._reprocess_queue:
- self._logger.info('Examining "%s"' % (changefilename,))
+ self._logger.info('Examining "%s"' % changefilename)
changefile = ChangeFile()
try:
changefile.load_from_file(changefilename)
except ChangeFileException:
- self._logger.debug("Unable to parse \"%s\", skipping" % (changefilename,))
+ self._logger.debug('Unable to parse "%s", skipping' % changefilename)
continue
ret.append((changefilename, changefile))
else:
- self._logger.debug('Skipping "%s" during new scan because it is in the reprocess queue.' % (changefilename,))
+ self._logger.debug('Skipping "%s" during new scan because it is in the reprocess queue.' % changefilename)
return ret
def _changefile_ready(self, changefilename, changefile):
try:
dist = changefile['distribution']
except KeyError as e:
- self._logger.warn("Unable to read distribution field for \"%s\"; data: %s" % (changefilename, changefile,))
- return 0
+ self._logger.warn('Unable to read distribution field for "%s"; data: %s' % (changefilename, changefile))
+ return False
try:
changefile.verify(self._abspath(''))
except ChangeFileException:
- return 0
- return 1
+ return False
+ return True
def _install_changefile(self, changefilename, changefile, doing_reprocess):
changefiledist = changefile['distribution']
for dist in list(distributions.keys()):
distributions[dist] = distoptionhandler.get_option_map(dist)
- if distributions[dist]['alias'] != None and changefiledist in distributions[dist]['alias']:
+ if distributions[dist]['alias'] and changefiledist in distributions[dist]['alias']:
logger.info('Distribution "%s" is an alias for "%s"' % (changefiledist, dist))
break
else:
dist = changefiledist
- if not dist in list(self._archivemap.keys()):
- raise DinstallException('Unknown distribution "%s" in \"%s\"' % (dist, changefilename,))
+ if dist not in list(self._archivemap.keys()):
+ raise DinstallException('Unknown distribution "%s" in "%s"' % (dist, changefilename))
logger.debug('Installing %s in archive %s' % (changefilename, self._archivemap[dist][1].getName()))
self._archivemap[dist][0].install(changefilename, changefile)
if self._trigger_reindex:
if doing_reprocess:
- logger.debug('Waiting on archive %s to reprocess' % (self._archivemap[dist][1].getName()))
+ logger.debug('Waiting on archive %s to reprocess' % self._archivemap[dist][1].getName())
self._archivemap[dist][1].wait_reprocess()
else:
- logger.debug('Notifying archive %s of change' % (self._archivemap[dist][1].getName()))
+ logger.debug('Notifying archive %s of change' % self._archivemap[dist][1].getName())
self._archivemap[dist][1].notify()
- logger.debug('Finished processing %s' % (changefilename))
+ logger.debug('Finished processing %s' % changefilename)
def _reject_changefile(self, changefilename, changefile, e):
dist = changefile['distribution']
- if not dist in self._archivemap:
- raise DinstallException('Unknown distribution "%s" in \"%s\"' % (dist, changefilename,))
+ if dist not in self._archivemap:
+ raise DinstallException('Unknown distribution "%s" in "%s"' % (dist, changefilename))
self._archivemap[dist][0].reject(changefilename, changefile, e)
def _daemon_server_isready(self):
(inready, outready, exready) = select.select([self._server.fileno()], [], [], 0)
- return len(inready) > 0
+ return bool(inready)
def _daemon_event_ispending(self):
- return die_event.isSet() or reprocess_needed.isSet() or self._daemon_server_isready() or (not self._eventqueue.empty())
+ return die_event.isSet() or reprocess_needed.isSet() or self._daemon_server_isready() or not self._eventqueue.empty()
def _daemon_reprocess_pending(self):
curtime = time.time()
- for changefilename in list(self._reprocess_queue.keys()):
- (starttime, nexttime, delay) = self._reprocess_queue[changefilename]
+ for (starttime, nexttime, delay) in list(self._reprocess_queue.values()):
if curtime >= nexttime:
- return 1
- return 0
+ return True
+ return False
def _daemonize(self, init_reprocess_queue, init_fucked_list):
self._logger.info('Entering daemon mode...')
@@ -635,11 +636,11 @@ class IncomingDir(threading.Thread):
except OSError as e:
pass
self._server = ExceptionThrowingThreadedUnixStreamServer(socket_name, IncomingDirRequestHandler)
- self._server.allow_reuse_address = 1
+ self._server.allow_reuse_address = True
retry_time = 30
self._reprocess_queue = {}
fucked = init_fucked_list
- doing_reprocess = 0
+ doing_reprocess = False
# Initialize the reprocessing queue
for changefilename in init_reprocess_queue:
curtime = time.time()
@@ -647,6 +648,7 @@ class IncomingDir(threading.Thread):
# The main daemon loop
while True:
+
# Wait until we have something to do
while not (self._daemon_event_ispending() or self._daemon_reprocess_pending()):
time.sleep(0.5)
@@ -663,75 +665,74 @@ class IncomingDir(threading.Thread):
self._logger.debug('Scanning for changes')
# do we have anything to reprocess?
- for changefilename in list(self._reprocess_queue.keys()):
- (starttime, nexttime, delay) = self._reprocess_queue[changefilename]
+ for (changefilename, (starttime, nexttime, delay)) in list(self._reprocess_queue.items()):
curtime = time.time()
try:
changefile = ChangeFile()
changefile.load_from_file(changefilename)
- except (ChangeFileException,IOError) as e:
+ except (ChangeFileException, IOError) as e:
if not os.path.exists(changefilename):
- self._logger.info('Changefile "%s" got removed' % (changefilename,))
+ self._logger.info('Changes file "%s" got removed' % changefilename)
else:
- self._logger.exception("Unable to load change file \"%s\"" % (changefilename,))
- self._logger.warn("Marking \"%s\" as screwed" % (changefilename,))
+ self._logger.exception('Unable to load Changes file "%s"' % changefilename)
+ self._logger.warn('Marking "%s" as screwed' % changefilename)
fucked.append(changefilename)
del self._reprocess_queue[changefilename]
continue
- if (curtime - starttime) > self._max_retry_time:
+ if curtime - starttime > self._max_retry_time:
# We've tried too many times; reject it.
self._reject_changefile(changefilename, changefile, DinstallException("Couldn't install \"%s\" in %d seconds" % (changefilename, self._max_retry_time)))
elif curtime >= nexttime:
if self._changefile_ready(changefilename, changefile):
# Let's do it!
- self._logger.debug('Preparing to install "%s"' % (changefilename,))
+ self._logger.debug('Preparing to install "%s"' % changefilename)
try:
self._install_changefile(changefilename, changefile, doing_reprocess)
- self._logger.debug('Removing "%s" from incoming queue after successful install.' % (changefilename,))
+ self._logger.debug('Removing "%s" from incoming queue after successful install.' % changefilename)
del self._reprocess_queue[changefilename]
except Exception as e:
- logger.exception("Unable to install \"%s\"; adding to screwed list" % (changefilename,))
+ logger.exception('Unable to install "%s"; adding to screwed list' % changefilename)
fucked.append(changefilename)
else:
delay *= 2
if delay > 60 * 60:
delay = 60 * 60
- self._logger.info('Upload "%s" isn\'t complete; marking for retry in %d seconds' % (changefilename, delay))
+ self._logger.info("Upload \"%s\" isn't complete; marking for retry in %d seconds" % (changefilename, delay))
self._reprocess_queue[changefilename][1:3] = [time.time() + delay, delay]
# done reprocessing; now scan for changed dirs.
relname = None
self._logger.debug('Checking dnotify event queue')
if not self._eventqueue.empty():
relname = os.path.basename(os.path.abspath(self._eventqueue.get()))
- self._logger.debug('Got %s from dnotify' % (relname,))
- if relname is None:
- if (not doing_reprocess) and reprocess_needed.isSet():
+ self._logger.debug('Got %s from dnotify' % relname)
+ if not (relname or doing_reprocess):
+ if reprocess_needed.isSet():
self._logger.info('Got reprocessing event')
reprocess_needed.clear()
- doing_reprocess = 1
- if relname is None and (not doing_reprocess):
- self._logger.debug('No events to process')
- continue
+ doing_reprocess = True
+ else:
+ self._logger.debug('No events to process')
+ continue
for (changefilename, changefile) in self._get_changefiles():
if changefilename in fucked:
- self._logger.warn("Skipping screwed changefile \"%s\"" % (changefilename,))
+ self._logger.warn('Skipping screwed Changes file "%s"' % changefilename)
continue
# Have we tried this changefile before?
if changefilename not in self._reprocess_queue:
- self._logger.debug('New change file "%s"' % (changefilename,))
+ self._logger.debug('New Changes file "%s"' % changefilename)
if self._changefile_ready(changefilename, changefile):
try:
self._install_changefile(changefilename, changefile, doing_reprocess)
except Exception as e:
- logger.exception("Unable to install \"%s\"; adding to screwed list" % (changefilename,))
+ logger.exception('Unable to install "%s"; adding to screwed list' % changefilename)
fucked.append(changefilename)
else:
curtime = time.time()
- self._logger.info('Upload "%s" isn\'t complete; marking for retry in %d seconds' % (changefilename, retry_time))
+ self._logger.info("Upload \"%s\" isn't complete; marking for retry in %d seconds" % (changefilename, retry_time))
self._reprocess_queue[changefilename] = [curtime, curtime + retry_time, retry_time]
if doing_reprocess:
- doing_reprocess = 0
+ doing_reprocess = False
self._logger.info('Reprocessing complete')
reprocess_finished.set()
@@ -741,17 +742,16 @@ class IncomingDir(threading.Thread):
def parse_versions(fullversion):
debianversion = re.sub('^[0-9]+:', '', fullversion)
upstreamver = re.sub('-[^-]*$', '', debianversion)
-
return (upstreamver, debianversion)
class ArchiveDir:
- def __init__(self, dir, logger, configdict, batch_mode=0, keyrings=None, extra_keyrings=None, verify_sigs=0):
+ def __init__(self, dir, logger, configdict, batch_mode=False, keyrings=None, extra_keyrings=None, verify_sigs=False):
self._dir = dir
self._name = os.path.basename(os.path.abspath(dir))
self._logger = logger
- for key in list(configdict.keys()):
- self._logger.debug("Setting \"%s\" => \"%s\" in archive \"%s\"" % ('_'+key, configdict[key], self._name))
- self.__dict__['_' + key] = configdict[key]
+ for (key, value) in list(configdict.items()):
+ self._logger.debug('Setting "%s" => "%s" in archive "%s"' % ('_' + key, value, self._name))
+ self.__dict__['_' + key] = value
do_mkdir(dir)
self._batch_mode = batch_mode
if 'verify_sigs' in configdict:
@@ -764,14 +764,12 @@ class ArchiveDir:
self._keyrings = keyrings
if configdict['extra_keyrings']:
self._extra_keyrings = configdict['extra_keyrings']
- elif extra_keyrings:
- self._extra_keyrings = extra_keyrings
else:
- self._extra_keyrings = []
+ self._extra_keyrings = extra_keyrings
if self._mail_on_success:
self._success_logger = logging.Logger("mini-dinstall." + self._name)
self._success_logger.setLevel(logging.DEBUG)
- self.mailHandler = SubjectSpecifyingLoggingSMTPHandler(mail_server, 'Mini-Dinstall <%s@%s>' % (getpass.getuser(),socket.getfqdn()), [mail_to])
+ self.mailHandler = SubjectSpecifyingLoggingSMTPHandler(mail_server, 'Mini-Dinstall <%s@%s>' % (getpass.getuser(), socket.getfqdn()), [mail_to])
self.mailHandler.setLevel(logging.DEBUG)
self._success_logger.addHandler(self.mailHandler)
self._clean_targets = []
@@ -786,23 +784,23 @@ class ArchiveDir:
return os.path.join(*[self._name] + list(args))
def install(self, changefilename, changefile):
- retval = 0
+ retval = False
try:
- retval = self._install_run_scripts(changefilename, changefile)
+ retval = self._install_run_scripts(changefilename, changefile)
except Exception:
self._logger.exception("Unhandled exception during installation")
if not retval:
- self._logger.info('Failed to install "%s"' % (changefilename,))
+ self._logger.info('Failed to install "%s"' % changefilename)
def reject(self, changefilename, changefile, reason):
self._reject_changefile(changefilename, changefile, reason)
def _install_run_scripts(self, changefilename, changefile):
- self._logger.info('Preparing to install \"%s\" in archive %s' % (changefilename, self._name,))
+ self._logger.info('Preparing to install "%s" in archive %s' % (changefilename, self._name))
sourcename = changefile['source']
version = changefile['version']
if self._verify_sigs:
- self._logger.info('Verifying signature on "%s"' % (changefilename,))
+ self._logger.info('Verifying signature on "%s"' % changefilename)
try:
if self._keyrings:
verifier = DebianSigVerifier(keyrings=list(map(os.path.expanduser, self._keyrings)), extra_keyrings=self._extra_keyrings)
@@ -810,29 +808,29 @@ class ArchiveDir:
verifier = DebianSigVerifier(extra_keyrings=self._extra_keyrings)
output = verifier.verify(changefilename)
logger.debug(output)
- logger.info('Good signature on "%s"' % (changefilename,))
+ logger.info('Good signature on "%s"' % changefilename)
except GPGSigVerificationFailure as e:
- msg = "Failed to verify signature on \"%s\": %s\n" % (changefilename, e)
+ msg = 'Failed to verify signature on "%s": %s\n' % (changefilename, e)
msg += ''.join(e.getOutput())
logger.error(msg)
self._reject_changefile(changefilename, changefile, e)
- return 0
+ return False
else:
- self._logger.debug('Skipping signature verification on "%s"' % (changefilename,))
+ self._logger.debug('Skipping signature verification on "%s"' % changefilename)
if self._pre_install_script:
try:
- self._logger.debug("Running pre-installation script: " + self._pre_install_script)
+ self._logger.debug("Running pre-installation script: %s" % self._pre_install_script)
if self._run_script(os.path.abspath(changefilename), self._pre_install_script):
- return 0
+ return False
except:
self._logger.exception("failure while running pre-installation script")
- return 0
+ return False
try:
self._install_changefile_internal(changefilename, changefile)
except Exception as e:
- self._logger.exception('Failed to process "%s"' % (changefilename,))
+ self._logger.exception('Failed to process "%s"' % changefilename)
self._reject_changefile(changefilename, changefile, e)
- return 0
+ return False
if self._chown_changes_files:
do_chmod(changefilename, 0o600)
target = os.path.join(self._dir, os.path.basename(changefilename))
@@ -843,7 +841,7 @@ class ArchiveDir:
done = False
missing_fields = []
if 'changes' in changefile:
- changefile ['changes_without_dot'] = misc.format_changes(changefile['changes'])
+ changefile['changes_without_dot'] = misc.format_changes(changefile['changes'])
while not done:
try:
mail_subject = mail_subject_template % changefile
@@ -855,14 +853,14 @@ class ArchiveDir:
else:
done = True
if missing_fields:
- mail_body = mail_body + "\n\nMissing changefile fields: %s" % missing_fields
- minidinstall.mail.send(mail_server, 'Mini-Dinstall <%s@%s>' % (getpass.getuser(),socket.getfqdn()), mail_to, mail_body, mail_subject)
+ mail_body = mail_body + "\n\nMissing Changes file fields: %s" % missing_fields
+ mail.send(mail_server, 'Mini-Dinstall <%s@%s>' % (getpass.getuser(), socket.getfqdn()), mail_to, mail_body, mail_subject)
if self._tweet_on_success:
done = False
missing_fields = []
if 'changes' in changefile:
- changefile ['changes_without_dot'] = misc.format_changes(changefile['changes'])
+ changefile['changes_without_dot'] = misc.format_changes(changefile['changes'])
while not done:
try:
tweet_body = tweet_template % changefile
@@ -874,16 +872,16 @@ class ArchiveDir:
done = True
if missing_fields:
tweet_body = tweet_body + "\n\n(errs: %s)" % missing_fields
- minidinstall.tweet.send(tweet_body, tweet_server, tweet_user, tweet_password)
+ tweet.send(tweet_body, tweet_server, tweet_user, tweet_password)
if self._post_install_script:
try:
- self._logger.debug("Running post-installation script: " + self._post_install_script)
+ self._logger.debug("Running post-installation script: %s" % self._post_install_script)
self._run_script(target, self._post_install_script)
except:
self._logger.exception("failure while running post-installation script")
- return 0
- return 1
+ return False
+ return True
def _install_changefile_internal(self, changefilename, changefile):
sourcename = changefile['source']
@@ -895,41 +893,42 @@ class ArchiveDir:
(ignored, newdebianver) = parse_versions(version)
else:
(newupstreamver, newdebianver) = parse_versions(version)
- is_sourceful = 0
+ is_sourceful = False
for file in [x[2] for x in changefile.getFiles()]:
match = debpackage_re.search(file)
if match:
arch = match.group(3)
- if not arch in self._arches:
- raise DinstallException("Unknown architecture: %s" % (arch))
+ if arch not in self._arches:
+ raise DinstallException("Unknown architecture: %s" % arch)
target = self._arch_target(arch, file)
newfiles.append((os.path.join(incomingdir, file), target, match.group(1), arch))
continue
match = debbuildinfo_re.search(file)
if match:
arch = match.group(3)
- if not arch in self._arches:
- raise DinstallException("Unknown architecture: %s" % (arch))
+ if arch not in self._arches:
+ raise DinstallException("Unknown architecture: %s" % arch)
target = self._arch_target(arch, file)
newfiles.append((os.path.join(incomingdir, file), target, match.group(1), arch))
continue
match = debsrc_dsc_re.search(file) or debsrc_diff_re.search(file) \
or debsrc_orig_re.search(file) or debsrc_native_re.search(file)
if match:
- is_sourceful = 1
+ is_sourceful = True
target = self._source_target(file)
newfiles.append((os.path.join(incomingdir, file), target, match.group(1), 'source'))
- all_arches = {}
+ all_arches = []
for arch in [x[3] for x in newfiles]:
- all_arches[arch] = 1
+ if arch not in all_arches:
+ all_arches.append(arch)
completed = []
oldfiles = []
if not self._keep_old:
- found_old_bins = 0
+ found_old_bins = False
for (oldversion, oldarch) in [x[1:] for x in self._get_package_versions()]:
if oldarch not in all_arches and apt_pkg.version_compare(oldversion, version) < 0:
- found_old_bins = 1
+ found_old_bins = True
for (pkgname, arch) in [x[2:] for x in newfiles]:
if arch == 'source' and found_old_bins:
continue
@@ -941,14 +940,14 @@ class ArchiveDir:
oldpkgname = match.group(1)
oldarch = match.group(3)
file = self._arch_target(arch, file)
- if not file in [x[0] for x in oldfiles]:
+ if file not in [x[0] for x in oldfiles]:
target = file + tmp_old_suffix
if oldpkgname == pkgname and oldarch == arch:
oldfiles.append((file, target))
- self._logger.debug('Scanning "%s" for old files' % (self._abspath('source')))
+ self._logger.debug('Scanning "%s" for old files' % self._abspath('source'))
for file in self._read_source_dir():
file = self._source_target(file)
- if not file in [x[0] for x in oldfiles]:
+ if file not in [x[0] for x in oldfiles]:
target = file + tmp_old_suffix
match = debchanges_re.search(file)
if not match and is_sourceful:
@@ -973,9 +972,9 @@ class ArchiveDir:
self._logger.debug('keeping upstream tarball "%s" version %s' % (file, oldupstreamver))
continue
else:
- self._logger.debug('old native tarball "%s", tagging for deletion' % (file,))
- oldfiles.append((file, target))
- continue
+ self._logger.debug('old native tarball "%s", tagging for deletion' % file)
+ oldfiles.append((file, target))
+ continue
match = debsrc_native_re.search(file)
if match and match.group(1) in [x[2] for x in newfiles]:
oldfiles.append((file, target))
@@ -984,13 +983,11 @@ class ArchiveDir:
self._clean_targets = [x[1] for x in oldfiles]
allrenames = oldfiles + [x[:2] for x in newfiles]
try:
- while not allrenames == []:
- (oldname, newname) = allrenames[0]
+ for (oldname, newname) in allrenames:
do_rename(oldname, newname)
- completed.append(allrenames[0])
- allrenames = allrenames[1:]
+ completed.append((oldname, newname))
except OSError as e:
- logger.exception("Failed to do rename (%s); attempting rollback" % (e.strerror,))
+ logger.exception("Failed to do rename (%s); attempting rollback" % e.strerror)
try:
self._logger.error(traceback.format_tb(sys.exc_info()[2]))
except:
@@ -1007,20 +1004,20 @@ class ArchiveDir:
if script:
script = os.path.expanduser(script)
cmd = '%s %s' % (script, changefilename)
- self._logger.info('Running \"%s\"' % (cmd,))
+ self._logger.info('Running "%s"' % cmd)
if not no_act:
if not os.access(script, os.X_OK):
- self._logger.error("Can't execute script \"%s\"" % (script,))
- return 1
+ self._logger.error("Can't execute script \"%s\"" % script)
+ return True
pid = os.fork()
if pid == 0:
os.execlp(script, script, changefilename)
sys.exit(1)
(pid, status) = os.waitpid(pid, 0)
- if not (status is None or (os.WIFEXITED(status) and os.WEXITSTATUS(status) == 0)):
- self._logger.error("script \"%s\" exited with error code %d" % (cmd, os.WEXITSTATUS(status)))
- return 1
- return 0
+ if status or (not os.WIFEXITED(status) and os.WEXITSTATUS(status) != 0):
+ self._logger.error('script "%s" exited with error code %d' % (cmd, os.WEXITSTATUS(status)))
+ return True
+ return False
def _reject_changefile(self, changefilename, changefile, exception):
sourcename = changefile['source']
@@ -1043,13 +1040,13 @@ class ArchiveDir:
do_rename(changefilename, os.path.join(rejectdir, os.path.basename(changefilename)))
self._logger.info('Rejecting "%s": %s' % (changefilename, repr(exception)))
except Exception:
- self._logger.error("Unhandled exception while rejecting %s; archive may be in inconsistent state" % (changefilename,))
+ self._logger.error("Unhandled exception while rejecting %s; archive may be in inconsistent state" % changefilename)
raise
def clean(self):
self._logger.debug('Removing old files')
for file in self._clean_targets:
- self._logger.debug('Deleting "%s"' % (file,))
+ self._logger.debug('Deleting "%s"' % file)
if not no_act:
os.unlink(file)
@@ -1081,7 +1078,6 @@ class SimpleSubdirArchiveDir(ArchiveDir):
ret.append((match.group(1), match.group(2), match.group(3)))
return ret
-
class FlatArchiveDir(ArchiveDir):
def _read_source_dir(self):
return os.listdir(self._dir)
@@ -1104,15 +1100,15 @@ class FlatArchiveDir(ArchiveDir):
return ret
class ArchiveDirIndexer(threading.Thread):
- def __init__(self, dir, logger, configdict, use_dnotify=0, batch_mode=1):
+ def __init__(self, dir, logger, configdict, use_dnotify=False, batch_mode=True):
self._dir = dir
self._name = os.path.basename(os.path.abspath(dir))
threading.Thread.__init__(self, name=self._name)
self._logger = logger
self._eventqueue = queue.Queue()
- for key in list(configdict.keys()):
- self._logger.debug("Setting \"%s\" => \"%s\" in archive \"%s\"" % ('_'+key, configdict[key], self._name))
- self.__dict__['_' + key] = configdict[key]
+ for (key, value) in list(configdict.items()):
+ self._logger.debug('Setting "%s" => "%s" in archive "%s"' % ('_' + key, value, self._name))
+ self.__dict__['_' + key] = value
do_mkdir(dir)
self._use_dnotify = use_dnotify
self._batch_mode = batch_mode
@@ -1130,11 +1126,11 @@ class ArchiveDirIndexer(threading.Thread):
'-o', 'APT::FTPArchive::SHA1=false',
'-o', 'APT::FTPArchive::MD5=false']
if arch:
- cmdline += ['--arch', arch]
+ cmdline.extend(['--arch', arch])
if not nodb_mode:
- cmdline += ['--db', '%s.db' %dir]
+ cmdline.extend(['--db', '%s.db' % dir])
- self._logger.debug("Running: " + ' '.join(cmdline))
+ self._logger.debug("Running: %s" % ' '.join(cmdline))
if no_act:
return
(infd, outfd) = os.pipe()
@@ -1158,20 +1154,20 @@ class ArchiveDirIndexer(threading.Thread):
newpackagesfile = open(newpackagesfilename, 'w')
newxzpackagesfile = lzma.open(newxzpackagesfilename, 'wt')
buf = stdout.read(8192)
- while buf != '':
+ while buf:
newpackagesfile.write(buf)
newxzpackagesfile.write(buf)
buf = stdout.read(8192)
stdout.close()
(pid, status) = os.waitpid(pid, 0)
- if not (status is None or (os.WIFEXITED(status) and os.WEXITSTATUS(status) == 0)):
- raise DinstallException("apt-ftparchive exited with status code %d" % (status,))
+ if status or (not os.WIFEXITED(status) and os.WEXITSTATUS(status) != 0):
+ raise DinstallException("apt-ftparchive exited with status code %d" % status)
newpackagesfile.close()
newxzpackagesfile.close()
shutil.move(newpackagesfilename, packagesfilename)
shutil.move(newxzpackagesfilename, xzpackagesfilename)
if self._use_byhash:
- for hash in [ 'sha256' ]:
+ for hash in hashes:
do_mkdir(os.path.join(dir, 'by-hash'))
hashdir = os.path.join(dir, 'by-hash', hash.upper())
do_mkdir(hashdir)
@@ -1180,7 +1176,7 @@ class ArchiveDirIndexer(threading.Thread):
mtime = lambda f: os.stat(os.path.join(hashdir, f)).st_mtime
for oldbyhash in sorted(os.listdir(hashdir), key=mtime)[:-16]:
self._logger.debug("Removing old by-hash file: %s" % oldbyhash)
- os.remove(os.path.join(hashdir,oldbyhash))
+ os.remove(os.path.join(hashdir, oldbyhash))
def _make_packagesfile(self, dir):
self._make_indexfile(dir, 'packages', 'Packages')
@@ -1194,24 +1190,24 @@ class ArchiveDirIndexer(threading.Thread):
def _sign_releasefile(self, name, dir):
if self._release_signscript:
try:
- self._logger.debug("Running Release signing script: " + self._release_signscript)
+ self._logger.debug("Running release signing script: %s" % self._release_signscript)
if self._run_script(name, self._release_signscript, dir=dir):
- return None
+ return False
except:
self._logger.exception("failure while running Release signature script")
- return None
- return 1
+ return False
+ return True
# Copied from ArchiveDir
def _run_script(self, changefilename, script, dir=None):
if script:
script = os.path.expanduser(script)
cmd = '%s %s' % (script, changefilename)
- self._logger.info('Running \"%s\"' % (cmd,))
+ self._logger.info('Running "%s"' % cmd)
if not no_act:
if not os.access(script, os.X_OK):
- self._logger.error("Can't execute script \"%s\"" % (script,))
- return 1
+ self._logger.error("Can't execute script \"%s\"" % script)
+ return True
pid = os.fork()
if pid == 0:
if dir:
@@ -1219,10 +1215,10 @@ class ArchiveDirIndexer(threading.Thread):
os.execlp(script, script, changefilename)
sys.exit(1)
(pid, status) = os.waitpid(pid, 0)
- if not (status is None or (os.WIFEXITED(status) and os.WEXITSTATUS(status) == 0)):
- self._logger.error("script \"%s\" exited with error code %d" % (cmd, os.WEXITSTATUS(status)))
- return 1
- return 0
+ if status or (not os.WIFEXITED(status) and os.WEXITSTATUS(status) != 0):
+ self._logger.error('script "%s" exited with error code %d' % (cmd, os.WEXITSTATUS(status)))
+ return True
+ return False
def _get_file_sum(self, type, filename):
ret = misc.get_file_sum(self, type, filename)
@@ -1246,14 +1242,14 @@ class ArchiveDirIndexer(threading.Thread):
size = os.stat(absfile)[stat.ST_SIZE]
f.write(' %s% 16d %s\n' % (h, size, os.path.basename(absfile)))
- def _index_all(self, force=None):
+ def _index_all(self, force=False):
self._index(self._arches + ['source'], force)
def _gen_release_all(self, force=False):
self._gen_release(self._arches, force)
def run(self):
- self._logger.info('Created new thread (%s) for archive indexer %s' % (self.getName(), self._name,))
+ self._logger.info('Created new thread (%s) for archive indexer %s' % (self.getName(), self._name))
self._logger.info('Entering batch mode...')
try:
self._index_all(1)
@@ -1266,15 +1262,15 @@ class ArchiveDirIndexer(threading.Thread):
self._logger.exception("Unhandled exception; shutting down")
die_event.set()
self._done_event.set()
- self._logger.info('Thread \"%s\" exiting' % (self.getName(),))
+ self._logger.info('Thread "%s" exiting' % self.getName())
def _daemon_event_ispending(self):
- return die_event.isSet() or (not self._eventqueue.empty())
+ return die_event.isSet() or not self._eventqueue.empty()
+
def _daemonize(self):
self._logger.info('Entering daemon mode...')
if self._dynamic_reindex:
self._dnotify = DirectoryNotifierFactory().create(self._get_dnotify_dirs(), use_dnotify=self._use_dnotify, poll_time=self._poll_time, cancel_event=die_event)
-
self._async_dnotify = DirectoryNotifierAsyncWrapper(self._dnotify, self._eventqueue, logger=self._logger, name=self._name + " Indexer")
self._async_dnotify.start()
@@ -1295,14 +1291,14 @@ class ArchiveDirIndexer(threading.Thread):
if isinstance(obj, str):
self._logger.debug('got dir change')
dir = obj
- elif obj is None:
+ elif not obj:
self._logger.debug('got general event')
setevent = None
elif obj.__class__ == threading.Event().__class__:
self._logger.debug('got wait_reprocess event')
setevent = obj
else:
- self._logger.error("unknown object %s in event queue" % (obj,))
+ self._logger.error("unknown object %s in event queue" % obj)
assert None
# This is to protect against both lots of activity, and to
@@ -1313,12 +1309,12 @@ class ArchiveDirIndexer(threading.Thread):
self._logger.debug('setting wait_reprocess event')
setevent.set()
continue
- if dir is None:
+ if not dir:
self._logger.debug('Got general change')
self._index_all(1)
self._gen_release_all(True)
else:
- self._logger.debug('Got change in %s' % (dir,))
+ self._logger.debug('Got change in %s' % dir)
self._index([os.path.basename(os.path.abspath(dir))])
self._gen_release([os.path.basename(os.path.abspath(dir))])
if setevent:
@@ -1326,19 +1322,17 @@ class ArchiveDirIndexer(threading.Thread):
setevent.set()
def _reindex_needed(self):
- reindex_needed = 0
if os.access(self._abspath('Release.gpg'), os.R_OK):
gpg_mtime = os.stat(self._abspath('Release.gpg'))[stat.ST_MTIME]
for dir in self._get_dnotify_dirs():
- dir_mtime = os.stat(self._abspath(dir))[stat.ST_MTIME]
- if dir_mtime > gpg_mtime:
- reindex_needed = 1
+ if os.stat(self._abspath(dir))[stat.ST_MTIME] > gpg_mtime:
+ return True
else:
- reindex_needed = 1
- return reindex_needed
+ return True
+ return False
- def _index(self, arches, force=None):
- self._index_impl(arches, force=force)
+ def _index(self, arches, force=False):
+ self._index_impl(arches, force)
def _gen_release(self, arches, force=False):
self._gen_release_impl(self._arches, force)
@@ -1364,28 +1358,27 @@ class SimpleSubdirArchiveDirIndexer(ArchiveDirIndexer):
target = os.path.join(self._dir, arch)
do_mkdir(target)
- def _index_impl(self, arches, force=None):
+ def _index_impl(self, arches, force=False):
for arch in arches:
dirmtime = os.stat(self._relpath(arch))[stat.ST_MTIME]
if arch != 'source':
pkgsfile = self._relpath(arch, 'Packages')
- if force or (not os.access(pkgsfile, os.R_OK)) or dirmtime > os.stat(pkgsfile)[stat.ST_MTIME]:
- self._logger.info('Generating Packages file for %s...' % (arch,))
+ if force or not os.access(pkgsfile, os.R_OK) or dirmtime > os.stat(pkgsfile)[stat.ST_MTIME]:
+ self._logger.info('Generating Packages file for %s...' % arch)
self._make_packagesfile(self._relpath(arch))
self._logger.info('Packages generation complete')
else:
- self._logger.info('Skipping generation of Packages file for %s' % (arch,))
-
+ self._logger.info('Skipping generation of Packages file for %s' % arch)
else:
pkgsfile = self._relpath(arch, 'Sources')
- if force or (not os.access(pkgsfile, os.R_OK)) or dirmtime > os.stat(pkgsfile)[stat.ST_MTIME]:
- self._logger.info('Generating Sources file for %s...' % (arch,))
+ if force or not os.access(pkgsfile, os.R_OK) or dirmtime > os.stat(pkgsfile)[stat.ST_MTIME]:
+ self._logger.info('Generating Sources file for %s...' % arch)
self._make_sourcesfile(self._relpath('source'))
self._logger.info('Sources generation complete')
else:
- self._logger.info('Skipping generation of Sources file for %s' % (arch,))
+ self._logger.info('Skipping generation of Sources file for %s' % arch)
- def _gen_release_impl(self, arches, force):
+ def _gen_release_impl(self, arches, force=False):
for arch in arches:
targetname = self._relpath(arch, 'Release')
if not self._generate_release:
@@ -1397,23 +1390,13 @@ class SimpleSubdirArchiveDirIndexer(ArchiveDirIndexer):
pass
return
tmpname = targetname + tmp_new_suffix
- release_needed = 0
uncompr_indexfile = os.path.join(arch, 'Packages')
- indexfiles = [uncompr_indexfile]
+ indexfiles = [uncompr_indexfile]
comprexts = ['.xz']
for ext in comprexts:
- indexfiles = indexfiles + [uncompr_indexfile + ext]
- if os.access(targetname, os.R_OK):
- release_mtime = os.stat(targetname)[stat.ST_MTIME]
- for file in indexfiles:
- if release_needed:
- break
- if os.stat(self._abspath(file))[stat.ST_MTIME] > release_mtime:
- release_needed = 1
- else:
- release_needed = 1
+ indexfiles.append(uncompr_indexfile + ext)
- if not release_needed:
+ if not self._release_needed(targetname, indexfiles):
self._logger.info("Skipping Release generation")
continue
self._logger.info("Generating Release...")
@@ -1421,40 +1404,40 @@ class SimpleSubdirArchiveDirIndexer(ArchiveDirIndexer):
self._logger.info("Release generation complete")
return
f = open(tmpname, 'w')
- f.write('Origin: ' + self._release_origin + '\n')
- f.write('Label: ' + self._release_label + '\n')
+ f.write('Origin: %s\n' % self._release_origin)
+ f.write('Label: %s\n' % self._release_label)
suite = self._release_suite
if not suite:
suite = self._name
- f.write('Suite: ' + suite + '\n')
+ f.write('Suite: %s\n' % suite)
codename = self._release_codename
if not codename:
codename = suite
- f.write('Codename: ' + '%s/%s\n' % (codename, arch))
+ f.write('Codename: %s/%s\n' % (codename, arch))
if self._experimental_release:
f.write('NotAutomatic: yes\n')
elif self._backport_release:
f.write('NotAutomatic: yes\n')
f.write('ButAutomaticUpgrades: yes\n')
- f.write('Date: ' + time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime()) + '\n')
+ f.write('Date: %s\n' % time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime()))
if self._expire_release_files or self._keyids:
- f.write('Valid-Until: ' + (datetime.datetime.utcnow() + datetime.timedelta(days=28)).strftime("%a, %d %b %Y %H:%M:%S UTC") + '\n')
- f.write('Architectures: ' + arch + '\n')
+ f.write('Valid-Until: %s\n' % (datetime.datetime.utcnow() + datetime.timedelta(days=28)).strftime("%a, %d %b %Y %H:%M:%S UTC"))
+ f.write('Architectures: %s\n' % arch)
if self._keyids:
- f.write('Signed-By: ' + ','.join(self._keyids) + '\n')
+ f.write('Signed-By: %s\n' % ','.join(self._keyids))
if self._use_byhash:
f.write('Acquire-By-Hash: yes\n')
if self._release_description:
- f.write('Description: ' + self._release_description + '\n')
- for hash in [ 'sha256' ]:
- self._do_hash(hash, indexfiles, f)
+ f.write('Description: %s\n' % self._release_description)
+ for hash in hashes:
+ self._do_hash(hash, indexfiles, f)
f.close()
if self._sign_releasefile(os.path.basename(tmpname), self._abspath(arch)):
os.rename(tmpname, targetname)
self._logger.info("Release generation complete")
def _in_archdir(self, *args):
- return (lambda x,self=self: self._abspath(x)) (*args)
+ return (lambda x, self=self: self._abspath(x))(*args)
def _get_dnotify_dirs(self):
return list(map(lambda x, self=self: self._abspath(x), self._arches + ['source']))
@@ -1462,21 +1445,31 @@ class SimpleSubdirArchiveDirIndexer(ArchiveDirIndexer):
def _get_all_indexfiles(self):
return [os.path.join(arch, 'Packages') for arch in self._arches] + ['source/Sources']
+ def _release_needed(self, targetname, indexfiles):
+ if os.access(targetname, os.R_OK):
+ release_mtime = os.stat(targetname)[stat.ST_MTIME]
+ for file in indexfiles:
+ if os.stat(self._abspath(file))[stat.ST_MTIME] > release_mtime:
+ return True
+ else:
+ return True
+ return False
+
class FlatArchiveDirIndexer(ArchiveDirIndexer):
def __init__(self, *args, **kwargs):
ArchiveDirIndexer.__init__(*[self] + list(args), **kwargs)
- def _index_impl(self, arches, force=None):
+ def _index_impl(self, arches, force=False):
pkgsfile = self._abspath('Packages')
dirmtime = os.stat(self._relpath())[stat.ST_MTIME]
- if force or (not os.access(pkgsfile, os.R_OK)) or dirmtime > os.stat(pkgsfile)[stat.ST_MTIME]:
+ if force or not os.access(pkgsfile, os.R_OK) or dirmtime > os.stat(pkgsfile)[stat.ST_MTIME]:
self._logger.info('Generating Packages file...')
self._make_packagesfile(self._relpath())
self._logger.info('Packages generation complete')
else:
self._logger.info('Skipping generation of Packages file')
pkgsfile = self._abspath('Sources')
- if force or (not os.access(pkgsfile, os.R_OK)) or dirmtime > os.stat(pkgsfile)[stat.ST_MTIME]:
+ if force or not os.access(pkgsfile, os.R_OK) or dirmtime > os.stat(pkgsfile)[stat.ST_MTIME]:
self._logger.info('Generating Sources file...')
self._make_sourcesfile(self._relpath())
self._logger.info('Sources generation complete')
@@ -1484,17 +1477,17 @@ class FlatArchiveDirIndexer(ArchiveDirIndexer):
self._logger.info('Skipping generation of Sources file')
for arch in self._arches:
- if arch == "all" or arch == "source":
+ if arch in ("all", "source"):
continue
pkgsfile = self._abspath('Contents-%s' % arch)
- if force or (not os.access(pkgsfile, os.R_OK)) or dirmtime > os.stat(pkgsfile)[stat.ST_MTIME]:
+ if force or not os.access(pkgsfile, os.R_OK) or dirmtime > os.stat(pkgsfile)[stat.ST_MTIME]:
self._logger.info('Generating Contents file...')
self._make_contentsfile(self._relpath(), arch)
self._logger.info('Contents generation complete')
else:
self._logger.info('Skipping generation of Contents file')
- def _gen_release_impl(self, arches, force):
+ def _gen_release_impl(self, arches, force=False):
targetname = self._abspath('Release')
if not self._generate_release:
if os.access(targetname, os.R_OK):
@@ -1505,25 +1498,15 @@ class FlatArchiveDirIndexer(ArchiveDirIndexer):
pass
return
tmpname = targetname + tmp_new_suffix
- release_needed = 0
uncompr_indexfiles = self._get_all_indexfiles()
indexfiles = []
comprexts = ['.xz']
for index in uncompr_indexfiles:
- indexfiles = indexfiles + [index]
+ indexfiles.append(index)
for ext in comprexts:
- indexfiles = indexfiles + [index + ext]
- if os.access(targetname, os.R_OK):
- release_mtime = os.stat(targetname)[stat.ST_MTIME]
- for file in indexfiles:
- if release_needed:
- break
- if os.stat(self._abspath(file))[stat.ST_MTIME] > release_mtime:
- release_needed = 1
- else:
- release_needed = 1
+ indexfiles.append(index + ext)
- if not release_needed:
+ if not self._release_needed(targetname, indexfiles):
self._logger.info("Skipping Release generation")
return
self._logger.info("Generating Release...")
@@ -1531,32 +1514,32 @@ class FlatArchiveDirIndexer(ArchiveDirIndexer):
self._logger.info("Release generation complete")
return
f = open(tmpname, 'w')
- f.write('Origin: ' + self._release_origin + '\n')
- f.write('Label: ' + self._release_label + '\n')
+ f.write('Origin: %s\n' % self._release_origin)
+ f.write('Label: %s\n' % self._release_label)
suite = self._release_suite
if not suite:
suite = self._name
- f.write('Suite: ' + suite + '\n')
+ f.write('Suite: %s\n' % suite)
codename = self._release_codename
if not codename:
codename = suite
- f.write('Codename: ' + codename + '\n')
+ f.write('Codename: %s\n' % codename)
if self._experimental_release:
f.write('NotAutomatic: yes\n')
elif self._backport_release:
f.write('NotAutomatic: yes\n')
f.write('ButAutomaticUpgrades: yes\n')
- f.write('Date: ' + time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime()) + '\n')
+ f.write('Date: %s\n' % time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime()))
if self._expire_release_files or self._keyids:
- f.write('Valid-Until: ' + (datetime.datetime.utcnow() + datetime.timedelta(days=28)).strftime("%a, %d %b %Y %H:%M:%S UTC") + '\n')
- f.write('Architectures: ' + ' '.join(self._arches) + '\n')
+ f.write('Valid-Until: %s\n' % (datetime.datetime.utcnow() + datetime.timedelta(days=28)).strftime("%a, %d %b %Y %H:%M:%S UTC"))
+ f.write('Architectures: %s\n' % ' '.join(self._arches))
if self._keyids:
- f.write('Signed-By: ' + ','.join(self._keyids) + '\n')
+ f.write('Signed-By: %s\n' % ','.join(self._keyids))
if self._use_byhash:
f.write('Acquire-By-Hash: yes\n')
if self._release_description:
- f.write('Description: ' + self._release_description + '\n')
- for hash in [ 'sha256' ]:
+ f.write('Description: %s\n' % self._release_description)
+ for hash in hashes:
self._do_hash(hash, indexfiles, f)
f.close()
if self._sign_releasefile(tmpname, self._abspath()):
@@ -1564,7 +1547,7 @@ class FlatArchiveDirIndexer(ArchiveDirIndexer):
self._logger.info("Release generation complete")
def _in_archdir(self, *args):
- return (lambda x,self=self: self._abspath(x))(*args[1:])
+ return (lambda x, self=self: self._abspath(x))(*args[1:])
def _get_dnotify_dirs(self):
return [self._dir]
@@ -1572,16 +1555,26 @@ class FlatArchiveDirIndexer(ArchiveDirIndexer):
def _get_all_indexfiles(self):
allindexes = []
for arch in self._arches:
- if arch == "all" or arch == "source":
+ if arch in ("all", "source"):
continue
- allindexes += ["Contents-%s" % arch]
+ allindexes.append("Contents-%s" % arch)
return ['Packages', 'Sources'] + allindexes
+ def _release_needed(self, targetname, indexfiles):
+ if os.access(targetname, os.R_OK):
+ release_mtime = os.stat(targetname)[stat.ST_MTIME]
+ for file in indexfiles:
+ if os.stat(self._abspath(file))[stat.ST_MTIME] > release_mtime:
+ return True
+ else:
+ return True
+ return False
+
if os.access(lockfilename, os.R_OK):
- logger.critical("lockfile \"%s\" exists (pid %s): is another mini-dinstall running?" % (lockfilename, open(lockfilename).read(10)))
+ logger.critical('lockfile "%s" exists (pid %s): is another mini-dinstall running?' % (lockfilename, open(lockfilename).read(10)))
logging.shutdown()
sys.exit(1)
-logger.debug('Creating lock file: ' + lockfilename)
+logger.debug('Creating lock file: %s' % lockfilename)
if not no_act:
lockfile = open(lockfilename, 'w')
lockfile.close()
@@ -1601,14 +1594,14 @@ if not batch_mode:
os.close(0)
os.close(1)
os.close(2)
- # unix file descriptor allocation ensures that the followin are fd 0,1,2
+ # unix file descriptor allocation ensures that the following are fd 0,1,2
sys.stdin = open("/dev/null")
sys.stdout = open("/dev/null")
sys.stderr = open("/dev/null")
- logger.debug("Finished daemonizing (pid %s)" % (os.getpid(),))
+ logger.debug("Finished daemonizing (pid %d)" % os.getpid())
lockfile = open(lockfilename, 'w')
-lockfile.write("%s" % (os.getpid(),))
+lockfile.write(str(os.getpid()))
lockfile.close()
if not (debug_mode or batch_mode):
@@ -1616,24 +1609,23 @@ if not (debug_mode or batch_mode):
logger.removeHandler(stderr_handler)
archivemap = {}
-# Instantiaate archive classes for installing files
-for dist in list(distributions.keys()):
- if distributions[dist]['archive_style'] == 'simple-subdir':
- newclass = SimpleSubdirArchiveDir
+# Instantiate archive classes for installing files
+for (dist, value) in list(distributions.items()):
+ if value['archive_style'] == 'simple-subdir':
+ archdir = SimpleSubdirArchiveDir
else:
- newclass = FlatArchiveDir
- archivemap[dist] = [newclass(dist, logger, distributions[dist], batch_mode=batch_mode, keyrings=default_keyrings, extra_keyrings=default_extra_keyrings, verify_sigs=default_verify_sigs), None]
+ archdir = FlatArchiveDir
+ archivemap[dist] = [archdir(dist, logger, value, batch_mode=batch_mode, keyrings=default_keyrings, extra_keyrings=default_extra_keyrings, verify_sigs=default_verify_sigs), None]
# Create archive indexing threads, but don't start them yet
-for dist in list(distributions.keys()):
- targetdir = os.path.join(toplevel_directory, dist)
- logger.info('Initializing archive indexer %s' % (dist,))
- if distributions[dist]['archive_style'] == 'simple-subdir':
- newclass = SimpleSubdirArchiveDirIndexer
- else:
- newclass = FlatArchiveDirIndexer
- archive = newclass(targetdir, logger, distributions[dist], use_dnotify=use_dnotify, batch_mode=batch_mode)
- archivemap[dist][1] = archive
+for (dist, value) in list(distributions.items()):
+ targetdir = os.path.join(toplevel_directory, dist)
+ logger.info('Initializing archive indexer %s' % dist)
+ if value['archive_style'] == 'simple-subdir':
+ archdiridx = SimpleSubdirArchiveDirIndexer
+ else:
+ archdiridx = FlatArchiveDirIndexer
+ archivemap[dist][1] = archdiridx(targetdir, logger, value, use_dnotify=use_dnotify, batch_mode=batch_mode)
# Now: kick off the incoming processor
logger.info('Initializing incoming processor')
@@ -1647,7 +1639,7 @@ if batch_mode:
# Once we've installed everything, start the indexing threads
for dist in list(distributions.keys()):
archive = archivemap[dist][1]
- logger.debug('Starting archive %s' % (archive.getName(),))
+ logger.debug('Starting archive %s' % archive.getName())
archive.start()
# Wait for all the indexing threads to finish; none of these ever
@@ -1655,7 +1647,7 @@ for dist in list(distributions.keys()):
if batch_mode:
for dist in list(distributions.keys()):
archive = archivemap[dist][1]
- logger.debug('Waiting for archive %s to finish' % (archive.getName(),))
+ logger.debug('Waiting for archive %s to finish' % archive.getName())
archive.wait()
else:
logger.debug("Waiting for die event")
@@ -1664,11 +1656,11 @@ else:
incoming.wait()
for dist in list(distributions.keys()):
archive = archivemap[dist][1]
- logger.info('Die event caught; waiting for archive %s to finish' % (archive.getName(),))
+ logger.info('Die event caught; waiting for archive %s to finish' % archive.getName())
archive.wait()
#logging.shutdown()
-logger.debug('Removing lock file: ' + lockfilename)
+logger.debug('Removing lock file: %s' % lockfilename)
os.unlink(lockfilename)
logger.info("main thread exiting...")
sys.exit(0)
diff --git a/minidinstall/ChangeFile.py b/minidinstall/ChangeFile.py
index 3b0cf48..4a65af6 100644
--- a/minidinstall/ChangeFile.py
+++ b/minidinstall/ChangeFile.py
@@ -1,8 +1,8 @@
-# ChangeFile
+# ChangeFile -*- mode: python; coding: utf-8 -*-
# A class which represents a Debian change file.
-# Copyright 2002 Colin Walters <walters@gnu.org>
+# Copyright (c) 2002 Colin Walters <walters@gnu.org>
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
@@ -18,11 +18,11 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-import os, re, sys, string, stat
-import threading, queue
+import os, re, stat
import logging
-from minidinstall import DpkgControl, SignedFile
-from minidinstall import misc
+from .DpkgControl import *
+from .SignedFile import *
+from . import misc
class ChangeFileException(Exception):
def __init__(self, value):
@@ -30,19 +30,19 @@ class ChangeFileException(Exception):
def __str__(self):
return repr(self._value)
-class ChangeFile(DpkgControl.DpkgParagraph):
- md5_re = r'^(?P<md5>[0-9a-f]{32})[ \t]+(?P<size>\d+)[ \t]+(?P<section>[-/a-zA-Z0-9]+)[ \t]+(?P<priority>[-a-zA-Z0-9]+)[ \t]+(?P<file>[0-9a-zA-Z][-+:.,=~0-9a-zA-Z_]+)$'
- sha1_re = r'^(?P<sha1>[0-9a-f]{40})[ \t]+(?P<size>\d+)[ \t]+(?P<file>[0-9a-zA-Z][-+:.,=~0-9a-zA-Z_]+)$'
- sha256_re = r'^(?P<sha256>[0-9a-f]{64})[ \t]+(?P<size>\d+)[ \t]+(?P<file>[0-9a-zA-Z][-+:.,=~0-9a-zA-Z_]+)$'
+class ChangeFile(DpkgParagraph):
+ md5_re = r'^(?P<hashsum>[0-9a-f]{32})[ \t]+(?P<size>\d+)[ \t]+(?P<section>[-/a-zA-Z0-9]+)[ \t]+(?P<priority>[-a-zA-Z0-9]+)[ \t]+(?P<file>[0-9a-zA-Z][-+:.,=~0-9a-zA-Z_]+)$'
+ sha1_re = r'^(?P<hashsum>[0-9a-f]{40})[ \t]+(?P<size>\d+)[ \t]+(?P<file>[0-9a-zA-Z][-+:.,=~0-9a-zA-Z_]+)$'
+ sha256_re = r'^(?P<hashsum>[0-9a-f]{64})[ \t]+(?P<size>\d+)[ \t]+(?P<file>[0-9a-zA-Z][-+:.,=~0-9a-zA-Z_]+)$'
def __init__(self):
- DpkgControl.DpkgParagraph.__init__(self)
+ DpkgParagraph.__init__(self)
self._logger = logging.getLogger("mini-dinstall")
self._file = ''
def load_from_file(self, filename):
self._file = filename
- f = SignedFile.SignedFile(open(self._file))
+ f = SignedFile(open(self._file))
self.load(f)
f.close()
@@ -52,57 +52,50 @@ class ChangeFile(DpkgControl.DpkgParagraph):
def _get_checksum_from_changes(self):
""" extract checksums and size from changes file """
output = {}
- hashes = { 'md5': ['files', re.compile(self.md5_re)],
- 'sha1': ['checksums-sha1', re.compile(self.sha1_re)],
- 'sha256': ['checksums-sha256', re.compile(self.sha256_re)]
- }
- hashes_checked = hashes.copy()
+ hashes = {
+ 'md5': ['files', re.compile(self.md5_re)],
+ 'sha1': ['checksums-sha1', re.compile(self.sha1_re)],
+ 'sha256': ['checksums-sha256', re.compile(self.sha256_re)]
+ }
- try:
- self['files']
- except KeyError:
+ if 'files' not in self:
return []
- for hash in hashes:
- try:
- self[hashes[hash][0]]
- except KeyError:
+ for (hash, (field, regex)) in list(hashes.items()):
+ if field not in self:
self._logger.warn("Can't find %s checksum in changes file '%s'" % (hash, os.path.basename(self._file)))
- hashes_checked.pop(hash)
-
- for hash in hashes_checked:
+ continue
output[hash] = []
- for line in self[hashes[hash][0]]:
- if line == '':
+ for line in self[field].splitlines():
+ if not line:
continue
- match = hashes[hash][1].match(line)
- if (match is None):
- raise ChangeFileException("Couldn't parse file entry \"%s\" in Files field of .changes" % (line,))
- output[hash].append([match.group(hash), match.group('size'), match.group('file') ])
+ match = regex.match(line)
+ if not match:
+ raise ChangeFileException("Couldn't parse file entry \"%s\" in %s field of .changes" % (line, self.trueFieldCasing[field]))
+ output[hash].append([match.group('hashsum'), match.group('size'), match.group('file')])
return output
def verify(self, sourcedir):
""" verify size and hash values from changes file """
checksum = self._get_checksum_from_changes()
- for hash in list(checksum.keys()):
- for (hashsum, size, filename) in checksum[hash]:
+ for (hash, value) in list(checksum.items()):
+ for (hashsum, size, filename) in value:
self._verify_file_integrity(os.path.join(sourcedir, filename), int(size), hash, hashsum)
-
def _verify_file_integrity(self, filename, expected_size, hash, expected_hashsum):
""" check uploaded file integrity """
- self._logger.debug('Checking integrity of %s' % (filename,))
+ self._logger.debug('Checking integrity of %s' % filename)
try:
statbuf = os.stat(filename)
if not stat.S_ISREG(statbuf[stat.ST_MODE]):
- raise ChangeFileException("%s is not a regular file" % (filename,))
+ raise ChangeFileException("%s is not a regular file" % filename)
size = statbuf[stat.ST_SIZE]
except OSError as e:
- raise ChangeFileException("Can't stat %s: %s" % (filename,e.strerror))
+ raise ChangeFileException("Can't stat %s: %s" % (filename, e.strerror))
if size != expected_size:
- raise ChangeFileException("File size for %s does not match that specified in .dsc" % (filename,))
- if (misc.get_file_sum(self, hash, filename) != expected_hashsum):
- raise ChangeFileException("%ssum for %s does not match that specified in .dsc" % (hash, filename,))
+ raise ChangeFileException("File size for %s does not match that specified in .dsc" % filename)
+ if misc.get_file_sum(self, hash, filename) != expected_hashsum:
+ raise ChangeFileException("%ssum for %s does not match that specified in .dsc" % (hash, filename))
self._logger.debug('Verified %ssum %s and size %s for %s' % (hash, expected_hashsum, expected_size, filename))
# vim:ts=4:sw=4:et:
diff --git a/minidinstall/DebianSigVerifier.py b/minidinstall/DebianSigVerifier.py
index d441a58..17a6ec2 100644
--- a/minidinstall/DebianSigVerifier.py
+++ b/minidinstall/DebianSigVerifier.py
@@ -2,7 +2,7 @@
# A class for verifying signed files, using Debian keys
-# Copyright © 2002 Colin Walters <walters@gnu.org>
+# Copyright (c) 2002 Colin Walters <walters@gnu.org>
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
@@ -18,18 +18,19 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-import os, re, sys, string, stat, logging
-from minidinstall.GPGSigVerifier import GPGSigVerifier
+import os
+from .GPGSigVerifier import *
class DebianSigVerifier(GPGSigVerifier):
_dpkg_ring = '/etc/dpkg/local-keyring.gpg'
+
def __init__(self, keyrings=None, extra_keyrings=None):
- if keyrings is None:
+ if not keyrings:
keyrings = ['/usr/share/keyrings/debian-keyring.gpg', '/usr/share/keyrings/debian-keyring.pgp']
if os.access(self._dpkg_ring, os.R_OK):
keyrings.append(self._dpkg_ring)
- if not extra_keyrings is None:
- keyrings += extra_keyrings
+ if extra_keyrings:
+ keyrings.extend(extra_keyrings)
GPGSigVerifier.__init__(self, keyrings)
# vim:ts=4:sw=4:et:
diff --git a/minidinstall/Dnotify.py b/minidinstall/Dnotify.py
index 18606e1..cdc2c48 100644
--- a/minidinstall/Dnotify.py
+++ b/minidinstall/Dnotify.py
@@ -2,7 +2,7 @@
# A simple FAM-like beast in Python
-# Copyright © 2002 Colin Walters <walters@gnu.org>
+# Copyright (c) 2002 Colin Walters <walters@gnu.org>
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
@@ -18,9 +18,9 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-import os, re, sys, string, stat, threading, queue, time
+import os, stat, threading, queue, time
import logging
-from minidinstall import misc
+from . import misc
class DnotifyException(Exception):
def __init__(self, value):
@@ -29,7 +29,7 @@ class DnotifyException(Exception):
return repr(self._value)
class DirectoryNotifierFactory:
- def create(self, dirs, use_dnotify=1, poll_time=30, logger=None, cancel_event=None):
+ def create(self, dirs, use_dnotify=False, poll_time=30, logger=None, cancel_event=None):
if use_dnotify and os.access('/usr/bin/dnotify', os.X_OK):
if logger:
logger.debug("Using dnotify directory notifier")
@@ -47,11 +47,11 @@ class DirectoryNotifier:
def __init__(self, dirs, logger, cancel_event=None):
self._cwd = os.getcwd()
self._dirs = dirs
- if cancel_event is None:
+ if not cancel_event:
self._cancel_event = threading.Event()
else:
self._cancel_event = cancel_event
- if logger is None:
+ if not logger:
self._logger = logging.getLogger("Dnotify")
self._logger.addFilter(DnotifyNullLoggingFilter())
else:
@@ -62,13 +62,13 @@ class DirectoryNotifier:
class DirectoryNotifierAsyncWrapper(threading.Thread):
def __init__(self, dnotify, queue, logger=None, name=None):
- if not name is None:
+ if name:
threading.Thread.__init__(self, name=name)
else:
threading.Thread.__init__(self)
self._eventqueue = queue
self._dnotify = dnotify
- if logger is None:
+ if not logger:
self._logger = logging.getLogger("Dnotify")
self._logger.addFilter(DnotifyNullLoggingFilter())
else:
@@ -78,7 +78,7 @@ class DirectoryNotifierAsyncWrapper(threading.Thread):
self._cancel_event.set()
def run(self):
- self._logger.info('Created new thread (%s) for async directory notification' % (self.getName()))
+ self._logger.info('Created new thread (%s) for async directory notification' % self.getName())
while not self._dnotify.cancelled():
dir = self._dnotify.poll()
self._eventqueue.put(dir)
@@ -97,7 +97,7 @@ class MtimeDirectoryNotifier(DirectoryNotifier):
timeout_time = None
if timeout:
timeout_time = time.time() + timeout
- while self._changed == []:
+ while not self._changed:
if timeout_time and time.time() > timeout_time:
return None
self._logger.debug('Polling...')
@@ -105,16 +105,15 @@ class MtimeDirectoryNotifier(DirectoryNotifier):
oldtime = self._dirmap[dir]
mtime = os.stat(os.path.join(self._cwd, dir))[stat.ST_MTIME]
if oldtime < mtime:
- self._logger.debug('Directory "%s" has changed' % (dir,))
+ self._logger.debug('Directory "%s" has changed' % dir)
self._changed.append(dir)
self._dirmap[dir] = mtime
- if self._changed == []:
+ if not self._changed:
for x in range(self._polltime):
if self._cancel_event.isSet():
return None
time.sleep(1)
- ret = self._changed[0]
- self._changed = self._changed[1:]
+ ret = self._changed.pop(0)
return ret
class DnotifyDirectoryNotifier(DirectoryNotifier):
@@ -127,11 +126,11 @@ class DnotifyDirectoryNotifier(DirectoryNotifier):
def poll(self, timeout=None):
# delete duplicates
i = self._queue.qsize()
- self._logger.debug('Queue size: %d', (i,))
+ self._logger.debug('Queue size: %d' % i)
set = {}
- while i > 0:
+ while i:
dir = self._queue_get(timeout)
- if dir is None:
+ if not dir:
# We shouldn't have to do this; no one else is reading
# from the queue. But we do it just to be safe.
for key in list(set.keys()):
@@ -142,11 +141,11 @@ class DnotifyDirectoryNotifier(DirectoryNotifier):
for key in list(set.keys()):
self._queue.put(key)
i = self._queue.qsize()
- self._logger.debug('Queue size (after duplicate filter): %d', (i,))
+ self._logger.debug('Queue size (after duplicate filter): %d' % i)
return self._queue_get(timeout)
def _queue_get(self, timeout):
- if timeout is None:
+ if not timeout:
return self._queue.get()
timeout_time = time.time() + timeout
while True:
@@ -179,21 +178,21 @@ class DnotifyThread(threading.Thread):
os.close(outfd)
stdout = os.fdopen(infd)
c = 'x'
- while c != '':
+ while c:
curline = ''
c = stdout.read(1)
- while c != '' and c != '\0':
+ while c and c != '\0':
curline += c
c = stdout.read(1)
- if c == '':
+ if not c:
break
- self._logger.debug('Directory "%s" changed' % (curline,))
+ self._logger.debug('Directory "%s" changed' % curline)
self._queue.put(curline)
(pid, status) = os.waitpid(pid, 0)
- if status is None:
+ if not status:
ecode = 0
else:
ecode = os.WEXITSTATUS(status)
- raise DnotifyException("dnotify exited with code %s" % (ecode,))
+ raise DnotifyException("dnotify exited with code %s" % ecode)
# vim:ts=4:sw=4:et:
diff --git a/minidinstall/DpkgControl.py b/minidinstall/DpkgControl.py
index be08155..02e3567 100755
--- a/minidinstall/DpkgControl.py
+++ b/minidinstall/DpkgControl.py
@@ -1,7 +1,10 @@
-# DpkgControl.py
+#!/usr/bin/python3
+# DpkgControl -*- mode: python; coding: utf-8 -*-
#
# This module implements control file parsing.
#
+# Copyright (c) 2001 Adam Heath <doogie@debian.org>
+#
# DpkgParagraph is a low-level class, that reads/parses a single paragraph
# from a file object.
#
@@ -16,9 +19,7 @@
# To test this, pass it a filetype char, a filename, then, optionally,
# the key to a paragraph to display, and if a fourth arg is given, only
# show that field.
-#
-# Copyright 2001 Adam Heath <doogie@debian.org>
-#
+
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
@@ -33,20 +34,15 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-import re, string
from .DpkgDatalist import *
-from minidinstall.SignedFile import *
+from .SignedFile import *
class DpkgParagraph(DpkgOrderedDatalist):
- caseSensitive = 0
trueFieldCasing = {}
- def setCaseSensitive( self, value ): self.caseSensitive = value
-
def load( self, f ):
- "Paragraph data from a file object."
+        """Load paragraph data from a file object."""
key = None
- value = None
while True:
line = f.readline()
if not line:
@@ -57,85 +53,53 @@ class DpkgParagraph(DpkgOrderedDatalist):
continue
else:
return
- line = line[ :-1 ]
if line[ 0 ] != ' ':
- key, value = line.split( ":", 1 )
- if value: value = value[ 1: ]
- if not self.caseSensitive:
- newkey = key.lower()
- if key not in self.trueFieldCasing:
- self.trueFieldCasing[ newkey ] = key
- key = newkey
+ ( truekey, value ) = line.split( ":", 1 )
+ key = truekey.lower()
+ self.trueFieldCasing[ key ] = truekey
+ self[ key ] = value.strip()
else:
- if isinstance( value, list ):
- value.append( line[ 1: ] )
- else:
- value = [ value, line[ 1: ] ]
- self[ key ] = value
+ self[ key ] += "\n%s" % line.strip()
def _storeField( self, f, value, lead = " " ):
- if isinstance( value, list ):
- value = "\n".join(list(map( lambda v, lead = lead: v and ( lead + v ) or v, value )))
- else:
- if value: value = lead + value
- f.write( "%s\n" % ( value ) )
+ value = "\n".join(list(map( lambda v, lead = lead: lead + v if v else "", value.splitlines() )))
+ f.write( "%s\n" % value )
def _store( self, f ):
- "Write our paragraph data to a file object"
- for key in list(self.keys()):
- value = self[ key ]
- if key in self.trueFieldCasing:
- key = self.trueFieldCasing[ key ]
- f.write( "%s:" % key )
+ """Write our paragraph data to a file object"""
+ for ( key, value ) in list(self.items()):
+ truekey = self.trueFieldCasing[ key ]
+ f.write( "%s:" % truekey )
self._storeField( f, value )
-class DpkgControl(DpkgOrderedDatalist):
-
- key = "package"
- caseSensitive = 0
-
- def setkey( self, key ): self.key = key
- def setCaseSensitive( self, value ): self.caseSensitive = value
-
- def _load_one( self, f ):
- p = DpkgParagraph( None )
- p.setCaseSensitive( self.caseSensitive )
- p.load( f )
- return p
-
- def load( self, f ):
+class DpkgControl(DpkgParagraph):
+ def load( self, f, source = False ):
while True:
- p = self._load_one( f )
- if not p: break
- self[ p[ self.key ] ] = p
+ para = DpkgParagraph()
+ para.load( f )
+ if not para:
+ break
+ if "source" not in para:
+ self[ para[ "package" ] ] = para
+ elif source:
+ self[ "source" ] = para
def _store( self, f ):
- "Write our control data to a file object"
-
- for key in list(self.keys()):
- self[ key ]._store( f )
- f.write( "\n" )
-
-class DpkgSourceControl( DpkgControl ):
- source = None
-
+ """Write our control data to a file object"""
+ keys = list(self.keys())
+ while keys:
+ self[ keys.pop( 0 ) ]._store( f )
+ if keys:
+ f.write( "\n" )
+
+class DpkgSourceControl(DpkgControl):
def load( self, f ):
- f = SignedFile(f)
- self.source = self._load_one( f )
- DpkgControl.load( self, f )
-
- def __repr__( self ):
- return self.source.__repr__() + "\n" + DpkgControl.__repr__( self )
-
- def _store( self, f ):
- "Write our control data to a file object"
- self.source._store( f )
- f.write( "\n" )
- DpkgControl._store( self, f )
+ f = SignedFile( f )
+ DpkgControl.load( self, f, source = True )
if __name__ == "__main__":
import sys
- types = { 'p' : DpkgParagraph, 'c' : DpkgControl, 's' : DpkgSourceControl }
+ types = { 'p': DpkgParagraph, 'c': DpkgControl, 's': DpkgSourceControl }
type = sys.argv[ 1 ]
if type not in types:
print( "Unknown type `%s'!" % type )
@@ -144,11 +108,13 @@ if __name__ == "__main__":
data = types[ type ]()
data.load( file )
if len( sys.argv ) > 3:
- para = data[ sys.argv[ 3 ] ]
- if len( sys.argv ) > 4:
- para._storeField( sys.stdout, para[ sys.argv[ 4 ] ], "" )
+ rargs = sys.argv[ 3: ]
+ if type != 'p':
+ data = data[ rargs.pop( 0 ) ]
+ if rargs:
+ data._storeField( sys.stdout, data[ rargs[ 0 ].lower() ], "" )
else:
- para._store( sys.stdout )
+ data._store( sys.stdout )
else:
data._store( sys.stdout )
diff --git a/minidinstall/DpkgDatalist.py b/minidinstall/DpkgDatalist.py
index 68f9940..e7abbd2 100644
--- a/minidinstall/DpkgDatalist.py
+++ b/minidinstall/DpkgDatalist.py
@@ -1,10 +1,10 @@
-# DpkgDatalist.py
+# DpkgDatalist -*- mode: python; coding: utf-8 -*-
#
# This module implements DpkgDatalist, an abstract class for storing
# a list of objects in a file. Children of this class have to implement
# the load and _store methods.
#
-# Copyright 2001 Wichert Akkerman <wichert@linux.com>
+# Copyright (c) 2001 Wichert Akkerman <wichert@linux.com>
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
@@ -20,34 +20,31 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-import os, sys
-from collections import UserDict
-from collections import OrderedDict
-from minidinstall.SafeWriteFile import SafeWriteFile
+import sys
+from collections import UserDict, OrderedDict
+from .SafeWriteFile import SafeWriteFile
class DpkgDatalistException(Exception):
UNKNOWN = 0
SYNTAXERROR = 1
- def __init__(self, message="", reason=UNKNOWN, file=None, line=None):
+ def __init__(self, message=None, reason=UNKNOWN, file=None, line=None):
self.message=message
self.reason=reason
self.filename=file
self.line=line
class _DpkgDatalist:
- def __init__(self, fn=""):
- '''Initialize a DpkgDatalist object. An optional argument is a
- file from which we load values.'''
-
+ def __init__(self, fn=None):
+ """Initialize a DpkgDatalist object. An optional argument is a
+ file from which we load values."""
self.filename=fn
if self.filename:
self.load(self.filename)
def store(self, fn=None):
- "Store variable data in a file."
-
- if fn==None:
+ """Store variable data in a file."""
+ if not fn:
fn=self.filename
# Special case for writing to stdout
if not fn:
@@ -64,15 +61,13 @@ class _DpkgDatalist:
if isinstance(fn, str):
vf.close()
-
class DpkgDatalist(UserDict, _DpkgDatalist):
- def __init__(self, fn=""):
+ def __init__(self, fn=None):
UserDict.__init__(self)
_DpkgDatalist.__init__(self, fn)
-
class DpkgOrderedDatalist(OrderedDict, _DpkgDatalist):
- def __init__(self, fn=""):
+ def __init__(self, fn=None):
OrderedDict.__init__(self)
_DpkgDatalist.__init__(self, fn)
diff --git a/minidinstall/GPGSigVerifier.py b/minidinstall/GPGSigVerifier.py
index 2e0dee5..0d47379 100644
--- a/minidinstall/GPGSigVerifier.py
+++ b/minidinstall/GPGSigVerifier.py
@@ -2,7 +2,7 @@
# A class for verifying signed files
-# Copyright © 2002 Colin Walters <walters@gnu.org>
+# Copyright (c) 2002 Colin Walters <walters@gnu.org>
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
@@ -18,8 +18,8 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-import os, re, sys, string, stat
-from minidinstall import misc
+import os
+from . import misc
class GPGSigVerifierException(Exception):
def __init__(self, value):
@@ -40,10 +40,10 @@ class GPGSigVerificationFailure(Exception):
class GPGSigVerifier:
def __init__(self, keyrings, gpgv=None):
self._keyrings = keyrings
- if gpgv is None:
+ if not gpgv:
gpgv = '/usr/bin/gpgv'
if not os.access(gpgv, os.X_OK):
- raise GPGSigVerifierException("Couldn't execute \"%s\"" % (gpgv,))
+ raise GPGSigVerifierException("Couldn't execute \"%s\"" % gpgv)
self._gpgv = gpgv
def verify(self, filename, sigfilename=None):
@@ -53,25 +53,24 @@ class GPGSigVerifier:
os.close(stdin)
misc.dup2(stdout, 1)
misc.dup2(stdout, 2)
- args = []
+ args = [self._gpgv]
for keyring in self._keyrings:
- args.append('--keyring')
- args.append(keyring)
+ args.extend(['--keyring', keyring])
if sigfilename:
args.append(sigfilename)
- args = [self._gpgv] + args + [filename]
+ args.append(filename)
os.execv(self._gpgv, args)
os.exit(1)
os.close(stdout)
output = os.fdopen(stdin).readlines()
(pid, status) = os.waitpid(pid, 0)
- if not (status is None or (os.WIFEXITED(status) and os.WEXITSTATUS(status) == 0)):
+ if status or (not os.WIFEXITED(status) and os.WEXITSTATUS(status) != 0):
if os.WIFEXITED(status):
- msg = "gpgv exited with error code %d" % (os.WEXITSTATUS(status),)
+ msg = "gpgv exited with error code %d" % os.WEXITSTATUS(status)
elif os.WIFSTOPPED(status):
- msg = "gpgv stopped unexpectedly with signal %d" % (os.WSTOPSIG(status),)
+ msg = "gpgv stopped unexpectedly with signal %d" % os.WSTOPSIG(status)
elif os.WIFSIGNALED(status):
- msg = "gpgv died with signal %d" % (os.WTERMSIG(status),)
+ msg = "gpgv died with signal %d" % os.WTERMSIG(status)
raise GPGSigVerificationFailure(msg, output)
return output
diff --git a/minidinstall/OrderedDict.py b/minidinstall/OrderedDict.py
index 7c842b0..dab57db 100644
--- a/minidinstall/OrderedDict.py
+++ b/minidinstall/OrderedDict.py
@@ -1,10 +1,10 @@
-# OrderedDict.py
+# OrderedDict -*- mode: python; coding: utf-8 -*-
#
# This class functions almost exactly like UserDict. However, when using
# the sequence methods, it returns items in the same order in which they
# were added, instead of some random order.
#
-# Copyright 2001 Adam Heath <doogie@debian.org>
+# Copyright (c) 2001 Adam Heath <doogie@debian.org>
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
@@ -28,7 +28,7 @@ class OrderedDict(UserDict):
def __init__(self, dict=None):
UserDict.__init__(self)
self.__order=[]
- if dict is not None and dict.__class__ is not None:
+ if dict and dict.__class__:
self.update(dict)
def __cmp__(self, dict):
@@ -57,8 +57,8 @@ class OrderedDict(UserDict):
    def copy(self):
        if self.__class__ is OrderedDict:
            return OrderedDict(self)
-        import copy
-        return copy.copy(self)
+        from copy import copy
+        return copy(self)
def keys(self):
return self.__order
@@ -70,7 +69,7 @@ class OrderedDict(UserDict):
return list(map(lambda x, self=self: self.__getitem__(x), self.__order))
def update(self, dict):
- for k, v in list(dict.items()):
+ for (k, v) in list(dict.items()):
self.__setitem__(k, v)
# vim:ts=4:sw=4:et:
diff --git a/minidinstall/SafeWriteFile.py b/minidinstall/SafeWriteFile.py
index 591c4f0..38262f2 100755
--- a/minidinstall/SafeWriteFile.py
+++ b/minidinstall/SafeWriteFile.py
@@ -1,11 +1,12 @@
-# SafeWriteFile.py
+#!/usr/bin/python3
+# SafeWriteFile -*- mode: python; coding: utf-8 -*-
#
# This file is a writable file object. It writes to a specified newname,
# and when closed, renames the file to the realname. If the object is
# deleted, without being closed, this rename isn't done. If abort() is
# called, it also disables the rename.
#
-# Copyright 2001 Adam Heath <doogie@debian.org>
+# Copyright (c) 2001 Adam Heath <doogie@debian.org>
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
@@ -27,11 +28,9 @@ from os import rename
class ObjectNotAllowed(Exception):
pass
-
class InvalidMode(Exception):
pass
-
class SafeWriteFile:
def __init__(self, newname, realname, mode="w", bufsize=-1):
@@ -47,7 +46,7 @@ class SafeWriteFile:
self.fobj=open(newname, mode, bufsize)
self.newname=newname
self.realname=realname
- self.__abort=0
+ self.__abort=False
def close(self):
self.fobj.close()
@@ -55,7 +54,7 @@ class SafeWriteFile:
rename(self.newname, self.realname)
def abort(self):
- self.__abort=1
+ self.__abort=True
def __del__(self):
self.abort()
@@ -67,7 +66,6 @@ class SafeWriteFile:
except:
return eval("self.fobj." + attr)
-
if __name__ == "__main__":
import time
f=SafeWriteFile("sf.new", "sf.data")
diff --git a/minidinstall/SignedFile.py b/minidinstall/SignedFile.py
index efc4730..5a1f7f6 100755
--- a/minidinstall/SignedFile.py
+++ b/minidinstall/SignedFile.py
@@ -1,9 +1,10 @@
+#!/usr/bin/python3
# SignedFile -*- mode: python; coding: utf-8 -*-
# SignedFile offers a subset of file object operations, and is
# designed to transparently handle files with PGP signatures.
-# Copyright © 2002 Colin Walters <walters@gnu.org>
+# Copyright (c) 2002 Colin Walters <walters@gnu.org>
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
@@ -19,23 +20,22 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-import re,string
-
class SignedFile:
_stream = None
- _eof = 0
- _signed = 0
+ _eof = False
+ _signed = False
_signature = None
_signatureversion = None
_initline = None
+
def __init__(self, stream):
self._stream = stream
line = stream.readline()
- if (line == "-----BEGIN PGP SIGNED MESSAGE-----\n"):
- self._signed = 1
+ if line == "-----BEGIN PGP SIGNED MESSAGE-----\n":
+ self._signed = True
while True:
line = stream.readline()
- if (len(line) == 0 or line == '\n'):
+ if not line or line == '\n':
break
else:
self._initline = line
@@ -51,13 +51,13 @@ class SignedFile:
if not self._signed:
return line
elif line == "-----BEGIN PGP SIGNATURE-----\n":
- self._eof = 1
+ self._eof = True
self._signature = []
self._signatureversion = self._stream.readline()
self._stream.readline() # skip blank line
while True:
line = self._stream.readline()
- if len(line) == 0 or line == "-----END PGP SIGNATURE-----\n":
+ if not line or line == "-----END PGP SIGNATURE-----\n":
break
self._signature.append(line)
self._signature = ''.join(self._signature)
@@ -68,7 +68,7 @@ class SignedFile:
ret = []
while True:
line = self.readline()
- if (line != ''):
+ if line:
ret.append(line)
else:
break
@@ -86,22 +86,22 @@ class SignedFile:
def getSignatureVersion(self):
return self._signatureversion
-if __name__=="__main__":
+if __name__ == "__main__":
import sys
- if len(sys.argv) == 0:
+    if len(sys.argv) < 2:
print("Need one file as an argument")
sys.exit(1)
filename = sys.argv[1]
- f=SignedFile(open(filename))
+ f = SignedFile(open(filename))
if f.getSigned():
print("**** SIGNED ****")
else:
print("**** NOT SIGNED ****")
- lines=f.readlines()
+ lines = f.readlines()
print(lines)
if not f.getSigned():
assert(len(lines) == len(actuallines))
else:
- print("Signature: %s" % (f.getSignature()))
+ print("Signature: %s" % f.getSignature())
# vim:ts=4:sw=4:et:
diff --git a/minidinstall/mail.py b/minidinstall/mail.py
index 50df462..305d822 100644
--- a/minidinstall/mail.py
+++ b/minidinstall/mail.py
@@ -1,8 +1,8 @@
# mail -*- mode: python; coding: utf-8 -*-
-"""Simple mail support for mini-dinstall."""
+# Simple mail support for mini-dinstall.
-# Copyright © 2008 Stephan Sürken <absurd@debian.org>
+# Copyright (c) 2008 Stephan Sürken <absurd@debian.org>
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
@@ -21,24 +21,22 @@
import smtplib
import email.mime.text
import email.utils
-
import logging
def send(smtp_server, smtp_from, smtp_to, body, subject="mini-dinstall mail notice"):
"""Send email; on error, log and continue."""
-
logger = logging.getLogger("mini-dinstall")
- try:
- # Create a mime body
- mime_body = email.mime.text.MIMEText(body, 'plain', 'utf-8')
- mime_body['Subject'] = subject
- mime_body['From'] = smtp_from
- mime_body['To'] = smtp_to
- mime_body['Date'] = email.utils.formatdate(localtime=True)
- mime_body['Message-ID'] = email.utils.make_msgid()
- mime_body.add_header('X-Mini-Dinstall', 'YES')
+ # Create a mime body
+ mime_body = email.mime.text.MIMEText(body, 'plain', 'utf-8')
+ mime_body['Subject'] = subject
+ mime_body['From'] = smtp_from
+ mime_body['To'] = smtp_to
+ mime_body['Date'] = email.utils.formatdate(localtime=True)
+ mime_body['Message-ID'] = email.utils.make_msgid()
+ mime_body.add_header('X-Mini-Dinstall', 'YES')
+ try:
# Send via SMTP server
smtp = smtplib.SMTP(smtp_server)
smtp.sendmail(smtp_from, [smtp_to], mime_body.as_string())
diff --git a/minidinstall/misc.py b/minidinstall/misc.py
index 372c450..5f7fb71 100644
--- a/minidinstall/misc.py
+++ b/minidinstall/misc.py
@@ -2,7 +2,7 @@
# misc tools for mini-dinstall
-# Copyright © 2004 Thomas Viehmann <tv@beamnet.de>
+# Copyright (c) 2004 Thomas Viehmann <tv@beamnet.de>
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
@@ -18,26 +18,25 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-import os, errno, time, string, re, hashlib
+import os, errno, time, re, hashlib
-def dup2(fd,fd2):
+def dup2(fd, fd2):
# dup2 with EBUSY retries (cf. dup2(2) and Debian bug #265513)
- success = 0
+ success = False
tries = 0
- while (not success):
+ while not success:
try:
- os.dup2(fd,fd2)
- success = 1
+ os.dup2(fd, fd2)
+ success = True
except OSError as e:
- if (e.errno != errno.EBUSY) or (tries >= 3):
+ if e.errno != errno.EBUSY or tries >= 3:
raise
- # wait 0-2 seconds befor next try
+ # wait 0-2 seconds before next try
time.sleep(tries)
tries += 1
def format_changes(L):
""" remove changelog header and all lines with only a dot """
-
dotmatch = re.compile('^\.$')
L1 = []
@@ -48,16 +47,11 @@ def format_changes(L):
def get_file_sum(self, type, filename):
""" generate hash sums for file """
- if type == 'md5':
- sum = hashlib.md5()
- elif type == 'sha1':
- sum = hashlib.sha1()
- elif type == 'sha256':
- sum = hashlib.sha256()
+ sum = getattr(hashlib, type)()
self._logger.debug("Generate %s (python-internal) for %s" % (type, filename))
- f = open(filename,'rb')
+ f = open(filename, 'rb')
buf = f.read(8192)
- while buf != '':
+ while buf:
sum.update(buf)
buf = f.read(8192)
return sum.hexdigest()
diff --git a/minidinstall/tweet.py b/minidinstall/tweet.py
index 7106085..3a4a4ef 100644
--- a/minidinstall/tweet.py
+++ b/minidinstall/tweet.py
@@ -1,8 +1,8 @@
-# mail -*- mode: python; coding: utf-8 -*-
+# tweet -*- mode: python; coding: utf-8 -*-
-"""Simple tweet support for mini-dinstall."""
+# Simple tweet support for mini-dinstall.
-# Copyright © 2010 Christopher R. Gabriel <cgabriel@truelite.it>
+# Copyright (c) 2010 Christopher R. Gabriel <cgabriel@truelite.it>
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
@@ -18,10 +18,8 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-
import logging
import urllib.request
-import base64
def send(tweet_body, tweet_server, tweet_user, tweet_password):
"""Send tweet; on error, log and continue."""
@@ -37,8 +35,8 @@ def send(tweet_body, tweet_server, tweet_user, tweet_password):
if not post_url:
logger.exception("Unknown tweet site")
- if not tweet_user or not tweet_password:
- logger.exception("Missing username or password for twitting")
+ if not (tweet_user and tweet_password):
+ logger.exception("Missing username or password for tweeting")
auth_handler = urllib.request.HTTPBasicAuthHandler()
auth_handler.add_password(realm=auth_realm,
@@ -49,10 +47,9 @@ def send(tweet_body, tweet_server, tweet_user, tweet_password):
req = urllib.request.Request(post_url)
req.add_data("status=%s" % tweet_body)
- handle = None
try:
handle = m_http_opener.open(req)
- a = handle.read()
+ result = handle.read()
logger.info("Tweet sent to %s (%s)" % (tweet_server, tweet_user))
except Exception as e:
- logger.exception("Error sending tweet to %s ('%s') via %s: %s: %s", tweet_server, tweet_body, tweet_user, type(e), e.args)
+ logger.exception("Error sending tweet to %s ('%s') via %s: %s: %s" % (tweet_server, tweet_body, tweet_user, type(e), e.args))