-rwxr-xr-x  mini-dinstall                    230
-rw-r--r--  minidinstall/ChangeFile.py         8
-rw-r--r--  minidinstall/Dnotify.py           16
-rwxr-xr-x  minidinstall/DpkgControl.py       29
-rw-r--r--  minidinstall/DpkgDatalist.py      10
-rw-r--r--  minidinstall/GPGSigVerifier.py     4
-rw-r--r--  minidinstall/OrderedDict.py       12
-rwxr-xr-x  minidinstall/SafeWriteFile.py     10
-rwxr-xr-x  minidinstall/SignedFile.py        18
-rw-r--r--  minidinstall/mail.py               2
-rw-r--r--  minidinstall/misc.py               6
-rw-r--r--  minidinstall/tweet.py             10
12 files changed, 175 insertions(+), 180 deletions(-)
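The patch below is a straight Python 2 to Python 3 port. For orientation, here is a minimal, hypothetical before/after sketch of the idioms it converts over and over (print statement, exception syntax, octal literals, renamed stdlib modules, dict membership tests, bytes-aware sockets); the demo_* names and sample values are illustrative and do not come from mini-dinstall itself:

# Illustrative sketch only -- condensed Python 2 -> 3 conversions mirrored
# throughout this diff; demo_* names and sample values are hypothetical.
import os
import queue                            # Python 2: import Queue
import socketserver                     # Python 2: import SocketServer
from configparser import ConfigParser   # Python 2: from ConfigParser import *

incoming_permissions = 0o750            # Python 2 octal literal: 0750

def demo_exceptions(path):
    try:
        os.stat(path)
    except OSError as e:                # Python 2: except OSError, e:
        print(e)                        # Python 2: print e
        return repr(e)                  # Python 2: backticks, `e`
    return None

def demo_dicts_and_strings(options):
    # Python 2: options.has_key('keyids') and string.split(value, ', ')
    if 'keyids' in options:
        return options['keyids'].split(', ')
    # Python 2: map(lambda ...) returned a list; a comprehension is used instead
    return [key.lower() for key in list(options.keys())]

def demo_socket_text(sock):
    # Python 3 sockets carry bytes, so protocol strings are encoded/decoded
    sock.send('RUN\n'.encode('utf-8'))       # Python 2: sock.send('RUN\n')
    return sock.recv(8192).decode('utf-8')   # Python 2: plain str came back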
diff --git a/mini-dinstall b/mini-dinstall
index 82f0d4c..da881ab 100755
--- a/mini-dinstall
+++ b/mini-dinstall
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/python3
# -*- mode: python; coding: utf-8 -*-
# Miniature version of "dinstall", for installing .changes into an
# archive
@@ -19,12 +19,12 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import os, sys, re, glob, getopt, time, traceback, lzma, getpass, socket
-import shutil, signal, threading, select, Queue, SocketServer, datetime
+import shutil, signal, threading, select, queue, socketserver, datetime
import logging, logging.handlers
#logging.basicConfig()
import apt_pkg
apt_pkg.init()
-from ConfigParser import *
+from configparser import *
from minidinstall.ChangeFile import *
from minidinstall.Dnotify import *
@@ -64,7 +64,7 @@ mail_log_flush_level = logging.ERROR
mail_log_flush_count = 10
mail_to = getpass.getuser()
mail_server = 'localhost'
-incoming_permissions = 0750
+incoming_permissions = 0o750
tweet_server = 'identica'
tweet_user = None
tweet_password = None
@@ -86,32 +86,32 @@ Changes:
tweet_template = "Installed %(source)s %(version)s to %(distribution)s"
def usage(ecode, ver_only=None):
- print "mini-dinstall", pkg_version
+ print("mini-dinstall", pkg_version)
if ver_only:
sys.exit(ecode)
- print "Copyright (C) 2002 Colin Walters <walters@gnu.org>"
- print "Licensed under the GNU GPL."
- print "Usage: mini-dinstall [OPTIONS...] [DIRECTORY]"
- print "Options:"
- print " -v, --verbose\t\tDisplay extra information"
- print " -q, --quiet\t\tDisplay less information"
- print " -c, --config=FILE\tParse configuration info from FILE"
- print " -d, --debug\t\tOutput information to stdout as well as log"
- print " --no-log\t\tDon't write information to log file"
- print " -n, --no-act\t\tDon't actually perform changes"
- print " -b, --batch\t\tDon't daemonize; run once, then exit"
- print " -r, --run\t\tProcess queue immediately"
- print " -k, --kill\t\tKill the running mini-dinstall"
- print " --no-db\t\tDisable lookups on package database"
- print " --help\t\tWhat you're looking at"
- print " --version\t\tPrint the software version and exit"
+ print("Copyright (C) 2002 Colin Walters <walters@gnu.org>")
+ print("Licensed under the GNU GPL.")
+ print("Usage: mini-dinstall [OPTIONS...] [DIRECTORY]")
+ print("Options:")
+ print(" -v, --verbose\t\tDisplay extra information")
+ print(" -q, --quiet\t\tDisplay less information")
+ print(" -c, --config=FILE\tParse configuration info from FILE")
+ print(" -d, --debug\t\tOutput information to stdout as well as log")
+ print(" --no-log\t\tDon't write information to log file")
+ print(" -n, --no-act\t\tDon't actually perform changes")
+ print(" -b, --batch\t\tDon't daemonize; run once, then exit")
+ print(" -r, --run\t\tProcess queue immediately")
+ print(" -k, --kill\t\tKill the running mini-dinstall")
+ print(" --no-db\t\tDisable lookups on package database")
+ print(" --help\t\tWhat you're looking at")
+ print(" --version\t\tPrint the software version and exit")
sys.exit(ecode)
try:
opts, args = getopt.getopt(sys.argv[1:], 'vqc:dnbrk',
['verbose', 'quiet', 'config=', 'debug', 'no-log',
'no-act', 'batch', 'run', 'kill', 'no-db', 'help', 'version', ])
-except getopt.GetoptError, e:
+except getopt.GetoptError as e:
sys.stderr.write("Error reading arguments: %s\n" % e)
usage(1)
for (key, val) in opts:
@@ -181,8 +181,8 @@ def do_mkdir(name):
return
try:
do_and_log('Creating directory "%s"' % (name), os.mkdir, name)
- except OSError, e:
- print e
+ except OSError as e:
+ print(e)
exit(1)
def do_rename(source, target):
@@ -201,13 +201,13 @@ stderr_handler.setLevel(loglevel)
stderr_handler.setFormatter(logging.Formatter(fmt="%(name)s [%(thread)d] %(levelname)s: %(message)s"))
configp = ConfigParser()
-configfile_names = map(lambda x: os.path.abspath(os.path.expanduser(x)), configfile_names)
+configfile_names = [os.path.abspath(os.path.expanduser(x)) for x in configfile_names]
logger.debug("Reading config files: %s" % (configfile_names,))
configp.read(configfile_names)
class SubjectSpecifyingLoggingSMTPHandler(logging.handlers.SMTPHandler):
def __init__(self, *args, **kwargs):
- apply(logging.handlers.SMTPHandler.__init__, [self] + list(args) + ['dummy'], kwargs)
+ logging.handlers.SMTPHandler.__init__(*[self] + list(args) + ['dummy'], **kwargs)
def setSubject(self, subject):
self._subject = subject
@@ -255,7 +255,7 @@ lockfilename = os.path.join(dinstall_subdir, 'mini-dinstall.lock')
def process_exists(pid):
try:
os.kill(pid, 0)
- except OSError, e:
+ except OSError as e:
return 0
return 1
@@ -279,13 +279,13 @@ if run_mode or kill_mode:
sock.connect(socket_name)
if run_mode:
logger.debug('Sending RUN command')
- sock.send('RUN\n')
+ sock.send('RUN\n'.encode('utf-8'))
else:
logger.debug('Sending DIE command')
- sock.send('DIE\n')
+ sock.send('DIE\n'.encode('utf-8'))
logger.debug('Reading response')
- response = sock.recv(8192)
- print response
+ response = sock.recv(8192).decode('utf-8')
+ print(response)
sys.exit(0)
if configp.has_option('DEFAULT', 'logfile'):
@@ -309,7 +309,7 @@ class DinstallException(Exception):
def __init__(self, value):
self._value = value
def __str__(self):
- return `self._value`
+ return repr(self._value)
if not configp.has_option('DEFAULT', 'archive_style'):
logger.critical("You must set the default archive_style option (since version 0.4.0)")
@@ -321,7 +321,7 @@ default_extra_keyrings = []
default_keyrings = None
if configp.has_option('DEFAULT', 'architectures'):
- default_architectures = string.split(configp.get('DEFAULT', 'architectures'), ', ')
+ default_architectures = configp.get('DEFAULT', 'architectures').split(', ')
if configp.has_option('DEFAULT', 'verify_sigs'):
default_verify_sigs = configp.getboolean('DEFAULT', 'verify_sigs')
if configp.has_option('DEFAULT', 'trigger_reindex'):
@@ -335,7 +335,7 @@ if configp.has_option('DEFAULT', 'expire_release_files'):
if configp.has_option('DEFAULT', 'extra_keyrings'):
default_extra_keyrings = re.split(', ?', configp.get('DEFAULT', 'extra_keyrings'))
if configp.has_option('DEFAULT', 'keyids'):
- keyids = string.split(configp.get('DEFAULT', 'keyids'), ', ')
+ keyids = configp.get('DEFAULT', 'keyids').split(', ')
if configp.has_option('DEFAULT', 'keyrings'):
default_keyrings = re.split(', ?', configp.get('DEFAULT', 'keyrings'))
if configp.has_option('DEFAULT', 'use_byhash'):
@@ -361,7 +361,7 @@ if not len(sects) == 0:
for sect in sects:
distributions[sect] = {}
if configp.has_option(sect, "architectures"):
- distributions[sect]["arches"] = string.split(configp.get(sect, "architectures"), ', ')
+ distributions[sect]["arches"] = configp.get(sect, "architectures").split(', ')
else:
distributions[sect]["arches"] = default_architectures
else:
@@ -404,7 +404,7 @@ class DistOptionHandler:
def get_option_map(self, dist):
ret = self._distributions[dist]
- for key in self._optionmap.keys():
+ for key in list(self._optionmap.keys()):
type = self._optionmap[key][0]
ret[key] = self._optionmap[key][1]
if self._configp.has_option ('DEFAULT', key):
@@ -427,7 +427,7 @@ class DistOptionHandler:
distoptionhandler = DistOptionHandler(distributions, configp)
-for dist in distributions.keys():
+for dist in list(distributions.keys()):
distributions[dist] = distoptionhandler.get_option_map(dist)
if not distributions[dist]['archive_style'] in ('simple-subdir', 'flat'):
raise DinstallException("Unknown archive style \"%s\"" % (distributions[dist]['archive_style'],))
@@ -480,11 +480,11 @@ reprocess_needed = threading.Event()
reprocess_finished = threading.Event()
reprocess_lock = threading.Lock()
-class IncomingDirRequestHandler(SocketServer.StreamRequestHandler, SocketServer.BaseRequestHandler):
+class IncomingDirRequestHandler(socketserver.StreamRequestHandler, socketserver.BaseRequestHandler):
def handle(self):
logger.debug('Got request from %s' % (self.client_address,))
- req = self.rfile.readline()
- if req == 'RUN\n':
+ req = self.rfile.readline().strip().decode('utf-8')
+ if req == 'RUN':
logger.debug('Doing RUN command')
reprocess_lock.acquire()
reprocess_needed.set()
@@ -492,16 +492,16 @@ class IncomingDirRequestHandler(SocketServer.StreamRequestHandler, SocketServer.
reprocess_finished.wait()
reprocess_finished.clear()
reprocess_lock.release()
- self.wfile.write('200 Reprocessing complete\n')
- elif req == 'DIE\n':
+ self.wfile.write('200 Reprocessing complete'.encode('utf-8'))
+ elif req == 'DIE':
logger.debug('Doing DIE command')
- self.wfile.write('200 Beginning shutdown\n')
+ self.wfile.write('200 Beginning shutdown'.encode('utf-8'))
die_event.set()
else:
logger.debug('Got unknown command %s' % (req,))
- self.wfile.write('500 Unknown request\n')
+ self.wfile.write('500 Unknown request'.encode('utf-8'))
-class ExceptionThrowingThreadedUnixStreamServer(SocketServer.ThreadingUnixStreamServer):
+class ExceptionThrowingThreadedUnixStreamServer(socketserver.ThreadingUnixStreamServer):
def handle_error(self, request, client_address):
self._logger.exception("Unhandled exception during request processing; shutting down")
die_event.set()
@@ -517,7 +517,7 @@ class IncomingDir(threading.Thread):
self._batch_mode = batch_mode
self._max_retry_time = max_retry_time
self._last_failed_targets = {}
- self._eventqueue = Queue.Queue()
+ self._eventqueue = queue.Queue()
self._done_event = threading.Event()
# ensure we always have some reprocess queue
self._reprocess_queue = {}
@@ -542,14 +542,14 @@ class IncomingDir(threading.Thread):
self._daemonize(initial_reprocess_queue, initial_fucked_list)
self._done_event.set()
self._logger.info('All packages in incoming dir installed; exiting')
- except Exception, e:
+ except Exception as e:
self._logger.exception("Unhandled exception; shutting down")
die_event.set()
self._done_event.set()
return 0
def _abspath(self, *args):
- return os.path.abspath(apply(os.path.join, [self._dir] + list(args)))
+ return os.path.abspath(os.path.join(*[self._dir] + list(args)))
def _get_changefiles(self):
ret = []
@@ -557,7 +557,7 @@ class IncomingDir(threading.Thread):
self._logger.debug("glob: " + globpath)
changefilenames = glob.glob(globpath)
for changefilename in changefilenames:
- if not self._reprocess_queue.has_key(changefilename):
+ if changefilename not in self._reprocess_queue:
self._logger.info('Examining "%s"' % (changefilename,))
changefile = ChangeFile()
try:
@@ -573,7 +573,7 @@ class IncomingDir(threading.Thread):
def _changefile_ready(self, changefilename, changefile):
try:
dist = changefile['distribution']
- except KeyError, e:
+ except KeyError as e:
self._logger.warn("Unable to read distribution field for \"%s\"; data: %s" % (changefilename, changefile,))
return 0
try:
@@ -584,14 +584,14 @@ class IncomingDir(threading.Thread):
def _install_changefile(self, changefilename, changefile, doing_reprocess):
changefiledist = changefile['distribution']
- for dist in distributions.keys():
+ for dist in list(distributions.keys()):
distributions[dist] = distoptionhandler.get_option_map(dist)
if distributions[dist]['alias'] != None and changefiledist in distributions[dist]['alias']:
logger.info('Distribution "%s" is an alias for "%s"' % (changefiledist, dist))
break
else:
dist = changefiledist
- if not dist in self._archivemap.keys():
+ if not dist in list(self._archivemap.keys()):
raise DinstallException('Unknown distribution "%s" in \"%s\"' % (dist, changefilename,))
logger.debug('Installing %s in archive %s' % (changefilename, self._archivemap[dist][1].getName()))
self._archivemap[dist][0].install(changefilename, changefile)
@@ -619,7 +619,7 @@ class IncomingDir(threading.Thread):
def _daemon_reprocess_pending(self):
curtime = time.time()
- for changefilename in self._reprocess_queue.keys():
+ for changefilename in list(self._reprocess_queue.keys()):
(starttime, nexttime, delay) = self._reprocess_queue[changefilename]
if curtime >= nexttime:
return 1
@@ -632,7 +632,7 @@ class IncomingDir(threading.Thread):
self._async_dnotify.start()
try:
os.unlink(socket_name)
- except OSError, e:
+ except OSError as e:
pass
self._server = ExceptionThrowingThreadedUnixStreamServer(socket_name, IncomingDirRequestHandler)
self._server.allow_reuse_address = 1
@@ -646,7 +646,7 @@ class IncomingDir(threading.Thread):
self._reprocess_queue[changefilename] = [curtime, curtime, retry_time]
# The main daemon loop
- while 1:
+ while True:
# Wait until we have something to do
while not (self._daemon_event_ispending() or self._daemon_reprocess_pending()):
time.sleep(0.5)
@@ -663,13 +663,13 @@ class IncomingDir(threading.Thread):
self._logger.debug('Scanning for changes')
# do we have anything to reprocess?
- for changefilename in self._reprocess_queue.keys():
+ for changefilename in list(self._reprocess_queue.keys()):
(starttime, nexttime, delay) = self._reprocess_queue[changefilename]
curtime = time.time()
try:
changefile = ChangeFile()
changefile.load_from_file(changefilename)
- except (ChangeFileException,IOError), e:
+ except (ChangeFileException,IOError) as e:
if not os.path.exists(changefilename):
self._logger.info('Changefile "%s" got removed' % (changefilename,))
else:
@@ -689,7 +689,7 @@ class IncomingDir(threading.Thread):
self._install_changefile(changefilename, changefile, doing_reprocess)
self._logger.debug('Removing "%s" from incoming queue after successful install.' % (changefilename,))
del self._reprocess_queue[changefilename]
- except Exception, e:
+ except Exception as e:
logger.exception("Unable to install \"%s\"; adding to screwed list" % (changefilename,))
fucked.append(changefilename)
else:
@@ -718,12 +718,12 @@ class IncomingDir(threading.Thread):
self._logger.warn("Skipping screwed changefile \"%s\"" % (changefilename,))
continue
# Have we tried this changefile before?
- if not self._reprocess_queue.has_key(changefilename):
+ if changefilename not in self._reprocess_queue:
self._logger.debug('New change file "%s"' % (changefilename,))
if self._changefile_ready(changefilename, changefile):
try:
self._install_changefile(changefilename, changefile, doing_reprocess)
- except Exception, e:
+ except Exception as e:
logger.exception("Unable to install \"%s\"; adding to screwed list" % (changefilename,))
fucked.append(changefilename)
else:
@@ -749,12 +749,12 @@ class ArchiveDir:
self._dir = dir
self._name = os.path.basename(os.path.abspath(dir))
self._logger = logger
- for key in configdict.keys():
+ for key in list(configdict.keys()):
self._logger.debug("Setting \"%s\" => \"%s\" in archive \"%s\"" % ('_'+key, configdict[key], self._name))
self.__dict__['_' + key] = configdict[key]
do_mkdir(dir)
self._batch_mode = batch_mode
- if configdict.has_key('verify_sigs'):
+ if 'verify_sigs' in configdict:
self._verify_sigs = configdict['verify_sigs']
else:
self._verify_sigs = verify_sigs
@@ -780,10 +780,10 @@ class ArchiveDir:
# self._changefiles = []
def _abspath(self, *args):
- return os.path.abspath(apply(os.path.join, [self._dir] + list(args)))
+ return os.path.abspath(os.path.join(*[self._dir] + list(args)))
def _relpath(self, *args):
- return apply(os.path.join, [self._name] + list(args))
+ return os.path.join(*[self._name] + list(args))
def install(self, changefilename, changefile):
retval = 0
@@ -805,15 +805,15 @@ class ArchiveDir:
self._logger.info('Verifying signature on "%s"' % (changefilename,))
try:
if self._keyrings:
- verifier = DebianSigVerifier(keyrings=map(os.path.expanduser, self._keyrings), extra_keyrings=self._extra_keyrings)
+ verifier = DebianSigVerifier(keyrings=list(map(os.path.expanduser, self._keyrings)), extra_keyrings=self._extra_keyrings)
else:
verifier = DebianSigVerifier(extra_keyrings=self._extra_keyrings)
output = verifier.verify(changefilename)
logger.debug(output)
logger.info('Good signature on "%s"' % (changefilename,))
- except GPGSigVerificationFailure, e:
+ except GPGSigVerificationFailure as e:
msg = "Failed to verify signature on \"%s\": %s\n" % (changefilename, e)
- msg += string.join(e.getOutput(), '')
+ msg += ''.join(e.getOutput())
logger.error(msg)
self._reject_changefile(changefilename, changefile, e)
return 0
@@ -829,12 +829,12 @@ class ArchiveDir:
return 0
try:
self._install_changefile_internal(changefilename, changefile)
- except Exception, e:
+ except Exception as e:
self._logger.exception('Failed to process "%s"' % (changefilename,))
self._reject_changefile(changefilename, changefile, e)
return 0
if self._chown_changes_files:
- do_chmod(changefilename, 0600)
+ do_chmod(changefilename, 0o600)
target = os.path.join(self._dir, os.path.basename(changefilename))
# the final step
do_rename(changefilename, target)
@@ -842,13 +842,13 @@ class ArchiveDir:
if self._mail_on_success:
done = False
missing_fields = []
- if changefile.has_key('changes'):
+ if 'changes' in changefile:
changefile ['changes_without_dot'] = misc.format_changes(changefile['changes'])
while not done:
try:
mail_subject = mail_subject_template % changefile
mail_body = mail_body_template % changefile
- except KeyError, exc:
+ except KeyError as exc:
key = exc.args[0]
changefile[key] = ''
missing_fields.append(key)
@@ -861,12 +861,12 @@ class ArchiveDir:
if self._tweet_on_success:
done = False
missing_fields = []
- if changefile.has_key('changes'):
+ if 'changes' in changefile:
changefile ['changes_without_dot'] = misc.format_changes(changefile['changes'])
while not done:
try:
tweet_body = tweet_template % changefile
- except KeyError, exc:
+ except KeyError as exc:
key = exc.args[0]
changefile[key] = ''
missing_fields.append(key)
@@ -896,7 +896,7 @@ class ArchiveDir:
else:
(newupstreamver, newdebianver) = parse_versions(version)
is_sourceful = 0
- for file in map(lambda x: x[2], changefile.getFiles()):
+ for file in [x[2] for x in changefile.getFiles()]:
match = debpackage_re.search(file)
if match:
arch = match.group(3)
@@ -921,16 +921,16 @@ class ArchiveDir:
newfiles.append((os.path.join(incomingdir, file), target, match.group(1), 'source'))
all_arches = {}
- for arch in map(lambda x: x[3], newfiles):
+ for arch in [x[3] for x in newfiles]:
all_arches[arch] = 1
completed = []
oldfiles = []
if not self._keep_old:
found_old_bins = 0
- for (oldversion, oldarch) in map(lambda x: x[1:], self._get_package_versions()):
- if not all_arches.has_key(oldarch) and apt_pkg.version_compare(oldversion, version) < 0:
+ for (oldversion, oldarch) in [x[1:] for x in self._get_package_versions()]:
+ if oldarch not in all_arches and apt_pkg.version_compare(oldversion, version) < 0:
found_old_bins = 1
- for (pkgname, arch) in map(lambda x: x[2:], newfiles):
+ for (pkgname, arch) in [x[2:] for x in newfiles]:
if arch == 'source' and found_old_bins:
continue
self._logger.debug('Scanning for old files')
@@ -941,14 +941,14 @@ class ArchiveDir:
oldpkgname = match.group(1)
oldarch = match.group(3)
file = self._arch_target(arch, file)
- if not file in map(lambda x: x[0], oldfiles):
+ if not file in [x[0] for x in oldfiles]:
target = file + tmp_old_suffix
if oldpkgname == pkgname and oldarch == arch:
oldfiles.append((file, target))
self._logger.debug('Scanning "%s" for old files' % (self._abspath('source')))
for file in self._read_source_dir():
file = self._source_target(file)
- if not file in map(lambda x: x[0], oldfiles):
+ if not file in [x[0] for x in oldfiles]:
target = file + tmp_old_suffix
match = debchanges_re.search(file)
if not match and is_sourceful:
@@ -977,22 +977,22 @@ class ArchiveDir:
oldfiles.append((file, target))
continue
match = debsrc_native_re.search(file)
- if match and match.group(1) in map(lambda x: x[2], newfiles):
+ if match and match.group(1) in [x[2] for x in newfiles]:
oldfiles.append((file, target))
continue
- self._clean_targets = map(lambda x: x[1], oldfiles)
- allrenames = oldfiles + map(lambda x: x[:2], newfiles)
+ self._clean_targets = [x[1] for x in oldfiles]
+ allrenames = oldfiles + [x[:2] for x in newfiles]
try:
while not allrenames == []:
(oldname, newname) = allrenames[0]
do_rename(oldname, newname)
completed.append(allrenames[0])
allrenames = allrenames[1:]
- except OSError, e:
+ except OSError as e:
logger.exception("Failed to do rename (%s); attempting rollback" % (e.strerror,))
try:
- self._logger.error(traceback.format_tb(sys.exc_traceback))
+ self._logger.error(traceback.format_tb(sys.exc_info()[2]))
except:
pass
# Unwind to previous state
@@ -1028,12 +1028,12 @@ class ArchiveDir:
incomingdir = os.path.dirname(changefilename)
try:
f = open(os.path.join(rejectdir, "%s_%s.reason" % (sourcename, version)), 'w')
- if type(exception) == type('string'):
+ if isinstance(exception, str):
f.write(exception)
else:
traceback.print_exception(Exception, exception, None, None, f)
f.close()
- for file in map(lambda x: x[2], changefile.getFiles()):
+ for file in [x[2] for x in changefile.getFiles()]:
if os.access(os.path.join(incomingdir, file), os.R_OK):
file = os.path.join(incomingdir, file)
else:
@@ -1041,7 +1041,7 @@ class ArchiveDir:
target = os.path.join(rejectdir, os.path.basename(file))
do_rename(file, target)
do_rename(changefilename, os.path.join(rejectdir, os.path.basename(changefilename)))
- self._logger.info('Rejecting "%s": %s' % (changefilename, `exception`))
+ self._logger.info('Rejecting "%s": %s' % (changefilename, repr(exception)))
except Exception:
self._logger.error("Unhandled exception while rejecting %s; archive may be in inconsistent state" % (changefilename,))
raise
@@ -1055,7 +1055,7 @@ class ArchiveDir:
class SimpleSubdirArchiveDir(ArchiveDir):
def __init__(self, *args, **kwargs):
- apply(ArchiveDir.__init__, [self] + list(args), kwargs)
+ ArchiveDir.__init__(*[self] + list(args), **kwargs)
for arch in list(self._arches) + ['source']:
target = os.path.join(self._dir, arch)
do_mkdir(target)
@@ -1109,8 +1109,8 @@ class ArchiveDirIndexer(threading.Thread):
self._name = os.path.basename(os.path.abspath(dir))
threading.Thread.__init__(self, name=self._name)
self._logger = logger
- self._eventqueue = Queue.Queue()
- for key in configdict.keys():
+ self._eventqueue = queue.Queue()
+ for key in list(configdict.keys()):
self._logger.debug("Setting \"%s\" => \"%s\" in archive \"%s\"" % ('_'+key, configdict[key], self._name))
self.__dict__['_' + key] = configdict[key]
do_mkdir(dir)
@@ -1119,10 +1119,10 @@ class ArchiveDirIndexer(threading.Thread):
self._done_event = threading.Event()
def _abspath(self, *args):
- return os.path.abspath(apply(os.path.join, [self._dir] + list(args)))
+ return os.path.abspath(os.path.join(*[self._dir] + list(args)))
def _relpath(self, *args):
- return apply(os.path.join, [self._name] + list(args))
+ return os.path.join(*[self._name] + list(args))
def _make_indexfile(self, dir, type, name, arch=None):
cmdline = ['apt-ftparchive', type, dir,
@@ -1134,7 +1134,7 @@ class ArchiveDirIndexer(threading.Thread):
if not nodb_mode:
cmdline += ['--db', '%s.db' %dir]
- self._logger.debug("Running: " + string.join(cmdline, ' '))
+ self._logger.debug("Running: " + ' '.join(cmdline))
if no_act:
return
(infd, outfd) = os.pipe()
@@ -1156,7 +1156,7 @@ class ArchiveDirIndexer(threading.Thread):
xzpackagesfilename = packagesfilename + '.xz'
newxzpackagesfilename = newpackagesfilename + '.xz'
newpackagesfile = open(newpackagesfilename, 'w')
- newxzpackagesfile = lzma.LZMAFile(newxzpackagesfilename, 'w')
+ newxzpackagesfile = lzma.open(newxzpackagesfilename, 'wt')
buf = stdout.read(8192)
while buf != '':
newpackagesfile.write(buf)
@@ -1262,7 +1262,7 @@ class ArchiveDirIndexer(threading.Thread):
# never returns
self._daemonize()
self._done_event.set()
- except Exception, e:
+ except Exception as e:
self._logger.exception("Unhandled exception; shutting down")
die_event.set()
self._done_event.set()
@@ -1279,7 +1279,7 @@ class ArchiveDirIndexer(threading.Thread):
self._async_dnotify.start()
# The main daemon loop
- while 1:
+ while True:
# Wait until we have a pending event
while not self._daemon_event_ispending():
@@ -1292,10 +1292,10 @@ class ArchiveDirIndexer(threading.Thread):
setevent = None
dir = None
obj = self._eventqueue.get()
- if type(obj) == type(''):
+ if isinstance(obj, str):
self._logger.debug('got dir change')
dir = obj
- elif type(obj) == type(None):
+ elif obj is None:
self._logger.debug('got general event')
setevent = None
elif obj.__class__ == threading.Event().__class__:
@@ -1359,7 +1359,7 @@ class ArchiveDirIndexer(threading.Thread):
class SimpleSubdirArchiveDirIndexer(ArchiveDirIndexer):
def __init__(self, *args, **kwargs):
- apply(ArchiveDirIndexer.__init__, [self] + list(args), kwargs)
+ ArchiveDirIndexer.__init__(*[self] + list(args), **kwargs)
for arch in list(self._arches) + ['source']:
target = os.path.join(self._dir, arch)
do_mkdir(target)
@@ -1393,7 +1393,7 @@ class SimpleSubdirArchiveDirIndexer(ArchiveDirIndexer):
self._logger.info("Release generation disabled, removing existing Release file")
try:
os.unlink(targetname)
- except OSError, e:
+ except OSError as e:
pass
return
tmpname = targetname + tmp_new_suffix
@@ -1441,7 +1441,7 @@ class SimpleSubdirArchiveDirIndexer(ArchiveDirIndexer):
f.write('Valid-Until: ' + (datetime.datetime.utcnow() + datetime.timedelta(days=28)).strftime("%a, %d %b %Y %H:%M:%S UTC") + '\n')
f.write('Architectures: ' + arch + '\n')
if self._keyids:
- f.write('Signed-By: ' + string.join(self._keyids, ',') + '\n')
+ f.write('Signed-By: ' + ','.join(self._keyids) + '\n')
if self._use_byhash:
f.write('Acquire-By-Hash: yes\n')
if self._release_description:
@@ -1454,17 +1454,17 @@ class SimpleSubdirArchiveDirIndexer(ArchiveDirIndexer):
self._logger.info("Release generation complete")
def _in_archdir(self, *args):
- return apply(lambda x,self=self: self._abspath(x), args)
+ return (lambda x,self=self: self._abspath(x)) (*args)
def _get_dnotify_dirs(self):
- return map(lambda x, self=self: self._abspath(x), self._arches + ['source'])
+ return list(map(lambda x, self=self: self._abspath(x), self._arches + ['source']))
def _get_all_indexfiles(self):
- return map(lambda arch: os.path.join(arch, 'Packages'), self._arches) + ['source/Sources']
+ return [os.path.join(arch, 'Packages') for arch in self._arches] + ['source/Sources']
class FlatArchiveDirIndexer(ArchiveDirIndexer):
def __init__(self, *args, **kwargs):
- apply(ArchiveDirIndexer.__init__, [self] + list(args), kwargs)
+ ArchiveDirIndexer.__init__(*[self] + list(args), **kwargs)
def _index_impl(self, arches, force=None):
pkgsfile = self._abspath('Packages')
@@ -1501,7 +1501,7 @@ class FlatArchiveDirIndexer(ArchiveDirIndexer):
self._logger.info("Release generation disabled, removing existing Release file")
try:
os.unlink(targetname)
- except OSError, e:
+ except OSError as e:
pass
return
tmpname = targetname + tmp_new_suffix
@@ -1549,9 +1549,9 @@ class FlatArchiveDirIndexer(ArchiveDirIndexer):
f.write('Date: ' + time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime()) + '\n')
if self._expire_release_files or self._keyids:
f.write('Valid-Until: ' + (datetime.datetime.utcnow() + datetime.timedelta(days=28)).strftime("%a, %d %b %Y %H:%M:%S UTC") + '\n')
- f.write('Architectures: ' + string.join(self._arches, ' ') + '\n')
+ f.write('Architectures: ' + ' '.join(self._arches) + '\n')
if self._keyids:
- f.write('Signed-By: ' + string.join(self._keyids, ',') + '\n')
+ f.write('Signed-By: ' + ','.join(self._keyids) + '\n')
if self._use_byhash:
f.write('Acquire-By-Hash: yes\n')
if self._release_description:
@@ -1564,7 +1564,7 @@ class FlatArchiveDirIndexer(ArchiveDirIndexer):
self._logger.info("Release generation complete")
def _in_archdir(self, *args):
- return apply(lambda x,self=self: self._abspath(x), args[1:])
+ return (lambda x,self=self: self._abspath(x))(*args[1:])
def _get_dnotify_dirs(self):
return [self._dir]
@@ -1617,7 +1617,7 @@ if not (debug_mode or batch_mode):
archivemap = {}
# Instantiaate archive classes for installing files
-for dist in distributions.keys():
+for dist in list(distributions.keys()):
if distributions[dist]['archive_style'] == 'simple-subdir':
newclass = SimpleSubdirArchiveDir
else:
@@ -1625,7 +1625,7 @@ for dist in distributions.keys():
archivemap[dist] = [newclass(dist, logger, distributions[dist], batch_mode=batch_mode, keyrings=default_keyrings, extra_keyrings=default_extra_keyrings, verify_sigs=default_verify_sigs), None]
# Create archive indexing threads, but don't start them yet
-for dist in distributions.keys():
+for dist in list(distributions.keys()):
targetdir = os.path.join(toplevel_directory, dist)
logger.info('Initializing archive indexer %s' % (dist,))
if distributions[dist]['archive_style'] == 'simple-subdir':
@@ -1645,7 +1645,7 @@ if batch_mode:
incoming.wait()
# Once we've installed everything, start the indexing threads
-for dist in distributions.keys():
+for dist in list(distributions.keys()):
archive = archivemap[dist][1]
logger.debug('Starting archive %s' % (archive.getName(),))
archive.start()
@@ -1653,7 +1653,7 @@ for dist in distributions.keys():
# Wait for all the indexing threads to finish; none of these ever
# return if we're in daemon mode
if batch_mode:
- for dist in distributions.keys():
+ for dist in list(distributions.keys()):
archive = archivemap[dist][1]
logger.debug('Waiting for archive %s to finish' % (archive.getName(),))
archive.wait()
@@ -1662,7 +1662,7 @@ else:
die_event.wait()
logger.info('Die event caught; waiting for incoming processor to finish')
incoming.wait()
- for dist in distributions.keys():
+ for dist in list(distributions.keys()):
archive = archivemap[dist][1]
logger.info('Die event caught; waiting for archive %s to finish' % (archive.getName(),))
archive.wait()
diff --git a/minidinstall/ChangeFile.py b/minidinstall/ChangeFile.py
index 702069e..3b0cf48 100644
--- a/minidinstall/ChangeFile.py
+++ b/minidinstall/ChangeFile.py
@@ -19,7 +19,7 @@
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import os, re, sys, string, stat
-import threading, Queue
+import threading, queue
import logging
from minidinstall import DpkgControl, SignedFile
from minidinstall import misc
@@ -28,7 +28,7 @@ class ChangeFileException(Exception):
def __init__(self, value):
self._value = value
def __str__(self):
- return `self._value`
+ return repr(self._value)
class ChangeFile(DpkgControl.DpkgParagraph):
md5_re = r'^(?P<md5>[0-9a-f]{32})[ \t]+(?P<size>\d+)[ \t]+(?P<section>[-/a-zA-Z0-9]+)[ \t]+(?P<priority>[-a-zA-Z0-9]+)[ \t]+(?P<file>[0-9a-zA-Z][-+:.,=~0-9a-zA-Z_]+)$'
@@ -84,7 +84,7 @@ class ChangeFile(DpkgControl.DpkgParagraph):
def verify(self, sourcedir):
""" verify size and hash values from changes file """
checksum = self._get_checksum_from_changes()
- for hash in checksum.keys():
+ for hash in list(checksum.keys()):
for (hashsum, size, filename) in checksum[hash]:
self._verify_file_integrity(os.path.join(sourcedir, filename), int(size), hash, hashsum)
@@ -97,7 +97,7 @@ class ChangeFile(DpkgControl.DpkgParagraph):
if not stat.S_ISREG(statbuf[stat.ST_MODE]):
raise ChangeFileException("%s is not a regular file" % (filename,))
size = statbuf[stat.ST_SIZE]
- except OSError, e:
+ except OSError as e:
raise ChangeFileException("Can't stat %s: %s" % (filename,e.strerror))
if size != expected_size:
raise ChangeFileException("File size for %s does not match that specified in .dsc" % (filename,))
diff --git a/minidinstall/Dnotify.py b/minidinstall/Dnotify.py
index e31080c..18606e1 100644
--- a/minidinstall/Dnotify.py
+++ b/minidinstall/Dnotify.py
@@ -18,7 +18,7 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-import os, re, sys, string, stat, threading, Queue, time
+import os, re, sys, string, stat, threading, queue, time
import logging
from minidinstall import misc
@@ -26,7 +26,7 @@ class DnotifyException(Exception):
def __init__(self, value):
self._value = value
def __str__(self):
- return `self._value`
+ return repr(self._value)
class DirectoryNotifierFactory:
def create(self, dirs, use_dnotify=1, poll_time=30, logger=None, cancel_event=None):
@@ -101,7 +101,7 @@ class MtimeDirectoryNotifier(DirectoryNotifier):
if timeout_time and time.time() > timeout_time:
return None
self._logger.debug('Polling...')
- for dir in self._dirmap.keys():
+ for dir in list(self._dirmap.keys()):
oldtime = self._dirmap[dir]
mtime = os.stat(os.path.join(self._cwd, dir))[stat.ST_MTIME]
if oldtime < mtime:
@@ -120,7 +120,7 @@ class MtimeDirectoryNotifier(DirectoryNotifier):
class DnotifyDirectoryNotifier(DirectoryNotifier):
def __init__(self, dirs, logger):
DirectoryNotifier.__init__(self, dirs, logger)
- self._queue = Queue.Queue()
+ self._queue = queue.Queue()
dnotify = DnotifyThread(self._queue, self._dirs, self._logger)
dnotify.start()
@@ -134,12 +134,12 @@ class DnotifyDirectoryNotifier(DirectoryNotifier):
if dir is None:
# We shouldn't have to do this; no one else is reading
# from the queue. But we do it just to be safe.
- for key in set.keys():
+ for key in list(set.keys()):
self._queue.put(key)
return None
set[dir] = 1
i -= 1
- for key in set.keys():
+ for key in list(set.keys()):
self._queue.put(key)
i = self._queue.qsize()
self._logger.debug('Queue size (after duplicate filter): %d', (i,))
@@ -149,10 +149,10 @@ class DnotifyDirectoryNotifier(DirectoryNotifier):
if timeout is None:
return self._queue.get()
timeout_time = time.time() + timeout
- while 1:
+ while True:
try:
self._queue.get(0)
- except Queue.Empty:
+ except queue.Empty:
if time.time() > timeout_time:
return None
else:
diff --git a/minidinstall/DpkgControl.py b/minidinstall/DpkgControl.py
index 4bda8c5..be08155 100755
--- a/minidinstall/DpkgControl.py
+++ b/minidinstall/DpkgControl.py
@@ -34,9 +34,8 @@
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import re, string
-from DpkgDatalist import *
+from .DpkgDatalist import *
from minidinstall.SignedFile import *
-from types import ListType
class DpkgParagraph(DpkgOrderedDatalist):
caseSensitive = 0
@@ -48,7 +47,7 @@ class DpkgParagraph(DpkgOrderedDatalist):
"Paragraph data from a file object."
key = None
value = None
- while 1:
+ while True:
line = f.readline()
if not line:
return
@@ -60,32 +59,32 @@ class DpkgParagraph(DpkgOrderedDatalist):
return
line = line[ :-1 ]
if line[ 0 ] != ' ':
- key, value = string.split( line, ":", 1 )
+ key, value = line.split( ":", 1 )
if value: value = value[ 1: ]
if not self.caseSensitive:
- newkey = string.lower( key )
- if not self.trueFieldCasing.has_key( key ):
+ newkey = key.lower()
+ if key not in self.trueFieldCasing:
self.trueFieldCasing[ newkey ] = key
key = newkey
else:
- if isinstance( value, ListType ):
+ if isinstance( value, list ):
value.append( line[ 1: ] )
else:
value = [ value, line[ 1: ] ]
self[ key ] = value
def _storeField( self, f, value, lead = " " ):
- if isinstance( value, ListType ):
- value = string.join( map( lambda v, lead = lead: v and ( lead + v ) or v, value ), "\n" )
+ if isinstance( value, list ):
+ value = "\n".join(list(map( lambda v, lead = lead: v and ( lead + v ) or v, value )))
else:
if value: value = lead + value
f.write( "%s\n" % ( value ) )
def _store( self, f ):
"Write our paragraph data to a file object"
- for key in self.keys():
+ for key in list(self.keys()):
value = self[ key ]
- if self.trueFieldCasing.has_key( key ):
+ if key in self.trueFieldCasing:
key = self.trueFieldCasing[ key ]
f.write( "%s:" % key )
self._storeField( f, value )
@@ -105,7 +104,7 @@ class DpkgControl(DpkgOrderedDatalist):
return p
def load( self, f ):
- while 1:
+ while True:
p = self._load_one( f )
if not p: break
self[ p[ self.key ] ] = p
@@ -113,7 +112,7 @@ class DpkgControl(DpkgOrderedDatalist):
def _store( self, f ):
"Write our control data to a file object"
- for key in self.keys():
+ for key in list(self.keys()):
self[ key ]._store( f )
f.write( "\n" )
@@ -138,8 +137,8 @@ if __name__ == "__main__":
import sys
types = { 'p' : DpkgParagraph, 'c' : DpkgControl, 's' : DpkgSourceControl }
type = sys.argv[ 1 ]
- if not types.has_key( type ):
- print "Unknown type `%s'!" % type
+ if type not in types:
+ print( "Unknown type `%s'!" % type )
sys.exit( 1 )
file = open( sys.argv[ 2 ], "r" )
data = types[ type ]()
diff --git a/minidinstall/DpkgDatalist.py b/minidinstall/DpkgDatalist.py
index 0c11612..68f9940 100644
--- a/minidinstall/DpkgDatalist.py
+++ b/minidinstall/DpkgDatalist.py
@@ -21,10 +21,9 @@
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import os, sys
-from UserDict import UserDict
-from OrderedDict import OrderedDict
+from collections import UserDict
+from collections import OrderedDict
from minidinstall.SafeWriteFile import SafeWriteFile
-from types import StringType
class DpkgDatalistException(Exception):
UNKNOWN = 0
@@ -55,15 +54,14 @@ class _DpkgDatalist:
self._store(sys.stdout)
return
- # Write to a temporary file first
- if type(fn) == StringType:
+ if isinstance(fn, str):
vf=SafeWriteFile(fn+".new", fn, "w")
else:
vf=fn
try:
self._store(vf)
finally:
- if type(fn) == StringType:
+ if isinstance(fn, str):
vf.close()
diff --git a/minidinstall/GPGSigVerifier.py b/minidinstall/GPGSigVerifier.py
index a8fb46c..2e0dee5 100644
--- a/minidinstall/GPGSigVerifier.py
+++ b/minidinstall/GPGSigVerifier.py
@@ -25,14 +25,14 @@ class GPGSigVerifierException(Exception):
def __init__(self, value):
self._value = value
def __str__(self):
- return `self._value`
+ return repr(self._value)
class GPGSigVerificationFailure(Exception):
def __init__(self, value, output):
self._value = value
self._output = output
def __str__(self):
- return `self._value`
+ return repr(self._value)
def getOutput(self):
return self._output
diff --git a/minidinstall/OrderedDict.py b/minidinstall/OrderedDict.py
index fa3f276..7c842b0 100644
--- a/minidinstall/OrderedDict.py
+++ b/minidinstall/OrderedDict.py
@@ -20,7 +20,7 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-from UserDict import UserDict
+from collections import UserDict
class OrderedDict(UserDict):
__order=[]
@@ -41,12 +41,12 @@ class OrderedDict(UserDict):
return UserDict.__cmp__(self, dict)
def __setitem__(self, key, value):
- if not self.has_key(key):
+ if key not in self:
self.__order.append(key)
UserDict.__setitem__(self, key, value)
def __delitem__(self, key):
- if self.has_key(key):
+ if key in self:
del self.__order[self.__order.index(key)]
UserDict.__delitem__(self, key)
@@ -64,13 +64,13 @@ class OrderedDict(UserDict):
return self.__order
def items(self):
- return map(lambda x, self=self: (x, self.__getitem__(x)), self.__order)
+ return list(map(lambda x, self=self: (x, self.__getitem__(x)), self.__order))
def values(self):
- return map(lambda x, self=self: self.__getitem__(x), self.__order)
+ return list(map(lambda x, self=self: self.__getitem__(x), self.__order))
def update(self, dict):
- for k, v in dict.items():
+ for k, v in list(dict.items()):
self.__setitem__(k, v)
# vim:ts=4:sw=4:et:
diff --git a/minidinstall/SafeWriteFile.py b/minidinstall/SafeWriteFile.py
index 1777d36..591c4f0 100755
--- a/minidinstall/SafeWriteFile.py
+++ b/minidinstall/SafeWriteFile.py
@@ -21,9 +21,7 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-from types import StringType
from shutil import copy2
-from string import find
from os import rename
class ObjectNotAllowed(Exception):
@@ -37,14 +35,14 @@ class InvalidMode(Exception):
class SafeWriteFile:
def __init__(self, newname, realname, mode="w", bufsize=-1):
- if type(newname)!=StringType:
+ if not isinstance(newname, str):
raise ObjectNotAllowed(newname)
- if type(realname)!=StringType:
+ if not isinstance(realname, str):
raise ObjectNotAllowed(realname)
- if find(mode, "r")>=0:
+ if "r" in mode:
raise InvalidMode(mode)
- if find(mode, "a")>=0 or find(mode, "+") >= 0:
+ if "a" in mode or "+" in mode:
copy2(realname, newname)
self.fobj=open(newname, mode, bufsize)
self.newname=newname
diff --git a/minidinstall/SignedFile.py b/minidinstall/SignedFile.py
index 71181c3..efc4730 100755
--- a/minidinstall/SignedFile.py
+++ b/minidinstall/SignedFile.py
@@ -33,7 +33,7 @@ class SignedFile:
line = stream.readline()
if (line == "-----BEGIN PGP SIGNED MESSAGE-----\n"):
self._signed = 1
- while (1):
+ while True:
line = stream.readline()
if (len(line) == 0 or line == '\n'):
break
@@ -55,18 +55,18 @@ class SignedFile:
self._signature = []
self._signatureversion = self._stream.readline()
self._stream.readline() # skip blank line
- while 1:
+ while True:
line = self._stream.readline()
if len(line) == 0 or line == "-----END PGP SIGNATURE-----\n":
break
self._signature.append(line)
- self._signature = string.join
+ self._signature = ''.join(self._signature)
return ''
return line
def readlines(self):
ret = []
- while 1:
+ while True:
line = self.readline()
if (line != ''):
ret.append(line)
@@ -89,19 +89,19 @@ class SignedFile:
if __name__=="__main__":
import sys
if len(sys.argv) == 0:
- print "Need one file as an argument"
+ print("Need one file as an argument")
sys.exit(1)
filename = sys.argv[1]
f=SignedFile(open(filename))
if f.getSigned():
- print "**** SIGNED ****"
+ print("**** SIGNED ****")
else:
- print "**** NOT SIGNED ****"
+ print("**** NOT SIGNED ****")
lines=f.readlines()
- print lines
+ print(lines)
if not f.getSigned():
assert(len(lines) == len(actuallines))
else:
- print "Signature: %s" % (f.getSignature())
+ print("Signature: %s" % (f.getSignature()))
# vim:ts=4:sw=4:et:
diff --git a/minidinstall/mail.py b/minidinstall/mail.py
index 30103a3..50df462 100644
--- a/minidinstall/mail.py
+++ b/minidinstall/mail.py
@@ -43,5 +43,5 @@ def send(smtp_server, smtp_from, smtp_to, body, subject="mini-dinstall mail noti
smtp = smtplib.SMTP(smtp_server)
smtp.sendmail(smtp_from, [smtp_to], mime_body.as_string())
logger.info("Mail sent to %s (%s)" % (smtp_to, subject))
- except Exception, e:
+ except Exception as e:
logger.exception("Error sending mail to %s ('%s') via %s: %s: %s", smtp_to, subject, smtp_server, type(e), e.args)
diff --git a/minidinstall/misc.py b/minidinstall/misc.py
index 94fe291..372c450 100644
--- a/minidinstall/misc.py
+++ b/minidinstall/misc.py
@@ -2,7 +2,7 @@
# misc tools for mini-dinstall
-# Copyright © 2004 Thomas Viehmann <tv@beamnet.de>
+# Copyright © 2004 Thomas Viehmann <tv@beamnet.de>
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
@@ -28,7 +28,7 @@ def dup2(fd,fd2):
try:
os.dup2(fd,fd2)
success = 1
- except OSError, e:
+ except OSError as e:
if (e.errno != errno.EBUSY) or (tries >= 3):
raise
# wait 0-2 seconds befor next try
@@ -55,7 +55,7 @@ def get_file_sum(self, type, filename):
elif type == 'sha256':
sum = hashlib.sha256()
self._logger.debug("Generate %s (python-internal) for %s" % (type, filename))
- f = open(filename)
+ f = open(filename,'rb')
buf = f.read(8192)
while buf != '':
sum.update(buf)
diff --git a/minidinstall/tweet.py b/minidinstall/tweet.py
index 548918b..7106085 100644
--- a/minidinstall/tweet.py
+++ b/minidinstall/tweet.py
@@ -20,7 +20,7 @@
import logging
-import urllib2
+import urllib.request
import base64
def send(tweet_body, tweet_server, tweet_user, tweet_password):
@@ -40,19 +40,19 @@ def send(tweet_body, tweet_server, tweet_user, tweet_password):
if not tweet_user or not tweet_password:
logger.exception("Missing username or password for twitting")
- auth_handler = urllib2.HTTPBasicAuthHandler()
+ auth_handler = urllib.request.HTTPBasicAuthHandler()
auth_handler.add_password(realm=auth_realm,
uri=post_url,
user=tweet_user,
passwd=tweet_password)
- m_http_opener = urllib2.build_opener(auth_handler)
+ m_http_opener = urllib.request.build_opener(auth_handler)
- req = urllib2.Request(post_url)
+ req = urllib.request.Request(post_url)
req.add_data("status=%s" % tweet_body)
handle = None
try:
handle = m_http_opener.open(req)
a = handle.read()
logger.info("Tweet sent to %s (%s)" % (tweet_server, tweet_user))
- except Exception, e:
+ except Exception as e:
logger.exception("Error sending tweet to %s ('%s') via %s: %s: %s", tweet_server, tweet_body, tweet_user, type(e), e.args)