#152 python3: improve Python 3.x compatibility
Closed 6 years ago by tkopecek. Opened 7 years ago by pavlix.
pavlix/koji python3 into master

avoid specfile warnings
Pavel Šimerda • 7 years ago  
python3: fix imports after modernize
Pavel Šimerda • 7 years ago  
python3: modernize all client code
Pavel Šimerda • 7 years ago  
python3: port umask octal numerals
Pavel Šimerda • 7 years ago  
python3: port binary/text string checks
Pavel Šimerda • 7 years ago  
python3: port adler32 numeric literals
Pavel Šimerda • 7 years ago  
python3: port SSLConnection code
Pavel Šimerda • 7 years ago  
python3: port Makefile python checks
Pavel Šimerda • 7 years ago  
python3: avoid using exceptions module
Pavel Šimerda • 7 years ago  
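
For context: the commits above apply the standard Python 2/3 compatibility idioms that the diff below repeats throughout the client code: __future__ imports, six.moves aliases for renamed stdlib modules, "except ... as e" syntax, 0o-prefixed octal literals, open() instead of file(), and list() around dict views before sorting or indexing. A minimal, self-contained sketch of those idioms (not taken from the PR itself; the helper names and the path/session parameters are illustrative):

from __future__ import absolute_import, print_function

import os

from six.moves import configparser   # replaces "import ConfigParser"
from six.moves import xmlrpc_client  # replaces "import xmlrpclib"


def list_config_defaults(path):
    """Read an ini-style config through six.moves, as the ported code does."""
    config = configparser.ConfigParser()
    config.read(path)
    # dict views are iterators on Python 3, so wrap them in list()
    # before sorting or indexing, as the diff does for .keys()/.values()
    return sorted(list(config.defaults().keys()))


def write_workdir_file(path, data):
    # octal literals need the 0o prefix on Python 3 (cf. os.umask(0o002) in the diff)
    old_umask = os.umask(0o002)
    try:
        # the file() builtin is gone on Python 3; open() works on both
        with open(path, 'w') as fo:
            fo.write(data)
    finally:
        os.umask(old_umask)


def check_server(session):
    try:
        return session.getAPIVersion()
    # "except ExcType, e" is a SyntaxError on Python 3; "as" works on 2.6+
    except xmlrpc_client.ProtocolError as e:
        print("Error: unable to connect to server (%s)" % e)
        return None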
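
The "binary/text string checks" and "adler32 numeric literals" commits touch code outside this file's diff, so their exact changes are not shown here. As an assumed illustration only (my reading of those commit titles, not the PR's code): string-type tests go through six, and Python 2 long-integer literals lose their trailing L:

import zlib

import six


def is_text(value):
    # portable binary/text check; the real predicate used by the commit may differ
    return isinstance(value, six.text_type)


def adler32_hex(data):
    # Python 3 drops the "L" long-literal suffix, so masks are written as plain ints;
    # masking keeps the adler32 result unsigned on both versions
    if isinstance(data, six.text_type):
        data = data.encode('utf-8')
    return "%08x" % (zlib.adler32(data) & 0xffffffff)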
file modified
+430 -425
@@ -24,7 +24,13 @@ 

  #       Mike Bonnet <mikeb@redhat.com>

  #       Cristian Balint <cbalint@redhat.com>

  

+ from __future__ import absolute_import

+ from __future__ import print_function

  import sys

+ from six.moves import filter

+ from six.moves import map

+ from six.moves import range

+ from six.moves import zip

  try:

      import krbV

  except ImportError:  # pragma: no cover
@@ -40,7 +46,7 @@ 

          import simplejson as json

      except ImportError:

          json = None

- import ConfigParser

+ from six.moves import configparser

  import base64

  import dateutil.parser

  import errno
@@ -60,7 +66,7 @@ 

  import traceback

  import urlgrabber.grabber as grabber

  import urlgrabber.progress as progress

- import xmlrpclib

+ from six.moves import xmlrpc_client

  try:

      import libcomps

  except ImportError:  # pragma: no cover
@@ -142,7 +148,7 @@ 

  def get_epilog_str(progname=None):

      if progname is None:

          progname = os.path.basename(sys.argv[0]) or 'koji'

-     categories_ordered=', '.join(sorted(['all'] + categories.keys()))

+     categories_ordered=', '.join(sorted(['all'] + list(categories.keys())))

      epilog_str = '''

  Try "%(progname)s --help" for help about global options

  Try "%(progname)s help" to get all available commands
@@ -237,12 +243,12 @@ 

      # load local config

      try:

          result = koji.read_config(options.profile, user_config=options.configFile)

-     except koji.ConfigurationError, e:

+     except koji.ConfigurationError as e:

          parser.error(e.args[0])

          assert False  # pragma: no cover

  

      # update options according to local config

-     for name, value in result.iteritems():

+     for name, value in result.items():

          if getattr(options, name, None) is None:

              setattr(options, name, value)

  
@@ -275,7 +281,7 @@ 

  def ensure_connection(session):

      try:

          ret = session.getAPIVersion()

-     except xmlrpclib.ProtocolError:

+     except xmlrpc_client.ProtocolError:

          error(_("Error: Unable to connect to server"))

      if ret != koji.API_VERSION:

          warn(_("WARNING: The server is at API version %d and the client is at %d" % (ret, koji.API_VERSION)))
@@ -294,7 +300,7 @@ 

      else:

          indent = ''

      label = koji.taskLabel(task)

-     print(''.join([fmt % task, indent, label]))

+     print((''.join([fmt % task, indent, label])))

  

  def print_task_recurse(task,depth=0):

      """Print a task and its children"""
@@ -339,7 +345,7 @@ 

          error = None

          try:

              result = self.session.getTaskResult(self.id)

-         except (xmlrpclib.Fault,koji.GenericError),e:

+         except (xmlrpc_client.Fault, koji.GenericError) as e:

              error = e

          if error is None:

              # print("%s: complete" % self.str())
@@ -357,7 +363,7 @@ 

          self.info = self.session.getTaskInfo(self.id, request=True)

          if self.info is None:

              if not self.quiet:

-                 print("No such task id: %i" % self.id)

+                 print(("No such task id: %i" % self.id))

              sys.exit(1)

          state = self.info['state']

          if last:
@@ -365,13 +371,13 @@ 

              laststate = last['state']

              if laststate != state:

                  if not self.quiet:

-                     print("%s: %s -> %s" % (self.str(), self.display_state(last), self.display_state(self.info)))

+                     print(("%s: %s -> %s" % (self.str(), self.display_state(last), self.display_state(self.info))))

                  return True

              return False

          else:

              # First time we're seeing this task, so just show the current state

              if not self.quiet:

-                 print("%s: %s" % (self.str(), self.display_state(self.info)))

+                 print(("%s: %s" % (self.str(), self.display_state(self.info))))

              return False

  

      def is_done(self):
@@ -417,7 +423,7 @@ 

              open += 1

          elif status == koji.TASK_STATES['FREE']:

              free += 1

-     print("  %d free  %d open  %d done  %d failed" % (free, open, done, failed))

+     print(("  %d free  %d open  %d done  %d failed" % (free, open, done, failed)))

  

  def display_task_results(tasks):

      for task in [task for task in tasks.values() if task.level == 0]:
@@ -425,14 +431,14 @@ 

          task_label = task.str()

  

          if state == koji.TASK_STATES['CLOSED']:

-             print('%s completed successfully' % task_label)

+             print(('%s completed successfully' % task_label))

          elif state == koji.TASK_STATES['FAILED']:

-             print('%s failed' % task_label)

+             print(('%s failed' % task_label))

          elif state == koji.TASK_STATES['CANCELED']:

-             print('%s was canceled' % task_label)

+             print(('%s was canceled' % task_label))

          else:

              # shouldn't happen

-             print('%s has not completed' % task_label)

+             print(('%s has not completed' % task_label))

  

  def watch_tasks(session,tasklist,quiet=False):

      global options
@@ -448,7 +454,7 @@ 

              tasks[task_id] = TaskWatcher(task_id,session,quiet=quiet)

          while True:

              all_done = True

-             for task_id,task in tasks.items():

+             for task_id, task in list(tasks.items()):

                  changed = task.update()

                  if not task.is_done():

                      all_done = False
@@ -461,7 +467,7 @@ 

                          rv = 1

                  for child in session.getTaskChildren(task_id):

                      child_id = child['id']

-                     if not child_id in tasks.keys():

+                     if not child_id in list(tasks.keys()):

                          tasks[child_id] = TaskWatcher(child_id, session, task.level + 1, quiet=quiet)

                          tasks[child_id].update()

                          # If we found new children, go through the list again,
@@ -480,10 +486,10 @@ 

              progname = os.path.basename(sys.argv[0]) or 'koji'

              tlist = ['%s: %s' % (t.str(), t.display_state(t.info))

                              for t in tasks.values() if not t.is_done()]

-             print( \

+             print(( \

  """Tasks still running. You can continue to watch with the '%s watch-task' command.

  Running Tasks:

- %s""" % (progname, '\n'.join(tlist)))

+ %s""" % (progname, '\n'.join(tlist))))

          rv = 1

      return rv

  
@@ -493,7 +499,7 @@ 

      def _isDone(session, taskId):

          info = session.getTaskInfo(taskId)

          if info is None:

-             print("No such task id: %i" % taskId)

+             print(("No such task id: %i" % taskId))

              sys.exit(1)

          state = koji.TASK_STATES[info['state']]

          return (state in ['CLOSED','CANCELED','FAILED'])
@@ -560,13 +566,13 @@ 

  

      dsttag = session.getTag(tag)

      if not dsttag:

-         print("Unknown tag: %s" % tag)

+         print(("Unknown tag: %s" % tag))

          return 1

  

      groups = dict([(p['name'], p['group_id']) for p in session.getTagGroups(tag, inherit=False)])

      group_id = groups.get(group, None)

      if group_id is not None:

-         print("Group %s already exists for tag %s" % (group, tag))

+         print(("Group %s already exists for tag %s" % (group, tag)))

          return 1

  

      session.groupListAdd(tag, group)
@@ -605,9 +611,9 @@ 

  

      ret = session.assignTask(task_id, hostname, force)

      if ret:

-         print('assigned task %d to host %s' % (task_id, hostname))

+         print(('assigned task %d to host %s' % (task_id, hostname)))

      else:

-         print('failed to assign task %d to host %s' % (task_id, hostname))

+         print(('failed to assign task %d to host %s' % (task_id, hostname)))

  

  

  def handle_add_host(options, session, args):
@@ -624,7 +630,7 @@ 

      activate_session(session)

      id = session.getHost(host)

      if id:

-         print("%s is already in the database" % host)

+         print(("%s is already in the database" % host))

          return 1

      else:

          kwargs = {}
@@ -632,7 +638,7 @@ 

              kwargs['krb_principal'] = options.krb_principal

          id = session.addHost(host, args[1:], **kwargs)

          if id:

-             print("%s added: id %d" % (host, id))

+             print(("%s added: id %d" % (host, id)))

  

  def handle_edit_host(options, session, args):

      "[admin] Edit a host"
@@ -662,11 +668,11 @@ 

      error = False

      for host, [info] in zip(args, session.multiCall(strict=True)):

          if not info:

-             print(_("Host %s does not exist") % host)

+             print((_("Host %s does not exist") % host))

              error = True

  

      if error:

-         print(_("No changes made, please correct the command line"))

+         print((_("No changes made, please correct the command line")))

          return 1

  

      session.multicall = True
@@ -674,9 +680,9 @@ 

          session.editHost(host, **vals)

      for host, [result] in zip(args, session.multiCall(strict=True)):

          if result:

-             print(_("Edited %s") % host)

+             print((_("Edited %s") % host))

          else:

-             print(_("No changes made to %s") % host)

+             print((_("No changes made to %s") % host))

  

  def handle_add_host_to_channel(options, session, args):

      "[admin] Add a host to a channel"
@@ -692,18 +698,18 @@ 

      activate_session(session)

      if options.list:

          for channel in session.listChannels():

-             print(channel['name'])

+             print((channel['name']))

          return

      channel = args[1]

      if not options.new:

          channelinfo = session.getChannel(channel)

          if not channelinfo:

-             print("No such channel: %s" % channel)

+             print(("No such channel: %s" % channel))

              return 1

      host = args[0]

      hostinfo = session.getHost(host)

      if not hostinfo:

-         print("No such host: %s" % host)

+         print(("No such host: %s" % host))

          return 1

      kwargs = {}

      if options.new:
@@ -723,13 +729,13 @@ 

      activate_session(session)

      hostinfo = session.getHost(host)

      if not hostinfo:

-         print("No such host: %s" % host)

+         print(("No such host: %s" % host))

          return 1

      hostchannels = [c['name'] for c in session.listChannels(hostinfo['id'])]

  

      channel = args[1]

      if channel not in hostchannels:

-         print("Host %s is not a member of channel %s" % (host, channel))

+         print(("Host %s is not a member of channel %s" % (host, channel)))

          return 1

  

      session.removeHostFromChannel(host, channel)
@@ -747,7 +753,7 @@ 

      activate_session(session)

      cinfo = session.getChannel(args[0])

      if not cinfo:

-         print("No such channel: %s" % args[0])

+         print(("No such channel: %s" % args[0]))

          return 1

      session.removeChannel(args[0], force=options.force)

  
@@ -763,7 +769,7 @@ 

      activate_session(session)

      cinfo = session.getChannel(args[0])

      if not cinfo:

-         print("No such channel: %s" % args[0])

+         print(("No such channel: %s" % args[0]))

          return 1

      session.renameChannel(args[0], args[1])

  
@@ -783,7 +789,7 @@ 

          parser.error(_("Please specify an owner for the package(s)"))

          assert False  # pragma: no cover

      if not session.getUser(options.owner):

-         print("User %s does not exist" % options.owner)

+         print(("User %s does not exist" % options.owner))

          return 1

      activate_session(session)

      tag = args[0]
@@ -793,21 +799,21 @@ 

      # check if list of packages exists for that tag already

      dsttag=session.getTag(tag)

      if dsttag is None:

-         print("No such tag: %s" % tag)

+         print(("No such tag: %s" % tag))

          sys.exit(1)

      pkglist = dict([(p['package_name'], p['package_id']) for p in session.listPackages(tagID=dsttag['id'])])

      to_add = []

      for package in args[1:]:

          package_id = pkglist.get(package, None)

          if not package_id is None:

-             print("Package %s already exists in tag %s" % (package, tag))

+             print(("Package %s already exists in tag %s" % (package, tag)))

              continue

          to_add.append(package)

      if options.extra_arches:

          opts['extra_arches'] = parse_arches(options.extra_arches)

  

      # add the packages

-     print("Adding %i packages to tag %s" % (len(to_add), dsttag['name']))

+     print(("Adding %i packages to tag %s" % (len(to_add), dsttag['name'])))

      session.multicall = True

      for package in to_add:

          session.packageListAdd(tag, package, options.owner, **opts)
@@ -828,14 +834,14 @@ 

      # check if list of packages exists for that tag already

      dsttag=session.getTag(tag)

      if dsttag is None:

-         print("No such tag: %s" % tag)

+         print(("No such tag: %s" % tag))

          return 1

      pkglist = dict([(p['package_name'], p['package_id']) for p in session.listPackages(tagID=dsttag['id'], inherited=True)])

      ret = 0

      for package in args[1:]:

          package_id = pkglist.get(package, None)

          if package_id is None:

-             print("Package %s doesn't exist in tag %s" % (package, tag))

+             print(("Package %s doesn't exist in tag %s" % (package, tag)))

              ret = 1

      if ret:

          return ret
@@ -861,14 +867,14 @@ 

      # check if list of packages exists for that tag already

      dsttag=session.getTag(tag)

      if dsttag is None:

-         print("No such tag: %s" % tag)

+         print(("No such tag: %s" % tag))

          return 1

      pkglist = dict([(p['package_name'], p['package_id']) for p in session.listPackages(tagID=dsttag['id'])])

      ret = 0

      for package in args[1:]:

          package_id = pkglist.get(package, None)

          if package_id is None:

-             print("Package %s is not in tag %s" % (package, tag))

+             print(("Package %s is not in tag %s" % (package, tag)))

              ret = 1

      if ret:

          return ret
@@ -927,7 +933,7 @@ 

  def _running_in_bg():

      try:

          return (not os.isatty(0)) or (os.getpgrp() != os.tcgetpgrp(0))

-     except OSError, e:

+     except OSError as e:

          return True

  

  def handle_build(options, session, args):
@@ -987,7 +993,7 @@ 

      if '://' not in source:

          #treat source as an srpm and upload it

          if not build_opts.quiet:

-             print("Uploading srpm: %s" % source)

+             print(("Uploading srpm: %s" % source))

          serverdir = _unique_path('cli-build')

          if _running_in_bg() or build_opts.noprogress or build_opts.quiet:

              callback = None
@@ -998,8 +1004,8 @@ 

          source = "%s/%s" % (serverdir, os.path.basename(source))

      task_id = session.build(source, target, opts, priority=priority)

      if not build_opts.quiet:

-         print("Created task: %d" % task_id)

-         print("Task info: %s/taskinfo?taskID=%s" % (options.weburl, task_id))

+         print(("Created task: %d" % task_id))

+         print(("Task info: %s/taskinfo?taskID=%s" % (options.weburl, task_id)))

      if build_opts.wait or (build_opts.wait is None and not _running_in_bg()):

          session.logout()

          return watch_tasks(session, [task_id], quiet=build_opts.quiet)
@@ -1035,8 +1041,8 @@ 

      # otherwise there is no way that a chain-build can work

      ancestors = session.getFullInheritance(build_target['build_tag'])

      if dest_tag['id'] not in [build_target['build_tag']] + [ancestor['parent_id'] for ancestor in ancestors]:

-         print(_("Packages in destination tag %(dest_tag_name)s are not inherited by build tag %(build_tag_name)s" % build_target))

-         print(_("Target %s is not usable for a chain-build" % build_target['name']))

+         print((_("Packages in destination tag %(dest_tag_name)s are not inherited by build tag %(build_tag_name)s" % build_target)))

+         print((_("Target %s is not usable for a chain-build" % build_target['name'])))

          return 1

  

      sources = args[1:]
@@ -1058,7 +1064,7 @@ 

              # quick check that it looks like a N-V-R

              build_level.append(src)

          else:

-             print(_('"%s" is not a SCM URL or package N-V-R' % src))

+             print((_('"%s" is not a SCM URL or package N-V-R' % src)))

              return 1

      if build_level:

          src_list.append(build_level)
@@ -1073,8 +1079,8 @@ 

  

      task_id = session.chainBuild(src_list, target, priority=priority)

      if not build_opts.quiet:

-         print("Created task: %d" % task_id)

-         print("Task info: %s/taskinfo?taskID=%s" % (options.weburl, task_id))

+         print(("Created task: %d" % task_id))

+         print(("Task info: %s/taskinfo?taskID=%s" % (options.weburl, task_id)))

      if _running_in_bg() or build_opts.nowait:

          return

      else:
@@ -1150,11 +1156,11 @@ 

          try:

              params = koji.util.parse_maven_param(build_opts.inis, scratch=build_opts.scratch,

                                                   section=build_opts.section)

-         except ValueError, e:

+         except ValueError as e:

              parser.error(e.args[0])

-         opts = params.values()[0]

+         opts = list(params.values())[0]

          if opts.pop('type', 'maven') != 'maven':

-             parser.error(_("Section %s does not contain a maven-build config") % params.keys()[0])

+             parser.error(_("Section %s does not contain a maven-build config") % list(params.keys())[0])

          source = opts.pop('scmurl')

      else:

          source = args[1]
@@ -1171,8 +1177,8 @@ 

          priority = 5

      task_id = session.mavenBuild(source, target, opts, priority=priority)

      if not build_opts.quiet:

-         print("Created task: %d" % task_id)

-         print("Task info: %s/taskinfo?taskID=%s" % (options.weburl, task_id))

+         print(("Created task: %d" % task_id))

+         print(("Task info: %s/taskinfo?taskID=%s" % (options.weburl, task_id)))

      if _running_in_bg() or build_opts.nowait:

          return

      else:
@@ -1209,11 +1215,11 @@ 

          try:

              params = koji.util.parse_maven_param(build_opts.inis, scratch=build_opts.scratch,

                                                   section=build_opts.section)

-         except ValueError, e:

+         except ValueError as e:

              parser.error(e.args[0])

-         opts = params.values()[0]

+         opts = list(params.values())[0]

          if opts.get('type') != 'wrapper':

-             parser.error(_("Section %s does not contain a wrapper-rpm config") % params.keys()[0])

+             parser.error(_("Section %s does not contain a wrapper-rpm config") % list(params.keys())[0])

          url = opts['scmurl']

          package = opts['buildrequires'][0]

          target_info = session.getBuildTarget(target, strict=True)
@@ -1237,8 +1243,8 @@ 

      if build_opts.scratch:

          opts['scratch'] = True

      task_id = session.wrapperRPM(build_id, url, target, priority, opts=opts)

-     print("Created task: %d" % task_id)

-     print("Task info: %s/taskinfo?taskID=%s" % (options.weburl, task_id))

+     print(("Created task: %d" % task_id))

+     print(("Task info: %s/taskinfo?taskID=%s" % (options.weburl, task_id)))

      if _running_in_bg() or build_opts.nowait:

          return

      else:
@@ -1283,14 +1289,14 @@ 

              opts[key] = val

      try:

          builds = koji.util.parse_maven_chain(args[1:], scratch=opts.get('scratch'))

-     except ValueError, e:

+     except ValueError as e:

          parser.error(e.args[0])

      priority = None

      if build_opts.background:

          priority = 5

      task_id = session.chainMaven(builds, target, opts, priority=priority)

-     print("Created task: %d" % task_id)

-     print("Task info: %s/taskinfo?taskID=%s" % (options.weburl, task_id))

+     print(("Created task: %d" % task_id))

+     print(("Task info: %s/taskinfo?taskID=%s" % (options.weburl, task_id)))

      if _running_in_bg() or build_opts.nowait:

          return

      else:
@@ -1317,7 +1323,7 @@ 

          _printTaskInfo(session, taskID, 0, False, True)

      newID = session.resubmitTask(taskID)

      if not options.quiet:

-         print("Resubmitted task %s as new task %s" % (taskID, newID))

+         print(("Resubmitted task %s as new task %s" % (taskID, newID)))

      if _running_in_bg() or options.nowait:

          return

      else:
@@ -1410,10 +1416,10 @@ 

              parser.error(_("Task id must be an integer"))

          broots = session.listBuildroots(taskID=task_id)

          if not broots:

-             print(_("No buildroots for task %s (or no such task)") % options.task)

+             print((_("No buildroots for task %s (or no such task)") % options.task))

              return 1

          if len(broots) > 1:

-             print(_("Multiple buildroots found: %s" % [br['id'] for br in broots]))

+             print((_("Multiple buildroots found: %s" % [br['id'] for br in broots])))

          brootinfo = broots[-1]

          if options.latest:

              opts['repoid'] = 'latest'
@@ -1424,7 +1430,7 @@ 

          def_name = "%s-task_%i" % (opts['tag_name'], task_id)

      elif options.tag:

          if not options.arch:

-             print(_("Please specify an arch"))

+             print((_("Please specify an arch")))

              return 1

          tag = session.getTag(options.tag)

          if not tag:
@@ -1432,7 +1438,7 @@ 

          arch = options.arch

          config = session.getBuildConfig(tag['id'])

          if not config:

-             print(_("Could not get config info for tag: %(name)s") % tag)

+             print((_("Could not get config info for tag: %(name)s") % tag))

              return 1

          opts['tag_name'] = tag['name']

          if options.latest:
@@ -1440,13 +1446,13 @@ 

          else:

              repo = session.getRepo(config['id'])

              if not repo:

-                 print(_("Could not get a repo for tag: %(name)s") % tag)

+                 print((_("Could not get a repo for tag: %(name)s") % tag))

                  return 1

              opts['repoid'] = repo['id']

          def_name = "%(tag_name)s-repo_%(repoid)s" % opts

      elif options.target:

          if not options.arch:

-             print(_("Please specify an arch"))

+             print((_("Please specify an arch")))

              return 1

          arch = options.arch

          target = session.getBuildTarget(options.target)
@@ -1458,7 +1464,7 @@ 

          else:

              repo = session.getRepo(target['build_tag'])

              if not repo:

-                 print(_("Could not get a repo for tag: %(name)s") % opts['tag_name'])

+                 print((_("Could not get a repo for tag: %(name)s") % opts['tag_name']))

                  return 1

              opts['repoid'] = repo['id']

      else:
@@ -1470,7 +1476,7 @@ 

          name = "%(tag_name)s-repo_%(repoid)s" % opts

      output = koji.genMockConfig(name, arch, **opts)

      if options.ofile:

-         fo = file(options.ofile, 'w')

+         fo = open(options.ofile, 'w')

          fo.write(output)

          fo.close()

      else:
@@ -1491,7 +1497,7 @@ 

      error = False

      for host, [id] in zip(args, session.multiCall(strict=True)):

          if not id:

-             print("Host %s does not exist" % host)

+             print(("Host %s does not exist" % host))

              error = True

      if error:

          print("No changes made. Please correct the command line.")
@@ -1518,7 +1524,7 @@ 

      error = False

      for host, [id] in zip(args, session.multiCall(strict=True)):

          if not id:

-             print("Host %s does not exist" % host)

+             print(("Host %s does not exist" % host))

              error = True

      if error:

          print("No changes made. Please correct the command line.")
@@ -1555,7 +1561,7 @@ 

  

  def linked_upload(localfile, path, name=None):

      """Link a file into the (locally writable) workdir, bypassing upload"""

-     old_umask = os.umask(002)

+     old_umask = os.umask(0o002)

      try:

          if name is None:

              name = os.path.basename(localfile)
@@ -1565,7 +1571,7 @@ 

          # fix uid/gid to keep httpd happy

          st = os.stat(koji.pathinfo.work())

          os.chown(dest_dir, st.st_uid, st.st_gid)

-         print("Linking rpm to: %s" % dst)

+         print(("Linking rpm to: %s" % dst))

          os.link(localfile, dst)

      finally:

          os.umask(old_umask)
@@ -1604,7 +1610,7 @@ 

              nvr = "%(name)s-%(version)s-%(release)s" % koji.parse_NVRA(data['sourcerpm'])

          to_import.setdefault(nvr,[]).append((path,data))

      builds_missing = False

-     nvrs = to_import.keys()

+     nvrs = list(to_import.keys())

      nvrs.sort()

      for nvr in nvrs:

          to_import[nvr].sort()
@@ -1615,10 +1621,10 @@ 

              #no srpm included, check for build

              binfo = session.getBuild(nvr)

              if not binfo:

-                 print(_("Missing build or srpm: %s") % nvr)

+                 print((_("Missing build or srpm: %s") % nvr))

                  builds_missing = True

      if builds_missing and not options.create_build:

-         print(_("Aborting import"))

+         print((_("Aborting import")))

          return

  

      #local function to help us out below
@@ -1627,13 +1633,13 @@ 

          prev = session.getRPM(rinfo)

          if prev and not prev.get('external_repo_id', 0):

              if prev['payloadhash'] == koji.hex_string(data['sigmd5']):

-                 print(_("RPM already imported: %s") % path)

+                 print((_("RPM already imported: %s") % path))

              else:

-                 print(_("WARNING: md5sum mismatch for %s") % path)

-             print(_("Skipping import"))

+                 print((_("WARNING: md5sum mismatch for %s") % path))

+             print((_("Skipping import")))

              return

          if options.test:

-             print(_("Test mode -- skipping import for %s") % path)

+             print((_("Test mode -- skipping import for %s") % path))

              return

          serverdir = _unique_path('cli-import')

          if options.link:
@@ -1642,17 +1648,17 @@ 

              sys.stdout.write(_("uploading %s... ") % path)

              sys.stdout.flush()

              session.uploadWrapper(path, serverdir)

-             print(_("done"))

+             print((_("done")))

              sys.stdout.flush()

          sys.stdout.write(_("importing %s... ") % path)

          sys.stdout.flush()

          try:

              session.importRPM(serverdir, os.path.basename(path))

-         except koji.GenericError, e:

-             print(_("\nError importing: %s" % str(e).splitlines()[-1]))

+         except koji.GenericError as e:

+             print((_("\nError importing: %s" % str(e).splitlines()[-1])))

              sys.stdout.flush()

          else:

-             print(_("done"))

+             print((_("done")))

          sys.stdout.flush()

  

      for nvr in nvrs:
@@ -1665,10 +1671,10 @@ 

                  need_build = False

              elif b_state in ['FAILED', 'CANCELED']:

                  if not options.create_build:

-                     print(_("Build %s state is %s. Skipping import") % (nvr, b_state))

+                     print((_("Build %s state is %s. Skipping import") % (nvr, b_state)))

                      continue

              else:

-                 print(_("Build %s exists with state=%s. Skipping import") % (nvr, b_state))

+                 print((_("Build %s exists with state=%s. Skipping import") % (nvr, b_state)))

                  continue

  

          # import srpms first, if any
@@ -1676,7 +1682,7 @@ 

              if data['sourcepackage']:

                  if binfo and b_state != 'COMPLETE':

                      # need to fix the state

-                     print(_("Creating empty build: %s") % nvr)

+                     print((_("Creating empty build: %s") % nvr))

                      b_data = koji.util.dslice(binfo, ['name', 'version', 'release'])

                      b_data['epoch'] = data['epoch']

                      session.createEmptyBuild(**b_data)
@@ -1690,11 +1696,11 @@ 

                  if binfo:

                      # should have caught this earlier, but just in case...

                      b_state = koji.BUILD_STATES[binfo['state']]

-                     print(_("Build %s state is %s. Skipping import") % (nvr, b_state))

+                     print((_("Build %s state is %s. Skipping import") % (nvr, b_state)))

                      continue

                  else:

-                     print(_("No such build: %s (include matching srpm or use "

-                             "--create-build option to add it)") % nvr)

+                     print((_("No such build: %s (include matching srpm or use "

+                             "--create-build option to add it)") % nvr))

                      continue

              else:

                  # let's make a new build
@@ -1706,9 +1712,9 @@ 

                      data = to_import[nvr][0][1]

                      b_data['epoch'] = data['epoch']

                  if options.test:

-                     print(_("Test mode -- would have created empty build: %s") % nvr)

+                     print((_("Test mode -- would have created empty build: %s") % nvr))

                  else:

-                     print(_("Creating empty build: %s") % nvr)

+                     print((_("Creating empty build: %s") % nvr))

                      session.createEmptyBuild(**b_data)

                      binfo = session.getBuild(nvr)

  
@@ -1735,9 +1741,9 @@ 

          parser.error(_("Unable to find json module"))

          assert False  # pragma: no cover

      activate_session(session)

-     metadata = json.load(file(args[0], 'r'))

+     metadata = json.load(open(args[0], 'r'))

      if 'output' not in metadata:

-         print(_("Metadata contains no output"))

+         print((_("Metadata contains no output")))

          sys.exit(1)

      localdir = args[1]

  
@@ -1766,7 +1772,7 @@ 

          if options.link:

              linked_upload(localpath, relpath)

          else:

-             print("Uploading %s" % localpath)

+             print(("Uploading %s" % localpath))

              session.uploadWrapper(localpath, relpath, callback=callback)

              if callback:

                  print('')
@@ -1788,7 +1794,7 @@ 

      # check if the tag exists

      dsttag = session.getTag(args[1])

      if dsttag is None:

-         print("No such tag: %s" % args[1])

+         print(("No such tag: %s" % args[1]))

          return 1

      if libcomps is not None:

          _import_comps(session, args[0], args[1], local_options)
@@ -1812,7 +1818,7 @@ 

          libcomps.PACKAGE_TYPE_UNKNOWN : 'unknown',

      }

      for group in comps.groups:

-         print("Group: %s (%s)" % (group.id, group.name))

+         print(("Group: %s (%s)" % (group.id, group.name)))

          session.groupListAdd(

                      tag, group.id, force=force, display_name=group.name,

                      is_default=bool(group.default),
@@ -1826,7 +1832,7 @@ 

                          }

              if pkg.type == libcomps.PACKAGE_TYPE_CONDITIONAL:

                  pkgopts['requires'] = pkg.requires

-             print("  Package: %s: %r" % (pkg.name, pkgopts))

+             print(("  Package: %s: %r" % (pkg.name, pkgopts)))

              session.groupPackageListAdd(tag, group.id, pkg.name, force=force, **pkgopts)

          # libcomps does not support group dependencies

          # libcomps does not support metapkgs
@@ -1839,7 +1845,7 @@ 

      comps.add(filename)

      force = options.force

      for group in comps.groups:

-         print("Group: %(groupid)s (%(name)s)" % vars(group))

+         print(("Group: %(groupid)s (%(name)s)" % vars(group)))

          session.groupListAdd(tag, group.groupid, force=force, display_name=group.name,

                          is_default=bool(group.default),

                          uservisible=bool(group.user_visible),
@@ -1855,7 +1861,7 @@ 

                  if ptype == 'conditional':

                      pkgopts['requires'] = pdata[pkg]

                  #yum.comps does not support basearchonly

-                 print("  Package: %s: %r" % (pkg, pkgopts))

+                 print(("  Package: %s: %r" % (pkg, pkgopts)))

                  session.groupPackageListAdd(tag, group.groupid, pkg, force=force, **pkgopts)

          #yum.comps does not support group dependencies

          #yum.comps does not support metapkgs
@@ -1888,7 +1894,7 @@ 

          if not sigkey:

              sigkey = ""

              if not options.with_unsigned:

-                 print(_("Skipping unsigned package: %s" % path))

+                 print((_("Skipping unsigned package: %s" % path)))

                  continue

          else:

              sigkey = koji.get_sigpacket_key_id(sigkey)
@@ -1896,10 +1902,10 @@ 

          del data['sigpgp']

          rinfo = session.getRPM(data)

          if not rinfo:

-             print("No such rpm in system: %(name)s-%(version)s-%(release)s.%(arch)s" % data)

+             print(("No such rpm in system: %(name)s-%(version)s-%(release)s.%(arch)s" % data))

              continue

          if rinfo.get('external_repo_id'):

-             print("Skipping external rpm: %(name)s-%(version)s-%(release)s.%(arch)s@%(external_repo_name)s" % rinfo)

+             print(("Skipping external rpm: %(name)s-%(version)s-%(release)s.%(arch)s@%(external_repo_name)s" % rinfo))

              continue

          sighdr = koji.rip_rpm_sighdr(path)

          previous = session.queryRPMSigs(rpm_id=rinfo['id'], sigkey=sigkey)
@@ -1907,12 +1913,12 @@ 

          if previous:

              sighash = md5_constructor(sighdr).hexdigest()

              if previous[0]['sighash'] == sighash:

-                 print(_("Signature already imported: %s") % path)

+                 print((_("Signature already imported: %s") % path))

                  continue

              else:

-                 print(_("Warning: signature mismatch: %s") % path)

+                 print((_("Warning: signature mismatch: %s") % path))

                  continue

-         print(_("Importing signature [key %s] from %s...") % (sigkey, path))

+         print((_("Importing signature [key %s] from %s...") % (sigkey, path)))

          if not options.test:

              session.addRPMSig(rinfo['id'], base64.encodestring(sighdr))

  
@@ -1936,7 +1942,7 @@ 

          rpms = session.queryRPMSigs(sigkey=key)

          count = 1

          for rpm in rpms:

-             print("%d/%d" % (count, len(rpms)))

+             print(("%d/%d" % (count, len(rpms))))

              count += 1

              session.writeSignedRPM(rpm['rpm_id'], key)

      elif options.buildid:
@@ -1982,18 +1988,18 @@ 

      #(with the modification that we check to see if the build was latest within

      #the last N days)

      if options.ignore_tag_file:

-         fo = file(options.ignore_tag_file)

+         fo = open(options.ignore_tag_file)

          options.ignore_tag.extend([line.strip() for line in fo.readlines()])

          fo.close()

      if options.protect_tag_file:

-         fo = file(options.protect_tag_file)

+         fo = open(options.protect_tag_file)

          options.protect_tag.extend([line.strip() for line in fo.readlines()])

          fo.close()

      if options.debug:

          options.verbose = True

      cutoff_ts = time.time() - options.days * 24 * 3600

      if options.debug:

-         print("Cutoff date: %s" % time.asctime(time.localtime(cutoff_ts)))

+         print(("Cutoff date: %s" % time.asctime(time.localtime(cutoff_ts))))

      if not options.build:

          if options.verbose:

              print("Getting builds...")
@@ -2003,7 +2009,7 @@ 

              qopts['packageID'] = pkginfo['id']

          builds = [(b['nvr'], b) for b in session.listBuilds(**qopts)]

          if options.verbose:

-             print("...got %i builds" % len(builds))

+             print(("...got %i builds" % len(builds)))

          builds.sort()

      else:

          #single build
@@ -2034,7 +2040,7 @@ 

          if 'name' not in binfo:

              binfo['name'] = binfo['package_name']

          if options.debug:

-             print("DEBUG: %s" % nvr)

+             print(("DEBUG: %s" % nvr))

          #see how recently this build was latest for a tag

          is_latest = False

          is_protected = False
@@ -2045,17 +2051,17 @@ 

              #that the build was recently untagged from

              tags.setdefault(entry['tag_name'], 1)

          if options.debug:

-             print("Tags: %s" % tags.keys())

+             print(("Tags: %s" % list(tags.keys())))

          for tag_name in tags:

              if tag_name == options.trashcan_tag:

                  if options.debug:

-                     print("Ignoring trashcan tag for build %s" % nvr)

+                     print(("Ignoring trashcan tag for build %s" % nvr))

                  continue

              ignore_tag = False

              for pattern in options.ignore_tag:

                  if fnmatch.fnmatch(tag_name, pattern):

                      if options.debug:

-                         print("Ignoring tag %s for build %s" % (tag_name, nvr))

+                         print(("Ignoring tag %s for build %s" % (tag_name, nvr)))

                      ignore_tag = True

                      break

              if ignore_tag:
@@ -2077,7 +2083,7 @@ 

              timeline.sort()

              #find most recent creation entry for our build and crop there

              latest_ts = None

-             for i in xrange(len(timeline)-1, -1, -1):

+             for i in range(len(timeline)-1, -1, -1):

                  #searching in reverse cronological order

                  event_id, is_create, entry = timeline[i]

                  if entry['build_id'] == binfo['id'] and is_create:
@@ -2088,13 +2094,13 @@ 

                  raise koji.GenericError("No creation event found for %s in %s" % (nvr, tag_name))

              our_entry = entry

              if options.debug:

-                 print(_histline(event_id, our_entry))

+                 print((_histline(event_id, our_entry)))

              #now go through the events since most recent creation entry

              timeline = timeline[i+1:]

              if not timeline:

                  is_latest = True

                  if options.debug:

-                     print("%s is latest in tag %s" % (nvr, tag_name))

+                     print(("%s is latest in tag %s" % (nvr, tag_name)))

                  break

              #before we go any further, is this a protected tag?

              protect_tag = False
@@ -2109,14 +2115,14 @@ 

                  if our_entry['revoke_event'] is None:

                      #we're still tagged with a protected tag

                      if options.debug:

-                         print("Build %s has protected tag %s" % (nvr, tag_name))

+                         print(("Build %s has protected tag %s" % (nvr, tag_name)))

                      is_protected = True

                      break

                  elif our_entry['revoke_ts'] > cutoff_ts:

                      #we were still tagged here sometime before the cutoff

                      if options.debug:

-                         print("Build %s had protected tag %s until %s" \

-                                 % (nvr, tag_name, time.asctime(time.localtime(our_entry['revoke_ts']))))

+                         print(("Build %s had protected tag %s until %s" \

+                                 % (nvr, tag_name, time.asctime(time.localtime(our_entry['revoke_ts'])))))

                      is_protected = True

                      break

              replaced_ts = None
@@ -2129,7 +2135,7 @@ 

                  #Note however that if the superceding entry is itself revoked, then

                  #our build could become latest again

                  if options.debug:

-                     print(_histline(event_id, entry))

+                     print((_histline(event_id, entry)))

                  if entry['build_id'] == binfo['id']:

                      if is_create:

                          #shouldn't happen
@@ -2168,31 +2174,31 @@ 

                      #turns out we are still latest

                      is_latest = True

                      if options.debug:

-                         print("%s is latest (again) in tag %s" % (nvr, tag_name))

+                         print(("%s is latest (again) in tag %s" % (nvr, tag_name)))

                      break

                  else:

                      #replaced (but not revoked)

                      timestamps.append(replaced_ts)

                      if options.debug:

-                         print("tag %s: %s not latest (replaced %s)" \

-                                 % (tag_name, nvr, time.asctime(time.localtime(replaced_ts))))

+                         print(("tag %s: %s not latest (replaced %s)" \

+                                 % (tag_name, nvr, time.asctime(time.localtime(replaced_ts)))))

              elif replaced_ts is None:

                  #revoked but not replaced

                  timestamps.append(revoke_ts)

                  if options.debug:

-                     print("tag %s: %s not latest (revoked %s)" \

-                             % (tag_name, nvr, time.asctime(time.localtime(revoke_ts))))

+                     print(("tag %s: %s not latest (revoked %s)" \

+                             % (tag_name, nvr, time.asctime(time.localtime(revoke_ts)))))

              else:

                  #revoked AND replaced

                  timestamps.append(min(revoke_ts, replaced_ts))

                  if options.debug:

-                     print("tag %s: %s not latest (revoked %s, replaced %s)" \

+                     print(("tag %s: %s not latest (revoked %s, replaced %s)" \

                              % (tag_name, nvr, time.asctime(time.localtime(revoke_ts)),

-                                 time.asctime(time.localtime(replaced_ts))))

+                                 time.asctime(time.localtime(replaced_ts)))))

              last_latest = max(timestamps)

              if last_latest > cutoff_ts:

                  if options.debug:

-                     print("%s was latest past the cutoff" % nvr)

+                     print(("%s was latest past the cutoff" % nvr))

                  is_latest = True

                  break

          if is_latest:
@@ -2215,7 +2221,7 @@ 

          build_space = 0

          if not by_sig and options.debug:

              print("(build has no signatures)")

-         for sigkey, rpms in by_sig.iteritems():

+         for sigkey, rpms in by_sig.items():

              mycount = 0

              archdirs = {}

              sigdirs = {}
@@ -2227,21 +2233,21 @@ 

                      continue

                  if not stat.S_ISREG(st.st_mode):

                      #warn about this

-                     print("Skipping %s. Not a regular file" % signedpath)

+                     print(("Skipping %s. Not a regular file" % signedpath))

                      continue

                  if st.st_mtime > cutoff_ts:

-                     print("Skipping %s. File newer than cutoff" % signedpath)

+                     print(("Skipping %s. File newer than cutoff" % signedpath))

                      continue

                  if options.test:

-                     print("Would have unlinked: %s" % signedpath)

+                     print(("Would have unlinked: %s" % signedpath))

                  else:

                      if options.verbose:

-                         print("Unlinking: %s" % signedpath)

+                         print(("Unlinking: %s" % signedpath))

                      try:

                          os.unlink(signedpath)

-                     except OSError, e:

-                         print("Error removing %s: %s" % (signedpath, e))

-                         print("This script needs write access to %s" % koji.BASEDIR)

+                     except OSError as e:

+                         print(("Error removing %s: %s" % (signedpath, e)))

+                         print(("This script needs write access to %s" % koji.BASEDIR))

                          continue

                  mycount +=1

                  build_files += 1
@@ -2253,38 +2259,38 @@ 

                  sigdirs[os.path.dirname(mydir)] = 1

              for dir in archdirs:

                  if options.test:

-                     print("Would have removed dir: %s" % dir)

+                     print(("Would have removed dir: %s" % dir))

                  else:

                      if options.verbose:

-                         print("Removing dir: %s" % dir)

+                         print(("Removing dir: %s" % dir))

                      try:

                          os.rmdir(dir)

-                     except OSError, e:

-                         print("Error removing %s: %s" % (signedpath, e))

+                     except OSError as e:

+                         print(("Error removing %s: %s" % (signedpath, e)))

              if len(sigdirs) == 1:

-                 dir = sigdirs.keys()[0]

+                 dir = list(sigdirs.keys())[0]

                  if options.test:

-                     print("Would have removed dir: %s" % dir)

+                     print(("Would have removed dir: %s" % dir))

                  else:

                      if options.verbose:

-                         print("Removing dir: %s" % dir)

+                         print(("Removing dir: %s" % dir))

                      try:

                          os.rmdir(dir)

-                     except OSError, e:

-                         print("Error removing %s: %s" % (signedpath, e))

+                     except OSError as e:

+                         print(("Error removing %s: %s" % (signedpath, e)))

              elif len(sigdirs) > 1:

-                 print("Warning: more than one signature dir for %s: %r" % (sigkey, sigdirs))

+                 print(("Warning: more than one signature dir for %s: %r" % (sigkey, sigdirs)))

          if build_files:

              total_files += build_files

              total_space += build_space

              if options.verbose:

-                 print("Build: %s, Removed %i signed copies (%i bytes). Total: %i/%i" \

-                         % (nvr, build_files, build_space, total_files, total_space))

+                 print(("Build: %s, Removed %i signed copies (%i bytes). Total: %i/%i" \

+                         % (nvr, build_files, build_space, total_files, total_space)))

          elif options.debug and by_sig:

              print("(build has no signed copies)")

      print("--- Grand Totals ---")

-     print("Files: %i" % total_files)

-     print("Bytes: %i" % total_space)

+     print(("Files: %i" % total_files))

+     print(("Bytes: %i" % total_space))

  

  def handle_set_build_volume(options, session, args):

      "[admin] Move a build to a different volume"
@@ -2295,16 +2301,16 @@ 

      (options, args) = parser.parse_args(args)

      volinfo = session.getVolume(args[0])

      if not volinfo:

-         print("No such volume: %s" % args[0])

+         print(("No such volume: %s" % args[0]))

          return 1

      activate_session(session)

      builds = []

      for nvr in args[1:]:

          binfo = session.getBuild(nvr)

          if not binfo:

-             print("No such build: %s" % nvr)

+             print(("No such build: %s" % nvr))

          elif binfo['volume_id'] == volinfo['id']:

-             print("Build %s already on volume %s" %(nvr, volinfo['name']))

+             print(("Build %s already on volume %s" %(nvr, volinfo['name'])))

          else:

              builds.append(binfo)

      if not builds:
@@ -2313,7 +2319,7 @@ 

      for binfo in builds:

          session.changeBuildVolume(binfo['id'], volinfo['id'])

          if options.verbose:

-             print("%s: %s -> %s" % (binfo['nvr'], binfo['volume_name'], volinfo['name']))

+             print(("%s: %s -> %s" % (binfo['nvr'], binfo['volume_name'], volinfo['name'])))

  

  def handle_add_volume(options, session, args):

      "[admin] Add a new storage volume"
@@ -2326,11 +2332,11 @@ 

      name = args[0]

      volinfo = session.getVolume(name)

      if volinfo:

-         print("Volume %s already exists" % name)

+         print(("Volume %s already exists" % name))

          return 1

      activate_session(session)

      volinfo = session.addVolume(name)

-     print("Added volume %(name)s with id %(id)i" % volinfo)

+     print(("Added volume %(name)s with id %(id)i" % volinfo))

  

  def handle_list_volumes(options, session, args):

      "[info] List storage volumes"
@@ -2339,7 +2345,7 @@ 

      parser = OptionParser(usage=usage)

      (options, args) = parser.parse_args(args)

      for volinfo in session.listVolumes():

-         print(volinfo['name'])

+         print((volinfo['name']))

  

  def handle_list_permissions(options, session, args):

      "[info] List user permissions"
@@ -2356,7 +2362,7 @@ 

      if options.user:

          user = session.getUser(options.user)

          if not user:

-             print("User %s does not exist" % options.user)

+             print(("User %s does not exist" % options.user))

              return 1

          perms = session.getUserPerms(user['id'])

      elif options.mine:
@@ -2385,7 +2391,7 @@ 

          status = koji.USER_STATUS['NORMAL']

      activate_session(session)

      user_id = session.createUser(username, status=status, krb_principal=options.principal)

-     print("Added user %s (%i)" % (username, user_id))

+     print(("Added user %s (%i)" % (username, user_id)))

  

  def handle_enable_user(options, session, args):

      "[admin] Enable logins by a user"
@@ -2438,7 +2444,7 @@ 

          if rinfo is None:

              parser.error(_("No such RPM: %s") % options.rpm)

          if rinfo.get('external_repo_id'):

-             print("External rpm: %(name)s-%(version)s-%(release)s.%(arch)s@%(external_repo_name)s" % rinfo)

+             print(("External rpm: %(name)s-%(version)s-%(release)s.%(arch)s@%(external_repo_name)s" % rinfo))

              return 1

          qopts['rpm_id'] = rinfo['id']

      if options.build:
@@ -2483,7 +2489,7 @@ 

          signedpath = "%s/%s" % (builddir, koji.pathinfo.signed(rinfo, sigkey))

          if not os.path.exists(signedpath):

              if options.debug:

-                 print("No copy: %s" % signedpath)

+                 print(("No copy: %s" % signedpath))

              continue

          print(signedpath)

  
@@ -2502,11 +2508,11 @@ 

          sys.stdout.write(_("importing %s... ") % nvr)

          try:

              session.importBuildInPlace(data)

-         except koji.GenericError, e:

-             print(_("\nError importing: %s" % str(e).splitlines()[-1]))

+         except koji.GenericError as e:

+             print((_("\nError importing: %s" % str(e).splitlines()[-1])))

              sys.stdout.flush()

          else:

-             print(_("done"))

+             print((_("done")))

          sys.stdout.flush()

  

  def handle_import_archive(options, session, args):
@@ -2598,7 +2604,7 @@ 

  

      for filepath in args[1:]:

          filename = os.path.basename(filepath)

-         print("Uploading archive: %s" % filename)

+         print(("Uploading archive: %s" % filename))

          serverdir = _unique_path('cli-import')

          if _running_in_bg() or suboptions.noprogress:

              callback = None
@@ -2611,7 +2617,7 @@ 

          print('')

          serverpath = "%s/%s" % (serverdir, filename)

          session.importArchive(serverpath, buildinfo, suboptions.type, suboptions.type_info)

-         print("Imported: %s" % filename)

+         print(("Imported: %s" % filename))

  

  def handle_grant_permission(options, session, args):

      "[admin] Grant a permission to a user"
@@ -2765,11 +2771,11 @@ 

                      fmt = "%(nvr)-40s  %(tag_name)-20s  %(owner_name)s"

              if not options.quiet:

                  if options.type == 'maven':

-                     print("%-40s  %-20s  %-20s  %-20s  %s" % ("Build", "Tag", "Group Id", "Artifact Id", "Built by"))

-                     print("%s  %s  %s  %s  %s" % ("-"*40, "-"*20, "-"*20, "-"*20, "-"*16))

+                     print(("%-40s  %-20s  %-20s  %-20s  %s" % ("Build", "Tag", "Group Id", "Artifact Id", "Built by")))

+                     print(("%s  %s  %s  %s  %s" % ("-"*40, "-"*20, "-"*20, "-"*20, "-"*16)))

                  else:

-                     print("%-40s  %-20s  %s" % ("Build","Tag","Built by"))

-                     print("%s  %s  %s" % ("-"*40, "-"*20, "-"*16))

+                     print(("%-40s  %-20s  %s" % ("Build","Tag","Built by")))

+                     print(("%s  %s  %s" % ("-"*40, "-"*20, "-"*16)))

                  options.quiet = True

  

          output = [ fmt % x for x in data]
@@ -2805,9 +2811,9 @@ 

              args = "(%s)" % ", ".join(expanded)

          else:

              args = "()"

-         print('%s%s' % (x['name'], args))

+         print(('%s%s' % (x['name'], args)))

          if x['doc']:

-             print("  description: %s" % x['doc'])

+             print(("  description: %s" % x['doc']))

  

  def anon_handle_list_tagged(options, session, args):

      "[info] List the builds or rpms in a tag"
@@ -2858,7 +2864,7 @@ 

      if event:

          opts['event'] = event['id']

          event['timestr'] = time.asctime(time.localtime(event['ts']))

-         print("Querying at event %(id)i (%(timestr)s)" % event)

+         print(("Querying at event %(id)i (%(timestr)s)" % event))

  

      if options.rpms:

          rpms, builds = session.listTaggedRPMS(tag, **opts)
@@ -2899,11 +2905,11 @@ 

                  fmt = "%(nvr)-40s  %(tag_name)-20s  %(owner_name)s"

          if not options.quiet:

              if options.type == 'maven':

-                 print("%-40s  %-20s  %-20s  %-20s  %s" % ("Build", "Tag", "Group Id", "Artifact Id", "Built by"))

-                 print("%s  %s  %s  %s  %s" % ("-"*40, "-"*20, "-"*20, "-"*20, "-"*16))

+                 print(("%-40s  %-20s  %-20s  %-20s  %s" % ("Build", "Tag", "Group Id", "Artifact Id", "Built by")))

+                 print(("%s  %s  %s  %s  %s" % ("-"*40, "-"*20, "-"*20, "-"*20, "-"*16)))

              else:

-                 print("%-40s  %-20s  %s" % ("Build","Tag","Built by"))

-                 print("%s  %s  %s" % ("-"*40, "-"*20, "-"*16))

+                 print(("%-40s  %-20s  %s" % ("Build","Tag","Built by")))

+                 print(("%s  %s  %s" % ("-"*40, "-"*20, "-"*16)))

  

      output = [ fmt % x for x in data]

      output.sort()
@@ -2936,7 +2942,7 @@ 

      order.sort()

      for nvra, rinfo in order:

          if options.verbose and rinfo.get('is_update'):

-             print(nvra, "[update]")

+             print((nvra, "[update]"))

          else:

              print(nvra)

  
@@ -2991,10 +2997,10 @@ 

          print(line)

  

  def print_group_list_req_group(group):

-     print("  @%(name)s  [%(tag_name)s]" % group)

+     print(("  @%(name)s  [%(tag_name)s]" % group))

  

  def print_group_list_req_package(pkg):

-     print("  %(package)s: %(basearchonly)s, %(type)s  [%(tag_name)s]" % pkg)

+     print(("  %(package)s: %(basearchonly)s, %(type)s  [%(tag_name)s]" % pkg))

  

  def anon_handle_list_groups(options, session, args):

      "[info] Print the group listings"
@@ -3014,7 +3020,7 @@ 

      if event:

          opts['event'] = event['id']

          event['timestr'] = time.asctime(time.localtime(event['ts']))

-         print("Querying at event %(id)i (%(timestr)s)" % event)

+         print(("Querying at event %(id)i (%(timestr)s)" % event))

      tags = dict([(x['id'], x['name']) for x in session.listTags()])

      tmp_list = [(x['name'], x) for x in session.getTagGroups(args[0], **opts)]

      tmp_list.sort()
@@ -3022,7 +3028,7 @@ 

      for group in groups:

          if len(args) > 1 and group['name'] != args[1]:

              continue

-         print("%s  [%s]" % (group['name'], tags.get(group['tag_id'], group['tag_id'])))

+         print(("%s  [%s]" % (group['name'], tags.get(group['tag_id'], group['tag_id']))))

          groups = [(x['name'], x) for x in group['grouplist']]

          groups.sort()

          for x in [x[1] for x in groups]:
@@ -3177,7 +3183,7 @@ 

      if not options.quiet:

          print("Hostname                     Enb Rdy Load/Cap Arches           Last Update")

      for host in hosts:

-         print("%(name)-28s %(enabled)-3s %(ready)-3s %(task_load)4.1f/%(capacity)-3.1f %(arches)-16s %(update)s" % host)

+         print(("%(name)-28s %(enabled)-3s %(ready)-3s %(task_load)4.1f/%(capacity)-3.1f %(arches)-16s %(update)s" % host))

  

  def anon_handle_list_pkgs(options, session, args):

      "[info] Print the package listing for tag or for owner"
@@ -3228,7 +3234,7 @@ 

      if event:

          opts['event'] = event['id']

          event['timestr'] = time.asctime(time.localtime(event['ts']))

-         print("Querying at event %(id)i (%(timestr)s)" % event)

+         print(("Querying at event %(id)i (%(timestr)s)" % event))

      data = session.listPackages(**opts)

      if not data:

          print("(no matching packages)")
@@ -3236,13 +3242,13 @@ 

      if not options.quiet:

          if allpkgs:

              print("Package")

-             print('-'*23)

+             print(('-'*23))

          else:

-             print("%-23s %-23s %-16s %-15s" % ('Package','Tag','Extra Arches','Owner'))

-             print("%s %s %s %s" % ('-'*23,'-'*23,'-'*16,'-'*15))

+             print(("%-23s %-23s %-16s %-15s" % ('Package','Tag','Extra Arches','Owner')))

+             print(("%s %s %s %s" % ('-'*23,'-'*23,'-'*16,'-'*15)))

      for pkg in data:

          if allpkgs:

-             print(pkg['package_name'])

+             print((pkg['package_name']))

          else:

              if not options.show_blocked and pkg.get('blocked',False):

                  continue
@@ -3254,7 +3260,7 @@ 

                      fmt += " [BLOCKED]"

              else:

                  fmt = "%(package_name)s"

-             print(fmt % pkg)

+             print((fmt % pkg))

  

  def anon_handle_rpminfo(options, session, args):

      "[info] Print basic information about an RPM"
@@ -3270,7 +3276,7 @@ 

      for rpm in args:

          info = session.getRPM(rpm)

          if info is None:

-             print("No such rpm: %s\n" % rpm)

+             print(("No such rpm: %s\n" % rpm))

              continue

          if info['epoch'] is None:

              info['epoch'] = ""
@@ -3284,42 +3290,42 @@ 

                  buildinfo['epoch'] = ""

              else:

                  buildinfo['epoch'] = str(buildinfo['epoch']) + ":"

-         print("RPM: %(epoch)s%(name)s-%(version)s-%(release)s.%(arch)s [%(id)d]" % info)

+         print(("RPM: %(epoch)s%(name)s-%(version)s-%(release)s.%(arch)s [%(id)d]" % info))

          if info.get('external_repo_id'):

              repo = session.getExternalRepo(info['external_repo_id'])

-             print("External Repository: %(name)s [%(id)i]" % repo)

-             print("External Repository url: %(url)s" % repo)

+             print(("External Repository: %(name)s [%(id)i]" % repo))

+             print(("External Repository url: %(url)s" % repo))

          else:

-             print("RPM Path: %s" % os.path.join(koji.pathinfo.build(buildinfo), koji.pathinfo.rpm(info)))

-             print("SRPM: %(epoch)s%(name)s-%(version)s-%(release)s [%(id)d]" % buildinfo)

-             print("SRPM Path: %s" % os.path.join(koji.pathinfo.build(buildinfo), koji.pathinfo.rpm(buildinfo)))

-             print("Built: %s" % time.strftime('%a, %d %b %Y %H:%M:%S %Z', time.localtime(info['buildtime'])))

-         print("SIGMD5: %(payloadhash)s" % info)

-         print("Size: %(size)s" % info)

+             print(("RPM Path: %s" % os.path.join(koji.pathinfo.build(buildinfo), koji.pathinfo.rpm(info))))

+             print(("SRPM: %(epoch)s%(name)s-%(version)s-%(release)s [%(id)d]" % buildinfo))

+             print(("SRPM Path: %s" % os.path.join(koji.pathinfo.build(buildinfo), koji.pathinfo.rpm(buildinfo))))

+             print(("Built: %s" % time.strftime('%a, %d %b %Y %H:%M:%S %Z', time.localtime(info['buildtime']))))

+         print(("SIGMD5: %(payloadhash)s" % info))

+         print(("Size: %(size)s" % info))

          if not info.get('external_repo_id', 0):

-             print("Build ID: %(build_id)s" % info)

+             print(("Build ID: %(build_id)s" % info))

          if info['buildroot_id'] is None:

              print("No buildroot data available")

          else:

              br_info = session.getBuildroot(info['buildroot_id'])

              if br_info['br_type'] == koji.BR_TYPES['STANDARD']:

-                 print("Buildroot: %(id)i (tag %(tag_name)s, arch %(arch)s, repo %(repo_id)i)" % br_info)

-                 print("Build Host: %(host_name)s" % br_info)

-                 print("Build Task: %(task_id)i" % br_info)

+                 print(("Buildroot: %(id)i (tag %(tag_name)s, arch %(arch)s, repo %(repo_id)i)" % br_info))

+                 print(("Build Host: %(host_name)s" % br_info))

+                 print(("Build Task: %(task_id)i" % br_info))

              else:

-                 print("Content generator: %(cg_name)s" % br_info)

-                 print("Buildroot: %(id)i" % br_info)

-                 print("Build Host OS: %(host_os)s (%(host_arch)s)" % br_info)

+                 print(("Content generator: %(cg_name)s" % br_info))

+                 print(("Buildroot: %(id)i" % br_info))

+                 print(("Build Host OS: %(host_os)s (%(host_arch)s)" % br_info))

          if info.get('extra'):

-             print("Extra: %(extra)r" % info)

+             print(("Extra: %(extra)r" % info))

          if options.buildroots:

              br_list = session.listBuildroots(rpmID=info['id'], queryOpts={'order':'buildroot.id'})

-             print("Used in %i buildroots:" % len(br_list))

+             print(("Used in %i buildroots:" % len(br_list)))

              if len(br_list):

-                 print("  %8s %-28s %-8s %-29s" % ('id','build tag','arch','build host'))

-                 print("  %s %s %s %s" % ('-'*8, '-'*28, '-'*8, '-'*29))

+                 print(("  %8s %-28s %-8s %-29s" % ('id','build tag','arch','build host')))

+                 print(("  %s %s %s %s" % ('-'*8, '-'*28, '-'*8, '-'*29)))

              for br_info in br_list:

-                 print("  %(id)8i %(tag_name)-28s %(arch)-8s %(host_name)-29s" % br_info)

+                 print(("  %(id)8i %(tag_name)-28s %(arch)-8s %(host_name)-29s" % br_info))

  

  

  def anon_handle_buildinfo(options, session, args):
@@ -3338,7 +3344,7 @@ 

              build = int(build)

          info = session.getBuild(build)

          if info is None:

-             print("No such build: %s\n" % build)

+             print(("No such build: %s\n" % build))

              continue

          task = None

          if info['task_id']:
@@ -3348,43 +3354,43 @@ 

              taglist.append(tag['name'])

          info['arch'] = 'src'

          info['state'] = koji.BUILD_STATES[info['state']]

-         print("BUILD: %(name)s-%(version)s-%(release)s [%(id)d]" % info)

-         print("State: %(state)s" % info)

-         print("Built by: %(owner_name)s" % info)

+         print(("BUILD: %(name)s-%(version)s-%(release)s [%(id)d]" % info))

+         print(("State: %(state)s" % info))

+         print(("Built by: %(owner_name)s" % info))

          source = info.get('source')

          if source is not None:

-             print("Source: %s" % source)

+             print(("Source: %s" % source))

          if 'volume_name' in info:

-             print("Volume: %(volume_name)s" % info)

+             print(("Volume: %(volume_name)s" % info))

          if task:

-             print("Task: %s %s" % (task['id'], koji.taskLabel(task)))

+             print(("Task: %s %s" % (task['id'], koji.taskLabel(task))))

          else:

              print("Task: none")

-         print("Finished: %s" % koji.formatTimeLong(info['completion_time']))

+         print(("Finished: %s" % koji.formatTimeLong(info['completion_time'])))

          maven_info = session.getMavenBuild(info['id'])

          if maven_info:

-             print("Maven groupId: %s" % maven_info['group_id'])

-             print("Maven artifactId: %s" % maven_info['artifact_id'])

-             print("Maven version: %s" % maven_info['version'])

+             print(("Maven groupId: %s" % maven_info['group_id']))

+             print(("Maven artifactId: %s" % maven_info['artifact_id']))

+             print(("Maven version: %s" % maven_info['version']))

          win_info = session.getWinBuild(info['id'])

          if win_info:

-             print("Windows build platform: %s" % win_info['platform'])

-         print("Tags: %s" % ' '.join(taglist))

+             print(("Windows build platform: %s" % win_info['platform']))

+         print(("Tags: %s" % ' '.join(taglist)))

          if info.get('extra'):

-             print("Extra: %(extra)r" % info)

+             print(("Extra: %(extra)r" % info))

          archives_seen = {}

          maven_archives = session.listArchives(buildID=info['id'], type='maven')

          if maven_archives:

              print("Maven archives:")

              for archive in maven_archives:

                  archives_seen.setdefault(archive['id'], 1)

-                 print(os.path.join(koji.pathinfo.mavenbuild(info), koji.pathinfo.mavenfile(archive)))

+                 print((os.path.join(koji.pathinfo.mavenbuild(info), koji.pathinfo.mavenfile(archive))))

          win_archives = session.listArchives(buildID=info['id'], type='win')

          if win_archives:

              print("Windows archives:")

              for archive in win_archives:

                  archives_seen.setdefault(archive['id'], 1)

-                 print(os.path.join(koji.pathinfo.winbuild(info), koji.pathinfo.winfile(archive)))

+                 print((os.path.join(koji.pathinfo.winbuild(info), koji.pathinfo.winfile(archive))))

          rpms = session.listRPMs(buildID=info['id'])

          image_info = session.getImageBuild(info['id'])

          img_archives = session.listArchives(buildID=info['id'], type='image')
@@ -3392,7 +3398,7 @@ 

              print('Image archives:')

              for archive in img_archives:

                  archives_seen.setdefault(archive['id'], 1)

-                 print(os.path.join(koji.pathinfo.imagebuild(info), archive['filename']))

+                 print((os.path.join(koji.pathinfo.imagebuild(info), archive['filename'])))

          archive_idx = {}

          for archive in session.listArchives(buildID=info['id']):

              if archive['id'] in archives_seen:
@@ -3400,18 +3406,18 @@ 

              archive_idx.setdefault(archive['btype'], []).append(archive)

          for btype in archive_idx:

              archives = archive_idx[btype]

-             print('%s Archives:' % btype.capitalize())

+             print(('%s Archives:' % btype.capitalize()))

              for archive in archives:

-                 print(os.path.join(koji.pathinfo.typedir(info, btype), archive['filename']))

+                 print((os.path.join(koji.pathinfo.typedir(info, btype), archive['filename'])))

          if rpms:

              print("RPMs:")

              for rpm in rpms:

-                 print(os.path.join(koji.pathinfo.build(info), koji.pathinfo.rpm(rpm)))

+                 print((os.path.join(koji.pathinfo.build(info), koji.pathinfo.rpm(rpm))))

          if options.changelog:

              changelog = session.getChangelogEntries(info['id'])

              if changelog:

                  print("Changelog:")

-                 print(koji.util.formatChangelog(changelog))

+                 print((koji.util.formatChangelog(changelog)))

  

  def handle_clone_tag(options, session, args):

      "[admin] Duplicate the contents of one tag onto another tag"
@@ -3452,7 +3458,7 @@ 

      activate_session(session)

  

      if not session.hasPerm('admin') and not options.test:

-         print(_("This action requires admin privileges"))

+         print((_("This action requires admin privileges")))

          return

  

      if args[0] == args[1]:
@@ -3465,7 +3471,7 @@ 

      event = koji.util.eventFromOpts(session, options) or {}

      if event:

          event['timestr'] = time.asctime(time.localtime(event['ts']))

-         print(_("Cloning at event %(id)i (%(timestr)s)") % event)

+         print((_("Cloning at event %(id)i (%(timestr)s)") % event))

  

      # store tags.

      srctag = session.getTag(args[0])
@@ -3474,8 +3480,8 @@ 

          sys.stdout.write("Unknown src-tag: %s\n" % args[0])

          return

      if (srctag['locked'] and not options.force) or (dsttag and dsttag['locked'] and not options.force):

-         print(_("Error: You are attempting to clone from or to a tag which is locked."))

-         print(_("Please use --force if this is what you really want to do."))

+         print((_("Error: You are attempting to clone from or to a tag which is locked.")))

+         print((_("Please use --force if this is what you really want to do.")))

          return

  

      # init debug lists.
@@ -3485,7 +3491,7 @@ 

      # case of brand new dst-tag.

      if not dsttag:

          if not options.config:

-             print(_('Cannot create tag without specifying --config'))

+             print((_('Cannot create tag without specifying --config')))

              return

          # create a new tag, copy srctag header.

          if not options.test:
@@ -3573,35 +3579,35 @@ 

                  dstgroups[group['name']] = group

          #construct to-do lists.

          paddlist = [] # list containing new packages to be added from src tag

-         for (package_name, pkg) in srcpkgs.iteritems():

+         for (package_name, pkg) in srcpkgs.items():

              if package_name not in dstpkgs:

                  paddlist.append(pkg)

          paddlist.sort(key = lambda x: x['package_name'])

          pdellist = [] # list containing packages no more present in dst tag

-         for (package_name, pkg) in dstpkgs.iteritems():

+         for (package_name, pkg) in dstpkgs.items():

              if package_name not in srcpkgs:

                  pdellist.append(pkg)

          pdellist.sort(key = lambda x: x['package_name'])

          baddlist = [] # list containing new builds to be added from src tag

-         for (nvr, lbld) in srclblds.iteritems():

+         for (nvr, lbld) in srclblds.items():

              if nvr not in dstlblds:

                  baddlist.append(lbld)

          baddlist.sort(key = lambda x: x['package_name'])

          bdellist = [] # list containing new builds to be removed from src tag

-         for (nvr, lbld) in dstlblds.iteritems():

+         for (nvr, lbld) in dstlblds.items():

              if nvr not in srclblds:

                  bdellist.append(lbld)

          bdellist.sort(key = lambda x: x['package_name'])

          gaddlist = [] # list containing new groups to be added from src tag

-         for (grpname, group) in srcgroups.iteritems():

+         for (grpname, group) in srcgroups.items():

              if grpname not in dstgroups:

                  gaddlist.append(group)

          gdellist = [] # list containing groups to be removed from src tag

-         for (grpname, group) in dstgroups.iteritems():

+         for (grpname, group) in dstgroups.items():

              if grpname not in srcgroups:

                  gdellist.append(group)

          grpchanges = {} # dict of changes to make in shared groups

-         for (grpname, group) in srcgroups.iteritems():

+         for (grpname, group) in srcgroups.items():

              if grpname in dstgroups:

                  grpchanges[grpname] = {'adds':[], 'dels':[]}

                  # Store whether group is inherited or not
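dict.iteritems() is gone on Python 3; .items() exists on both versions (a list on Python 2, a view on Python 3), and either can be iterated the same way, which is the pattern used for the clone-tag to-do lists above. A small sketch with illustrative data:

srcpkgs = {"bash": {"package_name": "bash"},     # illustrative data only
           "sed": {"package_name": "sed"}}
dstpkgs = {"bash": {"package_name": "bash"}}

# runs unchanged on Python 2 and Python 3
paddlist = [pkg for name, pkg in srcpkgs.items() if name not in dstpkgs]
paddlist.sort(key=lambda x: x["package_name"])
print(paddlist)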
@@ -3797,13 +3803,13 @@ 

      chkbuildtag = session.getTag(build_tag)

      chkdesttag = session.getTag(dest_tag)

      if not chkbuildtag:

-         print("Build tag does not exist: %s" % build_tag)

+         print(("Build tag does not exist: %s" % build_tag))

          return 1

      if not chkbuildtag.get("arches", None):

-         print("Build tag has no arches: %s" % build_tag)

+         print(("Build tag has no arches: %s" % build_tag))

          return 1

      if not chkdesttag:

-         print("Destination tag does not exist: %s" % dest_tag)

+         print(("Destination tag does not exist: %s" % dest_tag))

          return 1

  

      session.createBuildTarget(name, build_tag, dest_tag)
@@ -3840,15 +3846,15 @@ 

          targetInfo['build_tag_name'] = options.build_tag

          chkbuildtag = session.getTag(options.build_tag)

          if not chkbuildtag:

-             print("Build tag does not exist: %s" % options.build_tag)

+             print(("Build tag does not exist: %s" % options.build_tag))

              return 1

          if not chkbuildtag.get("arches", None):

-             print("Build tag has no arches: %s" % options.build_tag)

+             print(("Build tag has no arches: %s" % options.build_tag))

              return 1

      if options.dest_tag:

          chkdesttag = session.getTag(options.dest_tag)

          if not chkdesttag:

-             print("Destination tag does not exist: %s" % options.dest_tag)

+             print(("Destination tag does not exist: %s" % options.dest_tag))

              return 1

          targetInfo['dest_tag_name'] = options.dest_tag

  
@@ -3873,7 +3879,7 @@ 

      target = args[0]

      target_info = session.getBuildTarget(target)

      if not target_info:

-         print("Build target %s does not exist" % target)

+         print(("Build target %s does not exist" % target))

          return 1

  

      session.deleteBuildTarget(target_info['id'])
@@ -3897,7 +3903,7 @@ 

      tag = args[0]

      tag_info = session.getTag(tag)

      if not tag_info:

-         print("Tag %s does not exist" % tag)

+         print(("Tag %s does not exist" % tag))

          return 1

  

      session.deleteTag(tag_info['id'])
@@ -3917,13 +3923,13 @@ 

  

      fmt = "%(name)-30s %(build_tag_name)-30s %(dest_tag_name)-30s"

      if not options.quiet:

-         print("%-30s %-30s %-30s" % ('Name','Buildroot','Destination'))

-         print("-" * 93)

+         print(("%-30s %-30s %-30s" % ('Name','Buildroot','Destination')))

+         print(("-" * 93))

      tmp_list = [(x['name'], x) for x in session.getBuildTargets(options.name)]

      tmp_list.sort()

      targets = [x[1] for x in tmp_list]

      for target in targets:

-         print(fmt % target)

+         print((fmt % target))

      #pprint.pprint(session.getBuildTargets())

  

  def _printInheritance(tags, sibdepths=None, reverse=False):
@@ -3985,7 +3991,7 @@ 

      event = koji.util.eventFromOpts(session, options)

      if event:

          event['timestr'] = time.asctime(time.localtime(event['ts']))

-         print("Querying at event %(id)i (%(timestr)s)" % event)

+         print(("Querying at event %(id)i (%(timestr)s)" % event))

      if event:

          tag = session.getTag(args[0], event=event['id'])

      else:
@@ -4068,7 +4074,7 @@ 

              if tag['locked'] or tag['perm']:

                  continue

          if not options.verbose:

-             print(fmt % tag)

+             print((fmt % tag))

          else:

              sys.stdout.write(fmt % tag)

              if tag['locked']:
@@ -4135,8 +4141,8 @@ 

          return "%s: %s" % (time_str, fmt % x)

      for event_id, x in timeline:

          if options.debug:

-             print("%r" % x)

-         print(_histline(event_id, x))

+             print(("%r" % x))

+         print((_histline(event_id, x)))

  

  def _print_histline(entry, **kwargs):

      options = kwargs['options']
@@ -4155,7 +4161,7 @@ 

          if event_id != other[0]:

              bad_edit = "non-matching"

          if bad_edit:

-             print("Warning: unusual edit at event %i in table %s (%s)" % (event_id, table, bad_edit))

+             print(("Warning: unusual edit at event %i in table %s (%s)" % (event_id, table, bad_edit)))

              #we'll simply treat them as separate events

              pprint.pprint(entry)

              pprint.pprint(edit)
@@ -4284,7 +4290,7 @@ 

          parts.append(who % x)

      if create and x['active']:

          parts.append("[still active]")

-     print(' '.join(parts))

+     print((' '.join(parts)))

      hidden_fields = ['active', 'create_event', 'revoke_event', 'creator_id', 'revoker_id',

                       'creator_name', 'revoker_name', 'create_ts', 'revoke_ts']

      def get_nkey(key):
@@ -4295,7 +4301,7 @@ 

          else:

              return '%s.name' % key

      if edit:

-         keys = x.keys()

+         keys = list(x.keys())

          keys.sort()

          y = other[-1]

          for key in keys:
@@ -4308,9 +4314,9 @@ 

              nkey = get_nkey(key)

              if nkey in x and nkey in y:

                  continue

-             print("    %s: %s -> %s" % (key, x[key], y[key]))

+             print(("    %s: %s -> %s" % (key, x[key], y[key])))

      elif create and options.verbose and table != 'tag_listing':

-         keys = x.keys()

+         keys = list(x.keys())

          keys.sort()

          # the table keys have already been represented in the base format string

          also_hidden = list(_table_keys[table])
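On Python 3, dict.keys() returns a view that has no sort() method and cannot be concatenated to a list, so the keys are wrapped in list() before sorting (or sorted() is used directly); both spellings also work on Python 2. A standalone sketch with an illustrative history entry:

x = {"tag.name": "f24-build", "active": True, "creator_name": "kojiadmin"}  # illustrative

keys = list(x.keys())    # view -> real list, so in-place sort() is available
keys.sort()

keys = sorted(x.keys())  # equivalent shorter form used elsewhere in the change
print(keys)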
@@ -4329,7 +4335,7 @@ 

                  dkey = key[:-5]

              else:

                  dkey = key

-             print("    %s: %s" % (dkey, x[key]))

+             print(("    %s: %s" % (dkey, x[key])))

  

  _table_keys = {

      'user_perms' : ['user_id', 'perm_id'],
@@ -4468,7 +4474,7 @@ 

                  new_timeline.append(entry)

          for entry in new_timeline:

              if options.debug:

-                 print("%r" % list(entry))

+                 print(("%r" % list(entry)))

              _print_histline(entry, options=options)

          if not options.watch:

              break
@@ -4673,35 +4679,35 @@ 

  

      owner = session.getUser(info['owner'])['name']

  

-     print("%sTask: %d" % (indent, task_id))

-     print("%sType: %s" % (indent, info['method']))

+     print(("%sTask: %d" % (indent, task_id)))

+     print(("%sType: %s" % (indent, info['method'])))

      if verbose:

-         print("%sRequest Parameters:" % indent)

+         print(("%sRequest Parameters:" % indent))

          for line in _parseTaskParams(session, info['method'], task_id):

-             print("%s  %s" % (indent, line))

-     print("%sOwner: %s" % (indent, owner))

-     print("%sState: %s" % (indent, koji.TASK_STATES[info['state']].lower()))

-     print("%sCreated: %s" % (indent, time.asctime(time.localtime(info['create_ts']))))

+             print(("%s  %s" % (indent, line)))

+     print(("%sOwner: %s" % (indent, owner)))

+     print(("%sState: %s" % (indent, koji.TASK_STATES[info['state']].lower())))

+     print(("%sCreated: %s" % (indent, time.asctime(time.localtime(info['create_ts'])))))

      if info.get('start_ts'):

-         print("%sStarted: %s" % (indent, time.asctime(time.localtime(info['start_ts']))))

+         print(("%sStarted: %s" % (indent, time.asctime(time.localtime(info['start_ts'])))))

      if info.get('completion_ts'):

-         print("%sFinished: %s" % (indent, time.asctime(time.localtime(info['completion_ts']))))

+         print(("%sFinished: %s" % (indent, time.asctime(time.localtime(info['completion_ts'])))))

      if host_info:

-         print("%sHost: %s" % (indent, host_info['name']))

+         print(("%sHost: %s" % (indent, host_info['name'])))

      if build_info:

-         print("%sBuild: %s (%d)" % (indent, build_info[0]['nvr'], build_info[0]['build_id']))

+         print(("%sBuild: %s (%d)" % (indent, build_info[0]['nvr'], build_info[0]['build_id'])))

      if buildroot_infos:

-         print("%sBuildroots:" % indent)

+         print(("%sBuildroots:" % indent))

          for root in buildroot_infos:

-             print("%s  %s/%s-%d-%d/" % (indent, BUILDDIR, root['tag_name'], root['id'], root['repo_id']))

+             print(("%s  %s/%s-%d-%d/" % (indent, BUILDDIR, root['tag_name'], root['id'], root['repo_id'])))

      if logs:

-         print("%sLog Files:" % indent)

+         print(("%sLog Files:" % indent))

          for log in logs:

-             print("%s  %s/%s" % (indent, files_dir, log))

+             print(("%s  %s/%s" % (indent, files_dir, log)))

      if output:

-         print("%sOutput:" % indent)

+         print(("%sOutput:" % indent))

          for filename in output:

-             print("%s  %s/%s" % (indent, files_dir, filename))

+             print(("%s  %s/%s" % (indent, files_dir, filename)))

  

      # white space

      print('')
@@ -4748,7 +4754,7 @@ 

      event_opts = {}

      if event:

          event['timestr'] = time.asctime(time.localtime(event['ts']))

-         print("Querying at event %(id)i (%(timestr)s)" % event)

+         print(("Querying at event %(id)i (%(timestr)s)" % event))

          event_opts['event'] = event['id']

      perms = dict([(p['id'], p['name']) for p in session.getAllPerms()])

  
@@ -4756,32 +4762,31 @@ 

      for tag in args:

          info = session.getTag(tag, **event_opts)

          if info is None:

-             print("No such tag: %s" % tag)

+             print(("No such tag: %s" % tag))

              sys.exit(1)

          tags.append(info)

  

      for n, info in enumerate(tags):

          if n > 0:

              print('')

-         print("Tag: %(name)s [%(id)d]" %info)

-         print("Arches: %(arches)s" %info)

+         print(("Tag: %(name)s [%(id)d]" %info))

+         print(("Arches: %(arches)s" %info))

          group_list = [x['name'] for x in session.getTagGroups(info['id'], **event_opts)]

          group_list.sort()

-         print("Groups: " + ', '.join(group_list))

+         print(("Groups: " + ', '.join(group_list)))

          if info.get('locked'):

              print('LOCKED')

          if info.get('perm_id') is not None:

              perm_id = info['perm_id']

-             print("Required permission: %r" % perms.get(perm_id, perm_id))

+             print(("Required permission: %r" % perms.get(perm_id, perm_id)))

          if session.mavenEnabled():

-             print("Maven support?: %s" % (info['maven_support'] and 'yes' or 'no'))

-             print("Include all Maven archives?: %s" % (info['maven_include_all'] and 'yes' or 'no'))

+             print(("Maven support?: %s" % (info['maven_support'] and 'yes' or 'no')))

+             print(("Include all Maven archives?: %s" % (info['maven_include_all'] and 'yes' or 'no')))

          if 'extra' in info:

              print("Tag options:")

-             keys = info['extra'].keys()

-             keys.sort()

+             keys = sorted(info['extra'].keys())

              for key in keys:

-                 print("  %s : %s" % (key, pprint.pformat(info['extra'][key])))

+                 print(("  %s : %s" % (key, pprint.pformat(info['extra'][key]))))

          dest_targets = session.getBuildTargets(destTagID=info['id'], **event_opts)

          build_targets = session.getBuildTargets(buildTagID=info['id'], **event_opts)

          repos = {}
@@ -4797,21 +4802,21 @@ 

              print("Targets that build into this tag:")

              for target in dest_targets:

                  if event:

-                     print("  %s (%s)" % (target['name'], target['build_tag_name']))

+                     print(("  %s (%s)" % (target['name'], target['build_tag_name'])))

                  else:

-                     print("  %s (%s, %s)" % (target['name'], target['build_tag_name'], repos[target['build_tag']]))

+                     print(("  %s (%s, %s)" % (target['name'], target['build_tag_name'], repos[target['build_tag']])))

          if build_targets:

              print("This tag is a buildroot for one or more targets")

              if not event:

-                 print("Current repo: %s" % repos[info['id']])

+                 print(("Current repo: %s" % repos[info['id']]))

              print("Targets that build from this tag:")

              for target in build_targets:

-                 print("  %s" % target['name'])

+                 print(("  %s" % target['name']))

          external_repos = session.getTagExternalRepos(tag_info=info['id'], **event_opts)

          if external_repos:

              print("External repos:")

              for rinfo in external_repos:

-                 print("  %(priority)3i %(external_repo_name)s (%(url)s)" % rinfo)

+                 print(("  %(priority)3i %(external_repo_name)s (%(url)s)" % rinfo))

          print("Inheritance:")

          for parent in session.getInheritanceData(tag, **event_opts):

              flags = ''
@@ -4825,11 +4830,11 @@ 

                  else:

                      flags += '.'

              parent['flags'] = flags

-             print("  %(priority)-4d %(flags)s %(name)s [%(parent_id)s]" % parent)

+             print(("  %(priority)-4d %(flags)s %(name)s [%(parent_id)s]" % parent))

              if parent['maxdepth'] is not None:

-                 print("    maxdepth: %(maxdepth)s" % parent)

+                 print(("    maxdepth: %(maxdepth)s" % parent))

              if parent['pkg_filter']:

-                 print("    package filter: %(pkg_filter)s" % parent)

+                 print(("    package filter: %(pkg_filter)s" % parent))

  

  

  def handle_add_tag(options, session, args):
@@ -4955,25 +4960,25 @@ 

                      selected.append(tag)

                      break

          if not selected:

-             print(_("No tags matched"))

+             print((_("No tags matched")))

      else:

          selected = [session.getTag(name) for name in args]

      for tag in selected:

          if options.master:

              #set the master lock

              if tag['locked']:

-                 print(_("Tag %s: master lock already set") % tag['name'])

+                 print((_("Tag %s: master lock already set") % tag['name']))

                  continue

              elif options.test:

-                 print(_("Would have set master lock for: %s") % tag['name'])

+                 print((_("Would have set master lock for: %s") % tag['name']))

                  continue

              session.editTag2(tag['id'], locked=True)

          else:

              if tag['perm_id'] == perm_id:

-                 print(_("Tag %s: %s permission already required") % (tag['name'], perm))

+                 print((_("Tag %s: %s permission already required") % (tag['name'], perm)))

                  continue

              elif options.test:

-                 print(_("Would have set permission requirement %s for tag %s") % (perm, tag['name']))

+                 print((_("Would have set permission requirement %s for tag %s") % (perm, tag['name'])))

                  continue

              session.editTag2(tag['id'], perm=perm_id)

  
@@ -4997,7 +5002,7 @@ 

                      selected.append(tag)

                      break

          if not selected:

-             print(_("No tags matched"))

+             print((_("No tags matched")))

      else:

          selected = []

          for name in args:
@@ -5014,10 +5019,10 @@ 

          if tag['perm_id']:

              opts['perm'] = None

          if not opts:

-             print("Tag %(name)s: not locked" % tag)

+             print(("Tag %(name)s: not locked" % tag))

              continue

          if options.test:

-             print("Tag %s: skipping changes: %r" % (tag['name'], opts))

+             print(("Tag %s: skipping changes: %r" % (tag['name'], opts)))

          else:

              session.editTag2(tag['id'], locked=False, perm_id=None)

  
@@ -5054,12 +5059,12 @@ 

      samePriority = [datum for datum in inheritanceData if datum['priority'] == priority]

  

      if sameParents and not options.force:

-         print(_("Error: You are attempting to add %s as %s's parent even though it already is %s's parent.")

-                     % (parent['name'], tag['name'], tag['name']))

-         print(_("Please use --force if this is what you really want to do."))

+         print((_("Error: You are attempting to add %s as %s's parent even though it already is %s's parent.")

+                     % (parent['name'], tag['name'], tag['name'])))

+         print((_("Please use --force if this is what you really want to do.")))

          return

      if samePriority:

-         print(_("Error: There is already an active inheritance with that priority on %s, please specify a different priority with --priority." % tag['name']))

+         print((_("Error: There is already an active inheritance with that priority on %s, please specify a different priority with --priority." % tag['name'])))

          return

  

      new_data = {}
@@ -5119,17 +5124,17 @@ 

          data = [datum for datum in data if datum['priority'] == priority]

  

      if len(data) == 0:

-         print(_("No inheritance link found to remove.  Please check your arguments"))

+         print((_("No inheritance link found to remove.  Please check your arguments")))

          return 1

      elif len(data) > 1:

-         print(_("Multiple matches for tag."))

+         print((_("Multiple matches for tag.")))

          if not parent:

-             print(_("Please specify a parent on the command line."))

+             print((_("Please specify a parent on the command line.")))

              return 1

          if not priority:

-             print(_("Please specify a priority on the command line."))

+             print((_("Please specify a priority on the command line.")))

              return 1

-         print(_("Error: Key constraints may be broken.  Exiting."))

+         print((_("Error: Key constraints may be broken.  Exiting.")))

          return 1

  

      # len(data) == 1
@@ -5138,7 +5143,7 @@ 

      inheritanceData = session.getInheritanceData(tag['id'])

      samePriority = [datum for datum in inheritanceData if datum['priority'] == options.priority]

      if samePriority:

-         print(_("Error: There is already an active inheritance with that priority on %s, please specify a different priority with --priority.") % tag['name'])

+         print((_("Error: There is already an active inheritance with that priority on %s, please specify a different priority with --priority.") % tag['name']))

          return 1

  

      new_data = data.copy()
@@ -5150,7 +5155,7 @@ 

          elif options.maxdepth.lower() == "none":

              new_data['maxdepth'] = None

          else:

-             print(_("Invalid maxdepth: %s") % options.maxdepth)

+             print((_("Invalid maxdepth: %s") % options.maxdepth))

              return 1

      if options.intransitive:

          new_data['intransitive'] = options.intransitive
@@ -5201,17 +5206,17 @@ 

          data = [datum for datum in data if datum['priority'] == priority]

  

      if len(data) == 0:

-         print(_("No inheritance link found to remove.  Please check your arguments"))

+         print((_("No inheritance link found to remove.  Please check your arguments")))

          return

      elif len(data) > 1:

-         print(_("Multiple matches for tag."))

+         print((_("Multiple matches for tag.")))

          if not parent:

-             print(_("Please specify a parent on the command line."))

+             print((_("Please specify a parent on the command line.")))

              return

          if not priority:

-             print(_("Please specify a priority on the command line."))

+             print((_("Please specify a priority on the command line.")))

              return

-         print(_("Error: Key constrainsts may be broken.  Exiting."))

+         print((_("Error: Key constrainsts may be broken.  Exiting.")))

          return

  

      # len(data) == 1
@@ -5244,9 +5249,9 @@ 

      tag = args[0]

      groups = session.getTagGroups(tag)

      if options.comps:

-         print(koji.generate_comps(groups, expand_groups=options.expand))

+         print((koji.generate_comps(groups, expand_groups=options.expand)))

      elif options.spec:

-         print(koji.make_groups_spec(groups,name='buildgroups',buildgroup='build'))

+         print((koji.make_groups_spec(groups,name='buildgroups',buildgroup='build')))

      else:

          pprint.pprint(groups)

  
@@ -5277,7 +5282,7 @@ 

      if event:

          opts['event'] = event['id']

          event['timestr'] = time.asctime(time.localtime(event['ts']))

-         print("Querying at event %(id)i (%(timestr)s)" % event)

+         print(("Querying at event %(id)i (%(timestr)s)" % event))

      if options.tag:

          format = "tag"

          opts['tag_info'] = options.tag
@@ -5322,7 +5327,7 @@ 

          print(header1)

          print(header2)

      for rinfo in data:

-         print(format % rinfo)

+         print((format % rinfo))

  

  def _pick_external_repo_priority(session, tag):

      """pick priority after current ones, leaving space for later insertions"""
@@ -5368,7 +5373,7 @@ 

      elif len(args) == 2:

          name, url = args

          rinfo = session.createExternalRepo(name, url)

-         print("Created external repo %(id)i" % rinfo)

+         print(("Created external repo %(id)i" % rinfo))

      else:

          parser.error(_("Incorrect number of arguments"))

          assert False  # pragma: no cover
@@ -5381,8 +5386,8 @@ 

                  else:

                      priority = _pick_external_repo_priority(session, tag)

              session.addExternalRepoToTag(tag, rinfo['name'], priority)

-             print("Added external repo %s to tag %s (priority %i)" \

-                     % (rinfo['name'], tag, priority))

+             print(("Added external repo %s to tag %s (priority %i)" \

+                     % (rinfo['name'], tag, priority)))

  

  def handle_edit_external_repo(options, session, args):

      "[admin] Edit data for an external repo"
@@ -5429,20 +5434,20 @@ 

              parser.error(_("Do not specify tags when using --alltags"))

              assert False  # pragma: no cover

          if not current_tags:

-             print(_("External repo %s not associated with any tags") % repo)

+             print((_("External repo %s not associated with any tags") % repo))

              return 0

          tags = current_tags

      if delete:

          #removing entirely

          if current_tags and not options.force:

-             print(_("Error: external repo %s used by tag(s): %s") % (repo, ', '.join(current_tags)))

-             print(_("Use --force to remove anyway"))

+             print((_("Error: external repo %s used by tag(s): %s") % (repo, ', '.join(current_tags))))

+             print((_("Use --force to remove anyway")))

              return 1

          session.deleteExternalRepo(args[0])

      else:

          for tag in tags:

              if not tag in current_tags:

-                 print(_("External repo %s not associated with tag %s") % (repo, tag))

+                 print((_("External repo %s not associated with tag %s") % (repo, tag)))

                  continue

              session.removeExternalRepoFromTag(tag, repo)

  
@@ -5671,7 +5676,7 @@ 

              missing.append(opt)

  

      if len(missing) > 0:

-         print("Missing the following required options: %s" % ' '.join(['--%s' % o.replace('_','-') for o in missing]))

+         print(("Missing the following required options: %s" % ' '.join(['--%s' % o.replace('_','-') for o in missing])))

          raise koji.GenericError(_("Missing required options specified above"))

  

      activate_session(session)
@@ -5728,8 +5733,8 @@ 

                                              priority=priority)

  

      if not options.quiet:

-         print("Created task: %d" % task_id)

-         print("Task info: %s/taskinfo?taskID=%s" % (options.weburl, task_id))

+         print(("Created task: %d" % task_id))

+         print(("Task info: %s/taskinfo?taskID=%s" % (options.weburl, task_id)))

      #if task_opts.wait or (task_opts.wait is None and not _running_in_bg()):

      #    session.logout()

      #    return watch_tasks(session, [task_id], quiet=options.quiet)
@@ -5808,7 +5813,7 @@ 

          if not os.path.exists(task_options.config):

              parser.error(_("%s not found!" % task_options.config))

          section = 'image-build'

-         config = ConfigParser.ConfigParser()

+         config = configparser.ConfigParser()

          conf_fd = open(task_options.config)

          config.readfp(conf_fd)

          conf_fd.close()
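The ConfigParser module was renamed configparser in Python 3; importing it through six.moves resolves to the right module on either interpreter. A minimal runnable sketch of reading an image-build section, using a StringIO stand-in for the --config file (the option values are assumptions for illustration):

from six.moves import configparser
from six import StringIO

conf_text = u"[image-build]\nname = my-image\n"   # illustrative stand-in for --config
config = configparser.ConfigParser()
config.readfp(StringIO(conf_text))                # read_file() on newer Python 3 releases
print(config.get("image-build", "name"))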
@@ -5928,8 +5933,8 @@ 

                                   img_type, opts=hub_opts, priority=priority)

  

      if not options.quiet:

-         print("Created task: %d" % task_id)

-         print("Task info: %s/taskinfo?taskID=%s" % (options.weburl, task_id))

+         print(("Created task: %d" % task_id))

+         print(("Task info: %s/taskinfo?taskID=%s" % (options.weburl, task_id)))

      if task_opts.wait or (task_opts.wait is None and not _running_in_bg()):

          session.logout()

          return watch_tasks(session, [task_id], quiet=options.quiet)
@@ -5997,8 +6002,8 @@ 

                                   opts=hub_opts, priority=priority)

  

      if not options.quiet:

-         print("Created task: %d" % task_id)

-         print("Task info: %s/taskinfo?taskID=%s" % (options.weburl, task_id))

+         print(("Created task: %d" % task_id))

+         print(("Task info: %s/taskinfo?taskID=%s" % (options.weburl, task_id)))

      if task_opts.wait or (task_opts.wait is None and not _running_in_bg()):

          session.logout()

          return watch_tasks(session, [task_id], quiet=options.quiet)
@@ -6074,8 +6079,8 @@ 

          priority = 5

      task_id = session.winBuild(vm_name, scmurl, target, opts, priority=priority)

      if not build_opts.quiet:

-         print("Created task: %d" % task_id)

-         print("Task info: %s/taskinfo?taskID=%s" % (options.weburl, task_id))

+         print(("Created task: %d" % task_id))

+         print(("Task info: %s/taskinfo?taskID=%s" % (options.weburl, task_id)))

      if build_opts.wait or (build_opts.wait is None and not _running_in_bg()):

          session.logout()

          return watch_tasks(session, [task_id], quiet=build_opts.quiet)
@@ -6180,7 +6185,7 @@ 

      if options.user:

          user = session.getUser(options.user)

          if not user:

-             print("No such user: %s" % options.user)

+             print(("No such user: %s" % options.user))

              sys.exit(1)

          callopts['owner'] = user['id']

      if options.arch:
@@ -6190,13 +6195,13 @@ 

      if options.channel:

          chan = session.getChannel(options.channel)

          if not chan:

-             print("No such channel: %s" % options.channel)

+             print(("No such channel: %s" % options.channel))

              sys.exit(1)

          callopts['channel_id'] = chan['id']

      if options.host:

          host = session.getHost(options.host)

          if not host:

-             print("No such host: %s" % options.host)

+             print(("No such host: %s" % options.host))

              sys.exit(1)

          callopts['host_id'] = host['id']

  
@@ -6301,42 +6306,42 @@ 

      packages = args[1:]

      user = session.getUser(owner)

      if not user:

-         print("No such user: %s" % owner)

+         print(("No such user: %s" % owner))

          return 1

      opts = {'with_dups' : True}

      old_user = None

      if options.old_user:

          old_user = session.getUser(options.old_user)

          if not old_user:

-             print("No such user: %s" % options.old_user)

+             print(("No such user: %s" % options.old_user))

              return 1

          opts['userID'] = old_user['id']

      to_change = []

      for package in packages:

          entries = session.listPackages(pkgID=package, **opts)

          if not entries:

-             print("No data for package %s" % package)

+             print(("No data for package %s" % package))

              continue

          to_change.extend(entries)

      if not packages and options.old_user:

          entries = session.listPackages(**opts)

          if not entries:

-             print("No data for user %s" % old_user['name'])

+             print(("No data for user %s" % old_user['name']))

              return 1

          to_change.extend(entries)

      for entry in to_change:

          if user['id'] == entry['owner_id']:

              if options.verbose:

-                 print("Preserving owner=%s for package %s in tag %s" \

-                         % (user['name'], package,  entry['tag_name']))

+                 print(("Preserving owner=%s for package %s in tag %s" \

+                         % (user['name'], package,  entry['tag_name'])))

          else:

              if options.test:

-                 print("Would have changed owner for %s in tag %s: %s -> %s" \

-                         % (entry['package_name'], entry['tag_name'], entry['owner_name'], user['name']))

+                 print(("Would have changed owner for %s in tag %s: %s -> %s" \

+                         % (entry['package_name'], entry['tag_name'], entry['owner_name'], user['name'])))

                  continue

              if options.verbose:

-                 print("Changing owner for %s in tag %s: %s -> %s" \

-                         % (entry['package_name'], entry['tag_name'], entry['owner_name'], user['name']))

+                 print(("Changing owner for %s in tag %s: %s -> %s" \

+                         % (entry['package_name'], entry['tag_name'], entry['owner_name'], user['name'])))

              session.packageListSetOwner(entry['tag_id'], entry['package_name'], user['id'])

  

  def anon_handle_watch_task(options, session, args):
@@ -6418,9 +6423,9 @@ 

          if value is not None:

              taskopts[key] = value

      task_id = session.makeTask(method=args[0],

-                                arglist=map(arg_filter,args[1:]),

+                                arglist=list(map(arg_filter,args[1:])),

                                 **taskopts)

-     print("Created task id %d" % task_id)

+     print(("Created task id %d" % task_id))

      if _running_in_bg() or not options.watch:

          return

      else:
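map() and filter() return lazy iterators on Python 3 instead of lists, so results that are sent over XML-RPC or reused are wrapped in list(); the six.moves versions give the same iterator behaviour on Python 2. A short sketch with a hypothetical helper in place of the CLI's arg_filter:

from six.moves import map, filter

def arg_filter(arg):                      # hypothetical stand-in for the CLI helper
    return arg.strip()

args = ["maketask", " newRepo ", " f24-build "]
arglist = list(map(arg_filter, args[1:]))            # concrete list on 2 and 3
nonempty = list(filter(None, arglist))               # same wrapping for filter()
print(arglist, nonempty)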
@@ -6444,7 +6449,7 @@ 

          task_id = session.tagBuild(args[0], pkg, force=options.force)

          #XXX - wait on task

          tasks.append(task_id)

-         print("Created task %d" % task_id)

+         print(("Created task %d" % task_id))

      if _running_in_bg() or options.nowait:

          return

      else:
@@ -6474,7 +6479,7 @@ 

          for arg in args[2:]:

              pkg = session.getPackage(arg)

              if not pkg:

-                 print(_("Invalid package name %s, skipping." % arg))

+                 print((_("Invalid package name %s, skipping." % arg)))

                  continue

              tasklist = session.moveAllBuilds(args[0], args[1], arg, options.force)

              tasks.extend(tasklist)
@@ -6482,7 +6487,7 @@ 

          for arg in args[2:]:

              build = session.getBuild(arg)

              if not build:

-                 print(_("Invalid build %s, skipping." % arg))

+                 print((_("Invalid build %s, skipping." % arg)))

                  continue

              if not build in builds:

                  builds.append(build)
@@ -6490,7 +6495,7 @@ 

          for build in builds:

              task_id = session.moveBuild(args[0], args[1], build['id'], options.force)

              tasks.append(task_id)

-             print("Created task %d, moving %s" % (task_id, koji.buildLabel(build)))

+             print(("Created task %d, moving %s" % (task_id, koji.buildLabel(build))))

      if _running_in_bg() or options.nowait:

          return

      else:
@@ -6537,7 +6542,7 @@ 

              if binfo['name'] not in seen_pkg:

                  #latest for this package

                  if options.verbose:

-                     print(_("Leaving latest build for package %(name)s: %(nvr)s") % binfo)

+                     print((_("Leaving latest build for package %(name)s: %(nvr)s") % binfo))

              else:

                  builds.append(binfo)

              seen_pkg[binfo['name']] = 1
@@ -6553,18 +6558,18 @@ 

                  # not in tag, see if it even exists

                  binfo = session.getBuild(nvr)

                  if not binfo:

-                     print(_("No such build: %s") % nvr)

+                     print((_("No such build: %s") % nvr))

                  else:

-                     print(_("Build %s not in tag %s") % (nvr, tag['name']))

+                     print((_("Build %s not in tag %s") % (nvr, tag['name'])))

                  if not options.force:

                      return 1

      builds.reverse()

      for binfo in builds:

          if options.test:

-             print(_("would have untagged %(nvr)s") % binfo)

+             print((_("would have untagged %(nvr)s") % binfo))

          else:

              if options.verbose:

-                 print(_("untagging %(nvr)s") % binfo)

+                 print((_("untagging %(nvr)s") % binfo))

              session.untagBuild(tag['name'], binfo['nvr'], force=options.force)

  

  def handle_unblock_pkg(options, session, args):
@@ -6618,7 +6623,7 @@ 

          if suboptions.task_id:

              builds = session.listBuilds(taskID=build)

              if not builds:

-                 print("No associated builds for task %s" % build)

+                 print(("No associated builds for task %s" % build))

                  return 1

              build = builds[0]['build_id']

  
@@ -6626,17 +6631,17 @@ 

          # We want the latest build, not a specific build

          try:

              builds = session.listTagged(suboptions.latestfrom, latest=True, package=build, type=suboptions.type)

-         except koji.GenericError, data:

-             print("Error finding latest build: %s" % data)

+         except koji.GenericError as data:

+             print(("Error finding latest build: %s" % data))

              return 1

          if not builds:

-             print("%s has no builds of %s" % (suboptions.latestfrom, build))

+             print(("%s has no builds of %s" % (suboptions.latestfrom, build)))

              return 1

          info = builds[0]

      elif suboptions.rpm:

          rpminfo = session.getRPM(build)

          if rpminfo is None:

-             print("No such rpm: %s" % build)

+             print(("No such rpm: %s" % build))

              return 1

          info = session.getBuild(rpminfo['build_id'])

      else:
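The Python 2-only "except ExceptionType, name" syntax is a SyntaxError on Python 3; "except ExceptionType as name" is accepted by Python 2.6+ and 3.x alike. A self-contained sketch of the pattern (GenericError here is a stand-in for koji.GenericError):

class GenericError(Exception):           # stand-in for koji.GenericError
    pass

def list_tagged(tag):                    # illustrative helper that can fail
    raise GenericError("no such tag: %s" % tag)

try:
    builds = list_tagged("f24-updates")
except GenericError as data:             # valid on Python 2.6+ and Python 3
    print("Error finding latest build: %s" % data)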
@@ -6650,7 +6655,7 @@ 

          info = session.getBuild(build)

  

      if info is None:

-         print("No such build: %s" % build)

+         print(("No such build: %s" % build))

          return 1

  

      if not suboptions.topurl:
@@ -6662,7 +6667,7 @@ 

      if suboptions.type:

          archives = session.listArchives(buildID=info['id'], type=suboptions.type)

          if not archives:

-             print("No %s archives available for %s" % (suboptions.type, koji.buildLabel(info)))

+             print(("No %s archives available for %s" % (suboptions.type, koji.buildLabel(info))))

              return 1

          if suboptions.type == 'maven':

              for archive in archives:
@@ -6693,9 +6698,9 @@ 

              rpms = session.listRPMs(buildID=info['id'], arches=arches)

          if not rpms:

              if arches:

-                 print("No %s packages available for %s" % (" or ".join(arches), koji.buildLabel(info)))

+                 print(("No %s packages available for %s" % (" or ".join(arches), koji.buildLabel(info))))

              else:

-                 print("No packages available for %s" % koji.buildLabel(info))

+                 print(("No packages available for %s" % koji.buildLabel(info)))

              return 1

          for rpm in rpms:

              if not suboptions.debuginfo and koji.is_debuginfo(rpm['name']):
@@ -6755,7 +6760,7 @@ 

          full_filename = os.path.normpath(os.path.join(task_log_dir, FAIL_LOG))

          koji.ensuredir(os.path.dirname(full_filename))

          sys.stdout.write("Writing: %s\n" % full_filename)

-         file(full_filename, 'w').write(content)

+         open(full_filename, 'w').write(content)

  

      def download_log(task_log_dir, task_id, filename, blocksize=102400):

          #Create directories only if there is any log file to write to
@@ -6764,11 +6769,11 @@ 

          contents = 'IGNORE ME!'

          if suboptions.cont and os.path.exists(full_filename):

              sys.stdout.write("Continuing: %s\n" % full_filename)

-             fd = file(full_filename, 'ab')

+             fd = open(full_filename, 'ab')

              offset = fd.tell()

          else:

              sys.stdout.write("Downloading: %s\n" % full_filename)

-             fd = file(full_filename, 'wb')

+             fd = open(full_filename, 'wb')

              offset = 0

          try:

              while contents:
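The file() builtin no longer exists on Python 3; open() does the same job on both versions, which is why the log download helper switches to it. A small sketch of the append-and-continue pattern (the path is illustrative, and a with block is used so the handle is closed):

full_filename = "task.log"                        # illustrative path

with open(full_filename, "ab") as fd:             # append mode, as when continuing a download
    offset = fd.tell()
    fd.write(b"chunk of log data\n")
print("continuing %s at offset %d" % (full_filename, offset))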
@@ -6865,7 +6870,7 @@ 

          downloadable_tasks.append(base_task)

      else:

          subtasks = session.getTaskChildren(base_task_id)

-         downloadable_tasks.extend(filter(check_downloadable, subtasks))

+         downloadable_tasks.extend(list(filter(check_downloadable, subtasks)))

  

      # get files for download

  
@@ -6906,7 +6911,7 @@ 

      number = 0

      for (task, filename, new_filename) in downloads:

          number += 1

-         print(_("Downloading [%d/%d]: %s") % (number, len(downloads), new_filename))

+         print((_("Downloading [%d/%d]: %s") % (number, len(downloads), new_filename)))

          output_file = open(new_filename, "wb")

          output_file.write(session.downloadTaskOutput(task["id"], filename))

          output_file.close()
@@ -6945,13 +6950,13 @@ 

              parser.error("Invalid tag: %s" % tag)

          targets = session.getBuildTargets(buildTagID=tag_info['id'])

          if not targets:

-             print("%(name)s is not a build tag for any target" % tag_info)

+             print(("%(name)s is not a build tag for any target" % tag_info))

              targets = session.getBuildTargets(destTagID=tag_info['id'])

              if targets:

                  maybe = {}.fromkeys([t['build_tag_name'] for t in targets])

-                 maybe = maybe.keys()

+                 maybe = list(maybe.keys())

                  maybe.sort()

-                 print("Suggested tags: %s" % ', '.join(maybe))

+                 print(("Suggested tags: %s" % ', '.join(maybe)))

              return 1

          tag_id = tag_info['id']

  
@@ -6959,11 +6964,11 @@ 

      for nvr in builds:

          data = session.getLatestBuilds(tag_id, package=nvr["name"])

          if len(data) == 0:

-             print("Warning: package %s is not in tag %s" % (nvr["name"], tag))

+             print(("Warning: package %s is not in tag %s" % (nvr["name"], tag)))

          else:

              present_nvr = [x["nvr"] for x in data][0]

              if present_nvr != "%s-%s-%s" % (nvr["name"], nvr["version"], nvr["release"]):

-                 print("Warning: nvr %s-%s-%s is not current in tag %s\n  latest build in %s is %s" % (nvr["name"], nvr["version"], nvr["release"], tag, tag, present_nvr))

+                 print(("Warning: nvr %s-%s-%s is not current in tag %s\n  latest build in %s is %s" % (nvr["name"], nvr["version"], nvr["release"], tag, tag, present_nvr)))

  

      last_repo = None

      repo = session.getRepo(tag_id)
@@ -6972,15 +6977,15 @@ 

          if builds and repo and repo != last_repo:

              if koji.util.checkForBuilds(session, tag_id, builds, repo['create_event'], latest=True):

                  if not suboptions.quiet:

-                     print("Successfully waited %s for %s to appear in the %s repo" % (koji.util.duration(start), koji.util.printList(suboptions.builds), tag))

+                     print(("Successfully waited %s for %s to appear in the %s repo" % (koji.util.duration(start), koji.util.printList(suboptions.builds), tag)))

                  return

  

          if (time.time() - start) >= (suboptions.timeout * 60.0):

              if not suboptions.quiet:

                  if builds:

-                     print("Unsuccessfully waited %s for %s to appear in the %s repo" % (koji.util.duration(start), koji.util.printList(suboptions.builds), tag))

+                     print(("Unsuccessfully waited %s for %s to appear in the %s repo" % (koji.util.duration(start), koji.util.printList(suboptions.builds), tag)))

                  else:

-                     print("Unsuccessfully waited %s for a new %s repo" % (koji.util.duration(start), tag))

+                     print(("Unsuccessfully waited %s for a new %s repo" % (koji.util.duration(start), tag)))

              return 1

  

          time.sleep(60)
@@ -6990,7 +6995,7 @@ 

          if not builds:

              if repo != last_repo:

                  if not suboptions.quiet:

-                     print("Successfully waited %s for a new %s repo" % (koji.util.duration(start), tag))

+                     print(("Successfully waited %s for a new %s repo" % (koji.util.duration(start), tag)))

                  return

  

  _search_types = ('package', 'build', 'tag', 'target', 'user', 'host', 'rpm', 'maven', 'win')
@@ -7032,17 +7037,17 @@ 

          tag = info['name']

          targets = session.getBuildTargets(buildTagID=info['id'])

          if not targets:

-             print("Warning: %s is not a build tag" % tag)

+             print(("Warning: %s is not a build tag" % tag))

      if not info['arches']:

-         print("Warning: tag %s has an empty arch list" % info['name'])

+         print(("Warning: tag %s has an empty arch list" % info['name']))

      if suboptions.debuginfo:

          repo_opts['debuginfo'] = True

      if suboptions.source:

          repo_opts['src'] = True

      task_id = session.newRepo(tag, **repo_opts)

-     print("Regenerating repo for tag: %s" % tag)

-     print("Created task: %d" % task_id)

-     print("Task info: %s/taskinfo?taskID=%s" % (options.weburl, task_id))

+     print(("Regenerating repo for tag: %s" % tag))

+     print(("Created task: %d" % task_id))

+     print(("Task info: %s/taskinfo?taskID=%s" % (options.weburl, task_id)))

      if _running_in_bg() or suboptions.nowait:

          return

      else:
@@ -7072,7 +7077,7 @@ 

          matchType = 'exact'

      data = session.search(pattern, type, matchType)

      for row in data:

-         print(row['name'])

+         print((row['name']))

  

  def handle_moshimoshi(options, session, args):

      "[misc] Introduce yourself"
@@ -7087,18 +7092,18 @@ 

      if not u:

          print("Not authenticated")

          u = {'name' : 'anonymous user'}

-     print("%s, %s!" % (random.choice(greetings).encode('utf-8'), u["name"]))

+     print(("%s, %s!" % (random.choice(greetings).encode('utf-8'), u["name"])))

      print("")

-     print("You are using the hub at %s" % session.baseurl)

+     print(("You are using the hub at %s" % session.baseurl))

      authtype = u.get('authtype', getattr(session, 'authtype', None))

      if authtype == koji.AUTHTYPE_NORMAL:

          print("Authenticated via password")

      elif authtype == koji.AUTHTYPE_GSSAPI:

          print("Authenticated via GSSAPI")

      elif authtype == koji.AUTHTYPE_KERB:

-         print("Authenticated via Kerberos principal %s" % u["krb_principal"])

+         print(("Authenticated via Kerberos principal %s" % u["krb_principal"]))

      elif authtype == koji.AUTHTYPE_SSL:

-         print("Authenticated via client certificate %s" % options.cert)

+         print(("Authenticated via client certificate %s" % options.cert))

  

  def handle_runroot(options, session, args):

      "[admin] Run a command in a buildroot"
@@ -7145,7 +7150,7 @@ 

              kwargs['new_chroot'] = True

  

          task_id = session.runroot(tag, arch, command, **kwargs)

-     except koji.GenericError, e:

+     except koji.GenericError as e:

          if 'Invalid method' in str(e):

              print("* The runroot plugin appears to not be installed on the"

                    " koji hub.  Please contact the administrator.")
@@ -7191,10 +7196,10 @@ 

      chosen = set(args)

      if options.admin:

          chosen.add('admin')

-     avail = set(categories.keys() + ['all'])

+     avail = set(list(categories.keys()) + ['all'])

      unavail = chosen - avail

      for arg in unavail:

-         print("No such help category: %s" % arg)

+         print(("No such help category: %s" % arg))

  

      if not chosen:

          list_commands()
@@ -7204,7 +7209,7 @@ 

  

  def list_commands(categories_chosen=None):

      if categories_chosen is None or "all" in categories_chosen:

-         categories_chosen = categories.keys()

+         categories_chosen = list(categories.keys())

      else:

          # copy list since we're about to modify it

          categories_chosen = list(categories_chosen)
@@ -7220,18 +7225,18 @@ 

              alias = alias.replace('_','-')

              handlers.append((alias,value))

      handlers.sort()

-     print(_("Available commands:"))

+     print((_("Available commands:")))

      for category in categories_chosen:

-         print(_("\n%s:" % categories[category]))

+         print((_("\n%s:" % categories[category])))

          for alias,handler in handlers:

              desc = handler.__doc__

              if desc.startswith('[%s] ' % category):

                  desc = desc[len('[%s] ' % category):]

              elif category != 'misc' or desc.startswith('['):

                  continue

-             print("        %-25s %s" % (alias, desc))

+             print(("        %-25s %s" % (alias, desc)))

  

-     print("%s" % get_epilog_str().rstrip("\n"))

+     print(("%s" % get_epilog_str().rstrip("\n")))

  

  def error(msg=None, code=1):

      if msg:
@@ -7272,9 +7277,9 @@ 

                  session.krb_login(principal=options.principal, keytab=options.keytab, proxyuser=options.runas)

              else:

                  session.krb_login(proxyuser=options.runas)

-         except socket.error, e:

+         except socket.error as e:

              warn(_("Could not connect to Kerberos authentication service: %s") % e.args[1])

-         except Exception, e:

+         except Exception as e:

              if krbV is not None and isinstance(e, krbV.Krb5Error):

                  error(_("Kerberos authentication failed: %s (%s)") % (e.args[1], e.args[0]))

              else:
@@ -7318,7 +7323,7 @@ 

          else:

              exctype, value = sys.exc_info()[:2]

              rv = 1

-             print("%s: %s" % (exctype.__name__, value))

+             print(("%s: %s" % (exctype.__name__, value)))

      try:

          session.logout()

      except:

file modified
+135 -130
@@ -22,6 +22,8 @@ 

  #       Mike Bonnet <mikeb@redhat.com>

  #       Cristian Balint <cbalint@redhat.com>

  

+ from __future__ import absolute_import

+ from __future__ import print_function

  import base64

  import calendar

  import cgi
@@ -53,9 +55,12 @@ 

  import tempfile

  import time

  import types

- import xmlrpclib

+ from six.moves import xmlrpc_client

  import zipfile

  from koji.context import context

+ import six

+ from six.moves import range

+ from six.moves import zip

  

  try:

      import json
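
The renamed standard-library modules (`xmlrpclib` → `xmlrpc.client`, `ConfigParser` → `configparser`) are reached through `six.moves`, which resolves to the right module on either interpreter. A minimal sketch of the idea, not the hub code itself (the method name is hypothetical):

    from six.moves import xmlrpc_client, configparser

    # xmlrpc_client is xmlrpclib on Python 2 and xmlrpc.client on Python 3
    request = xmlrpc_client.dumps(("f25-build",), methodname="getTag", allow_none=1)
    params, method = xmlrpc_client.loads(request)
    print(method, params)

    parser = configparser.RawConfigParser()   # ConfigParser on 2, configparser on 3
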
@@ -397,7 +402,7 @@ 

          if xml_request.find('<?xml', 0, 10) == -1:

              #handle older base64 encoded data

              xml_request = base64.decodestring(xml_request)

-         params, method = xmlrpclib.loads(xml_request)

+         params, method = xmlrpc_client.loads(xml_request)

          return params

  

      def getResult(self, raise_fault=True):
@@ -416,8 +421,8 @@ 

          try:

              # If the result is a Fault, then loads will raise it

              # This is normally what we want to happen

-             result, method = xmlrpclib.loads(xml_result)

-         except xmlrpclib.Fault, fault:

+             result, method = xmlrpc_client.loads(xml_result)

+         except xmlrpc_client.Fault as fault:

              if raise_fault:

                  raise

              # Note that you can't really return a fault over xmlrpc, except by
@@ -448,7 +453,7 @@ 

                  if task['request'].find('<?xml', 0, 10) == -1:

                      #handle older base64 encoded data

                      task['request'] = base64.decodestring(task['request'])

-                 task['request'] = xmlrpclib.loads(task['request'])[0]

+                 task['request'] = xmlrpc_client.loads(task['request'])[0]

          return results

  

      def runCallbacks(self, cbtype, old_info, attr, new_val):
@@ -490,7 +495,7 @@ 

          r = _fetchSingle(q, opts)

          if not r:

              raise koji.GenericError("Invalid parent task: %(parent)s" % opts)

-         pdata = dict(zip(fields, r))

+         pdata = dict(list(zip(fields, r)))

          if pdata['state'] != koji.TASK_STATES['OPEN']:

              raise koji.GenericError("Parent task (id %(parent)s) is not open" % opts)

          #default to a higher priority than parent
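
`zip()` also becomes lazy on Python 3. `dict()` consumes any iterable of pairs, so `dict(zip(fields, r))` would still work; the mechanical fixer wraps it in `list()` so the intermediate value keeps behaving like a list everywhere it might be reused. A sketch with invented column data:

    fields = ("id", "state", "method")          # hypothetical column names
    row = (42, "OPEN", "build")

    pdata = dict(zip(fields, row))              # works on both: dict() accepts the iterator
    pdata_fixed = dict(list(zip(fields, row)))  # what the fixer emits; same result
    assert pdata == pdata_fixed
    print(pdata["state"])
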
@@ -559,7 +564,7 @@ 

              raise koji.GenericError("invalid channel policy")

  

      # encode xmlrpc request

-     opts['request'] = xmlrpclib.dumps(tuple(arglist), methodname=method,

+     opts['request'] = xmlrpc_client.dumps(tuple(arglist), methodname=method,

                                        allow_none=1)

      opts['state'] = koji.TASK_STATES['FREE']

      opts['method'] = method
@@ -585,7 +590,7 @@ 

          table += '.'

      if event is None:

          return """(%(table)sactive = TRUE)""" % locals()

-     elif isinstance(event, int) or isinstance(event, long):

+     elif isinstance(event, int) or isinstance(event, int):

          return """(%(table)screate_event <= %(event)d AND ( %(table)srevoke_event IS NULL OR %(event)d < %(table)srevoke_event ))""" \

              % locals()

      else:
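
Python 3 has a single integer type, so the fixer turns `isinstance(x, long)` into a second `isinstance(x, int)` test, leaving the redundant `isinstance(event, int) or isinstance(event, int)` seen above; other hunks in this file use `six.integer_types`, which is `(int, long)` on Python 2 and `(int,)` on Python 3. A minimal sketch of that variant (the event id is made up):

    import six

    event = 12345   # hypothetical event id
    if isinstance(event, six.integer_types):    # covers long on Python 2, plain int on Python 3
        clause = "create_event <= %d" % event
        print(clause)
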
@@ -601,7 +606,7 @@ 

      """ % (",".join(fields), eventCondition(event))

      c.execute(q, locals())

      #convert list of lists into a list of dictionaries

-     return [dict(zip(fields, x)) for x in c.fetchall()]

+     return [dict(list(zip(fields, x))) for x in c.fetchall()]

  

  def readInheritanceData(tag_id, event=None):

      c = context.cnx.cursor()
@@ -612,7 +617,7 @@ 

      """ % (",".join(fields), eventCondition(event))

      c.execute(q, locals())

      #convert list of lists into a list of dictionaries

-     data = [dict(zip(fields, x)) for x in c.fetchall()]

+     data = [dict(list(zip(fields, x))) for x in c.fetchall()]

      # include the current tag_id as child_id, so we can retrace the inheritance chain later

      for datum in data:

          datum['child_id'] = tag_id
@@ -627,7 +632,7 @@ 

      """ % (",".join(fields), eventCondition(event))

      c.execute(q, locals())

      #convert list of lists into a list of dictionaries

-     data = [dict(zip(fields, x)) for x in c.fetchall()]

+     data = [dict(list(zip(fields, x))) for x in c.fetchall()]

      return data

  

  
@@ -667,12 +672,12 @@ 

                      data[parent_id] = link

                      break

      if clear:

-         for link in data.itervalues():

+         for link in six.itervalues(data):

              if not link.get('is_update'):

                  link['delete link'] = True

                  link['is_update'] = True

      changed = False

-     for link in data.itervalues():

+     for link in six.itervalues(data):

          if link.get('is_update'):

              changed = True

              break
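
`dict.iteritems()` and `itervalues()` are gone on Python 3; `six.iteritems(d)` / `six.itervalues(d)` pick the memory-friendly iterator on Python 2 and the plain view methods on Python 3. For small dictionaries, plain `.items()` / `.values()` would also run on both. A sketch with hypothetical inheritance links:

    import six

    data = {5: {"priority": 10, "is_update": True},     # hypothetical parent links
            7: {"priority": 20, "is_update": False}}

    for parent_id, link in six.iteritems(data):         # iteritems on py2, items() view on py3
        if link.get("is_update"):
            print("updating parent %d" % parent_id)
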
@@ -682,17 +687,17 @@ 

          return

      #check for duplicate priorities

      pri_index = {}

-     for link in data.itervalues():

+     for link in six.itervalues(data):

          if link.get('delete link'):

              continue

          pri_index.setdefault(link['priority'], []).append(link)

-     for pri, dups in pri_index.iteritems():

+     for pri, dups in six.iteritems(pri_index):

          if len(dups) <= 1:

              continue

          #oops, duplicate entries for a single priority

          dup_ids = [link['parent_id'] for link in dups]

          raise koji.GenericError("Inheritance priorities must be unique (pri %s: %r )" % (pri, dup_ids))

-     for parent_id, link in data.iteritems():

+     for parent_id, link in six.iteritems(data):

          if not link.get('is_update'):

              continue

          # revoke old values
@@ -700,7 +705,7 @@ 

                      clauses=['tag_id=%(tag_id)s', 'parent_id = %(parent_id)s'])

          update.make_revoke()

          update.execute()

-     for parent_id, link in data.iteritems():

+     for parent_id, link in six.iteritems(data):

          if not link.get('is_update'):

              continue

          # skip rest if we are just deleting
@@ -858,7 +863,7 @@ 

      tag_id = tag['id']

      pkg = lookup_package(pkginfo, strict=False)

      if not pkg:

-         if not isinstance(pkginfo, basestring):

+         if not isinstance(pkginfo, six.string_types):

              raise koji.GenericError("Invalid package: %s" % pkginfo)

      if owner is not None:

          owner = get_user(owner, strict=True)['id']
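
`basestring` and `unicode` do not exist on Python 3, so string checks go through `six.string_types` (`(str, unicode)` on 2, `(str,)` on 3) and `six.text_type` (`unicode` on 2, `str` on 3). A minimal sketch, with a hypothetical helper rather than the hub's:

    import six

    def normalize_pkginfo(pkginfo):
        # accepts a package name or a numeric id; illustrative only
        if isinstance(pkginfo, six.string_types):
            return {"name": pkginfo}
        if isinstance(pkginfo, six.integer_types):
            return {"id": pkginfo}
        raise ValueError("Invalid package: %r" % pkginfo)

    print(normalize_pkginfo("kernel"))
    print(normalize_pkginfo(25))
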
@@ -1028,7 +1033,7 @@ 

          q += """

          AND users.id = %%(userID)i"""

      if pkgID != None:

-         if isinstance(pkgID, int) or isinstance(pkgID, long):

+         if isinstance(pkgID, int) or isinstance(pkgID, int):

              q += """

              AND package.id = %%(pkgID)i"""

          else:
@@ -1157,7 +1162,7 @@ 

      # build - id pkg_id version release epoch

      # tag_listing - id build_id tag_id

  

-     if not isinstance(latest, (int, long, float)):

+     if not isinstance(latest, (int, int, float)):

          latest = bool(latest)

  

      taglist = [tag]
@@ -1299,14 +1304,14 @@ 

          joins.append('LEFT OUTER JOIN rpmsigs on rpminfo.id = rpmsigs.rpm_id')

      if arch:

          data['arch'] = arch

-         if isinstance(arch, basestring):

+         if isinstance(arch, six.string_types):

              clauses.append('rpminfo.arch = %(arch)s')

          elif isinstance(arch, (list, tuple)):

              clauses.append('rpminfo.arch IN %(arch)s')

          else:

              raise koji.GenericError('invalid arch option: %s' % arch)

  

-     fields, aliases = zip(*fields)

+     fields, aliases = list(zip(*fields))

      query = QueryProcessor(tables=tables, joins=joins, clauses=clauses,

                             columns=fields, aliases=aliases, values=data, transform=_fix_rpm_row)

  
@@ -1968,7 +1973,7 @@ 

              groups.setdefault(grp_id, group)

  

      if incl_pkgs:

-         for group in groups.itervalues():

+         for group in six.itervalues(groups):

              group['packagelist'] = {}

          fields = ('group_id', 'tag_id', 'package', 'blocked', 'type', 'basearchonly', 'requires')

          q = """
@@ -1990,7 +1995,7 @@ 

  

      if incl_reqs:

          # and now the group reqs

-         for group in groups.itervalues():

+         for group in six.itervalues(groups):

              group['grouplist'] = {}

          fields = ('group_id', 'tag_id', 'req_id', 'blocked', 'type', 'is_metapkg', 'name')

          q = """SELECT %s FROM group_req_listing JOIN groups on req_id = id
@@ -2085,7 +2090,7 @@ 

  def rename_channel(old, new):

      """Rename a channel"""

      context.session.assertPerm('admin')

-     if not isinstance(new, basestring):

+     if not isinstance(new, six.string_types):

          raise koji.GenericError("new channel name must be a string")

      cinfo = get_channel(old, strict=True)

      dup_check = get_channel(new, strict=False)
@@ -2139,7 +2144,7 @@ 

      """ % ','.join(fields)

      # XXX - magic number in query

      c.execute(q)

-     hosts = [dict(zip(aliases, row)) for row in c.fetchall()]

+     hosts = [dict(list(zip(aliases, row))) for row in c.fetchall()]

      for host in hosts:

          q = """SELECT channel_id FROM host_channels WHERE host_id=%(id)s"""

          c.execute(q, host)
@@ -2156,7 +2161,7 @@ 

              #in a perfect world, this list would only include canonical

              #arches, but not all admins will undertand that.

              ret[koji.canonArch(arch)] = 1

-     return ret.keys()

+     return list(ret.keys())

  

  def get_active_tasks(host=None):

      """Return data on tasks that are yet to be run"""
@@ -2326,7 +2331,7 @@ 

      groupsdir = "%s/groups" % (repodir)

      koji.ensuredir(groupsdir)

      comps = koji.generate_comps(groups, expand_groups=True)

-     fo = file("%s/comps.xml" % groupsdir, 'w')

+     fo = open("%s/comps.xml" % groupsdir, 'w')

      fo.write(comps)

      fo.close()

  
@@ -2345,7 +2350,7 @@ 

          top_relpath = koji.util.relpath(koji.pathinfo.topdir, archdir)

          top_link = os.path.join(archdir, 'toplink')

          os.symlink(top_relpath, top_link)

-         pkglist[repoarch] = file(os.path.join(archdir, 'pkglist'), 'w')

+         pkglist[repoarch] = open(os.path.join(archdir, 'pkglist'), 'w')

      #NOTE - rpms is now an iterator

      for rpminfo in rpms:

          if not with_debuginfo and koji.is_debuginfo(rpminfo['name']):
@@ -2370,7 +2375,7 @@ 

  

      #write blocked package lists

      for repoarch in repo_arches:

-         blocklist = file(os.path.join(repodir, repoarch, 'blocklist'), 'w')

+         blocklist = open(os.path.join(repodir, repoarch, 'blocklist'), 'w')

          for pkg in blocks:

              blocklist.write(pkg['package_name'])

              blocklist.write('\n')
@@ -2406,7 +2411,7 @@ 

                  os.symlink(relpath, destlink)

              except:

                  log_error('Error linking %s to %s' % (destlink, relpath))

-         for artifact_dir, artifacts in artifact_dirs.iteritems():

+         for artifact_dir, artifacts in six.iteritems(artifact_dirs):

              _write_maven_repo_metadata(artifact_dir, artifacts)

  

      koji.plugin.run_callbacks('postRepoInit', tag=tinfo, with_src=with_src, with_debuginfo=with_debuginfo,
@@ -2418,7 +2423,7 @@ 

      # group_id and artifact_id should be the same for all entries,

      # so we're really only comparing versions.

      artifacts = sorted(artifacts, cmp=lambda a, b: rpm.labelCompare(a, b))

-     artifactinfo = dict(zip(['group_id', 'artifact_id', 'version'], artifacts[-1]))

+     artifactinfo = dict(list(zip(['group_id', 'artifact_id', 'version'], artifacts[-1])))

      artifactinfo['timestamp'] = datetime.datetime.now().strftime('%Y%m%d%H%M%S')

      contents = """<?xml version="1.0"?>

  <metadata>
@@ -2437,7 +2442,7 @@ 

    </versioning>

  </metadata>

  """ % datetime.datetime.now().strftime('%Y%m%d%H%M%S')

-     mdfile = file(os.path.join(destdir, 'maven-metadata.xml'), 'w')

+     mdfile = open(os.path.join(destdir, 'maven-metadata.xml'), 'w')

      mdfile.write(contents)

      mdfile.close()

      _generate_maven_metadata(destdir)
@@ -2511,7 +2516,7 @@ 

          'host_id': 'host_id',

          'create_event': 'create_event',

          'state': 'state'}

-     fields, aliases = zip(*fields.items())

+     fields, aliases = list(zip(*list(fields.items())))

      values = {'repo_id': repo_id}

      clauses = ['repo_id=%(repo_id)s', 'retire_event IS NULL']

      query = QueryProcessor(columns=fields, aliases=aliases, tables=['standard_buildroot'],
@@ -2711,7 +2716,7 @@ 

      if info:

          if isinstance(info, str):

              clauses.append('build_target.name = %(info)s')

-         elif isinstance(info, int) or isinstance(info, long):

+         elif isinstance(info, int) or isinstance(info, int):

              clauses.append('build_target.id = %(info)i')

          else:

              raise koji.GenericError('invalid type for lookup: %s' % type(info))
@@ -2754,11 +2759,11 @@ 

      create option will fail.

      """

      fields = ('id', 'name')

-     if isinstance(info, int) or isinstance(info, long):

+     if isinstance(info, int) or isinstance(info, int):

          q = """SELECT id,name FROM %s WHERE id=%%(info)d""" % table

      elif isinstance(info, str):

          q = """SELECT id,name FROM %s WHERE name=%%(info)s""" % table

-     elif isinstance(info, unicode):

+     elif isinstance(info, six.text_type):

          info = koji.fixEncoding(info)

          q = """SELECT id,name FROM %s WHERE name=%%(info)s""" % table

      else:
@@ -2871,7 +2876,7 @@ 

  

      # add extra data

      if extra is not None:

-         for key, value in extra.iteritems():

+         for key, value in six.iteritems(extra):

              data = {

                  'tag_id': tag_id,

                  'key': key,
@@ -2928,15 +2933,15 @@ 

                'tag_config.maven_include_all': 'maven_include_all'

               }

      clauses = [eventCondition(event, table='tag_config')]

-     if isinstance(tagInfo, (int, long)):

+     if isinstance(tagInfo, six.integer_types):

          clauses.append("tag.id = %(tagInfo)i")

-     elif isinstance(tagInfo, basestring):

+     elif isinstance(tagInfo, six.string_types):

          clauses.append("tag.name = %(tagInfo)s")

      else:

          raise koji.GenericError('invalid type for tagInfo: %s' % type(tagInfo))

  

      data = {'tagInfo': tagInfo}

-     fields, aliases = zip(*fields.items())

+     fields, aliases = list(zip(*list(fields.items())))

      query = QueryProcessor(columns=fields, aliases=aliases, tables=tables,

                             joins=joins, clauses=clauses, values=data)

      result = query.executeOne()
@@ -3156,7 +3161,7 @@ 

      if info is not None:

          if isinstance(info, str):

              clauses.append('name = %(info)s')

-         elif isinstance(info, (int, long)):

+         elif isinstance(info, six.integer_types):

              clauses.append('id = %(info)i')

          else:

              raise koji.GenericError('invalid type for lookup: %s' % type(info))
@@ -3381,7 +3386,7 @@ 

      fields = ['id', 'name', 'status', 'usertype', 'krb_principal']

      #fields, aliases = zip(*fields.items())

      data = {'info' : userInfo}

-     if isinstance(userInfo, int) or isinstance(userInfo, long):

+     if isinstance(userInfo, int) or isinstance(userInfo, int):

          clauses = ['id = %(info)i']

      elif isinstance(userInfo, str):

          clauses = ['krb_principal = %(info)s OR name = %(info)s']
@@ -3396,7 +3401,7 @@ 

  

  

  def find_build_id(X, strict=False):

-     if isinstance(X, int) or isinstance(X, long):

+     if isinstance(X, int) or isinstance(X, int):

          return X

      elif isinstance(X, str):

          data = koji.parse_NVR(X)
@@ -3478,7 +3483,7 @@ 

                ('users.id', 'owner_id'), ('users.name', 'owner_name'),

                ('build.source', 'source'),

                ('build.extra', 'extra'))

-     fields, aliases = zip(*fields)

+     fields, aliases = list(zip(*fields))

      joins = ['events ON build.create_event = events.id',

               'package on build.pkg_id = package.id',

               'volume on build.volume_id = volume.id',
@@ -3595,7 +3600,7 @@ 

          )

      # we can look up by id or NVRA

      data = None

-     if isinstance(rpminfo, (int, long)):

+     if isinstance(rpminfo, six.integer_types):

          data = {'id': rpminfo}

      elif isinstance(rpminfo, str):

          data = koji.parse_NVRA(rpminfo)
@@ -3713,7 +3718,7 @@ 

          else:

              raise koji.GenericError('invalid type for "arches" parameter: %s' % type(arches))

  

-     fields, aliases = zip(*fields)

+     fields, aliases = list(zip(*fields))

      query = QueryProcessor(columns=fields, aliases=aliases,

                             tables=['rpminfo'], joins=joins, clauses=clauses,

                             values=locals(), transform=_fix_rpm_row, opts=queryOpts)
@@ -4038,7 +4043,7 @@ 

          clauses.append('archiveinfo.btype_id = %(btype_id)s')

          values['btype_id'] = btype['id']

  

-     columns, aliases = zip(*fields)

+     columns, aliases = list(zip(*fields))

      ret = QueryProcessor(tables=tables, columns=columns, aliases=aliases, joins=joins,

                            transform=_fix_archive_row,

                            clauses=clauses, values=values, opts=queryOpts).execute()
@@ -4324,7 +4329,7 @@ 

      as a list of maps.  Each map in the list will have a key for each

      element in the "fields" list.  If there are no results, an empty

      list will be returned."""

-     return [dict(zip(fields, row)) for row in _fetchMulti(query, values)]

+     return [dict(list(zip(fields, row))) for row in _fetchMulti(query, values)]

  

  def _singleRow(query, values, fields, strict=False):

      """Return a single row from "query".  Named parameters can be
@@ -4336,7 +4341,7 @@ 

      returned."""

      row = _fetchSingle(query, values, strict)

      if row:

-         return dict(zip(fields, row))

+         return dict(list(zip(fields, row)))

      else:

          #strict enforced by _fetchSingle

          return None
@@ -4387,7 +4392,7 @@ 

                'capacity', 'description', 'comment', 'ready', 'enabled')

      query = """SELECT %s FROM host

      WHERE """ % ', '.join(fields)

-     if isinstance(hostInfo, int) or isinstance(hostInfo, long):

+     if isinstance(hostInfo, int) or isinstance(hostInfo, int):

          query += """id = %(hostInfo)i"""

      elif isinstance(hostInfo, str):

          query += """name = %(hostInfo)s"""
@@ -4436,7 +4441,7 @@ 

      fields = ('id', 'name')

      query = """SELECT %s FROM channels

      WHERE """ % ', '.join(fields)

-     if isinstance(channelInfo, int) or isinstance(channelInfo, long):

+     if isinstance(channelInfo, int) or isinstance(channelInfo, int):

          query += """id = %(channelInfo)i"""

      elif isinstance(channelInfo, str):

          query += """name = %(channelInfo)s"""
@@ -4888,7 +4893,7 @@ 

          import_rpm_file(fn, binfo, rpminfo)

          add_rpm_sig(rpminfo['id'], koji.rip_rpm_sighdr(fn))

      if logs:

-         for key, files in logs.iteritems():

+         for key, files in six.iteritems(logs):

              if not key:

                  key = None

              for relpath in files:
@@ -5057,7 +5062,7 @@ 

          if metadata is None:

              #default to looking for uploaded file

              metadata = 'metadata.json'

-         if not isinstance(metadata, (str, unicode)):

+         if not isinstance(metadata, (str, six.text_type)):

              raise koji.GenericError("Invalid metadata value: %r" % metadata)

          if metadata.endswith('.json'):

              # handle uploaded metadata
@@ -5115,7 +5120,7 @@ 

                  datetime.datetime.fromtimestamp(float(metadata['build']['end_time'])).isoformat(' ')

              owner = metadata['build'].get('owner', None)

              if owner:

-                 if not isinstance(owner, basestring):

+                 if not isinstance(owner, six.string_types):

                      raise koji.GenericError("Invalid owner format (expected username): %s" % owner)

                  buildinfo['owner'] = get_user(owner, strict=True)['id']

          self.buildinfo = buildinfo
@@ -5456,14 +5461,14 @@ 

  

      #sanity check rpminfo

      dtypes = (

-         ('name', basestring),

-         ('version', basestring),

-         ('release', basestring),

+         ('name', six.string_types),

+         ('version', six.string_types),

+         ('release', six.string_types),

          ('epoch', (int, type(None))),

-         ('arch', basestring),

+         ('arch', six.string_types),

          ('payloadhash', str),

          ('size', int),

-         ('buildtime', (int, long)))

+         ('buildtime', six.integer_types))

      for field, allowed in dtypes:

          if field not in rpminfo:

              raise koji.GenericError("%s field missing: %r" % (field, rpminfo))
@@ -5897,7 +5902,7 @@ 

                      (img_path, img_size, old['filesize']))

      # old images always used sha256 hashes

      sha256sum = hashlib.sha256()

-     image_fo = file(img_path, 'r')

+     image_fo = open(img_path, 'r')

      while True:

          data = image_fo.read(1048576)

          sha256sum.update(data)
@@ -6049,7 +6054,7 @@ 

          filename = koji.fixEncoding(os.path.basename(filepath))

          archiveinfo['filename'] = filename

          archiveinfo['size'] = os.path.getsize(filepath)

-         archivefp = file(filepath)

+         archivefp = open(filepath)

          m = md5_constructor()

          while True:

              contents = archivefp.read(8192)
@@ -6189,14 +6194,14 @@ 

              sumfile = mavenfile + ext

              if sumfile not in mavenfiles:

                  sum = sum_constr()

-                 fobj = file('%s/%s' % (mavendir, mavenfile))

+                 fobj = open('%s/%s' % (mavendir, mavenfile))

                  while True:

                      content = fobj.read(8192)

                      if not content:

                          break

                      sum.update(content)

                  fobj.close()

-                 sumobj = file('%s/%s' % (mavendir, sumfile), 'w')

+                 sumobj = open('%s/%s' % (mavendir, sumfile), 'w')

                  sumobj.write(sum.hexdigest())

                  sumobj.close()

  
@@ -6252,7 +6257,7 @@ 

      # - write to fs

      sigpath = "%s/%s" % (builddir, koji.pathinfo.sighdr(rinfo, sigkey))

      koji.ensuredir(os.path.dirname(sigpath))

-     fo = file(sigpath, 'wb')

+     fo = open(sigpath, 'wb')

      fo.write(sighdr)

      fo.close()

      koji.plugin.run_callbacks('postRPMSign', sigkey=sigkey, sighash=sighash, build=binfo, rpm=rinfo)
@@ -6268,7 +6273,7 @@ 

      sig_start, sigsize = koji.find_rpm_sighdr(fn)

      hdr_start = sig_start + sigsize

      hdrsize = koji.rpm_hdr_size(fn, hdr_start)

-     inp = file(fn, 'rb')

+     inp = open(fn, 'rb')

      outp = tempfile.TemporaryFile(mode='w+b')

      #before signature

      outp.write(inp.read(sig_start))
@@ -6305,7 +6310,7 @@ 

          koji.splice_rpm_sighdr(sighdr, rpm_path, temp)

          ts = rpm.TransactionSet()

          ts.setVSFlags(0)  #full verify

-         fo = file(temp, 'rb')

+         fo = open(temp, 'rb')

          hdr = ts.hdrFromFdno(fo.fileno())

          fo.close()

      except:
@@ -6368,7 +6373,7 @@ 

          else:

              os.unlink(signedpath)

      sigpath = "%s/%s" % (builddir, koji.pathinfo.sighdr(rinfo, sigkey))

-     fo = file(sigpath, 'rb')

+     fo = open(sigpath, 'rb')

      sighdr = fo.read()

      fo.close()

      koji.ensuredir(os.path.dirname(signedpath))
@@ -6601,7 +6606,7 @@ 

                  fields['creator.id = %(editor)i'] = '_created_by'

                  fields['revoker.id = %(editor)i'] = '_revoked_by'

              elif arg == 'after':

-                 if not isinstance(value, basestring):

+                 if not isinstance(value, six.string_types):

                      value = datetime.datetime.fromtimestamp(value).isoformat(' ')

                  data['after'] = value

                  clauses.append('ev1.time > %(after)s OR ev2.time > %(after)s')
@@ -6616,7 +6621,7 @@ 

                  fields[c_test] = '_created_after_event'

                  fields[r_test] = '_revoked_after_event'

              elif arg == 'before':

-                 if not isinstance(value, basestring):

+                 if not isinstance(value, six.string_types):

                      value = datetime.datetime.fromtimestamp(value).isoformat(' ')

                  data['before'] = value

                  clauses.append('ev1.time < %(before)s OR ev2.time < %(before)s')
@@ -6632,7 +6637,7 @@ 

                  fields[r_test] = '_revoked_before_event'

          if skip:

              continue

-         fields, aliases = zip(*fields.items())

+         fields, aliases = list(zip(*list(fields.items())))

          query = QueryProcessor(columns=fields, aliases=aliases, tables=[table],

                                 joins=joins, clauses=clauses, values=data)

          ret[table] = query.iterate()
@@ -6771,7 +6776,7 @@ 

              idx.setdefault(row['id'], row)

          if limit is not None and len(idx) > limit:

              break

-     ret['rpms'] = idx.values()

+     ret['rpms'] = list(idx.values())

  

      ret['component_of'] = []

      # find images/archives that contain the build rpms
@@ -6802,7 +6807,7 @@ 

              idx.setdefault(row['id'], row)

          if limit is not None and len(idx) > limit:

              break

-     ret['archives'] = idx.values()

+     ret['archives'] = list(idx.values())

  

      # find images/archives that contain the build archives

      fields = ['archive_id']
@@ -7126,7 +7131,7 @@ 

          #FIXME - if tag_id is None, we don't have a good way to get the package owner.

          #   using all package owners from all tags would be way overkill.

  

-     emails_uniq = dict([(x, 1) for x in emails]).keys()

+     emails_uniq = list(dict([(x, 1) for x in emails]).keys())

      return emails_uniq

  

  def tag_notification(is_successful, tag_id, from_id, build_id, user_id, ignore_success=False, failure_msg=''):
@@ -7149,7 +7154,7 @@ 

          from_tag = get_tag(from_id)

          for email in get_notification_recipients(build, from_tag['id'], state):

              recipients[email] = 1

-     recipients_uniq = recipients.keys()

+     recipients_uniq = list(recipients.keys())

      if len(recipients_uniq) > 0 and not (is_successful and ignore_success):

          task_id = make_task('tagNotification', [recipients_uniq, is_successful, tag_id, from_id, build_id, user_id, ignore_success, failure_msg])

          return task_id
@@ -7357,8 +7362,8 @@ 

          if not self.data and not self.rawdata:

              return "-- incomplete update: no assigns"

          parts = ['INSERT INTO %s ' % self.table]

-         columns = self.data.keys()

-         columns.extend(self.rawdata.keys())

+         columns = list(self.data.keys())

+         columns.extend(list(self.rawdata.keys()))

          parts.append("(%s) " % ', '.join(columns))

          values = []

          for key in columns:
@@ -7401,7 +7406,7 @@ 

              del data['create_event']

              del data['creator_id']

          clauses = ["%s = %%(%s)s" % (k, k) for k in data]

-         query = QueryProcessor(columns=data.keys(), tables=[self.table],

+         query = QueryProcessor(columns=list(data.keys()), tables=[self.table],

                                 clauses=clauses, values=data)

          if query.execute():

              return True
@@ -7520,7 +7525,7 @@ 

          if columns and aliases:

              if len(columns) != len(aliases):

                  raise Exception('column and alias lists must be the same length')

-             self.colsByAlias = dict(zip(aliases, columns))

+             self.colsByAlias = dict(list(zip(aliases, columns)))

          else:

              self.colsByAlias = {}

          self.tables = tables
@@ -7780,7 +7785,7 @@ 

          if not pkginfo:

              #for some operations (e.g. adding a new package), the package

              #entry may not exist yet

-             if isinstance(data['package'], basestring):

+             if isinstance(data['package'], six.string_types):

                  return {'id' : None, 'name' : data['package']}

              else:

                  raise koji.GenericError("Invalid package: %s" % data['package'])
@@ -8062,7 +8067,7 @@ 

              return False

          groups = koji.auth.get_user_groups(user['id'])

          args = self.str.split()[1:]

-         for group_id, group in groups.iteritems():

+         for group_id, group in six.iteritems(groups):

              for pattern in args:

                  if fnmatch.fnmatch(group, pattern):

                      return True
@@ -8668,7 +8673,7 @@ 

          values = {}

          q = """SELECT id, EXTRACT(EPOCH FROM time) FROM events"""

          if before is not None:

-             if not isinstance(before, (int, long, float)):

+             if not isinstance(before, (int, int, float)):

                  raise koji.GenericError('invalid type for before: %s' % type(before))

              # use the repr() conversion because it retains more precision than the

              # string conversion
@@ -8700,7 +8705,7 @@ 

          # we will accept offset and size as strings to work around xmlrpc limits

          offset = koji.decode_int(offset)

          size = koji.decode_int(size)

-         if isinstance(md5sum, basestring):

+         if isinstance(md5sum, six.string_types):

              # this case is for backwards compatibility

              verify = "md5"

              digest = md5sum
@@ -8718,7 +8723,7 @@ 

          fn = get_upload_path(path, name, create=True)

          try:

              st = os.lstat(fn)

-         except OSError, e:

+         except OSError as e:

              if e.errno == errno.ENOENT:

                  pass

              else:
@@ -8732,7 +8737,7 @@ 

                      # but we allow .log files to be uploaded multiple times to support

                      # realtime log-file viewing

                      raise koji.GenericError("file already exists: %s" % fn)

-         fd = os.open(fn, os.O_RDWR | os.O_CREAT, 0666)

+         fd = os.open(fn, os.O_RDWR | os.O_CREAT, 0o666)

          # log_error("fd=%r" %fd)

          try:

              if offset == 0 or (offset == -1 and size == len(contents)):
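
Bare octal literals like `0666` are a syntax error on Python 3; the `0o666` spelling, accepted since Python 2.6, is used instead. A minimal sketch with a hypothetical upload target:

    import os
    import tempfile

    path = os.path.join(tempfile.mkdtemp(), "upload.bin")   # hypothetical upload target
    # 0o666 parses on Python 2.6+ and 3; the old 0666 literal no longer does
    fd = os.open(path, os.O_RDWR | os.O_CREAT, 0o666)
    os.write(fd, b"chunk")
    os.close(fd)
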
@@ -8787,7 +8792,7 @@ 

          data = {}

          try:

              fd = os.open(fn, os.O_RDONLY)

-         except OSError, e:

+         except OSError as e:

              if e.errno == errno.ENOENT:

                  return None

              else:
@@ -8795,7 +8800,7 @@ 

          try:

              try:

                  fcntl.lockf(fd, fcntl.LOCK_SH|fcntl.LOCK_NB)

-             except IOError, e:

+             except IOError as e:

                  raise koji.LockError(e)

              st = os.fstat(fd)

              if not stat.S_ISREG(st.st_mode):
@@ -8834,7 +8839,7 @@ 

          if not os.path.isfile(filePath):

              raise koji.GenericError('no file "%s" output by task %i' % (fileName, taskID))

          # Let the caller handler any IO or permission errors

-         f = file(filePath, 'r')

+         f = open(filePath, 'r')

          if isinstance(offset, str):

              offset = int(offset)

          if offset != None and offset > 0:
@@ -9260,9 +9265,9 @@ 

          if before:

              if isinstance(before, datetime.datetime):

                  before = calendar.timegm(before.utctimetuple())

-             elif isinstance(before, (str, unicode)):

+             elif isinstance(before, (str, six.text_type)):

                  before = koji.util.parseTime(before)

-             elif isinstance(before, (int, long)):

+             elif isinstance(before, six.integer_types):

                  pass

              else:

                  raise koji.GenericError('invalid type for before: %s' % type(before))
@@ -9270,9 +9275,9 @@ 

          if after:

              if isinstance(after, datetime.datetime):

                  after = calendar.timegm(after.utctimetuple())

-             elif isinstance(after, (str, unicode)):

+             elif isinstance(after, (str, six.text_type)):

                  after = koji.util.parseTime(after)

-             elif isinstance(after, (int, long)):

+             elif isinstance(after, six.integer_types):

                  pass

              else:

                  raise koji.GenericError('invalid type for after: %s' % type(after))
@@ -9360,7 +9365,7 @@ 

  

      def listTagged(self, tag, event=None, inherit=False, prefix=None, latest=False, package=None, owner=None, type=None):

          """List builds tagged with tag"""

-         if not isinstance(tag, (int, long)):

+         if not isinstance(tag, six.integer_types):

              #lookup tag id

              tag = get_tag_id(tag, strict=True)

          results = readTaggedBuilds(tag, event, inherit=inherit, latest=latest, package=package, owner=owner, type=type)
@@ -9371,14 +9376,14 @@ 

  

      def listTaggedRPMS(self, tag, event=None, inherit=False, latest=False, package=None, arch=None, rpmsigs=False, owner=None, type=None):

          """List rpms and builds within tag"""

-         if not isinstance(tag, (int, long)):

+         if not isinstance(tag, six.integer_types):

              #lookup tag id

              tag = get_tag_id(tag, strict=True)

          return readTaggedRPMS(tag, event=event, inherit=inherit, latest=latest, package=package, arch=arch, rpmsigs=rpmsigs, owner=owner, type=type)

  

      def listTaggedArchives(self, tag, event=None, inherit=False, latest=False, package=None, type=None):

          """List archives and builds within a tag"""

-         if not isinstance(tag, (int, long)):

+         if not isinstance(tag, six.integer_types):

              tag = get_tag_id(tag, strict=True)

          return readTaggedArchives(tag, event=event, inherit=inherit, latest=latest, package=package, type=type)

  
@@ -9535,14 +9540,14 @@ 

  

      def getLatestBuilds(self, tag, event=None, package=None, type=None):

          """List latest builds for tag (inheritance enabled)"""

-         if not isinstance(tag, (int, long)):

+         if not isinstance(tag, six.integer_types):

              #lookup tag id

              tag = get_tag_id(tag, strict=True)

          return readTaggedBuilds(tag, event, inherit=True, latest=True, package=package, type=type)

  

      def getLatestRPMS(self, tag, package=None, arch=None, event=None, rpmsigs=False, type=None):

          """List latest RPMS for tag (inheritance enabled)"""

-         if not isinstance(tag, (int, long)):

+         if not isinstance(tag, six.integer_types):

              #lookup tag id

              tag = get_tag_id(tag, strict=True)

          return readTaggedRPMS(tag, package=package, arch=arch, event=event, inherit=True, latest=True, rpmsigs=rpmsigs, type=type)
@@ -9602,13 +9607,13 @@ 

  

      def getInheritanceData(self, tag, event=None):

          """Return inheritance data for tag"""

-         if not isinstance(tag, (int, long)):

+         if not isinstance(tag, six.integer_types):

              #lookup tag id

              tag = get_tag_id(tag, strict=True)

          return readInheritanceData(tag, event)

  

      def setInheritanceData(self, tag, data, clear=False):

-         if not isinstance(tag, (int, long)):

+         if not isinstance(tag, six.integer_types):

              #lookup tag id

              tag = get_tag_id(tag, strict=True)

          context.session.assertPerm('admin')
@@ -9619,7 +9624,7 @@ 

              stops = {}

          if jumps is None:

              jumps = {}

-         if not isinstance(tag, (int, long)):

+         if not isinstance(tag, six.integer_types):

              #lookup tag id

              tag = get_tag_id(tag, strict=True)

          for mapping in [stops, jumps]:
@@ -9646,7 +9651,7 @@ 

          - buildroot_id

  

          If no build has the given ID, or the build generated no RPMs, an empty list is returned."""

-         if not isinstance(build, (int, long)):

+         if not isinstance(build, six.integer_types):

              #lookup build id

              build = self.findBuildID(build, strict=True)

          return self.listRPMs(buildID=build)
@@ -9805,7 +9810,7 @@ 

  

          headers = koji.get_header_fields(rpm_path, headers)

          for key, value in headers.items():

-             if isinstance(value, basestring):

+             if isinstance(value, six.string_types):

                  headers[key] = koji.fixEncoding(value)

          return headers

  
@@ -9870,9 +9875,9 @@ 

                  userID = get_user(userID, strict=True)['id']

              if pkgID is not None:

                  pkgID = get_package_id(pkgID, strict=True)

-             result_list = readPackageList(tagID=tagID, userID=userID, pkgID=pkgID,

+             result_list = list(readPackageList(tagID=tagID, userID=userID, pkgID=pkgID,

                                            inherit=inherited, with_dups=with_dups,

-                                           event=event).values()

+                                           event=event).values())

              if with_dups:

                  # when with_dups=True, readPackageList returns a list of list of dicts

                  # convert it to a list of dicts for consistency
@@ -10037,7 +10042,7 @@ 

          return taginfo

  

      def getRepo(self, tag, state=None, event=None):

-         if isinstance(tag, (int, long)):

+         if isinstance(tag, six.integer_types):

              id = tag

          else:

              id = get_tag_id(tag, strict=True)
@@ -10292,8 +10297,8 @@ 

                          if val.find('<?xml', 0, 10) == -1:

                              #handle older base64 encoded data

                              val = base64.decodestring(val)

-                         data, method = xmlrpclib.loads(val)

-                     except xmlrpclib.Fault, fault:

+                         data, method = xmlrpc_client.loads(val)

+                     except xmlrpc_client.Fault as fault:

                          data = fault

                      task[f] = data

              yield task
@@ -10340,7 +10345,7 @@ 

          #XXX hard-coded interval

          c = context.cnx.cursor()

          c.execute(q, koji.TASK_STATES)

-         return [dict(zip([f[1] for f in fields], row)) for row in c.fetchall()]

+         return [dict(list(zip([f[1] for f in fields], row))) for row in c.fetchall()]

  

      def resubmitTask(self, taskID):

          """Retry a canceled or failed task, using the same parameter as the original task.
@@ -10528,14 +10533,14 @@ 

          buildinfo = get_build(build)

          if not buildinfo:

              raise koji.GenericError('build does not exist: %s' % build)

-         elif isinstance(ts, xmlrpclib.DateTime):

+         elif isinstance(ts, xmlrpc_client.DateTime):

              #not recommended

              #the xmlrpclib.DateTime class is almost useless

              try:

                  ts = time.mktime(time.strptime(str(ts), '%Y%m%dT%H:%M:%S'))

              except ValueError:

                  raise koji.GenericError("Invalid time: %s" % ts)

-         elif not isinstance(ts, (int, long, float)):

+         elif not isinstance(ts, (int, int, float)):

              raise koji.GenericError("Invalid type for timestamp")

          koji.plugin.run_callbacks('preBuildStateChange', attribute='completion_ts', old=buildinfo['completion_ts'], new=ts, info=buildinfo)

          buildid = buildinfo['id']
@@ -11046,7 +11051,7 @@ 

                    ('checksum_type', 'checksum_type'),

                    ('project_dep', 'project_dep'),

                   ]

-         columns, aliases = zip(*fields)

+         columns, aliases = list(zip(*fields))

          query = QueryProcessor(tables=tables, columns=columns,

                                 joins=joins, clauses=clauses,

                                 values=self.data,
@@ -11224,7 +11229,7 @@ 

          WHERE host_id = %%(host_id)s AND state = %%(st_open)s

          """  % (",".join(fields))

          c.execute(q, locals())

-         tasks = [dict(zip(fields, x)) for x in c.fetchall()]

+         tasks = [dict(list(zip(fields, x))) for x in c.fetchall()]

          for task in tasks:

              id = task['id']

              if task['waiting']:
@@ -11286,7 +11291,7 @@ 

          """  % (",".join(fields))

          c.execute(q, locals())

          for data in c.fetchall():

-             data = dict(zip(fields, data))

+             data = dict(list(zip(fields, data)))

              # XXX - we should do some pruning here, but for now...

              # check arch

              if data['arch'] not in arches:
@@ -11458,7 +11463,7 @@ 

              safer_move(fn, dest)

              os.symlink(dest, fn)

          if logs:

-             for key, files in logs.iteritems():

+             for key, files in six.iteritems(logs):

                  if key:

                      logdir = "%s/logs/%s" % (dir, key)

                  else:
@@ -11481,7 +11486,7 @@ 

          scratchdir = koji.pathinfo.scratch()

          username = get_user(task.getOwner())['name']

          destdir = os.path.join(scratchdir, username, 'task_%s' % task_id)

-         for reldir, files in results['files'].items() + [('', results['logs'])]:

+         for reldir, files in list(results['files'].items()) + [('', results['logs'])]:

              for filename in files:

                  if reldir:

                      relpath = os.path.join(reldir, filename)
@@ -11513,7 +11518,7 @@ 

          scratchdir = koji.pathinfo.scratch()

          username = get_user(task.getOwner())['name']

          destdir = os.path.join(scratchdir, username, 'task_%s' % task_id)

-         for relpath in results['output'].keys() + results['logs']:

+         for relpath in list(results['output'].keys()) + results['logs']:

              filename = os.path.join(koji.pathinfo.task(results['task_id']), relpath)

              dest = os.path.join(destdir, relpath)

              koji.ensuredir(os.path.dirname(dest))
@@ -11669,7 +11674,7 @@ 

          maven_buildroot_id = maven_results['buildroot_id']

          maven_task_dir = koji.pathinfo.task(maven_task_id)

          # import the build output

-         for relpath, files in maven_results['files'].iteritems():

+         for relpath, files in six.iteritems(maven_results['files']):

              dir_maven_info = maven_info

              poms = [f for f in files if f.endswith('.pom')]

              if len(poms) == 0:
@@ -11817,7 +11822,7 @@ 

  

          task_dir = koji.pathinfo.task(results['task_id'])

          # import the build output

-         for relpath, metadata in results['output'].iteritems():

+         for relpath, metadata in six.iteritems(results['output']):

              archivetype = get_archive_type(relpath)

              if not archivetype:

                  # Unknown archive type, fail the build
@@ -12041,9 +12046,9 @@ 

              extra_deps = []

          task_deps = {}

          for dep in extra_deps:

-             if isinstance(dep, (int, long)):

+             if isinstance(dep, six.integer_types):

                  task_output = list_task_output(dep, stat=True)

-                 for filepath, filestats in task_output.iteritems():

+                 for filepath, filestats in six.iteritems(task_output):

                      if os.path.splitext(filepath)[1] in ['.log', '.md5', '.sha1']:

                          continue

                      tokens = filepath.split('/')
@@ -12076,7 +12081,7 @@ 

                          logger.error("Current build is %s, new build is %s.", idx_build, archive['build_id'])

                          maven_build_index[archive['group_id']][archive['artifact_id']][archive['version']] = archive['build_id']

  

-         ignore.extend(task_deps.values())

+         ignore.extend(list(task_deps.values()))

  

          SNAPSHOT_RE = re.compile(r'-\d{8}\.\d{6}-\d+')

          ignore_by_label = {}
@@ -12129,7 +12134,7 @@ 

                          if build_id:

                              build = get_build(build_id)

                              logger.error("g:a:v supplied by build %(nvr)s", build)

-                             logger.error("Build supplies %i archives: %r", len(build_archives), build_archives.keys())

+                             logger.error("Build supplies %i archives: %r", len(build_archives), list(build_archives.keys()))

                          if tag_archive:

                              logger.error("Size mismatch, br: %i, db: %i", fileinfo['size'], tag_archive['size'])

                          raise koji.BuildrootError('Unknown file in build environment: %s, size: %s' % \
@@ -12203,7 +12208,7 @@ 

              raise koji.GenericError("Repo %(id)s not in INIT state (got %(state)s)" % rinfo)

          repodir = koji.pathinfo.repo(repo_id, rinfo['tag_name'])

          workdir = koji.pathinfo.work()

-         for arch, (uploadpath, files) in data.iteritems():

+         for arch, (uploadpath, files) in six.iteritems(data):

              archdir = "%s/%s" % (repodir, arch)

              if not os.path.isdir(archdir):

                  raise koji.GenericError("Repo arch directory missing: %s" % archdir)
@@ -12271,11 +12276,11 @@ 

              # assuming login was asserted earlier

              u_fn = os.path.join(udir, '.user')

              if os.path.exists(u_fn):

-                 user_id = int(file(u_fn, 'r').read())

+                 user_id = int(open(u_fn, 'r').read())

                  if context.session.user_id != user_id:

                      raise koji.GenericError("Invalid upload directory, not owner: %s" % orig_reldir)

              else:

-                 fo = file(u_fn, 'w')

+                 fo = open(u_fn, 'w')

                  fo.write(str(context.session.user_id))

                  fo.close()

      return os.path.join(udir, name)
@@ -12315,11 +12320,11 @@ 

      size = 0

      chksum = sum_cls()

      inf = environ['wsgi.input']

-     fd = os.open(fn, os.O_RDWR | os.O_CREAT, 0666)

+     fd = os.open(fn, os.O_RDWR | os.O_CREAT, 0o666)

      try:

          try:

              fcntl.lockf(fd, fcntl.LOCK_EX|fcntl.LOCK_NB)

-         except IOError, e:

+         except IOError as e:

              raise koji.LockError(e)

          if offset == -1:

              offset = os.lseek(fd, 0, 2)
@@ -12361,16 +12366,16 @@ 

      context.req = {}

      print("Creating a session")

      context.session = koji.auth.Session(None, hostip="127.0.0.1")

-     print(context.session)

+     print((context.session))

      test_user = "host/1"

      pw = "foobar"

-     print("Logging in as %s" % test_user)

+     print(("Logging in as %s" % test_user))

      session_info = context.session.login(test_user, pw, {'hostip':'127.0.0.1'})

      for k in session_info.keys():

          session_info[k] = [session_info[k]]

      s2 = koji.auth.Session(session_info, '127.0.0.1')

      print(s2)

-     print(s2.getHostId())

+     print((s2.getHostId()))

      context.session = s2

      print("Associating host")

      Host()

file modified
+28 -26
@@ -18,7 +18,8 @@ 

  # Authors:

  #       Mike McLean <mikem@redhat.com>

  

- from ConfigParser import RawConfigParser

+ from __future__ import absolute_import

+ from six.moves.configparser import RawConfigParser

  import datetime

  import inspect

  import logging
@@ -29,8 +30,7 @@ 

  import types

  import pprint

  import resource

- import xmlrpclib

- from xmlrpclib import getparser, dumps, Fault

+ from six.moves import xmlrpc_client

  from koji.server import WSGIWrapper

  

  import koji
@@ -40,12 +40,14 @@ 

  import koji.policy

  import koji.util

  from koji.context import context

+ import six

+ from six.moves import range

  

  

  # Workaround to allow xmlrpclib deal with iterators

- class Marshaller(xmlrpclib.Marshaller):

+ class Marshaller(six.moves.xmlrpc_client.Marshaller):

  

-     dispatch = xmlrpclib.Marshaller.dispatch.copy()

+     dispatch = six.moves.xmlrpc_client.Marshaller.dispatch.copy()

  

      def dump_generator(self, value, write):

          dump = self.__dump
@@ -61,7 +63,7 @@ 

          self.dump_string(value, write)

      dispatch[datetime.datetime] = dump_datetime

  

- xmlrpclib.Marshaller = Marshaller

+ six.moves.xmlrpc_client.Marshaller = Marshaller

  

  

  class HandlerRegistry(object):
@@ -109,7 +111,7 @@ 

  

          Handlers are functions marked with one of the decorators defined in koji.plugin

          """

-         for v in vars(plugin).itervalues():

+         for v in six.itervalues(vars(plugin)):

              if isinstance(v, type):

                  #skip classes

                  continue
@@ -129,7 +131,7 @@ 

          if ret:

              return ret

          ret = tuple(inspect.getargspec(func))

-         if inspect.ismethod(func) and func.im_self:

+         if inspect.ismethod(func) and func.__self__:

              # bound method, remove first arg

              args, varargs, varkw, defaults = ret

              if args:
@@ -156,17 +158,17 @@ 

  

      def _getFuncArgs(self, func):

          args = []

-         for x in range(0, func.func_code.co_argcount):

-             if x == 0 and func.func_code.co_varnames[x] == "self":

+         for x in range(0, func.__code__.co_argcount):

+             if x == 0 and func.__code__.co_varnames[x] == "self":

                  continue

-             if func.func_defaults and func.func_code.co_argcount - x <= len(func.func_defaults):

-                 args.append((func.func_code.co_varnames[x], func.func_defaults[x - func.func_code.co_argcount + len(func.func_defaults)]))

+             if func.__defaults__ and func.__code__.co_argcount - x <= len(func.__defaults__):

+                 args.append((func.__code__.co_varnames[x], func.__defaults__[x - func.__code__.co_argcount + len(func.__defaults__)]))

              else:

-                 args.append(func.func_code.co_varnames[x])

+                 args.append(func.__code__.co_varnames[x])

          return args

  

      def system_listMethods(self):

-         return self.funcs.keys()

+         return list(self.funcs.keys())

  

      def system_methodSignature(self, method):

          #it is not possible to autogenerate this data
@@ -219,7 +221,7 @@ 

              return self.handlers.get(name)

  

      def _read_request(self, stream):

-         parser, unmarshaller = getparser()

+         parser, unmarshaller = xmlrpc_client.getparser()

          rlen = 0

          maxlen = opts.get('MaxRequestLength', None)

          while True:
@@ -241,10 +243,10 @@ 

              response = handler(environ)

              # wrap response in a singleton tuple

              response = (response,)

-             response = dumps(response, methodresponse=1, allow_none=1)

-         except Fault, fault:

+             response = xmlrpc_client.dumps(response, methodresponse=1, allow_none=1)

+         except xmlrpc_client.Fault as fault:

              self.traceback = True

-             response = dumps(fault)

+             response = xmlrpc_client.dumps(fault)

          except:

              self.traceback = True

              # report exception back to server
@@ -268,7 +270,7 @@ 

                  else:

                      faultString = "%s: %s" % (e_class, e)

              self.logger.warning(tb_str)

-             response = dumps(Fault(faultCode, faultString))

+             response = xmlrpc_client.dumps(xmlrpc_client.Fault(faultCode, faultString))

  

          return response

  
@@ -336,7 +338,7 @@ 

          for call in calls:

              try:

                  result = self._dispatch(call['methodName'], call['params'])

-             except Fault, fault:

+             except xmlrpc_client.Fault as fault:

                  results.append({'faultCode': fault.faultCode, 'faultString': fault.faultString})

              except:

                  # transform unknown exceptions into XML-RPC Faults
@@ -371,7 +373,7 @@ 

          faultString = "server is offline"

      else:

          faultString = msg

-     response = dumps(Fault(faultCode, faultString))

+     response = xmlrpc_client.dumps(xmlrpc_client.Fault(faultCode, faultString))

      headers = [

          ('Content-Length', str(len(response))),

          ('Content-Type', "text/xml"),
@@ -512,7 +514,7 @@ 

          opts['policy'] = dict(config.items('policy'))

      else:

          opts['policy'] = {}

-     for pname, text in _default_policies.iteritems():

+     for pname, text in six.iteritems(_default_policies):

          opts['policy'].setdefault(pname, text)

      # use configured KojiDir

      if opts.get('KojiDir') is not None:
@@ -574,14 +576,14 @@ 

      for plugin_name in opts.get('Plugins', '').split():

          alltests.append(koji.policy.findSimpleTests(vars(plugins.get(plugin_name))))

      policy = {}

-     for pname, text in opts['policy'].iteritems():

+     for pname, text in six.iteritems(opts['policy']):

          #filter/merge tests

          merged = {}

          for tests in alltests:

              # tests can be limited to certain policies by setting a class variable

-             for name, test in tests.iteritems():

+             for name, test in six.iteritems(tests):

                  if hasattr(test, 'policy'):

-                     if isinstance(test.policy, basestring):

+                     if isinstance(test.policy, six.string_types):

                          if pname != test.policy:

                              continue

                      elif pname not in test.policy:
@@ -673,7 +675,7 @@ 

      global kojihub

      scriptsdir = os.path.dirname(environ['SCRIPT_FILENAME'])

      sys.path.insert(0, scriptsdir)

-     import kojihub

+     from . import kojihub

  

  

  #

file modified
-1
@@ -293,7 +293,6 @@ 

  %files vm

  %defattr(-,root,root)

  %{_sbindir}/kojivmd

- #dir %{_datadir}/kojivmd

  %{_datadir}/kojivmd/kojikamid

  %if %{use_systemd}

  %{_unitdir}/kojivmd.service

file modified
+1 -1
@@ -4,7 +4,7 @@ 

  PACKAGE = $(shell basename `pwd`)

  PYFILES = $(wildcard *.py)

  PYSCRIPTS = context.py

- PYVER := $(shell $(PYTHON) -c 'import sys; print("%.3s" % (sys.version))')

+ PYVER := $(shell $(PYTHON) -c 'import sys; print("{:.3}".format(sys.version))')

  PYSYSDIR := $(shell $(PYTHON) -c 'import sys; print(sys.prefix)')

  PYLIBDIR = $(PYSYSDIR)/lib/python$(PYVER)

  PKGDIR = $(PYLIBDIR)/site-packages/$(PACKAGE)

file modified
+98 -95
@@ -21,7 +21,12 @@ 

  #       Mike McLean <mikem@redhat.com>

  #       Mike Bonnet <mikeb@redhat.com>

  

+ from __future__ import absolute_import

+ from __future__ import print_function

  import sys

+ import six

+ from six.moves import range

+ from six.moves import zip

  try:

      import krbV

  except ImportError:  # pragma: no cover
@@ -29,11 +34,10 @@ 

      sys.stderr.flush()

  import base64

  import datetime

- import ConfigParser

+ from six.moves import configparser

  import errno

- import exceptions

  from fnmatch import fnmatch

- import httplib

+ import six.moves.http_client

  import imp

  import logging

  import logging.handlers
@@ -65,19 +69,25 @@ 

  import shutil

  import signal

  import socket

+ from .ssl import SSLCommon

+ try:

+     from .ssl import ssl as pyssl

+ except ImportError:  # pragma: no cover

+     pass

  import struct

  import tempfile

  import time

  import traceback

- import urllib

- import urllib2

- import urlparse

- import util

- import warnings

- import xmlrpclib

+ import six

+ six.add_move(six.MovedAttribute("urllib_splitport", "urllib2", "urllib.parse", "splitport"))

+ from six.moves import urllib_splitport

+ from six.moves import urllib

+ from . import util

+ from six.moves import xmlrpc_client

  import xml.sax

  import xml.sax.handler

- from xmlrpclib import loads, dumps, Fault

+ from six.moves.xmlrpc_client import loads, dumps, Fault

+ from six.moves import builtins

  

  PROFILE_MODULES = {}  # {module_name: module_instance}

  
@@ -460,7 +470,7 @@ 

  

  def decode_int(n):

      """If n is not an integer, attempt to convert it"""

-     if isinstance(n, (int, long)):

+     if isinstance(n, six.integer_types):

          return n

      #else

      return int(n)
@@ -470,8 +480,8 @@ 

  def safe_xmlrpc_loads(s):

      """Load xmlrpc data from a string, but catch faults"""

      try:

-         return loads(s)

-     except Fault, f:

+         return xmlrpc_client.loads(s)

+     except xmlrpc_client.Fault as f:

          return f

  

  ## BEGIN kojikamid dup
@@ -528,7 +538,7 @@ 

      """Convert a list of bytes to an integer (network byte order)"""

      sum = 0

      n = len(data)

-     for i in xrange(n):

+     for i in range(n):

          sum += data[i] << (8 * (n - i - 1))

      return sum

  
@@ -547,8 +557,8 @@ 

      f = filename or file object

      ofs = offset of the header

      """

-     if isinstance(f, (str, unicode)):

-         fo = file(f, 'rb')

+     if isinstance(f, (six.text_type, six.binary_type)):

+         fo = open(f, 'rb')

      else:

          fo = f

      if ofs != None:
@@ -577,7 +587,7 @@ 

      # add eight bytes for section header

      hdrsize = hdrsize + 8

  

-     if not isinstance(f, (str, unicode)):

+     if not isinstance(f, (str, six.text_type)):

          fo.close()

      return hdrsize

  
@@ -606,9 +616,9 @@ 

  

          #read the index (starts at offset 16)

          index = {}

-         for i in xrange(il):

+         for i in range(il):

              entry = []

-             for j in xrange(4):

+             for j in range(4):

                  ofs = 16 + i*16 + j*4

                  data = [ord(x) for x in self.header[ofs:ofs+4]]

                  entry.append(multibyte(data))
@@ -624,14 +634,14 @@ 

          store = 16 + il * 16

          #print("start is: %d" % start)

          #print("index length: %d" % il)

-         print("Store at offset %d (%0x)" % (store, store))

+         print(("Store at offset %d (%0x)" % (store, store)))

          #sort entries by offset, dtype

          #also rearrange: tag, dtype, offset, count -> offset, dtype, tag, count

-         order = sorted([(x[2], x[1], x[0], x[3]) for x in self.index.itervalues()])

+         order = sorted([(x[2], x[1], x[0], x[3]) for x in six.itervalues(self.index)])

          next = store

          #map some rpmtag codes

          tags = {}

-         for name, code in rpm.__dict__.iteritems():

+         for name, code in rpm.__dict__.items():

              if name.startswith('RPMTAG_') and isinstance(code, int):

                  tags[code] = name[7:].lower()

          for entry in order:
@@ -641,63 +651,63 @@ 

              if next is not None:

                  if pos > next:

                      print("** HOLE between entries")

-                     print("Hex: %s" % hex_string(self.header[next:pos]))

-                     print("Data: %r" % self.header[next:pos])

+                     print(("Hex: %s" % hex_string(self.header[next:pos])))

+                     print(("Data: %r" % self.header[next:pos]))

                  elif pos < next:

                      print("** OVERLAPPING entries")

-             print("Tag: %d [%s], Type: %d, Offset: %x, Count: %d" \

-                     % (tag, tags.get(tag, '?'), dtype, offset, count))

+             print(("Tag: %d [%s], Type: %d, Offset: %x, Count: %d" \

+                     % (tag, tags.get(tag, '?'), dtype, offset, count)))

              if dtype == 0:

                  #null

                  print("[NULL entry]")

                  next = pos

              elif dtype == 1:

                  #char

-                 for i in xrange(count):

-                     print("Char: %r" % self.header[pos])

+                 for i in range(count):

+                     print(("Char: %r" % self.header[pos]))

                      pos += 1

                  next = pos

              elif dtype >= 2 and dtype <= 5:

                  #integer

                  n = 1 << (dtype - 2)

-                 for i in xrange(count):

+                 for i in range(count):

                      data = [ord(x) for x in self.header[pos:pos+n]]

-                     print("%r" % data)

+                     print(("%r" % data))

                      num = multibyte(data)

-                     print("Int(%d): %d" % (n, num))

+                     print(("Int(%d): %d" % (n, num)))

                      pos += n

                  next = pos

              elif dtype == 6:

                  # string (null terminated)

                  end = self.header.find('\0', pos)

-                 print("String(%d): %r" % (end-pos, self.header[pos:end]))

+                 print(("String(%d): %r" % (end-pos, self.header[pos:end])))

                  next = end + 1

              elif dtype == 7:

-                 print("Data: %s" % hex_string(self.header[pos:pos+count]))

+                 print(("Data: %s" % hex_string(self.header[pos:pos+count])))

                  next = pos+count

              elif dtype == 8:

                  # string array

-                 for i in xrange(count):

+                 for i in range(count):

                      end = self.header.find('\0', pos)

-                     print("String(%d): %r" % (end-pos, self.header[pos:end]))

+                     print(("String(%d): %r" % (end-pos, self.header[pos:end])))

                      pos = end + 1

                  next = pos

              elif dtype == 9:

                  # unicode string array

-                 for i in xrange(count):

+                 for i in range(count):

                      end = self.header.find('\0', pos)

-                     print("i18n(%d): %r" % (end-pos, self.header[pos:end]))

+                     print(("i18n(%d): %r" % (end-pos, self.header[pos:end])))

                      pos = end + 1

                  next = pos

              else:

-                 print("Skipping data type %x" % dtype)

+                 print(("Skipping data type %x" % dtype))

                  next = None

          if next is not None:

              pos = store + self.datalen

              if next < pos:

                  print("** HOLE at end of data block")

-                 print("Hex: %s" % hex_string(self.header[next:pos]))

-                 print("Data: %r" % self.header[next:pos])

+                 print(("Hex: %s" % hex_string(self.header[next:pos])))

+                 print(("Data: %r" % self.header[next:pos]))

              elif pos > next:

                  print("** OVERFLOW in data block")

  
@@ -738,7 +748,7 @@ 

  def rip_rpm_sighdr(src):

      """Rip the signature header out of an rpm"""

      (start, size) = find_rpm_sighdr(src)

-     fo = file(src, 'rb')

+     fo = open(src, 'rb')

      fo.seek(start, 0)

      sighdr = fo.read(size)

      fo.close()
@@ -749,7 +759,7 @@ 

      (start, size) = find_rpm_sighdr(src)

      start += size

      size = rpm_hdr_size(src, start)

-     fo = file(src, 'rb')

+     fo = open(src, 'rb')

      fo.seek(start, 0)

      hdr = fo.read(size)

      fo.close()
@@ -848,8 +858,8 @@ 

      if dst is None:

          (fd, dst) = tempfile.mkstemp()

          os.close(fd)

-     src_fo = file(src, 'rb')

-     dst_fo = file(dst, 'wb')

+     src_fo = open(src, 'rb')

+     dst_fo = open(dst, 'wb')

      dst_fo.write(src_fo.read(start))

      dst_fo.write(sighdr)

      src_fo.seek(size, 1)
@@ -867,8 +877,8 @@ 

      if ts is None:

          ts = rpm.TransactionSet()

          ts.setVSFlags(rpm._RPMVSF_NOSIGNATURES|rpm._RPMVSF_NODIGESTS)

-     if isinstance(f, (str, unicode)):

-         fo = file(f, "r")

+     if isinstance(f, (six.text_type, six.binary_type)):

+         fo = open(f, "r")

      else:

          fo = f

      hdr = ts.hdrFromFdno(fo.fileno())
@@ -966,7 +976,7 @@ 

              return False

  

  def _check_NVR(nvr):

-     if isinstance(nvr, basestring):

+     if isinstance(nvr, six.string_types):

          nvr = parse_NVR(nvr)

      if '-' in nvr['version']:

          raise GenericError('The "-" character not allowed in version field')
@@ -995,7 +1005,7 @@ 

  

  

  def _check_NVRA(nvra):

-     if isinstance(nvra, basestring):

+     if isinstance(nvra, six.string_types):

              nvra = parse_NVRA(nvra)

      if '-' in nvra['version']:

          raise GenericError('The "-" character not allowed in version field')
@@ -1085,7 +1095,7 @@ 

      values = {}

      handler = POMHandler(values, fields)

      if path:

-         fd = file(path)

+         fd = open(path)

          contents = fd.read()

          fd.close()

  
@@ -1106,7 +1116,7 @@ 

          xml.sax.parseString(contents, handler)

  

      for field in fields:

-         if field not in values.keys():

+         if field not in list(values.keys()):

              raise GenericError('could not extract %s from POM: %s' % (field, (path or '<contents>')))

      return values

  
@@ -1400,14 +1410,14 @@ 

      if opts.get('use_host_resolv', False) and os.path.exists('/etc/hosts'):

          # if we're setting up DNS,

          # also copy /etc/hosts from the host

-         etc_hosts = file('/etc/hosts')

+         etc_hosts = open('/etc/hosts')

          files['etc/hosts'] = etc_hosts.read()

          etc_hosts.close()

      mavenrc = ''

      if opts.get('maven_opts'):

          mavenrc = 'export MAVEN_OPTS="%s"\n' % ' '.join(opts['maven_opts'])

      if opts.get('maven_envs'):

-         for name, val in opts['maven_envs'].iteritems():

+         for name, val in opts['maven_envs'].items():

              mavenrc += 'export %s="%s"\n' % (name, val)

      if mavenrc:

          files['etc/mavenrc'] = mavenrc
@@ -1470,10 +1480,10 @@ 

  """ % locals())

  

      parts.append("\n")

-     for key, value in config_opts.iteritems():

+     for key, value in config_opts.items():

          parts.append("config_opts[%r] = %r\n" % (key, value))

      parts.append("\n")

-     for key, value in plugin_conf.iteritems():

+     for key, value in plugin_conf.items():

          parts.append("config_opts['plugin_conf'][%r] = %r\n" % (key, value))

      parts.append("\n")

  
@@ -1481,14 +1491,14 @@ 

          # This line is REQUIRED for mock to work if bind_opts defined.

          parts.append("config_opts['internal_dev_setup'] = False\n")

          for key in bind_opts.keys():

-             for mnt_src, mnt_dest in bind_opts.get(key).iteritems():

+             for mnt_src, mnt_dest in bind_opts.get(key).items():

                  parts.append("config_opts['plugin_conf']['bind_mount_opts'][%r].append((%r, %r))\n" % (key, mnt_src, mnt_dest))

          parts.append("\n")

  

-     for key, value in macros.iteritems():

+     for key, value in macros.items():

          parts.append("config_opts['macros'][%r] = %r\n" % (key, value))

      parts.append("\n")

-     for key, value in files.iteritems():

+     for key, value in files.items():

          parts.append("config_opts['files'][%r] = %r\n" % (key, value))

  

      return ''.join(parts)
@@ -1534,7 +1544,7 @@ 

      on options"""

      if topurl:

          url = "%s/%s" % (topurl, relpath)

-         src = urllib2.urlopen(url)

+         src = urllib.request.urlopen(url)

          fo = tempfile.TemporaryFile(dir=tempdir)

          shutil.copyfileobj(src, fo)

          src.close()
@@ -1551,7 +1561,7 @@ 

      configs = []

      try:

          conf_dir_contents = os.listdir(dir_name)

-     except OSError, exception:

+     except OSError as exception:

          if exception.errno != errno.ENOENT:

              raise

      else:
@@ -1630,7 +1640,7 @@ 

      got_conf = False

      for configFile in configs:

          f = open(configFile)

-         config = ConfigParser.ConfigParser()

+         config = configparser.ConfigParser()

          config.readfp(f)

          f.close()

          if config.has_section(profile_name):
@@ -1725,7 +1735,7 @@ 

  

  class PathInfo(object):

      # ASCII numbers and upper- and lower-case letter for use in tmpdir()

-     ASCII_CHARS = [chr(i) for i in range(48, 58) + range(65, 91) + range(97, 123)]

+     ASCII_CHARS = [chr(i) for i in list(range(48, 58)) + list(range(65, 91)) + list(range(97, 123))]

  

      def __init__(self, topdir=None):

          self._topdir = topdir
@@ -1909,7 +1919,7 @@ 

              return True

          # else

          return False

-     if isinstance(e, httplib.BadStatusLine):

+     if isinstance(e, six.moves.http_client.BadStatusLine):

          return True

      if requests is not None:

          try:
@@ -1919,7 +1929,7 @@ 

                  e2 = getattr(e, 'args', [None])[0]

                  if isinstance(e2, requests.packages.urllib3.exceptions.ProtocolError):

                      e3 = getattr(e2, 'args', [None, None])[1]

-                     if isinstance(e3, httplib.BadStatusLine):

+                     if isinstance(e3, six.moves.http_client.BadStatusLine):

                          return True

                  if isinstance(e2, socket.error):

                      # same check as unwrapped socket error
@@ -2053,9 +2063,7 @@ 

                  pass

  

          if not krbV:

-             raise exceptions.ImportError(

-                 "Please install python-krbV to use kerberos."

-             )

+             raise builtins.ImportError("Please install python-krbV to use kerberos.")

  

          ctx = krbV.default_context()

  
@@ -2104,7 +2112,7 @@ 

          # decode and decrypt the login info

          sinfo_priv = base64.decodestring(sinfo_enc)

          sinfo_str = ac.rd_priv(sinfo_priv)

-         sinfo = dict(zip(['session-id', 'session-key'], sinfo_str.split()))

+         sinfo = dict(list(zip(['session-id', 'session-key'], sinfo_str.split())))

  

          if not sinfo:

              self.logger.warn('No session info received')
@@ -2118,8 +2126,8 @@ 

          """Get the Kerberos principal of the server we're connecting

          to, based on baseurl."""

  

-         uri = urlparse.urlsplit(self.baseurl)

-         host, port = urllib.splitport(uri[1])

+         uri = urllib.parse.urlsplit(self.baseurl)

+         host, port = urllib_splitport(uri[1])

          if self.opts.get('krb_rdns', True):

              servername = socket.getfqdn(host)

          else:
@@ -2131,13 +2139,11 @@ 

  

      def gssapi_login(self, proxyuser=None):

          if not HTTPKerberosAuth:

-             raise exceptions.ImportError(

-                 "Please install python-requests-kerberos to use GSSAPI."

-             )

+             raise builtins.ImportError("Please install python-requests-kerberos to use GSSAPI.")

  

          # force https

          old_baseurl = self.baseurl

-         uri = urlparse.urlsplit(self.baseurl)

+         uri = urllib.parse.urlsplit(self.baseurl)

          if uri[0] != 'https':

              self.baseurl = 'https://%s%s' % (uri[1], uri[2])

  
@@ -2181,7 +2187,7 @@ 

          # when API is changed

  

          # force https

-         uri = urlparse.urlsplit(self.baseurl)

+         uri = urllib.parse.urlsplit(self.baseurl)

          if uri[0] != 'https':

              self.baseurl = 'https://%s%s' % (uri[1], uri[2])

  
@@ -2256,12 +2262,12 @@ 

              sinfo = self.sinfo.copy()

              sinfo['callnum'] = self.callnum

              self.callnum += 1

-             handler = "%s?%s" % (self.baseurl, urllib.urlencode(sinfo))

+             handler = "%s?%s" % (self.baseurl, urllib.parse.urlencode(sinfo))

          elif name == 'sslLogin':

              handler = self.baseurl + '/ssllogin'

          else:

              handler = self.baseurl

-         request = dumps(args, name, allow_none=1)

+         request = xmlrpc_client.dumps(args, name, allow_none=1)

          headers = [

              # connection class handles Host

              ('User-Agent', 'koji/1.7'),  #XXX
@@ -2275,7 +2281,7 @@ 

          for i in (0, 1):

              try:

                  return self._sendOneCall(handler, headers, request)

-             except Exception, e:

+             except Exception as e:

                  if i or not is_conn_error(e):

                      raise

                  self.logger.debug("Connection Error: %s", e)
@@ -2305,13 +2311,14 @@ 

          if timeout:

              callopts['timeout'] = timeout

          if self.opts.get('debug_xmlrpc', False):

-             print("url: %s" % handler)

+             print(("url: %s" % handler))

              for _key in callopts:

                  _val = callopts[_key]

                  if _key == 'data' and len(_val) > 1024:

                      _val = _val[:1024] + '...'

-                 print("%s: %r" % (_key, _val))

+                 print(("%s: %r" % (_key, _val)))

          catcher = None

+         import warnings

          if hasattr(warnings, 'catch_warnings'):

              # TODO: convert to a with statement when we drop 2.4.3 support

              catcher = warnings.catch_warnings()
@@ -2331,10 +2338,10 @@ 

          return ret

  

      def _read_xmlrpc_response(self, response):

-         p, u = xmlrpclib.getparser()

+         p, u = six.moves.xmlrpc_client.getparser()

          for chunk in response.iter_content(8192):

              if self.opts.get('debug_xmlrpc', False):

-                 print("body: %r" % chunk)

+                 print(("body: %r" % chunk))

              p.feed(chunk)

          p.close()

          result = u.close()
@@ -2368,7 +2375,7 @@ 

                  # note that, for logged-in sessions the server should tell us (via a RetryError fault)

                  # if the call cannot be retried. For non-logged-in sessions, all calls should be read-only

                  # and hence retryable.

-                 except Fault, fault:

+                 except xmlrpc_client.Fault as fault:

                      #try to convert the fault to a known exception

                      err = convertFault(fault)

                      if isinstance(err, ServerOffline):
@@ -2384,14 +2391,10 @@ 

                  except (SystemExit, KeyboardInterrupt):

                      #(depending on the python version, these may or may not be subclasses of Exception)

                      raise

-                 except Exception, e:

+                 except Exception as e:

                      tb_str = ''.join(traceback.format_exception(*sys.exc_info()))

                      self.new_session()

  

-                     if is_cert_error(e) or is_requests_cert_error(e):

-                         # There's no point in retrying for this

-                         raise

- 

                      if not self.logged_in:

                          #in the past, non-logged-in sessions did not retry. For compatibility purposes

                          #this behavior is governed by the anon_retry opt.
@@ -2438,7 +2441,7 @@ 

              #check for faults and raise first one

              for entry in ret:

                  if isinstance(entry, dict):

-                     fault = Fault(entry['faultCode'], entry['faultString'])

+                     fault = xmlrpc_client.Fault(entry['faultCode'], entry['faultString'])

                      err = convertFault(fault)

                      raise err

          return ret
@@ -2457,7 +2460,7 @@ 

          if name is None:

              name = os.path.basename(localfile)

          self.logger.debug("Fast upload: %s to %s/%s", localfile, path, name)

-         fo = file(localfile, 'rb')

+         fo = open(localfile, 'rb')

          ofs = 0

          size = os.path.getsize(localfile)

          start = time.time()
@@ -2520,7 +2523,7 @@ 

              args['overwrite'] = "1"

          size = len(chunk)

          self.callnum += 1

-         handler = "%s?%s" % (self.baseurl, urllib.urlencode(args))

+         handler = "%s?%s" % (self.baseurl, urllib.parse.urlencode(args))

          headers = [

              ('User-Agent', 'koji/1.7'),  #XXX

              ("Content-Type", "application/octet-stream"),
@@ -2554,7 +2557,7 @@ 

          start = time.time()

          # XXX - stick in a config or something

          retries = 3

-         fo = file(localfile, "r")  #specify bufsize?

+         fo = open(localfile, "r")  #specify bufsize?

          totalsize = os.path.getsize(localfile)

          ofs = 0

          md5sum = md5_constructor()
@@ -2648,7 +2651,7 @@ 

              values = []

              data = {}

              record.message = record.getMessage()

-             for key, value in self.mapping.iteritems():

+             for key, value in self.mapping.items():

                  value = str(value)

                  if value.find("%(asctime)") >= 0:

                      if self.formatter:
@@ -2851,7 +2854,7 @@ 

      if not value:

          return ''

  

-     if isinstance(value, unicode):

+     if isinstance(value, six.text_type):

          # value is already unicode, so just convert it

          # to a utf8-encoded str

          return value.encode('utf8')
@@ -2881,7 +2884,7 @@ 

              k = fixEncodingRecurse(k)

              ret[k] = v

          return ret

-     elif isinstance(value, unicode):

+     elif isinstance(value, six.text_type):

          return value.encode('utf8')

      elif isinstance(value, str):

          # value is a str, but may be encoded in utf8 or some
@@ -2889,7 +2892,7 @@ 

          # decode it using the fallback encoding.

          try:

              return value.decode('utf8').encode('utf8')

-         except UnicodeDecodeError, err:

+         except UnicodeDecodeError as err:

              return value.decode(fallback).encode('utf8')

      else:

          return value

file modified
+19 -14
@@ -19,6 +19,8 @@ 

  #       Mike McLean <mikem@redhat.com>

  #       Mike Bonnet <mikeb@redhat.com>

  

+ from __future__ import absolute_import

+ from __future__ import print_function

  import socket

  import string

  import random
@@ -26,7 +28,10 @@ 

  import krbV

  import koji

  import cgi      #for parse_qs

- from context import context

+ from .context import context

+ import six

+ from six.moves import range

+ from six.moves import zip

  

  # 1 - load session if provided

  #       - check uri for session id
@@ -74,9 +79,9 @@ 

              args = cgi.parse_qs(args, strict_parsing=True)

          hostip = self.get_remote_ip(override=hostip)

          try:

-             id = long(args['session-id'][0])

+             id = int(args['session-id'][0])

              key = args['session-key'][0]

-         except KeyError, field:

+         except KeyError as field:

              raise koji.AuthError('%s not specified in session args' % field)

          try:

              callnum = args['callnum'][0]
@@ -96,7 +101,7 @@ 

              'EXTRACT(EPOCH FROM update_time)': 'update_ts',

              'user_id': 'user_id',

              }

-         fields, aliases = zip(*fields.items())

+         fields, aliases = list(zip(*list(fields.items())))

          q = """

          SELECT %s FROM sessions

          WHERE id = %%(id)i
@@ -108,7 +113,7 @@ 

          row = c.fetchone()

          if not row:

              raise koji.AuthError('Invalid session or bad credentials')

-         session_data = dict(zip(aliases, row))

+         session_data = dict(list(zip(aliases, row)))

          #check for expiration

          if session_data['expired']:

              raise koji.AuthExpired('session "%i" has expired' % id)
@@ -146,7 +151,7 @@ 

          fields = ('name', 'status', 'usertype')

          q = """SELECT %s FROM users WHERE id=%%(user_id)s""" % ','.join(fields)

          c.execute(q, session_data)

-         user_data = dict(zip(fields, c.fetchone()))

+         user_data = dict(list(zip(fields, c.fetchone())))

  

          if user_data['status'] != koji.USER_STATUS['NORMAL']:

              raise koji.AuthError('logins by %s are not allowed' % user_data['name'])
@@ -527,7 +532,7 @@ 

      def getPerms(self):

          if not self.logged_in:

              return []

-         return self.perms.keys()

+         return list(self.perms.keys())

  

      def hasPerm(self, name):

          if not self.logged_in:
@@ -686,7 +691,7 @@ 

      row = c.fetchone()

      if not row:

          return None

-     return dict(zip(fields, row))

+     return dict(list(zip(fields, row)))

  

  def login(*args, **opts):

      return context.session.login(*args, **opts)
@@ -717,19 +722,19 @@ 

  

  if __name__ == '__main__':

      # XXX - testing defaults

-     import db

+     from . import db

      db.setDBopts(database="test", user="test")

      print("Connecting to db")

      context.cnx = db.connect()

      print("starting session 1")

      sess = Session(None, hostip='127.0.0.1')

-     print("Session 1: %s" % sess)

+     print(("Session 1: %s" % sess))

      print("logging in with session 1")

      session_info = sess.login('host/1', 'foobar', {'hostip':'127.0.0.1'})

      #wrap values in lists

-     session_info = dict([[k, [v]] for k, v in session_info.iteritems()])

-     print("Session 1: %s" % sess)

-     print("Session 1 info: %r" % session_info)

+     session_info = dict([[k, [v]] for k, v in six.iteritems(session_info)])

+     print(("Session 1: %s" % sess))

+     print(("Session 1 info: %r" % session_info))

      print("Creating session 2")

      s2 = Session(session_info, '127.0.0.1')

-     print("Session 2: %s " % s2)

+     print(("Session 2: %s " % s2))

file modified
+11 -9
@@ -7,13 +7,15 @@ 

  the bits that koji needs.

  """

  

- import httplib

+ from __future__ import absolute_import

+ import six.moves.http_client

  import urlparse

- import urllib

+ import six.moves.urllib.request, six.moves.urllib.parse, six.moves.urllib.error

  import sys

- import ssl.SSLCommon

+ from . import ssl.SSLCommon

+ import six

  try:

-     from ssl import ssl as pyssl

+     from .ssl import ssl as pyssl

  except ImportError:  # pragma: no cover

      pass

  
@@ -44,7 +46,7 @@ 

  

      def get_connection(self, uri, cert, verify, timeout):

          scheme = uri[0]

-         host, port = urllib.splitport(uri[1])

+         host, port = six.moves.urllib.parse.splitport(uri[1])

          key = (scheme, host, cert, verify, timeout)

          #if self.connection and self.opts.get('keepalive'):

          if self.connection:   # XXX honor keepalive
@@ -55,7 +57,7 @@ 

          # Otherwise we make a new one

          default_port = 80

          certs = {}

-         if isinstance(verify, basestring):

+         if isinstance(verify, six.string_types):

              certs['peer_ca_cert'] = verify

          if cert:

              certs['key_and_cert'] = cert
@@ -84,11 +86,11 @@ 

                  # no verify

                  ctx = pyssl._create_unverified_context()

                  cnxOpts['context'] = ctx

-             cnxClass = httplib.HTTPSConnection

+             cnxClass = six.moves.http_client.HTTPSConnection

              default_port = 443

          elif scheme == 'http':

              cnxOpts = {}

-             cnxClass = httplib.HTTPConnection

+             cnxClass = six.moves.http_client.HTTPConnection

          else:

              raise IOError("unsupported protocol: %s" % scheme)

  
@@ -123,7 +125,7 @@ 

  

      def raise_for_status(self):

          if self.response.status >= 400:

-             raise httplib.HTTPException("HTTP %s: %s" % (self.response.status,

+             raise six.moves.http_client.HTTPException("HTTP %s: %s" % (self.response.status,

                      self.response.reason))

  

  

file modified
+14 -10
@@ -24,7 +24,11 @@ 

  #    - request data

  #    - auth data

  

- import thread

+ from __future__ import absolute_import

+ from __future__ import print_function

+ from six.moves import _thread

+ import six

+ from six.moves import range

  

  class _data(object):

      pass
@@ -35,7 +39,7 @@ 

  

      # should probably be getattribute, but easier to debug this way

      def __getattr__(self, key):

-         id = thread.get_ident()

+         id = _thread.get_ident()

          tdict = object.__getattribute__(self, '_tdict')

          if id not in tdict:

              raise AttributeError(key)
@@ -43,7 +47,7 @@ 

          return object.__getattribute__(data, key)

  

      def __setattr__(self, key, value):

-         id = thread.get_ident()

+         id = _thread.get_ident()

          tdict = object.__getattribute__(self, '_tdict')

          if id not in tdict:

              tdict[id] = _data()
@@ -51,7 +55,7 @@ 

          return object.__setattr__(data, key, value)

  

      def __delattr__(self, key):

-         id = thread.get_ident()

+         id = _thread.get_ident()

          tdict = object.__getattribute__(self, '_tdict')

          if id not in tdict:

              raise AttributeError(key)
@@ -62,14 +66,14 @@ 

          return ret

  

      def __str__(self):

-         id = thread.get_ident()

+         id = _thread.get_ident()

          tdict = object.__getattribute__(self, '_tdict')

          return "(current thread: %s) {" % id  + \

-             ", ".join(["%s : %s" %(k, v.__dict__) for (k, v) in tdict.iteritems()]) + \

+             ", ".join(["%s : %s" %(k, v.__dict__) for (k, v) in six.iteritems(tdict)]) + \

              "}"

  

      def _threadclear(self):

-         id = thread.get_ident()

+         id = _thread.get_ident()

          tdict = object.__getattribute__(self, '_tdict')

          if id not in tdict:

              return
@@ -97,8 +101,8 @@ 

          context._threadclear()

          print(context)

  

-     for x in xrange(1, 10):

-         thread.start_new_thread(test, ())

+     for x in range(1, 10):

+         _thread.start_new_thread(test, ())

  

      time.sleep(4)

      print('')
@@ -106,7 +110,7 @@ 

  

      context.foo = 1

      context.bar = 2

-     print(context.foo, context.bar)

+     print((context.foo, context.bar))

      print(context)

      context._threadclear()

      print(context)

file modified
+26 -22
@@ -20,6 +20,8 @@ 

  #       Mike McLean <mikem@redhat.com>

  #       Mike Bonnet <mikeb@redhat.com>

  

+ from __future__ import absolute_import

+ from __future__ import print_function

  import koji

  import koji.tasks

  from koji.tasks import safe_rmtree
@@ -34,7 +36,9 @@ 

  import sys

  import traceback

  import errno

- import xmlrpclib

+ from six.moves import xmlrpc_client

+ import six

+ from six.moves import range

  

  

  def incremental_upload(session, fname, fd, path, retries=5, logger=None):
@@ -111,7 +115,7 @@ 

              flags = os.O_CREAT | os.O_WRONLY

              if append:

                  flags |= os.O_APPEND

-             fd = os.open(outfile, flags, 0666)

+             fd = os.open(outfile, flags, 0o666)

              os.dup2(fd, 1)

              if logerror:

                  os.dup2(fd, 2)
@@ -142,13 +146,13 @@ 

  

              if not outfd:

                  try:

-                     outfd = file(outfile, 'r')

+                     outfd = open(outfile, 'r')

                  except IOError:

                      # will happen if the forked process has not created the logfile yet

                      continue

                  except:

-                     print('Error reading log file: %s' % outfile)

-                     print(''.join(traceback.format_exception(*sys.exc_info())))

+                     print(('Error reading log file: %s' % outfile))

+                     print((''.join(traceback.format_exception(*sys.exc_info()))))

  

              incremental_upload(session, remotename, outfd, uploadpath)

  
@@ -496,7 +500,7 @@ 

          """Attempt to shut down cleanly"""

          for task_id in self.pids.keys():

              self.cleanupTask(task_id)

-         self.session.host.freeTasks(self.tasks.keys())

+         self.session.host.freeTasks(list(self.tasks.keys()))

          self.session.host.updateHost(task_load=0.0, ready=False)

  

      def updateBuildroots(self, nolocal=False):
@@ -527,14 +531,14 @@ 

                  #task not running - expire the buildroot

                  #TODO - consider recycling hooks here (with strong sanity checks)

                  self.logger.info("Expiring buildroot: %(id)i/%(tag_name)s/%(arch)s" % br)

-                 self.logger.debug("Buildroot task: %r, Current tasks: %r" % (task_id, self.tasks.keys()))

+                 self.logger.debug("Buildroot task: %r, Current tasks: %r" % (task_id, list(self.tasks.keys())))

                  self.session.host.setBuildRootState(id, st_expired)

                  continue

          if nolocal:

              return

          local_br = self._scanLocalBuildroots()

          # get info on local_only buildroots (most likely expired)

-         local_only = [id for id in local_br.iterkeys() if id not in db_br]

+         local_only = [id for id in six.iterkeys(local_br) if id not in db_br]

          if local_only:

              missed_br = self.session.listBuildroots(buildrootID=tuple(local_only))

              #get all the task info in one call
@@ -578,7 +582,7 @@ 

                      rootdir = "%s/root" % topdir

                      try:

                          st = os.lstat(rootdir)

-                     except OSError, e:

+                     except OSError as e:

                          if e.errno == errno.ENOENT:

                              rootdir = None

                          else:
@@ -599,13 +603,13 @@ 

                      #also remove the config

                      try:

                          os.unlink(data['cfg'])

-                     except OSError, e:

+                     except OSError as e:

                          self.logger.warn("%s: can't remove config: %s" % (desc, e))

                  elif age > 120:

                      if rootdir:

                          try:

                              flist = os.listdir(rootdir)

-                         except OSError, e:

+                         except OSError as e:

                              self.logger.warn("%s: can't list rootdir: %s" % (desc, e))

                              continue

                          if flist:
@@ -632,10 +636,10 @@ 

              fn = "%s/%s" % (configdir, f)

              if not os.path.isfile(fn):

                  continue

-             fo = file(fn, 'r')

+             fo = open(fn, 'r')

              id = None

              name = None

-             for n in xrange(10):

+             for n in range(10):

                  # data should be in first few lines

                  line = fo.readline()

                  if line.startswith('# Koji buildroot id:'):
@@ -766,7 +770,7 @@ 

              # Note: we may still take an assigned task below

          #sort available capacities for each of our bins

          avail = {}

-         for bin in bins.iterkeys():

+         for bin in six.iterkeys(bins):

              avail[bin] = [host['capacity'] - host['task_load'] for host in bin_hosts[bin]]

              avail[bin].sort()

              avail[bin].reverse()
@@ -833,7 +837,7 @@ 

          prefix = "Task %i (pid %i)" % (task_id, pid)

          try:

              (childpid, status) = os.waitpid(pid, os.WNOHANG)

-         except OSError, e:

+         except OSError as e:

              #check errno

              if e.errno != errno.ECHILD:

                  #should not happen
@@ -874,7 +878,7 @@ 

  

              try:

                  os.kill(pid, sig)

-             except OSError, e:

+             except OSError as e:

                  # process probably went away, we'll find out on the next iteration

                  self.logger.info('Error sending signal %i to %s (pid %i, taskID %i): %s' %

                                   (sig, execname, pid, task_id, e))
@@ -898,14 +902,14 @@ 

              proc_path = '/proc/%i/stat' % pid

              if not os.path.isfile(proc_path):

                  return None

-             proc_file = file(proc_path)

+             proc_file = open(proc_path)

              procstats = [not field.isdigit() and field or int(field) for field in proc_file.read().split()]

              proc_file.close()

  

              cmd_path = '/proc/%i/cmdline' % pid

              if not os.path.isfile(cmd_path):

                  return None

-             cmd_file = file(cmd_path)

+             cmd_file = open(cmd_path)

              procstats[1] = cmd_file.read().replace('\0', ' ').strip()

              cmd_file.close()

              if not procstats[1]:
@@ -1159,12 +1163,12 @@ 

          try:

              response = (handler.run(),)

              # note that we wrap response in a singleton tuple

-             response = xmlrpclib.dumps(response, methodresponse=1, allow_none=1)

+             response = xmlrpc_client.dumps(response, methodresponse=1, allow_none=1)

              self.logger.info("RESPONSE: %r" % response)

              self.session.host.closeTask(handler.id, response)

              return

-         except xmlrpclib.Fault, fault:

-             response = xmlrpclib.dumps(fault)

+         except xmlrpc_client.Fault as fault:

+             response = xmlrpc_client.dumps(fault)

              tb = ''.join(traceback.format_exception(*sys.exc_info())).replace(r"\n", "\n")

              self.logger.warn("FAULT:\n%s" % tb)

          except (SystemExit, koji.tasks.ServerExit, KeyboardInterrupt):
@@ -1183,7 +1187,7 @@ 

              if issubclass(e_class, koji.GenericError):

                  #just pass it through

                  tb = str(e)

-             response = xmlrpclib.dumps(xmlrpclib.Fault(faultCode, tb))

+             response = xmlrpc_client.dumps(xmlrpc_client.Fault(faultCode, tb))

  

          # if we get here, then we're handling an exception, so fail the task

          self.session.host.failTask(handler.id, response)

file modified
+3 -1
@@ -21,6 +21,8 @@ 

  #       Mike McLean <mikem@redhat.com>

  

  

+ from __future__ import absolute_import

+ from __future__ import print_function

  import logging

  import sys

  import psycopg2
@@ -33,7 +35,7 @@ 

  # del psycopg2.extensions.string_types[1266]

  import time

  import traceback

- import context

+ from . import context

  import re

  

  POSITIONAL_RE = re.compile(r'%[a-z]')

file modified
+3 -1
@@ -19,11 +19,13 @@ 

  #       Mike McLean <mikem@redhat.com>

  #       Mike Bonnet <mikeb@redhat.com>

  

+ from __future__ import absolute_import

  import imp

  import koji

  import logging

  import sys

  import traceback

+ import six

  

  # the available callback hooks and a list

  # of functions to be called for each event
@@ -85,7 +87,7 @@ 

          return self.plugins.get(name)

  

      def pathlist(self, path):

-         if isinstance(path, basestring):

+         if isinstance(path, six.string_types):

              return [path]

          else:

              return path

file modified
+4 -2
@@ -17,8 +17,10 @@ 

  # Authors:

  #       Mike McLean <mikem@redhat.com>

  

+ from __future__ import absolute_import

  import fnmatch

  import koji

+ import six

  

  

  class BaseSimpleTest(object):
@@ -289,7 +291,7 @@ 

                      index[name] = 1

          index = {}

          _recurse(self.ruleset, index)

-         return index.keys()

+         return list(index.keys())

  

      def _apply(self, rules, data, top=False):

          for tests, negate, action in rules:
@@ -352,7 +354,7 @@ 

          namespace = (namespace,)

      ret = {}

      for ns in namespace:

-         for key, value in ns.iteritems():

+         for key, value in six.iteritems(ns):

              if value is BaseSimpleTest:

                  # skip this abstract base class if we encounter it

                  # this module contains generic tests, so it is valid to include it

file modified
+1
@@ -19,6 +19,7 @@ 

  # Authors:

  #       Mike McLean <mikem@redhat.com>

  

+ from __future__ import absolute_import

  import sys

  import traceback

  from koji.util import LazyDict

file modified
+9 -7
@@ -14,10 +14,12 @@ 

  #

  # Copyright 2005 Dan Williams <dcbw@redhat.com> and Red Hat, Inc.

  

+ from __future__ import absolute_import

+ from __future__ import print_function

  import os, sys

  from OpenSSL import SSL

- import SSLConnection

- import httplib

+ from . import SSLConnection

+ from six.moves import http_client

  import socket

  

  def our_verify(connection, x509, errNum, errDepth, preverifyOK):
@@ -46,13 +48,13 @@ 

      return ctx

  

  

- class PlgHTTPSConnection(httplib.HTTPConnection):

+ class PlgHTTPSConnection(http_client.HTTPConnection):

      "This class allows communication via SSL."

  

-     response_class = httplib.HTTPResponse

+     response_class = http_client.HTTPResponse

  

      def __init__(self, host, port=None, ssl_context=None, strict=None, timeout=None):

-         httplib.HTTPConnection.__init__(self, host, port, strict)

+         http_client.HTTPConnection.__init__(self, host, port, strict)

          self.ssl_ctx = ssl_context

          self._timeout = timeout

  
@@ -67,10 +69,10 @@ 

                      self.sock.settimeout(self._timeout)

                  self.sock.connect(sa)

                  if self.debuglevel > 0:

-                     print("connect: (%s, %s) [ssl]" % (self.host, self.port))

+                     print(("connect: (%s, %s) [ssl]" % (self.host, self.port)))

              except socket.error:

                  if self.debuglevel > 0:

-                     print('connect fail:', (self.host, self.port))

+                     print(('connect fail:', (self.host, self.port)))

                  if self.sock:

                      self.sock.close()

                  self.sock = None

file modified
+35 -4
@@ -6,6 +6,7 @@ 

  # Modifications by Dan Williams <dcbw@redhat.com>

  

  

+ from __future__ import absolute_import

  from OpenSSL import SSL

  import time, socket, select

  
@@ -72,7 +73,10 @@ 

          socket object and don't actually close until its count is 0.

          """

          self.__dict__["close_refcount"] = self.__dict__["close_refcount"] + 1

-         return PlgFileObject(self, mode, bufsize)

+         try:

+             return PlgFileObject(self, mode, bufsize)

+         except TypeError:

+             return PlgFileObject(self, mode)

  

      def close(self):

          if self.__dict__["closed"]:
@@ -106,7 +110,7 @@ 

  

              try:

                  sent = con.send(data, flags)

-             except SSL.SysCallError, e:

+             except SSL.SysCallError as e:

                  if e[0] == 32:      # Broken Pipe

                      self.close()

                      sent = 0
@@ -142,13 +146,40 @@ 

                  return None

              except SSL.WantReadError:

                  time.sleep(0.2)

-             except SSL.SysCallError, e:

+             except SSL.SysCallError as e:

                  if e.args == (-1, 'Unexpected EOF'):

                      break

                  raise

          return None

  

- class PlgFileObject(socket._fileobject):

+ 

+ try:

+     # Python 3.x

+     BaseFileObject = socket.SocketIO

+ except AttributeError:

+     # Python 2.x

+     BaseFileObject = socket._fileobject

+ 

+ class PlgFileObject(BaseFileObject):

+     def readinto(self, b):

+         """

+         Python 3.x HTTP client calls `SocketIO.readline()` which in

+         turn results in calling `PlgFileObject.readinto()` and that

+         one fails with `SSL.WantReadError` exception more often than

+         not. Therefore we override it and run the parent implementation

+         in a loop. We also ignore spurious exceptions reporting

+         unexpected EOF.

+         """

+         while True:

+             try:

+                 select.select([self.fileno()], [], [])

+                 return super(PlgFileObject, self).readinto(b)

+             except SSL.WantReadError:

+                 pass

+             except SSL.SysCallError as e:

+                 if e.args == (-1, 'Unexpected EOF'):

+                     return 0

+ 

      def close(self):

          """

          socket._fileobject doesn't actually _close_ the socket,

file modified
+1
@@ -1,6 +1,7 @@ 

  # identify this as the ssl module

  

  # our own ssl submodule masks python's in the main lib, so we import this here

+ from __future__ import absolute_import

  try:

      import ssl      # python's ssl module

  except ImportError:  # pragma: no cover

file modified
+9 -7
@@ -20,17 +20,19 @@ 

  #       Mike McLean <mikem@redhat.com>

  #       Mike Bonnet <mikeb@redhat.com>

  

+ from __future__ import absolute_import

  import koji

  import koji.util

  import os

  import logging

- import xmlrpclib

+ from six.moves import xmlrpc_client

  import signal

- import urllib2

+ import six.moves.urllib.request, six.moves.urllib.error, six.moves.urllib.parse

  import shutil

  import random

  import time

  import pprint

+ from six.moves import range

  

  def scan_mounts(topdir):

      """Search path for mountpoints"""
@@ -234,7 +236,7 @@ 

                                  continue

                              try:

                                  self.session.getTaskResult(task)

-                             except (koji.GenericError, xmlrpclib.Fault), task_error:

+                             except (koji.GenericError, xmlrpc_client.Fault) as task_error:

                                  self.logger.info("task %s failed or was canceled" % task)

                                  failed = True

                                  break
@@ -305,10 +307,10 @@ 

                  return fn

              self.logger.debug("Downloading %s", relpath)

              url = "%s/%s" % (self.options.topurl, relpath)

-             fsrc = urllib2.urlopen(url)

+             fsrc = six.moves.urllib.request.urlopen(url)

              if not os.path.exists(os.path.dirname(fn)):

                  os.makedirs(os.path.dirname(fn))

-             fdst = file(fn, 'w')

+             fdst = open(fn, 'w')

              shutil.copyfileobj(fsrc, fdst)

              fsrc.close()

              fdst.close()
@@ -398,7 +400,7 @@ 

  class ForkTask(BaseTaskHandler):

      Methods = ['fork']

      def handler(self, n=5, m=37):

-         for i in xrange(n):

+         for i in range(n):

              os.spawnvp(os.P_NOWAIT, 'sleep', ['sleep', str(m)])

  

  class WaitTestTask(BaseTaskHandler):
@@ -413,7 +415,7 @@ 

      _taskWeight = 0.1

      def handler(self, count, seconds=10):

          tasks = []

-         for i in xrange(count):

+         for i in range(count):

              task_id = self.subtask(method='sleep', arglist=[seconds], label=str(i), parent=self.id)

              tasks.append(task_id)

          bad_task = self.subtask('sleep', ['BAD_ARG'], label='bad')

file modified
+16 -13
@@ -18,6 +18,7 @@ 

  #       Mike McLean <mikem@redhat.com>

  #       Mike Bonnet <mikeb@redhat.com>

  

+ from __future__ import absolute_import

  import calendar

  from fnmatch import fnmatch

  import koji
@@ -30,8 +31,10 @@ 

  import stat

  import sys

  import time

- import ConfigParser

+ from six.moves import configparser

  from zlib import adler32

+ import six

+ from six.moves import range

  

  # imported from kojiweb and kojihub

  try:
@@ -124,7 +127,7 @@ 

  

      If patterns is a string, it will be split() first

      """

-     if isinstance(patterns, basestring):

+     if isinstance(patterns, six.string_types):

          patterns = patterns.split()

      for pat in patterns:

          if fnmatch(s, pat):
@@ -154,7 +157,7 @@ 

          kwargs = {}

      try:

          return func(*args, **kwargs)

-     except TypeError, e:

+     except TypeError as e:

          if sys.exc_info()[2].tb_next is None:

              # The stack is only one high, so the error occurred in this function.

              # Therefore, we assume the TypeError is due to a parameter mismatch
@@ -238,11 +241,11 @@ 

          return [(key, lazy_eval(val)) for key, val in super(LazyDict, self).items()]

  

      def itervalues(self):

-         for val in super(LazyDict, self).itervalues():

+         for val in six.itervalues(super(LazyDict, self)):

              yield lazy_eval(val)

  

      def iteritems(self):

-         for key, val in super(LazyDict, self).iteritems():

+         for key, val in six.iteritems(super(LazyDict, self)):

              yield key, lazy_eval(val)

  

      def pop(self, key, *args, **kwargs):
@@ -457,19 +460,19 @@ 

          logger.warn('Setting resource limit: %s = %r', key, limits)

          try:

              resource.setrlimit(rcode, tuple(limits))

-         except ValueError, e:

+         except ValueError as e:

              logger.error("Unable to set %s: %s", key, e)

  

  class adler32_constructor(object):

  

      #mimicing the hashlib constructors

-     def __init__(self, arg=''):

-         self._value = adler32(arg) & 0xffffffffL

+     def __init__(self, arg=b''):

+         self._value = adler32(arg) & 0xffffffff

          #the bitwise and works around a bug in some versions of python

          #see: https://bugs.python.org/issue1202

  

      def update(self, arg):

-         self._value = adler32(arg, self._value) & 0xffffffffL

+         self._value = adler32(arg, self._value) & 0xffffffff

  

      def digest(self):

          return self._value
@@ -496,11 +499,11 @@ 

      parts = parts.copy()

      result = []

      while True:

-         level = set([name for name, deps in parts.iteritems() if not deps])

+         level = set([name for name, deps in six.iteritems(parts) if not deps])

          if not level:

              break

          result.append(level)

-         parts = dict([(name, deps - level) for name, deps in parts.iteritems()

+         parts = dict([(name, deps - level) for name, deps in six.iteritems(parts)

                        if name not in level])

      if parts:

          raise ValueError('total ordering not possible')
@@ -586,9 +589,9 @@ 

      """

      if not isinstance(confs, (list, tuple)):

          confs = [confs]

-     config = ConfigParser.ConfigParser()

+     config = configparser.ConfigParser()

      for conf in confs:

-         conf_fd = file(conf)

+         conf_fd = open(conf)

          config.readfp(conf_fd)

          conf_fd.close()

      builds = {}

Fix a number of Python 3.x issues. With this patch and the respective
patches in other packages, I can successfully run at least some koji
commands using both Python 2.x and Python 3.x.

$ python -V
Python 3.4.3

$ koji moshimoshi
Warning: Could not install krbV module. Kerberos support will be disabled.
hej, pavlix!

You are using the hub at http://koji.fedoraproject.org/kojihub
Authenticated via client certificate /home/pavlix/.fedora.cert

Related: https://pagure.io/koji/issue/151

Signed-off-by: Pavel Šimerda pavlix@pavlix.net

rebased

7 years ago

rebased

7 years ago

rebased

7 years ago

Just updated the patchset using the six module. Is there anything else I can do for you? Part of the work can probably also be done using modernize, so the remaining manual changes would become a smaller patch. Or the whole changeset can be applied at once.

What we need to check is whether I managed to fix all occurrences of the renamed modules, but otherwise the change should be good.

I took a quick look and the changes look good to me. I'd recommend eventually updating the %s formatting to use {}'s as well, but that can always be done later.
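
For illustration, a minimal sketch of what that change would look like, using one of the log messages touched by the patch as the example string (not an authoritative rewrite of any particular call site):

    # A minimal sketch of the suggested change (illustrative only, not a
    # rewrite of any specific call site in koji).
    task = 12345

    # current style, old-style % formatting:
    msg_old = "task %s failed or was canceled" % task

    # suggested style, str.format() with {} placeholders:
    msg_new = "task {} failed or was canceled".format(task)

    assert msg_old == msg_new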

As mentioned in the issue you filed (#151), the p3 port is on our radar. There is some partial work here:

https://github.com/mikem23/koji-playground/tree/py3

I feel like using python-six and python-modernize is the way to go. However, there are a number of snags:

  1. we can't yet drop rhel5 support (2.4.3 means no python-six)
  2. ssl auth
  3. krb auth
  4. some other assorted libs we use

Altogether, this means there is very little I can accept right this moment. Odds are we'll have to do something about snag no. 1 before we can even consider taking changes along this line.

Also, this patch currently breaks a few unit tests.

@mikem I know it was on your radar, but I decided to make it happen so I can already use a patched version myself. As far as I can see, the rhel5/python-2.4.3 requirement is quite limiting, since that interpreter cannot even parse the syntax shared by 3.x and the latest 2.x versions.

  • I was going to use python-modernize and rebase the patch on top of modernized code to split out changes that need to be done manually. But I'm not sure it's worth doing if you're not merging the changes anyway due to Python 2.4.3 compatibility.
  • Let's see what issues we get when using the modified code. So far I have checked basic koji and fedpkg operation. The libraries and unit tests shouldn't be too bad either, IMO.

I will be happy with anything that makes it easier for me to keep the patch working. I was already forced to modify it due to new changes in the upstream code, and that is really hard as long as you need to keep 2.4.3 compatibility. What is the rationale, by the way? Couldn't rhel5 just stick with whatever version it's using? Tools like the fedpkg/rpkg/rhpkg upstreams don't seem to keep 2.4.3 compatibility either.

@mikem Don't you have the ability to use python26 from EPEL for EL5? That should enable usage of python-six.

@mikem Don't you have the ability to use python26 from EPEL for EL5? That should enable usage of python-six.

Not only python-six. Python 2.4.3 cannot even parse the syntax that is required by Python 3.x, so you do need Python 2.6 or 2.7 to actually use Python 3.x friendly code.
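
To make that concrete, a small illustrative snippet (not taken from koji): the constructs below are exactly the forms Python 3.x needs, and all of them were only introduced in Python 2.6, so Python 2.4.3 rejects them with a SyntaxError before any compatibility layer such as six can even load:

    # Illustrative only: valid on Python 2.6+ and 3.x, a SyntaxError on 2.4.3.
    from __future__ import print_function   # print() as a function needs 2.6+

    data = b''                               # bytes literals appeared in 2.6

    try:
        raise ValueError("example")
    except ValueError as e:                  # "except ... as" appeared in 2.6;
        print("caught:", e)                  # the old "except ValueError, e"
                                             # form is a SyntaxError on 3.x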

rebased

7 years ago

9 new commits added

  • python3: fix up binary/text string check
  • python3: extend SSL code to support Python 3.x
  • python3: pass an empty bytestring to adler32
  • python3: fix up composite type usage after modernize
  • python3: fix up umask syntax after modernize
  • python3: fix up imports after modernize
  • python3: fix Makefile python checks
  • trivial: use ImportError directly
  • python3: modernize all client code
7 years ago

9 new commits added

  • python3: extend SSL code to support Python 3.x
  • python3: fix up binary/text string check
  • python3: pass an empty bytestring to adler32
  • python3: fix up composite type usage after modernize
  • python3: fix up umask syntax after modernize
  • python3: fix up imports after modernize
  • python3: fix Makefile python checks
  • trivial: use ImportError directly
  • python3: modernize all client code
7 years ago

I started using modernize for the client code to simplify the manually created patchset. As modernize is not perfect, I had to manually fix up the results and then make other manual changes. I carefully structured the changes into multiple commits.

Some of the commits could already be adapted to the current code base but most of them are based on the results of modernize.
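
As a rough illustration of the split between what came out of modernize and what needed a manual follow-up commit (a sketch of typical cases from this patchset, not an exhaustive list):

    # Sketch of the kind of change involved (illustrative, not the full diff).
    # Mechanical renames such as:
    #     import ConfigParser      ->  from six.moves import configparser
    #     for i in xrange(n): ...  ->  for i in range(n): ...
    # come from modernize, while e.g. the adler32 numeric literal was ported
    # in its own commit, since the long suffix is a SyntaxError on Python 3:
    from zlib import adler32

    value = adler32(b'') & 0xffffffff   # was: adler32('') & 0xffffffffL
    print(value)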

rebased

7 years ago

rebased

7 years ago

rebased

7 years ago

@pingou I'm surprised that the comments tab no longer shows the current list of commits. The last one is from 19 days ago and doesn't reflect the current patchset.

@pavlix, I'm seeing 11 commits from 2 days ago. Do you expect something else?

Sometimes it can be just a dict, not a module. That case needs to be handled as well, since it gives me the following error:

File "***/koji/koji/init.py", line 1927, in krb_login
raise builtins.ImportError("Please install python-krbV to use kerberos.")
AttributeError: 'dict' object has no attribute 'ImportError'
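
For reference, a minimal sketch of the direction the later "use six.moves.builtins instead of builtins" commit takes: going through six.moves always yields the real builtins module (__builtin__ on Python 2, builtins on Python 3) instead of whatever a local builtins name happens to be bound to. The function below is a hypothetical stub, not the actual krb_login code:

    # Minimal sketch, not the actual koji code.
    from six.moves import builtins

    def krb_login_stub():
        # stand-in for the real krb_login(); only the raise is illustrated
        raise builtins.ImportError("Please install python-krbV to use kerberos.")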

importlib.find_module is not available in python 2

I'm not able to run tests correctly on python 2.7 now. I've made inline comments to koji/__init__.py

@pavlix, I'm seeing 11 commits from 2 days ago. Do you expect something else?

@pingou I'm talking about that “9 new commits added” message.

11 new commits added

  • python3: fix composite type usage after modernize
  • python3: avoid deprecated imp module warning
  • python3: fix imports after modernize
  • python3: modernize all client code
  • python3: port umask octal numerals
  • python3: port binary/text string checks
  • python3: port adler32 numeric literals
  • python3: use binary string for adler32 input
  • python3: port SSLConnection code
  • python3: port Makefile python checks
  • python3: avoid using exceptions module
7 years ago

I'm not able to run tests correctly on python 2.7 now. I've made inline comments to koji/__init__.py

I don't have the chance to run the tests right now. I updated the patch to use six.moves for find_module() and load_module().

@pavlix, I'm seeing 11 commits from 2 days ago. Do you expect something else?

@pingou I'm talking about that “9 new commits added” message.

Did you push 9 commits in addition to 2 commits that were there before?

It looks like the notification picked up all 11 commits fine this time.

@pingou I'm talking about that “9 new commits added” message.

Did you push 9 commits in addition to 2 commits that were there before?

The commit messages differed as well. All messages had the python3: prefix at the time.

It looks like the notification picked up all 11 commits fine this time.

Yep. Looks like everything has been settled.

@pavlix Can you rebase against the current master branch?

@pavlix Can you rebase against the current master branch?

@ngompa Done. I had somehow relied on plain git fetch, but git fetch origin was needed.

rebased

7 years ago

@pavlix What's still left in terms of making Koji Python 3 compatible?

@pavlix What's still left in terms of making Koji Python 3 compatible?

1) My changes only affect the client and the library.

2) Apart from modernize, I only made the changes needed for the commands I actually run, like koji moshimoshi or fedpkg build --scratch --srpm. I cannot currently run the tests on my system due to some missing dependencies.

3) I'm still getting the following warning:

/usr/lib64/python3.4/site-packages/koji/ssl/SSLConnection.py:112: DeprecationWarning: str for buf is no longer accepted, use bytes
  sent = con.send(data, flags)
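
For what it's worth, a hedged sketch of one way to address that warning at the send() call site, assuming the buffer may arrive as either text or bytes (the eventual SSLConnection.py fix may well look different):

    # Minimal sketch, assuming 'data' may be str or bytes; not the actual fix.
    def to_bytes(data, encoding='utf-8'):
        """Return data as bytes so con.send() no longer warns about str buffers."""
        if isinstance(data, bytes):
            return data
        return data.encode(encoding)

    # usage at the call site would be roughly:
    #     sent = con.send(to_bytes(data), flags)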

@mikem, @mikeb, @ralph: Can any of you take a look at this to see where it stands for merging?

@mikem, @mikeb, @ralph: Can any of you take a look at this to see where it stands for merging?

Any attempt to write code for both Python 2.x and Python 3.x basically ends up requiring at least Python 2.6. So the merge is effectively blocked until the requirement to support Python 2.4.3 is lifted.

rebased

7 years ago

12 new commits added

  • use six.moves.builtins instead of builtins
  • avoid specfile warnings
  • python3: fix composite type usage after modernize
  • python3: fix imports after modernize
  • python3: modernize all client code
  • python3: port umask octal numerals
  • python3: port binary/text string checks
  • python3: port adler32 numeric literals
  • python3: use binary string for adler32 input
  • python3: port SSLConnection code
  • python3: port Makefile python checks
  • python3: avoid using exceptions module
7 years ago

The latest update works correctly against Fedora infrastructure with Kerberos on Python 3.x and requests-kerberos, as long as you use one of the workarounds mentioned in #288.

rebased

7 years ago

rebased

7 years ago

We've merged PR #417, which obsoletes this one.

Pull-Request has been closed by tkopecek

6 years ago