#921 Py3 hub
Merged 5 years ago by mikem. Opened 5 years ago by tkopecek.
tkopecek/koji issue905a  into  master

file modified
-1
@@ -5,7 +5,6 @@ 

  omit =

      /usr/*

      tests/*

-     hub/*

      util/*

  

  [report]

file modified
+4 -4
@@ -80,10 +80,10 @@ 

  	coverage3 erase

  	PYTHONPATH=hub/.:plugins/hub/.:plugins/builder/.:plugins/cli/.:cli/. coverage3 run \

  	    --rcfile .coveragerc3 --source . \

- 	    /usr/bin/nosetests \

- 	    tests/test_lib tests/test_cli

- 	coverage3 report --rcfile .coveragerc3

- 	coverage3 html --rcfile .coveragerc3

+ 	    /usr/bin/nosetests-3 \

+ 	        tests/test_lib tests/test_cli tests/test_hub

+ 	coverage report --rcfile .coveragerc3

+ 	coverage html --rcfile .coveragerc3

  	@echo Full coverage report at file://${PWD}/htmlcov/index.html

  

  test-tarball:

file modified
+128 -110
@@ -42,13 +42,14 @@ 

  import tempfile

  import time

  import traceback

- import urlparse

  import six.moves.xmlrpc_client

  import zipfile

  

  import rpm

  import six

  

+ from six.moves.urllib.parse import parse_qs

+ 

  import koji

  import koji.auth

  import koji.db
@@ -2390,9 +2391,8 @@ 

      groupsdir = "%s/groups" % (repodir)

      koji.ensuredir(groupsdir)

      comps = koji.generate_comps(groups, expand_groups=True)

-     fo = open("%s/comps.xml" % groupsdir, 'w')

-     fo.write(comps)

-     fo.close()

+     with open("%s/comps.xml" % groupsdir, 'w') as fo:

+         fo.write(comps)

  

      #get build dirs

      relpathinfo = koji.PathInfo(topdir='toplink')
@@ -2505,9 +2505,8 @@ 

    </versioning>

  </metadata>

  """ % datetime.datetime.now().strftime('%Y%m%d%H%M%S')

-     mdfile = open(os.path.join(destdir, 'maven-metadata.xml'), 'w')

-     mdfile.write(contents)

-     mdfile.close()

+     with open(os.path.join(destdir, 'maven-metadata.xml'), 'w') as mdfile:

+         mdfile.write(contents)

      _generate_maven_metadata(destdir)

  

  def dist_repo_init(tag, keys, task_opts):
@@ -4352,14 +4351,16 @@ 

      result = []

      if not os.path.exists(zippath):

          return result

-     archive = zipfile.ZipFile(zippath, 'r')

-     for entry in archive.infolist():

-         filename = koji.fixEncoding(entry.filename)

-         result.append({'archive_id': archive_id,

-                        'name': filename,

-                        'size': entry.file_size,

-                        'mtime': int(time.mktime(entry.date_time + (0, 0, -1)))})

-     archive.close()

+     with zipfile.ZipFile(zippath, 'r') as archive:

+         for entry in archive.infolist():

+             if six.PY2:

+                 filename = koji.fixEncoding(entry.filename)

+             else:

+                 filename = entry.filename

+             result.append({'archive_id': archive_id,

+                            'name': filename,

+                            'size': entry.file_size,

+                            'mtime': int(time.mktime(entry.date_time + (0, 0, -1)))})

      return result

  

  def _get_tarball_list(archive_id, tarpath):
@@ -4378,17 +4379,19 @@ 

      result = []

      if not os.path.exists(tarpath):

          return result

-     archive = tarfile.open(tarpath, 'r')

-     for entry in archive:

-         filename = koji.fixEncoding(entry.name)

-         result.append({'archive_id': archive_id,

-                        'name': filename,

-                        'size': entry.size,

-                        'mtime': entry.mtime,

-                        'mode': entry.mode,

-                        'user': entry.uname,

-                        'group': entry.gname})

-     archive.close()

+     with tarfile.open(tarpath, 'r') as archive:

+         for entry in archive:

+             if six.PY2:

+                 filename = koji.fixEncoding(entry.name)

+             else:

+                 filename = entry.name

+             result.append({'archive_id': archive_id,

+                            'name': filename,

+                            'size': entry.size,

+                            'mtime': entry.mtime,

+                            'mode': entry.mode,

+                            'user': entry.uname,

+                            'group': entry.gname})

      return result

  

  def list_archive_files(archive_id, queryOpts=None, strict=False):
@@ -5515,9 +5518,8 @@ 

              path = os.path.join(workdir, directory, metadata)

              if not os.path.exists(path):

                  raise koji.GenericError("No such file: %s" % metadata)

-             fo = open(path, 'rb')

-             metadata = fo.read()

-             fo.close()

+             with open(path, 'rt') as fo:

+                 metadata = fo.read()

          self.raw_metadata = metadata

          self.metadata = parse_json(metadata, desc='metadata')

          return self.metadata
@@ -5657,11 +5659,8 @@ 

          builddir = koji.pathinfo.build(self.buildinfo)

          koji.ensuredir(builddir)

          path = os.path.join(builddir, 'metadata.json')

-         fo = open(path, 'w')

-         try:

+         with open(path, 'w') as fo:

              fo.write(self.raw_metadata)

-         finally:

-             fo.close()

  

  

      def prep_brs(self):
@@ -6156,16 +6155,16 @@ 

          raise koji.ImportError('SCM URLs for the task and build do not match: %s, %s' % \

                (task_info['request'][0], build_task_info['request'][0]))

      build_arches = set()

-     for rpm in list_rpms(buildID=build['id']):

-         if rpm['arch'] == 'src':

-             build_srpm = '%s.src.rpm' % rpm['nvr']

+     for rpminfo in list_rpms(buildID=build['id']):

+         if rpminfo['arch'] == 'src':

+             build_srpm = '%s.src.rpm' % rpminfo['nvr']

              if srpm != build_srpm:

                  raise koji.ImportError('task and build srpm names do not match: %s, %s' % \

                        (srpm, build_srpm))

-         elif rpm['arch'] == 'noarch':

+         elif rpminfo['arch'] == 'noarch':

              continue

          else:

-             build_arches.add(rpm['arch'])

+             build_arches.add(rpminfo['arch'])

      if not build_arches:

          raise koji.ImportError('no arch-specific rpms found for %s' % build['nvr'])

      task_arches = set([t['arch'] for t in tasks.values()])
@@ -6359,7 +6358,10 @@ 

      archiveinfo = {'buildroot_id': buildroot_id}

      archiveinfo['build_id'] = buildinfo['id']

      if metadata_only:

-         filename = koji.fixEncoding(fileinfo['filename'])

+         if six.PY2:

+             filename = koji.fixEncoding(fileinfo['filename'])

+         else:

+             filename = fileinfo['filename']

          archiveinfo['filename'] = filename

          archiveinfo['size'] = fileinfo['filesize']

          archiveinfo['checksum'] = fileinfo['checksum']
@@ -6370,19 +6372,21 @@ 

          archiveinfo['checksum_type'] = koji.CHECKSUM_TYPES[fileinfo['checksum_type']]

          archiveinfo['metadata_only'] = True

      else:

-         filename = koji.fixEncoding(os.path.basename(filepath))

+         if six.PY2:

+             filename = koji.fixEncoding(os.path.basename(filepath))

+         else:

+             filename = os.path.basename(filepath)

          archiveinfo['filename'] = filename

          archiveinfo['size'] = os.path.getsize(filepath)

          # trust values computed on hub (CG_Importer.prep_outputs)

          if not fileinfo or not fileinfo.get('hub.checked_md5'):

-             archivefp = open(filepath)

-             m = md5_constructor()

-             while True:

-                 contents = archivefp.read(8192)

-                 if not contents:

-                     break

-                 m.update(contents)

-             archivefp.close()

+             with open(filepath, 'rb') as archivefp:

+                 m = md5_constructor()

+                 while True:

+                     contents = archivefp.read(8192)

+                     if not contents:

+                         break

+                     m.update(contents)

              archiveinfo['checksum'] = m.hexdigest()

          else:

              archiveinfo['checksum'] = fileinfo['checksum']
@@ -6492,8 +6496,10 @@ 

      A symlink pointing from the old location to the new location will

      be created.

      """

-     final_path = "%s/%s" % (destdir,

-                             koji.fixEncoding(os.path.basename(filepath)))

+     fname = os.path.basename(filepath)

+     if six.PY2:

+         fname = koji.fixEncoding(fname)

+     final_path = "%s/%s" % (destdir, fname)

      if os.path.exists(final_path):

          raise koji.GenericError("Error importing archive file, %s already exists" % final_path)

      if os.path.islink(filepath) or not os.path.isfile(filepath):
@@ -6515,16 +6521,14 @@ 

              sumfile = mavenfile + ext

              if sumfile not in mavenfiles:

                  sum = sum_constr()

-                 fobj = open('%s/%s' % (mavendir, mavenfile))

-                 while True:

-                     content = fobj.read(8192)

-                     if not content:

-                         break

-                     sum.update(content)

-                 fobj.close()

-                 sumobj = open('%s/%s' % (mavendir, sumfile), 'w')

-                 sumobj.write(sum.hexdigest())

-                 sumobj.close()

+                 with open('%s/%s' % (mavendir, mavenfile), 'rb') as fobj:

+                     while True:

+                         content = fobj.read(8192)

+                         if not content:

+                             break

+                         sum.update(content)

+                 with open('%s/%s' % (mavendir, sumfile), 'w') as sumobj:

+                     sumobj.write(sum.hexdigest())

  

  def add_rpm_sig(an_rpm, sighdr):

      """Store a signature header for an rpm"""
@@ -6578,9 +6582,8 @@ 

      # - write to fs

      sigpath = "%s/%s" % (builddir, koji.pathinfo.sighdr(rinfo, sigkey))

      koji.ensuredir(os.path.dirname(sigpath))

-     fo = open(sigpath, 'wb')

-     fo.write(sighdr)

-     fo.close()

+     with open(sigpath, 'wb') as fo:

+         fo.write(sighdr)

      koji.plugin.run_callbacks('postRPMSign', sigkey=sigkey, sighash=sighash, build=binfo, rpm=rinfo)

  

  def _scan_sighdr(sighdr, fn):
@@ -6631,9 +6634,8 @@ 

          koji.splice_rpm_sighdr(sighdr, rpm_path, temp)

          ts = rpm.TransactionSet()

          ts.setVSFlags(0)  #full verify

-         fo = open(temp, 'rb')

-         hdr = ts.hdrFromFdno(fo.fileno())

-         fo.close()

+         with open(temp, 'rb') as fo:

+             hdr = ts.hdrFromFdno(fo.fileno())

      except:

          try:

              os.unlink(temp)
@@ -6694,9 +6696,8 @@ 

          else:

              os.unlink(signedpath)

      sigpath = "%s/%s" % (builddir, koji.pathinfo.sighdr(rinfo, sigkey))

-     fo = open(sigpath, 'rb')

-     sighdr = fo.read()

-     fo.close()

+     with open(sigpath, 'rb') as fo:

+         sighdr = fo.read()

      koji.ensuredir(os.path.dirname(signedpath))

      koji.splice_rpm_sighdr(sighdr, rpm_path, signedpath)

  
@@ -7682,7 +7683,10 @@ 

      if value is None:

          return value

      try:

-         return koji.fixEncodingRecurse(json.loads(value))

+         if six.PY2:

+             return koji.fixEncodingRecurse(json.loads(value))

+         else:

+             return json.loads(value)

      except Exception:

          if errstr is None:

              if desc is None:
@@ -7720,8 +7724,7 @@ 

          if not self.data and not self.rawdata:

              return "-- incomplete update: no assigns"

          parts = ['INSERT INTO %s ' % self.table]

-         columns = to_list(self.data.keys())

-         columns.extend(to_list(self.rawdata.keys()))

+         columns = sorted(to_list(self.data.keys()) + to_list(self.rawdata.keys()))

          parts.append("(%s) " % ', '.join(columns))

          values = []

          for key in columns:
@@ -7807,10 +7810,10 @@ 

          parts = ['UPDATE %s SET ' % self.table]

          assigns = ["%s = %%(data.%s)s" % (key, key) for key in self.data]

          assigns.extend(["%s = (%s)" % (key, self.rawdata[key]) for key in self.rawdata])

-         parts.append(', '.join(assigns))

+         parts.append(', '.join(sorted(assigns)))

          if self.clauses:

              parts.append('\nWHERE ')

-             parts.append(' AND '.join(["( %s )" % c for c in self.clauses]))

+             parts.append(' AND '.join(["( %s )" % c for c in sorted(self.clauses)]))

          return ''.join(parts)

  

      def __repr__(self):
@@ -7883,12 +7886,23 @@ 

          if columns and aliases:

              if len(columns) != len(aliases):

                  raise Exception('column and alias lists must be the same length')

-             self.colsByAlias = dict(zip(aliases, columns))

+             # reorder

+             alias_table = sorted(zip(aliases, columns))

+             self.aliases = [x[0] for x in alias_table]

+             self.columns = [x[1] for x in alias_table]

+             self.colsByAlias = dict(alias_table)

          else:

              self.colsByAlias = {}

+             if columns:

+                 self.columns = sorted(columns)

+             if aliases:

+                 self.aliases = sorted(aliases)

          self.tables = tables

          self.joins = joins

-         self.clauses = clauses

+         if clauses:

+             self.clauses = sorted(clauses)

+         else:

+             self.clauses = clauses

          self.cursors = 0

          if values:

              self.values = values
@@ -7926,7 +7940,7 @@ 

                  col_str = 'count(*)'

          else:

              col_str = self._seqtostr(self.columns)

-         table_str = self._seqtostr(self.tables)

+         table_str = self._seqtostr(self.tables, sort=True)

          join_str = self._joinstr()

          clause_str = self._seqtostr(self.clauses, sep=')\n   AND (')

          if clause_str:
@@ -7947,8 +7961,10 @@ 

          return '<QueryProcessor: columns=%r, aliases=%r, tables=%r, joins=%r, clauses=%r, values=%r, opts=%r>' % \

                 (self.columns, self.aliases, self.tables, self.joins, self.clauses, self.values, self.opts)

  

-     def _seqtostr(self, seq, sep=', '):

+     def _seqtostr(self, seq, sep=', ', sort=False):

          if seq:

+             if sort:

+                 seq = sorted(seq)

              return sep.join(seq)

          else:

              return ''
@@ -9194,15 +9210,14 @@ 

          if not os.path.isfile(filePath):

              raise koji.GenericError('no file "%s" output by task %i' % (fileName, taskID))

          # Let the caller handler any IO or permission errors

-         f = open(filePath, 'r')

-         if isinstance(offset, str):

-             offset = int(offset)

-         if offset != None and offset > 0:

-             f.seek(offset, 0)

-         elif offset != None and offset < 0:

-             f.seek(offset, 2)

-         contents = f.read(size)

-         f.close()

+         with open(filePath, 'rb') as f:

+             if isinstance(offset, str):

+                 offset = int(offset)

+             if offset != None and offset > 0:

+                 f.seek(offset, 0)

+             elif offset != None and offset < 0:

+                 f.seek(offset, 2)

+             contents = f.read(size)

          return base64.encodestring(contents)

  

      listTaskOutput = staticmethod(list_task_output)
@@ -9672,8 +9687,9 @@ 

          for (cltime, clname, cltext) in zip(fields['changelogtime'], fields['changelogname'],

                                              fields['changelogtext']):

              cldate = datetime.datetime.fromtimestamp(cltime).isoformat(' ')

-             clname = koji.fixEncoding(clname)

-             cltext = koji.fixEncoding(cltext)

+             if six.PY2:

+                 clname = koji.fixEncoding(clname)

+                 cltext = koji.fixEncoding(cltext)

  

              if author and author != clname:

                  continue
@@ -9688,7 +9704,10 @@ 

                  results.append({'date': cldate, 'date_ts': cltime, 'author': clname, 'text': cltext})

  

          results = _applyQueryOpts(results, queryOpts)

-         return koji.fixEncodingRecurse(results, remove_nonprintable=True)

+         if six.PY2:

+             return koji.fixEncodingRecurse(results, remove_nonprintable=True)

+         else:

+             return results

  

      def cancelBuild(self, buildID):

          """Cancel the build with the given buildID
@@ -11029,23 +11048,15 @@ 

          else:

              return 1

  

-     def _sortByKeyFunc(self, key, noneGreatest=True):

+     def _sortByKeyFuncNoneGreatest(self, key):

          """Return a function to sort a list of maps by the given key.

-         If the key starts with '-', sort in reverse order.  If noneGreatest

-         is True, None will sort higher than all other values (instead of lower).

+         None will sort higher than all other values (instead of lower).

          """

-         if noneGreatest:

-             # Normally None evaluates to be less than every other value

-             # Invert the comparison so it always evaluates to greater

-             cmpFunc = lambda a, b: (a is None or b is None) and -(cmp(a, b)) or cmp(a, b)

-         else:

-             cmpFunc = cmp

- 

-         if key.startswith('-'):

-             key = key[1:]

-             return lambda a, b: cmpFunc(b[key], a[key])

-         else:

-             return lambda a, b: cmpFunc(a[key], b[key])

+         def internal_key(obj):

+             v = obj[key]

+             # Nones has priority, others are second

+             return (v is None, v)

+         return internal_key

  

      def filterResults(self, methodName, *args, **kw):

          """Execute the XML-RPC method with the given name and filter the results
@@ -11100,7 +11111,15 @@ 

  

          order = filterOpts.get('order')

          if order:

-             results.sort(self._sortByKeyFunc(order, filterOpts.get('noneGreatest', True)))

+             if order.startswith('-'):

+                 reverse = True

+                 order = order[1:]

+             else:

+                 reverse = False

+             if filterOpts.get('noneGreatest', True):

+                 results.sort(key=self._sortByKeyFuncNoneGreatest(order), reverse=reverse)

+             else:

+                 results.sort(key=lambda o: o[order], reverse=reverse)

  

          offset = filterOpts.get('offset')

          if offset is not None:
@@ -12950,9 +12969,8 @@ 

                  if context.session.user_id != user_id:

                      raise koji.GenericError("Invalid upload directory, not owner: %s" % orig_reldir)

              else:

-                 fo = open(u_fn, 'w')

-                 fo.write(str(context.session.user_id))

-                 fo.close()

+                 with open(u_fn, 'w') as fo:

+                     fo.write(str(context.session.user_id))

      return os.path.join(udir, name)

  

  def get_verify_class(verify):
@@ -12972,7 +12990,7 @@ 

      start = time.time()

      if not context.session.logged_in:

          raise koji.ActionNotAllowed('you must be logged-in to upload a file')

-     args = urlparse.parse_qs(environ.get('QUERY_STRING', ''), strict_parsing=True)

+     args = parse_qs(environ.get('QUERY_STRING', ''), strict_parsing=True)

      #XXX - already parsed by auth

      name = args['filename'][0]

      path = args.get('filepath', ('',))[0]

file modified
+11 -2
@@ -51,7 +51,10 @@ 

      def dump_datetime(self, value, write):

          # For backwards compatibility, we return datetime objects as strings

          value = value.isoformat(' ')

-         self.dump_string(value, write)

+         if six.PY2:

+             self.dump_string(value, write)

+         else:

+             self.dump_unicode(value, write)

      dispatch[datetime.datetime] = dump_datetime

  

  
@@ -363,6 +366,8 @@ 

      else:

          faultString = msg

      response = dumps(Fault(faultCode, faultString))

+     if six.PY3:

+         response = response.encode()

      headers = [

          ('Content-Length', str(len(response))),

          ('Content-Type', "text/xml"),
@@ -628,7 +633,7 @@ 

              name = 'koji' + name

          elif not name.startswith('koji'):

              name = 'koji.' + name

-         level_code = logging._levelNames[level]

+         level_code = logging.getLevelName(level)

          logging.getLogger(name).setLevel(level_code)

      logger = logging.getLogger("koji")

      # if KojiDebug is set, force main log level to DEBUG
@@ -699,6 +704,8 @@ 

          ]

          start_response('405 Method Not Allowed', headers)

          response = "Method Not Allowed\nThis is an XML-RPC server. Only POST requests are accepted."

+         if six.PY3:

+             response = response.encode()

          headers = [

              ('Content-Length', str(len(response))),

              ('Content-Type', "text/plain"),
@@ -728,6 +735,8 @@ 

                  response = h._wrap_handler(h.handle_upload, environ)

              else:

                  response = h._wrap_handler(h.handle_rpc, environ)

+             if six.PY3:

+                 response = response.encode()

              headers = [

                  ('Content-Length', str(len(response))),

                  ('Content-Type', "text/xml"),

file modified
+101 -16
@@ -123,6 +123,20 @@ 

  %package hub

  Summary: Koji XMLRPC interface

  Group: Applications/Internet

+ License: LGPLv2

+ Requires: %{name} = %{version}-%{release}

+ Requires: %{name}-hub-code

+ %if 0%{?fedora} || 0%{?rhel} > 7

+ Suggests: python%{python3_pkgversion}-%{name}-hub

+ Suggests: python%{python3_pkgversion}-%{name}-hub-plugins

+ %endif

+ 

+ %description hub

+ koji-hub is the XMLRPC interface to the koji database

+ 

+ %package -n python2-%{name}-hub

+ Summary: Koji XMLRPC interface

+ Group: Applications/Internet

  License: LGPLv2 and GPLv2

  # rpmdiff lib (from rpmlint) is GPLv2 (only)

  Requires: httpd
@@ -134,25 +148,78 @@ 

  Requires: %{name} = %{version}-%{release}

  # we need the python2 lib here

  Requires: python2-%{name} = %{version}-%{release}

+ # py2 xor py3

+ Provides: %{name}-hub-code = %{version}-%{release}

  

- %description hub

+ %description -n python2-%{name}-hub

+ koji-hub is the XMLRPC interface to the koji database

+ 

+ %if 0%{with python3}

+ %package -n python%{python3_pkgversion}-%{name}-hub

+ Summary: Koji XMLRPC interface

+ Group: Applications/Internet

+ License: LGPLv2 and GPLv2

+ # rpmdiff lib (from rpmlint) is GPLv2 (only)

+ Requires: httpd

+ Requires: mod_wsgi

+ %if 0%{?fedora} >= 21 || 0%{?rhel} >= 7

+ Requires: mod_auth_gssapi

+ %endif

+ Requires: python%{python3_pkgversion}-psycopg2

+ Requires: %{name} = %{version}-%{release}

+ # we need the python3 lib here

+ Requires: python%{python3_pkgversion}-%{name} = %{version}-%{release}

+ # py2 xor py3

+ Provides: %{name}-hub-code = %{version}-%{release}

+ 

+ %description -n python%{python3_pkgversion}-%{name}-hub

  koji-hub is the XMLRPC interface to the koji database

+ %endif

  

  %package hub-plugins

  Summary: Koji hub plugins

  Group: Applications/Internet

  License: LGPLv2

- Requires: %{name} = %{version}-%{release}

- Requires: %{name}-hub = %{version}-%{release}

+ Requires: %{name}-hub-plugins-code

+ %if 0%{?fedora} || 0%{?rhel} > 7

+ Suggests: python%{python3_pkgversion}-%{name}-hub-plugins

+ %endif

+ 

+ %description hub-plugins

+ Plugins to the koji XMLRPC interface

+ 

+ %package -n python2-%{name}-hub-plugins

+ Summary: Koji hub plugins

+ Group: Applications/Internet

+ License: LGPLv2

+ Requires: python2-%{name}-hub = %{version}-%{release}

  Requires: python-qpid >= 0.7

- %if 0%{?rhel} >= 6

+ %if 0%{?fedora} >= 27 || 0%{?rhel} >= 6

  Requires: python-qpid-proton

  %endif

  Requires: cpio

+ Provides: %{name}-hub-plugins-code

  

- %description hub-plugins

+ %description -n python2-%{name}-hub-plugins

  Plugins to the koji XMLRPC interface

  

+ %if 0%{with python3}

+ %package -n python%{python3_pkgversion}-%{name}-hub-plugins

+ Summary: Koji hub plugins

+ Group: Applications/Internet

+ License: LGPLv2

+ Requires: python%{python3_pkgversion}-%{name}-hub = %{version}-%{release}

+ Requires: python-qpid >= 0.7

+ %if 0%{?fedora} >= 27 ||  0%{?rhel} >= 6

+ Requires: python%{python3_pkgversion}-qpid-proton

+ %endif

+ Requires: cpio

+ Provides: %{name}-hub-plugins-code

+ 

+ %description -n python%{python3_pkgversion}-%{name}-hub-plugins

+ Plugins to the koji XMLRPC interface

+ %endif

+ 

  %package builder-plugins

  Summary: Koji builder plugins

  Group: Applications/Internet
@@ -270,12 +337,11 @@ 

  rm -rf $RPM_BUILD_ROOT

  make DESTDIR=$RPM_BUILD_ROOT PYTHON=%{__python2} %{?install_opt} install

  %if 0%{with python3}

- cd koji

- make DESTDIR=$RPM_BUILD_ROOT PYTHON=%{__python3} %{?install_opt} install

- cd ../cli

- make DESTDIR=$RPM_BUILD_ROOT PYTHON=%{__python3} %{?install_opt} install

- cd ../plugins

- make DESTDIR=$RPM_BUILD_ROOT PYTHON=%{__python3} %{?install_opt} install

+ for d in koji cli plugins hub ; do

+     pushd $d

+     make DESTDIR=$RPM_BUILD_ROOT PYTHON=%{__python3} %{?install_opt} install

+     popd

+ done

  # alter python interpreter in koji CLI

  sed -i 's/\#\!\/usr\/bin\/python2/\#\!\/usr\/bin\/python3/' $RPM_BUILD_ROOT/usr/bin/koji

  %endif
@@ -319,20 +385,39 @@ 

  

  %files hub

  %defattr(-,root,root)

- %{_datadir}/koji-hub

- %dir %{_libexecdir}/koji-hub

  %config(noreplace) /etc/httpd/conf.d/kojihub.conf

  %dir /etc/koji-hub

  %config(noreplace) /etc/koji-hub/hub.conf

  %dir /etc/koji-hub/hub.conf.d

  

- %files hub-plugins

+ %files -n python2-%{name}-hub

  %defattr(-,root,root)

- %dir %{_prefix}/lib/koji-hub-plugins

- %{_prefix}/lib/koji-hub-plugins/*.py*

+ %{_datadir}/koji-hub/*.py*

+ %dir %{_libexecdir}/koji-hub

+ 

+ %if 0%{with python3}

+ %files -n python%{python3_pkgversion}-%{name}-hub

+ %defattr(-,root,root)

+ %{_datadir}/koji-hub/*.py

+ %{_datadir}/koji-hub/__pycache__

+ %dir %{_libexecdir}/koji-hub

+ %endif

+ 

+ %files hub-plugins

  %dir /etc/koji-hub/plugins

  %config(noreplace) /etc/koji-hub/plugins/*.conf

  

+ %files -n python2-%{name}-hub-plugins

+ %defattr(-,root,root)

+ %{_prefix}/lib/koji-hub-plugins/*.py*

+ 

+ %if 0%{with python3}

+ %files -n python%{python3_pkgversion}-%{name}-hub-plugins

+ %defattr(-,root,root)

+ %{_prefix}/lib/koji-hub-plugins/*.py

+ %{_prefix}/lib/koji-hub-plugins/__pycache__

+ %endif

+ 

  %files builder-plugins

  %defattr(-,root,root)

  %dir /etc/kojid/plugins

file modified
+17 -7
@@ -890,6 +890,22 @@ 

      return hdr

  

  

+ def _decode_item(item):

+     """Decode rpm header byte strings to str in py3"""

+     if six.PY2:

+         return item

+     elif isinstance(item, bytes):

+         try:

+             return item.decode()

+         except UnicodeDecodeError:

+             # typically signatures

+             return item

+     elif isinstance(item, list):

+         return [_decode_item(x) for x in item]

+     else:

+         return item

+ 

+ 

  def get_header_field(hdr, name, src_arch=False):

      """Extract named field from an rpm header"""

      name = name.upper()
@@ -913,12 +929,6 @@ 

              result = []

          elif isinstance(result, six.integer_types):

              result = [result]

-     if six.PY3 and isinstance(result, bytes):

-         try:

-             result = result.decode('utf-8')

-         except UnicodeDecodeError:

-             # typically signatures

-             pass

  

      sizetags = ('SIZE', 'ARCHIVESIZE', 'FILESIZES', 'SIGSIZE')

      if name in sizetags and (result is None or result == []):
@@ -928,7 +938,7 @@ 

              # no such header

              pass

  

-     return result

+     return _decode_item(result)

  

  

  def _get_header_field(hdr, name):

file modified
+2 -2
@@ -23,7 +23,6 @@ 

  

  from __future__ import absolute_import

  import logging

- import sys

  import psycopg2

  # import psycopg2.extensions

  # # don't convert timestamp fields to DateTime objects
@@ -32,10 +31,11 @@ 

  # del psycopg2.extensions.string_types[1082]

  # del psycopg2.extensions.string_types[1083]

  # del psycopg2.extensions.string_types[1266]

+ import re

+ import sys

  import time

  import traceback

  from . import context

- import re

  

  POSITIONAL_RE = re.compile(r'%[a-z]')

  NAMED_RE = re.compile(r'%\(([^\)]+)\)[a-z]')

file modified
+5 -5
@@ -29,16 +29,16 @@ 

  		echo "ERROR: A destdir is required"; \

  		exit 1; \

  	fi

+ 	mkdir -p $(DESTDIR)/$(HUBPLUGINDIR); \

+ 	install -p -m 644 $(HUBFILES) $(DESTDIR)/$(HUBPLUGINDIR); \

+ 	$(PYTHON) -c "import compileall; compileall.compile_dir('$(DESTDIR)/$(HUBPLUGINDIR)', 1, '$(HUBPLUGINDIR)', 1)"; \

+ 	mkdir -p $(DESTDIR)/$(HUBCONFDIR); \

+ 	install -p -m 644 $(HUBCONFFILES) $(DESTDIR)/$(HUBCONFDIR); \

  	if [ "$(PYMAJORVER)" == "2" ] ; then \

- 		mkdir -p $(DESTDIR)/$(HUBPLUGINDIR); \

  		mkdir -p $(DESTDIR)/$(BUILDERPLUGINDIR); \

- 		install -p -m 644 $(HUBFILES) $(DESTDIR)/$(HUBPLUGINDIR); \

  		install -p -m 644 $(BUILDERFILES) $(DESTDIR)/$(BUILDERPLUGINDIR); \

- 		$(PYTHON) -c "import compileall; compileall.compile_dir('$(DESTDIR)/$(HUBPLUGINDIR)', 1, '$(HUBPLUGINDIR)', 1)"; \

  		$(PYTHON) -c "import compileall; compileall.compile_dir('$(DESTDIR)/$(BUILDERPLUGINDIR)', 1, '$(BUILDERPLUGINDIR)', 1)"; \

- 		mkdir -p $(DESTDIR)/$(HUBCONFDIR); \

  		mkdir -p $(DESTDIR)/$(BUILDERCONFDIR); \

- 		install -p -m 644 $(HUBCONFFILES) $(DESTDIR)/$(HUBCONFDIR); \

  		install -p -m 644 $(BUILDERCONFFILES) $(DESTDIR)/$(BUILDERCONFDIR); \

  	fi

  

@@ -1,205 +1,205 @@ 

  {

      "inserts": [

          [

-             "INSERT INTO archiveinfo (build_id, archive_id, type_id, checksum, filename, checksum_type, btype_id, buildroot_id, id, size) VALUES (%(build_id)s, %(archive_id)s, %(type_id)s, %(checksum)s, %(filename)s, %(checksum_type)s, %(btype_id)s, %(buildroot_id)s, %(id)s, %(size)s)", 

-             {

-                 "build_id": 137, 

-                 "archive_id": "ARCHIVE_ID", 

-                 "type_id": "ARCHIVETYPE", 

-                 "checksum": "19a674d997af7098a444b60d7b51cee6", 

-                 "filename": "tdl-x86_64.xml", 

-                 "checksum_type": 0, 

-                 "btype_id": "BTYPEID:image", 

-                 "buildroot_id": null, 

-                 "id": 1001, 

+             "INSERT INTO archiveinfo (archive_id, btype_id, build_id, buildroot_id, checksum, checksum_type, filename, id, size, type_id) VALUES (%(archive_id)s, %(btype_id)s, %(build_id)s, %(buildroot_id)s, %(checksum)s, %(checksum_type)s, %(filename)s, %(id)s, %(size)s, %(type_id)s)",

+             {

+                 "build_id": 137,

+                 "archive_id": "ARCHIVE_ID",

+                 "type_id": "ARCHIVETYPE",

+                 "checksum": "19a674d997af7098a444b60d7b51cee6",

+                 "filename": "tdl-x86_64.xml",

+                 "checksum_type": 0,

+                 "btype_id": "BTYPEID:image",

+                 "buildroot_id": null,

+                 "id": 1001,

                  "size": 36

-             }, 

+             },

              {}

-         ], 

+         ],

          [

-             "INSERT INTO image_archives (archive_id, arch) VALUES (%(archive_id)s, %(arch)s)", 

+             "INSERT INTO image_archives (arch, archive_id) VALUES (%(arch)s, %(archive_id)s)",

              {

-                 "archive_id": 1001, 

+                 "archive_id": 1001,

                  "arch": "x86_64"

-             }, 

+             },

              {}

-         ], 

+         ],

          [

-             "INSERT INTO archiveinfo (build_id, archive_id, type_id, checksum, filename, checksum_type, btype_id, buildroot_id, id, size) VALUES (%(build_id)s, %(archive_id)s, %(type_id)s, %(checksum)s, %(filename)s, %(checksum_type)s, %(btype_id)s, %(buildroot_id)s, %(id)s, %(size)s)", 

+             "INSERT INTO archiveinfo (archive_id, btype_id, build_id, buildroot_id, checksum, checksum_type, filename, id, size, type_id) VALUES (%(archive_id)s, %(btype_id)s, %(build_id)s, %(buildroot_id)s, %(checksum)s, %(checksum_type)s, %(filename)s, %(id)s, %(size)s, %(type_id)s)",

              {

-                 "build_id": 137, 

-                 "archive_id": "ARCHIVE_ID", 

-                 "type_id": "ARCHIVETYPE", 

-                 "checksum": "a5114a20d790cf17eca1b1115a4546f8", 

-                 "filename": "image.ks", 

-                 "checksum_type": 0, 

-                 "btype_id": "BTYPEID:image", 

-                 "buildroot_id": null, 

-                 "id": 1002, 

+                 "build_id": 137,

+                 "archive_id": "ARCHIVE_ID",

+                 "type_id": "ARCHIVETYPE",

+                 "checksum": "a5114a20d790cf17eca1b1115a4546f8",

+                 "filename": "image.ks",

+                 "checksum_type": 0,

+                 "btype_id": "BTYPEID:image",

+                 "buildroot_id": null,

+                 "id": 1002,

                  "size": 30

-             }, 

+             },

              {}

-         ], 

+         ],

          [

-             "INSERT INTO image_archives (archive_id, arch) VALUES (%(archive_id)s, %(arch)s)", 

+             "INSERT INTO image_archives (arch, archive_id) VALUES (%(arch)s, %(archive_id)s)",

              {

-                 "archive_id": 1002, 

+                 "archive_id": 1002,

                  "arch": "x86_64"

-             }, 

+             },

              {}

-         ], 

+         ],

          [

-             "INSERT INTO archiveinfo (build_id, archive_id, type_id, checksum, filename, checksum_type, btype_id, buildroot_id, id, size) VALUES (%(build_id)s, %(archive_id)s, %(type_id)s, %(checksum)s, %(filename)s, %(checksum_type)s, %(btype_id)s, %(buildroot_id)s, %(id)s, %(size)s)", 

+             "INSERT INTO archiveinfo (archive_id, btype_id, build_id, buildroot_id, checksum, checksum_type, filename, id, size, type_id) VALUES (%(archive_id)s, %(btype_id)s, %(build_id)s, %(buildroot_id)s, %(checksum)s, %(checksum_type)s, %(filename)s, %(id)s, %(size)s, %(type_id)s)",

              {

-                 "build_id": 137, 

-                 "archive_id": "ARCHIVE_ID", 

-                 "type_id": "ARCHIVETYPE", 

-                 "checksum": "9828cf75d9d17ac8e79e53ed71c6a71c", 

-                 "filename": "image-base.ks", 

-                 "checksum_type": 0, 

-                 "btype_id": "BTYPEID:image", 

-                 "buildroot_id": null, 

-                 "id": 1003, 

+                 "build_id": 137,

+                 "archive_id": "ARCHIVE_ID",

+                 "type_id": "ARCHIVETYPE",

+                 "checksum": "9828cf75d9d17ac8e79e53ed71c6a71c",

+                 "filename": "image-base.ks",

+                 "checksum_type": 0,

+                 "btype_id": "BTYPEID:image",

+                 "buildroot_id": null,

+                 "id": 1003,

                  "size": 35

-             }, 

+             },

              {}

-         ], 

+         ],

          [

-             "INSERT INTO image_archives (archive_id, arch) VALUES (%(archive_id)s, %(arch)s)", 

+             "INSERT INTO image_archives (arch, archive_id) VALUES (%(arch)s, %(archive_id)s)",

              {

-                 "archive_id": 1003, 

+                 "archive_id": 1003,

                  "arch": "x86_64"

-             }, 

+             },

              {}

-         ], 

+         ],

          [

-             "INSERT INTO archiveinfo (build_id, archive_id, type_id, checksum, filename, checksum_type, btype_id, buildroot_id, id, size) VALUES (%(build_id)s, %(archive_id)s, %(type_id)s, %(checksum)s, %(filename)s, %(checksum_type)s, %(btype_id)s, %(buildroot_id)s, %(id)s, %(size)s)", 

+             "INSERT INTO archiveinfo (archive_id, btype_id, build_id, buildroot_id, checksum, checksum_type, filename, id, size, type_id) VALUES (%(archive_id)s, %(btype_id)s, %(build_id)s, %(buildroot_id)s, %(checksum)s, %(checksum_type)s, %(filename)s, %(id)s, %(size)s, %(type_id)s)",

              {

-                 "build_id": 137, 

-                 "archive_id": "ARCHIVE_ID", 

-                 "type_id": "ARCHIVETYPE", 

-                 "checksum": "f601c0f647d7cdd4c92aa511876f8533", 

-                 "filename": "foo-x86_64.xml", 

-                 "checksum_type": 0, 

-                 "btype_id": "BTYPEID:image", 

-                 "buildroot_id": null, 

-                 "id": 1004, 

+                 "build_id": 137,

+                 "archive_id": "ARCHIVE_ID",

+                 "type_id": "ARCHIVETYPE",

+                 "checksum": "f601c0f647d7cdd4c92aa511876f8533",

+                 "filename": "foo-x86_64.xml",

+                 "checksum_type": 0,

+                 "btype_id": "BTYPEID:image",

+                 "buildroot_id": null,

+                 "id": 1004,

                  "size": 36

-             }, 

+             },

              {}

-         ], 

+         ],

          [

-             "INSERT INTO image_archives (archive_id, arch) VALUES (%(archive_id)s, %(arch)s)", 

+             "INSERT INTO image_archives (arch, archive_id) VALUES (%(arch)s, %(archive_id)s)",

              {

-                 "archive_id": 1004, 

+                 "archive_id": 1004,

                  "arch": "x86_64"

-             }, 

+             },

              {}

-         ], 

+         ],

          [

-             "INSERT INTO archiveinfo (build_id, archive_id, type_id, checksum, filename, checksum_type, btype_id, buildroot_id, id, size) VALUES (%(build_id)s, %(archive_id)s, %(type_id)s, %(checksum)s, %(filename)s, %(checksum_type)s, %(btype_id)s, %(buildroot_id)s, %(id)s, %(size)s)", 

+             "INSERT INTO archiveinfo (archive_id, btype_id, build_id, buildroot_id, checksum, checksum_type, filename, id, size, type_id) VALUES (%(archive_id)s, %(btype_id)s, %(build_id)s, %(buildroot_id)s, %(checksum)s, %(checksum_type)s, %(filename)s, %(id)s, %(size)s, %(type_id)s)",

              {

-                 "build_id": 137, 

-                 "archive_id": "ARCHIVE_ID", 

-                 "type_id": "ARCHIVETYPE", 

-                 "checksum": "84547200ef5002292ecdd50c62de518e", 

-                 "filename": "my-image-7.4.2-2.x86_64.ova", 

-                 "checksum_type": 0, 

-                 "btype_id": "BTYPEID:image", 

-                 "buildroot_id": null, 

-                 "id": 1005, 

+                 "build_id": 137,

+                 "archive_id": "ARCHIVE_ID",

+                 "type_id": "ARCHIVETYPE",

+                 "checksum": "84547200ef5002292ecdd50c62de518e",

+                 "filename": "my-image-7.4.2-2.x86_64.ova",

+                 "checksum_type": 0,

+                 "btype_id": "BTYPEID:image",

+                 "buildroot_id": null,

+                 "id": 1005,

                  "size": 49

-             }, 

+             },

              {}

-         ], 

+         ],

          [

-             "INSERT INTO image_archives (archive_id, arch) VALUES (%(archive_id)s, %(arch)s)", 

+             "INSERT INTO image_archives (arch, archive_id) VALUES (%(arch)s, %(archive_id)s)",

              {

-                 "archive_id": 1005, 

+                 "archive_id": 1005,

                  "arch": "x86_64"

-             }, 

+             },

              {}

-         ], 

+         ],

          [

-             "INSERT INTO archive_rpm_components (archive_id, rpm_id) VALUES (%(archive_id)s, %(rpm_id)s)", 

+             "INSERT INTO archive_rpm_components (archive_id, rpm_id) VALUES (%(archive_id)s, %(rpm_id)s)",

              {

-                 "archive_id": 1002, 

+                 "archive_id": 1002,

                  "rpm_id": 1000

-             }, 

+             },

              {}

-         ], 

+         ],

          [

-             "INSERT INTO archive_rpm_components (archive_id, rpm_id) VALUES (%(archive_id)s, %(rpm_id)s)", 

+             "INSERT INTO archive_rpm_components (archive_id, rpm_id) VALUES (%(archive_id)s, %(rpm_id)s)",

              {

-                 "archive_id": 1002, 

+                 "archive_id": 1002,

                  "rpm_id": 1001

-             }, 

+             },

              {}

-         ], 

+         ],

          [

-             "INSERT INTO archive_rpm_components (archive_id, rpm_id) VALUES (%(archive_id)s, %(rpm_id)s)", 

+             "INSERT INTO archive_rpm_components (archive_id, rpm_id) VALUES (%(archive_id)s, %(rpm_id)s)",

              {

-                 "archive_id": 1002, 

+                 "archive_id": 1002,

                  "rpm_id": 1002

-             }, 

+             },

              {}

-         ], 

+         ],

          [

-             "INSERT INTO archive_rpm_components (archive_id, rpm_id) VALUES (%(archive_id)s, %(rpm_id)s)", 

+             "INSERT INTO archive_rpm_components (archive_id, rpm_id) VALUES (%(archive_id)s, %(rpm_id)s)",

              {

-                 "archive_id": 1003, 

+                 "archive_id": 1003,

                  "rpm_id": 1000

-             }, 

+             },

              {}

-         ], 

+         ],

          [

-             "INSERT INTO archive_rpm_components (archive_id, rpm_id) VALUES (%(archive_id)s, %(rpm_id)s)", 

+             "INSERT INTO archive_rpm_components (archive_id, rpm_id) VALUES (%(archive_id)s, %(rpm_id)s)",

              {

-                 "archive_id": 1003, 

+                 "archive_id": 1003,

                  "rpm_id": 1001

-             }, 

+             },

              {}

-         ], 

+         ],

          [

-             "INSERT INTO archive_rpm_components (archive_id, rpm_id) VALUES (%(archive_id)s, %(rpm_id)s)", 

+             "INSERT INTO archive_rpm_components (archive_id, rpm_id) VALUES (%(archive_id)s, %(rpm_id)s)",

              {

-                 "archive_id": 1003, 

+                 "archive_id": 1003,

                  "rpm_id": 1002

-             }, 

+             },

              {}

-         ], 

+         ],

          [

-             "INSERT INTO archive_rpm_components (archive_id, rpm_id) VALUES (%(archive_id)s, %(rpm_id)s)", 

+             "INSERT INTO archive_rpm_components (archive_id, rpm_id) VALUES (%(archive_id)s, %(rpm_id)s)",

              {

-                 "archive_id": 1005, 

+                 "archive_id": 1005,

                  "rpm_id": 1000

-             }, 

+             },

              {}

-         ], 

+         ],

          [

-             "INSERT INTO archive_rpm_components (archive_id, rpm_id) VALUES (%(archive_id)s, %(rpm_id)s)", 

+             "INSERT INTO archive_rpm_components (archive_id, rpm_id) VALUES (%(archive_id)s, %(rpm_id)s)",

              {

-                 "archive_id": 1005, 

+                 "archive_id": 1005,

                  "rpm_id": 1001

-             }, 

+             },

              {}

-         ], 

+         ],

          [

-             "INSERT INTO archive_rpm_components (archive_id, rpm_id) VALUES (%(archive_id)s, %(rpm_id)s)", 

+             "INSERT INTO archive_rpm_components (archive_id, rpm_id) VALUES (%(archive_id)s, %(rpm_id)s)",

              {

-                 "archive_id": 1005, 

+                 "archive_id": 1005,

                  "rpm_id": 1002

-             }, 

+             },

              {}

          ]

-     ], 

+     ],

      "updates": [

          [

-             "UPDATE build SET state = %(data.state)s, id = %(data.id)s, completion_time = (now())\nWHERE ( id=%(build_id)i )", 

+             "UPDATE build SET completion_time = (now()), id = %(data.id)s, state = %(data.state)s\nWHERE ( id=%(build_id)i )",

              {

-                 "state": 1, 

+                 "state": 1,

                  "id": "BUILD_ID"

-             }, 

+             },

              {

                  "completion_time": "now()"

              }

@@ -18,6 +18,7 @@ 

          self.pathinfo = mock.patch('koji.pathinfo').start()

          self.tempdir = tempfile.mkdtemp()

          koji.pathinfo.build_logs.return_value = self.tempdir

+         koji.pathinfo.topdir = '/'

  

      def tearDown(self):

          mock.patch.stopall()

@@ -47,7 +47,7 @@ 

          fullpath = '%s/work/%s' % (self.topdir, reldir)

          os.makedirs(fullpath)

  

-         with open('{0}/.user'.format(fullpath), 'wb') as f:

+         with open('{0}/.user'.format(fullpath), 'wt') as f:

              f.write('1')

  

          with self.assertRaises(GenericError):

@@ -76,18 +76,18 @@ 

          _singleValue.return_value = 9876

          kojihub.import_rpm(self.filename)

          fields = [

-             'build_id',

-             'name',

              'arch',

+             'build_id',

+             'buildroot_id',

              'buildtime',

-             'payloadhash',

              'epoch',

-             'version',

-             'buildroot_id',

-             'release',

              'external_repo_id',

              'id',

+             'name',

+             'payloadhash',

+             'release',

              'size',

+             'version',

          ]

          statement = 'INSERT INTO rpminfo (%s) VALUES (%s)' % (

              ", ".join(fields),
@@ -135,18 +135,18 @@ 

          _singleValue.return_value = 9876

          kojihub.import_rpm(self.src_filename)

          fields = [

-             'build_id',

-             'name',

              'arch',

+             'build_id',

+             'buildroot_id',

              'buildtime',

-             'payloadhash',

              'epoch',

-             'version',

-             'buildroot_id',

-             'release',

              'external_repo_id',

              'id',

+             'name',

+             'payloadhash',

+             'release',

              'size',

+             'version',

          ]

          statement = 'INSERT INTO rpminfo (%s) VALUES (%s)' % (

              ", ".join(fields),
@@ -253,19 +253,19 @@ 

          kojihub.import_build(self.src_filename, [self.filename])

  

          fields = [

-             'task_id',

+             'completion_time',

+             'epoch',

              'extra',

+             'id',

+             'owner',

+             'pkg_id',

+             'release',

+             'source',

              'start_time',

-             'epoch',

-             'completion_time',

              'state',

+             'task_id',

              'version',

-             'source',

              'volume_id',

-             'owner',

-             'release',

-             'pkg_id',

-             'id',

          ]

          statement = 'INSERT INTO build (%s) VALUES (%s)' % (

              ", ".join(fields),

@@ -141,8 +141,8 @@ 

                                    joins=['archivetypes on archiveinfo.type_id = archivetypes.id',

                                           'btype ON archiveinfo.btype_id = btype.id',

                                           'maven_archives ON archiveinfo.id = maven_archives.archive_id'],

-                                   clauses=['maven_archives.group_id = %(group_id)s',

-                                            'maven_archives.artifact_id = %(artifact_id)s',

+                                   clauses=['maven_archives.artifact_id = %(artifact_id)s',

+                                            'maven_archives.group_id = %(group_id)s',

                                             'maven_archives.version = %(version)s'],

                                    values={'group_id': 'gid',

                                            'artifact_id': 'aid',
@@ -171,13 +171,15 @@ 

                                                      'platforms': 'all',

                                                      'flags': ['A', 'B']})

          self.assertLastQueryEqual(tables=['archiveinfo'],

-                                   joins=['archivetypes on archiveinfo.type_id = archivetypes.id',

+                                   joins=sorted([

+                                          'archivetypes on archiveinfo.type_id = archivetypes.id',

                                           'btype ON archiveinfo.btype_id = btype.id',

-                                          'win_archives ON archiveinfo.id = win_archives.archive_id'],

-                                   clauses=['win_archives.relpath = %(relpath)s',

+                                          'win_archives ON archiveinfo.id = win_archives.archive_id']),

+                                   clauses=sorted([

+                                            'win_archives.relpath = %(relpath)s',

                                             r"platforms ~ E'\\mall\\M'",

                                             r"flags ~ E'\\mA\\M'",

-                                            r"flags ~ E'\\mB\\M'"],

+                                            r"flags ~ E'\\mB\\M'"]),

                                    values={'relpath': 'somerelpath'},

                                    colsByAlias={'relpath': 'win_archives.relpath',

                                                 'platforms': 'win_archives.platforms',

@@ -37,10 +37,10 @@ 

          self.assertEqual(len(self.queries), 1)

          query = self.queries[0]

          self.assertEqual(query.tables, ['channels'])

-         self.assertEqual(query.aliases, ('name', 'id'))

+         self.assertEqual(query.aliases, ['id', 'name'])

          self.assertEqual(query.joins, None)

          self.assertEqual(query.values, {})

-         self.assertEqual(query.columns, ('channels.name', 'channels.id'))

+         self.assertEqual(query.columns, ['channels.id', 'channels.name'])

          self.assertEqual(query.clauses, None)

  

      def test_host(self):
@@ -54,10 +54,10 @@ 

              'host_channels.host_id = %(host_id)s'

          ]

          self.assertEqual(query.tables, ['host_channels'])

-         self.assertEqual(query.aliases, ('name', 'id'))

+         self.assertEqual(query.aliases, ['id', 'name'])

          self.assertEqual(query.joins, joins)

          self.assertEqual(query.values, {'host_id': 1234})

-         self.assertEqual(query.columns, ('channels.name', 'channels.id'))

+         self.assertEqual(query.columns, ['channels.id', 'channels.name'])

          self.assertEqual(query.clauses, clauses)

  

      def test_host_and_event(self):
@@ -71,10 +71,10 @@ 

              'host_channels.host_id = %(host_id)s',

          ]

          self.assertEqual(query.tables, ['host_channels'])

-         self.assertEqual(query.aliases, ('name', 'id'))

+         self.assertEqual(query.aliases, ['id', 'name'])

          self.assertEqual(query.joins, joins)

          self.assertEqual(query.values, {'host_id': 1234})

-         self.assertEqual(query.columns, ('channels.name', 'channels.id'))

+         self.assertEqual(query.columns, ['channels.id', 'channels.name'])

          self.assertEqual(query.clauses, clauses)

  

      def test_event_only(self):

@@ -59,9 +59,9 @@ 

          self.assertEqual(query.joins, ['host ON host.id = host_config.host_id',

                                         'host_channels ON host.id = host_channels.host_id'])

          self.assertEqual(query.clauses, [

-             'host_config.active IS TRUE',

-             'host_channels.channel_id = %(channelID)i',

              'host_channels.active IS TRUE',

+             'host_channels.channel_id = %(channelID)i',

+             'host_config.active IS TRUE',

              ])

  

      def test_list_hosts_single_arch(self):
@@ -71,7 +71,8 @@ 

          query = self.queries[0]

          self.assertEqual(query.tables, ['host_config'])

          self.assertEqual(query.joins, ['host ON host.id = host_config.host_id'])

-         self.assertEqual(query.clauses, ['host_config.active IS TRUE',r"""(arches ~ E'\\mx86_64\\M')"""])

+         self.assertEqual(query.clauses, [r"""(arches ~ E'\\mx86_64\\M')""",

+                                              'host_config.active IS TRUE'])

  

      def test_list_hosts_multi_arch(self):

          self.exports.listHosts(arches=['x86_64', 's390'])
@@ -80,7 +81,9 @@ 

          query = self.queries[0]

          self.assertEqual(query.tables, ['host_config'])

          self.assertEqual(query.joins, ['host ON host.id = host_config.host_id'])

-         self.assertEqual(query.clauses, ['host_config.active IS TRUE',r"""(arches ~ E'\\mx86_64\\M' OR arches ~ E'\\ms390\\M')"""])

+         self.assertEqual(query.clauses, [

+             r"""(arches ~ E'\\mx86_64\\M' OR arches ~ E'\\ms390\\M')""",

+             'host_config.active IS TRUE'])

  

      def test_list_hosts_bad_arch(self):

          with self.assertRaises(koji.GenericError):
@@ -111,7 +114,7 @@ 

          query = self.queries[0]

          self.assertEqual(query.tables, ['host_config'])

          self.assertEqual(query.joins, ['host ON host.id = host_config.host_id'])

-         self.assertEqual(query.clauses, ['host_config.active IS TRUE','enabled IS TRUE'])

+         self.assertEqual(query.clauses, ['enabled IS TRUE', 'host_config.active IS TRUE'])

  

      def test_list_hosts_disabled(self):

          self.exports.listHosts(enabled=0)
@@ -120,4 +123,4 @@ 

          query = self.queries[0]

          self.assertEqual(query.tables, ['host_config'])

          self.assertEqual(query.joins, ['host ON host.id = host_config.host_id'])

-         self.assertEqual(query.clauses, ['host_config.active IS TRUE','enabled IS FALSE'])

+         self.assertEqual(query.clauses, ['enabled IS FALSE', 'host_config.active IS TRUE'])

@@ -72,13 +72,13 @@ 

          # only query to watchers

          self.assertEqual(len(self.queries), 1)

          q = self.queries[0]

-         self.assertEqual(q.columns, ('email',))

+         self.assertEqual(q.columns, ['email'])

          self.assertEqual(q.tables, ['build_notifications'])

-         self.assertEqual(q.clauses, [ 'status = %(users_status)i',

-                                      'usertype IN %(users_usertypes)s',

-                                      'package_id IS NULL',

+         self.assertEqual(q.clauses, ['package_id IS NULL',

+                                      'status = %(users_status)i',

+                                      'success_only = FALSE',

                                       'tag_id IS NULL',

-                                      'success_only = FALSE'])

+                                      'usertype IN %(users_usertypes)s'])

          self.assertEqual(q.joins, ['JOIN users ON build_notifications.user_id = users.id'])

          self.assertEqual(q.values['state'], state)

          self.assertEqual(q.values['build'], build)
@@ -96,13 +96,13 @@ 

          # there should be only query to watchers

          self.assertEqual(len(self.queries), 1)

          q = self.queries[0]

-         self.assertEqual(q.columns, ('email',))

+         self.assertEqual(q.columns, ['email'])

          self.assertEqual(q.tables, ['build_notifications'])

-         self.assertEqual(q.clauses, ['status = %(users_status)i',

-                                      'usertype IN %(users_usertypes)s',

-                                      'package_id = %(package_id)i OR package_id IS NULL',

+         self.assertEqual(q.clauses, ['package_id = %(package_id)i OR package_id IS NULL',

+                                      'status = %(users_status)i',

+                                      'success_only = FALSE',

                                       'tag_id IS NULL',

-                                      'success_only = FALSE'])

+                                      'usertype IN %(users_usertypes)s'])

          self.assertEqual(q.joins, ['JOIN users ON build_notifications.user_id = users.id'])

          self.assertEqual(q.values['package_id'], build['package_id'])

          self.assertEqual(q.values['state'], state)
@@ -134,19 +134,19 @@ 

          }

  

          emails = kojihub.get_notification_recipients(build, tag_id, state)

-         self.assertEqual(emails, ['owner_name@test.domain.com', 'pkg_owner_name@test.domain.com'])

+         self.assertEqual(sorted(emails), ['owner_name@test.domain.com', 'pkg_owner_name@test.domain.com'])

  

  

          # there should be only query to watchers

          self.assertEqual(len(self.queries), 1)

          q = self.queries[0]

-         self.assertEqual(q.columns, ('email',))

+         self.assertEqual(q.columns, ['email'])

          self.assertEqual(q.tables, ['build_notifications'])

-         self.assertEqual(q.clauses, ['status = %(users_status)i',

-                                      'usertype IN %(users_usertypes)s',

-                                      'package_id = %(package_id)i OR package_id IS NULL',

+         self.assertEqual(q.clauses, ['package_id = %(package_id)i OR package_id IS NULL',

+                                      'status = %(users_status)i',

+                                      'success_only = FALSE',

                                       'tag_id = %(tag_id)i OR tag_id IS NULL',

-                                      'success_only = FALSE'])

+                                      'usertype IN %(users_usertypes)s'])

          self.assertEqual(q.joins, ['JOIN users ON build_notifications.user_id = users.id'])

          self.assertEqual(q.values['package_id'], build['package_id'])

          self.assertEqual(q.values['state'], state)

@@ -90,7 +90,7 @@ 

          rpm = os.path.join(data_path, 'test-pkg-1.0.0-1.el7.noarch.rpm')

  

          # dummy file info

-         defattr = [19L, 33188, 1531970408, 0, 0, 2, 1, -1, -1, 'root', 'root', '02d2c91b']

+         defattr = [19, 33188, 1531970408, 0, 0, 2, 1, -1, -1, 'root', 'root', '02d2c91b']

  

          rpm_dict_old = {'a_file': defattr }

  
@@ -113,7 +113,7 @@ 

                      self.assertEqual(diff.textdiff(), textdiff if token not in opt else '')

  

          # case 1 size diffrerent

-         check_diff_result('S', 0, 99L, "S.......... a_file")

+         check_diff_result('S', 0, 99, "S.......... a_file")

  

          # case 2 mode different

          check_diff_result('M', 1, 22188, ".M......... a_file")

@@ -7,7 +7,6 @@ 

  except ImportError:

      import unittest

  

- import koji

  from kojihub import _write_maven_repo_metadata

  

  class TestWriteMavenRepoMetadata(unittest.TestCase):
@@ -39,7 +38,7 @@ 

          openf_mock.assert_called_with(

              os.path.join(destdir, 'maven-metadata.xml'), 'w')

  

-         handle = openf_mock()

+         handle = openf_mock().__enter__()

          expected = """\

  <?xml version="1.0"?>

  <metadata>

rebased onto aaf70aabd3737037f49603a494767058fa77f87c

5 years ago

rebased onto f271a53203bbe8be57e1c404a1246d30ae0c3a3c

5 years ago

I get a unit test failure with the current version

  File "/tmp/tmpKZhq0j/tests/test_hub/test_rpmdiff.py", line 93
    defattr = [19L, 33188, 1531970408, 0, 0, 2, 1, -1, -1, 'root', 'root', '02d2c91b']
                 ^
SyntaxError: invalid syntax

I guess the jenkins tests are not running all the py3 tests?

Fixed. The problem with Jenkins is that it is not running the py3 tests at all. I have to figure out how to run both without mixing the results.

1 new commit added

  • fix new test
5 years ago

@tkopecek
I found that koji.get_rpm_header() returns different results between py2 and py3 because of the differences in string/byte types.
Like the case in PR #1068 — should we make the result contain only strings?

@julian8628 - I have more related changes here https://pagure.io/fork/tkopecek/koji/c/13d956f0a6069ce08fb91019aff7bdf782f54a5e?branch=builder-py3 It probably makes sense to pull them out and create a separate PR for that (correct decoding of headers and unified usage of it).

rebased onto b4e031923a197328556dea1480939bc1f8f3d638

5 years ago

This seems to break the upgrade path. RPM doesn't know that python2-koji-hub should replace koji-hub.

1 new commit added

  • Add provides koji-hub to spec
5 years ago

Is there a reason we should be shipping both versions of the hub? To me, it makes sense to ship only the Python 3 components when Python 3 is enabled, for anything that isn't an importable module.

And doing so will drastically simplify the packaging...

What's the status here? Moving to Python 3 would be very welcome because of the Python 2 deprecation in F30.

/cc @churchyard

rebased onto 60132d2

5 years ago

I've incorporated a few more fixes and revamped the packaging for a cleaner upgrade path. @mikem, can you check it?

14 new commits added

  • spec changes for py3 hub
  • fix new test
  • fix import
  • fix bytes/str in rpm header
  • fix test
  • fix code inspects
  • marshaller update
  • rename masked rpm variable
  • db ordering fixes
  • convert py2 sorting to py3
  • fix encoding
  • encode xmlrpc responses correctly
  • fix file handling
  • six.moves xmlrpc_client, parse_qs
5 years ago

14 new commits added

  • spec changes for py3 hub
  • fix new test
  • fix import
  • fix bytes/str in rpm header
  • fix test
  • fix code inspects
  • marshaller update
  • rename masked rpm variable
  • db ordering fixes
  • convert py2 sorting to py3
  • fix encoding
  • encode xmlrpc responses correctly
  • fix file handling
  • six.moves xmlrpc_client, parse_qs
5 years ago

14 new commits added

  • spec changes for py3 hub
  • fix new test
  • fix import
  • fix bytes/str in rpm header
  • fix test
  • fix code inspects
  • marshaller update
  • rename masked rpm variable
  • db ordering fixes
  • convert py2 sorting to py3
  • fix encoding
  • encode xmlrpc responses correctly
  • fix file handling
  • six.moves xmlrpc_client, parse_qs
5 years ago

The spec isn't quite where I want it, but I'm going to merge this as-is and deal with that in a separate PR once I merge all three of the main py3 changes.

Commit 2460a00 fixes this pull-request

Pull-Request has been merged by mikem

5 years ago