#37 backports of Red Hat internal changes to upstream
Merged 8 years ago by ausil. Opened 8 years ago by dmach.
dmach/pungi backports into master

file modified
+7 -2
@@ -177,7 +177,11 @@ 

                        old_composes=opts.old_composes, koji_event=opts.koji_event, supported=opts.supported, logger=logger)

      kobo.log.add_file_logger(logger, compose.paths.log.log_file("global", "pungi.log"))

      COMPOSE = compose

-     run_compose(compose)

+     try:

+         run_compose(compose)

+     except Exception, ex:

+         compose.log_error("Compose run failed: %s" % ex)

+         raise

  

  

  def run_compose(compose):
@@ -335,6 +339,7 @@ 

              open(tb_path, "w").write(kobo.tback.Traceback().get_traceback())

          else:

              print("Exception: %s" % ex)

+             raise

          sys.stdout.flush()

          sys.stderr.flush()

-         raise

+         sys.exit(1)

file modified
+5
@@ -46,6 +46,7 @@ 

      ci.release.short = conf["release_short"]

      ci.release.version = conf["release_version"]

      ci.release.is_layered = bool(conf.get("release_is_layered", False))

+     ci.release.type = conf.get("release_type", "ga").lower()

      if ci.release.is_layered:

          ci.base_product.name = conf["base_product_name"]

          ci.base_product.short = conf["base_product_short"]
@@ -159,6 +160,10 @@ 

          return self.ci_base.compose.label

  

      @property

+     def compose_label_major_version(self):

+         return self.ci_base.compose.label_major_version

+ 

+     @property

      def has_comps(self):

          return bool(self.conf.get("comps_file", False))

  

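For reference, the new release_type value is read from the compose configuration alongside the other release_* options; a hypothetical config snippet using it (option names come from the diff, the values are invented) could look like:

    # illustrative compose configuration snippet -- values are made up
    release_name = "Example Product"
    release_short = "Example"
    release_version = "1.0"
    release_is_layered = False
    release_type = "ga"   # new option; read via conf.get("release_type", "ga").lower()
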
file modified
+4
@@ -77,6 +77,7 @@ 

      ci.release.version = compose.conf["release_version"]

      ci.release.short = compose.conf["release_short"]

      ci.release.is_layered = compose.conf.get("release_is_layered", False)

+     ci.release.type = compose.conf.get("release_type", "ga").lower()

  

      # base product

      if ci.release.is_layered:
@@ -103,6 +104,7 @@ 

              var.release.short = variant.release_short

              var.release.version = variant.release_version

              var.release.is_layered = True

+             var.release.type = ci.release.type

  

          for arch in variant.arches:

              # paths: binaries
@@ -190,6 +192,7 @@ 

          ti.release.version = variant.release_version

          ti.release.short = variant.release_short

          ti.release.is_layered = True

+         ti.release.type = compose.conf.get("release_type", "ga").lower()

  

          # base product

          ti.base_product.name = compose.conf["release_name"]
@@ -205,6 +208,7 @@ 

          ti.release.version = compose.conf["release_version"]

          ti.release.short = compose.conf["release_short"]

          ti.release.is_layered = compose.conf.get("release_is_layered", False)

+         ti.release.type = compose.conf.get("release_type", "ga").lower()

  

          # base product

          if ti.release.is_layered:

file modified
+5 -2
@@ -464,7 +464,7 @@ 

                  makedirs(path)

          return path

  

-     def iso_path(self, arch, variant, disc_type="dvd", disc_num=1, suffix=".iso", symlink_to=None, create_dir=True, relative=False):

+     def iso_path(self, arch, variant, disc_type="dvd", disc_num=1, suffix=".iso", symlink_to=None, create_dir=True, relative=False, name=None):

          """

          Examples:

              compose/Server/x86_64/iso/rhel-7.0-20120127.0-Server-x86_64-dvd1.iso
@@ -489,7 +489,10 @@ 

              variant_uid = variant.parent.uid

          else:

              variant_uid = variant.uid

-         file_name = "%s-%s-%s-%s%s%s" % (compose_id, variant_uid, arch, disc_type, disc_num, suffix)

+         if not name:

+             file_name = "%s-%s-%s-%s%s%s" % (compose_id, variant_uid, arch, disc_type, disc_num, suffix)

+         else:

+             file_name = "%s-%s-%s-%s%s%s" % (name, variant_uid, arch, disc_type, disc_num, suffix)

          result = os.path.join(path, file_name)

          return result

  

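A rough sketch of how the new optional name argument changes the generated file name, reusing the example path from the docstring above (the variant object and the custom prefix are invented):

    # default: the file name starts with the compose id
    compose.paths.compose.iso_path("x86_64", variant, disc_type="dvd", disc_num=1)
    # -> compose/Server/x86_64/iso/rhel-7.0-20120127.0-Server-x86_64-dvd1.iso

    # with the new name argument, the custom prefix replaces the compose id
    compose.paths.compose.iso_path("x86_64", variant, disc_type="dvd", disc_num=1,
                                   name="my-live-image-1.0")
    # -> compose/Server/x86_64/iso/my-live-image-1.0-Server-x86_64-dvd1.iso
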
file modified
+70 -39
@@ -79,20 +79,17 @@ 

                  if not iso_dir:

                      continue

  

-                 # XXX: hardcoded disc_type and disc_num

-                 iso_path = self.compose.paths.compose.iso_path(arch, variant, disc_type="live", disc_num=None, symlink_to=symlink_isos_to)

-                 if os.path.isfile(iso_path):

-                     self.compose.log_warning("Skipping creating live image, it already exists: %s" % iso_path)

-                     continue

- 

-                 iso_name = os.path.basename(iso_path)

- 

                  cmd = {

+                     "name": None,

+                     "version": None,

                      "arch": arch,

                      "variant": variant,

-                     "iso_path": iso_path,

+                     "iso_path": None,

+                     "wrapped_rpms_path": iso_dir,

                      "build_arch": arch,

                      "ks_file": ks_file,

+                     "specfile": None,

+                     "scratch": False,

                      "cmd": [],

                      "label": "",  # currently not used

                  }
@@ -105,6 +102,35 @@ 

                  data = get_arch_variant_data(self.compose.conf, "live_images", arch, variant)

                  cmd["repos"].extend(data[0].get("additional_repos", []))

  

+                 # Explicit name and version

+                 cmd["name"] = data[0].get("name", None)

+                 cmd["version"] = data[0].get("version", None)

+ 

+                 # Specfile (for images wrapped in rpm)

+                 cmd["specfile"] = data[0].get("specfile", None)

+ 

+                 # Scratch build (only taken into consideration if a specfile is specified)

+                 # For images wrapped in an rpm, scratch is disabled by default

+                 # For other images, scratch is always on

+                 cmd["scratch"] = data[0].get("scratch", False)

+ 

+                 # Custom name (prefix)

+                 custom_iso_name = None

+                 if cmd["name"]:

+                     custom_iso_name = cmd["name"]

+                     if cmd["version"]:

+                         custom_iso_name += "-%s" % cmd["version"]

+ 

+                 # XXX: hardcoded disc_type and disc_num

+                 iso_path = self.compose.paths.compose.iso_path(arch, variant, disc_type="live", disc_num=None, symlink_to=symlink_isos_to, name=custom_iso_name)

+                 if os.path.isfile(iso_path):

+                     self.compose.log_warning("Skipping creating live image, it already exists: %s" % iso_path)

+                     continue

+                 cmd["iso_path"] = iso_path

+                 iso_name = os.path.basename(iso_path)

+ 

+                 # Additional commands

+ 

                  chdir_cmd = "cd %s" % pipes.quote(iso_dir)

                  cmd["cmd"].append(chdir_cmd)

  
@@ -142,40 +168,45 @@ 

      def process(self, item, num):

          compose, cmd = item

  

-         runroot = compose.conf.get("runroot", False)

          log_file = compose.paths.log.log_file(cmd["arch"], "createiso-%s" % os.path.basename(cmd["iso_path"]))

  

          msg = "Creating ISO (arch: %s, variant: %s): %s" % (cmd["arch"], cmd["variant"], os.path.basename(cmd["iso_path"]))

          self.pool.log_info("[BEGIN] %s" % msg)

  

-         if runroot:

-             # run in a koji build root

- 

-             koji_wrapper = KojiWrapper(compose.conf["koji_profile"])

-             name, version = compose.compose_id.rsplit("-", 1)

-             target = compose.conf["live_target"]

-             koji_cmd = koji_wrapper.get_create_image_cmd(name, version, target, cmd["build_arch"], cmd["ks_file"], cmd["repos"], image_type="live", wait=True, archive=False)

- 

-             # avoid race conditions?

-             # Kerberos authentication failed: Permission denied in replay cache code (-1765328215)

-             time.sleep(num * 3)

- 

-             output = koji_wrapper.run_create_image_cmd(koji_cmd, log_file=log_file)

-             if output["retcode"] != 0:

-                 self.fail(compose, cmd)

-                 raise RuntimeError("LiveImage task failed: %s. See %s for more details." % (output["task_id"], log_file))

- 

-             # copy finished image to isos/

-             image_path = koji_wrapper.get_image_path(output["task_id"])

-             # TODO: assert len == 1

-             image_path = image_path[0]

-             shutil.copy2(image_path, cmd["iso_path"])

- 

-             # write checksum and manifest

-             run(cmd["cmd"])

- 

-         else:

-             raise RuntimeError("NOT IMPLEMENTED")

+         koji_wrapper = KojiWrapper()

+         name, version = compose.compose_id.rsplit("-", 1)

+         name = cmd["name"] or name

+         version = cmd["version"] or version

+         archive = False

+         if cmd["specfile"] and not cmd["scratch"]:

+             # Non-scratch builds are allowed only for rpm-wrapped images

+             archive = True

+         target = compose.conf.get("live_target", "rhel-7.0-candidate")  # compatibility with the previously hardcoded target

+         koji_cmd = koji_wrapper.get_create_image_cmd(name, version, target, cmd["build_arch"], cmd["ks_file"], cmd["repos"], image_type="live", wait=True, archive=archive, specfile=cmd["specfile"])

+ 

+         # avoid race conditions?

+         # Kerberos authentication failed: Permission denied in replay cache code (-1765328215)

+         time.sleep(num * 3)

+ 

+         output = koji_wrapper.run_create_image_cmd(koji_cmd, log_file=log_file)

+         if output["retcode"] != 0:

+             self.fail(compose, cmd)

+             raise RuntimeError("LiveImage task failed: %s. See %s for more details." % (output["task_id"], log_file))

+ 

+         # copy finished image to isos/

+         image_path = koji_wrapper.get_image_path(output["task_id"])

+         # TODO: assert len == 1

+         image_path = image_path[0]

+         shutil.copy2(image_path, cmd["iso_path"])

+ 

+         # copy finished rpm to isos/ (if rpm wrapped ISO was built)

+         if cmd["specfile"]:

+             rpm_paths = koji_wrapper.get_wrapped_rpm_path(output["task_id"])

+             for rpm_path in rpm_paths:

+                 shutil.copy2(rpm_path, cmd["wrapped_rpms_path"])

+ 

+         # write checksum and manifest

+         run(cmd["cmd"])

  

          self.pool.log_info("[DONE ] %s" % msg)

  
@@ -187,8 +218,8 @@ 

      scm_dict = data[0]["kickstart"]

  

      if isinstance(scm_dict, dict):

+         file_name = os.path.basename(os.path.basename(scm_dict["file"]))

          if scm_dict["scm"] == "file":

-             file_name = os.path.basename(os.path.basename(scm_dict["file"]))

              scm_dict["file"] = os.path.join(compose.config_dir, os.path.basename(scm_dict["file"]))

      else:

          file_name = os.path.basename(os.path.basename(scm_dict))

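The per-arch/variant options read above come from the live_images entries passed to get_arch_variant_data; a hypothetical entry showing the newly honoured keys (the variant pattern, kickstart file and specfile URL are invented, only the key names come from the diff) might look like:

    live_images = [
        ("^Server$", {
            "x86_64": {
                "kickstart": "server-live.ks",
                "additional_repos": [],
                "name": "example-live-image",        # new: explicit image name
                "version": "1.0",                    # new: explicit image version
                "specfile": "git://example.com/example-image.spec",  # new: wrap the image in an rpm
                "scratch": False,                    # new: only honoured together with specfile
            },
        }),
    ]
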
file modified
+1 -1
@@ -243,7 +243,7 @@ 

                      found = True

              else:

                  # or raise an exception

-                 raise RuntimeError("RPM not found for sigs: %s" % self.sigkey_ordering)

+                 raise RuntimeError("RPM %s not found for sigs: %s" % (rpm_info, self.sigkey_ordering))

  

          if not found:

              raise RuntimeError("Package not found: %s" % rpm_info)

file modified
+86 -1
@@ -97,7 +97,7 @@ 

          }

          return result

  

-     def get_create_image_cmd(self, name, version, target, arch, ks_file, repos, image_type="live", image_format=None, release=None, wait=True, archive=False):

+     def get_create_image_cmd(self, name, version, target, arch, ks_file, repos, image_type="live", image_format=None, release=None, wait=True, archive=False, specfile=None):

          # Usage: koji spin-livecd [options] <name> <version> <target> <arch> <kickstart-file>

          # Usage: koji spin-appliance [options] <name> <version> <target> <arch> <kickstart-file>

          # Examples:
@@ -126,6 +126,9 @@ 

          else:

              cmd.append("--nowait")

  

+         if specfile:

+             cmd.append("--specfile=%s" % specfile)

+ 

          if isinstance(repos, list):

              for repo in repos:

                  cmd.append("--repo=%s" % repo)
@@ -205,3 +208,85 @@ 

          for i in task_result["files"]:

              result.append(os.path.join(topdir, i))

          return result

+ 

+     def get_wrapped_rpm_path(self, task_id, srpm=False):

+         result = []

+         parent_task = self.koji_proxy.getTaskInfo(task_id, request=True)

+         task_info_list = []

+         task_info_list.extend(self.koji_proxy.getTaskChildren(task_id, request=True))

+ 

+         # scan parent and child tasks for certain methods

+         task_info = None

+         for i in task_info_list:

+             if i["method"] in ("wrapperRPM",):

+                 task_info = i

+                 break

+ 

+         # Check the parent task to see whether it is a scratch build

+         scratch = parent_task["request"][-1].get("scratch", False)

+ 

+         # Get results of wrapperRPM task

+         # {'buildroot_id': 2479520,

+         #  'logs': ['checkout.log', 'root.log', 'state.log', 'build.log'],

+         #  'rpms': ['foreman-discovery-image-2.1.0-2.el7sat.noarch.rpm'],

+         #  'srpm': 'foreman-discovery-image-2.1.0-2.el7sat.src.rpm'}

+         task_result = self.koji_proxy.getTaskResult(task_info["id"])

+ 

+         # Get koji dir with results (rpms, srpms, logs, ...)

+         topdir = os.path.join(self.koji_module.pathinfo.work(), self.koji_module.pathinfo.taskrelpath(task_info["id"]))

+ 

+         # TODO: Maybe use different approach for non-scratch builds - see get_image_path()

+ 

+         # Get list of filenames that should be returned

+         result_files = task_result["rpms"]

+         if srpm:

+             result_files += [task_result["srpm"]]

+ 

+         # Prepare list with paths to the required files

+         for i in result_files:

+             result.append(os.path.join(topdir, i))

+ 

+         return result

+ 

+     def get_signed_wrapped_rpms_paths(self, task_id, level, srpm=False):

+         result = []

+         parent_task = self.koji_proxy.getTaskInfo(task_id, request=True)

+         task_info_list = []

+         task_info_list.extend(self.koji_proxy.getTaskChildren(task_id, request=True))

+ 

+         # scan parent and child tasks for certain methods

+         task_info = None

+         for i in task_info_list:

+             if i["method"] in ("wrapperRPM",):

+                 task_info = i

+                 break

+ 

+         # Check the parent task to see whether it is a scratch build

+         scratch = parent_task["request"][-1].get("scratch", False)

+         if scratch:

+             raise RuntimeError("Scratch builds cannot be signed!")

+ 

+         # Get results of wrapperRPM task

+         # {'buildroot_id': 2479520,

+         #  'logs': ['checkout.log', 'root.log', 'state.log', 'build.log'],

+         #  'rpms': ['foreman-discovery-image-2.1.0-2.el7sat.noarch.rpm'],

+         #  'srpm': 'foreman-discovery-image-2.1.0-2.el7sat.src.rpm'}

+         task_result = self.koji_proxy.getTaskResult(task_info["id"])

+ 

+         # Get list of filenames that should be returned

+         result_files = task_result["rpms"]

+         if srpm:

+             result_files += [task_result["srpm"]]

+ 

+         # Prepare list with paths to the required files

+         for i in result_files:

+             rpminfo = self.koji_proxy.getRPM(i)

+             build = self.koji_proxy.getBuild(rpminfo["build_id"])

+             path = os.path.join(self.koji_module.pathinfo.build(build), self.koji_module.pathinfo.signed(rpminfo, level))

+             result.append(path)

+ 

+         return result

+ 

+     def get_build_nvrs(self, task_id):

+         builds = self.koji_proxy.listBuilds(taskID=task_id)

+         return [build.get("nvr") for build in builds if build.get("nvr")]

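A minimal sketch of how the new wrapper helpers could be used once a spin-livecd task has finished (the profile name, task id and signing key are invented; whether the constructor takes a profile argument depends on the KojiWrapper variant in use):

    koji_wrapper = KojiWrapper("koji")   # hypothetical profile name
    task_id = 123456                     # hypothetical finished spin-livecd task

    # unsigned wrapper rpms (and srpm) produced by the wrapperRPM child task
    rpm_paths = koji_wrapper.get_wrapped_rpm_path(task_id, srpm=True)

    # signed copies; raises RuntimeError for scratch builds
    signed_paths = koji_wrapper.get_signed_wrapped_rpms_paths(task_id, "0123abcd")

    # NVRs of the builds created by the task
    nvrs = koji_wrapper.get_build_nvrs(task_id)
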
file modified
-1
@@ -66,7 +66,6 @@ 

          variant_dict = {

              "id": str(variant_node.attrib["id"]),

              "name": str(variant_node.attrib["name"]),

-             "name": str(variant_node.attrib["name"]),

              "type": str(variant_node.attrib["type"]),

              "arches": [str(i) for i in variant_node.xpath("arches/arch/text()")],

              "groups": [],