| |
@@ -23,630 +23,645 @@
|
| |
"""The base class for all exceptions raised by this script."""
|
| |
|
| |
|
| |
- def print_user(content, success=None):
|
| |
- """ Prints the specified content to the user.
|
| |
- """
|
| |
- spaces = 90
|
| |
- if success is not None:
|
| |
- end = None
|
| |
- if success:
|
| |
- content = "{} {}".format(content.ljust(spaces), "[DONE]")
|
| |
- else:
|
| |
- content = "{} {}".format(content.ljust(spaces), "[FAILED]")
|
| |
- else:
|
| |
- end = "\r"
|
| |
+ class MonitoringUtils:
|
| |
|
| |
- now = datetime.datetime.utcnow()
|
| |
- time = now.strftime("%H:%M:%S")
|
| |
- print(f"{time} - {content}", end=end, flush=True)
|
| |
+ def __init__(self):
|
| |
+ """ Instanciate the object. """
|
| |
+ self.logs = []
|
| |
|
| |
+ def print_user(self, content, success=None):
|
| |
+ """ Prints the specified content to the user.
|
| |
+ """
|
| |
+ spaces = 90
|
| |
+ if success is not None:
|
| |
+ end = None
|
| |
+ if success:
|
| |
+ content = "{} {}".format(content.ljust(spaces), "[DONE]")
|
| |
+ else:
|
| |
+ content = "{} {}".format(content.ljust(spaces), "[FAILED]")
|
| |
+ else:
|
| |
+ end = "\r"
|
| |
|
| |
- def run_command(command, cwd=None):
|
| |
- """ Run the specified command in a specific working directory if one
|
| |
- is specified.
|
| |
- """
|
| |
- output = None
|
| |
- try:
|
| |
- output = subprocess.check_output(
|
| |
- command, cwd=cwd, stderr=subprocess.PIPE
|
| |
- )
|
| |
- except subprocess.CalledProcessError as e:
|
| |
- _log.error(
|
| |
- "Command `{}` return code: `{}`".format(
|
| |
- " ".join(command), e.returncode
|
| |
- )
|
| |
- )
|
| |
- _log.error("stdout:\n-------\n{}".format(e.stdout))
|
| |
- _log.error("stderr:\n-------\n{}".format(e.stderr))
|
| |
- raise MonitoringException("Command failed to run")
|
| |
+ now = datetime.datetime.utcnow()
|
| |
+ time = now.strftime("%H:%M:%S")
|
| |
+ self.logs.append(f"{time} - {content}")
|
| |
+ print(f"{time} - {content}", end=end, flush=True)
|
| |
|
| |
- return output
|
| |
|
| |
-
|
| |
- def clone_repo(command, namespace, name, folder):
|
| |
- """ Clone the specified git repo into the specified folder.
|
| |
- """
|
| |
- info_log = f"Cloning the git repo: {namespace}/{name}"
|
| |
- print_user(info_log)
|
| |
- try:
|
| |
- run_command([command, "clone", f"{namespace}/{name}"], cwd=folder)
|
| |
- print_user(info_log, success=True)
|
| |
- except MonitoringException:
|
| |
- print_user(info_log, success=False)
|
| |
+ def clone_repo(self, command, namespace, name, folder):
|
| |
+ """ Clone the specified git repo into the specified folder.
|
| |
+ """
|
| |
+ info_log = f"Cloning the git repo: {namespace}/{name}"
|
| |
+ self.print_user(info_log)
|
| |
+ try:
|
| |
+ run_command([command, "clone", f"{namespace}/{name}"], cwd=folder)
|
| |
+ self.print_user(info_log, success=True)
|
| |
+ except MonitoringException:
|
| |
+ self.print_user(info_log, success=False)
|
| |
|
| |
|
| |
- def add_remote(name, url, folder):
|
| |
- """ Add the specified remote to the git repo in the folder with the
|
| |
- specified url.
|
| |
- """
|
| |
- info_log = f"Adding remote: {name}"
|
| |
- print_user(info_log)
|
| |
- try:
|
| |
- run_command(["git", "remote", "add", name, url], cwd=folder)
|
| |
- print_user(info_log, success=True)
|
| |
- except MonitoringException:
|
| |
- print_user(info_log, success=False)
|
| |
+ def add_remote(self, name, url, folder):
|
| |
+ """ Add the specified remote to the git repo in the folder with the
|
| |
+ specified url.
|
| |
+ """
|
| |
+ info_log = f"Adding remote: {name}"
|
| |
+ self.print_user(info_log)
|
| |
+ try:
|
| |
+ run_command(["git", "remote", "add", name, url], cwd=folder)
|
| |
+ self.print_user(info_log, success=True)
|
| |
+ except MonitoringException:
|
| |
+ self.print_user(info_log, success=False)
|
| |
|
| |
|
| |
- def switch_branch(command, name, folder):
|
| |
- """ Switch to the specified git branch in the specified git repo.
|
| |
- """
|
| |
- info_log = f"Switching to branch: {name}"
|
| |
- print_user(info_log)
|
| |
- try:
|
| |
- run_command([command, "switch-branch", f"{name}"], cwd=folder)
|
| |
- print_user(info_log, success=True)
|
| |
- except MonitoringException:
|
| |
- print_user(info_log, success=False)
|
| |
+ def switch_branch(self, command, name, folder):
|
| |
+ """ Switch to the specified git branch in the specified git repo.
|
| |
+ """
|
| |
+ info_log = f"Switching to branch: {name}"
|
| |
+ self.print_user(info_log)
|
| |
+ try:
|
| |
+ run_command([command, "switch-branch", f"{name}"], cwd=folder)
|
| |
+ self.print_user(info_log, success=True)
|
| |
+ except MonitoringException:
|
| |
+ self.print_user(info_log, success=False)
|
| |
|
| |
|
| |
- def bump_release(name, folder):
|
| |
- """ Bump the release of the spec file the specified git repo.
|
| |
- """
|
| |
- info_log = f"Bumping release of: {name}.spec"
|
| |
- print_user(info_log)
|
| |
- try:
|
| |
- run_command(["rpmdev-bumpspec", f"{name}.spec"], cwd=folder)
|
| |
- print_user(info_log, success=True)
|
| |
- except MonitoringException:
|
| |
- print_user(info_log, success=False)
|
| |
+ def bump_release(self, name, folder):
|
| |
+ """ Bump the release of the spec file the specified git repo.
|
| |
+ """
|
| |
+ info_log = f"Bumping release of: {name}.spec"
|
| |
+ self.print_user(info_log)
|
| |
+ try:
|
| |
+ run_command(["rpmdev-bumpspec", f"{name}.spec"], cwd=folder)
|
| |
+ self.print_user(info_log, success=True)
|
| |
+ except MonitoringException:
|
| |
+ self.print_user(info_log, success=False)
|
| |
|
| |
|
| |
- def commit_changes(commit_log, folder):
|
| |
- """ Commit all the changes made to *tracked* files in the git repo
|
| |
- with the specified commit log.
|
| |
- """
|
| |
- info_log = f"Commiting changes"
|
| |
- print_user(info_log)
|
| |
- try:
|
| |
- run_command(["git", "commit", "-asm", commit_log], cwd=folder)
|
| |
- print_user(info_log, success=True)
|
| |
- except MonitoringException:
|
| |
- print_user(info_log, success=False)
|
| |
+ def commit_changes(self, commit_log, folder):
|
| |
+ """ Commit all the changes made to *tracked* files in the git repo
|
| |
+ with the specified commit log.
|
| |
+ """
|
| |
+ info_log = f"Commiting changes"
|
| |
+ self.print_user(info_log)
|
| |
+ try:
|
| |
+ run_command(["git", "commit", "-asm", commit_log], cwd=folder)
|
| |
+ self.print_user(info_log, success=True)
|
| |
+ except MonitoringException:
|
| |
+ self.print_user(info_log, success=False)
|
| |
|
| |
|
| |
- def push_changes(folder, target, branch, force=False):
|
| |
- """ Push all changes using git.
|
| |
- """
|
| |
- info_log = f"Pushing changes"
|
| |
- print_user(info_log)
|
| |
- try:
|
| |
- cmd = ["git", "push", target, branch]
|
| |
- if force:
|
| |
- cmd.append("-f")
|
| |
- run_command(cmd, cwd=folder)
|
| |
- print_user(info_log, success=True)
|
| |
- except MonitoringException:
|
| |
- print_user(info_log, success=False)
|
| |
+ def push_changes(self, folder, target, branch, force=False):
|
| |
+ """ Push all changes using git.
|
| |
+ """
|
| |
+ info_log = f"Pushing changes"
|
| |
+ self.print_user(info_log)
|
| |
+ try:
|
| |
+ cmd = ["git", "push", target, branch]
|
| |
+ if force:
|
| |
+ cmd.append("-f")
|
| |
+ run_command(cmd, cwd=folder)
|
| |
+ self.print_user(info_log, success=True)
|
| |
+ except MonitoringException:
|
| |
+ self.print_user(info_log, success=False)
|
| |
|
| |
|
| |
- def pull_changes(folder, target, branch):
|
| |
- """ Pull all changes using git.
|
| |
- """
|
| |
- info_log = f"Pushing changes"
|
| |
- print_user(info_log)
|
| |
- try:
|
| |
- cmd = ["git", "pull", "--rebase", target, branch]
|
| |
- run_command(cmd, cwd=folder)
|
| |
- print_user(info_log, success=True)
|
| |
- except MonitoringException:
|
| |
- print_user(info_log, success=False)
|
| |
+ def pull_changes(self, folder, target, branch):
|
| |
+ """ Pull all changes using git.
|
| |
+ """
|
| |
+ info_log = f"Pushing changes"
|
| |
+ self.print_user(info_log)
|
| |
+ try:
|
| |
+ cmd = ["git", "pull", "--rebase", target, branch]
|
| |
+ run_command(cmd, cwd=folder)
|
| |
+ self.print_user(info_log, success=True)
|
| |
+ except MonitoringException:
|
| |
+ self.print_user(info_log, success=False)
|
| |
|
| |
|
| |
- def open_pullrequest(base_url, username, namespace, name, branch, token):
|
| |
- """ Open a pull-request from the user's fork to the main project for
|
| |
- the specified branch.
|
| |
- """
|
| |
- info_log = f"Creating PR from forks/{username}/{namespace}/{name}"
|
| |
- print_user(info_log)
|
| |
- url = "/".join(
|
| |
- [base_url.rstrip("/"), "api/0", namespace, name, "pull-request/new"]
|
| |
- )
|
| |
- data = {
|
| |
- "branch_to": branch,
|
| |
- "branch_from": branch,
|
| |
- "repo_from": name,
|
| |
- "repo_from_username": username,
|
| |
- "repo_from_namespace": namespace,
|
| |
- "initial_comment": "Testing PR",
|
| |
- "title": "Test PR for monitoring",
|
| |
- }
|
| |
- headers = {"Authorization": f"token {token}"}
|
| |
- req = requests.post(url=url, data=data, headers=headers)
|
| |
- if not req.ok:
|
| |
- print(req.text)
|
| |
- success = False
|
| |
- pr_id = None
|
| |
- pr_uid = None
|
| |
- else:
|
| |
- output = req.json()
|
| |
- pr_id = str(output["id"])
|
| |
- pr_uid = output["uid"]
|
| |
+ def open_pullrequest(self, base_url, username, namespace, name, branch, token):
|
| |
+ """ Open a pull-request from the user's fork to the main project for
|
| |
+ the specified branch.
|
| |
+ """
|
| |
+ info_log = f"Creating PR from forks/{username}/{namespace}/{name}"
|
| |
+ self.print_user(info_log)
|
| |
url = "/".join(
|
| |
- [base_url.rstrip("/"), namespace, name, "pull-request", pr_id]
|
| |
+ [base_url.rstrip("/"), "api/0", namespace, name, "pull-request/new"]
|
| |
)
|
| |
- info_log = f"PR created {url}"
|
| |
- success = True
|
| |
- print_user(info_log, success=success)
|
| |
- return (success, pr_id, pr_uid)
|
| |
-
|
| |
+ data = {
|
| |
+ "branch_to": branch,
|
| |
+ "branch_from": branch,
|
| |
+ "repo_from": name,
|
| |
+ "repo_from_username": username,
|
| |
+ "repo_from_namespace": namespace,
|
| |
+ "initial_comment": "Testing PR",
|
| |
+ "title": "Test PR for monitoring",
|
| |
+ }
|
| |
+ headers = {"Authorization": f"token {token}"}
|
| |
+ req = requests.post(url=url, data=data, headers=headers)
|
| |
+ if not req.ok:
|
| |
+ print(req.text)
|
| |
+ success = False
|
| |
+ pr_id = None
|
| |
+ pr_uid = None
|
| |
+ else:
|
| |
+ output = req.json()
|
| |
+ pr_id = str(output["id"])
|
| |
+ pr_uid = output["uid"]
|
| |
+ url = "/".join(
|
| |
+ [base_url.rstrip("/"), namespace, name, "pull-request", pr_id]
|
| |
+ )
|
| |
+ info_log = f"PR created {url}"
|
| |
+ success = True
|
| |
+ self.print_user(info_log, success=success)
|
| |
+ return (success, pr_id, pr_uid)
|
| |
|
| |
- def get_nevr(command, folder):
|
| |
- """ Get the name-epoch-version-release presently in git
|
| |
- """
|
| |
- info_log = f"Getting nevr"
|
| |
- print_user(info_log)
|
| |
- try:
|
| |
- nevr = run_command([command, "verrel"], cwd=folder)
|
| |
- print_user(info_log, success=True)
|
| |
- return nevr.strip().decode("utf-8")
|
| |
- except MonitoringException:
|
| |
- print_user(info_log, success=False)
|
| |
|
| |
+ def get_nevr(self, command, folder):
|
| |
+ """ Get the name-epoch-version-release presently in git
|
| |
+ """
|
| |
+ info_log = f"Getting nevr"
|
| |
+ self.print_user(info_log)
|
| |
+ try:
|
| |
+ nevr = run_command([command, "verrel"], cwd=folder)
|
| |
+ self.print_user(info_log, success=True)
|
| |
+ return nevr.strip().decode("utf-8")
|
| |
+ except MonitoringException:
|
| |
+ self.print_user(info_log, success=False)
|
| |
|
| |
- def build_package(command, folder, target=None):
|
| |
- """ Build the package in the current branch
|
| |
- """
|
| |
- info_log = f"Building the package"
|
| |
- print_user(info_log)
|
| |
- command = [command, "build"]
|
| |
- if target:
|
| |
- command.extend(["--target", target])
|
| |
- try:
|
| |
- run_command(command, cwd=folder)
|
| |
- print_user(info_log, success=True)
|
| |
- except MonitoringException:
|
| |
- print_user(info_log, success=False)
|
| |
|
| |
+ def build_package(self, command, folder, target=None):
|
| |
+ """ Build the package in the current branch
|
| |
+ """
|
| |
+ info_log = f"Building the package"
|
| |
+ self.print_user(info_log)
|
| |
+ command = [command, "build"]
|
| |
+ if target:
|
| |
+ command.extend(["--target", target])
|
| |
+ try:
|
| |
+ run_command(command, cwd=folder)
|
| |
+ self.print_user(info_log, success=True)
|
| |
+ except MonitoringException:
|
| |
+ self.print_user(info_log, success=False)
|
| |
+
|
| |
+
|
| |
+ def chain_build_packages(self, command, packages, folder, target=None):
|
| |
+ """ Chain-build the packages in the current branch
|
| |
+ """
|
| |
+ if not isinstance(packages, list):
|
| |
+ packages = [packages]
|
| |
+ info_log = f"Chain-building the packages: {packages + [os.path.basename(folder)]}"
|
| |
+ self.print_user(info_log)
|
| |
+ command = [command, "chain-build"]
|
| |
+ command.extend(packages)
|
| |
+ if target:
|
| |
+ command.extend(["--target", target])
|
| |
+ try:
|
| |
+ run_command(command, cwd=folder)
|
| |
+ self.print_user(info_log, success=True)
|
| |
+ except MonitoringException:
|
| |
+ self.print_user(info_log, success=False)
|
| |
|
| |
- def chain_build_packages(command, packages, folder, target=None):
|
| |
- """ Chain-build the packages in the current branch
|
| |
- """
|
| |
- if not isinstance(packages, list):
|
| |
- packages = [packages]
|
| |
- info_log = f"Chain-building the packages: {packages + [os.path.basename(folder)]}"
|
| |
- print_user(info_log)
|
| |
- command = [command, "chain-build"]
|
| |
- command.extend(packages)
|
| |
- if target:
|
| |
- command.extend(["--target", target])
|
| |
- try:
|
| |
- run_command(command, cwd=folder)
|
| |
- print_user(info_log, success=True)
|
| |
- except MonitoringException:
|
| |
- print_user(info_log, success=False)
|
| |
|
| |
+ def get_build_tags(self, koji_url, nevr, expected_ends):
|
| |
+ """ List the tags associated with the specified build.
|
| |
+ """
|
| |
+ # return
|
| |
+ start = datetime.datetime.utcnow()
|
| |
+ info_log = f"Retrieving koji tags"
|
| |
+ self.print_user(info_log)
|
| |
+ command = [
|
| |
+ "koji",
|
| |
+ ]
|
| |
+ if koji_url:
|
| |
+ command.extend([
|
| |
+ "-s",
|
| |
+ koji_url,
|
| |
+ ])
|
| |
+ command.extend([
|
| |
+ "call",
|
| |
+ "listTags",
|
| |
+ nevr
|
| |
+ ])
|
| |
|
| |
- def get_build_tags(koji_url, nevr, expected_ends):
|
| |
- """ List the tags associated with the specified build.
|
| |
- """
|
| |
- # return
|
| |
- start = datetime.datetime.utcnow()
|
| |
- info_log = f"Retrieving koji tags"
|
| |
- print_user(info_log)
|
| |
- command = [
|
| |
- "koji",
|
| |
- "-s",
|
| |
- koji_url,
|
| |
- "call",
|
| |
- "listTags",
|
| |
- nevr
|
| |
- ]
|
| |
-
|
| |
- success = False
|
| |
- tags = None
|
| |
- broke = False
|
| |
- while True:
|
| |
- try:
|
| |
- output = run_command(command)
|
| |
- output = output.decode('utf-8')
|
| |
+ success = False
|
| |
+ tags = None
|
| |
+ broke = False
|
| |
+ while True:
|
| |
try:
|
| |
- data = ast.literal_eval(output.strip())
|
| |
- except Exception:
|
| |
- print("Could not decode JSON in:")
|
| |
- print(command)
|
| |
- print(output)
|
| |
- broke = True
|
| |
- break
|
| |
- tags = [tag.get("name") for tag in data]
|
| |
- for tag_name in tags:
|
| |
- for expectation in expected_ends:
|
| |
- if tag_name.endswith(expectation):
|
| |
- success = True
|
| |
+ output = run_command(command)
|
| |
+ output = output.decode('utf-8')
|
| |
+ try:
|
| |
+ data = ast.literal_eval(output.strip())
|
| |
+ except Exception:
|
| |
+ print("Could not decode JSON in:")
|
| |
+ print(command)
|
| |
+ print(output)
|
| |
+ broke = True
|
| |
+ break
|
| |
+ tags = [tag.get("name") for tag in data]
|
| |
+ for tag_name in tags:
|
| |
+ for expectation in expected_ends:
|
| |
+ if tag_name.endswith(expectation):
|
| |
+ success = True
|
| |
+ broke = True
|
| |
+ break
|
| |
+ if success:
|
| |
broke = True
|
| |
break
|
| |
- if success:
|
| |
- broke = True
|
| |
+ if broke:
|
| |
break
|
| |
- if broke:
|
| |
- break
|
| |
|
| |
- if (datetime.datetime.utcnow() - start).seconds > (15 * 60):
|
| |
+ if (datetime.datetime.utcnow() - start).seconds > (15 * 60):
|
| |
+ success = False
|
| |
+ info_log = f"Update for {nevr} not created within 15 minutes"
|
| |
+ break
|
| |
+
|
| |
+ # Only query koji every 30 seconds
|
| |
+ time.sleep(30)
|
| |
+ except MonitoringException:
|
| |
success = False
|
| |
- info_log = f"Update for {nevr} not created within 15 minutes"
|
| |
break
|
| |
|
| |
- # Only query koji every 30 seconds
|
| |
- time.sleep(30)
|
| |
+ info_log = f"Retrieving koji tags: {tags}"
|
| |
+ self.print_user(info_log, success=success)
|
| |
+
|
| |
+
|
| |
+ def create_update(self, command, item, prod=True, username=None, password=None, from_tag=False):
|
| |
+ """ Create the update for the package built.
|
| |
+ """
|
| |
+ info_log = f"Creating a bodhi update"
|
| |
+ self.print_user(info_log)
|
| |
+ command = [
|
| |
+ command,
|
| |
+ "updates",
|
| |
+ "new",
|
| |
+ "--notes",
|
| |
+ "Bump release to test CI",
|
| |
+ "--type",
|
| |
+ "bugfix",
|
| |
+ "--autotime",
|
| |
+ ]
|
| |
+ if from_tag:
|
| |
+ command.append("--from-tag")
|
| |
+ command.append(item)
|
| |
+
|
| |
+ if not prod:
|
| |
+ command.append("--staging")
|
| |
+ if username:
|
| |
+ command.extend(["--user", username])
|
| |
+ if password:
|
| |
+ command.extend(["--password", password])
|
| |
+ try:
|
| |
+ run_command(command)
|
| |
+ self.print_user(info_log, success=True)
|
| |
except MonitoringException:
|
| |
- success = False
|
| |
- break
|
| |
+ self.print_user(info_log, success=False)
|
| |
|
| |
- info_log = f"Retrieving koji tags: {tags}"
|
| |
- print_user(info_log, success=success)
|
| |
|
| |
+ def get_update_id(self, nevr, url):
|
| |
+ """ Retrieve the update identifier from bodhi for the given nevr. """
|
| |
+ start = datetime.datetime.utcnow()
|
| |
+ info_log = f"Retrieving update created"
|
| |
+ self.print_user(info_log)
|
| |
+ url = f"{url}/updates/?builds={nevr}"
|
| |
+ updateid = None
|
| |
+ success = True
|
| |
+ while True:
|
| |
+ req = requests.get(url)
|
| |
+ data = req.json()
|
| |
+ if data["updates"]:
|
| |
+ updateid = data["updates"][0]["updateid"]
|
| |
+ if updateid:
|
| |
+ break
|
| |
|
| |
- def create_update(command, item, prod=True, username=None, password=None, from_tag=False):
|
| |
- """ Create the update for the package built.
|
| |
- """
|
| |
- info_log = f"Creating a bodhi update"
|
| |
- print_user(info_log)
|
| |
- command = [
|
| |
- command,
|
| |
- "updates",
|
| |
- "new",
|
| |
- "--notes",
|
| |
- "Bump release to test CI",
|
| |
- "--type",
|
| |
- "bugfix",
|
| |
- "--autotime",
|
| |
- ]
|
| |
- if from_tag:
|
| |
- command.append("--from-tag")
|
| |
- command.append(item)
|
| |
-
|
| |
- if not prod:
|
| |
- command.append("--staging")
|
| |
- if username:
|
| |
- command.extend(["--user", username])
|
| |
- if password:
|
| |
- command.extend(["--password", password])
|
| |
- try:
|
| |
- run_command(command)
|
| |
- print_user(info_log, success=True)
|
| |
- except MonitoringException:
|
| |
- print_user(info_log, success=False)
|
| |
-
|
| |
-
|
| |
- def get_update_id(nevr, url):
|
| |
- """ Retrieve the update identifier from bodhi for the given nevr. """
|
| |
- start = datetime.datetime.utcnow()
|
| |
- info_log = f"Retrieving update created"
|
| |
- print_user(info_log)
|
| |
- url = f"{url}/updates/?builds={nevr}"
|
| |
- updateid = None
|
| |
- success = True
|
| |
- while True:
|
| |
- req = requests.get(url)
|
| |
- data = req.json()
|
| |
- if data["updates"]:
|
| |
- updateid = data["updates"][0]["updateid"]
|
| |
- if updateid:
|
| |
- break
|
| |
-
|
| |
- if (datetime.datetime.utcnow() - start).seconds > (15 * 60):
|
| |
- success = False
|
| |
- info_log = f"Update for {nevr} not created within 15 minutes"
|
| |
- break
|
| |
+ if (datetime.datetime.utcnow() - start).seconds > (15 * 60):
|
| |
+ success = False
|
| |
+ info_log = f"Update for {nevr} not created within 15 minutes"
|
| |
+ break
|
| |
|
| |
- # Only query bodhi every 30 seconds
|
| |
- time.sleep(30)
|
| |
+ # Only query bodhi every 30 seconds
|
| |
+ time.sleep(30)
|
| |
|
| |
- print_user(info_log, success=success)
|
| |
- return updateid
|
| |
+ self.print_user(info_log, success=success)
|
| |
+ return updateid
|
| |
+
|
| |
+
|
| |
+ def lookup_results_datagrepper(
|
| |
+ self, base_url, name, topic, nevr=None, nevrs=None, rev=None,
|
| |
+ bodhi_id=None, start=None, duration=15
|
| |
+ ):
|
| |
+ """ Check the CI results in datagrepper for results about our specified
|
| |
+ build.
|
| |
+ """
|
| |
+ if start is None:
|
| |
+ start = datetime.datetime.utcnow()
|
| |
+ info_log = f"Checking datagrepper for {name} messages"
|
| |
+ self.print_user(info_log)
|
| |
+ # Start pulling messages 10 minutes before now
|
| |
+ start_time = start - datetime.timedelta(minutes=10)
|
| |
+ # Limiting the number of row per page to 10 allows for quicker results
|
| |
+ url = (
|
| |
+ base_url + f"?topic={topic}"
|
| |
+ f"&start={start_time.timestamp()}&row_per_page=10"
|
| |
+ )
|
| |
|
| |
+ success = None
|
| |
+ returned_status = None
|
| |
+ info_log = None
|
| |
+ nevrs = nevrs or []
|
| |
+ while True:
|
| |
+ # We're assuming here that there won't be more than 100 messages for
|
| |
+ # that topic coming in between the one we're interested in and when we
|
| |
+ # are looking for it (10*10 == 100)
|
| |
+ for page in range(1, 11):
|
| |
+ end_url = url
|
| |
+ end_url += f"&page={page}"
|
| |
+ data = requests.get(end_url).json()
|
| |
+ for message in data["raw_messages"]:
|
| |
+
|
| |
+ # Old message format from the CI pipeline
|
| |
+ if "ci.pipeline" in message["topic"] and (
|
| |
+ message["msg"]["nvr"] == nevr
|
| |
+ or message["msg"]["nvr"] in nevrs
|
| |
+ or message["msg"]["rev"] == rev
|
| |
+ ):
|
| |
+ success = True
|
| |
+ returned_status = message["msg"]["status"]
|
| |
+ break
|
| |
|
| |
- def lookup_results_datagrepper(
|
| |
- base_url, name, topic, nevr=None, nevrs=None, rev=None, bodhi_id=None,
|
| |
- start=None, duration=15
|
| |
- ):
|
| |
- """ Check the CI results in datagrepper for results about our specified
|
| |
- build.
|
| |
- """
|
| |
- if start is None:
|
| |
- start = datetime.datetime.utcnow()
|
| |
- info_log = f"Checking datagrepper for {name} messages"
|
| |
- print_user(info_log)
|
| |
- # Start pulling messages 10 minutes before now
|
| |
- start_time = start - datetime.timedelta(minutes=10)
|
| |
- # Limiting the number of row per page to 10 allows for quicker results
|
| |
- url = (
|
| |
- base_url + f"?topic={topic}"
|
| |
- f"&start={start_time.timestamp()}&row_per_page=10"
|
| |
- )
|
| |
-
|
| |
- success = None
|
| |
- returned_status = None
|
| |
- info_log = None
|
| |
- nevrs = nevrs or []
|
| |
- while True:
|
| |
- # We're assuming here that there won't be more than 100 messages for
|
| |
- # that topic coming in between the one we're interested in and when we
|
| |
- # are looking for it (10*10 == 100)
|
| |
- for page in range(1, 11):
|
| |
- end_url = url
|
| |
- end_url += f"&page={page}"
|
| |
- data = requests.get(end_url).json()
|
| |
- for message in data["raw_messages"]:
|
| |
-
|
| |
- # Old message format from the CI pipeline
|
| |
- if "ci.pipeline" in message["topic"] and (
|
| |
- message["msg"]["nvr"] == nevr
|
| |
- or message["msg"]["nvr"] in nevrs
|
| |
- or message["msg"]["rev"] == rev
|
| |
- ):
|
| |
- success = True
|
| |
- returned_status = message["msg"]["status"]
|
| |
- break
|
| |
+ # New message format from the CI pipeline for koji builds
|
| |
+ if (
|
| |
+ "ci.koji-build" in message["topic"] and
|
| |
+ message["msg"]["artifact"]["nvr"] == nevr
|
| |
+ ):
|
| |
+ if message["topic"].endswith("test.complete"):
|
| |
+ success = True
|
| |
+ returned_status = message["msg"]["test"]["result"]
|
| |
+ elif message["topic"].endswith("test.error"):
|
| |
+ success = True
|
| |
+ returned_status = "error"
|
| |
+ elif message["topic"].endswith("test.running"):
|
| |
+ success = True
|
| |
+ returned_status = "running"
|
| |
+ break
|
| |
|
| |
- # New message format from the CI pipeline for koji builds
|
| |
- if (
|
| |
- "ci.koji-build" in message["topic"] and
|
| |
- message["msg"]["artifact"]["nvr"] == nevr
|
| |
- ):
|
| |
- if message["topic"].endswith("test.complete"):
|
| |
- success = True
|
| |
- returned_status = message["msg"]["test"]["result"]
|
| |
- elif message["topic"].endswith("test.error"):
|
| |
- success = True
|
| |
- returned_status = "error"
|
| |
- elif message["topic"].endswith("test.running"):
|
| |
- success = True
|
| |
- returned_status = "running"
|
| |
- break
|
| |
+ # New message format from the CI pipeline for dist-git PR
|
| |
+ if (
|
| |
+ "ci.dist-git-pr" in message["topic"]
|
| |
+ and message["msg"]["artifact"]["type"] == "pull-request"
|
| |
+ and message["msg"]["artifact"]["uid"] == rev
|
| |
+ ):
|
| |
+ if message["topic"].endswith("test.complete"):
|
| |
+ success = True
|
| |
+ returned_status = message["msg"]["test"]["result"]
|
| |
+ elif message["topic"].endswith("test.error"):
|
| |
+ success = True
|
| |
+ returned_status = "error"
|
| |
+ elif message["topic"].endswith("test.running"):
|
| |
+ success = True
|
| |
+ returned_status = "running"
|
| |
+ break
|
| |
|
| |
- # New message format from the CI pipeline for dist-git PR
|
| |
- if (
|
| |
- "ci.dist-git-pr" in message["topic"]
|
| |
- and message["msg"]["artifact"]["type"] == "pull-request"
|
| |
- and message["msg"]["artifact"]["uid"] == rev
|
| |
- ):
|
| |
- if message["topic"].endswith("test.complete"):
|
| |
- success = True
|
| |
- returned_status = message["msg"]["test"]["result"]
|
| |
- elif message["topic"].endswith("test.error"):
|
| |
+ # resultsdb messages
|
| |
+ if (
|
| |
+ "resultsdb" in message["topic"]
|
| |
+ and "nvr" in message["msg"]["data"]
|
| |
+ and (nevr in message["msg"]["data"]["nvr"]
|
| |
+ or message["msg"]["data"]["nvr"] in nevrs)
|
| |
+ ):
|
| |
success = True
|
| |
- returned_status = "error"
|
| |
- elif message["topic"].endswith("test.running"):
|
| |
- success = True
|
| |
- returned_status = "running"
|
| |
- break
|
| |
-
|
| |
- # resultsdb messages
|
| |
- if (
|
| |
- "resultsdb" in message["topic"]
|
| |
- and "nvr" in message["msg"]["data"]
|
| |
- and (nevr in message["msg"]["data"]["nvr"]
|
| |
- or message["msg"]["data"]["nvr"] in nevrs)
|
| |
- ):
|
| |
- success = True
|
| |
- returned_status = message["msg"]["outcome"]
|
| |
- break
|
| |
+ returned_status = message["msg"]["outcome"]
|
| |
+ break
|
| |
|
| |
- # greenwave messages
|
| |
- if (
|
| |
- "greenwave" in message["topic"]
|
| |
- and (
|
| |
- message["msg"]["subject_identifier"] == nevr
|
| |
- or
|
| |
- message["msg"]["subject_identifier"] in nevrs
|
| |
- )
|
| |
- ):
|
| |
- success = True
|
| |
- returned_status = message["msg"]["policies_satisfied"]
|
| |
- break
|
| |
+ # greenwave messages
|
| |
+ if (
|
| |
+ "greenwave" in message["topic"]
|
| |
+ and (
|
| |
+ message["msg"]["subject_identifier"] == nevr
|
| |
+ or
|
| |
+ message["msg"]["subject_identifier"] in nevrs
|
| |
+ )
|
| |
+ ):
|
| |
+ success = True
|
| |
+ returned_status = message["msg"]["policies_satisfied"]
|
| |
+ break
|
| |
|
| |
- # waiverdb messages
|
| |
- if (
|
| |
- "waiverdb" in message["topic"]
|
| |
- and (
|
| |
- message["msg"]["subject_identifier"] == nevr
|
| |
- or
|
| |
- message["msg"]["subject_identifier"] in nevrs
|
| |
- )
|
| |
- ):
|
| |
- success = True
|
| |
- returned_status = ""
|
| |
- break
|
| |
+ # waiverdb messages
|
| |
+ if (
|
| |
+ "waiverdb" in message["topic"]
|
| |
+ and (
|
| |
+ message["msg"]["subject_identifier"] == nevr
|
| |
+ or
|
| |
+ message["msg"]["subject_identifier"] in nevrs
|
| |
+ )
|
| |
+ ):
|
| |
+ success = True
|
| |
+ returned_status = ""
|
| |
+ break
|
| |
|
| |
- # bodhi messages
|
| |
- if (
|
| |
- "bodhi.update.status.testing" in message["topic"]
|
| |
- and message["msg"]["artifact"]["id"].startswith(bodhi_id)
|
| |
- ):
|
| |
- success = True
|
| |
- returned_status = ""
|
| |
+ # bodhi messages
|
| |
+ if (
|
| |
+ "bodhi.update.status.testing" in message["topic"]
|
| |
+ and message["msg"]["artifact"]["id"].startswith(bodhi_id)
|
| |
+ ):
|
| |
+ success = True
|
| |
+ returned_status = ""
|
| |
+ break
|
| |
+ if success is not None:
|
| |
break
|
| |
if success is not None:
|
| |
break
|
| |
- if success is not None:
|
| |
- break
|
| |
|
| |
- if (datetime.datetime.utcnow() - start).seconds > (duration * 60):
|
| |
- success = False
|
| |
- info_log = f"{name} results not found in datagrepper"
|
| |
- break
|
| |
+ if (datetime.datetime.utcnow() - start).seconds > (duration * 60):
|
| |
+ success = False
|
| |
+ info_log = f"{name} results not found in datagrepper"
|
| |
+ break
|
| |
|
| |
- # Only query datagrepper every 30 seconds
|
| |
- time.sleep(30)
|
| |
+ # Only query datagrepper every 30 seconds
|
| |
+ time.sleep(30)
|
| |
|
| |
- if info_log is None:
|
| |
- info_log = f"{name} results in datagrepper returned {returned_status}"
|
| |
+ if info_log is None:
|
| |
+ info_log = f"{name} results in datagrepper returned {returned_status}"
|
| |
|
| |
- end = datetime.datetime.utcnow()
|
| |
- info_log += f" - ran for: {(end - start).seconds}s"
|
| |
- print_user(info_log, success=success)
|
| |
+ end = datetime.datetime.utcnow()
|
| |
+ info_log += f" - ran for: {(end - start).seconds}s"
|
| |
+ self.print_user(info_log, success=success)
|
| |
|
| |
|
| |
- def lookup_ci_resultsdb(nevr, name, url):
|
| |
- """ Check the CI results in the specified resultsdb for results about
|
| |
- our specified build.
|
| |
- """
|
| |
- start = datetime.datetime.utcnow()
|
| |
- info_log = f"Checking {name} for CI results "
|
| |
- print_user(info_log)
|
| |
def lookup_ci_resultsdb(self, nevr, name, url):
    """ Check the CI results in the specified resultsdb for results about
    our specified build.

    Polls the resultsdb at ``url`` every 30 seconds until a result whose
    NVR matches ``nevr`` shows up, then reports that result's status via
    ``print_user``. Gives up after 15 minutes.

    :param nevr: name-epoch:version-release of the build to look for
    :param name: human-readable name of the resultsdb, used in messages
    :param url: base URL of the resultsdb results endpoint
    """
    start = datetime.datetime.utcnow()
    info_log = f"Checking {name} for CI results "
    self.print_user(info_log)
    # Production and staging pipelines publish on different topics.
    topic = "org.centos.prod.ci.pipeline.allpackages-build.complete"
    if ".stg" in url:
        topic = "org.centos.stage.ci.pipeline.allpackages-build.complete"
    url = f"{url}?testcases={topic}"

    success = False
    returned_status = None
    info_log = None
    while True:
        # Assume we won't have more than 3 pages of results coming in b/w
        # our checks
        for page in range(3):
            end_url = f"{url}&page={page}"
            # NOTE(review): a failed HTTP request or bad JSON propagates
            # to the caller here — presumably acceptable for a
            # monitoring script; confirm if it should retry instead.
            data = requests.get(end_url).json()
            for result in data["data"]:
                if nevr in result["data"]["nvr"]:
                    success = True
                    returned_status = result["data"]["status"][0]
                    break
            if success:
                break
        if success:
            break

        # total_seconds() so the comparison stays correct even if the
        # run spans a day boundary (.seconds alone wraps at 86400).
        if (datetime.datetime.utcnow() - start).total_seconds() > (15 * 60):
            success = False
            info_log = f"CI results did not show in {name} for {nevr} within 15 minutes"
            break

        # Only query datagrepper every 30 seconds
        time.sleep(30)

    if info_log is None:
        info_log = f"CI results in {name} returned {returned_status}"

    end = datetime.datetime.utcnow()
    info_log += f" - ran for: {(end - start).seconds}s"
    self.print_user(info_log, success=success)
| |
def waive_update(self, command, updateid, prod=True, username=None, password=None):
    """ Waive all the tests results for the specified update using bodhi's
    CLI.

    :param command: path to the bodhi CLI executable
    :param updateid: identifier of the bodhi update to waive
    :param prod: if False, target the staging bodhi instance
    :param username: optional username passed to the CLI
    :param password: optional password passed to the CLI
    """
    info_log = "Waiving test results for bodhi update"
    self.print_user(info_log)
    # Build the CLI invocation without shadowing the `command` argument.
    cli_args = [
        command,
        "updates",
        "waive",
        updateid,
        "'This is fine, we are testing the workflow'",
        "--debug",
    ]
    if not prod:
        cli_args.append("--staging")
    if username:
        cli_args.extend(["--user", username])
    if password:
        cli_args.extend(["--password", password])
    try:
        run_command(cli_args)
    except MonitoringException:
        self.print_user(info_log, success=False)
    else:
        self.print_user(info_log, success=True)
|
| |
def get_pr_flag(
    self,
    base_url,
    username,
    namespace,
    name,
    pr_id,
    flag_username,
    flag_status,
    duration=10,
):
    """ Retrieve the flags of the PR and assert the last one from the
    specified flag_username has the given status.

    :param base_url: base URL of the pagure instance
    :param username: unused; kept for interface compatibility
    :param namespace: namespace of the project
    :param name: name of the project
    :param pr_id: identifier of the pull-request
    :param flag_username: user whose flag is checked
    :param flag_status: status the flag is expected to have
    :param duration: how long to keep retrying, in minutes
    :raises MonitoringException: if no successful response was obtained
    """
    pr = "/".join([namespace, name, "pull-request", pr_id])
    info_log = f"Retreiving flags for PR: {pr}"
    self.print_user(info_log)
    url = "/".join([base_url.rstrip("/"), "api/0", pr, "flag"])

    start = datetime.datetime.utcnow()
    success = False
    # Bug fix: `req` must be bound after the loop even when every single
    # request attempt raised ConnectionError (previously: NameError).
    req = None

    while True:
        try:
            req = requests.get(url=url)
            if req.ok:
                break
        except requests.exceptions.ConnectionError:
            # Bug fix: the original `continue`d here, which skipped both
            # the timeout check and the sleep below — an infinite tight
            # retry loop on a persistent connection error.
            pass

        if (datetime.datetime.utcnow() - start).total_seconds() > (duration * 60):
            success = False
            info_log = f"Failed to retrieve flags for PR: {pr}"
            break

        # Only query pagure every 30 seconds
        time.sleep(30)

    if req is None or not req.ok:
        text = req.text if req is not None else "no response received"
        print(text)
        self.logs.append(f"Error retrieving PR flags: {text}")
        raise MonitoringException("Error retrieving PR flags")
    else:
        output = req.json()
        for flag in output["flags"]:
            # Flags are returned most-recent first; check only the
            # latest flag set by flag_username.
            if flag["username"] == flag_username:
                info_log = f"Retreived flag {flag['status']} on PR"
                success = flag["status"] == flag_status
                break

    self.print_user(info_log, success=success)
|
| |
def merge_pr(self, base_url, username, namespace, name, pr_id, token):
    """ Merge the specified PR.

    :param base_url: base URL of the pagure instance
    :param username: unused; kept for interface compatibility
    :param namespace: namespace of the project
    :param name: name of the project
    :param pr_id: identifier of the pull-request to merge
    :param token: pagure API token used for authentication
    :raises MonitoringException: if the merge request fails
    """
    pr = "/".join([namespace, name, "pull-request", pr_id])
    info_log = f"Merge PR: {pr}"
    self.print_user(info_log)
    url = "/".join([base_url.rstrip("/"), "api/0", pr, "merge"])
    headers = {"Authorization": f"token {token}"}
    req = requests.post(url=url, data={"wait": True}, headers=headers)
    success = False
    if not req.ok:
        print(req.text)
        # Bug fix: self.logs is a list; the original called it like a
        # function (self.logs(...)), which raises TypeError.
        self.logs.append(f"Error Merging flags: {req.text}")
        raise MonitoringException("Error merging flags")
    else:
        success = True

    self.print_user(info_log, success=success)
|
| |
def finalize(self, start):
    """ Record and display how long the whole run took.

    :param start: datetime.datetime at which the run started (UTC)
    """
    elapsed = (datetime.datetime.utcnow() - start).seconds
    summary = f"Ran for {elapsed} seconds ({elapsed/60:.2f} minutes)"
    self.logs.append(summary)
    print(summary)
|
| |
def run_command(command, cwd=None):
    """ Run the specified command in a specific working directory if one
    is specified.

    :param command: the command to run, as a list of arguments
    :param cwd: optional working directory in which to run the command
    :return: the command's standard output, as bytes
    :raises MonitoringException: if the command exits non-zero
    """
    try:
        output = subprocess.check_output(
            command, cwd=cwd, stderr=subprocess.PIPE
        )
    except subprocess.CalledProcessError as e:
        # Lazy %-style args so formatting only happens if the record is
        # actually emitted.
        _log.error(
            "Command `%s` return code: `%s`", " ".join(command), e.returncode
        )
        _log.error("stdout:\n-------\n%s", e.stdout)
        _log.error("stderr:\n-------\n%s", e.stderr)
        # Chain the original error so tracebacks keep the root cause.
        raise MonitoringException("Command failed to run") from e

    return output
| |