#305 inventory: Introduce Podman (WIP)
Opened 5 years ago by ssahani. Modified 5 years ago
ssahani/standard-test-roles podman  into  master

inventory: Introduce Podman
Susant Sahani • 5 years ago  
@@ -0,0 +1,231 @@ 

+ #!/usr/bin/python3

+ # SPDX-License-Identifier: MIT

+ # Copyright (c) 2019 Red Hat Inc.

+ 

+ import argparse

+ import distutils.util

+ import errno

+ import json

+ import logging

+ import os

+ import sys

+ import shlex

+ import signal

+ import subprocess

+ import time

+ from distutils import dir_util

+ 

+ EMPTY_INVENTORY = {}

+ LOG_FILE = "default_provisioners.log"

+ CONTAINER_TEMP_ARTIFACTS = "/tmp/artifacts"

+ PODMAN_BIN = "/usr/bin/podman"

+ 

+ 

def print_bad_inventory():
    """Emit a fallback inventory pointing at an unreachable host.

    Called on any uncaught exception so that Ansible never falls back to
    running the playbook on localhost.
    """
    placeholder = "fake_host"
    unreachable = "standard-inventory-qcow2_failed_check_logs"
    group = {"hosts": [placeholder], "vars": {}}
    inventory = {
        "localhost": dict(group),
        "subjects": dict(group),
        "_meta": {"hostvars": {placeholder: {"ansible_host": unreachable}}},
    }
    sys.stdout.write(json.dumps(inventory, indent=4, separators=(',', ': ')))

+ 

+ 

def get_artifact_path(path=""):
    """Return the path of *path* inside the artifacts directory.

    The artifacts directory is taken from $TEST_ARTIFACTS (default:
    ./artifacts) and created if missing.  With the default path="" the
    artifacts directory itself is returned (with a trailing separator).

    Raises OSError if the artifacts path exists but is not a directory.
    """
    artifacts = os.environ.get("TEST_ARTIFACTS", os.path.join(os.getcwd(), "artifacts"))
    # exist_ok replaces the manual errno.EEXIST check (and matches the
    # makedirs call used later in this file); it still raises if the path
    # exists but is not a directory.
    os.makedirs(artifacts, exist_ok=True)
    return os.path.join(artifacts, path)

+ 

+ 

def copy_artifacts_from_container(name):
    """Copy /tmp/artifacts out of container *name* to the host.

    Mounts the container filesystem with `podman mount`, copies the
    artifacts tree to CONTAINER_TEMP_ARTIFACTS on the host, and always
    unmounts again.
    """
    # `podman mount` prints the container's root path on the host.
    mount_output = subprocess.check_output([PODMAN_BIN, "mount", name])
    container_root = mount_output.rstrip().decode('utf-8')
    src = container_root + CONTAINER_TEMP_ARTIFACTS

    try:
        # The test may not have produced any artifacts; copy_tree would
        # raise on a missing source and previously left the container
        # mounted.  Guard the copy and unmount unconditionally.
        if os.path.isdir(src):
            dir_util.copy_tree(src, CONTAINER_TEMP_ARTIFACTS)
    finally:
        subprocess.call([PODMAN_BIN, "umount", name])

+ 

+ 

def inv_list(subjects):
    """Build the full Ansible inventory for every subject in *subjects*.

    Subjects that yield no host variables are skipped; if none remain,
    the empty inventory is returned.
    """
    hosts, variables = [], {}

    for subject in subjects:
        host, host_vars = inv_host(subject)
        if not host_vars:
            continue
        hosts.append(host)
        variables[host] = host_vars

    if not hosts:
        return EMPTY_INVENTORY

    return {
        "localhost": {"hosts": hosts, "vars": {}},
        "subjects": {"hosts": hosts, "vars": {}},
        "_meta": {"hostvars": variables},
    }

+ 

+ 

def inv_host(subject):
    """Provision a podman container for *subject*; return (name, variables).

    The calling process returns the inventory data immediately.  A forked
    child daemonizes and waits for the parent (ansible) to exit, then dumps
    the container logs, collects artifacts and stops the container.

    NOTE(review): *subject* is split on ":" and the second field becomes the
    container name, so "registry/fedora:29" yields name "29" — confirm the
    intended TEST_SUBJECTS format.  A subject without ":" raises IndexError,
    which the top-level handler turns into the bad inventory.

    Raises RuntimeError if the podman service or the container cannot be
    started.
    """
    words = subject.split(":")
    image = words[0]
    name = words[1]

    null = open(os.devnull, 'w')
    try:
        # Keep diagnostics on the controlling terminal when we have one.
        tty = os.open("/dev/tty", os.O_WRONLY)
        os.dup2(tty, 2)
    except OSError:
        tty = None

    logger.info("Launching Podman container for {0}".format(image))

    # Make sure the podman service is running
    try:
        subprocess.check_call(["/usr/bin/systemctl", "start", "io.podman.service"])
    except subprocess.CalledProcessError as exc:
        raise RuntimeError("Could not start podman service") from exc

    # Reuse an existing container with this name; otherwise create one that
    # just sleeps forever so Ansible can exec into it.
    try:
        subprocess.check_call([PODMAN_BIN, "start", name], stdout=sys.stderr.fileno())
    except subprocess.CalledProcessError:
        cmd = [PODMAN_BIN, "run", "-d", "--name", name, "--entrypoint=/bin/sh", image, "-c", "sleep infinity"]
        try:
            subprocess.check_call(cmd, stdout=sys.stderr.fileno())
        except subprocess.CalledProcessError as exc:
            raise RuntimeError("Could not start container image: {0}".format(image)) from exc

    # Directory to place artifacts
    artifacts = os.environ.get("TEST_ARTIFACTS", os.path.join(os.getcwd(), "artifacts"))

    # The Ansible host variables for the container.
    variables = {
        "ansible_connection": "podman",
        "ansible_python_interpreter": "/usr/bin/python3"
    }

    # Parent returns the inventory data; the child lives on as a watchdog.
    ppid = os.getppid()
    pid = os.fork()
    if pid > 0:
        return name, variables

    # Daemonize and watch the processes
    os.chdir("/")
    os.setsid()
    os.umask(0)
    if tty is None:
        tty = null.fileno()

    # Redirect stdin to /dev/null, stdout/stderr to the tty (or null).
    os.dup2(null.fileno(), 0)
    os.dup2(tty, 1)
    os.dup2(tty, 2)

    # Now wait for the parent process to go away, then clean up.
    while True:
        time.sleep(3)
        try:
            os.kill(ppid, 0)
        except OSError:
            break  # Either of the processes no longer exist

    # With TEST_DEBUG=1 keep the container alive until we are explicitly
    # killed, so it can be inspected interactively.
    if diagnose:
        def _signal_handler(*args):
            logger.info("Diagnose ending.")

        logger.info("kill {0} # when finished".format(os.getpid()))
        signal.signal(signal.SIGTERM, _signal_handler)
        signal.pause()

    # Dump the container logs
    os.makedirs(artifacts, exist_ok=True)
    log = os.path.join(artifacts, "{0}.log".format(os.path.basename(image)))

    with open(log, "w") as f:
        subprocess.call([PODMAN_BIN, "logs", name], stdout=f.fileno())

    # Fetch artifacts from container
    copy_artifacts_from_container(name)
    dir_util.copy_tree(CONTAINER_TEMP_ARTIFACTS, artifacts)

    # Stop the container
    subprocess.check_call([PODMAN_BIN, "stop", name])

    sys.exit(0)

+ 

+ 

def main(argv):
    """Entry point: configure logging, parse options, print inventory JSON.

    Honours TEST_DEBUG=1 (verbose stderr logging plus container diagnose
    mode) and TEST_SUBJECTS (default list of subjects).
    """
    global logger
    global diagnose
    logger = logging.getLogger(__name__)
    logger.setLevel(logging.DEBUG)

    # stderr output
    conhandler = logging.StreamHandler()

    # Print to stderr by default messages with level >= warning, can be
    # changed with setting TEST_DEBUG=1.
    try:
        diagnose = distutils.util.strtobool(os.getenv("TEST_DEBUG", "0"))
    except ValueError:
        diagnose = 0
    conhandler.setLevel(logging.WARNING)

    if diagnose:
        # Collect all messages with any log level to stderr.
        conhandler.setLevel(logging.NOTSET)

    # Log format for stderr.
    log_format = "[%(levelname)-5.5s] {}: %(message)s".format(os.path.basename(__file__))
    formatter = logging.Formatter(log_format)
    conhandler.setFormatter(formatter)
    logger.addHandler(conhandler)

    parser = argparse.ArgumentParser(description="Inventory for a container image in a registry")
    parser.add_argument("--list", action="store_true", help="List all hosts")
    parser.add_argument('--host', help="Get host variables")
    parser.add_argument("subjects", nargs="*", default=shlex.split(os.environ.get("TEST_SUBJECTS", "")))
    opts = parser.parse_args()

    # Send logs to common logfile for all default provisioners.
    log_file = get_artifact_path(LOG_FILE)
    fhandler = logging.FileHandler(log_file)

    # Collect all messages with any log level to log file.
    fhandler.setLevel(logging.NOTSET)
    log_format = ("%(asctime)s [{}/%(threadName)-12.12s] [%(levelname)-5.5s]:"
                  "%(message)s").format(os.path.basename(__file__))
    logFormatter = logging.Formatter(log_format)
    fhandler.setFormatter(logFormatter)
    logger.addHandler(fhandler)

    logger.info("Start provisioner.")
    if opts.host:
        _, data = inv_host(opts.host)
    else:
        data = inv_list(opts.subjects)

    sys.stdout.write(json.dumps(data, indent=4, separators=(',', ': ')))

+ 

+ 

if __name__ == '__main__':
    status = -1
    try:
        main(sys.argv)
        status = 0
    except Exception:
        # Emit an inventory that cannot run anything on localhost.
        print_bad_inventory()
        # Backtrace goes to the log file; with TEST_DEBUG=1 also to stderr.
        logger.info("Fatal error in provision script.", exc_info=True)

    sys.exit(status)

@@ -6,7 +6,9 @@ 

      src: "{{ remote_artifacts }}/"

      mode: pull

      ssh_args: "-o UserKnownHostsFile=/dev/null"

-   when: artifacts|default("") != ""

+   when:

+       - artifacts|default("") != ""

+       - ansible_connection|default("") != "podman"

  

  - name: Report role result

    vars:

@@ -25,12 +25,19 @@ 

  - name: Make artifacts directory

    file: path={{ remote_artifacts }} state=directory owner=root mode=755 recurse=yes

  

+ - name: Copy files to local podman container

+   copy:

+     src: "{{ playbook_dir }}/"

+     dest: "{{ tenv_workdir }}"

+   when: ansible_connection|default("") == "podman"

+ 

  # Next task requires rsync on test environment

  - name: Copy tests to test environment

    synchronize:

      src: "{{ playbook_dir }}/"

      dest: "{{ tenv_workdir }}"

      ssh_args: "-o UserKnownHostsFile=/dev/null"

+   when: ansible_connection|default("") != "podman"

  

  - name: Start services defined in `required_services`

    service:

This work allows running tests in a podman container.

How to test?

$ export TEST_SUBJECTS=registry.fedoraproject.org/fedora:29
$ ansible-playbook -vvv -i ./standard-inventory-podman tests.yml

Does it work using dynamic inventory?

I guess we would need to set it on test subjects, something like

TEST_SUBJECTS=podman:docker.io/fedora:rawhide

For example for docker we run:

ANSIBLE_INVENTORY=$(test -e inventory && echo inventory || echo /usr/share/ansible/inventory) TEST_SUBJECTS=docker:docker.io/fedora:rawhide ansible-playbook --tags=container tests.yml

I don't like the idea that another variable needs to be set export container=podman

I am still working on that to enhance it. how we propagate it. Any idea ? Once that is clear we can make that. There are some enhancement I am working on now more podman specific rather than docker.

export container=podman

Hmm, this is there to distinguish between rsync and the general copy. rsync does not support podman. Probably adding something to TEST_SUBJECTS would do that.

rebased onto a971023086784c22bc261dea898605f1f9b3facb

5 years ago

pretty please pagure-ci rebuild

5 years ago

rebased onto d80883ac9a4c280e9f3bf439d97ddb901aa073b7

5 years ago

pretty please pagure-ci rebuild

5 years ago

rebased onto 38fd9b0a1aae9fb323c0d99c2a981ebeb1073627

5 years ago

rebased onto c1edbd229ed23c6d9d5ea4909dfae29eb89079a2

5 years ago

rebased onto 0a2a8e057b56b86c3bc128def7e93bab18fa8e43

5 years ago

rebased onto 3bb7290a0cd2d3b441f1e73d1c89e9cd0baeb1cb

5 years ago

rebased onto f452764febaeac3df38b10944621dcbe20780c25

5 years ago

rebased onto 2ba12c7e0e8ab1c1a8dd519a603d399465e63039

5 years ago

rebased onto 70b81239859bb62fdd1785f3ababa4a8af0bdb8c

5 years ago

rebased onto 70b81239859bb62fdd1785f3ababa4a8af0bdb8c

5 years ago

rebased onto 7fb936c60bef72b27412eef8a9feebb19034cb65

5 years ago

rebased onto 6f2e6d7

5 years ago
TASK [str-common-final : Report role result] ***********************************
fatal: [localhost]: FAILED! => {
    "msg": [
        "Tests failed: True",
        "Tests msg: FAIL test-beakerlib-fail",
        ""
    ]
}
...ignoring

Not sure why this is failing

May be something wrong with

@@ -25,12 +25,19 @@ 
  - name: Make artifacts directory
    file: path={{ remote_artifacts }} state=directory owner=root mode=755 recurse=yes

+ - name: Copy files to local podman container
+   copy:
+     src: "{{ playbook_dir }}/"
+     dest: "{{ tenv_workdir }}"
+   when: ansible_connection|default("") == "podman"
+ 
  # Next task requires rsync on test environment
  - name: Copy tests to test environment
    synchronize:
      src: "{{ playbook_dir }}/"
      dest: "{{ tenv_workdir }}"
      ssh_args: "-o UserKnownHostsFile=/dev/null"
+   when: ansible_connection|default("") != "podman"

@ssahani hi.
Could you please give an example of
TEST_SUBJECTS=
env variable that this provisioner expects ?

For example Docker provisioner has next code:
https://pagure.io/standard-test-roles/blob/master/f/inventory/standard-inventory-docker#_70

def inv_host(subject, docker_extra_args):
    if not subject.startswith("docker:"):
        return None, EMPTY_INVENTORY

@ssahani
it seems that Ansible module synchronize doesn't support podman protocol.
https://github.com/ansible/ansible/blob/devel/lib/ansible/modules/files/synchronize.py#L194
Could you please figure out if synchronize support podman protocol?
Maybe you can get more info from podman-implementation in Ansible author : @ttomecek

@ssahani how do you copy artifacts from container back to host?

I suggest copying the artifact logs in the inventory script if that's possible. If you wanted to use podman connection and synchronize, you would need to update code in synchronize module so that it would be able to handle podman connection.

copying the artifact logs in the inventory script

Proposed approach is very limited, and it is very different from what we do in other provisioners .

Sync logic must be inside ansible-roles, to be consistent.

If we cannot construct ansible-task that does sync, than I would propose to abandon podman as it has very limited support from Ansible point of view.

Honestly, I don't understand how are you able to use podman connection plugin since it was not released yet: https://github.com/ansible/ansible/tree/b4cba61b6725e2d2a9c9039504648ba552abff6b/lib/ansible/plugins/connection no podman in the list (ref of ansible 2.7.9)

You can do it on Ansible level with copy for example, instead of synchronize.

You can also wait a few months for the 2.8 release of ansible and ideally open an issue upstream to support synchronize with podman.

@ssahani hi.
Could you please give an example of
TEST_SUBJECTS=
env variable that this provisioner expects ?
For example Docker provisioner has next code:
https://pagure.io/standard-test-roles/blob/master/f/inventory/standard-inventory-docker#_70
def inv_host(subject, docker_extra_args):
if not subject.startswith("docker:"):
return None, EMPTY_INVENTORY

export TEST_SUBJECTS=registry.fedoraproject.org/fedora:29

@ssahani
it seems that Ansible module synchronize doesn't support podman protocol.
https://github.com/ansible/ansible/blob/devel/lib/ansible/modules/files/synchronize.py#L194

Yes it does not. That is why I replaced with copy module

Could you please figure out if synchronize support podman protocol?
Maybe you can get more info from podman-implementation in Ansible author : @ttomecek
@ssahani how do you copy artifacts from container back to host?

No it does not support

I suggest copying the artifact logs in the inventory script if that's possible. If you wanted to use podman connection and synchronize, you would need to update code in synchronize module so that it would be able to handle podman connection.

That is how it is implemented now.

copying the artifact logs in the inventory script

Proposed approach is very limited, and it is very different from what we do in other provisioners .

True. It's a different approach.

Sync logic must be inside ansible-roles, to be consistent.
If we cannot construct ansible-task that does sync, than I would propose to abandon podman as it has very limited support from Ansible point of view.

Yes. Even if we merge this, it won't be usable until the podman connection module lands in Ansible 2.8. We need to wait for that version, and for synchronize support as well.

We just can abandon