#12 update docker and rhts roles to be compatible with revised cloud and beakerlib roles
Merged 6 years ago by merlinm. Opened 6 years ago by merlinm.

@@ -1,1 +1,2 @@

  localhost ansible_ssh_port=2222 ansible_ssh_host=127.0.0.3 ansible_ssh_user=root ansible_ssh_pass=foobar ansible_ssh_common_args='-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'

+ executor ansible_ssh_host=127.0.0.1 ansible_connection=local
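
For reference, both inventory entries can be exercised with ad-hoc commands along these lines; the inventory path is an assumption based on the `-i` argument used in the role tasks later in this diff:

```sh
# Sketch only: confirm both hosts answer. The path below is assumed from the
# role_path reference further down; adjust it to wherever the file lives.
ansible -i roles/standard-test-docker/files/docker-inventory executor -m ping
# The localhost entry authenticates with a password, so sshpass must be
# installed on the executor for this one to connect.
ansible -i roles/standard-test-docker/files/docker-inventory localhost -m ping
```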

@@ -1,15 +1,30 @@

  #!/bin/sh

+ authorized_key=

  python_version=2

+ rootpass=foobar

  while [ $# -gt 0 ]; do

      case "$1" in

-     --py3) python_version=3 ;;

+     --py3)

+         python_version=3

+         ;;

+     --authorized-key)

+         authorized_key="$2"

+         shift

+         ;;

      esac

      shift

  done

+ # install minimum requirements to run ansible

  dnf install -y openssh-server python${python_version}-dnf

+ # configure SSH

  ssh-keygen -q -t rsa -N '' -f /etc/ssh/ssh_host_rsa_key

- echo 'root:foobar' | chpasswd

- # start sshd in background

+ echo "root:$rootpass" | chpasswd

+ if [ -n "$authorized_key" ]; then

+     mkdir -p /root/.ssh

+     echo "$authorized_key" >> /root/.ssh/authorized_keys

+     chmod 644 /root/.ssh/authorized_keys

+ fi

+ # start SSHD in background

  /usr/sbin/sshd

  echo SSHD READY

  # wait forever
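
For context, a minimal by-hand sketch of how the revised entry script is meant to be launched, assuming a Fedora image and a freshly generated key pair (the actual command line is assembled from role variables in the tasks below):

```sh
# Illustrative only: generate a key pair and start a container whose sshd
# accepts that key, mirroring what the docker role automates.
ssh-keygen -q -t rsa -N '' -f ./key
docker run -d -p 2222:22 \
    -v "$PWD/docker-run-ssh:/run.sh:z" \
    docker.io/fedora:latest \
    /bin/sh /run.sh --py3 --authorized-key "$(cat ./key.pub)"
```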

@@ -29,11 +29,28 @@

      my_playbook_extra_args: "{{my_playbook_extra_args}} -e \"rpms='{{rpms}}'\""

    when: rpms|default("") != ""

  

+ - name: Create temporary directory

+   tempfile:

+     state: directory

+     suffix: docker

+   register: my_docker_tempdir

+ 

+ - name: Create key pair for SSH authentication in container

+   shell: "/usr/bin/ssh-keygen -t rsa -N '' -f {{ my_docker_tempdir.path + '/key' }}"

+ 

+ - name: Read contents of generated SSH public key file

+   set_fact:

+     my_public_key: "{{ lookup('file', my_docker_tempdir.path + '/key.pub') }}"

+ 

+ - name: Configure arguments for SSH key based authentication

+   set_fact:

+     my_playbook_extra_args: "{{my_playbook_extra_args}} --private-key '{{ my_docker_tempdir.path + \'/key\' }}'"

+     my_run_cmd_extra_args: "{{my_run_cmd_extra_args}} --authorized-key '{{ my_public_key }}'"

+ 

  - name: Start ansible-ready docker container running SSHD

    shell: >

      docker run -d

      -p '{{docker_ssh_port}}:22'

-     -v '{{artifacts}}:/artifacts:z,rw'

      -v '{{role_path}}/files/docker-run-ssh:/run.sh:z'

      {{docker_extra_args}}

      '{{subjects}}'
@@ -47,19 +64,12 @@

    - name: Wait for container to initialize

      wait_for: port="{{docker_ssh_port}}" search_regex=OpenSSH

  

- # ********************************

- # HELP!

- # There must be a better way to run the actual test playbook without running

- # another instance of ansible-playbook in an external shell command.

- # The external run means we can't track the return status of the individual

- # tasks in the test playbook--which is the whole point.

- # ********************************

    - name: Run the playbook in the container

      shell: >

        ANSIBLE_LOG_PATH='{{artifacts}}/container-playbook.log'

        ansible-playbook '{{playbooks}}'

        -i '{{role_path}}/files/docker-inventory'

-       -e artifacts='/artifacts'

+       -e artifacts='{{artifacts}}'

        {{my_playbook_extra_args}}

      register: playbook_output

  
@@ -75,6 +85,12 @@

         Continue when ready

      when: lookup('env','FEDORA_TEST_DIAGNOSE')|bool

  

+   - name: Remove temporary directory

+     file:

+       path: "{{ my_docker_tempdir.path }}/"

+       state: absent

+     when: my_docker_tempdir is defined

+ 

    - name: Save the container log as an artifact

      shell: docker logs "{{ container_id }}" >"{{artifacts}}/docker.log" 2>&1
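
To make the argument plumbing easier to follow, this is approximately what the "Run the playbook in the container" task executes once the key-based options have been appended; the playbook name, tempdir path, and artifacts path are placeholders, not literal role output:

```sh
# Approximation of the assembled command: the private key comes from the
# temporary directory created above, and artifacts now points at the
# executor-side directory rather than a path inside the container.
ANSIBLE_LOG_PATH=./artifacts/container-playbook.log \
ansible-playbook tests.yml \
    -i roles/standard-test-docker/files/docker-inventory \
    -e artifacts=./artifacts \
    --private-key /tmp/ansible.abc123docker/key
```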

  

@@ -4,6 +4,9 @@

  to have the following variables defined:

  

   * tests: A list of RHTS test directories

-  * artifacts: An artifacts directory

+  * artifacts: An artifacts directory on localhost to store logs

+  * remote_artifacts: The directory on the system under test

+      where the logs are stored.  Note: if this variable is left

+      undefined, it will default to /tmp/artifacts

   * required_packages: A list of prerequisite packages required by RHTS tests

   * rpms: Space separated list of RPMs to install (optional, may include SRPMs)
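
As an illustration of the variables documented above, an invocation might look like the following; the playbook name and values are placeholders rather than part of this change:

```sh
# artifacts is collected on the executor; remote_artifacts lives on the system
# under test and falls back to /tmp/artifacts when left undefined.
ansible-playbook tests.yml \
    -e artifacts=./artifacts \
    -e remote_artifacts=/tmp/artifacts \
    -e "rpms='mypackage-1.0-1.fc27.noarch.rpm'"
```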

@@ -1,8 +1,9 @@

  ---

- - name: Install the RHTS pre-requirements

+ - name: Install the Ansible and RHTS pre-requirements

    package: name={{item}} state=latest

    with_items:

-   - dnf-plugins-core            # COPR plugin needed

+   - rsync                       # need rsync for Ansible synchronize module

+   - dnf-plugins-core            # need COPR plugin

    - beakerlib

    - make

    - createrepo
@@ -22,6 +23,11 @@

    with_items:

      - "{{ required_packages }}"

  

+ - name: Define remote_artifacts if it is not already defined

+   set_fact:

+     remote_artifacts: /tmp/artifacts

+   when: remote_artifacts is not defined

+ 

  - name: Create legacy beakerlib directories

    file:

      dest: "{{ item }}"
@@ -134,49 +140,51 @@

      chmod 644 /root/.ssh/authorized_keys

  

  - name: Make artifacts directory

-   file: path={{ artifacts }} state=directory owner=root mode=755 recurse=yes

+   file: path={{ remote_artifacts }} state=directory owner=root mode=755 recurse=yes

  

  - name: Start restraintd

    # instead of using service module, daemon is started directly since we want the

    # output as an artifact (and this could be running in a container)

-   shell: nohup /usr/bin/restraintd >"{{ artifacts }}/restraintd.log" 2>&1 &

+   shell: nohup /usr/bin/restraintd >"{{ remote_artifacts }}/restraintd.log" 2>&1 &

  

  - name: Start local web server for restraint

-   shell: nohup /usr/bin/python -m SimpleHTTPServer "{{ local_www_port }}" >"{{ artifacts }}/httpd.log" 2>&1 &

+   shell: nohup /usr/bin/python -m SimpleHTTPServer "{{ local_www_port }}" >"{{ remote_artifacts }}/httpd.log" 2>&1 &

    args:

      chdir: "{{ local_www_dir }}/"

  

  - block:

    - name: Execute RHTS tests using restraint

-     shell: /usr/bin/restraint --host localhost --job "{{ job_xml_file }}" >"{{ artifacts }}/restraint.log" 2>&1

+     shell: /usr/bin/restraint --host localhost --job "{{ job_xml_file }}" >"{{ remote_artifacts }}/restraint.log" 2>&1

      args:

-       chdir: "{{ artifacts }}/"

+       chdir: "{{ remote_artifacts }}/"

      ignore_errors: True

  

+ - always:

    - name: Extract job output directory from restraint logfile

-     shell: sed -n 's/^Using \([^ ]*\).*$/\1/p' "{{ artifacts }}/restraint.log"

+     shell: sed -n 's/^Using \([^ ]*\).*$/\1/p' "{{ remote_artifacts }}/restraint.log"

      register: restraint_job_dir

  

  - name: Make job output directory tree readable by all

      file:

-       path: "{{ artifacts }}/{{restraint_job_dir.stdout}}"

+       path: "{{ remote_artifacts }}/{{restraint_job_dir.stdout}}"

        mode: u=rwX,g=rX,o=rX

        recurse: yes

  

    - name: Set name of restraint XML job results file

-     set_fact: results_xml="{{ artifacts }}/{{restraint_job_dir.stdout}}/job.xml"

+     set_fact: results_xml="{{ remote_artifacts }}/{{restraint_job_dir.stdout}}/job.xml"

  

    - name: Convert restraint XML job results to text as main output artifact

-     shell: xsltproc /usr/local/share/job2text.xsl "{{ results_xml }}" >"{{ artifacts }}/test.log"

+     shell: xsltproc /usr/local/share/job2text.xsl "{{ results_xml }}" >"{{ remote_artifacts }}/test.log"

+ 

+   - name: Pull out the logs

+     synchronize:

+       dest: "{{ artifacts }}/"

+       src: "{{ remote_artifacts }}/"

+       mode: pull

+       ssh_args: "-o UserKnownHostsFile=/dev/null"

+     when: artifacts|default("") != ""

  

    - name: Check the results for failures

-     shell: grep '^FAIL ' "{{ artifacts }}/test.log"

+     shell: grep '^FAIL ' "{{ remote_artifacts }}/test.log"

      register: test_fails

      failed_when: test_fails.stdout or test_fails.stderr

- 

- - always:

-   - name: Pull out the logs

-     fetch:

-       dest: "{{artifacts}}/"

-       src: "{{artifacts}}/"

-       flat: yes
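
The switch from fetch to synchronize turns log collection into an rsync pull over SSH, which is why rsync was added to the prerequisites above; for the docker inventory earlier in this PR it is roughly equivalent to:

```sh
# Illustrative shell equivalent of the synchronize task; host, port, and
# paths are taken from the docker inventory and defaults in this PR.
rsync -r -e "ssh -p 2222 -o UserKnownHostsFile=/dev/null" \
    root@127.0.0.3:/tmp/artifacts/ ./artifacts/
```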

@@ -1,5 +1,6 @@

  ---

  artifacts: ./artifacts

+ remote_artifacts: /tmp/artifacts

  tests: []

  required_packages: []

  local_www_dir: "/var/www-test"

  • Implement configuration of SSH key-based authentication to the docker image
  • Revise docker and rhts role usage conventions to be compatible and interchangeable with the cloud and beakerlib roles

Everything here looks good to me. Have you run it start to finish with docker to execute a beakerlib test? I just ask because I created a docker container which calls the beakerlib task, and I had to do a little bit of hacking because delegating to 127.0.0.1 from inside the container was a little weird. Not the exact same case as this though, so this may just work which would be great. Just want to make sure first =)

Thank you for the review. Yes, I have used the standard-test-docker role to run full sets of standard-test-beakerlib tests using the docker.io/fedora:latest image. But, I suppose, your mileage may vary based on the docker image you use.
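
For anyone wanting to repeat that check, the rough shape of the invocation is sketched below; only `subjects`, `playbooks`, and `artifacts` appear in this diff, so the entry playbook name is an assumption about the surrounding tooling:

```sh
# Hedged sketch: run the docker role against fedora:latest. "run-docker.yml"
# stands in for whatever local playbook wires up standard-test-docker.
ansible-playbook run-docker.yml \
    -e subjects=docker.io/fedora:latest \
    -e playbooks=tests.yml \
    -e artifacts=./artifacts
```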

Pull-Request has been merged by merlinm 6 years ago