From c67710ae334022d5ba07a4c6c025af46873fe2db Mon Sep 17 00:00:00 2001 From: Alexander Bokovoy Date: Apr 21 2020 15:10:50 +0000 Subject: Add CI with Azure Pipelines Test bind-dyndb-ldap with FreeIPA integration tests using Azure Pipelines. FreeIPA integration tests are maintained in the FreeIPA project (ipatests/test_integration/*) and made available in Fedora through the python3-ipatests package. Use infrastructure developed for the FreeIPA project to run integration tests in Azure Pipelines. The following sequence of steps is performed: - Packages for bind-dyndb-ldap are built using the Fedora fedora-toolbox container image - A container image for tests is created using the packages just built and FreeIPA server / tests packages from Fedora repositories - Gating topology is generated according to the pipeline definition gating.yml - Test jobs are scheduled, using docker instances created from the container image for test - Upon test completion, its results are collected and published to the Azure Pipelines run page Azure Pipelines integration code is based on FreeIPA Azure Pipelines integration. 
Signed-off-by: Alexander Bokovoy Signed-off-by: Stanislav Levin --- diff --git a/tests/azure/Dockerfiles/Dockerfile.build.fedora b/tests/azure/Dockerfiles/Dockerfile.build.fedora new file mode 100644 index 0000000..2f2e405 --- /dev/null +++ b/tests/azure/Dockerfiles/Dockerfile.build.fedora @@ -0,0 +1,23 @@ +FROM fedora:31 +MAINTAINER [BIND DynDB LDAP Developers freeipa-devel@lists.fedorahosted.org] +ENV container=docker LANG=en_US.utf8 LANGUAGE=en_US.utf8 LC_ALL=en_US.utf8 + +ADD distx /root +RUN echo 'deltarpm = false' >> /etc/dnf/dnf.conf \ + && dnf update -y dnf \ + && dnf install -y dnf-plugins-core sudo wget systemd firewalld nss-tools iptables \ + && sed -i 's/%_install_langs \(.*\)/\0:fr/g' /etc/rpm/macros.image-language-conf \ + && dnf install -y glibc-langpack-fr glibc-langpack-en \ + && dnf install -y /root/packages/*.rpm \ + && dnf install -y openssh-server freeipa-server-dns python3-ipatests \ + && dnf clean all && rm -rf /root/packages /root/srpms \ + && mkdir -p /root/ipatests \ + && sed -i 's/.*PermitRootLogin .*/#&/g' /etc/ssh/sshd_config \ + && echo 'PermitRootLogin yes' >> /etc/ssh/sshd_config \ + && systemctl enable sshd \ + && for i in /usr/lib/systemd/system/*-domainname.service; \ + do sed -i 's#^ExecStart=/#ExecStart=-/#' $i ; done + +STOPSIGNAL RTMIN+3 +VOLUME ["/dyndb-ldap", "/run", "/tmp"] +ENTRYPOINT [ "/usr/sbin/init" ] diff --git a/tests/azure/Dockerfiles/docker-compose.yml b/tests/azure/Dockerfiles/docker-compose.yml new file mode 100644 index 0000000..69d6653 --- /dev/null +++ b/tests/azure/Dockerfiles/docker-compose.yml @@ -0,0 +1,55 @@ +version: '2.1' +services: + master: + image: ${DYNDB_LDAP_DOCKER_IMAGE} + build: . 
+ cap_add: + - ALL + security_opt: + - apparmor:unconfined + mem_limit: 1610612736 + volumes: + - /sys/fs/cgroup/systemd:/sys/fs/cgroup/systemd + - ./test-config.yaml:/root/.ipa/ipa-test-config.yaml:ro + - ${BUILD_REPOSITORY_LOCALPATH}:${DYNDB_LDAP_TESTS_REPO_PATH} + + networks: + - ${DYNDB_LDAP_NETWORK} + + replica: + image: ${DYNDB_LDAP_DOCKER_IMAGE} + build: . + cap_add: + - ALL + security_opt: + - apparmor:unconfined + mem_limit: 1610612736 + volumes: + - /sys/fs/cgroup/systemd:/sys/fs/cgroup/systemd + networks: + - ${DYNDB_LDAP_NETWORK} + + client: + image: ${DYNDB_LDAP_DOCKER_IMAGE} + build: . + cap_add: + - ALL + security_opt: + - apparmor:unconfined + mem_limit: 536870912 + volumes: + - /sys/fs/cgroup/systemd:/sys/fs/cgroup/systemd + # nfs server + - ./exports:/exports + - /lib/modules:/lib/modules:ro + networks: + - ${DYNDB_LDAP_NETWORK} + +networks: + ipanet: + driver: bridge + enable_ipv6: true + ipam: + driver: default + config: + - subnet: ${DYNDB_LDAP_IPV6_SUBNET} diff --git a/tests/azure/azure-pipelines.yml b/tests/azure/azure-pipelines.yml new file mode 100644 index 0000000..63b95c0 --- /dev/null +++ b/tests/azure/azure-pipelines.yml @@ -0,0 +1,61 @@ +trigger: +- master + +variables: +- template: templates/variables-common.yml +# platform specific variables, links to +- template: templates/variables-fedora.yml + +jobs: +- job: Build + pool: + vmImage: $(VM_IMAGE) + container: + image: $(DOCKER_BUILD_IMAGE) + options: --cap-add=SYS_PTRACE --security-opt seccomp=unconfined --privileged + steps: + - template: templates/${{ variables.PREPARE_BUILD_TEMPLATE }} + - script: | + set -e + echo "Running autoconf generator" + autoreconf -i -f + ./configure + displayName: Configure the project + - template: templates/${{ variables.BUILD_TEMPLATE }} + - template: templates/publish-build.yml + parameters: + artifactName: 'packages-$(Build.BuildId)-$(Agent.OS)-$(Agent.OSArchitecture)' + targetPath: $(Build.Repository.LocalPath)/distx + displayName: Publish 
packages + + - script: | + set -e + mkdir container + cp -pr distx container/ + cp $(DYNDB_LDAP_TESTS_DOCKERFILES)/$(DOCKER_DOCKERFILE) container/Dockerfile + cd container + docker build -t dyndb-ldap-azure-builder . + docker save dyndb-ldap-azure-builder | gzip > '$(builddir)/dyndb-ldap-azure-builder-container.tar.gz' + displayName: Create container image for test + - template: templates/publish-build.yml + parameters: + artifactName: 'image-$(Build.BuildId)-$(Agent.OS)-$(Agent.OSArchitecture)' + targetPath: $(Build.Repository.LocalPath)/dyndb-ldap-azure-builder-container.tar.gz + displayName: Publish container image + - template: templates/generate-matrix.yml + parameters: + definition: 'tests/azure/azure_definitions/gating-fedora.yml' + displayName: Generate Matrix for Gating tests + name: gating_matrix + +- job: GATING + pool: + vmImage: $(VM_IMAGE) + dependsOn: Build + condition: succeeded() + strategy: + matrix: $[ dependencies.Build.outputs['gating_matrix.matrix'] ] + timeoutInMinutes: 90 + steps: + - template: templates/generate-job-variables.yml + - template: templates/test-jobs.yml diff --git a/tests/azure/azure_definitions/gating-fedora.yml b/tests/azure/azure_definitions/gating-fedora.yml new file mode 100644 index 0000000..13f6d7a --- /dev/null +++ b/tests/azure/azure_definitions/gating-fedora.yml @@ -0,0 +1,34 @@ +vms: +- vm_jobs: + - container_job: Install_DNS_Master + tests: + - test_integration/test_installation.py::TestInstallMaster + +- vm_jobs: + - container_job: DNSSEC_tests + containers: + replicas: 2 + tests: + - test_integration/test_dnssec.py + +- vm_jobs: + - container_job: simple_replication + containers: + replicas: 1 + tests: + - test_integration/test_simple_replication.py + +- vm_jobs: + - container_job: Backup_and_Restore_with_DNSSEC + tests: + - test_integration/test_backup_and_restore.py::TestBackupAndRestoreWithDNSSEC + - test_integration/test_backup_and_restore.py::TestBackupReinstallRestoreWithDNSSEC + containers: + replicas: 1 + 
+- vm_jobs: + - container_job: DNSSEC_hidden_replica_promotion + containers: + replicas: 2 + tests: + - test_integration/test_replica_promotion.py::TestHiddenReplicaPromotion diff --git a/tests/azure/azure_definitions/gating.yml b/tests/azure/azure_definitions/gating.yml new file mode 120000 index 0000000..d5bb126 --- /dev/null +++ b/tests/azure/azure_definitions/gating.yml @@ -0,0 +1 @@ +gating-fedora.yml \ No newline at end of file diff --git a/tests/azure/scripts/azure-run-integration-tests.sh b/tests/azure/scripts/azure-run-integration-tests.sh new file mode 100755 index 0000000..300f049 --- /dev/null +++ b/tests/azure/scripts/azure-run-integration-tests.sh @@ -0,0 +1,34 @@ +#!/bin/bash -eux + +# this script is intended to be run within container +# +# distro-specifics +source "${DYNDB_LDAP_TESTS_SCRIPTS}/variables.sh" + +rm -rf "$DYNDB_LDAP_TESTS_LOGSDIR" +mkdir "$DYNDB_LDAP_TESTS_LOGSDIR" +pushd "$DYNDB_LDAP_TESTS_LOGSDIR" + +# Local directory for ipa-run-tests +# defined in ipa-test-config(1) +# defaults to /root/ipatests +mkdir -p /root/ipatests + +tests_result=1 +{ IPATEST_YAML_CONFIG=~/.ipa/ipa-test-config.yaml \ + ipa-run-tests \ + --logging-level=debug \ + --logfile-dir="$DYNDB_LDAP_TESTS_LOGSDIR" \ + --with-xunit \ + --verbose \ + $DYNDB_LDAP_TESTS_TO_IGNORE \ + $DYNDB_LDAP_TESTS_TO_RUN && tests_result=0 ; } || \ + tests_result=$? 
+ +# fix permissions on logs to be readable by Azure's user (vsts) +chmod -R o+rX "$DYNDB_LDAP_TESTS_LOGSDIR" + +find "$DYNDB_LDAP_TESTS_LOGSDIR" -mindepth 1 -maxdepth 1 -not -name '.*' -type d \ + -exec tar --remove-files -czf {}.tar.gz {} \; + +exit $tests_result diff --git a/tests/azure/scripts/azure-run-tests.sh b/tests/azure/scripts/azure-run-tests.sh new file mode 100755 index 0000000..1639ae8 --- /dev/null +++ b/tests/azure/scripts/azure-run-tests.sh @@ -0,0 +1,107 @@ +#!/bin/bash -eux + +if [ $# -ne 1 ]; then + echo "Docker environment ID is not provided" + exit 1 +fi + +PROJECT_ID="$1" +BUILD_REPOSITORY_LOCALPATH="${BUILD_REPOSITORY_LOCALPATH:-$(realpath .)}" + +DYNDB_LDAP_TESTS_TO_RUN_VARNAME="DYNDB_LDAP_TESTS_TO_RUN_${PROJECT_ID}" +DYNDB_LDAP_TESTS_TO_RUN="${!DYNDB_LDAP_TESTS_TO_RUN_VARNAME:-}" +# in case of missing explicit list of tests to be run the Pytest run all the +# discovered tests, this is an error for this CI +[ -z "$DYNDB_LDAP_TESTS_TO_RUN" ] && { echo 'Nothing to test'; exit 1; } + +DYNDB_LDAP_TESTS_ENV_NAME_VARNAME="DYNDB_LDAP_TESTS_ENV_NAME_${PROJECT_ID}" +DYNDB_LDAP_TESTS_ENV_NAME="${!DYNDB_LDAP_TESTS_ENV_NAME_VARNAME:-}" +[ -z "$DYNDB_LDAP_TESTS_ENV_NAME" ] && \ + { echo "Project name is not set for project:${PROJECT_ID}"; exit 1 ;} + +DYNDB_LDAP_TESTS_TYPE_VARNAME="DYNDB_LDAP_TESTS_TYPE_${PROJECT_ID}" +DYNDB_LDAP_TESTS_TYPE="${!DYNDB_LDAP_TESTS_TYPE_VARNAME:-integration}" + +# Normalize spacing and expand the list afterwards. 
Remove {} for the single list element case +DYNDB_LDAP_TESTS_TO_RUN=$(eval "echo {$(echo $DYNDB_LDAP_TESTS_TO_RUN | sed -e 's/[ \t]+*/,/g')}" | tr -d '{}') + +DYNDB_LDAP_TESTS_TO_IGNORE_VARNAME="DYNDB_LDAP_TESTS_TO_IGNORE_${PROJECT_ID}" +DYNDB_LDAP_TESTS_TO_IGNORE="${!DYNDB_LDAP_TESTS_TO_IGNORE_VARNAME:-}" +[ -n "$DYNDB_LDAP_TESTS_TO_IGNORE" ] && \ +DYNDB_LDAP_TESTS_TO_IGNORE=$(eval "echo --ignore\ {$(echo $DYNDB_LDAP_TESTS_TO_IGNORE | sed -e 's/[ \t]+*/,/g')}" | tr -d '{}') + +DYNDB_LDAP_TESTS_CLIENTS_VARNAME="DYNDB_LDAP_TESTS_CLIENTS_${PROJECT_ID}" +DYNDB_LDAP_TESTS_CLIENTS="${!DYNDB_LDAP_TESTS_CLIENTS_VARNAME:-0}" + +DYNDB_LDAP_TESTS_REPLICAS_VARNAME="DYNDB_LDAP_TESTS_REPLICAS_${PROJECT_ID}" +DYNDB_LDAP_TESTS_REPLICAS="${!DYNDB_LDAP_TESTS_REPLICAS_VARNAME:-0}" + +DYNDB_LDAP_TESTS_CONTROLLER="${PROJECT_ID}_master_1" +DYNDB_LDAP_TESTS_LOGSDIR="${DYNDB_LDAP_TESTS_REPO_PATH}/dyndb_ldap_envs/${DYNDB_LDAP_TESTS_ENV_NAME}/${CI_RUNNER_LOGS_DIR}" + +DYNDB_LDAP_TESTS_DOMAIN="${DYNDB_LDAP_TESTS_DOMAIN:-ipa.test}" +# bash4 +DYNDB_LDAP_TESTS_REALM="${DYNDB_LDAP_TESTS_DOMAIN^^}" + +# for base tests only 1 master is needed even if another was specified +if [ "$DYNDB_LDAP_TESTS_TYPE" == "base" ]; then + DYNDB_LDAP_TESTS_CLIENTS="0" + DYNDB_LDAP_TESTS_REPLICAS="0" +fi + +project_dir="${DYNDB_LDAP_TESTS_ENV_WORKING_DIR}/${DYNDB_LDAP_TESTS_ENV_NAME}" +ln -sfr \ + "${DYNDB_LDAP_TESTS_DOCKERFILES}/docker-compose.yml" \ + "$project_dir"/ + +# will be generated later in setup_containers.py +touch "${project_dir}"/test-config.yaml + +pushd "$project_dir" + +BUILD_REPOSITORY_LOCALPATH="$BUILD_REPOSITORY_LOCALPATH" \ +DYNDB_LDAP_DOCKER_IMAGE="${DYNDB_LDAP_DOCKER_IMAGE:-dyndb-ldap-azure-builder}" \ +DYNDB_LDAP_NETWORK="${DYNDB_LDAP_NETWORK:-ipanet}" \ +DYNDB_LDAP_IPV6_SUBNET="2001:db8:1:${PROJECT_ID}::/64" \ +docker-compose -p "$PROJECT_ID" up \ + --scale replica="$DYNDB_LDAP_TESTS_REPLICAS" \ + --scale client="$DYNDB_LDAP_TESTS_CLIENTS" \ + --force-recreate --remove-orphans -d + +popd + 
+DYNDB_LDAP_TESTS_CLIENTS="$DYNDB_LDAP_TESTS_CLIENTS" \ +DYNDB_LDAP_TESTS_REPLICAS="$DYNDB_LDAP_TESTS_REPLICAS" \ +DYNDB_LDAP_TESTS_ENV_ID="$PROJECT_ID" \ +DYNDB_LDAP_TESTS_ENV_WORKING_DIR="$DYNDB_LDAP_TESTS_ENV_WORKING_DIR" \ +DYNDB_LDAP_TESTS_ENV_NAME="$DYNDB_LDAP_TESTS_ENV_NAME" \ +DYNDB_LDAP_TEST_CONFIG_TEMPLATE="${BUILD_REPOSITORY_LOCALPATH}/tests/azure/templates/test-config-template.yaml" \ +DYNDB_LDAP_TESTS_REPO_PATH="$DYNDB_LDAP_TESTS_REPO_PATH" \ +DYNDB_LDAP_TESTS_DOMAIN="$DYNDB_LDAP_TESTS_DOMAIN" \ +python3 setup_containers.py + +# path to runner within container +tests_runner="${DYNDB_LDAP_TESTS_REPO_PATH}/${DYNDB_LDAP_TESTS_SCRIPTS}/azure-run-${DYNDB_LDAP_TESTS_TYPE}-tests.sh" + +tests_result=1 +{ docker exec -t \ + --env DYNDB_LDAP_TESTS_SCRIPTS="${DYNDB_LDAP_TESTS_REPO_PATH}/${DYNDB_LDAP_TESTS_SCRIPTS}" \ + --env DYNDB_LDAP_PLATFORM="$DYNDB_LDAP_PLATFORM" \ + --env DYNDB_LDAP_TESTS_DOMAIN="$DYNDB_LDAP_TESTS_DOMAIN" \ + --env DYNDB_LDAP_TESTS_REALM="$DYNDB_LDAP_TESTS_REALM" \ + --env DYNDB_LDAP_TESTS_LOGSDIR="$DYNDB_LDAP_TESTS_LOGSDIR" \ + --env DYNDB_LDAP_TESTS_TO_RUN="$DYNDB_LDAP_TESTS_TO_RUN" \ + --env DYNDB_LDAP_TESTS_TO_IGNORE="$DYNDB_LDAP_TESTS_TO_IGNORE" \ + "$DYNDB_LDAP_TESTS_CONTROLLER" \ + /bin/bash --noprofile --norc \ + -eux "$tests_runner" && tests_result=0 ; } || tests_result=$? 
+ +pushd "$project_dir" +BUILD_REPOSITORY_LOCALPATH="$BUILD_REPOSITORY_LOCALPATH" \ +DYNDB_LDAP_DOCKER_IMAGE="${DYNDB_LDAP_DOCKER_IMAGE:-dyndb-ldap-azure-builder}" \ +DYNDB_LDAP_NETWORK="${DYNDB_LDAP_NETWORK:-ipanet}" \ +DYNDB_LDAP_IPV6_SUBNET="2001:db8:1:${PROJECT_ID}::/64" \ +docker-compose -p "$PROJECT_ID" down +popd + +exit $tests_result diff --git a/tests/azure/scripts/dump_cores.sh b/tests/azure/scripts/dump_cores.sh new file mode 100755 index 0000000..408a839 --- /dev/null +++ b/tests/azure/scripts/dump_cores.sh @@ -0,0 +1,54 @@ +#!/bin/bash -eu + +DYNDB_LDAP_TESTS_ENV_WORKING_DIR="${DYNDB_LDAP_TESTS_REPO_PATH}/dyndb_ldap_envs" +COREDUMPS_DIR="${DYNDB_LDAP_TESTS_ENV_WORKING_DIR}/${COREDUMPS_SUBDIR}" + +since_time="$(cat '/coredumpctl.time.mark' || echo '-1h')" +debugger="/debugger.sh" + +cat > "$debugger" < "\${CORE_PID}.stacktrace" 2>&1 +EOF +chmod +x "$debugger" + +# make sure coredumpctl installed +which coredumpctl +coredumpctl \ + --no-pager --directory="$HOST_JOURNAL" --since="$since_time" list ||: + +rm -rvf "$COREDUMPS_DIR" ||: +mkdir "$COREDUMPS_DIR" +cd "$COREDUMPS_DIR" + +pids="$(coredumpctl --no-pager --directory="$HOST_JOURNAL" --since="$since_time" -F COREDUMP_PID || echo '')" +for pid in $pids; do + # core dump + { coredumpctl \ + --no-pager \ + --since="$since_time" \ + --directory="$HOST_JOURNAL" \ + -o "${pid}.core" dump "$pid" && \ + tar -czf "${pid}.core.tar.gz" --remove-files "${pid}.core" ; } ||: + + # stacktrace + { CORE_PID="$pid" \ + coredumpctl \ + --no-pager \ + --since="$since_time" \ + --directory="$HOST_JOURNAL" \ + --debugger="$debugger" \ + debug "$pid" && \ + tar \ + -czf "${pid}.stacktrace.tar.gz" \ + --remove-files "${pid}.stacktrace" ; } ||: +done + +chmod a+rw -R "$COREDUMPS_DIR" diff --git a/tests/azure/scripts/generate-matrix.py b/tests/azure/scripts/generate-matrix.py new file mode 100644 index 0000000..1b75f8b --- /dev/null +++ b/tests/azure/scripts/generate-matrix.py @@ -0,0 +1,49 @@ +import argparse +import json + 
+import yaml + +parser = argparse.ArgumentParser(description='Generate Azure jobs matrix.') +parser.add_argument('azure_template', help='path to Azure template') + +parser.add_argument('max_azure_env_jobs', type=int, + help='maximum number of Docker envs within VM') + +args = parser.parse_args() + +with open(args.azure_template) as f: + data = yaml.safe_load(f) + matrix_jobs = {} + for vm in data['vms']: + vm_jobs = vm['vm_jobs'] + jobs = {} + job_name = '' + for job_id, vm_job in enumerate(vm_jobs, 1): + if not job_name: + job_name = f'{vm_job["container_job"]}_{job_id}' + jobs[f'dyndb_ldap_tests_env_name_{job_id}'] = vm_job['container_job'] + jobs[f'dyndb_ldap_tests_to_run_{job_id}'] = ' '.join(vm_job['tests']) + jobs[f'dyndb_ldap_tests_to_ignore_{job_id}'] = ' '.join( + vm_job.get('ignore', '')) + jobs[f'dyndb_ldap_tests_type_{job_id}'] = vm_job.get( + 'type', 'integration') + + containers = vm_job.get('containers') + replicas = 0 + clients = 0 + if containers: + replicas = containers.get('replicas', 0) + clients = containers.get('clients', 0) + jobs[f'dyndb_ldap_tests_replicas_{job_id}'] = replicas + jobs[f'dyndb_ldap_tests_clients_{job_id}'] = clients + + if len(vm_jobs) > args.max_azure_env_jobs: + raise ValueError( + f"Number of defined jobs:{len(vm_jobs)} within VM:'{job_name}'" + f" is greater than limit:{args.max_azure_env_jobs}") + job_name = f'{job_name}_to_{len(vm_jobs)}' + if job_name in matrix_jobs: + raise ValueError(f"Environment names should be unique:{job_name}") + matrix_jobs[job_name] = jobs + print("##vso[task.setVariable variable=matrix;isOutput=true]" + + json.dumps(matrix_jobs)) diff --git a/tests/azure/scripts/install-debuginfo-fedora.sh b/tests/azure/scripts/install-debuginfo-fedora.sh new file mode 100755 index 0000000..3020672 --- /dev/null +++ b/tests/azure/scripts/install-debuginfo-fedora.sh @@ -0,0 +1,20 @@ +#!/bin/bash -eu + +function install_debuginfo() { + dnf makecache ||: + dnf install -y \ + 
${DYNDB_LDAP_TESTS_REPO_PATH}/distx/packages_debuginfo/*.rpm \ + gdb + + dnf debuginfo-install -y \ + 389-ds-base \ + bind \ + bind-dyndb-ldap \ + certmonger \ + gssproxy \ + httpd \ + krb5-server \ + krb5-workstation \ + samba \ + sssd +} diff --git a/tests/azure/scripts/install-debuginfo.sh b/tests/azure/scripts/install-debuginfo.sh new file mode 100755 index 0000000..daa0e50 --- /dev/null +++ b/tests/azure/scripts/install-debuginfo.sh @@ -0,0 +1,8 @@ +#!/bin/bash -eu + +function install_debuginfo() { :; } + +# override install_debuginfo for the platform specifics +source "${DYNDB_LDAP_TESTS_SCRIPTS}/install-debuginfo-${DYNDB_LDAP_PLATFORM}.sh" + +install_debuginfo diff --git a/tests/azure/scripts/setup_containers.py b/tests/azure/scripts/setup_containers.py new file mode 100644 index 0000000..f877183 --- /dev/null +++ b/tests/azure/scripts/setup_containers.py @@ -0,0 +1,292 @@ +import logging +import os +import subprocess + +import docker +from jinja2 import Template + +logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s') + +DYNDB_LDAP_TESTS_ENV_WORKING_DIR = os.environ.get('DYNDB_LDAP_TESTS_ENV_WORKING_DIR') +DYNDB_LDAP_TESTS_ENV_NAME = os.environ.get('DYNDB_LDAP_TESTS_ENV_NAME') +DYNDB_LDAP_TESTS_ENV_ID = os.environ.get('DYNDB_LDAP_TESTS_ENV_ID', '1') +DYNDB_LDAP_TESTS_CLIENTS = int(os.environ.get('DYNDB_LDAP_TESTS_CLIENTS', 0)) +DYNDB_LDAP_TESTS_REPLICAS = int(os.environ.get('DYNDB_LDAP_TESTS_REPLICAS', 0)) +DYNDB_LDAP_TESTS_DOMAIN = os.environ.get('DYNDB_LDAP_TESTS_DOMAIN', 'ipa.test') +DYNDB_LDAP_SSH_PRIV_KEY = os.environ.get('DYNDB_LDAP_SSH_PRIV_KEY', '/root/.ssh/id_rsa') +DYNDB_LDAP_DNS_FORWARDER = os.environ.get('DYNDB_LDAP_DNS_FORWARDER', '8.8.8.8') +DYNDB_LDAP_NETWORK = os.environ.get('DYNDB_LDAP_NETWORK', 'ipanet') +DYNDB_LDAP_CONTROLLER_TYPE = os.environ.get('DYNDB_LDAP_CONTROLLER_TYPE', 'master') +DYNDB_LDAP_TEST_CONFIG_TEMPLATE = os.environ.get( + 'DYNDB_LDAP_TEST_CONFIG_TEMPLATE', './templates/test-config-template.yaml') + 
+DYNDB_LDAP_TESTS_ENV_DIR = os.path.join(DYNDB_LDAP_TESTS_ENV_WORKING_DIR, DYNDB_LDAP_TESTS_ENV_NAME) +DYNDB_LDAP_TEST_CONFIG = "test-config.yaml" + + +class Container: + """ + Represents group of Docker container + """ + def __init__(self, role, dns=DYNDB_LDAP_DNS_FORWARDER, num=1, + prefix=DYNDB_LDAP_TESTS_ENV_ID, domain=DYNDB_LDAP_TESTS_DOMAIN): + self.role = role + self.num = num + self.prefix = prefix + self.dns = dns + self.domain = domain + self.dclient = docker.from_env() + + @property + def hostnames(self): + """ + hostnames of containers within group + """ + if not hasattr(self, '_hostnames'): + self._hostnames = ['{}{}.{}'.format(self.role, c, self.domain) + for c in range(1, self.num + 1)] + return self._hostnames + + @property + def names(self): + """ + names of containers within group + """ + if not hasattr(self, '_names'): + self._names = ['{}_{}_{}'.format(self.prefix, self.role, c) + for c in range(1, self.num + 1)] + return self._names + + def ip(self, name): + """ + ipv4 address of container + """ + ipanet = '{}_{}'.format(DYNDB_LDAP_TESTS_ENV_ID, DYNDB_LDAP_NETWORK) + dcont = self.dclient.containers.get(name) + return dcont.attrs['NetworkSettings']['Networks'][ipanet]['IPAddress'] + + @property + def ips(self): + """ + ipv4 addresses of containers within group + """ + if not hasattr(self, '_ips'): + self._ips = [self.ip(n) for n in self.names] + return self._ips + + def umount_docker_resource(self, path): + """ + Umount resource by its path + """ + cmd = [ + "/bin/umount", path + ] + self.execute_all(cmd) + + cmd = [ + "/bin/chmod", + "a-x", + path, + ] + self.execute_all(cmd) + + def execute(self, name, args): + """ + Exec an arbitrary command within container + """ + dcont = self.dclient.containers.get(name) + logging.info("%s: run: %s", dcont.name, args) + result = dcont.exec_run(args, demux=True) + if result.output[0] is not None: + logging.info("%s: %s", dcont.name, result.output[0]) + logging.info("%s: result: %s", dcont.name, 
result.exit_code) + if result.exit_code: + logging.error("stderr: %s", result.output[1].decode()) + raise subprocess.CalledProcessError( + result.exit_code, args, + result.output[1] + ) + return result + + def execute_all(self, args): + """ + Exec an arbitrary command within every container of group + """ + results = [] + for n in self.names: + results.append(self.execute(n, args)) + return results + + def add_ssh_pubkey(self, key): + """ + Add ssh public key into every container of group + """ + home_ssh_dir = "/root/.ssh" + auth_keys = os.path.join(home_ssh_dir, "authorized_keys") + cmd = [ + "/bin/bash", "-c", + (f"mkdir {home_ssh_dir} " + f"; chmod 0700 {home_ssh_dir} " + f"&& touch {auth_keys} " + f"&& chmod 0600 {auth_keys} " + f"&& echo {key} >> {auth_keys}" + ) + ] + self.execute_all(cmd) + + def setup_hosts(self): + """ + Overwrite hosts within every container of group + """ + self.umount_docker_resource("/etc/hosts") + for n, i, h in zip(self.names, self.ips, self.hostnames): + hosts = "127.0.0.1 localhost\n::1 localhost\n{ip} {host}".format( + ip=i, host=h, + ) + cmd = [ + "/bin/bash", "-c", + "echo -e '{hosts}' > /etc/hosts".format(hosts=hosts), + ] + self.execute(name=n, args=cmd) + + def setup_hostname(self): + self.umount_docker_resource("/etc/hostname") + for n, h in zip(self.names, self.hostnames): + cmd = [ + "/bin/bash", "-c", + "echo -e '{hostname}' > /etc/hostname".format(hostname=h), + ] + self.execute(name=n, args=cmd) + + cmd = [ + "hostnamectl", + "set-hostname", h, + ] + self.execute(name=n, args=cmd) + + def setup_resolvconf(self): + """ + Overwrite resolv conf within every container of group + """ + self.umount_docker_resource("/etc/resolv.conf") + ns = "nameserver {dns}".format(dns=self.dns) + cmd = [ + "/bin/bash", "-c", + "echo {ns} > /etc/resolv.conf".format(ns=ns), + ] + self.execute_all(cmd) + + +class Controller(Container): + """ + Manages groups of containers + """ + def __init__(self, contr_type=DYNDB_LDAP_CONTROLLER_TYPE): + 
self.containers = [] + self.contr_type = contr_type + if self.contr_type == 'master': + self.master = None + + def append(self, container): + self.containers.append(container) + + def setup_ssh(self): + """ + Generate ssh key pair and copy public part to all containers + """ + cmd = ["rm", "-f", DYNDB_LDAP_SSH_PRIV_KEY] + self.execute(args=cmd) + + cmd = [ + "ssh-keygen", "-q", + "-f", DYNDB_LDAP_SSH_PRIV_KEY, + "-t", "rsa", + "-m", "PEM", + "-N", "", + ] + self.execute(args=cmd) + + cmd = ["/bin/bash", "-c", "cat {}.pub".format(DYNDB_LDAP_SSH_PRIV_KEY)] + key = self.execute(cmd).output[0].decode().rstrip() + for container in self.containers: + container.add_ssh_pubkey(key) + + def execute(self, args): + """ + Execute a command on controller (either master or local machine) + """ + if self.contr_type == 'master': + if self.master is None: + for container in self.containers: + if container.role == "master": + self.master = container + break + return self.master.execute(name=master.names[0], args=args) + + proc = subprocess.run(args, check=True, capture_output=True) + return [proc.stdout.decode().rstrip().strip("'")] + + def setup_hosts(self): + """ + Overwrite Docker's hosts + """ + hosts = [] + for container in self.containers: + container.setup_hosts() + for i, h in zip(container.ips, container.hostnames): + hosts.append("{} {}".format(i, h)) + + cmd = [ + "/bin/bash", "-c", + "echo -e '{hosts}' >> /etc/hosts".format(hosts='\n'.join(hosts)), + ] + self.execute(cmd) + + def setup_hostname(self): + """ + Overwrite Docker's hostname + """ + for container in self.containers: + container.setup_hostname() + + def setup_resolvconf(self): + """ + Overwrite Docker's embedded DNS ns + """ + for container in self.containers: + container.setup_resolvconf() + + def generate_ipa_test_config(self, config): + with open(DYNDB_LDAP_TEST_CONFIG_TEMPLATE, 'r') as f: + # assert foobar + template = Template(f.read(), trim_blocks=True, lstrip_blocks=True) + + 
print(template.render(config)) + + with open(os.path.join(DYNDB_LDAP_TESTS_ENV_DIR, DYNDB_LDAP_TEST_CONFIG), 'w') as f: + f.write(template.render(config)) + + +controller = Controller() +master = Container(role='master') +clients = Container(role='client', num=DYNDB_LDAP_TESTS_CLIENTS, dns=master.ips[0]) +replicas = Container(role='replica', num=DYNDB_LDAP_TESTS_REPLICAS, dns=master.ips[0]) + +controller.append(master) +controller.append(clients) +controller.append(replicas) + +controller.setup_ssh() +controller.setup_hosts() +controller.setup_hostname() +controller.setup_resolvconf() + +config = { + 'dns_forwarder': DYNDB_LDAP_DNS_FORWARDER, + 'ssh_private_key': DYNDB_LDAP_SSH_PRIV_KEY, + 'domain_name': DYNDB_LDAP_TESTS_DOMAIN, + 'master': master.ips, + 'replicas': replicas.ips, + 'clients': clients.ips, +} +controller.generate_ipa_test_config(config) diff --git a/tests/azure/scripts/variables-fedora.sh b/tests/azure/scripts/variables-fedora.sh new file mode 100755 index 0000000..41d7d88 --- /dev/null +++ b/tests/azure/scripts/variables-fedora.sh @@ -0,0 +1,7 @@ +#!/bin/bash -eux + +# Put the platform-specific definitions here + +function firewalld_cmd() { + firewall-cmd $@ +} diff --git a/tests/azure/scripts/variables.sh b/tests/azure/scripts/variables.sh new file mode 100755 index 0000000..4c21d89 --- /dev/null +++ b/tests/azure/scripts/variables.sh @@ -0,0 +1,14 @@ +#!/bin/bash -eu + +HTTPD_SYSTEMD_NAME='httpd.service' +HTTPD_LOGDIR='/var/log/httpd' +HTTPD_ERRORLOG="${HTTPD_LOGDIR}/error_log" +HTTPD_BASEDIR='/etc/httpd' +HTTPD_ALIASDIR="${HTTPD_BASEDIR}/alias" +BIND_BASEDIR='/var/named' +BIND_DATADIR="${BIND_BASEDIR}/data" + +function firewalld_cmd() { :; } + +# this should be the last to override base variables with platform specific +source "$DYNDB_LDAP_TESTS_SCRIPTS/variables-${DYNDB_LDAP_PLATFORM}.sh" diff --git a/tests/azure/templates/build-fedora.yml b/tests/azure/templates/build-fedora.yml new file mode 100644 index 0000000..c980f91 --- /dev/null +++ 
b/tests/azure/templates/build-fedora.yml @@ -0,0 +1,28 @@ +steps: +- script: | + set -e + mkdir -p $(builddir)/{distx/{rpms,srpms,packages},buildx} + make dist + touch `ls -1 bind-dyndb-ldap*.tar.bz2 | sed 's/bz2$/bz2.asc/g'` + mv bind-dyndb-ldap*.tar.bz2* $(builddir)/buildx + cp contrib/bind-dyndb-ldap.spec $(builddir)/buildx + rpmbuild -ba \ + --define "_topdir $(builddir)/buildx" \ + --define "_sourcedir $(builddir)/buildx" \ + --define "_specdir $(builddir)/buildx" \ + --define "_builddir $(builddir)/buildx" \ + --define "_srcrpmdir $(builddir)/distx/srpms" \ + --define "_rpmdir $(builddir)/distx/rpms" \ + $(builddir)/buildx/bind-dyndb-ldap.spec + displayName: Build packages +- script: | + set -e + mkdir -p $(builddir)/distx/packages_debuginfo + find $(builddir)/distx/rpms/ -type f \ + \( -name "*-debuginfo-*.rpm" -o -name '*-debugsource-*.rpm' \) \ + -exec mv {} $(builddir)/distx/packages_debuginfo/ \; + find $(builddir)/distx/rpms/ -type f \ + \( -name "*.rpm" \) \ + -exec mv {} $(builddir)/distx/packages/ \; + rm -rf $(builddir)/{distx/{rpms,srpms}} + displayName: Move packages into the final location diff --git a/tests/azure/templates/generate-job-variables.yml b/tests/azure/templates/generate-job-variables.yml new file mode 100644 index 0000000..5592c49 --- /dev/null +++ b/tests/azure/templates/generate-job-variables.yml @@ -0,0 +1,19 @@ +steps: +- script: | + # don't set 'set -x' here because this breaks variables + # https://github.com/microsoft/azure-pipelines-yaml/blob/master/design/readonly-variables.md + set -eu + total_envs=0 + for project in $(seq $(MAX_CONTAINER_ENVS)); do + # no more configured environments + tests_varname="DYNDB_LDAP_TESTS_TO_RUN_${project}" + [ -z "${!tests_varname:-}" ] && break; + let "total_envs=total_envs+1" + + name_varname="DYNDB_LDAP_TESTS_ENV_NAME_${project}" + [ -z "${!name_varname:-}" ] && \ + { echo "dyndb_ldap_tests_env_name_${project} is mandatory."; exit 1; } + done + [ "$total_envs" -eq 0 ] && { echo 'Nothing to 
test'; env | sort ; exit 1; } + echo "##vso[task.setvariable variable=dyndb_ldap_tests_total_envs]$total_envs" + displayName: Generate environment variables diff --git a/tests/azure/templates/generate-matrix.yml b/tests/azure/templates/generate-matrix.yml new file mode 100644 index 0000000..0e5c4c5 --- /dev/null +++ b/tests/azure/templates/generate-matrix.yml @@ -0,0 +1,9 @@ +parameters: + definition: '' + displayName: '' + name: '' + +steps: + - script: python3 $(DYNDB_LDAP_TESTS_SCRIPTS)/generate-matrix.py ${{ parameters.definition }} $(MAX_CONTAINER_ENVS) + name: ${{ parameters.name }} + displayName: ${{ parameters.displayName }} diff --git a/tests/azure/templates/prepare-build-fedora.yml b/tests/azure/templates/prepare-build-fedora.yml new file mode 100644 index 0000000..09b4d0e --- /dev/null +++ b/tests/azure/templates/prepare-build-fedora.yml @@ -0,0 +1,22 @@ +steps: +- script: | + set -e + sudo rm -rf /var/cache/dnf/* + sudo dnf makecache || : + echo "Installing base development environment" + sudo dnf install -y \ + gdb-minimal \ + make \ + autoconf \ + rpm-build \ + gettext-devel \ + automake \ + libtool \ + docker \ + python3-paramiko \ + python3-pyyaml \ + + echo "Installing development dependencies" + sudo dnf builddep -y bind-dyndb-ldap + sudo dnf builddep -y --skip-broken --spec contrib/bind-dyndb-ldap.spec --best --allowerasing --setopt=install_weak_deps=False + displayName: Prepare build environment diff --git a/tests/azure/templates/publish-build.yml b/tests/azure/templates/publish-build.yml new file mode 100644 index 0000000..94b034a --- /dev/null +++ b/tests/azure/templates/publish-build.yml @@ -0,0 +1,11 @@ +parameters: + artifactName: '' + targetPath: '' + displayName: '' + +steps: +- task: PublishPipelineArtifact@1 + inputs: + artifactName: ${{ parameters.artifactName }} + targetPath: ${{ parameters.targetPath }} + displayName: ${{ parameters.displayName }} diff --git a/tests/azure/templates/run-test.yml b/tests/azure/templates/run-test.yml 
new file mode 100644 index 0000000..166e712 --- /dev/null +++ b/tests/azure/templates/run-test.yml @@ -0,0 +1,58 @@ +steps: +- script: | + set -eux + + workdir="$DYNDB_LDAP_TESTS_ENV_WORKING_DIR" + rm -rf "$workdir" + mkdir "$workdir" + + ln -sfr \ + ${BUILD_REPOSITORY_LOCALPATH}/${DYNDB_LDAP_TESTS_SCRIPTS}/{azure-run-tests.sh,setup_containers.py} \ + ./ + + function runner() { + set -o pipefail + local project_id="$1" + local project_name_varname="DYNDB_LDAP_TESTS_ENV_NAME_${project_id}" + local project_name="${!project_name_varname}" + [ -z "$project_name" ] && \ + { echo "Project name is not set for project:${project_id}"; exit 1 ;} + local workdir="$DYNDB_LDAP_TESTS_ENV_WORKING_DIR" + local logfile="runner_${project_name}.log" + local project_dir="${workdir}/${project_name}" + rm -rf "$project_dir" + mkdir "$project_dir" + # live-logging of tests within environment: '1' + if [ "$project_id" == "1" ]; then + /usr/bin/time \ + --format="tests: ${project_name}, result: %x, time: %E" \ + --output="result_${project_id}" \ + -- \ + ./azure-run-tests.sh "$project_id" 2>&1 | \ + ts '[%Y-%m-%d %H:%M:%S]' 2>&1 | tee "${project_dir}/${logfile}" + result=$? + else + /usr/bin/time \ + --format="tests: ${project_name}, result: %x, time: %E" \ + --output="result_${project_id}" \ + -- \ + ./azure-run-tests.sh "$project_id" 2>&1 | \ + ts '[%Y-%m-%d %H:%M:%S]' 2>&1 > "${project_dir}/${logfile}" + result=$? + fi + exit $result + } + export -f runner + + result=1 + rm -f result_* + { parallel \ + --tag \ + --jobs $(MAX_CONTAINER_ENVS) \ + --linebuffer \ + 'runner {}' ::: "$(seq $(dyndb_ldap_tests_total_envs))" && result=0 ; } || \ + result=$? 
+ echo "Results:" + cat $(eval echo result_{1..$(dyndb_ldap_tests_total_envs)}) + exit $result + displayName: Run tests diff --git a/tests/azure/templates/save-test-artifacts.yml b/tests/azure/templates/save-test-artifacts.yml new file mode 100644 index 0000000..300dc03 --- /dev/null +++ b/tests/azure/templates/save-test-artifacts.yml @@ -0,0 +1,12 @@ +parameters: + logsPath: $(DYNDB_LDAP_TESTS_ENV_WORKING_DIR) + logsArtifact: '' +steps: +- task: PublishPipelineArtifact@1 + displayName: Publish logs + inputs: + artifactName: ${{parameters.logsArtifact}} + # globbing is adjusted in .artifactignore on DYNDB_LDAP_TESTS_ENV_WORKING_DIR + targetPath: ${{parameters.logsPath}} + condition: always() + diff --git a/tests/azure/templates/setup-test-environment.yml b/tests/azure/templates/setup-test-environment.yml new file mode 100644 index 0000000..797b1db --- /dev/null +++ b/tests/azure/templates/setup-test-environment.yml @@ -0,0 +1,32 @@ +parameters: + imageName: 'dyndb-ldap-azure-builder:latest' + +steps: +- script: | + set -e + echo '{ "ipv6": true, "fixed-cidr-v6": "2001:db8::/64" }' > docker-daemon.json + sudo mkdir -p /etc/docker + sudo cp docker-daemon.json /etc/docker/daemon.json + sudo chown root:root /etc/docker/daemon.json + sudo systemctl restart docker + sudo modprobe ip6_tables + displayName: Configure containerization to allow IPv6 network + +- task: DownloadPipelineArtifact@0 + displayName: Download prebuilt packages + inputs: + artifactName: 'packages-$(Build.BuildId)-$(Agent.OS)-$(Agent.OSArchitecture)' + targetPath: $(Build.Repository.LocalPath)/distx + +- task: DownloadPipelineArtifact@0 + displayName: Download pre-built container + inputs: + artifactName: 'image-$(Build.BuildId)-$(Agent.OS)-$(Agent.OSArchitecture)' + targetPath: $(Build.Repository.LocalPath) + +- script: | + set -e + docker load --input $(Build.Repository.LocalPath)/dyndb-ldap-azure-builder-container.tar.gz + docker images + docker inspect dyndb-ldap-azure-builder:latest + 
displayName: Import pre-built container to the engine diff --git a/tests/azure/templates/test-config-template.yaml b/tests/azure/templates/test-config-template.yaml new file mode 100644 index 0000000..5b975b1 --- /dev/null +++ b/tests/azure/templates/test-config-template.yaml @@ -0,0 +1,28 @@ +admin_name: admin +admin_password: Secret123 +debug: false +dirman_dn: cn=Directory Manager +dirman_password: Secret123 +domain_level: 1 +dns_forwarder: {{ dns_forwarder }} +root_ssh_key_filename: {{ ssh_private_key }} +domains: +- name: {{ domain_name }} + type: IPA + hosts: + - external_hostname: master1.{{ domain_name }} + name: master1.{{ domain_name }} + ip: {{ master[0] }} + role: master +{% for repl_ip in replicas: %} + - external_hostname: replica{{ loop.index }}.{{ domain_name }} + name: replica{{ loop.index }}.{{ domain_name }} + ip: {{ repl_ip }} + role: replica +{% endfor %} +{% for client_ip in clients: %} + - external_hostname: client{{ loop.index }}.{{ domain_name }} + name: client{{ loop.index }}.{{ domain_name }} + ip: {{ client_ip }} + role: client +{% endfor %} diff --git a/tests/azure/templates/test-jobs.yml b/tests/azure/templates/test-jobs.yml new file mode 100644 index 0000000..9558ab4 --- /dev/null +++ b/tests/azure/templates/test-jobs.yml @@ -0,0 +1,117 @@ +steps: +- script: | + set -e + env | sort + displayName: Print Host Environment + +- script: | + set -e + printf "Available entropy: %s\n" $(cat /proc/sys/kernel/random/entropy_avail) + sudo apt-get install -y rng-tools + sudo service rng-tools start + sleep 3 + printf "Available entropy: %s\n" $(cat /proc/sys/kernel/random/entropy_avail) + displayName: Increase entropy level + +- script: | + set -e + sudo apt-get install -y \ + parallel \ + moreutils \ + systemd-coredump \ + python3-docker + # ubuntu's one is too old: different API + python3 -m pip install docker --user + displayName: Install Host's tests requirements + +- script: | + set -eu + date +'%Y-%m-%d %H:%M:%S' > coredumpctl.time.mark + 
systemd_conf="/etc/systemd/system.conf" + sudo sed -i 's/^DumpCore=.*/#&/g' "$systemd_conf" + sudo sed -i 's/^DefaultLimitCORE=.*/#&/g' "$systemd_conf" + echo -e 'DumpCore=yes\nDefaultLimitCORE=infinity' | \ + sudo tee -a "$systemd_conf" >/dev/null + cat "$systemd_conf" + coredump_conf="/etc/systemd/coredump.conf" + cat "$coredump_conf" + sudo systemctl daemon-reexec + # for ns-slapd debugging + sudo sysctl -w fs.suid_dumpable=1 + displayName: Allow coredumps + +- template: setup-test-environment.yml + +- template: run-test.yml + +- task: PublishTestResults@2 + inputs: + testResultsFiles: 'dyndb_ldap_envs/*/$(CI_RUNNER_LOGS_DIR)/nosetests.xml' + testRunTitle: $(System.JobIdentifier) results + condition: succeededOrFailed() + +- script: | + set -eu + # check the host first, containers cores were dumped here + COREDUMPS_SUBDIR="coredumps" + COREDUMPS_DIR="${DYNDB_LDAP_TESTS_ENV_WORKING_DIR}/${COREDUMPS_SUBDIR}" + rm -rfv "$COREDUMPS_DIR" ||: + mkdir "$COREDUMPS_DIR" + since_time="$(cat coredumpctl.time.mark || echo '-1h')" + sudo coredumpctl --no-pager --since="$since_time" list ||: + + pids="$(sudo coredumpctl --no-pager --since="$since_time" -F COREDUMP_PID || echo '')" + # nothing to dump + [ -z "$pids" ] && exit 0 + + # continue in container + HOST_JOURNAL="/var/log/host_journal" + CONTAINER_COREDUMP="dump_cores" + docker create --privileged \ + -v "$(realpath coredumpctl.time.mark)":/coredumpctl.time.mark:ro \ + -v /var/lib/systemd/coredump:/var/lib/systemd/coredump:ro \ + -v /var/log/journal:"$HOST_JOURNAL":ro \ + -v "${BUILD_REPOSITORY_LOCALPATH}":"${DYNDB_LDAP_TESTS_REPO_PATH}" \ + --name "$CONTAINER_COREDUMP" dyndb-ldap-azure-builder + docker start "$CONTAINER_COREDUMP" + + docker exec -t \ + --env DYNDB_LDAP_TESTS_REPO_PATH="${DYNDB_LDAP_TESTS_REPO_PATH}" \ + --env DYNDB_LDAP_TESTS_SCRIPTS="${DYNDB_LDAP_TESTS_REPO_PATH}/${DYNDB_LDAP_TESTS_SCRIPTS}" \ + --env DYNDB_LDAP_PLATFORM="${DYNDB_LDAP_PLATFORM}" \ + "$CONTAINER_COREDUMP" \ + /bin/bash --noprofile 
--norc -eux \ + "${DYNDB_LDAP_TESTS_REPO_PATH}/${DYNDB_LDAP_TESTS_SCRIPTS}/install-debuginfo.sh" + + docker exec -t \ + --env DYNDB_LDAP_TESTS_REPO_PATH="${DYNDB_LDAP_TESTS_REPO_PATH}" \ + --env COREDUMPS_SUBDIR="$COREDUMPS_SUBDIR" \ + --env HOST_JOURNAL="$HOST_JOURNAL" \ + "$CONTAINER_COREDUMP" \ + /bin/bash --noprofile --norc -eux \ + "${DYNDB_LDAP_TESTS_REPO_PATH}/${DYNDB_LDAP_TESTS_SCRIPTS}/dump_cores.sh" + # there should be no crashes + exit 1 + condition: succeededOrFailed() + displayName: Check for coredumps + +- script: | + set -e + + artifacts_ignore_path="${DYNDB_LDAP_TESTS_ENV_WORKING_DIR}/.artifactignore" + cat > "$artifacts_ignore_path" <