
Bump requirements.txt versions / remove ansible_python_interpreter hack (#5847)

* requirements.txt: Bump versions

Ansible 2.8+ allows ansible_python_interpreter autodetection

Signed-off-by: Etienne Champetier <champetier.etienne@gmail.com>

* tests: do not force ansible_python_interpreter

We do not expect people to set ansible_python_interpreter, so we should not set it in CI either.

Signed-off-by: Etienne Champetier <champetier.etienne@gmail.com>
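
For context on the two points above (a sketch, not part of the commit; the inventory path is a placeholder): since Ansible 2.8 the remote Python interpreter is discovered per host and reported back as discovered_interpreter_python, which is what makes the explicit -e ansible_python_interpreter=... overrides removed below unnecessary.

# Observe interpreter autodetection (Ansible 2.8+); inventory path is an example.
ansible -i inventory/sample/hosts.ini all -m ping
#   node1 | SUCCESS => { "ansible_facts": { "discovered_interpreter_python": "/usr/bin/python3" }, "ping": "pong" }

# If a host ever needs a pinned interpreter, it can still be set explicitly:
ansible-playbook -i inventory/sample/hosts.ini \
    -e ansible_python_interpreter=/usr/bin/python3 cluster.yml
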
Etienne Champetier, committed via GitHub · commit 2f19d964f6
3 changed files with 21 additions and 25 deletions:

  1. README.md (3 changes)
  2. requirements.txt (12 changes)
  3. tests/scripts/testcases_run.sh (31 changes)

README.md (3 changes)

@@ -141,8 +141,7 @@ Note: The list of validated [docker versions](https://github.com/kubernetes/kube
 ## Requirements

 - **Minimum required version of Kubernetes is v1.15**
-- **Ansible v2.7.8 and python-netaddr is installed on the machine that will run Ansible commands**
-- **Jinja 2.9 (or newer) is required to run the Ansible Playbooks**
+- **Ansible v2.9+, Jinja 2.11+ and python-netaddr is installed on the machine that will run Ansible commands**
 - The target servers must have **access to the Internet** in order to pull docker images. Otherwise, additional configuration is required (See [Offline Environment](https://github.com/kubernetes-sigs/kubespray/blob/master/docs/downloads.md#offline-environment))
 - The target servers are configured to allow **IPv4 forwarding**.
 - **Your ssh key must be copied** to all the servers part of your inventory.

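A quick way to confirm the control machine meets the updated requirement above (a sketch; exact output will vary):

# Verify Ansible / Jinja / netaddr on the machine that will run the playbooks.
ansible --version | head -n1                                     # expect ansible 2.9 or newer
python3 -c 'import jinja2, netaddr; print(jinja2.__version__)'   # expect 2.11 or newer
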
requirements.txt (12 changes)

@@ -1,7 +1,7 @@
-ansible==2.7.16
-jinja2==2.10.1
+ansible==2.9.6
+jinja2==2.11.1
 netaddr==0.7.19
-pbr==5.2.0
-hvac==0.8.2
-jmespath==0.9.4
-ruamel.yaml==0.15.96
+pbr==5.4.4
+hvac==0.10.0
+jmespath==0.9.5
+ruamel.yaml==0.16.10

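These pins are consumed the usual way; a minimal sketch, assuming a fresh virtualenv (the path is arbitrary):

# Install the pinned toolchain into a virtualenv and confirm the bumped versions.
python3 -m venv ~/.venvs/kubespray
. ~/.venvs/kubespray/bin/activate
pip install -r requirements.txt
ansible --version | head -n1                       # ansible 2.9.6
pip show jinja2 hvac | grep -E '^(Name|Version)'   # jinja2 2.11.1, hvac 0.10.0
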
tests/scripts/testcases_run.sh (31 changes)

@@ -1,9 +1,7 @@
 #!/bin/bash
 set -euxo pipefail

-export PYPATH=$([[ ! "$CI_JOB_NAME" =~ "coreos" ]] && echo /usr/bin/python || echo /opt/bin/python)
 echo "CI_JOB_NAME is $CI_JOB_NAME"
-echo "PYPATH is $PYPATH"
 pwd
 ls
 echo ${PWD}
@@ -28,7 +26,6 @@ if [[ "$CI_JOB_NAME" =~ "opensuse" ]]; then
   ansible all -m raw -a 'netconfig update -f'
   # Auto import repo keys
   ansible all -m raw -a 'zypper --gpg-auto-import-keys refresh'
-  PYPATH=/usr/bin/python3
 fi

 # Check out latest tag if testing upgrade
@@ -37,44 +34,44 @@ test "${UPGRADE_TEST}" != "false" && git fetch --all && git checkout "$KUBESPRAY
 test "${UPGRADE_TEST}" != "false" && git checkout "${CI_BUILD_REF}" tests/files/${CI_JOB_NAME}.yml tests/testcases/*.yml

 # Create cluster
-ansible-playbook ${ANSIBLE_LOG_LEVEL} -e @${CI_TEST_VARS} -e local_release_dir=${PWD}/downloads -e ansible_python_interpreter=${PYPATH} --limit "all:!fake_hosts" cluster.yml
+ansible-playbook ${ANSIBLE_LOG_LEVEL} -e @${CI_TEST_VARS} -e local_release_dir=${PWD}/downloads --limit "all:!fake_hosts" cluster.yml

 # Repeat deployment if testing upgrade
 if [ "${UPGRADE_TEST}" != "false" ]; then
   test "${UPGRADE_TEST}" == "basic" && PLAYBOOK="cluster.yml"
   test "${UPGRADE_TEST}" == "graceful" && PLAYBOOK="upgrade-cluster.yml"
   git checkout "${CI_BUILD_REF}"
-  ansible-playbook ${ANSIBLE_LOG_LEVEL} -e @${CI_TEST_VARS} -e local_release_dir=${PWD}/downloads -e ansible_python_interpreter=${PYPATH} --limit "all:!fake_hosts" $PLAYBOOK
+  ansible-playbook ${ANSIBLE_LOG_LEVEL} -e @${CI_TEST_VARS} -e local_release_dir=${PWD}/downloads --limit "all:!fake_hosts" $PLAYBOOK
 fi

 # Test control plane recovery
 if [ "${RECOVER_CONTROL_PLANE_TEST}" != "false" ]; then
-  ansible-playbook ${ANSIBLE_LOG_LEVEL} -e @${CI_TEST_VARS} -e local_release_dir=${PWD}/downloads -e ansible_python_interpreter=${PYPATH} --limit "${RECOVER_CONTROL_PLANE_TEST_GROUPS}:!fake_hosts" -e reset_confirmation=yes reset.yml
-  ansible-playbook ${ANSIBLE_LOG_LEVEL} -e @${CI_TEST_VARS} -e local_release_dir=${PWD}/downloads -e ansible_python_interpreter=${PYPATH} -e etcd_retries=10 --limit etcd,kube-master:!fake_hosts recover-control-plane.yml
+  ansible-playbook ${ANSIBLE_LOG_LEVEL} -e @${CI_TEST_VARS} -e local_release_dir=${PWD}/downloads --limit "${RECOVER_CONTROL_PLANE_TEST_GROUPS}:!fake_hosts" -e reset_confirmation=yes reset.yml
+  ansible-playbook ${ANSIBLE_LOG_LEVEL} -e @${CI_TEST_VARS} -e local_release_dir=${PWD}/downloads -e etcd_retries=10 --limit etcd,kube-master:!fake_hosts recover-control-plane.yml
 fi

 # Tests Cases
 ## Test Master API
-ansible-playbook -e ansible_python_interpreter=${PYPATH} --limit "all:!fake_hosts" tests/testcases/010_check-apiserver.yml $ANSIBLE_LOG_LEVEL
+ansible-playbook --limit "all:!fake_hosts" tests/testcases/010_check-apiserver.yml $ANSIBLE_LOG_LEVEL

 ## Test that all pods are Running
-ansible-playbook -e ansible_python_interpreter=${PYPATH} --limit "all:!fake_hosts" tests/testcases/015_check-pods-running.yml $ANSIBLE_LOG_LEVEL
+ansible-playbook --limit "all:!fake_hosts" tests/testcases/015_check-pods-running.yml $ANSIBLE_LOG_LEVEL

 ## Test that all nodes are Ready
-ansible-playbook -e ansible_python_interpreter=${PYPATH} --limit "all:!fake_hosts" tests/testcases/020_check-nodes-ready.yml $ANSIBLE_LOG_LEVEL
+ansible-playbook --limit "all:!fake_hosts" tests/testcases/020_check-nodes-ready.yml $ANSIBLE_LOG_LEVEL

 ## Test pod creation and ping between them
-ansible-playbook -e ansible_python_interpreter=${PYPATH} --limit "all:!fake_hosts" tests/testcases/030_check-network.yml $ANSIBLE_LOG_LEVEL
+ansible-playbook --limit "all:!fake_hosts" tests/testcases/030_check-network.yml $ANSIBLE_LOG_LEVEL

 ## Advanced DNS checks
-ansible-playbook -e ansible_python_interpreter=${PYPATH} --limit "all:!fake_hosts" tests/testcases/040_check-network-adv.yml $ANSIBLE_LOG_LEVEL
+ansible-playbook --limit "all:!fake_hosts" tests/testcases/040_check-network-adv.yml $ANSIBLE_LOG_LEVEL

 ## Kubernetes conformance tests
-ansible-playbook -i ${ANSIBLE_INVENTORY} -e ansible_python_interpreter=${PYPATH} -e @${CI_TEST_VARS} --limit "all:!fake_hosts" tests/testcases/100_check-k8s-conformance.yml $ANSIBLE_LOG_LEVEL
+ansible-playbook -i ${ANSIBLE_INVENTORY} -e @${CI_TEST_VARS} --limit "all:!fake_hosts" tests/testcases/100_check-k8s-conformance.yml $ANSIBLE_LOG_LEVEL

 ## Idempotency checks 1/5 (repeat deployment)
 if [ "${IDEMPOT_CHECK}" = "true" ]; then
-  ansible-playbook ${ANSIBLE_LOG_LEVEL} -e @${CI_TEST_VARS} -e ansible_python_interpreter=${PYPATH} -e local_release_dir=${PWD}/downloads --limit "all:!fake_hosts" cluster.yml
+  ansible-playbook ${ANSIBLE_LOG_LEVEL} -e @${CI_TEST_VARS} -e local_release_dir=${PWD}/downloads --limit "all:!fake_hosts" cluster.yml
 fi

 ## Idempotency checks 2/5 (Advanced DNS checks)
@@ -84,15 +81,15 @@ fi

 ## Idempotency checks 3/5 (reset deployment)
 if [ "${IDEMPOT_CHECK}" = "true" -a "${RESET_CHECK}" = "true" ]; then
-  ansible-playbook ${ANSIBLE_LOG_LEVEL} -e @${CI_TEST_VARS} -e ansible_python_interpreter=${PYPATH} -e reset_confirmation=yes --limit "all:!fake_hosts" reset.yml
+  ansible-playbook ${ANSIBLE_LOG_LEVEL} -e @${CI_TEST_VARS} -e reset_confirmation=yes --limit "all:!fake_hosts" reset.yml
 fi

 ## Idempotency checks 4/5 (redeploy after reset)
 if [ "${IDEMPOT_CHECK}" = "true" -a "${RESET_CHECK}" = "true" ]; then
-  ansible-playbook ${ANSIBLE_LOG_LEVEL} -e @${CI_TEST_VARS} -e ansible_python_interpreter=${PYPATH} -e local_release_dir=${PWD}/downloads --limit "all:!fake_hosts" cluster.yml
+  ansible-playbook ${ANSIBLE_LOG_LEVEL} -e @${CI_TEST_VARS} -e local_release_dir=${PWD}/downloads --limit "all:!fake_hosts" cluster.yml
 fi

 ## Idempotency checks 5/5 (Advanced DNS checks)
 if [ "${IDEMPOT_CHECK}" = "true" -a "${RESET_CHECK}" = "true" ]; then
-  ansible-playbook -e ansible_python_interpreter=${PYPATH} --limit "all:!fake_hosts" tests/testcases/040_check-network-adv.yml $ANSIBLE_LOG_LEVEL
+  ansible-playbook --limit "all:!fake_hosts" tests/testcases/040_check-network-adv.yml $ANSIBLE_LOG_LEVEL
 fi
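
Not part of this change, but related: if a CI image ever needs to pin the interpreter again, Ansible 2.8+ exposes this as a single configuration knob rather than the per-command -e flag removed above. A sketch (the path is only an example):

# Example only: pin interpreter discovery globally for a job instead of per ansible-playbook call.
# ANSIBLE_PYTHON_INTERPRETER is the environment form of the [defaults] interpreter_python setting.
export ANSIBLE_PYTHON_INTERPRETER=/usr/bin/python3
ansible-playbook ${ANSIBLE_LOG_LEVEL} -e @${CI_TEST_VARS} --limit "all:!fake_hosts" cluster.yml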