Merge pull request #883 from samdoran/backport/2.1/878-879-880/test-fixes

[release_2.1] Test fixes

Backport of the following for Ansible Runner 2.1:

PR #878
PR #879
PR #880

Reviewed-by: David Shrewsbury <None>
Reviewed-by: None <None>
ansible-zuul[bot] authored Oct 25, 2021
2 parents bdf0c7f + befed20 commit f74e2fa
Showing 19 changed files with 206 additions and 292 deletions.
2 changes: 2 additions & 0 deletions test/conftest.py
@@ -33,6 +33,8 @@ def skipif_pre_ansible28(is_pre_ansible28):
         pytest.skip("Valid only on Ansible 2.8+")


+# TODO: determine if we want to add docker / podman
+# to zuul instances in order to run these tests
 def pytest_generate_tests(metafunc):
     """If a test uses the custom marker ``test_all_runtimes``, generate marks
     for all supported container runtimes. This requires the test to accept
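For reference, the hook described by that docstring can be sketched as follows. This is a hedged reconstruction, not the verbatim repository implementation: it assumes the marker is matched through ``metafunc.definition`` and that runtimes missing from PATH are skipped at collection time via ``shutil.which``.

import shutil

import pytest

SUPPORTED_RUNTIMES = ('podman', 'docker')


def pytest_generate_tests(metafunc):
    # Only tests that opt in with the custom marker are parametrized.
    if metafunc.definition.get_closest_marker('test_all_runtimes') is None:
        return
    params = [
        # Skip a runtime at collection time when its binary is not on PATH.
        pytest.param(runtime, marks=pytest.mark.skipif(
            shutil.which(runtime) is None,
            reason=f'{runtime} is not installed'))
        for runtime in SUPPORTED_RUNTIMES
    ]
    metafunc.parametrize('runtime', params)

Tests written against such a hook simply declare the marker and accept a ``runtime`` argument, as the hunks below show.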
32 changes: 0 additions & 32 deletions test/integration/conftest.py
@@ -29,38 +29,6 @@ def rc(tmp_path):
     return rc


-# TODO: determine if we want to add docker / podman
-# to zuul instances in order to run these tests
-@pytest.fixture(scope="session", autouse=True)
-def container_runtime_available():
-    import subprocess
-    import warnings
-
-    runtimes_available = True
-    for runtime in ('docker', 'podman'):
-        try:
-            subprocess.run([runtime, '-v'])
-        except FileNotFoundError:
-            warnings.warn(UserWarning(f"{runtime} not available"))
-            runtimes_available = False
-    return runtimes_available
-
-
-# TODO: determine if we want to add docker / podman
-# to zuul instances in order to run these tests
-@pytest.fixture(scope="session")
-def container_runtime_installed():
-    import subprocess
-
-    for runtime in ('podman', 'docker'):
-        try:
-            subprocess.run([runtime, '-v'])
-            return runtime
-        except FileNotFoundError:
-            pass
-    pytest.skip('No container runtime is available.')
-
-
 @pytest.fixture(scope='session')
 def clear_integration_artifacts(request):
     '''Fixture is session scoped to allow parallel runs without error
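The two deleted session fixtures are superseded by the ``test_all_runtimes`` marker added to test/conftest.py above. A minimal before/after sketch (``test_something`` is a hypothetical test name, not from the diff):

import pytest


# Before: the test pulled in the session fixture, which probed for podman
# and docker by spawning '<runtime> -v' subprocesses.
def test_something_old(container_runtime_installed):
    runtime = container_runtime_installed
    assert runtime in ('podman', 'docker')


# After: the test opts in via the marker and runs once per available runtime.
@pytest.mark.test_all_runtimes
def test_something(runtime):
    assert runtime in ('podman', 'docker')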
13 changes: 0 additions & 13 deletions test/integration/containerized/conftest.py

This file was deleted.

6 changes: 1 addition & 5 deletions test/integration/containerized/test_cleanup_images.py
@@ -1,6 +1,5 @@
 import json
 import random
-import shutil

 from base64 import b64decode
 from string import ascii_lowercase

@@ -11,11 +10,8 @@
 from ansible_runner.defaults import default_container_image


-@pytest.mark.parametrize('runtime', ['podman', 'docker'])
+@pytest.mark.test_all_runtimes
 def test_cleanup_new_image(cli, runtime, tmp_path):
-    if shutil.which(runtime) is None:
-        pytest.skip(f'{runtime} is unavailable')
-
     # Create new image just for this test with a unique layer
     random_string = ''.join(random.choice(ascii_lowercase) for i in range(10))
     special_string = f"Verify this in test - {random_string}"
23 changes: 10 additions & 13 deletions test/integration/containerized/test_container_management.py
@@ -1,5 +1,4 @@
 import os
-import shutil
 import time
 import json

@@ -11,8 +10,9 @@
 from ansible_runner.interface import run


-def is_running(cli, container_runtime_installed, container_name):
-    cmd = [container_runtime_installed, 'ps', '-aq', '--filter', f'name={container_name}']
+@pytest.mark.test_all_runtimes
+def is_running(cli, runtime, container_name):
+    cmd = [runtime, 'ps', '-aq', '--filter', f'name={container_name}']
     r = cli(cmd, bare=True)
     output = '{}{}'.format(r.stdout, r.stderr)
     print(' '.join(cmd))
@@ -49,18 +49,19 @@ def cancel(self):
         return True


-def test_cancel_will_remove_container(project_fixtures, container_runtime_installed, cli):
+@pytest.mark.test_all_runtimes
+def test_cancel_will_remove_container(project_fixtures, runtime, cli):
     private_data_dir = project_fixtures / 'sleep'
     ident = uuid4().hex[:12]
     container_name = f'ansible_runner_{ident}'

-    cancel_standin = CancelStandIn(container_runtime_installed, cli, container_name)
+    cancel_standin = CancelStandIn(runtime, cli, container_name)

     res = run(
         private_data_dir=private_data_dir,
         playbook='sleep.yml',
         settings={
-            'process_isolation_executable': container_runtime_installed,
+            'process_isolation_executable': runtime,
             'process_isolation': True
         },
         cancel_callback=cancel_standin.cancel,
@@ -70,14 +71,12 @@ def test_cancel_will_remove_container(project_fixtures, container_runtime_installed, cli):
     assert res.status == 'canceled'

     assert not is_running(
-        cli, container_runtime_installed, container_name
+        cli, runtime, container_name
     ), 'Found a running container, they should have all been stopped'


-@pytest.mark.parametrize('runtime', ['podman', 'docker'])
+@pytest.mark.test_all_runtimes
 def test_invalid_registry_host(tmp_path, runtime):
-    if shutil.which(runtime) is None:
-        pytest.skip(f'{runtime} is unavaialble')
     pdd_path = tmp_path / 'private_data_dir'
     pdd_path.mkdir()
     private_data_dir = str(pdd_path)
@@ -125,10 +124,8 @@ def test_invalid_registry_host(tmp_path, runtime):
     ])


-@pytest.mark.parametrize('runtime', ['podman', 'docker'])
+@pytest.mark.test_all_runtimes
 def test_registry_auth_file_cleanup(tmp_path, cli, runtime):
-    if shutil.which(runtime) is None:
-        pytest.skip(f'{runtime} is unavaialble')
     pdd_path = tmp_path / 'private_data_dir'
     pdd_path.mkdir()
     private_data_dir = str(pdd_path)
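The ``cancel_callback`` used above is polled by ansible-runner during execution; the run is canceled once the callable returns True, which is what ``CancelStandIn.cancel`` does after allowing a few status checks. A minimal stand-alone sketch of the same contract (``make_cancel_callback`` is a hypothetical helper, not from the diff):

def make_cancel_callback(delay_polls=3):
    """Return a callback that requests cancellation after a few polls."""
    state = {'polls': 0}

    def cancel():
        state['polls'] += 1
        # ansible-runner keeps going while this returns False.
        return state['polls'] >= delay_polls

    return cancel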
4 changes: 2 additions & 2 deletions test/integration/test_display_callback.py
@@ -332,7 +332,7 @@ def test_output_when_given_invalid_playbook(tmp_path):
     private_data_dir = str(tmp_path)
     executor = init_runner(
         private_data_dir=private_data_dir,
-        inventory="localhost ansible_connection=local",
+        inventory='localhost ansible_connection=local ansible_python_interpreter="{{ ansible_playbook_python }}"',
         envvars={"ANSIBLE_DEPRECATION_WARNINGS": "False"},
         playbook=os.path.join(private_data_dir, 'fake_playbook.yml')
     )
@@ -368,7 +368,7 @@ def test_output_when_given_non_playbook_script(tmp_path):

     executor = init_runner(
         private_data_dir=private_data_dir,
-        inventory="localhost ansible_connection=local",
+        inventory='localhost ansible_connection=local ansible_python_interpreter="{{ ansible_playbook_python }}"',
         envvars={"ANSIBLE_DEPRECATION_WARNINGS": "False"}
     )
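The inventory strings above gain ``ansible_python_interpreter="{{ ansible_playbook_python }}"``, which pins the managed-host interpreter to the one running ansible-playbook itself, so the tests no longer depend on interpreter discovery on the test host. A minimal sketch of the same pattern (the private_data_dir path is hypothetical):

from ansible_runner import run

r = run(
    private_data_dir='/tmp/demo',  # hypothetical path for illustration
    inventory='localhost ansible_connection=local '
              'ansible_python_interpreter="{{ ansible_playbook_python }}"',
    playbook=[{'hosts': 'all', 'gather_facts': False,
               'tasks': [{'debug': {'msg': 'test'}}]}],
)
assert r.status == 'successful'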
77 changes: 30 additions & 47 deletions test/integration/test_events.py
@@ -1,33 +1,29 @@
-import pytest
-import tempfile
-from distutils.spawn import find_executable
 import json
 import os
+
+from distutils.spawn import find_executable
+
+import pytest

 from ansible_runner import defaults, run, run_async


+@pytest.mark.test_all_runtimes
 @pytest.mark.parametrize('containerized', [True, False])
-def test_basic_events(containerized, container_runtime_available, is_pre_ansible28, is_run_async=False, g_facts=False):
-    if containerized and not container_runtime_available:
-        pytest.skip('container runtime(s) not available')
-    tdir = tempfile.mkdtemp()
+def test_basic_events(containerized, is_pre_ansible28, runtime, tmp_path, is_run_async=False, g_facts=False):
+
     if is_pre_ansible28:
         inventory = 'localhost ansible_connection=local ansible_python_interpreter="/usr/bin/env python"'
     else:
-        inventory = 'localhost ansible_connection=local'
+        inventory = 'localhost ansible_connection=local ansible_python_interpreter="{{ ansible_playbook_python }}"'

     playbook = [{'hosts': 'all', 'gather_facts': g_facts, 'tasks': [{'debug': {'msg': "test"}}]}]
-    run_args = {'private_data_dir': tdir,
+    run_args = {'private_data_dir': str(tmp_path),
                 'inventory': inventory,
                 'envvars': {"ANSIBLE_DEPRECATION_WARNINGS": "False", 'ANSIBLE_PYTHON_INTERPRETER': 'auto_silent'},
                 'playbook': playbook}
     if containerized:
         run_args.update({'process_isolation': True,
-                         'process_isolation_executable': 'podman',
+                         'process_isolation_executable': runtime,
                          'container_image': defaults.default_container_image,
-                         'container_volume_mounts': [f'{tdir}:{tdir}']})
+                         'container_volume_mounts': [f'{tmp_path}:{tmp_path}']})

     if not is_run_async:
         r = run(**run_args)
@@ -58,31 +54,24 @@ def test_basic_events(containerized, container_runtime_available, is_pre_ansible28, is_run_async=False, g_facts=False):
     assert "event_data" in okay_event and len(okay_event['event_data']) > 0


+@pytest.mark.test_all_runtimes
 @pytest.mark.parametrize('containerized', [True, False])
-def test_async_events(containerized, container_runtime_available, is_pre_ansible28):
-    test_basic_events(containerized, container_runtime_available, is_pre_ansible28, is_run_async=True, g_facts=True)
+def test_async_events(containerized, is_pre_ansible28, runtime, tmp_path):
+    test_basic_events(containerized, is_pre_ansible28, runtime, tmp_path, is_run_async=True, g_facts=True)


-def test_basic_serializeable(is_pre_ansible28):
-    tdir = tempfile.mkdtemp()
-    if is_pre_ansible28:
-        inv = 'localhost ansible_connection=local ansible_python_interpreter="/usr/bin/env python"'
-    else:
-        inv = 'localhost ansible_connection=local'
-    r = run(private_data_dir=tdir,
+def test_basic_serializeable(is_pre_ansible28, tmp_path):
+    inv = 'localhost ansible_connection=local ansible_python_interpreter="{{ ansible_playbook_python }}"'
+    r = run(private_data_dir=str(tmp_path),
             inventory=inv,
             playbook=[{'hosts': 'all', 'gather_facts': False, 'tasks': [{'debug': {'msg': "test"}}]}])
     events = [x for x in r.events]
     json.dumps(events)


-def test_event_omission(is_pre_ansible28):
-    tdir = tempfile.mkdtemp()
-    if is_pre_ansible28:
-        inv = 'localhost ansible_connection=local ansible_python_interpreter="/usr/bin/env python"'
-    else:
-        inv = 'localhost ansible_connection=local'
-    r = run(private_data_dir=tdir,
+def test_event_omission(is_pre_ansible28, tmp_path):
+    inv = 'localhost ansible_connection=local ansible_python_interpreter="{{ ansible_playbook_python }}"'
+    r = run(private_data_dir=str(tmp_path),
             inventory=inv,
             omit_event_data=True,
             playbook=[{'hosts': 'all', 'gather_facts': False, 'tasks': [{'debug': {'msg': "test"}}]}])
@@ -97,13 +86,9 @@ def test_event_omission(is_pre_ansible28):
     assert not any([x['event_data'] for x in events])


-def test_event_omission_except_failed(is_pre_ansible28):
-    tdir = tempfile.mkdtemp()
-    if is_pre_ansible28:
-        inv = 'localhost ansible_connection=local ansible_python_interpreter="/usr/bin/env python"'
-    else:
-        inv = 'localhost ansible_connection=local'
-    r = run(private_data_dir=tdir,
+def test_event_omission_except_failed(is_pre_ansible28, tmp_path):
+    inv = 'localhost ansible_connection=local ansible_python_interpreter="{{ ansible_playbook_python }}"'
+    r = run(private_data_dir=str(tmp_path),
             inventory=inv,
             only_failed_event_data=True,
             playbook=[{'hosts': 'all', 'gather_facts': False, 'tasks': [{'fail': {'msg': "test"}}]}])
@@ -120,10 +105,9 @@ def test_event_omission_except_failed(is_pre_ansible28):
     assert len(all_event_datas) == 1


-def test_runner_on_start(rc, skipif_pre_ansible28):
-    tdir = tempfile.mkdtemp()
-    r = run(private_data_dir=tdir,
-            inventory="localhost ansible_connection=local",
+def test_runner_on_start(rc, skipif_pre_ansible28, tmp_path):
+    r = run(private_data_dir=str(tmp_path),
+            inventory='localhost ansible_connection=local ansible_python_interpreter="{{ ansible_playbook_python }}"',
             playbook=[{'hosts': 'all', 'gather_facts': False, 'tasks': [{'debug': {'msg': "test"}}]}])
     start_events = [x for x in filter(lambda x: 'event' in x and x['event'] == 'runner_on_start',
                                       r.events)]
@@ -165,17 +149,16 @@ def test_include_role_events(project_fixtures):

 @pytest.mark.skipif(find_executable('cgexec') is None,
                     reason="cgexec not available")
-def test_profile_data(skipif_pre_ansible28):
-    tdir = tempfile.mkdtemp()
+def test_profile_data(skipif_pre_ansible28, tmp_path):
     try:
-        r = run(private_data_dir=tdir,
-                inventory="localhost ansible_connection=local",
+        r = run(private_data_dir=str(tmp_path),
+                inventory='localhost ansible_connection=local ansible_python_interpreter="{{ ansible_playbook_python }}"',
                 resource_profiling=True,
                 resource_profiling_base_cgroup='ansible-runner',
                 playbook=[{'hosts': 'all', 'gather_facts': False, 'tasks': [{'debug': {'msg': "test"}}]}])
         assert r.config.env['ANSIBLE_CALLBACK_WHITELIST'] == 'cgroup_perf_recap'
         assert r.config.env['CGROUP_CONTROL_GROUP'].startswith('ansible-runner/')
-        expected_datadir = os.path.join(tdir, 'profiling_data')
+        expected_datadir = os.path.join(str(tmp_path), 'profiling_data')
         assert r.config.env['CGROUP_OUTPUT_DIR'] == expected_datadir
         assert r.config.env['CGROUP_OUTPUT_FORMAT'] == 'json'
         assert r.config.env['CGROUP_CPU_POLL_INTERVAL'] == '0.25'
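A recurring change in this file is replacing ``tempfile.mkdtemp()`` with pytest's built-in ``tmp_path`` fixture, which supplies a per-test ``pathlib.Path`` that pytest creates and cleans up itself. A minimal contrast, with hypothetical test names:

import tempfile


def test_old_style():
    tdir = tempfile.mkdtemp()  # caller is responsible for cleanup
    private_data_dir = tdir


def test_new_style(tmp_path):  # pathlib.Path managed by pytest
    private_data_dir = str(tmp_path)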
30 changes: 18 additions & 12 deletions test/integration/test_interface.py
@@ -66,7 +66,8 @@ def remove_test_env_var():
     assert actual_env == res.config.env


-def test_env_accuracy_inside_container(request, project_fixtures, container_runtime_installed):
+@pytest.mark.test_all_runtimes
+def test_env_accuracy_inside_container(request, project_fixtures, runtime):
     printenv_example = project_fixtures / 'printenv'
     os.environ['SET_BEFORE_TEST'] = 'MADE_UP_VALUE'

@@ -83,7 +84,7 @@ def remove_test_env_var():
         inventory=None,
         envvars={'FROM_TEST': 'FOOBAR'},
         settings={
-            'process_isolation_executable': container_runtime_installed,
+            'process_isolation_executable': runtime,
             'process_isolation': True
         }
     )
@@ -151,12 +152,13 @@ def test_run_command(project_fixtures):
     assert err == ''


-def test_run_ansible_command_within_container(project_fixtures, container_runtime_installed):
+@pytest.mark.test_all_runtimes
+def test_run_ansible_command_within_container(project_fixtures, runtime):
     private_data_dir = project_fixtures / 'debug'
     inventory = private_data_dir / 'inventory' / 'inv_1'
     playbook = private_data_dir / 'project' / 'debug.yml'
     container_kwargs = {
-        'process_isolation_executable': container_runtime_installed,
+        'process_isolation_executable': runtime,
         'process_isolation': True,
         'container_image': defaults.default_container_image
     }
Expand All @@ -171,12 +173,13 @@ def test_run_ansible_command_within_container(project_fixtures, container_runtim
assert err == ''


def test_run_script_within_container(project_fixtures, container_runtime_installed):
@pytest.mark.test_all_runtimes
def test_run_script_within_container(project_fixtures, runtime):
private_data_dir = project_fixtures / 'debug'
script_path = project_fixtures / 'files'
container_volume_mounts = ["{}:{}:Z".format(script_path, script_path)]
container_kwargs = {
'process_isolation_executable': container_runtime_installed,
'process_isolation_executable': runtime,
'process_isolation': True,
'container_image': defaults.default_container_image,
'container_volume_mounts': container_volume_mounts
@@ -230,9 +233,10 @@ def test_get_plugin_docs_async():
     assert r.status == 'successful'


-def test_get_plugin_docs_within_container(container_runtime_installed):
+@pytest.mark.test_all_runtimes
+def test_get_plugin_docs_within_container(runtime):
     container_kwargs = {
-        'process_isolation_executable': container_runtime_installed,
+        'process_isolation_executable': runtime,
         'process_isolation': True,
         'container_image': defaults.default_container_image
     }
@@ -255,9 +259,10 @@ def test_get_plugin_docs_list():
     assert 'file' in out


-def test_get_plugin_docs_list_within_container(container_runtime_installed):
+@pytest.mark.test_all_runtimes
+def test_get_plugin_docs_list_within_container(runtime):
     container_kwargs = {
-        'process_isolation_executable': container_runtime_installed,
+        'process_isolation_executable': runtime,
         'process_isolation': True,
         'container_image': defaults.default_container_image
     }
@@ -293,9 +298,10 @@ def test_get_inventory(project_fixtures):
     assert 'host_2' in out['ungrouped']['hosts']


-def test_get_inventory_within_container(project_fixtures, container_runtime_installed):
+@pytest.mark.test_all_runtimes
+def test_get_inventory_within_container(project_fixtures, runtime):
     container_kwargs = {
-        'process_isolation_executable': container_runtime_installed,
+        'process_isolation_executable': runtime,
         'process_isolation': True,
         'container_image': defaults.default_container_image
     }
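The ``container_kwargs`` pattern repeated in these hunks works the same way with ``run()``: ``process_isolation_executable`` selects the runtime and ``process_isolation`` turns on containerized execution. A minimal sketch (the private_data_dir path is hypothetical):

from ansible_runner import defaults, run

container_kwargs = {
    'process_isolation_executable': 'podman',  # or 'docker'
    'process_isolation': True,
    'container_image': defaults.default_container_image,
}

r = run(
    private_data_dir='/tmp/demo',  # hypothetical path for illustration
    inventory='localhost ansible_connection=local',
    playbook=[{'hosts': 'all', 'gather_facts': False,
               'tasks': [{'debug': {'msg': 'test'}}]}],
    **container_kwargs,
)
assert r.status == 'successful'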
(Diffs for the remaining 11 of the 19 changed files are not shown.)
