Change chart `test` key to object and support cleanup flag

Previously the chart `test` key was a boolean. This changes it to an
object which initially supports an `enabled` flag (covering the previous
use case) and adds support for helm's test cleanup option (under an
`options` key that mirrors the existing `upgrade` options).

Existing charts continue to function the same: cleanup stays always on,
and the old boolean `test` key keeps working for now. When using the new
`test` object, however, cleanup defaults to false to match helm's
interface and to allow for test pod debugging. In the meantime, test
pods can still be deleted on the next armada apply by adding a
pre-upgrade `delete` action for the test pod.

The `test` commands in the API and CLI now support a `cleanup` option as
well.
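
For illustration, a minimal before/after sketch of a chart document's
`test` key (other chart fields elided; the values shown are
illustrative):

    # Old, deprecated boolean form; cleanup is always on
    test: true

    # New object form; cleanup defaults to false when omitted
    test:
      enabled: true
      options:
        cleanup: true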

Change-Id: I92f8822aeaedb0767cb07515d42d8e4f3e088150
Author: Sean Eagan, 2018-06-22 14:34:05 -05:00
parent f235512d57
commit 2a1a94828d
10 changed files with 196 additions and 34 deletions


@@ -45,7 +45,11 @@ class TestReleasesReleaseNameController(api.BaseResource):
                     CONF.tiller_port,
                     tiller_namespace=req.get_param(
                         'tiller_namespace', default=CONF.tiller_namespace))
-                success = test_release_for_success(tiller, release)
+                cleanup = req.get_param_as_bool('cleanup')
+                if cleanup is None:
+                    cleanup = False
+                success = test_release_for_success(
+                    tiller, release, cleanup=cleanup)
                 # TODO(fmontei): Provide more sensible exception(s) here.
             except Exception as e:
                 err_message = 'Failed to test {}: {}'.format(release, e)
@@ -154,12 +158,24 @@ class TestReleasesManifestController(api.BaseResource):
             for group in armada_obj.get(const.KEYWORD_ARMADA).get(
                     const.KEYWORD_GROUPS):
                 for ch in group.get(const.KEYWORD_CHARTS):
-                    release_name = release_prefixer(prefix,
-                                                    ch.get('chart').get('release'))
+                    chart = ch['chart']
+                    release_name = release_prefixer(prefix, chart.get('release'))
+                    cleanup = req.get_param_as_bool('cleanup')
+                    if cleanup is None:
+                        test_chart_override = chart.get('test', {})
+                        if isinstance(test_chart_override, bool):
+                            self.logger.warn(
+                                'Boolean value for chart `test` key is deprecated '
+                                'and will be removed. Use `test.enabled` instead.')
+                            # Use old default value.
+                            cleanup = True
+                        else:
+                            cleanup = test_chart_override.get('options', {}).get(
+                                'cleanup', False)
                     if release_name in known_releases:
                         self.logger.info('RUNNING: %s tests', release_name)
-                        success = test_release_for_success(tiller, release_name)
+                        success = test_release_for_success(
+                            tiller, release_name, cleanup=cleanup)
                         if success:
                             self.logger.info("PASSED: %s", release_name)
                             message['test']['passed'].append(release_name)
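
For reference, the controller change above amounts to one optional
boolean query parameter on the existing release test endpoint (path and
parameter as exercised by the unit tests later in this diff):

    GET /api/v1.0/test/{release}?cleanup=true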


@@ -74,19 +74,25 @@ SHORT_DESC = "Command tests releases."
     help=("The target manifest to run. Required for specifying "
           "which manifest to run when multiple are available."),
     default=None)
+@click.option(
+    '--cleanup',
+    help=("Delete test pods upon completion."),
+    is_flag=True,
+    default=None)
 @click.option('--debug', help="Enable debug logging.", is_flag=True)
 @click.pass_context
 def test_charts(ctx, file, release, tiller_host, tiller_port, tiller_namespace,
-                target_manifest, debug):
+                target_manifest, cleanup, debug):
     CONF.debug = debug
     TestChartManifest(ctx, file, release, tiller_host, tiller_port,
-                      tiller_namespace, target_manifest).safe_invoke()
+                      tiller_namespace, target_manifest,
+                      cleanup).safe_invoke()


 class TestChartManifest(CliAction):
     def __init__(self, ctx, file, release, tiller_host, tiller_port,
-                 tiller_namespace, target_manifest):
+                 tiller_namespace, target_manifest, cleanup):
         super(TestChartManifest, self).__init__()

         self.ctx = ctx
@@ -96,6 +102,7 @@ class TestChartManifest(CliAction):
         self.tiller_port = tiller_port
         self.tiller_namespace = tiller_namespace
         self.target_manifest = target_manifest
+        self.cleanup = cleanup

     def invoke(self):
         tiller = Tiller(
@@ -107,7 +114,8 @@ class TestChartManifest(CliAction):
         if self.release:
             if not self.ctx.obj.get('api', False):
                 self.logger.info("RUNNING: %s tests", self.release)
-                success = test_release_for_success(tiller, self.release)
+                success = test_release_for_success(
+                    tiller, self.release, cleanup=self.cleanup)
                 if success:
                     self.logger.info("PASSED: %s", self.release)
                 else:
@@ -127,7 +135,7 @@ class TestChartManifest(CliAction):
         if self.file:
             if not self.ctx.obj.get('api', False):
-                documents = yaml.safe_load_all(open(self.file).read())
+                documents = list(yaml.safe_load_all(open(self.file).read()))
                 armada_obj = Manifest(
                     documents,
                     target_manifest=self.target_manifest).get_manifest()
@@ -137,14 +145,28 @@ class TestChartManifest(CliAction):
                 for group in armada_obj.get(const.KEYWORD_ARMADA).get(
                         const.KEYWORD_GROUPS):
                     for ch in group.get(const.KEYWORD_CHARTS):
+                        chart = ch['chart']
                         release_name = release_prefixer(
-                            prefix,
-                            ch.get('chart').get('release'))
+                            prefix, chart.get('release'))

                         if release_name in known_release_names:
+                            cleanup = self.cleanup
+                            if cleanup is None:
+                                test_chart_override = chart.get('test', {})
+                                if isinstance(test_chart_override, bool):
+                                    self.logger.warn(
+                                        'Boolean value for chart `test` key is'
+                                        ' deprecated and support for this will'
+                                        ' be removed. Use `test.enabled` '
+                                        'instead.')
+                                    # Use old default value.
+                                    cleanup = True
+                                else:
+                                    cleanup = test_chart_override.get(
+                                        'options', {}).get('cleanup', False)
                             self.logger.info('RUNNING: %s tests', release_name)
                             success = test_release_for_success(
-                                tiller, release_name)
+                                tiller, release_name, cleanup=cleanup)
                             if success:
                                 self.logger.info("PASSED: %s", release_name)
                             else:


@@ -13,6 +13,7 @@
 # limitations under the License.

 import difflib
+import functools
 import time

 import yaml
@@ -326,8 +327,21 @@ class Armada(object):
                 # TODO(MarshM) better handling of timeout/timer
                 cg_max_timeout = max(wait_timeout, cg_max_timeout)

-                # Chart test policy can override ChartGroup, if specified
-                test_this_chart = chart.get('test', cg_test_all_charts)
+                test_chart_override = chart.get('test')
+                # Use old default value when not using newer `test` key
+                test_cleanup = True
+                if test_chart_override is None:
+                    test_this_chart = cg_test_all_charts
+                elif isinstance(test_chart_override, bool):
+                    LOG.warn('Boolean value for chart `test` key is'
+                             ' deprecated and support for this will'
+                             ' be removed. Use `test.enabled` '
+                             'instead.')
+                    test_this_chart = test_chart_override
+                else:
+                    test_this_chart = test_chart_override['enabled']
+                    test_cleanup = test_chart_override.get('options', {}).get(
+                        'cleanup', False)

                 chartbuilder = ChartBuilder(chart)
                 protoc_chart = chartbuilder.get_helm_chart()
@@ -438,6 +452,7 @@ class Armada(object):
                 # Sequenced ChartGroup should run tests after each Chart
                 timer = int(round(deadline - time.time()))
+                test_chart_args = (release_name, timer, test_cleanup)
                 if test_this_chart:
                     if cg_sequenced:
                         LOG.info(
@@ -449,12 +464,14 @@ class Armada(object):
                             LOG.error(reason)
                             raise armada_exceptions.ArmadaTimeoutException(
                                 reason)
-                        self._test_chart(release_name, timer)
+                        self._test_chart(*test_chart_args)

                     # Un-sequenced ChartGroup should run tests at the end
                     else:
                         # Keeping track of time remaining
-                        tests_to_run.append((release_name, timer))
+                        tests_to_run.append(
+                            functools.partial(self._test_chart,
+                                              *test_chart_args))

             # End of Charts in ChartGroup
             LOG.info('All Charts applied in ChartGroup %s.', cg_name)
@@ -486,8 +503,8 @@ class Armada(object):
                     timeout=timer)

             # After entire ChartGroup is healthy, run any pending tests
-            for (test, test_timer) in tests_to_run:
-                self._test_chart(test, test_timer)
+            for callback in tests_to_run:
+                callback()

         self.post_flight_ops()
@@ -531,7 +548,7 @@ class Armada(object):
             k8s_wait_attempt_sleep=self.k8s_wait_attempt_sleep,
             timeout=timeout)

-    def _test_chart(self, release_name, timeout):
+    def _test_chart(self, release_name, timeout, cleanup):
         if self.dry_run:
             LOG.info(
                 'Skipping test during `dry-run`, would have tested '
@@ -539,7 +556,7 @@ class Armada(object):
             return True

         success = test_release_for_success(
-            self.tiller, release_name, timeout=timeout)
+            self.tiller, release_name, timeout=timeout, cleanup=cleanup)
         if success:
             LOG.info("Test passed for release: %s", release_name)
         else:


@@ -22,8 +22,10 @@ TESTRUN_STATUS_RUNNING = 3
 def test_release_for_success(tiller,
                              release,
-                             timeout=const.DEFAULT_TILLER_TIMEOUT):
-    test_suite_run = tiller.test_release(release, timeout)
+                             timeout=const.DEFAULT_TILLER_TIMEOUT,
+                             cleanup=False):
+    test_suite_run = tiller.test_release(
+        release, timeout=timeout, cleanup=cleanup)
     results = getattr(test_suite_run, 'results', [])
     failed_results = [r for r in results if r.status != TESTRUN_STATUS_SUCCESS]
     return len(failed_results) == 0


@@ -440,7 +440,7 @@ class Tiller(object):
     def test_release(self,
                      release,
                      timeout=const.DEFAULT_TILLER_TIMEOUT,
-                     cleanup=True):
+                     cleanup=False):
         '''
         :param release - name of release to test
         :param timeout - runtime before exiting


@@ -59,7 +59,23 @@ data:
             type: boolean
         additionalProperties: false
       test:
-        type: boolean
+        anyOf:
+          # TODO: Remove boolean support after deprecation period.
+          - type: boolean
+          - type: object
+            properties:
+              enabled:
+                type: boolean
+              options:
+                type: object
+                properties:
+                  cleanup:
+                    type: boolean
+                additionalProperties: false
+            additionalProperties: false
+            required:
+              - enabled
       timeout:
         type: integer
       wait:


@@ -33,6 +33,8 @@ class TestReleasesManifestControllerTest(base.BaseControllerTest):
         rules = {'armada:tests_manifest': '@'}
         self.policy.set_rules(rules)

+        # TODO: Don't use example charts in tests.
+        # TODO: Test cleanup arg is taken from url, then manifest.
         manifest_path = os.path.join(os.getcwd(), 'examples',
                                      'keystone-manifest.yaml')
         with open(manifest_path, 'r') as f:
@@ -61,7 +63,10 @@ class TestReleasesReleaseNameControllerTest(base.BaseControllerTest):
         mock_test_release_for_success.return_value = True

-        resp = self.app.simulate_get('/api/v1.0/test/fake-release')
+        release = 'fake-release'
+        resp = self.app.simulate_get('/api/v1.0/test/{}'.format(release))
+        mock_test_release_for_success.assert_has_calls(
+            [mock.call(mock_tiller.return_value, release, cleanup=False)])
         self.assertEqual(200, resp.status_code)
         self.assertEqual('MESSAGE: Test Pass',
                          json.loads(resp.text)['message'])
@@ -74,12 +79,29 @@ class TestReleasesReleaseNameControllerTest(base.BaseControllerTest):
         self.policy.set_rules(rules)

         mock_test_release_for_success.return_value = False

-        resp = self.app.simulate_get('/api/v1.0/test/fake-release')
+        release = 'fake-release'
+        resp = self.app.simulate_get('/api/v1.0/test/{}'.format(release))
         self.assertEqual(200, resp.status_code)
         self.assertEqual('MESSAGE: Test Fail',
                          json.loads(resp.text)['message'])

+    @mock.patch.object(test, 'test_release_for_success')
+    @mock.patch.object(test, 'Tiller')
+    def test_test_controller_cleanup(self, mock_tiller,
+                                     mock_test_release_for_success):
+        rules = {'armada:test_release': '@'}
+        self.policy.set_rules(rules)
+
+        mock_test_release_for_success.return_value = True
+
+        release = 'fake-release'
+        resp = self.app.simulate_get(
+            '/api/v1.0/test/{}'.format(release), query_string='cleanup=true')
+        mock_test_release_for_success.assert_has_calls(
+            [mock.call(mock_tiller.return_value, release, cleanup=True)])
+        self.assertEqual(200, resp.status_code)
+        self.assertEqual('MESSAGE: Test Pass',
+                         json.loads(resp.text)['message'])

 @test_utils.attr(type=['negative'])
 class TestReleasesManifestControllerNegativeTest(base.BaseControllerTest):


@@ -111,6 +111,10 @@ data:
     options:
       force: true
       recreate_pods: true
+  test:
+    enabled: true
+    options:
+      cleanup: true
 ---
 schema: armada/Chart/v1
 metadata:
@@ -129,6 +133,8 @@ data:
   dependencies: []
   wait:
     timeout: 10
+  test:
+    enabled: true
 """

 CHART_SOURCES = [('git://github.com/dummy/armada',
@@ -163,6 +169,9 @@ class ArmadaHandlerTestCase(base.ArmadaTestCase):
                     'values': {},
                     'wait': {
                         'timeout': 10
+                    },
+                    'test': {
+                        'enabled': True
                     }
                 }
             }, {
@@ -190,6 +199,12 @@ class ArmadaHandlerTestCase(base.ArmadaTestCase):
                        'force': True,
                        'recreate_pods': True
                    }
+                    },
+                    'test': {
+                        'enabled': True,
+                        'options': {
+                            'cleanup': True
+                        }
                    }
                }
            }, {
@@ -319,13 +334,14 @@ class ArmadaHandlerTestCase(base.ArmadaTestCase):
         chart_group = armada_obj.manifest['armada']['chart_groups'][0]
         charts = chart_group['chart_group']
+        cg_test_all_charts = chart_group.get('test_charts', False)

         m_tiller = mock_tiller.return_value
         m_tiller.list_charts.return_value = known_releases

         if test_failure_to_run:

-            def fail(tiller, release, timeout=None):
+            def fail(tiller, release, timeout=None, cleanup=False):
                 status = AttrDict(
                     **{'info': AttrDict(**{'Description': 'Failed'})})
                 raise tiller_exceptions.ReleaseException(
@@ -419,12 +435,26 @@ class ArmadaHandlerTestCase(base.ArmadaTestCase):
                         values=yaml.safe_dump(chart['values']),
                         wait=this_chart_should_wait,
                         timeout=chart['wait']['timeout']))

-                test_this_chart = chart.get(
-                    'test', chart_group.get('test_charts', False))
+                test_chart_override = chart.get('test')
+                # Use old default value when not using newer `test` key
+                test_cleanup = True
+                if test_chart_override is None:
+                    test_this_chart = cg_test_all_charts
+                elif isinstance(test_chart_override, bool):
+                    test_this_chart = test_chart_override
+                else:
+                    test_this_chart = test_chart_override.get('enabled', True)
+                    test_cleanup = test_chart_override.get('options', {}).get(
+                        'cleanup', False)

                 if test_this_chart:
                     expected_test_release_for_success_calls.append(
-                        mock.call(m_tiller, release_name, timeout=mock.ANY))
+                        mock.call(
+                            m_tiller,
+                            release_name,
+                            timeout=mock.ANY,
+                            cleanup=test_cleanup))

         # Verify that at least 1 release is either installed or updated.
         self.assertTrue(


@@ -98,7 +98,7 @@ Chart
 | protected       | object   | do not delete FAILED releases when encountered from previous run (provide the        |
 |                 |          | 'continue_processing' bool to continue or halt execution (default: halt))            |
 +-----------------+----------+---------------------------------------------------------------------------------------+
-| test            | bool     | run pre-defined helm tests in a chart                                                 |
+| test            | object   | run helm tests on the chart after install/upgrade                                     |
 +-----------------+----------+---------------------------------------------------------------------------------------+
 | install         | object   | install the chart into your Kubernetes cluster                                        |
 +-----------------+----------+---------------------------------------------------------------------------------------+
@@ -113,6 +113,40 @@ Chart
 | timeout         | int      | time (in seconds) allotted for chart to deploy when 'wait' flag is set (DEPRECATED)  |
 +-----------------+----------+---------------------------------------------------------------------------------------+

+Test
+^^^^
+
++-------------+----------+---------------------------------------------------------------+
+| keyword     | type     | action                                                        |
++=============+==========+===============================================================+
+| enabled     | bool     | whether to enable helm tests for this chart                   |
++-------------+----------+---------------------------------------------------------------+
+| options     | object   | options to pass through to helm                               |
++-------------+----------+---------------------------------------------------------------+
+
+.. DANGER::
+
+    DEPRECATION: In addition to an object with the above fields, the ``test``
+    key currently also supports ``bool``, which maps to ``enabled``, but this
+    is deprecated and will be removed. In that case the ``cleanup`` option
+    below is set to true for backward compatibility.
+
+Test - Options
+^^^^^^^^^^^^^^
+
++-------------+----------+---------------------------------------------------------------+
+| keyword     | type     | action                                                        |
++=============+==========+===============================================================+
+| cleanup     | bool     | cleanup test pods after test completion, defaults to false    |
++-------------+----------+---------------------------------------------------------------+
+
+.. note::
+
+    The preferred way to achieve test cleanup is to add a pre-upgrade delete
+    action on the test pod, which allows for debugging the test pod up until
+    the next upgrade (see the sketch after this file's diff).
+
 Upgrade, Install - Pre or Post
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -181,6 +215,8 @@ Chart Example
     timeout: 100
     protected:
       continue_processing: false
+    test:
+      enabled: true
     install:
       no_hooks: false
     upgrade:
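
The note in the docs above refers to the cleanup alternative also
mentioned in the commit message. A sketch of such a chart snippet,
assuming delete actions accept a `pod` type; the labels are hypothetical
and would need to match those of the chart's actual test pod:

    upgrade:
      pre:
        delete:
          - type: pod
            labels:
              application: keystone
              component: test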


@@ -77,7 +77,8 @@ metadata:
   name: keystone
 data:
   chart_name: keystone
-  test: true
+  test:
+    enabled: true
   release: keystone
   namespace: openstack
   timeout: 100