Make layering work for grandparents, not just parents

This PS makes layering work for grandparents, not just parents. This
means that, given a document in layer N, not only can a parent
document in layer N+1 be used, but so can a grandparent in layer N+2.
Note that a document in layer N+1 will be preferred over one
in N+2.

To provide a concrete example, given layers 'global', 'region'
and 'site', a document with layer 'site' can be layered with
a grandparent in layer 'global' if a document in layer
'region' isn't found. Alternatively, if the document with
layer 'region' is found then it will be used as the parent
document.
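
To make the fallback concrete, here is a minimal, self-contained Python
sketch of the idea (illustrative only; the function name and call shape
are hypothetical, not Deckhand's actual API): walk upward through the
layerOrder from the child's layer and use the first ancestor layer that
actually contains a matching parent.

    # Illustrative sketch only; not Deckhand's implementation.
    def find_parent_layer(layer_order, child_layer, layers_with_parents):
        """Return the closest ancestor layer to ``child_layer`` with a parent."""
        child_idx = layer_order.index(child_layer)
        # Walk upward from the layer directly above the child, stopping at
        # the first layer that actually has a candidate parent document.
        for idx in range(child_idx - 1, -1, -1):
            if layer_order[idx] in layers_with_parents:
                return layer_order[idx]
        return None  # Parentless document.

    layer_order = ['global', 'region', 'site']
    # A 'region' parent is preferred over the 'global' grandparent when present.
    assert find_parent_layer(layer_order, 'site', {'global', 'region'}) == 'region'
    # Without a 'region' document, the 'global' grandparent is used instead.
    assert find_parent_layer(layer_order, 'site', {'global'}) == 'global'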

This PS refactors the algorithm for determining the parent document,
tweaks the layering algorithm itself to reference the
correct parent document, and adds unit tests to confirm the
two scenarios described above.
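
The tweak to the layering algorithm matters because the rendered data
for a child can no longer be assumed to live exactly one layer above it.
A rough sketch of the difference, with all names hypothetical:

    # Illustrative sketch only; not Deckhand's implementation.
    layer_order = ['global', 'region', 'site']
    # Only the 'global' grandparent produced rendered data for this document.
    rendered_data_by_layer = {0: {'chart_name': 'etcd'}}

    child_idx = layer_order.index('site')
    parent_idx = layer_order.index('global')

    # Old lookup: assumes the parent sits one layer above the child, which
    # points at 'region' here and misses the rendered data entirely.
    assert (child_idx - 1) not in rendered_data_by_layer

    # New lookup: index by the parent's own layer, which always exists.
    assert rendered_data_by_layer[parent_idx] == {'chart_name': 'etcd'}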

Change-Id: I3779c7ae030bbad44b2d5ddfa5105d1a073ba670
Felipe Monteiro 2018-02-09 21:50:35 +00:00
parent 62cd76dc8f
commit 9f7ecc0582
6 changed files with 317 additions and 173 deletions

View File

@@ -112,7 +112,9 @@ class RenderedDocumentsResource(api_base.BaseResource):
document_layering = layering.DocumentLayering(
documents, substitution_sources)
rendered_documents = document_layering.render()
except (errors.IndeterminateDocumentParent,
except (errors.InvalidDocumentLayer,
errors.InvalidDocumentParent,
errors.IndeterminateDocumentParent,
errors.UnsupportedActionMethod,
errors.MissingDocumentKey) as e:
raise falcon.HTTPBadRequest(description=e.format_message())

View File

@@ -44,30 +44,65 @@ class DocumentLayering(object):
SUPPORTED_METHODS = ('merge', 'replace', 'delete')
def _is_actual_child_document(self, document, potential_child,
target_layer):
# Documents with different schemas are never layered together,
# so consider only documents with same schema as candidates.
is_potential_child = (
potential_child.layer == target_layer and
potential_child.schema == document.schema
)
if is_potential_child:
parent_selector = potential_child.parent_selector
labels = document.labels
# Labels are key-value pairs which are unhashable, so use ``all``
# instead.
return all(labels.get(x) == y for x, y in parent_selector.items())
return False
def _replace_older_parent_with_younger_parent(self, child, parent,
all_children):
# If child has layer N, parent N+1, and current_parent N+2, then swap
# parent with current_parent. In other words, if parent's layer is
# closer to child's layer than current_parent's layer, then use parent.
current_parent = self._parents.get((child.name, child.schema))
if current_parent:
if (self._layer_order.index(parent.layer) >
self._layer_order.index(current_parent.layer)):
self._parents[(child.name, child.schema)] = parent
self._children[
(current_parent.name, current_parent.schema)].remove(child)
all_children[child] -= 1
else:
self._parents.setdefault((child.name, child.schema), parent)
def _is_actual_child_document(self, document, potential_child):
if document == potential_child:
return False
document_layer_idx = self._layer_order.index(document.layer)
child_layer_idx = self._layer_order.index(potential_child.layer)
parent_selector = potential_child.parent_selector
labels = document.labels
# Labels are key-value pairs which are unhashable, so use ``all``
# instead.
is_actual_child = all(
labels.get(x) == y for x, y in parent_selector.items())
if is_actual_child:
# Documents with different `schema`s are never layered together,
# so consider only documents with same schema as candidates.
if potential_child.schema != document.schema:
reason = ('Child has parentSelector which references parent, '
'but their `schema`s do not match.')
LOG.error(reason)
raise errors.InvalidDocumentParent(
parent_schema=document.schema, parent_name=document.name,
document_schema=potential_child.schema,
document_name=potential_child.name, reason=reason)
# The highest order is 0, so the parent should be lower than the
# child.
if document_layer_idx >= child_layer_idx:
reason = ('Child has parentSelector which references parent, '
'but the child layer %s must be lower than the '
'parent layer %s for layerOrder %s.' % (
potential_child.layer, document.layer,
', '.join(self._layer_order)))
LOG.error(reason)
raise errors.InvalidDocumentParent(
parent_schema=document.schema, parent_name=document.name,
document_schema=potential_child.schema,
document_name=potential_child.name, reason=reason)
return is_actual_child
def _calc_document_children(self, document):
try:
document_layer_idx = self._layer_order.index(document.layer)
child_layer = self._layer_order[document_layer_idx + 1]
except IndexError:
# The lowest layer has been reached, so no children.
return
potential_children = set()
for label_key, label_val in document.labels.items():
_potential_children = self._documents_by_labels.get(
@@ -75,8 +110,7 @@ class DocumentLayering(object):
potential_children |= set(_potential_children)
for potential_child in potential_children:
if self._is_actual_child_document(document, potential_child,
child_layer):
if self._is_actual_child_document(document, potential_child):
yield potential_child
def _calc_all_document_children(self):
@@ -106,16 +140,21 @@ class DocumentLayering(object):
# Mapping of (doc.name, doc.metadata.name) => children, where children
# are the documents whose `parentSelector` references the doc.
self._children = {}
self._parents = {}
self._parentless_documents = []
for layer in self._layer_order:
documents_in_layer = self._documents_by_layer.get(layer, [])
for document in documents_in_layer:
children = list(self._calc_document_children(document))
self._children[(document.name, document.schema)] = children
if children:
all_children.update(children)
self._children.setdefault(
(document.name, document.schema), children)
for child in children:
self._replace_older_parent_with_younger_parent(
child, document, all_children)
all_children_elements = list(all_children.elements())
secondary_documents = []
@@ -172,26 +211,56 @@ class DocumentLayering(object):
:param substitution_sources: List of documents that are potential
sources for substitution. Should only include concrete documents.
:type substitution_sources: List[dict]
:raises LayeringPolicyNotFound: If no LayeringPolicy was found among
list of ``documents``.
:raises InvalidDocumentLayer: If document layer not found in layerOrder
for provided LayeringPolicy.
:raises InvalidDocumentParent: If child references parent but they
don't have the same schema or their layers are incompatible.
:raises IndeterminateDocumentParent: If more than one parent document
was found for a document.
"""
self._documents_to_layer = []
self._documents_by_layer = {}
self._documents_by_labels = {}
self._layering_policy = None
layering_policies = list(
filter(lambda x: x.get('schema').startswith(
types.LAYERING_POLICY_SCHEMA), documents))
if layering_policies:
self._layering_policy = document_wrapper.DocumentDict(
layering_policies[0])
if len(layering_policies) > 1:
LOG.warning('More than one layering policy document was '
'passed in. Using the first one found: [%s] %s.',
self._layering_policy.schema,
self._layering_policy.name)
if self._layering_policy is None:
error_msg = (
'No layering policy found in the system so could not render '
'documents.')
LOG.error(error_msg)
raise errors.LayeringPolicyNotFound()
for document in documents:
document = document_wrapper.DocumentDict(document)
if document.schema.startswith(types.LAYERING_POLICY_SCHEMA):
if self._layering_policy:
LOG.warning('More than one layering policy document was '
'passed in. Using the first one found: [%s] '
'%s.', document.schema, document.name)
else:
self._layering_policy = document
continue
if document.layering_definition:
self._documents_to_layer.append(document)
if document.layer:
if document.layer not in self._layering_policy.layer_order:
LOG.error('Document layer %s for document [%s] %s not '
'in layerOrder: %s.', document.layer,
document.schema, document.name,
self._layering_policy.layer_order)
raise errors.InvalidDocumentLayer(
document_schema=document.schema,
document_name=document.name,
layer_order=', '.join(
self._layering_policy.layer_order),
layering_policy_name=self._layering_policy.name)
self._documents_by_layer.setdefault(document.layer, [])
self._documents_by_layer[document.layer].append(document)
if document.parent_selector:
@@ -201,17 +270,9 @@ class DocumentLayering(object):
self._documents_by_labels[
(label_key, label_val)].append(document)
if self._layering_policy is None:
error_msg = (
'No layering policy found in the system so could not reder '
'documents.')
LOG.error(error_msg)
raise errors.LayeringPolicyNotFound()
self._layer_order = self._get_layering_order(self._layering_policy)
self._calc_all_document_children()
self._substitution_sources = substitution_sources or []
self.secrets_substitution = secrets_manager.SecretsSubstitution(
self._substitution_sources)
@@ -228,6 +289,11 @@ class DocumentLayering(object):
* `replace` - overwrite data at the specified path and replace it
with the data given in this document
* `delete` - remove the data at the specified path
:raises UnsupportedActionMethod: If the layering action isn't found
among ``self.SUPPORTED_METHODS``.
:raises MissingDocumentKey: If a layering action path isn't found
in both the parent and child documents being layered together.
"""
method = action['method']
if method not in self.SUPPORTED_METHODS:
@@ -320,6 +386,11 @@ class DocumentLayering(object):
:returns: The list of rendered documents (does not include layering
policy document).
:rtype: List[dict]
:raises UnsupportedActionMethod: If the layering action isn't found
among ``self.SUPPORTED_METHODS``.
:raises MissingDocumentKey: If a layering action path isn't found
in both the parent and child documents being layered together.
"""
# ``rendered_data_by_layer`` tracks the set of changes across all
# actions across each layer for a specific document.
@@ -345,10 +416,12 @@ class DocumentLayering(object):
# Keep iterating as long as a child exists.
for child in self._get_children(doc):
# Retrieve the most up-to-date rendered_data (by
# referencing the child's parent's data).
# Retrieve the most up-to-date rendered_data (by referencing
# the child's parent's data).
child_layer_idx = self._layer_order.index(child.layer)
rendered_data = rendered_data_by_layer[child_layer_idx - 1]
parent = self._parents[child.name, child.schema]
parent_layer_idx = self._layer_order.index(parent.layer)
rendered_data = rendered_data_by_layer[parent_layer_idx]
# Apply each action to the current document.
for action in child.actions:
@@ -385,4 +458,4 @@ class DocumentLayering(object):
if substituted_data:
doc = substituted_data[0]
return self._documents_to_layer + [self._layering_policy]
return self._documents_to_layer

View File

@@ -180,6 +180,32 @@ class InvalidDocumentFormat(DeckhandException):
code = 400
class InvalidDocumentLayer(DeckhandException):
"""The document layer is invalid.
**Troubleshoot:**
* Check that the document layer is contained in the layerOrder in the
registered LayeringPolicy in the system.
"""
msg_fmt = ("Invalid layer for document [%(document_schema)s] "
"%(document_name)s was not found in layerOrder %(layer_order)s "
"for provided LayeringPolicy %(layering_policy_name)s.")
code = 400
class InvalidDocumentParent(DeckhandException):
"""The document parent is invalid.
**Troubleshoot:**
* Check that the document `schema` and parent `schema` match.
* Check that the document layer is lower-order than the parent layer.
"""
msg_fmt = ("The document parent [%(parent_schema)s] %(parent_name)s is "
"invalid for document [%(document_schema)s] %(document_name)s. "
"Reason: %(reason)s")
code = 400
class IndeterminateDocumentParent(DeckhandException):
"""More than one parent document was found for a document.

View File

@@ -70,45 +70,3 @@ tests:
a:
z: 3
b: 4
- name: update_bucket_layering
desc: |-
Update `LayeringPolicy` in bucket 'layering', so that it only has 2
layers. This validates that, by dropping the middle layer "region",
layering is still performed using the global and site documents.
PUT: /api/v1.0/buckets/layering/documents
status: 200
data: |-
---
schema: deckhand/LayeringPolicy/v1
metadata:
schema: metadata/Control/v1
name: layering-policy
data:
layerOrder:
- global
- site
- name: verify_layering_again
desc: Check for expected layering with only global and site layers
GET: /api/v1.0/revisions/$RESPONSE['$.[0].status.revision']/rendered-documents
query_parameters:
sort:
- schema
- metadata.name
status: 200
response_multidoc_jsonpaths:
$.`len`: 3
$.[0].schema: deckhand/LayeringPolicy/v1
$.[1].schema: example/Kind/v1
$.[1].metadata.name: site-with-delete-action
$.[1].metadata.schema: metadata/Document/v1
$.[1].data: {}
$.[2].schema: example/Kind/v1
$.[2].metadata.name: site-with-merge-action
$.[2].metadata.schema: metadata/Document/v1
$.[2].data:
a:
x: 1
y: 2
b: 4

View File

@@ -12,6 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import yaml
from deckhand.engine import layering
from deckhand import errors
from deckhand import factories
@@ -23,20 +25,10 @@ class TestDocumentLayering(test_base.DeckhandTestCase):
def _test_layering(self, documents, site_expected=None,
region_expected=None, global_expected=None,
exception_expected=None, substitution_sources=None):
substitution_sources=None):
document_layering = layering.DocumentLayering(
documents, substitution_sources)
if all([site_expected, region_expected, global_expected,
exception_expected]):
raise ValueError(
'(site_expected|region_expected|global_expected) and '
'(exception_expected) are mutually exclusive.')
if exception_expected:
self.assertRaises(exception_expected, document_layering.render)
return
site_docs = []
region_docs = []
global_docs = []
@@ -214,26 +206,6 @@ class TestDocumentLayering2Layers(TestDocumentLayering):
documents = doc_factory.gen_test(mapping, site_abstract=False)
self._test_layering(documents, site_expected[idx])
def test_layering_documents_with_different_schemas_do_not_layer(self):
"""Validates that documents with different schemas are not layered
together.
"""
mapping = {
"_GLOBAL_DATA_1_": {"data": {"a": {"x": 1, "y": 2}}},
"_SITE_DATA_1_": {"data": {"b": 4}},
"_SITE_ACTIONS_1_": {
"actions": [{"method": "merge", "path": "."}]}
}
doc_factory = factories.DocumentFactory(2, [1, 1])
documents = doc_factory.gen_test(mapping, site_abstract=False)
documents[1]['schema'] = 'deckhand/Document/v1'
documents[2]['schema'] = 'deckhand/Document/v2'
global_expected = {"a": {"x": 1, "y": 2}}
site_expected = {'b': 4}
self._test_layering(documents, site_expected=site_expected,
global_expected=global_expected)
class TestDocumentLayering2LayersAbstractConcrete(TestDocumentLayering):
"""The the 2-layer payload with site/global layers concrete.
@@ -441,8 +413,8 @@ class TestDocumentLayering3Layers(TestDocumentLayering):
doc_factory = factories.DocumentFactory(3, [1, 1, 1])
documents = doc_factory.gen_test(mapping, site_abstract=False)
self._test_layering(
documents, exception_expected=errors.MissingDocumentKey)
self.assertRaises(errors.MissingDocumentKey, self._test_layering,
documents)
def test_layering_delete_path_a(self):
mapping = {
@@ -757,6 +729,126 @@ class TestDocumentLayering3LayersScenario(TestDocumentLayering):
site_expected = {'b': 4}
self._test_layering(documents, site_expected)
def test_layering_using_grandparent_as_parent(self):
"""Test that layering works when a child document has layer N and its
parent document has layer N+2. In other words, given layerOrder of
'global', 'region' and 'site', check that a document with 'layer' site
can be layered with a parent with layer 'global'.
"""
test_yaml = """
---
metadata:
labels: {name: kubernetes-etcd-global}
layeringDefinition: {abstract: true, layer: global}
name: kubernetes-etcd-global
schema: metadata/Document/v1
storagePolicy: cleartext
schema: armada/Chart/v1
data:
chart_name: etcd
---
# This document is included so that this middle layer isn't stripped away.
metadata:
layeringDefinition:
abstract: false
actions:
- {method: merge, path: .}
layer: region
name: kubernetes-etcd-region
schema: metadata/Document/v1
storagePolicy: cleartext
schema: armada/Chart/v1
data: {}
---
metadata:
layeringDefinition:
abstract: false
actions:
- {method: merge, path: .}
layer: site
parentSelector: {name: kubernetes-etcd-global}
name: kubernetes-etcd
schema: metadata/Document/v1
storagePolicy: cleartext
schema: armada/Chart/v1
data: {}
---
schema: deckhand/LayeringPolicy/v1
metadata:
schema: metadata/Control/v1
name: layering-policy
data:
layerOrder:
- global
- region
- site
...
"""
documents = list(yaml.safe_load_all(test_yaml))
self._test_layering(documents, site_expected={'chart_name': 'etcd'})
def test_layering_using_first_parent_as_actual_parent(self):
"""Test that layering works when a child document has layer N and has
a parent in layer N+1 and another parent in layer N+2 but selects
"younger" parent in layer N+1.
"""
test_yaml = """
---
metadata:
labels: {name: kubernetes-etcd}
layeringDefinition:
abstract: true
layer: global
name: kubernetes-etcd-global
schema: metadata/Document/v1
storagePolicy: cleartext
schema: armada/Chart/v1
data:
chart_name: global-etcd
---
metadata:
labels: {name: kubernetes-etcd}
layeringDefinition:
abstract: false
actions:
- {method: merge, path: .}
layer: region
parentSelector: {name: kubernetes-etcd}
name: kubernetes-etcd-region
schema: metadata/Document/v1
storagePolicy: cleartext
schema: armada/Chart/v1
data:
chart_name: region-etcd
---
metadata:
layeringDefinition:
abstract: false
actions:
- {method: merge, path: .}
layer: site
parentSelector: {name: kubernetes-etcd}
name: kubernetes-etcd
schema: metadata/Document/v1
storagePolicy: cleartext
schema: armada/Chart/v1
data: {}
---
schema: deckhand/LayeringPolicy/v1
metadata:
schema: metadata/Control/v1
name: layering-policy
data:
layerOrder:
- global
- region
- site
...
"""
documents = list(yaml.safe_load_all(test_yaml))
self._test_layering(
documents, site_expected={'chart_name': 'region-etcd'})
class TestDocumentLayering3Layers2Regions2Sites(TestDocumentLayering):

View File

@@ -34,8 +34,8 @@ class TestDocumentLayeringNegative(
doc_factory = factories.DocumentFactory(2, [1, 1])
documents = doc_factory.gen_test(mapping, site_abstract=False)
self._test_layering(
documents, exception_expected=errors.MissingDocumentKey)
self.assertRaises(errors.MissingDocumentKey, self._test_layering,
documents)
def test_layering_method_delete_key_not_in_child(self):
# The key will not be in the site after the global data is copied into
@@ -49,8 +49,8 @@ class TestDocumentLayeringNegative(
doc_factory = factories.DocumentFactory(2, [1, 1])
documents = doc_factory.gen_test(mapping, site_abstract=False)
self._test_layering(
documents, exception_expected=errors.MissingDocumentKey)
self.assertRaises(errors.MissingDocumentKey, self._test_layering,
documents)
def test_layering_method_replace_key_not_in_child(self):
mapping = {
@@ -62,35 +62,15 @@ class TestDocumentLayeringNegative(
doc_factory = factories.DocumentFactory(2, [1, 1])
documents = doc_factory.gen_test(mapping, site_abstract=False)
self._test_layering(
documents, exception_expected=errors.MissingDocumentKey)
self.assertRaises(errors.MissingDocumentKey, self._test_layering,
documents)
@mock.patch.object(layering, 'LOG', autospec=True)
def test_layering_with_broken_layer_order(self, mock_log):
doc_factory = factories.DocumentFactory(2, [1, 1])
documents = doc_factory.gen_test({}, site_abstract=False)
layering_policy = documents[0]
broken_layer_orders = [
['site', 'region', 'global'], ['broken', 'global'], ['broken'],
['site', 'broken']]
for broken_layer_order in broken_layer_orders:
layering_policy['data']['layerOrder'] = broken_layer_order
# The site will not be able to find a correct parent.
layering.DocumentLayering(documents)
self.assertRegexpMatches(mock_log.info.mock_calls[0][1][0],
'%s is an empty layer with no documents. '
'It will be discarded from the layerOrder'
' during the layering process.')
mock_log.info.reset_mock()
@mock.patch.object(layering, 'LOG', autospec=True)
def test_layering_with_invalid_layer(self, mock_log):
def test_layering_with_empty_layer(self, mock_log):
doc_factory = factories.DocumentFactory(1, [1])
documents = doc_factory.gen_test({}, site_abstract=False)
documents[-1]['metadata']['layeringDefinition']['layer'] = 'invalid'
self._test_layering(documents, global_expected={})
self._test_layering([documents[0]], global_expected={})
mock_log.info.assert_has_calls([
mock.call(
'%s is an empty layer with no documents. It will be discarded '
@@ -101,6 +81,14 @@ class TestDocumentLayeringNegative(
'will be performed.')
])
def test_layering_document_with_invalid_layer_raises_exc(self):
doc_factory = factories.DocumentFactory(1, [1])
documents = doc_factory.gen_test({}, site_abstract=False)
documents[1]['metadata']['layeringDefinition']['layer'] = 'invalid'
self.assertRaises(errors.InvalidDocumentLayer, self._test_layering,
documents)
@mock.patch.object(layering, 'LOG', autospec=True)
def test_layering_child_with_invalid_parent_selector(self, mock_log):
doc_factory = factories.DocumentFactory(2, [1, 1])
@@ -164,28 +152,6 @@ class TestDocumentLayeringNegative(
self.assertRegexpMatches(mock_log.info.mock_calls[0][1][0],
'Could not find parent for document .*')
@mock.patch.object(layering, 'LOG', autospec=True)
def test_layering_documents_with_different_schemas(self, mock_log):
"""Validate that attempting to layer documents with different schemas
results in errors.
"""
doc_factory = factories.DocumentFactory(3, [1, 1, 1])
documents = doc_factory.gen_test({})
# Region and site documents should result in no parent being found
# since their schemas will not match that of their parent's.
for idx in range(1, 3): # Only region/site have parent.
prev_schema = documents[idx]['schema']
documents[idx]['schema'] = test_utils.rand_name('schema')
layering.DocumentLayering(documents)
self.assertRegexpMatches(mock_log.info.mock_calls[0][1][0],
'Could not find parent for document .*')
mock_log.info.reset_mock()
# Restore schema for next test run.
documents[idx]['schema'] = prev_schema
def test_layering_without_layering_policy_raises_exc(self):
doc_factory = factories.DocumentFactory(1, [1])
documents = doc_factory.gen_test({}, site_abstract=False)[1:]
@@ -204,3 +170,30 @@ class TestDocumentLayeringNegative(
'More than one layering policy document was passed in. Using the '
'first one found: [%s] %s.', documents[0]['schema'],
documents[0]['metadata']['name'])
def test_layering_documents_with_different_schemas_raises_exc(self):
"""Validate that attempting to layer documents with different `schema`s
results in errors.
"""
doc_factory = factories.DocumentFactory(3, [1, 1, 1])
documents = doc_factory.gen_test({})
# Region and site documents should result in no parent being found
# since their `schema`s will not match that of their parent's.
for idx in range(1, 3): # Only region/site have parent.
documents[idx]['schema'] = test_utils.rand_name('schema')
self.assertRaises(
errors.InvalidDocumentParent, self._test_layering, documents)
def test_layering_parent_and_child_with_same_layer_raises_exc(self):
"""Validate that attempting to layer documents with the same layer
results in an exception.
"""
doc_factory = factories.DocumentFactory(2, [1, 1])
documents = doc_factory.gen_test({})
for x in range(1, 3):
documents[x]['metadata']['layeringDefinition']['layer'] = 'global'
self.assertRaises(
errors.InvalidDocumentParent, self._test_layering, documents)