DECKHAND-89: Integrate layering with rendered documents

This patch set integrates layering with the rendered-documents
API endpoint. Little new functionality was added; instead, a
substantial amount of code was refactored so that layering
works alongside substitution.

The following changes have been made:
  - Moved document filtering functionality to deckhand.utils,
    because rendered documents must be filtered twice: once
    to retrieve all documents needed for rendering from the
    DB, and again by the controller to filter out documents
    the user does not want returned (see the sketch after
    this list)
  - Added LOG statements to the layering module
  - Added layering unit tests
  - Added functional tests
  - Removed some of the stricter validations around layering:
    if no parent document is found for a document, an error
    is no longer returned, because not all documents need a
    parent (that is, not all documents need to be rendered
    together). This may need to be revisited later: what if
    a document has a `parentSelector` but no parent is found?

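To illustrate the double filtering described in the first
bullet, here is a minimal, self-contained sketch. The
deepfilter below is a stand-in written for this example, not
the actual deckhand.utils implementation; only the two-pass
flow mirrors the patch:

    def deepfilter(doc, **filters):
        # Stand-in dotted-path filter; returns True if ``doc``
        # satisfies every filter.
        for path, expected in filters.items():
            value = doc
            for key in path.split('.'):
                value = value.get(key) if isinstance(value, dict) else None
            if isinstance(expected, (list, tuple)):
                if value not in expected:
                    return False
            elif value != expected:
                return False
        return True

    documents = [
        {'schema': 'example/Kind/v1',
         'metadata': {'storagePolicy': 'cleartext',
                      'layeringDefinition': {'abstract': True}}},
        {'schema': 'example/Kind/v1',
         'metadata': {'storagePolicy': 'cleartext',
                      'layeringDefinition': {'abstract': False}}},
    ]

    # Pass 1 (DB layer): fetch everything needed for rendering,
    # including abstract documents.
    renderable = [d for d in documents if deepfilter(
        d, **{'metadata.storagePolicy': ['cleartext']})]

    # ... layering and substitution run here ...

    # Pass 2 (controller): apply user-facing filters, e.g. drop
    # abstract documents, which users never see.
    final = [d for d in renderable if deepfilter(
        d, **{'metadata.layeringDefinition.abstract': False})]
    assert len(renderable) == 2 and len(final) == 1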
Change-Id: I6c66ed824fba0216ba868a6101a72cfe3bdda181
Felipe Monteiro  2017-12-01 21:01:10 +00:00
parent 848d288290
commit 1505c76388

32 changed files with 935 additions and 401 deletions

View File

@@ -70,8 +70,7 @@ class BucketsResource(api_base.BaseResource):
         created_documents = self._create_revision_documents(
             bucket_name, documents, validations)
-        if created_documents:
-            resp.body = self.view_builder.list(created_documents)
+        resp.body = self.view_builder.list(created_documents)
         resp.status = falcon.HTTP_200

     def _prepare_secret_documents(self, secret_documents):

View File

@@ -21,9 +21,10 @@ from deckhand.control import common
 from deckhand.control.views import document as document_view
 from deckhand.db.sqlalchemy import api as db_api
 from deckhand.engine import document_validation
-from deckhand.engine import secrets_manager
+from deckhand.engine import layering
 from deckhand import errors
 from deckhand import policy
+from deckhand import types
 from deckhand import utils

 LOG = logging.getLogger(__name__)
@@ -97,35 +98,67 @@ class RenderedDocumentsResource(api_base.BaseResource):
     def on_get(self, req, resp, sanitized_params, revision_id):
         include_encrypted = policy.conditional_authorize(
             'deckhand:list_encrypted_documents', req.context, do_raise=False)
-        filters = sanitized_params.copy()
-        filters['metadata.layeringDefinition.abstract'] = False
-        filters['metadata.storagePolicy'] = ['cleartext']
-        filters['deleted'] = False
+        filters = {
+            'metadata.storagePolicy': ['cleartext'],
+            'deleted': False  # Never return deleted documents to user.
+        }
         if include_encrypted:
             filters['metadata.storagePolicy'].append('encrypted')

+        layering_policy = self._retrieve_layering_policy()
+        documents = self._retrieve_documents_for_rendering(revision_id,
+                                                           **filters)
+        # Prevent the layering policy from appearing twice.
+        if layering_policy in documents:
+            documents.remove(layering_policy)
+
+        document_layering = layering.DocumentLayering(layering_policy,
+                                                      documents)
+        rendered_documents = document_layering.render()
+
+        # Filters to be applied post-rendering, because many documents are
+        # involved in rendering. User filters can only be applied once all
+        # documents have been rendered.
+        user_filters = sanitized_params.copy()
+        user_filters['metadata.layeringDefinition.abstract'] = False
+
+        final_documents = [
+            d for d in rendered_documents if utils.deepfilter(
+                d, **user_filters)]
+
+        resp.status = falcon.HTTP_200
+        resp.body = self.view_builder.list(final_documents)
+        self._post_validate(final_documents)
+
+    def _retrieve_layering_policy(self):
+        try:
+            # NOTE(fmontei): Layering policies exist system-wide, across all
+            # revisions, so no need to filter by revision.
+            layering_policy_filters = {
+                'deleted': False,
+                'schema': types.LAYERING_POLICY_SCHEMA
+            }
+            layering_policy = db_api.document_get(**layering_policy_filters)
+        except errors.DocumentNotFound as e:
+            error_msg = (
+                'No layering policy found in the system so could not render '
+                'the documents.')
+            LOG.error(error_msg)
+            LOG.exception(six.text_type(e))
+            raise falcon.HTTPConflict(description=error_msg)
+        else:
+            return layering_policy
+
+    def _retrieve_documents_for_rendering(self, revision_id, **filters):
         try:
             documents = db_api.revision_get_documents(
                 revision_id, **filters)
         except errors.RevisionNotFound as e:
             LOG.exception(six.text_type(e))
             raise falcon.HTTPNotFound(description=e.format_message())
+        else:
+            return documents

-        # TODO(fmontei): Currently the only phase of rendering that is
-        # performed is secret substitution, which can be done in any randomized
-        # order. However, secret substitution logic will have to be moved into
-        # a separate module that handles layering alongside substitution once
-        # layering has been fully integrated into this endpoint.
-        secrets_substitution = secrets_manager.SecretsSubstitution(documents)
-        try:
-            rendered_documents = secrets_substitution.substitute_all()
-        except errors.DocumentNotFound as e:
-            LOG.error('Failed to render the documents because a secret '
-                      'document could not be found.')
-            LOG.exception(six.text_type(e))
-            raise falcon.HTTPNotFound(description=e.format_message())
-
+    def _post_validate(self, documents):
         # Perform schema validation post-rendering to ensure that rendering
         # and substitution didn't break anything.
         doc_validator = document_validation.DocumentValidation(documents)
@@ -133,9 +166,7 @@ class RenderedDocumentsResource(api_base.BaseResource):
             doc_validator.validate_all()
         except (errors.InvalidDocumentFormat,
                 errors.InvalidDocumentSchema) as e:
+            LOG.error('Failed to post-validate rendered documents.')
             LOG.exception(e.format_message())
             raise falcon.HTTPInternalServerError(
                 description=e.format_message())
-
-        resp.status = falcon.HTTP_200
-        resp.body = self.view_builder.list(rendered_documents)

View File

@@ -12,14 +12,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

 """Defines interface for DB access."""

-import ast
 import copy
 import functools
 import hashlib
-import re
 import threading

 from oslo_config import cfg
@@ -28,7 +25,6 @@ from oslo_db import options
 from oslo_db.sqlalchemy import session
 from oslo_log import log as logging
 from oslo_serialization import jsonutils as json
-import six
 import sqlalchemy.orm as sa_orm
 from sqlalchemy import text
@@ -362,7 +358,7 @@ def document_get(session=None, raw_dict=False, revision_id=None, **filters):
     for doc in documents:
         d = doc.to_dict(raw_dict=raw_dict)
-        if _apply_filters(d, **nested_filters):
+        if utils.deepfilter(d, **nested_filters):
             return d

     filters.update(nested_filters)
@@ -412,7 +408,7 @@ def document_get_all(session=None, raw_dict=False, revision_id=None,
     final_documents = []
     for doc in documents:
         d = doc.to_dict(raw_dict=raw_dict)
-        if _apply_filters(d, **nested_filters):
+        if utils.deepfilter(d, **nested_filters):
             final_documents.append(d)

     return final_documents
@@ -536,97 +532,6 @@ def _update_revision_history(documents):
     return documents

-def _add_microversion(value):
-    """Hack for coercing all Deckhand schema fields (``schema`` and
-    ``metadata.schema``) into ending with v1.0 rather than v1, for example.
-    """
-    microversion_re = r'^.*/.*/v[0-9]{1}$'
-    if re.match(value, microversion_re):
-        return value + '.0'
-    return value
-
-
-def _apply_filters(dct, **filters):
-    """Apply filters to ``dct``.
-
-    Apply filters in ``filters`` to the dictionary ``dct``.
-
-    :param dct: The dictionary to check against all the ``filters``.
-    :param filters: Dictionary of key-value pairs used for filtering out
-        unwanted results.
-    :return: True if the dictionary satisfies all the filters, else False.
-    """
-    def _transform_filter_bool(filter_val):
-        # Transform boolean values into string literals.
-        if isinstance(filter_val, six.string_types):
-            try:
-                filter_val = ast.literal_eval(filter_val.title())
-            except ValueError:
-                # If not True/False, set to None to avoid matching
-                # `actual_val` which is always boolean.
-                filter_val = None
-        return filter_val
-
-    for filter_key, filter_val in filters.items():
-        # If the filter is a list of possibilities, e.g. ['site', 'region']
-        # for metadata.layeringDefinition.layer, check whether the actual
-        # value is present.
-        if isinstance(filter_val, (list, tuple)):
-            actual_val = utils.jsonpath_parse(dct, filter_key, match_all=True)
-            if not actual_val:
-                return False
-            if isinstance(actual_val[0], bool):
-                filter_val = [_transform_filter_bool(x) for x in filter_val]
-            if not set(actual_val).intersection(set(filter_val)):
-                return False
-        else:
-            actual_val = utils.jsonpath_parse(dct, filter_key)
-            # Else if both the filter value and the actual value in the doc
-            # are dictionaries, check whether the filter dict is a subset
-            # of the actual dict.
-            if (isinstance(actual_val, dict)
-                    and isinstance(filter_val, dict)):
-                is_subset = set(
-                    filter_val.items()).issubset(set(actual_val.items()))
-                if not is_subset:
-                    return False
-            # Else both filters are string literals.
-            else:
-                # Filtering by schema must support namespace matching
-                # (e.g. schema=promenade) such that all kind and schema
-                # documents with promenade namespace are returned, or
-                # (e.g. schema=promenade/Node) such that all version
-                # schemas with namespace=schema and kind=Node are returned.
-                if isinstance(actual_val, bool):
-                    filter_val = _transform_filter_bool(filter_val)
-
-                if filter_key in ['schema', 'metadata.schema']:
-                    actual_val = _add_microversion(actual_val)
-                    filter_val = _add_microversion(filter_val)
-                    parts = actual_val.split('/')[:2]
-                    if len(parts) == 2:
-                        actual_namespace, actual_kind = parts
-                    elif len(parts) == 1:
-                        actual_namespace = parts[0]
-                        actual_kind = ''
-                    else:
-                        actual_namespace = actual_kind = ''
-                    actual_minus_version = actual_namespace + '/' + actual_kind
-
-                    if not (filter_val == actual_val or
-                            actual_minus_version == filter_val or
-                            actual_namespace == filter_val):
-                        return False
-                else:
-                    if actual_val != filter_val:
-                        return False
-
-    return True
-
-
 def revision_get_all(session=None, **filters):
     """Return list of all revisions.
@@ -640,7 +545,7 @@ def revision_get_all(session=None, **filters):
     result = []
     for revision in revisions:
         revision_dict = revision.to_dict()
-        if _apply_filters(revision_dict, **filters):
+        if utils.deepfilter(revision_dict, **filters):
             revision_dict['documents'] = _update_revision_history(
                 revision_dict['documents'])
             result.append(revision_dict)
@@ -707,7 +612,7 @@ def _filter_revision_documents(documents, unique_only, **filters):
     documents = _exclude_deleted_documents(documents)

     for document in documents:
-        if _apply_filters(document, **filters):
+        if utils.deepfilter(document, **filters):
             # Filter out redundant documents from previous revisions, i.e.
             # documents schema and metadata.name are repeated.
             if unique_only:
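The removed _apply_filters (now utils.deepfilter) keeps the
namespace-matching semantics for schema filters. A stand-in
illustration of that matching rule, not the real implementation:

    def schema_matches(actual, filter_val):
        # 'promenade' or 'promenade/Node' should match the full
        # schema 'promenade/Node/v1.0', as the removed code allowed.
        namespace, _, rest = actual.partition('/')
        kind = rest.split('/')[0] if rest else ''
        return filter_val in (actual, namespace + '/' + kind, namespace)

    assert schema_matches('promenade/Node/v1.0', 'promenade')
    assert schema_matches('promenade/Node/v1.0', 'promenade/Node')
    assert not schema_matches('promenade/Node/v1.0', 'deckhand')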

View File

@@ -15,91 +15,146 @@
 import collections
 import copy

+from oslo_log import log as logging
+import six
+
 from deckhand.engine import document
+from deckhand.engine import secrets_manager
 from deckhand.engine import utils
 from deckhand import errors

+LOG = logging.getLogger(__name__)
+

 class DocumentLayering(object):
     """Class responsible for handling document layering.

     Layering is controlled in two places:

-    1. The `LayeringPolicy` control document, which defines the valid layers
+    1. The ``LayeringPolicy`` control document, which defines the valid layers
        and their order of precedence.
-    2. In the `metadata.layeringDefinition` section of normal
-       (`metadata.schema=metadata/Document/v1.0`) documents.
+    2. In the ``metadata.layeringDefinition`` section of normal
+       (``metadata.schema=metadata/Document/v1.0``) documents.

     .. note::

-        Only documents with the same `schema` are allowed to be layered
+        Only documents with the same ``schema`` are allowed to be layered
         together into a fully rendered document.
     """

     SUPPORTED_METHODS = ('merge', 'replace', 'delete')
-    LAYERING_POLICY_SCHEMA = 'deckhand/LayeringPolicy/v1.0'

+    def _calc_document_children(self):
+        """Determine each document's children.
+
+        For each document, attempts to find the document's children. Adds a new
+        key called "children" to the document's dictionary.
+
+        .. note::
+
+            A document should only have exactly one parent.
+
+            If a document does not have a parent, then its layer must be
+            the topmost layer defined by the ``layerOrder``.
+
+        :returns: Ordered list of documents that need to be layered. Each
+            document contains a "children" property in addition to original
+            data. List of documents returned is ordered from highest to lowest
+            layer.
+        :rtype: list of deckhand.engine.document.Document objects.
+        :raises IndeterminateDocumentParent: If more than one parent document
+            was found for a document.
+        """
+        layered_docs = list(
+            filter(lambda x: 'layeringDefinition' in x['metadata'],
+                   self.documents))
+
+        # ``all_children`` is a counter utility for verifying that each
+        # document has exactly one parent.
+        all_children = collections.Counter()
+
+        def _get_children(doc):
+            children = []
+            doc_layer = doc.get_layer()
+            try:
+                next_layer_idx = self.layer_order.index(doc_layer) + 1
+                children_doc_layer = self.layer_order[next_layer_idx]
+            except IndexError:
+                # The lowest layer has been reached, so no children. Return
+                # empty list.
+                return children
+
+            for other_doc in layered_docs:
+                # Documents with different schemas are never layered together,
+                # so consider only documents with same schema as candidates.
+                is_potential_child = (
+                    other_doc.get_layer() == children_doc_layer and
+                    other_doc.get_schema() == doc.get_schema()
+                )
+                if (is_potential_child):
+                    # A document can have many labels but should only have one
+                    # explicit label for the parentSelector.
+                    parent_sel = other_doc.get_parent_selector()
+                    parent_sel_key = list(parent_sel.keys())[0]
+                    parent_sel_val = list(parent_sel.values())[0]
+                    doc_labels = doc.get_labels()
+
+                    if (parent_sel_key in doc_labels and
+                            parent_sel_val == doc_labels[parent_sel_key]):
+                        children.append(other_doc)
+
+            return children
+
+        for layer in self.layer_order:
+            docs_by_layer = list(filter(
+                (lambda x: x.get_layer() == layer), layered_docs))
+
+            for doc in docs_by_layer:
+                children = _get_children(doc)
+                if children:
+                    all_children.update(children)
+                    doc.to_dict().setdefault('children', children)
+
+        all_children_elements = list(all_children.elements())
+        secondary_docs = list(
+            filter(lambda d: d.get_layer() != self.layer_order[0],
+                   layered_docs))
+        for doc in secondary_docs:
+            # Unless the document is the topmost document in the
+            # `layerOrder` of the LayeringPolicy, it should be a child document
+            # of another document.
+            if doc not in all_children_elements:
+                LOG.info('Could not find parent for document with name=%s, '
+                         'schema=%s, layer=%s, parentSelector=%s.',
+                         doc.get_name(), doc.get_schema(), doc.get_layer(),
+                         doc.get_parent_selector())
+            # If the document is a child document of more than 1 parent, then
+            # the document has too many parents, which is a validation error.
+            elif all_children[doc] != 1:
+                LOG.info('%d parent documents were found for child document '
+                         'with name=%s, schema=%s, layer=%s, parentSelector=%s'
+                         '. Each document must only have 1 parent.',
+                         all_children[doc], doc.get_name(), doc.get_schema(),
+                         doc.get_layer(), doc.get_parent_selector())
+                raise errors.IndeterminateDocumentParent(document=doc)
+
+        return layered_docs
+
-    def __init__(self, documents):
+    def __init__(self, layering_policy, documents):
         """Contructor for ``DocumentLayering``.

-        :param documents: List of YAML documents represented as dictionaries.
+        :param layering_policy: The document with schema
+            ``deckhand/LayeringPolicy`` needed for layering.
+        :param documents: List of all other documents to be layered together
+            in accordance with the ``layerOrder`` defined by the
+            LayeringPolicy document.
         """
+        self.layering_policy = document.Document(layering_policy)
         self.documents = [document.Document(d) for d in documents]
-        self._find_layering_policy()
+        self.layer_order = list(self.layering_policy['data']['layerOrder'])
         self.layered_docs = self._calc_document_children()

-    def render(self):
-        """Perform layering on the set of `documents`.
-
-        Each concrete document will undergo layering according to the actions
-        defined by its `layeringDefinition`.
-
-        :returns: the list of rendered documents (does not include layering
-            policy document).
-        """
-        # ``rendered_data_by_layer`` agglomerates the set of changes across all
-        # actions across each layer for a specific document.
-        rendered_data_by_layer = {}
-
-        # NOTE(fmontei): ``global_docs`` represents the topmost documents in
-        # the system. It should probably be impossible for more than 1
-        # top-level doc to exist, but handle multiple for now.
-        global_docs = [doc for doc in self.layered_docs
-                       if doc.get_layer() == self.layer_order[0]]
-
-        for doc in global_docs:
-            layer_idx = self.layer_order.index(doc.get_layer())
-            rendered_data_by_layer[layer_idx] = doc.to_dict()
-
-            # Keep iterating as long as a child exists.
-            for child in doc.get_children(nested=True):
-                # Retrieve the most up-to-date rendered_data (by
-                # referencing the child's parent's data).
-                child_layer_idx = self.layer_order.index(child.get_layer())
-                rendered_data = rendered_data_by_layer[child_layer_idx - 1]
-
-                # Apply each action to the current document.
-                actions = child.get_actions()
-                for action in actions:
-                    rendered_data = self._apply_action(
-                        action, child.to_dict(), rendered_data)
-
-                # Update the actual document data if concrete.
-                if not child.is_abstract():
-                    self.layered_docs[self.layered_docs.index(child)][
-                        'data'] = rendered_data['data']
-
-                # Update ``rendered_data_by_layer`` for this layer so that
-                # children in deeper layers can reference the most up-to-date
-                # changes.
-                rendered_data_by_layer[child_layer_idx] = rendered_data
-
-            if 'children' in doc:
-                del doc['children']
-
-        return [d.to_dict() for d in self.layered_docs]
-
     def _apply_action(self, action, child_data, overall_data):
         """Apply actions to each layer that is rendered.
@@ -175,121 +230,77 @@ class DocumentLayering(object):
         return overall_data

-    def _find_layering_policy(self):
-        """Retrieve the current layering policy.
-
-        :raises LayeringPolicyMalformed: If the `layerOrder` could not be
-            found in the LayeringPolicy or if it is not a list.
-        :raises LayeringPolicyNotFound: If system has no layering policy.
-        """
-        # TODO(fmontei): There should be a DB call here to fetch the layering
-        # policy from the DB.
-        for doc in self.documents:
-            if doc.to_dict()['schema'] == self.LAYERING_POLICY_SCHEMA:
-                self.layering_policy = doc
-                break
-
-        if not hasattr(self, 'layering_policy'):
-            raise errors.LayeringPolicyNotFound(
-                schema=self.LAYERING_POLICY_SCHEMA)
-
-        # TODO(fmontei): Rely on schema validation or some such for this.
-        try:
-            self.layer_order = list(self.layering_policy['data']['layerOrder'])
-        except KeyError:
-            raise errors.LayeringPolicyMalformed(
-                schema=self.LAYERING_POLICY_SCHEMA,
-                document=self.layering_policy)
-
-        if not isinstance(self.layer_order, list):
-            raise errors.LayeringPolicyMalformed(
-                schema=self.LAYERING_POLICY_SCHEMA,
-                document=self.layering_policy)
-
-    def _calc_document_children(self):
-        """Determine each document's children.
-
-        For each document, attempts to find the document's children. Adds a new
-        key called "children" to the document's dictionary.
-
-        .. note::
-
-            A document should only have exactly one parent.
-
-            If a document does not have a parent, then its layer must be
-            the topmost layer defined by the `layerOrder`.
-
-        :returns: Ordered list of documents that need to be layered. Each
-            document contains a "children" property in addition to original
-            data. List of documents returned is ordered from highest to lowest
-            layer.
-        :rtype: list of deckhand.engine.document.Document objects.
-        :raises IndeterminateDocumentParent: If more than one parent document
-            was found for a document.
-        :raises MissingDocumentParent: If the parent document could not be
-            found. Only applies documents with `layeringDefinition` property.
-        """
-        layered_docs = list(
-            filter(lambda x: 'layeringDefinition' in x['metadata'],
-                   self.documents))
-
-        # ``all_children`` is a counter utility for verifying that each
-        # document has exactly one parent.
-        all_children = collections.Counter()
-
-        def _get_children(doc):
-            children = []
-            doc_layer = doc.get_layer()
-            try:
-                next_layer_idx = self.layer_order.index(doc_layer) + 1
-                children_doc_layer = self.layer_order[next_layer_idx]
-            except IndexError:
-                # The lowest layer has been reached, so no children. Return
-                # empty list.
-                return children
-
-            for other_doc in layered_docs:
-                # Documents with different schemas are never layered together,
-                # so consider only documents with same schema as candidates.
-                if (other_doc.get_layer() == children_doc_layer
-                        and other_doc.get_schema() == doc.get_schema()):
-                    # A document can have many labels but should only have one
-                    # explicit label for the parentSelector.
-                    parent_sel = other_doc.get_parent_selector()
-                    parent_sel_key = list(parent_sel.keys())[0]
-                    parent_sel_val = list(parent_sel.values())[0]
-                    doc_labels = doc.get_labels()
-
-                    if (parent_sel_key in doc_labels and
-                            parent_sel_val == doc_labels[parent_sel_key]):
-                        children.append(other_doc)
-
-            return children
-
-        for layer in self.layer_order:
-            docs_by_layer = list(filter(
-                (lambda x: x.get_layer() == layer), layered_docs))
-
-            for doc in docs_by_layer:
-                children = _get_children(doc)
-                if children:
-                    all_children.update(children)
-                    doc.to_dict().setdefault('children', children)
-
-        all_children_elements = list(all_children.elements())
-        secondary_docs = list(
-            filter(lambda d: d.get_layer() != self.layer_order[0],
-                   layered_docs))
-        for doc in secondary_docs:
-            # Unless the document is the topmost document in the
-            # `layerOrder` of the LayeringPolicy, it should be a child document
-            # of another document.
-            if doc not in all_children_elements:
-                raise errors.MissingDocumentParent(document=doc)
-            # If the document is a child document of more than 1 parent, then
-            # the document has too many parents, which is a validation error.
-            elif all_children[doc] != 1:
-                raise errors.IndeterminateDocumentParent(document=doc)
-
-        return layered_docs
+    def _apply_substitutions(self, data):
+        try:
+            secrets_substitution = secrets_manager.SecretsSubstitution(data)
+            return secrets_substitution.substitute_all()
+        except errors.DocumentNotFound as e:
+            LOG.error('Failed to render the documents because a secret '
+                      'document could not be found.')
+            LOG.exception(six.text_type(e))
+
+    def render(self):
+        """Perform layering on the list of documents passed to ``__init__``.
+
+        Each concrete document will undergo layering according to the actions
+        defined by its ``metadata.layeringDefinition``. Documents are layered
+        with their parents. A parent document's ``schema`` must match that of
+        the child, and its ``metadata.labels`` must much the child's
+        ``metadata.layeringDefinition.parentSelector``.
+
+        :returns: The list of rendered documents (does not include layering
+            policy document).
+        :rtype: list[dict]
+        """
+        # ``rendered_data_by_layer`` tracks the set of changes across all
+        # actions across each layer for a specific document.
+        rendered_data_by_layer = {}
+
+        # NOTE(fmontei): ``global_docs`` represents the topmost documents in
+        # the system. It should probably be impossible for more than 1
+        # top-level doc to exist, but handle multiple for now.
+        global_docs = [doc for doc in self.layered_docs
+                       if doc.get_layer() == self.layer_order[0]]
+
+        for doc in global_docs:
+            layer_idx = self.layer_order.index(doc.get_layer())
+            if doc.get_substitutions():
+                substituted_data = self._apply_substitutions(doc.to_dict())
+                rendered_data_by_layer[layer_idx] = substituted_data[0]
+            else:
+                rendered_data_by_layer[layer_idx] = doc.to_dict()
+
+            # Keep iterating as long as a child exists.
+            for child in doc.get_children(nested=True):
+                # Retrieve the most up-to-date rendered_data (by
+                # referencing the child's parent's data).
+                child_layer_idx = self.layer_order.index(child.get_layer())
+                rendered_data = rendered_data_by_layer[child_layer_idx - 1]
+
+                # Apply each action to the current document.
+                for action in child.get_actions():
+                    LOG.debug('Applying action %s to child document with '
+                              'name=%s, schema=%s, layer=%s.', action,
+                              child.get_name(), child.get_schema(),
+                              child.get_layer())
+                    rendered_data = self._apply_action(
+                        action, child.to_dict(), rendered_data)
+
+                # Update the actual document data if concrete.
+                if not child.is_abstract():
+                    if child.get_substitutions():
+                        rendered_data['metadata'][
+                            'substitutions'] = child.get_substitutions()
+                        self._apply_substitutions(rendered_data)
+                    self.layered_docs[self.layered_docs.index(child)][
+                        'data'] = rendered_data['data']
+
+                # Update ``rendered_data_by_layer`` for this layer so that
+                # children in deeper layers can reference the most up-to-date
+                # changes.
+                rendered_data_by_layer[child_layer_idx] = rendered_data
+
+            if 'children' in doc:
+                del doc['children']
+
+        return [d.to_dict() for d in self.layered_docs]
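For orientation, a short usage sketch of the refactored engine above:
the layering policy is now passed in explicitly rather than discovered
from the document list. The document contents mirror the 2-layer sample
resource added later in this patch; running it assumes a working
Deckhand checkout:

    from deckhand.engine import layering

    layering_policy = {
        'schema': 'deckhand/LayeringPolicy/v1',
        'metadata': {'schema': 'metadata/Control/v1',
                     'name': 'layering-policy'},
        'data': {'layerOrder': ['global', 'site']},
    }
    documents = [
        {'schema': 'example/Kind/v1',
         'metadata': {'schema': 'metadata/Document/v1', 'name': 'global-1234',
                      'labels': {'key1': 'value1'},
                      'layeringDefinition': {'abstract': True,
                                             'layer': 'global'}},
         'data': {'a': {'x': 1, 'y': 2}}},
        {'schema': 'example/Kind/v1',
         'metadata': {'schema': 'metadata/Document/v1', 'name': 'site-1234',
                      'layeringDefinition': {
                          'layer': 'site',
                          'parentSelector': {'key1': 'value1'},
                          'actions': [{'method': 'merge', 'path': '.'}]}},
         'data': {'b': 5}},
    ]

    document_layering = layering.DocumentLayering(layering_policy, documents)
    rendered = document_layering.render()
    # Per the 2-layer gabbi test, the concrete site document should render
    # with data == {'a': {'x': 1, 'y': 2}, 'b': 5}.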

View File

@@ -99,23 +99,21 @@ class SecretsSubstitution(object):
     def __init__(self, documents):
         """SecretSubstitution constructor.

-        :param documents: List of YAML documents in dictionary format that are
-            candidates for secret substitution. This class will automatically
-            detect documents that require substitution; documents need not be
-            filtered prior to being passed to the constructor.
+        :param documents: List of documents that are candidates for secret
+            substitution. This class will automatically detect documents that
+            require substitution; documents need not be filtered prior to being
+            passed to the constructor.
         """
         if not isinstance(documents, (list, tuple)):
             documents = [documents]

         self.docs_to_sub = []
-        self.other_docs = []

         for document in documents:
-            doc = document_wrapper.Document(document)
-            if doc.get_substitutions():
-                self.docs_to_sub.append(doc)
-            else:
-                self.other_docs.append(document)
+            if not isinstance(document, document_wrapper.Document):
+                document_obj = document_wrapper.Document(document)
+            if document_obj.get_substitutions():
+                self.docs_to_sub.append(document_obj)

     def substitute_all(self):
         """Substitute all documents that have a `metadata.substitutions` field.
@@ -160,4 +158,4 @@ class SecretsSubstitution(object):
             doc['data'].update(substituted_data)
             substituted_docs.append(doc.to_dict())

-        return substituted_docs + self.other_docs
+        return substituted_docs
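A toy illustration of the substitute_all() contract change above:
untouched documents are no longer echoed back, so the caller (now the
layering engine) must merge results itself. The stub below is not
Deckhand code; real secret resolution is omitted:

    def substitute_all(documents):
        # Stand-in for SecretsSubstitution.substitute_all() under the new
        # contract: only documents declaring substitutions are returned.
        return [d for d in documents if d['metadata'].get('substitutions')]

    docs = [
        {'metadata': {'name': 'a',
                      'substitutions': [{'dest': {'path': '.c'}}]}},
        {'metadata': {'name': 'b'}},  # no substitutions: not returned
    ]
    assert [d['metadata']['name'] for d in substitute_all(docs)] == ['a']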

View File

@@ -196,12 +196,6 @@ class SingletonDocumentConflict(DeckhandException):
     code = 409


-class LayeringPolicyMalformed(DeckhandException):
-    msg_fmt = ("LayeringPolicy with schema %(schema)s is improperly formatted:"
-               " %(document)s.")
-    code = 400
-
-
 class IndeterminateDocumentParent(DeckhandException):
     msg_fmt = ("Too many parent documents found for document %(document)s.")
     code = 400
@@ -217,12 +211,6 @@ class MissingDocumentKey(DeckhandException):
                "Parent: %(parent)s. Child: %(child)s.")


-class MissingDocumentPattern(DeckhandException):
-    msg_fmt = ("Substitution pattern %(pattern)s could not be found for the "
-               "JSON path %(path)s in the destination document data %(data)s.")
-    code = 400
-
-
 class UnsupportedActionMethod(DeckhandException):
     msg_fmt = ("Method in %(actions)s is invalid for document %(document)s.")
     code = 400
@@ -233,12 +221,6 @@ class DocumentNotFound(DeckhandException):
     code = 404


-class LayeringPolicyNotFound(DeckhandException):
-    msg_fmt = ("LayeringPolicy with schema %(schema)s not found in the "
-               "system.")
-    code = 404
-
-
 class RevisionNotFound(DeckhandException):
     msg_fmt = "The requested revision %(revision)s was not found."
     code = 404

View File

@@ -237,7 +237,7 @@ class DocumentFactory(DeckhandFactory):
             # Set name.
             layer_template = copy.deepcopy(layer_template)
             layer_template['metadata']['name'] = "%s%d" % (
-                layer_name, count + 1)
+                test_utils.rand_name(layer_name), count + 1)

             # Set layer.
             layer_template['metadata']['layeringDefinition'][

View File

@@ -49,7 +49,7 @@ tests:
     desc: Create initial documents
     PUT: /api/v1.0/buckets/mop/documents
     status: 200
-    data: <@resources/design-doc-layering-sample.yaml
+    data: <@resources/design-doc-layering-sample-3-layers.yaml

   - name: verify_initial
     desc: Verify initial document count and revisions
@@ -77,7 +77,7 @@ tests:
     desc: Push a duplicate bucket of documents
     PUT: /api/v1.0/buckets/mop/documents
     status: 200
-    data: <@resources/design-doc-layering-sample.yaml
+    data: <@resources/design-doc-layering-sample-3-layers.yaml

   - name: verify_ignore
     desc: Verify duplicate documents were ignored

View File

@@ -0,0 +1,29 @@
# Tests failure paths for layering.
#
# 1. Purges existing data to ensure test isolation
# 2. Adds initial documents that do not include a layering policy
# 3. Verifies that 409 is raised when attempting to layer without a layering policy

defaults:
  request_headers:
    content-type: application/x-yaml
  response_headers:
    content-type: application/x-yaml

tests:
  - name: purge
    desc: Begin testing from known state.
    DELETE: /api/v1.0/revisions
    status: 204
    response_headers: null

  - name: initialize
    desc: Create initial documents
    PUT: /api/v1.0/buckets/mop/documents
    status: 200
    data: <@resources/passphrase.yaml

  - name: verify_missing_layering_policy_raises_conflict
    desc: Verify that attempting to render documents without a layering policy raises a 409
    GET: /api/v1.0/revisions/$RESPONSE['$.[0].status.revision']/rendered-documents
    status: 409

View File

@@ -0,0 +1,45 @@
# Tests success path for layering involving multiple source buckets.
#
# 1. Purges existing data to ensure test isolation
# 2. Adds documents to bucket a
# 3. Adds documents to bucket b
# 4. Verifies fully correctly layered document data

defaults:
  request_headers:
    content-type: application/x-yaml
  response_headers:
    content-type: application/x-yaml

tests:
  - name: purge
    desc: Begin testing from known state.
    DELETE: /api/v1.0/revisions
    status: 204
    response_headers: null

  - name: add_bucket_a
    desc: Create documents for bucket a
    PUT: /api/v1.0/buckets/a/documents
    status: 200
    data: <@resources/design-doc-layering-sample-split-bucket-a.yaml

  - name: add_bucket_b
    desc: Create documents for bucket b
    PUT: /api/v1.0/buckets/b/documents
    status: 200
    data: <@resources/design-doc-layering-sample-split-bucket-b.yaml

  - name: verify_layering
    desc: Check for expected layering
    GET: /api/v1.0/revisions/$RESPONSE['$.[0].status.revision']/rendered-documents
    status: 200
    response_multidoc_jsonpaths:
      $.`len`: 1
      $.[*].schema: example/Kind/v1
      $.[*].metadata.name: site-1234
      $.[*].metadata.schema: metadata/Document/v1
      $.[*].data:
        a:
          z: 3
        b: 4

View File

@@ -0,0 +1,66 @@
# Tests success path for basic layering.
#
# 1. Purges existing data to ensure test isolation
# 2. Adds initial documents from layering sample of design doc
# 3. Verifies document data layered correctly (2 layers)
# 4. Verifies document data layered correctly (3 layers)

defaults:
  request_headers:
    content-type: application/x-yaml
  response_headers:
    content-type: application/x-yaml

tests:
  - name: purge
    desc: Begin testing from known state.
    DELETE: /api/v1.0/revisions
    status: 204
    response_headers: null

  - name: create_documents_for_validating_2_level_layering
    desc: Create documents for validating 2 levels of layering (global, site)
    PUT: /api/v1.0/buckets/mop/documents
    status: 200
    data: <@resources/design-doc-layering-sample-2-layers.yaml

  - name: verify_layering_2_layers
    desc: Check for expected layering with 2 layers
    GET: /api/v1.0/revisions/$RESPONSE['$.[0].status.revision']/rendered-documents
    status: 200
    response_multidoc_jsonpaths:
      $.`len`: 1
      $.[*].schema: example/Kind/v1
      $.[*].metadata.name: site-1234
      $.[*].metadata.schema: metadata/Document/v1
      $.[*].data:
        a:
          x: 1
          y: 2
        b: 5

  - name: purge_again
    desc: Begin testing from known state.
    DELETE: /api/v1.0/revisions
    status: 204
    response_headers: null

  - name: create_documents_for_validating_3_level_layering
    desc: Create documents for validating 3 levels of layering (global, region, site)
    PUT: /api/v1.0/buckets/mop/documents
    status: 200
    data: <@resources/design-doc-layering-sample-3-layers.yaml

  - name: verify_layering_3_layers
    desc: Check for expected layering with 3 layers
    GET: /api/v1.0/revisions/$RESPONSE['$.[0].status.revision']/rendered-documents
    status: 200
    response_multidoc_jsonpaths:
      $.`len`: 1
      $.[*].schema: example/Kind/v1
      $.[*].metadata.name: site-1234
      $.[*].metadata.schema: metadata/Document/v1
      $.[*].data:
        a:
          z: 3
        b: 4

View File

@@ -0,0 +1,37 @@
---
schema: deckhand/LayeringPolicy/v1
metadata:
  schema: metadata/Control/v1
  name: layering-policy
data:
  layerOrder:
    - global
    - site
---
schema: example/Kind/v1
metadata:
  schema: metadata/Document/v1
  name: global-1234
  labels:
    key1: value1
  layeringDefinition:
    abstract: true
    layer: global
data:
  a:
    x: 1
    y: 2
---
schema: example/Kind/v1
metadata:
  schema: metadata/Document/v1
  name: site-1234
  layeringDefinition:
    layer: site
    parentSelector:
      key1: value1
    actions:
      - method: merge
        path: .
data:
  b: 5

View File

@@ -0,0 +1,25 @@
---
schema: deckhand/LayeringPolicy/v1
metadata:
  schema: metadata/Control/v1
  name: layering-policy
data:
  layerOrder:
    - global
    - region
    - site
---
schema: example/Kind/v1
metadata:
  schema: metadata/Document/v1
  name: global-1234
  labels:
    key1: value1
  layeringDefinition:
    abstract: true
    layer: global
data:
  a:
    x: 1
    y: 2
...

View File

@@ -0,0 +1,36 @@
---
schema: example/Kind/v1
metadata:
  schema: metadata/Document/v1
  name: region-1234
  labels:
    key1: value1
  layeringDefinition:
    abstract: true
    layer: region
    parentSelector:
      key1: value1
    actions:
      - method: replace
        path: .a
data:
  a:
    z: 3
---
schema: example/Kind/v1
metadata:
  schema: metadata/Document/v1
  name: site-1234
  labels:
    foo: bar
    baz: qux
  layeringDefinition:
    layer: site
    parentSelector:
      key1: value1
    actions:
      - method: merge
        path: .
data:
  b: 4
...

View File

@@ -1,4 +1,13 @@
 ---
+schema: deckhand/LayeringPolicy/v1
+metadata:
+  schema: metadata/Control/v1
+  name: layering-policy
+data:
+  layerOrder:
+    - region
+    - site
+---
 schema: deckhand/Certificate/v1
 metadata:
   name: example-cert

View File

@@ -1,4 +1,13 @@
 ---
+schema: deckhand/LayeringPolicy/v1
+metadata:
+  schema: metadata/Control/v1
+  name: layering-policy
+data:
+  layerOrder:
+    - region
+    - site
+---
 schema: deckhand/Certificate/v1
 metadata:
   name: example-cert

View File

@@ -27,7 +27,7 @@ tests:
     desc: Create initial documents
     PUT: /api/v1.0/buckets/mop/documents
     status: 200
-    data: <@resources/design-doc-layering-sample.yaml
+    data: <@resources/design-doc-layering-sample-3-layers.yaml

 # Validates whether revision was created.
 # Required parameters:

View File

@@ -20,7 +20,7 @@ tests:
     desc: Create initial documents
     PUT: /api/v1.0/buckets/mop/documents
     status: 200
-    data: <@resources/design-doc-layering-sample.yaml
+    data: <@resources/design-doc-layering-sample-3-layers.yaml

   - name: filter_by_schema_partial_namespace
     desc: Verify revision documents do not return results for partial namespace

View File

@@ -24,7 +24,7 @@ tests:
     desc: Create initial documents
     PUT: /api/v1.0/buckets/mop/documents
     status: 200
-    data: <@resources/design-doc-layering-sample.yaml
+    data: <@resources/design-doc-layering-sample-3-layers.yaml

   - name: filter_by_schema
     desc: Verify revision documents filtered by schema

View File

@@ -23,7 +23,7 @@ tests:
     desc: Create initial documents
     PUT: /api/v1.0/buckets/mop/documents
     status: 200
-    data: <@resources/design-doc-layering-sample.yaml
+    data: <@resources/design-doc-layering-sample-3-layers.yaml

   - name: filter_by_multiple_different_filters_expect_site
     desc: Verify revision documents filtered by multiple repeated keys that are different

View File

@@ -21,7 +21,7 @@ tests:
     desc: Create first revision for testing
     PUT: /api/v1.0/buckets/bucket_a/documents
     status: 200
-    data: <@resources/design-doc-layering-sample.yaml
+    data: <@resources/design-doc-layering-sample-3-layers.yaml

   - name: initialize_again
     desc: Create second revision for testing

View File

@@ -41,7 +41,7 @@ tests:
     desc: Create initial documents
     PUT: /api/v1.0/buckets/mop/documents
     status: 200
-    data: <@resources/design-doc-layering-sample.yaml
+    data: <@resources/design-doc-layering-sample-3-layers.yaml

   - name: create_tag
     desc: Create a tag for the revision

View File

@@ -29,7 +29,7 @@ tests:
     desc: Create initial documents
     PUT: /api/v1.0/buckets/mop/documents
     status: 200
-    data: <@resources/design-doc-layering-sample.yaml
+    data: <@resources/design-doc-layering-sample-3-layers.yaml

   - name: update_single_document
     desc: Update a single document, ignore other documents in the bucket

View File

@@ -28,6 +28,18 @@ CONF = cfg.CONF
 class TestBucketsController(test_base.BaseControllerTest):
     """Test suite for validating positive scenarios for buckets controller."""

+    def test_put_empty_bucket(self):
+        rules = {'deckhand:create_cleartext_documents': '@'}
+        self.policy.set_rules(rules)
+
+        resp = self.app.simulate_put(
+            '/api/v1.0/buckets/mop/documents',
+            headers={'Content-Type': 'application/x-yaml'},
+            body=yaml.safe_dump_all([]))
+        self.assertEqual(200, resp.status_code)
+
+        created_documents = list(yaml.safe_load_all(resp.text))
+        self.assertEmpty(created_documents)
+
     def test_put_bucket(self):
         rules = {'deckhand:create_cleartext_documents': '@'}
         self.policy.set_rules(rules)

View File

@@ -35,8 +35,8 @@ class TestRenderedDocumentsController(test_base.BaseControllerTest):
         # Create 2 docs: one concrete, one abstract.
         documents_factory = factories.DocumentFactory(2, [1, 1])
         payload = documents_factory.gen_test(
-            {}, global_abstract=False, region_abstract=True)[1:]
-        concrete_doc = payload[0]
+            {}, global_abstract=False, region_abstract=True)
+        concrete_doc = payload[1]

         resp = self.app.simulate_put(
             '/api/v1.0/buckets/mop/documents',
@@ -78,23 +78,21 @@ class TestRenderedDocumentsController(test_base.BaseControllerTest):
         self.policy.set_rules(rules)

         # Create 1st document.
-        documents_factory = factories.DocumentFactory(2, [1, 1])
+        documents_factory = factories.DocumentFactory(1, [1])
         payload = documents_factory.gen_test({}, global_abstract=False)[1:]
-        payload[0]['metadata']['name'] = test_utils.rand_name('document')
         resp = self.app.simulate_put(
             '/api/v1.0/buckets/mop/documents',
             headers={'Content-Type': 'application/x-yaml'},
             body=yaml.safe_dump_all(payload))
         self.assertEqual(200, resp.status_code)

-        # Create 2nd document (exclude 1st document).
-        payload = documents_factory.gen_test({}, global_abstract=False)[1:]
-        second_name = test_utils.rand_name('document')
-        payload[0]['metadata']['name'] = second_name
+        # Create 2nd document (exclude 1st document in new payload).
+        payload = documents_factory.gen_test({}, global_abstract=False)
+        new_name = payload[-1]['metadata']['name']
         resp = self.app.simulate_put(
             '/api/v1.0/buckets/mop/documents',
             headers={'Content-Type': 'application/x-yaml'},
-            body=yaml.safe_dump_all([payload[0]]))
+            body=yaml.safe_dump_all(payload))
         self.assertEqual(200, resp.status_code)
         revision_id = list(yaml.safe_load_all(resp.text))[0]['status'][
             'revision']
@@ -107,10 +105,37 @@ class TestRenderedDocumentsController(test_base.BaseControllerTest):
         rendered_documents = list(yaml.safe_load_all(resp.text))
         self.assertEqual(1, len(rendered_documents))
-        self.assertEqual(second_name,
-                         rendered_documents[0]['metadata']['name'])
+        self.assertEqual(new_name, rendered_documents[0]['metadata']['name'])
         self.assertEqual(2, rendered_documents[0]['status']['revision'])

+    def test_list_rendered_documents_multiple_buckets(self):
+        rules = {'deckhand:list_cleartext_documents': '@',
+                 'deckhand:list_encrypted_documents': '@',
+                 'deckhand:create_cleartext_documents': '@'}
+        self.policy.set_rules(rules)
+
+        documents_factory = factories.DocumentFactory(1, [1])
+        for idx in range(2):
+            payload = documents_factory.gen_test({})
+            if idx == 0:
+                # Pop off the first entry so that a conflicting layering
+                # policy isn't created during the 1st iteration.
+                payload.pop(0)
+            resp = self.app.simulate_put(
+                '/api/v1.0/buckets/%s/documents' % test_utils.rand_name(
+                    'bucket'),
+                headers={'Content-Type': 'application/x-yaml'},
+                body=yaml.safe_dump_all(payload))
+            self.assertEqual(200, resp.status_code)
+
+        revision_id = list(yaml.safe_load_all(resp.text))[0]['status'][
+            'revision']
+
+        resp = self.app.simulate_get(
+            '/api/v1.0/revisions/%s/rendered-documents' % revision_id,
+            headers={'Content-Type': 'application/x-yaml'})
+        self.assertEqual(200, resp.status_code)
+

 class TestRenderedDocumentsControllerNegative(
         test_base.BaseControllerTest):
@@ -125,8 +150,8 @@ class TestRenderedDocumentsControllerNegative(
         self.policy.set_rules(rules)

         # Create a document for a bucket.
-        secrets_factory = factories.DocumentSecretFactory()
-        payload = [secrets_factory.gen_test('Certificate', 'cleartext')]
+        documents_factory = factories.DocumentFactory(1, [1])
+        payload = documents_factory.gen_test({})
         resp = self.app.simulate_put(
             '/api/v1.0/buckets/mop/documents',
             headers={'Content-Type': 'application/x-yaml'},
@@ -161,8 +186,8 @@ class TestRenderedDocumentsControllerNegativeRBAC(
         self.policy.set_rules(rules)

         # Create a document for a bucket.
-        secrets_factory = factories.DocumentSecretFactory()
-        payload = [secrets_factory.gen_test('Certificate', 'cleartext')]
+        documents_factory = factories.DocumentFactory(1, [1])
+        payload = [documents_factory.gen_test({})[0]]
         resp = self.app.simulate_put(
             '/api/v1.0/buckets/mop/documents',
             headers={'Content-Type': 'application/x-yaml'},
@@ -185,8 +210,13 @@ class TestRenderedDocumentsControllerNegativeRBAC(
         self.policy.set_rules(rules)

         # Create a document for a bucket.
+        documents_factory = factories.DocumentFactory(1, [1])
+        layering_policy = documents_factory.gen_test({})[0]
         secrets_factory = factories.DocumentSecretFactory()
-        payload = [secrets_factory.gen_test('Certificate', 'encrypted')]
+        encrypted_document = secrets_factory.gen_test('Certificate',
+                                                      'encrypted')
+        payload = [layering_policy, encrypted_document]
         with mock.patch.object(buckets.BucketsResource, 'secrets_mgr',
                                autospec=True) as mock_secrets_mgr:
             mock_secrets_mgr.create.return_value = {

View File

@@ -214,23 +214,25 @@ class TestDocuments(base.TestDbBase):
     def test_delete_all_documents(self):
         payload = self.documents_factory.gen_test(self.document_mapping)
         bucket_name = test_utils.rand_name('bucket')
-        documents = self.create_documents(bucket_name, payload)
+        created_documents = self.create_documents(bucket_name, payload)
+        self.assertIsInstance(created_documents, list)
+        self.assertEqual(3, len(created_documents))

-        self.assertIsInstance(documents, list)
-        self.assertEqual(3, len(documents))
+        deleted_documents = self.create_documents(bucket_name, [])

-        documents = self.create_documents(bucket_name, [])
-        documents = sorted(
-            documents, key=lambda d: d['name'])
+        # Verify that all the expected documents were deleted.
+        self.assertEqual(
+            sorted([(d['metadata']['name'], d['schema'])
+                    for d in created_documents]),
+            sorted([(d['name'], d['schema']) for d in deleted_documents]))

-        for idx in range(3):
-            self.assertTrue(documents[idx]['deleted'])
-            self.assertTrue(documents[idx]['deleted_at'])
-            self.assertEqual(documents[idx]['schema'], payload[idx]['schema'])
-            self.assertEqual(documents[idx]['name'],
-                             payload[idx]['metadata']['name'])
-            self.assertEmpty(documents[idx]['metadata'])
-            self.assertEmpty(documents[idx]['data'])
+        # Verify that all their attributes have been cleared and that the
+        # deleted/deleted_at attributes have been set to True.
+        for deleted_document in deleted_documents:
+            self.assertTrue(deleted_document['deleted'])
+            self.assertTrue(deleted_document['deleted_at'])
+            self.assertEmpty(deleted_document['metadata'])
+            self.assertEmpty(deleted_document['data'])

     def test_delete_and_create_document_in_same_payload(self):
         payload = self.documents_factory.gen_test(self.document_mapping)

View File

@@ -12,18 +12,31 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import copy
+
 from deckhand.engine import layering
 from deckhand import errors
 from deckhand import factories
 from deckhand.tests.unit import base as test_base
+from deckhand import types


 class TestDocumentLayering(test_base.DeckhandTestCase):

+    def _extract_layering_policy(self, documents):
+        for doc in copy.copy(documents):
+            if doc['schema'].startswith(types.LAYERING_POLICY_SCHEMA):
+                layering_policy = doc
+                documents.remove(doc)
+                return layering_policy
+        return None
+
     def _test_layering(self, documents, site_expected=None,
                        region_expected=None, global_expected=None,
                        exception_expected=None):
-        document_layering = layering.DocumentLayering(documents)
+        layering_policy = self._extract_layering_policy(documents)
+        document_layering = layering.DocumentLayering(
+            layering_policy, documents)

         if all([site_expected, region_expected, global_expected,
                 exception_expected]):
@@ -56,19 +69,22 @@ class TestDocumentLayering(test_base.DeckhandTestCase):
                 site_expected = [site_expected]

             for idx, expected in enumerate(site_expected):
-                self.assertEqual(expected, site_docs[idx].get('data'))
+                self.assertEqual(expected, site_docs[idx].get('data'),
+                                 'Actual site data does not match expected.')
         if region_expected:
             if not isinstance(region_expected, list):
                 region_expected = [region_expected]

             for idx, expected in enumerate(region_expected):
-                self.assertEqual(expected, region_docs[idx].get('data'))
+                self.assertEqual(expected, region_docs[idx].get('data'),
+                                 'Actual region data does not match expected.')
         if global_expected:
             if not isinstance(global_expected, list):
                 global_expected = [global_expected]

             for idx, expected in enumerate(global_expected):
-                self.assertEqual(expected, global_docs[idx].get('data'))
+                self.assertEqual(expected, global_docs[idx].get('data'),
+                                 'Actual global data does not match expected.')


 class TestDocumentLayering2Layers(TestDocumentLayering):
@@ -138,6 +154,26 @@ class TestDocumentLayering2Layers(TestDocumentLayering):
             documents = doc_factory.gen_test(mapping, site_abstract=False)
             self._test_layering(documents, site_expected[idx])

+    def test_layering_documents_with_different_schemas_do_not_layer(self):
+        """Validates that documents with different schemas are not layered
+        together.
+        """
+        mapping = {
+            "_GLOBAL_DATA_1_": {"data": {"a": {"x": 1, "y": 2}}},
+            "_SITE_DATA_1_": {"data": {"b": 4}},
+            "_SITE_ACTIONS_1_": {
+                "actions": [{"method": "merge", "path": "."}]}
+        }
+        doc_factory = factories.DocumentFactory(2, [1, 1])
+        documents = doc_factory.gen_test(mapping, site_abstract=False)
+        documents[1]['schema'] = 'deckhand/Document/v1'
+        documents[2]['schema'] = 'deckhand/Document/v2'
+
+        global_expected = {"a": {"x": 1, "y": 2}}
+        site_expected = {'b': 4}
+        self._test_layering(documents, site_expected=site_expected,
+                            global_expected=global_expected)
+

 class TestDocumentLayering2LayersAbstractConcrete(TestDocumentLayering):
     """The the 2-layer payload with site/global layers concrete.

View File

@ -0,0 +1,162 @@
# Copyright 2017 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from deckhand.engine import secrets_manager
from deckhand import factories
from deckhand.tests.unit.engine import test_document_layering
class TestDocumentLayeringWithSubstitution(
test_document_layering.TestDocumentLayering):
def test_layering_and_substitution_default_scenario(self):
mapping = {
"_GLOBAL_DATA_1_": {"data": {"a": {"x": 1, "y": 2}}},
"_GLOBAL_SUBSTITUTIONS_1_": [{
"dest": {
"path": ".c"
},
"src": {
"schema": "deckhand/Certificate/v1",
"name": "global-cert",
"path": "."
}
}],
"_SITE_DATA_1_": {"data": {"b": 4}},
"_SITE_ACTIONS_1_": {
"actions": [{"method": "merge", "path": "."}]}
}
doc_factory = factories.DocumentFactory(2, [1, 1])
documents = doc_factory.gen_test(mapping, site_abstract=False)
secrets_factory = factories.DocumentSecretFactory()
certificate = secrets_factory.gen_test(
'Certificate', 'cleartext', data={'secret': 'global-secret'},
name='global-cert')
global_expected = {'a': {'x': 1, 'y': 2}, 'c': 'global-secret'}
site_expected = {'a': {'x': 1, 'y': 2}, 'b': 4, 'c': 'global-secret'}
with mock.patch.object(
secrets_manager.db_api, 'document_get',
return_value=certificate, autospec=True) as mock_document_get:
self._test_layering(documents, site_expected=site_expected,
global_expected=global_expected)
mock_document_get.assert_called_once_with(
schema=certificate['schema'], name=certificate['metadata']['name'],
is_secret=True, **{'metadata.layeringDefinition.abstract': False})
def test_layering_and_substitution_no_children(self):
mapping = {
"_GLOBAL_DATA_1_": {"data": {"a": {"x": 1, "y": 2}}},
"_GLOBAL_SUBSTITUTIONS_1_": [{
"dest": {
"path": ".c"
},
"src": {
"schema": "deckhand/Certificate/v1",
"name": "global-cert",
"path": "."
}
}],
"_SITE_DATA_1_": {"data": {"b": 4}},
"_SITE_ACTIONS_1_": {
"actions": [{"method": "merge", "path": "."}]}
}
doc_factory = factories.DocumentFactory(2, [1, 1])
documents = doc_factory.gen_test(mapping, site_abstract=False)
documents[1]['metadata']['labels'] = {}
secrets_factory = factories.DocumentSecretFactory()
certificate = secrets_factory.gen_test(
'Certificate', 'cleartext', data={'secret': 'global-secret'},
name='global-cert')
global_expected = {'a': {'x': 1, 'y': 2}, 'c': 'global-secret'}
site_expected = {'b': 4}
with mock.patch.object(
secrets_manager.db_api, 'document_get',
return_value=certificate, autospec=True) as mock_document_get:
self._test_layering(documents, site_expected=site_expected,
global_expected=global_expected)
mock_document_get.assert_called_once_with(
schema=certificate['schema'], name=certificate['metadata']['name'],
is_secret=True, **{'metadata.layeringDefinition.abstract': False})
def test_layering_parent_and_child_undergo_substitution(self):
mapping = {
"_GLOBAL_DATA_1_": {"data": {"a": {"x": 1, "y": 2}}},
"_GLOBAL_SUBSTITUTIONS_1_": [{
"dest": {
"path": ".b"
},
"src": {
"schema": "deckhand/Certificate/v1",
"name": "global-cert",
"path": "."
}
}],
"_SITE_DATA_1_": {"data": {"c": "need-site-secret"}},
"_SITE_ACTIONS_1_": {
"actions": [{"method": "merge", "path": "."}]},
"_SITE_SUBSTITUTIONS_1_": [{
"dest": {
"path": ".c"
},
"src": {
"schema": "deckhand/CertificateKey/v1",
"name": "site-cert",
"path": "."
}
}],
}
doc_factory = factories.DocumentFactory(2, [1, 1])
documents = doc_factory.gen_test(mapping, site_abstract=False)
secrets_factory = factories.DocumentSecretFactory()
global_expected = {'a': {'x': 1, 'y': 2}, 'b': 'global-secret'}
site_expected = {'a': {'x': 1, 'y': 2}, 'b': 'global-secret',
'c': 'site-secret'}
def _get_secret_document(*args, **kwargs):
name = kwargs['name']
prefix = name.split('-')[0]
return secrets_factory.gen_test(
'Certificate', 'cleartext',
data={'secret': '%s-secret' % prefix},
name='%s' % name)
with mock.patch.object(
secrets_manager.db_api, 'document_get',
autospec=True) as mock_document_get:
mock_document_get.side_effect = _get_secret_document
self._test_layering(documents, site_expected=site_expected,
global_expected=global_expected)
mock_document_get.assert_has_calls([
mock.call(
schema="deckhand/Certificate/v1", name='global-cert',
is_secret=True,
**{'metadata.layeringDefinition.abstract': False}),
mock.call(
schema="deckhand/CertificateKey/v1", name='site-cert',
is_secret=True,
**{'metadata.layeringDefinition.abstract': False})
])
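
Taken together, these tests pin down the new two-argument engine entry point. As a minimal orientation sketch, assuming the DocumentLayering signature introduced in this change (render() and the mocked document_get lookup are the only interfaces visible in this diff; everything else here is illustrative):

    from deckhand.engine import layering

    # layering_policy: the deckhand/LayeringPolicy/v1 document, extracted
    # up front; documents: every other document, abstract and concrete.
    engine = layering.DocumentLayering(layering_policy, documents)

    # render() walks layerOrder, applies each child's merge/delete/replace
    # actions against its parent, and triggers the secret lookups that the
    # tests above mock out via secrets_manager.db_api.document_get.
    rendered = engine.render()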


@@ -12,6 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import mock
from deckhand.engine import layering
from deckhand import errors
from deckhand import factories
@@ -63,107 +65,115 @@ class TestDocumentLayeringNegative(
        self._test_layering(
            documents, exception_expected=errors.MissingDocumentKey)

-    def test_layering_without_layering_policy(self):
-        doc_factory = factories.DocumentFactory(2, [1, 1])
-        documents = doc_factory.gen_test({}, site_abstract=False)
-        documents.pop(0)  # First doc is layering policy.
-        self.assertRaises(errors.LayeringPolicyNotFound,
-                          layering.DocumentLayering, documents)
-
-    def test_layering_with_broken_layer_order(self):
+    @mock.patch.object(layering, 'LOG', autospec=True)
+    def test_layering_with_broken_layer_order(self, mock_log):
        doc_factory = factories.DocumentFactory(2, [1, 1])
        documents = doc_factory.gen_test({}, site_abstract=False)
+        layering_policy = self._extract_layering_policy(documents)
        broken_layer_orders = [
            ['site', 'region', 'global'], ['broken', 'global'], ['broken'],
            ['site', 'broken']]
        for broken_layer_order in broken_layer_orders:
-            documents[0]['data']['layerOrder'] = broken_layer_order
+            layering_policy['data']['layerOrder'] = broken_layer_order
            # The site will not be able to find a correct parent.
-            self.assertRaises(errors.MissingDocumentParent,
-                              layering.DocumentLayering, documents)
+            layering.DocumentLayering(layering_policy, documents)
+            self.assertRegexpMatches(mock_log.info.mock_calls[0][1][0],
+                                     'Could not find parent for document .*')
+            mock_log.info.reset_mock()

-    def test_layering_child_with_invalid_parent_selector(self):
+    @mock.patch.object(layering, 'LOG', autospec=True)
+    def test_layering_child_with_invalid_parent_selector(self, mock_log):
        doc_factory = factories.DocumentFactory(2, [1, 1])
        documents = doc_factory.gen_test({}, site_abstract=False)
+        layering_policy = self._extract_layering_policy(documents)
        for parent_selector in ({'key2': 'value2'}, {'key1': 'value2'}):
            documents[-1]['metadata']['layeringDefinition'][
                'parentSelector'] = parent_selector
-            self.assertRaises(errors.MissingDocumentParent,
-                              layering.DocumentLayering, documents)
+            layering.DocumentLayering(layering_policy, documents)
+            self.assertRegexpMatches(mock_log.info.mock_calls[0][1][0],
+                                     'Could not find parent for document .*')
+            mock_log.info.reset_mock()

-    def test_layering_unreferenced_parent_label(self):
+    @mock.patch.object(layering, 'LOG', autospec=True)
+    def test_layering_unreferenced_parent_label(self, mock_log):
        doc_factory = factories.DocumentFactory(2, [1, 1])
        documents = doc_factory.gen_test({}, site_abstract=False)
+        layering_policy = self._extract_layering_policy(documents)
        for parent_label in ({'key2': 'value2'}, {'key1': 'value2'}):
            # Second doc is the global doc, or parent.
-            documents[1]['metadata']['labels'] = [parent_label]
-            self.assertRaises(errors.MissingDocumentParent,
-                              layering.DocumentLayering, documents)
+            documents[0]['metadata']['labels'] = [parent_label]
+            layering.DocumentLayering(layering_policy, documents)
+            self.assertRegexpMatches(mock_log.info.mock_calls[0][1][0],
+                                     'Could not find parent for document .*')
+            mock_log.info.reset_mock()

    def test_layering_duplicate_parent_selector_2_layer(self):
        # Validate that documents belonging to the same layer cannot have the
        # same unique parent identifier referenced by `parentSelector`.
        doc_factory = factories.DocumentFactory(2, [1, 1])
        documents = doc_factory.gen_test({}, site_abstract=False)
-        documents.append(documents[1])  # Copy global layer.
+        layering_policy = self._extract_layering_policy(documents)
+        documents.append(documents[0])  # Copy global layer.
        self.assertRaises(errors.IndeterminateDocumentParent,
-                          layering.DocumentLayering, documents)
+                          layering.DocumentLayering, layering_policy,
+                          documents)

    def test_layering_duplicate_parent_selector_3_layer(self):
        # Validate that documents belonging to the same layer cannot have the
        # same unique parent identifier referenced by `parentSelector`.
        doc_factory = factories.DocumentFactory(3, [1, 1, 1])
        documents = doc_factory.gen_test({}, site_abstract=False)
-        # 1 is global layer, 2 is region layer.
-        for idx in (1, 2):
+        layering_policy = self._extract_layering_policy(documents)
+        # 0 is global layer, 1 is region layer.
+        for idx in (0, 1):
            documents.append(documents[idx])
            self.assertRaises(errors.IndeterminateDocumentParent,
-                              layering.DocumentLayering, documents)
+                              layering.DocumentLayering, layering_policy,
+                              documents)
            documents.pop(-1)  # Remove the just-appended duplicate.

-    def test_layering_document_references_itself(self):
+    @mock.patch.object(layering, 'LOG', autospec=True)
+    def test_layering_document_references_itself(self, mock_log):
        # Test that a parentSelector cannot reference the document itself
        # without an error being raised.
        doc_factory = factories.DocumentFactory(3, [1, 1, 1])
        documents = doc_factory.gen_test({}, site_abstract=False)
+        layering_policy = self._extract_layering_policy(documents)
        self_ref = {"self": "self"}
        documents[2]['metadata']['labels'] = self_ref
        documents[2]['metadata']['layeringDefinition'][
            'parentSelector'] = self_ref
-        # Escape '[' and ']' for regex to work.
-        expected_err = ("Missing parent document for document %s."
-                        % documents[2]).replace('[', '\[').replace(']', '\]')
-        self.assertRaisesRegex(errors.MissingDocumentParent, expected_err,
-                               layering.DocumentLayering, documents)
+        layering.DocumentLayering(layering_policy, documents)
+        self.assertRegexpMatches(mock_log.info.mock_calls[0][1][0],
+                                 'Could not find parent for document .*')

-    def test_layering_documents_with_different_schemas(self):
+    @mock.patch.object(layering, 'LOG', autospec=True)
+    def test_layering_documents_with_different_schemas(self, mock_log):
        """Validate that attempting to layer documents with different schemas
        results in errors.
        """
        doc_factory = factories.DocumentFactory(3, [1, 1, 1])
        documents = doc_factory.gen_test({})
+        layering_policy = self._extract_layering_policy(documents)
        # Region and site documents should result in no parent being found
        # since their schemas will not match that of their parent's.
-        for idx in range(2, 4):  # Only region/site have parent.
+        for idx in range(1, 3):  # Only region/site have parent.
            prev_schema = documents[idx]['schema']
            documents[idx]['schema'] = test_utils.rand_name('schema')
-            # Escape '[' and ']' for regex to work.
-            expected_err = (
-                "Missing parent document for document %s."
-                % documents[idx]).replace('[', '\[').replace(']', '\]')
-            self.assertRaisesRegex(errors.MissingDocumentParent, expected_err,
-                                   layering.DocumentLayering, documents)
+            layering.DocumentLayering(layering_policy, documents)
+            self.assertRegexpMatches(mock_log.info.mock_calls[0][1][0],
+                                     'Could not find parent for document .*')
+            mock_log.info.reset_mock()
            # Restore schema for next test run.
            documents[idx]['schema'] = prev_schema


@@ -12,10 +12,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import ast
import re
import string
import jsonpath_ng
+import six
from deckhand import errors
@@ -167,3 +169,101 @@ def multisort(data, sort_by=None, order_by=None):
    return sorted(data, key=lambda d: [
        jsonpath_parse(d, sort_key) for sort_key in sort_by],
        reverse=True if order_by == 'desc' else False)
def _add_microversion(value):
"""Hack for coercing all Deckhand schema fields (``schema`` and
``metadata.schema``) into ending with v1.0 rather than v1, for example.
"""
microversion_re = r'^.*/.*/v[0-9]+$'
    if re.match(microversion_re, value):
return value + '.0'
return value
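# Worked illustration with hypothetical inputs (not part of this change),
# assuming the pattern-first re.match call above:
#   _add_microversion('deckhand/Certificate/v1')   -> 'deckhand/Certificate/v1.0'
#   _add_microversion('deckhand/Certificate/v1.0') -> 'deckhand/Certificate/v1.0'
#   _add_microversion('promenade')                 -> 'promenade'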
def deepfilter(dct, **filters):
"""Match ``dct`` against all the filters in ``filters``.
    Check whether ``dct`` matches all the filters in ``filters``. The filters
    can reference nested attributes, that is, attributes contained within
    other dictionaries inside ``dct``.
Useful for querying whether ``metadata.name`` or
``metadata.layeringDefinition.layerOrder`` match specific values.
:param dct: The dictionary to check against all the ``filters``.
:type dct: dict
:param filters: Dictionary of key-value pairs used for filtering out
unwanted results.
:type filters: dict
:returns: True if the dictionary satisfies all the filters, else False.
"""
def _transform_filter_bool(filter_val):
# Transform boolean values into string literals.
if isinstance(filter_val, six.string_types):
try:
filter_val = ast.literal_eval(filter_val.title())
except ValueError:
# If not True/False, set to None to avoid matching
# `actual_val` which is always boolean.
filter_val = None
return filter_val
for filter_key, filter_val in filters.items():
# If the filter is a list of possibilities, e.g. ['site', 'region']
# for metadata.layeringDefinition.layer, check whether the actual
# value is present.
if isinstance(filter_val, (list, tuple)):
actual_val = jsonpath_parse(dct, filter_key, match_all=True)
if not actual_val:
return False
if isinstance(actual_val[0], bool):
filter_val = [_transform_filter_bool(x) for x in filter_val]
if not set(actual_val).intersection(set(filter_val)):
return False
else:
actual_val = jsonpath_parse(dct, filter_key)
# Else if both the filter value and the actual value in the doc
# are dictionaries, check whether the filter dict is a subset
# of the actual dict.
if (isinstance(actual_val, dict)
and isinstance(filter_val, dict)):
is_subset = set(
filter_val.items()).issubset(set(actual_val.items()))
if not is_subset:
return False
# Else both filters are string literals.
else:
# Filtering by schema must support namespace matching
# (e.g. schema=promenade) such that all kind and schema
# documents with promenade namespace are returned, or
# (e.g. schema=promenade/Node) such that all version
# schemas with namespace=schema and kind=Node are returned.
if isinstance(actual_val, bool):
filter_val = _transform_filter_bool(filter_val)
if filter_key in ['schema', 'metadata.schema']:
actual_val = _add_microversion(actual_val)
filter_val = _add_microversion(filter_val)
parts = actual_val.split('/')[:2]
if len(parts) == 2:
actual_namespace, actual_kind = parts
elif len(parts) == 1:
actual_namespace = parts[0]
actual_kind = ''
else:
actual_namespace = actual_kind = ''
actual_minus_version = actual_namespace + '/' + actual_kind
if not (filter_val == actual_val or
actual_minus_version == filter_val or
actual_namespace == filter_val):
return False
else:
if actual_val != filter_val:
return False
return True
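
A usage sketch for deepfilter follows; the document contents here are invented for illustration, and assume the fixed _add_microversion above:

    from deckhand.utils import deepfilter

    doc = {
        'schema': 'promenade/Node/v1',
        'metadata': {
            'name': 'node-1',
            'labels': {'key1': 'value1'},
            'layeringDefinition': {'layer': 'site', 'abstract': False},
        },
    }

    # List filter: the actual value must intersect the allowed values.
    deepfilter(doc, **{'metadata.layeringDefinition.layer': ['site', 'region']})  # True
    # String 'false' is coerced to the boolean False before comparison.
    deepfilter(doc, **{'metadata.layeringDefinition.abstract': 'false'})  # True
    # Dict filter passes if it is a subset of the actual dict.
    deepfilter(doc, **{'metadata.labels': {'key1': 'value1'}})  # True
    # Schema filters match on namespace, namespace/kind, or the full schema.
    deepfilter(doc, schema='promenade')  # True
    deepfilter(doc, schema='promenade/Node')  # True
    deepfilter(doc, schema='deckhand/Node/v1')  # False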


@@ -118,7 +118,7 @@ Validation Module
Validation Schemas
==================
-Below are the schemas deckhand uses to validate documents.
+Below are the schemas Deckhand uses to validate documents.
.. automodule:: deckhand.engine.schema.base_schema
   :members: schema