Simplify document layering interface

This PS simplifies the layering interface, adds additional
exception handling around document layering in the rendered
documents controller, and removes an unused exception related
to document layering.

Change-Id: I0491ae32a43fe4a6a01c8da530528d8573f91a64
Felipe Monteiro 2018-01-05 03:16:12 +00:00
parent 4dbcf5ba38
commit 8b428743ee
5 changed files with 76 additions and 72 deletions
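
For context, a minimal sketch of what the simplified interface looks like to a caller. The document set here is built with the test factories used by the unit tests further down; assuming the factory defaults produce a renderable set, the only visible change is that the LayeringPolicy is no longer passed separately.

from deckhand.engine import layering
from deckhand import factories

# gen_test() returns the LayeringPolicy together with the layer documents
# (index 0 is the policy, per the unit tests below).
doc_factory = factories.DocumentFactory(2, [1, 1])
documents = doc_factory.gen_test({}, site_abstract=False)

# Old interface: layering.DocumentLayering(layering_policy, documents)
# New interface: the engine extracts the LayeringPolicy from the list itself.
document_layering = layering.DocumentLayering(documents)
rendered_documents = document_layering.render()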

@@ -105,16 +105,18 @@ class RenderedDocumentsResource(api_base.BaseResource):
if include_encrypted:
filters['metadata.storagePolicy'].append('encrypted')
layering_policy = self._retrieve_layering_policy()
documents = self._retrieve_documents_for_rendering(revision_id,
**filters)
# Prevent the layering policy from appearing twice.
if layering_policy in documents:
documents.remove(layering_policy)
document_layering = layering.DocumentLayering(layering_policy,
documents)
rendered_documents = document_layering.render()
try:
document_layering = layering.DocumentLayering(documents)
rendered_documents = document_layering.render()
except (errors.IndeterminateDocumentParent,
errors.UnsupportedActionMethod,
errors.MissingDocumentKey) as e:
raise falcon.HTTPBadRequest(description=e.format_message())
except errors.LayeringPolicyNotFound as e:
raise falcon.HTTPConflict(description=e.format_message())
# Filters to be applied post-rendering, because many documents are
# involved in rendering. User filters can only be applied once all
@@ -129,34 +131,32 @@ class RenderedDocumentsResource(api_base.BaseResource):
resp.body = self.view_builder.list(final_documents)
self._post_validate(final_documents)
def _retrieve_layering_policy(self):
try:
# NOTE(fmontei): Layering policies exist system-wide, across all
# revisions, so no need to filter by revision.
layering_policy_filters = {
'deleted': False,
'schema': types.LAYERING_POLICY_SCHEMA
}
layering_policy = db_api.document_get(**layering_policy_filters)
except errors.DocumentNotFound as e:
error_msg = (
'No layering policy found in the system so could not render '
'the documents.')
LOG.error(error_msg)
LOG.exception(six.text_type(e))
raise falcon.HTTPConflict(description=error_msg)
else:
return layering_policy
def _retrieve_documents_for_rendering(self, revision_id, **filters):
"""Retrieve all necessary documents needed for rendering. If a layering
policy isn't found in the current revision, retrieve it in a subsequent
call and add it to the list of documents.
"""
try:
documents = db_api.revision_get_documents(
revision_id, **filters)
documents = db_api.revision_get_documents(revision_id, **filters)
except errors.RevisionNotFound as e:
LOG.exception(six.text_type(e))
raise falcon.HTTPNotFound(description=e.format_message())
else:
return documents
if not any([d['schema'].startswith(types.LAYERING_POLICY_SCHEMA)
for d in documents]):
try:
layering_policy_filters = {
'deleted': False,
'schema': types.LAYERING_POLICY_SCHEMA
}
layering_policy = db_api.document_get(
**layering_policy_filters)
except errors.DocumentNotFound as e:
LOG.exception(e.format_message())
else:
documents.append(layering_policy)
return documents
def _post_validate(self, documents):
# Perform schema validation post-rendering to ensure that rendering
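
Condensing the controller change above: layering failures caused by the documents themselves now surface as 400 Bad Request, while a missing LayeringPolicy surfaces as 409 Conflict. The helper below is a hypothetical, standalone sketch of that mapping; in the resource handler the try/except sits inline.

import falcon

from deckhand.engine import layering
from deckhand import errors


def _render_or_translate(documents):
    """Hypothetical helper mirroring the controller's new error handling."""
    try:
        return layering.DocumentLayering(documents).render()
    except (errors.IndeterminateDocumentParent,
            errors.UnsupportedActionMethod,
            errors.MissingDocumentKey) as e:
        # Problems with the documents themselves map to 400 Bad Request.
        raise falcon.HTTPBadRequest(description=e.format_message())
    except errors.LayeringPolicyNotFound as e:
        # No LayeringPolicy anywhere in the document set maps to 409 Conflict.
        raise falcon.HTTPConflict(description=e.format_message())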

@@ -22,6 +22,7 @@ from deckhand.engine import document
from deckhand.engine import secrets_manager
from deckhand.engine import utils
from deckhand import errors
from deckhand import types
LOG = logging.getLogger(__name__)
@@ -141,7 +142,19 @@ class DocumentLayering(object):
return layered_docs
def __init__(self, layering_policy, documents):
def _extract_layering_policy(self, documents):
documents = copy.deepcopy(documents)
for doc in documents:
if doc['schema'].startswith(types.LAYERING_POLICY_SCHEMA):
layering_policy = doc
documents.remove(doc)
return (
document.Document(layering_policy),
[document.Document(d) for d in documents]
)
return None, [document.Document(d) for d in documents]
def __init__(self, documents):
"""Contructor for ``DocumentLayering``.
:param layering_policy: The document with schema
@@ -150,8 +163,14 @@ class DocumentLayering(object):
in accordance with the ``layerOrder`` defined by the
LayeringPolicy document.
"""
self.layering_policy = document.Document(layering_policy)
self.documents = [document.Document(d) for d in documents]
self.layering_policy, self.documents = self._extract_layering_policy(
documents)
if self.layering_policy is None:
error_msg = (
'No layering policy found in the system so could not render '
'documents.')
LOG.error(error_msg)
raise errors.LayeringPolicyNotFound()
self.layer_order = list(self.layering_policy['data']['layerOrder'])
self.layered_docs = self._calc_document_children()
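
To make the new constructor contract concrete, a short sketch using the same factories as the unit tests: construction succeeds when a LayeringPolicy is somewhere in the list and raises the new LayeringPolicyNotFound error when it is absent (this mirrors test_layering_without_layering_policy_raises_exc at the end of this change).

from deckhand.engine import layering
from deckhand import errors
from deckhand import factories

doc_factory = factories.DocumentFactory(2, [1, 1])
documents = doc_factory.gen_test({}, site_abstract=False)

# The full list includes the LayeringPolicy at index 0, so this succeeds.
layering.DocumentLayering(documents)

# Dropping the policy makes the constructor fail fast instead of later.
try:
    layering.DocumentLayering(documents[1:])
except errors.LayeringPolicyNotFound:
    print('rejected a document set without a LayeringPolicy')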

@@ -201,11 +201,6 @@ class IndeterminateDocumentParent(DeckhandException):
code = 400
class MissingDocumentParent(DeckhandException):
msg_fmt = ("Missing parent document for document %(document)s.")
code = 400
class MissingDocumentKey(DeckhandException):
msg_fmt = ("Missing document key %(key)s from either parent or child. "
"Parent: %(parent)s. Child: %(child)s.")
@@ -232,6 +227,11 @@ class RevisionTagNotFound(DeckhandException):
code = 404
class LayeringPolicyNotFound(DeckhandException):
msg_fmt = ("Required LayeringPolicy was not found for layering.")
code = 409
class ValidationNotFound(DeckhandException):
msg_fmt = ("The requested validation entry %(entry_id)s was not found "
"for validation name %(validation_name)s and revision ID "
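
A quick sanity sketch of the replacement exception, assuming the usual DeckhandException behaviour of a class-level code and a format_message() helper (both already used elsewhere in this change):

from deckhand import errors

exc = errors.LayeringPolicyNotFound()
assert exc.code == 409  # surfaced by the controller as HTTP 409 Conflict
assert 'LayeringPolicy' in exc.format_message()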

@@ -12,31 +12,18 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
from deckhand.engine import layering
from deckhand import errors
from deckhand import factories
from deckhand.tests.unit import base as test_base
from deckhand import types
class TestDocumentLayering(test_base.DeckhandTestCase):
def _extract_layering_policy(self, documents):
for doc in copy.copy(documents):
if doc['schema'].startswith(types.LAYERING_POLICY_SCHEMA):
layering_policy = doc
documents.remove(doc)
return layering_policy
return None
def _test_layering(self, documents, site_expected=None,
region_expected=None, global_expected=None,
exception_expected=None):
layering_policy = self._extract_layering_policy(documents)
document_layering = layering.DocumentLayering(
layering_policy, documents)
document_layering = layering.DocumentLayering(documents)
if all([site_expected, region_expected, global_expected,
exception_expected]):

@@ -69,7 +69,7 @@ class TestDocumentLayeringNegative(
def test_layering_with_broken_layer_order(self, mock_log):
doc_factory = factories.DocumentFactory(2, [1, 1])
documents = doc_factory.gen_test({}, site_abstract=False)
layering_policy = self._extract_layering_policy(documents)
layering_policy = documents[0]
broken_layer_orders = [
['site', 'region', 'global'], ['broken', 'global'], ['broken'],
['site', 'broken']]
@@ -77,7 +77,7 @@ class TestDocumentLayeringNegative(
for broken_layer_order in broken_layer_orders:
layering_policy['data']['layerOrder'] = broken_layer_order
# The site will not be able to find a correct parent.
layering.DocumentLayering(layering_policy, documents)
layering.DocumentLayering(documents)
self.assertRegexpMatches(mock_log.info.mock_calls[0][1][0],
'Could not find parent for document .*')
mock_log.info.reset_mock()
@@ -86,13 +86,12 @@ class TestDocumentLayeringNegative(
def test_layering_child_with_invalid_parent_selector(self, mock_log):
doc_factory = factories.DocumentFactory(2, [1, 1])
documents = doc_factory.gen_test({}, site_abstract=False)
layering_policy = self._extract_layering_policy(documents)
for parent_selector in ({'key2': 'value2'}, {'key1': 'value2'}):
documents[-1]['metadata']['layeringDefinition'][
'parentSelector'] = parent_selector
layering.DocumentLayering(layering_policy, documents)
layering.DocumentLayering(documents)
self.assertRegexpMatches(mock_log.info.mock_calls[0][1][0],
'Could not find parent for document .*')
mock_log.info.reset_mock()
@@ -101,13 +100,12 @@ class TestDocumentLayeringNegative(
def test_layering_unreferenced_parent_label(self, mock_log):
doc_factory = factories.DocumentFactory(2, [1, 1])
documents = doc_factory.gen_test({}, site_abstract=False)
layering_policy = self._extract_layering_policy(documents)
for parent_label in ({'key2': 'value2'}, {'key1': 'value2'}):
# Second doc is the global doc, or parent.
documents[0]['metadata']['labels'] = [parent_label]
documents[1]['metadata']['labels'] = [parent_label]
layering.DocumentLayering(layering_policy, documents)
layering.DocumentLayering(documents)
self.assertRegexpMatches(mock_log.info.mock_calls[0][1][0],
'Could not find parent for document .*')
mock_log.info.reset_mock()
@@ -117,26 +115,22 @@ class TestDocumentLayeringNegative(
# same unique parent identifier referenced by `parentSelector`.
doc_factory = factories.DocumentFactory(2, [1, 1])
documents = doc_factory.gen_test({}, site_abstract=False)
layering_policy = self._extract_layering_policy(documents)
documents.append(documents[0]) # Copy global layer.
documents.append(documents[1]) # Copy global layer.
self.assertRaises(errors.IndeterminateDocumentParent,
layering.DocumentLayering, layering_policy,
documents)
layering.DocumentLayering, documents)
def test_layering_duplicate_parent_selector_3_layer(self):
# Validate that documents belonging to the same layer cannot have the
# same unique parent identifier referenced by `parentSelector`.
doc_factory = factories.DocumentFactory(3, [1, 1, 1])
documents = doc_factory.gen_test({}, site_abstract=False)
layering_policy = self._extract_layering_policy(documents)
# 0 is global layer, 1 is region layer.
for idx in (0, 1):
# 1 is global layer, 2 is region layer.
for idx in (1, 2):
documents.append(documents[idx])
self.assertRaises(errors.IndeterminateDocumentParent,
layering.DocumentLayering, layering_policy,
documents)
layering.DocumentLayering, documents)
documents.pop(-1) # Remove the just-appended duplicate.
@mock.patch.object(layering, 'LOG', autospec=True)
@@ -145,13 +139,12 @@ class TestDocumentLayeringNegative(
# without an error being raised.
doc_factory = factories.DocumentFactory(3, [1, 1, 1])
documents = doc_factory.gen_test({}, site_abstract=False)
layering_policy = self._extract_layering_policy(documents)
self_ref = {"self": "self"}
documents[2]['metadata']['labels'] = self_ref
documents[2]['metadata']['layeringDefinition'][
'parentSelector'] = self_ref
layering.DocumentLayering(layering_policy, documents)
layering.DocumentLayering(documents)
self.assertRegexpMatches(mock_log.info.mock_calls[0][1][0],
'Could not find parent for document .*')
@@ -162,7 +155,6 @@ class TestDocumentLayeringNegative(
"""
doc_factory = factories.DocumentFactory(3, [1, 1, 1])
documents = doc_factory.gen_test({})
layering_policy = self._extract_layering_policy(documents)
# Region and site documents should result in no parent being found
# since their schemas will not match that of their parent's.
@@ -170,10 +162,16 @@ class TestDocumentLayeringNegative(
prev_schema = documents[idx]['schema']
documents[idx]['schema'] = test_utils.rand_name('schema')
layering.DocumentLayering(layering_policy, documents)
layering.DocumentLayering(documents)
self.assertRegexpMatches(mock_log.info.mock_calls[0][1][0],
'Could not find parent for document .*')
mock_log.info.reset_mock()
# Restore schema for next test run.
documents[idx]['schema'] = prev_schema
def test_layering_without_layering_policy_raises_exc(self):
doc_factory = factories.DocumentFactory(1, [1])
documents = doc_factory.gen_test({}, site_abstract=False)[1:]
self.assertRaises(errors.LayeringPolicyNotFound,
layering.DocumentLayering, documents)