diff --git a/deckhand/control/revision_documents.py b/deckhand/control/revision_documents.py
index af6f7363..c633275a 100644
--- a/deckhand/control/revision_documents.py
+++ b/deckhand/control/revision_documents.py
@@ -61,7 +61,7 @@ class RevisionDocumentsResource(api_base.BaseResource):
         filters['deleted'] = False  # Never return deleted documents to user.
 
         try:
-            documents = db_api.revision_get_documents(
+            documents = db_api.revision_documents_get(
                 revision_id, **filters)
         except errors.RevisionNotFound as e:
             LOG.exception(six.text_type(e))
@@ -106,7 +106,6 @@ class RenderedDocumentsResource(api_base.BaseResource):
 
         documents = self._retrieve_documents_for_rendering(revision_id,
                                                            **filters)
-
         try:
             document_layering = layering.DocumentLayering(documents)
             rendered_documents = document_layering.render()
@@ -123,7 +122,6 @@ class RenderedDocumentsResource(api_base.BaseResource):
         # documents have been rendered.
         order_by = sanitized_params.pop('order', None)
         sort_by = sanitized_params.pop('sort', None)
-
         user_filters = sanitized_params.copy()
         user_filters['metadata.layeringDefinition.abstract'] = False
 
@@ -145,7 +143,7 @@ class RenderedDocumentsResource(api_base.BaseResource):
         call and add it to the list of documents.
         """
         try:
-            documents = db_api.revision_get_documents(revision_id, **filters)
+            documents = db_api.revision_documents_get(revision_id, **filters)
         except errors.RevisionNotFound as e:
             LOG.exception(six.text_type(e))
             raise falcon.HTTPNotFound(description=e.format_message())
diff --git a/deckhand/db/sqlalchemy/api.py b/deckhand/db/sqlalchemy/api.py
index 5256bafe..55b92ad3 100644
--- a/deckhand/db/sqlalchemy/api.py
+++ b/deckhand/db/sqlalchemy/api.py
@@ -133,7 +133,7 @@ def require_unique_document_schema(schema=None):
 
         @functools.wraps(f)
         def wrapper(bucket_name, documents, *args, **kwargs):
-            existing_documents = revision_get_documents(
+            existing_documents = revision_documents_get(
                 schema=schema, deleted=False, include_history=False)
             existing_document_names = [x['name'] for x in existing_documents]
             conflicting_names = [
@@ -176,7 +176,7 @@ def documents_create(bucket_name, documents, validations=None,
     # the previous revision (if it exists) that belong to `bucket_name` with
     # `documents`: the difference between the former and the latter.
     document_history = [(d['schema'], d['name'])
-                        for d in revision_get_documents(
+                        for d in revision_documents_get(
                             bucket_name=bucket_name)]
    documents_to_delete = [
        h for h in document_history if h not in
@@ -626,7 +626,7 @@ def _filter_revision_documents(documents, unique_only, **filters):
 
 
 @require_revision_exists
-def revision_get_documents(revision_id=None, include_history=True,
+def revision_documents_get(revision_id=None, include_history=True,
                            unique_only=True, session=None, **filters):
     """Return the documents that match filters for the specified
     `revision_id`.
@@ -656,19 +656,17 @@
                 .order_by(models.Revision.created_at.desc())\
                 .first()
 
-        revision_documents = (revision.to_dict()['documents']
-                              if revision else [])
-
-        if include_history and revision:
-            older_revisions = session.query(models.Revision)\
-                .filter(models.Revision.created_at < revision.created_at)\
-                .order_by(models.Revision.created_at)\
-                .all()
-
-            # Include documents from older revisions in response body.
-            for older_revision in older_revisions:
-                revision_documents.extend(
-                    older_revision.to_dict()['documents'])
+        if revision:
+            revision_documents = revision.to_dict()['documents']
+            if include_history:
+                relevant_revisions = session.query(models.Revision)\
+                    .filter(models.Revision.created_at < revision.created_at)\
+                    .order_by(models.Revision.created_at)\
+                    .all()
+                # Include documents from older revisions in response body.
+                for relevant_revision in relevant_revisions:
+                    revision_documents.extend(
+                        relevant_revision.to_dict()['documents'])
     except sa_orm.exc.NoResultFound:
         raise errors.RevisionNotFound(revision=revision_id)
 
@@ -681,7 +679,7 @@
 
 
 # NOTE(fmontei): No need to include `@require_revision_exists` decorator as
-# the this function immediately calls `revision_get_documents` for both
+# this function immediately calls `revision_documents_get` for both
 # revision IDs, which has the decorator applied to it.
 def revision_diff(revision_id, comparison_revision_id):
     """Generate the diff between two revisions.
@@ -730,11 +728,11 @@
     """
     # Retrieve document history for each revision. Since `revision_id` of 0
     # doesn't exist, treat it as a special case: empty list.
-    docs = (revision_get_documents(revision_id,
+    docs = (revision_documents_get(revision_id,
                                    include_history=True,
                                    unique_only=False)
            if revision_id != 0 else [])
-    comparison_docs = (revision_get_documents(comparison_revision_id,
+    comparison_docs = (revision_documents_get(comparison_revision_id,
                                               include_history=True,
                                               unique_only=False)
                       if comparison_revision_id != 0 else [])
diff --git a/deckhand/tests/unit/control/test_rendered_documents_controller.py b/deckhand/tests/unit/control/test_rendered_documents_controller.py
index 093ac546..17f90ce4 100644
--- a/deckhand/tests/unit/control/test_rendered_documents_controller.py
+++ b/deckhand/tests/unit/control/test_rendered_documents_controller.py
@@ -20,7 +20,6 @@
 from deckhand.control import buckets
 from deckhand.control import revision_documents
 from deckhand import errors
 from deckhand import factories
-from deckhand.tests import test_utils
 from deckhand.tests.unit.control import base as test_base
 from deckhand import types
@@ -122,33 +121,76 @@
         self.assertEqual(2, rendered_documents[0]['status']['revision'])
 
     def test_list_rendered_documents_multiple_buckets(self):
+        """Validates that only the documents from the most recent revision
+        for each bucket in the DB are used for layering.
+        """
         rules = {'deckhand:list_cleartext_documents': '@',
                  'deckhand:list_encrypted_documents': '@',
                  'deckhand:create_cleartext_documents': '@'}
         self.policy.set_rules(rules)
 
-        documents_factory = factories.DocumentFactory(1, [1])
+        bucket_names = ['first', 'first', 'second', 'second']
 
-        for idx in range(2):
-            payload = documents_factory.gen_test({})
-            if idx == 0:
-                # Pop off the first entry so that a conflicting layering
-                # policy isn't created during the 1st iteration.
-                payload.pop(0)
+        # Create 2 documents for each revision. (1 `LayeringPolicy` is created
+        # during the very 1st revision). Total = 9.
+        for x in range(4):
+            bucket_name = bucket_names[x]
+            documents_factory = factories.DocumentFactory(2, [1, 1])
+            payload = documents_factory.gen_test({}, global_abstract=False,
+                                                  site_abstract=False)
+            # Fix up the labels so that each document has a unique parent to
+            # avoid layering errors.
+            payload[-2]['metadata']['labels'] = {
+                'global': bucket_name
+            }
+            payload[-1]['metadata']['layeringDefinition']['parentSelector'] = {
+                'global': bucket_name
+            }
+
+            if x > 0:
+                payload = payload[1:]
 
             resp = self.app.simulate_put(
-                '/api/v1.0/buckets/%s/documents' % test_utils.rand_name(
-                    'bucket'),
+                '/api/v1.0/buckets/%s/documents' % bucket_name,
                 headers={'Content-Type': 'application/x-yaml'},
                 body=yaml.safe_dump_all(payload))
             self.assertEqual(200, resp.status_code)
             revision_id = list(yaml.safe_load_all(resp.text))[0]['status'][
                 'revision']
 
+        # Although 9 documents have been created, 4 of those documents are
+        # stale: they were created in older revisions, so expect 5 documents.
         resp = self.app.simulate_get(
             '/api/v1.0/revisions/%s/rendered-documents' % revision_id,
             headers={'Content-Type': 'application/x-yaml'})
         self.assertEqual(200, resp.status_code)
+        documents = list(yaml.safe_load_all(resp.text))
+        documents = sorted(documents, key=lambda x: x['status']['bucket'])
+
+        # Validate that the LayeringPolicy was returned, then remove it
+        # from documents to validate the rest of them.
+        layering_policies = [
+            d for d in documents
+            if d['schema'].startswith(types.LAYERING_POLICY_SCHEMA)
+        ]
+        self.assertEqual(1, len(layering_policies))
+        documents.remove(layering_policies[0])
+
+        first_revision_ids = [d['status']['revision'] for d in documents
+                              if d['status']['bucket'] == 'first']
+        second_revision_ids = [d['status']['revision'] for d in documents
+                               if d['status']['bucket'] == 'second']
+
+        # Validate correct number of documents, the revision and bucket for
+        # each document.
+        self.assertEqual(4, len(documents))
+        self.assertEqual(['first', 'first', 'second', 'second'],
+                         [d['status']['bucket'] for d in documents])
+        self.assertEqual(2, len(first_revision_ids))
+        self.assertEqual(2, len(second_revision_ids))
+        self.assertEqual([2, 2], first_revision_ids)
+        self.assertEqual([4, 4], second_revision_ids)
+
 
 class TestRenderedDocumentsControllerNegative(
         test_base.BaseControllerTest):
diff --git a/deckhand/tests/unit/db/base.py b/deckhand/tests/unit/db/base.py
index 30cf2bfe..8694c498 100644
--- a/deckhand/tests/unit/db/base.py
+++ b/deckhand/tests/unit/db/base.py
@@ -94,7 +94,7 @@ class TestDbBase(base.DeckhandWithDBTestCase):
         return db_api.revision_delete_all()
 
     def list_revision_documents(self, revision_id, **filters):
-        documents = db_api.revision_get_documents(revision_id, **filters)
+        documents = db_api.revision_documents_get(revision_id, **filters)
         for document in documents:
             self.validate_document(document)
         return documents
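
For reference, a minimal usage sketch of the renamed db_api.revision_documents_get() follows. It is illustrative only: the get_bucket_documents wrapper is hypothetical (not part of this change), and it assumes a configured Deckhand database behind db_api, as in the controllers touched above. The bucket_name and deleted filters mirror the calls shown in the diff.

    # Hypothetical usage sketch of the renamed db_api.revision_documents_get();
    # assumes Deckhand's database is already set up, as in the controllers above.
    from deckhand.db.sqlalchemy import api as db_api
    from deckhand import errors


    def get_bucket_documents(revision_id, bucket_name):
        """Fetch the non-deleted documents for one bucket at a given revision."""
        filters = {'bucket_name': bucket_name, 'deleted': False}
        try:
            # Formerly revision_get_documents(); only the name changed.
            return db_api.revision_documents_get(revision_id, **filters)
        except errors.RevisionNotFound:
            # The controllers translate this into a 404; here we simply return
            # an empty list for a missing revision.
            return []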