Document buckets - update logic

Implement update logic for document buckets. Relevant functional
tests for a single bucket now pass.

Also removed the validation policy DB model to simplify things.

Change-Id: Icd27cae1d2212a24604eb879518d0bd6974cdc9e
Author: Felipe Monteiro
Date:   2017-09-05 22:56:06 +01:00
Parent: 7b77ca707e
Commit: 7e5bb07541
7 files changed, 101 insertions(+), 93 deletions(-)

View File

@ -53,8 +53,9 @@ class BucketsResource(api_base.BaseResource):
raise falcon.HTTPBadRequest(description=e.format_message())
try:
documents.extend(validation_policies)
created_documents = db_api.documents_create(
bucket_name, documents, validation_policies)
bucket_name, documents)
except db_exc.DBDuplicateEntry as e:
raise falcon.HTTPConflict(description=e.format_message())
except Exception as e:

View File

@ -13,6 +13,7 @@
# limitations under the License.
from deckhand.control import common
from deckhand import types
class ViewBuilder(common.ViewBuilder):
@ -43,7 +44,8 @@ class ViewBuilder(common.ViewBuilder):
validation_policies = []
success_status = 'success'
for vp in revision['validation_policies']:
for vp in [d for d in revision['documents']
if d['schema'] == types.VALIDATION_POLICY_SCHEMA]:
validation_policy = {}
validation_policy['name'] = vp.get('name')
validation_policy['url'] = self._gen_url(vp)

View File

@ -88,19 +88,15 @@ def drop_db():
models.unregister_models(get_engine())
def documents_create(bucket_name, documents, validation_policies,
session=None):
def documents_create(bucket_name, documents, session=None):
session = session or get_session()
documents_created = _documents_create(documents, session)
val_policies_created = _documents_create(validation_policies, session)
all_docs_created = documents_created + val_policies_created
if all_docs_created:
if documents_created:
bucket = bucket_get_or_create(bucket_name)
revision = revision_create()
for doc in all_docs_created:
for doc in documents_created:
with session.begin():
doc['bucket_id'] = bucket['name']
doc['revision_id'] = revision['id']
@ -120,10 +116,11 @@ def _documents_create(values_list, session=None):
"""
values_list = copy.deepcopy(values_list)
session = session or get_session()
filters = models.Document.UNIQUE_CONSTRAINTS
filters = [c for c in models.Document.UNIQUE_CONSTRAINTS
if c != 'revision_id']
do_create = False
documents_created = []
documents_to_change = []
changed_documents = []
def _document_changed(existing_document):
# The document has changed if at least one value in ``values`` differs.
@ -132,14 +129,8 @@ def _documents_create(values_list, session=None):
return True
return False
def _get_model(schema):
if schema == types.VALIDATION_POLICY_SCHEMA:
return models.ValidationPolicy()
else:
return models.Document()
def _document_create(values):
document = _get_model(values['schema'])
document = models.Document()
with session.begin():
document.update(values)
return document
@ -150,24 +141,23 @@ def _documents_create(values_list, session=None):
try:
existing_document = document_get(
raw_dict=True,
**{c: values[c] for c in filters if c != 'revision_id'})
raw_dict=True, **{c: values[c] for c in filters})
except errors.DocumentNotFound:
# Ignore bad data at this point. Allow creation to bubble up the
# error related to bad data.
existing_document = None
if not existing_document:
do_create = True
documents_to_change.append(values)
elif existing_document and _document_changed(existing_document):
do_create = True
documents_to_change.append(values)
if do_create:
for values in values_list:
if documents_to_change:
for values in documents_to_change:
doc = _document_create(values)
documents_created.append(doc)
changed_documents.append(doc)
return documents_created
return changed_documents
def document_get(session=None, raw_dict=False, **filters):
@ -272,16 +262,20 @@ def revision_get_documents(revision_id, session=None, **filters):
try:
revision = session.query(models.Revision)\
.filter_by(id=revision_id)\
.one()\
.to_dict()
.one()
older_revisions = session.query(models.Revision)\
.filter(models.Revision.created_at < revision.created_at)\
.order_by(models.Revision.created_at)\
.all()
except sa_orm.exc.NoResultFound:
raise errors.RevisionNotFound(revision=revision_id)
if 'deleted' not in filters:
filters.update({'deleted': False})
document_history = []
for rev in ([revision] + older_revisions):
document_history.extend(rev.to_dict()['documents'])
filtered_documents = _filter_revision_documents(
revision['documents'], **filters)
document_history, **filters)
return filtered_documents
@ -292,9 +286,15 @@ def _filter_revision_documents(documents, **filters):
:returns: List of documents that match specified filters.
"""
# TODO(fmontei): Implement this as an sqlalchemy query.
filtered_documents = []
filtered_documents = {}
unique_filters = [c for c in models.Document.UNIQUE_CONSTRAINTS
if c != 'revision_id']
for document in documents:
# NOTE(fmontei): Only want to include non-validation policy documents
# for this endpoint.
if document['schema'] in types.VALIDATION_POLICY_SCHEMA:
continue
match = True
for filter_key, filter_val in filters.items():
@ -313,9 +313,13 @@ def _filter_revision_documents(documents, **filters):
match = False
if match:
filtered_documents.append(document)
# Filter out redundant documents from previous revisions, i.e.
# documents schema and metadata.name are repeated.
unique_key = tuple([document[filter] for filter in unique_filters])
if unique_key not in filtered_documents:
filtered_documents[unique_key] = document
return filtered_documents
return sorted(filtered_documents.values(), key=lambda d: d['created_at'])
####################

View File

@ -111,14 +111,11 @@ class Revision(BASE, DeckhandBase):
id = Column(String(36), primary_key=True,
default=lambda: str(uuid.uuid4()))
documents = relationship("Document")
validation_policies = relationship("ValidationPolicy")
tags = relationship("RevisionTag")
def to_dict(self):
d = super(Revision, self).to_dict()
d['documents'] = [doc.to_dict() for doc in self.documents]
d['validation_policies'] = [
vp.to_dict() for vp in self.validation_policies]
d['tags'] = [tag.to_dict() for tag in self.tags]
return d
@ -135,11 +132,13 @@ class RevisionTag(BASE, DeckhandBase):
nullable=False)
class DocumentMixin(object):
"""Mixin class for sharing common columns across all document resources
such as documents themselves, layering policies and validation policies.
"""
class Document(BASE, DeckhandBase):
UNIQUE_CONSTRAINTS = ('schema', 'name', 'revision_id')
__tablename__ = 'documents'
__table_args__ = (DeckhandBase.gen_unqiue_contraint(*UNIQUE_CONSTRAINTS),)
id = Column(String(36), primary_key=True,
default=lambda: str(uuid.uuid4()))
name = Column(String(64), nullable=False)
schema = Column(String(64), nullable=False)
# NOTE: Do not define a maximum length for these JSON data below. However,
@ -148,45 +147,23 @@ class DocumentMixin(object):
_metadata = Column(oslo_types.JsonEncodedDict(), nullable=False)
data = Column(oslo_types.JsonEncodedDict(), nullable=False)
@declarative.declared_attr
def bucket_id(cls):
return Column(Integer, ForeignKey('buckets.name', ondelete='CASCADE'),
nullable=False)
bucket_id = Column(Integer, ForeignKey('buckets.name', ondelete='CASCADE'),
nullable=False)
@declarative.declared_attr
def revision_id(cls):
return Column(Integer, ForeignKey('revisions.id', ondelete='CASCADE'),
nullable=False)
class Document(BASE, DeckhandBase, DocumentMixin):
UNIQUE_CONSTRAINTS = ('schema', 'name', 'revision_id')
__tablename__ = 'documents'
__table_args__ = (DeckhandBase.gen_unqiue_contraint(*UNIQUE_CONSTRAINTS),)
id = Column(String(36), primary_key=True,
default=lambda: str(uuid.uuid4()))
class ValidationPolicy(BASE, DeckhandBase, DocumentMixin):
UNIQUE_CONSTRAINTS = ('schema', 'name', 'revision_id')
__tablename__ = 'validation_policies'
__table_args__ = (DeckhandBase.gen_unqiue_contraint(*UNIQUE_CONSTRAINTS),)
id = Column(String(36), primary_key=True,
default=lambda: str(uuid.uuid4()))
revision_id = Column(
Integer, ForeignKey('revisions.id', ondelete='CASCADE'),
nullable=False)
def register_models(engine):
"""Create database tables for all models with the given engine."""
models = [Bucket, Document, Revision, ValidationPolicy]
models = [Bucket, Document, Revision]
for model in models:
model.metadata.create_all(engine)
def unregister_models(engine):
"""Drop database tables for all models with the given engine."""
models = [Bucket, Document, Revision, ValidationPolicy]
models = [Bucket, Document, Revision]
for model in models:
model.metadata.drop_all(engine)

View File

@ -101,13 +101,12 @@ tests:
- name: update_single_document
desc: Update a single document, ignore other documents in the bucket
PUT: /api/v1.0/bucket/mop/documents
status: 201
status: 200
data: <@resources/design-doc-layering-sample-with-update.yaml
skip: Not implemented.
- name: verify_update
desc: Verify updated document count and revisions
GET: /api/v1.0/revisions/$RESPONSE['$.[0].revision']/documents
GET: /api/v1.0/revisions/$RESPONSE['$.[0].status.revision']/documents
status: 200
response_multidoc_jsonpaths:
$.[*].metadata.name:
@ -116,21 +115,20 @@ tests:
- region-1234
- site-1234
$.[*].status.revision:
- "$HISTORY['initialize'].$RESPONSE['$.[0].revision']"
- "$HISTORY['initialize'].$RESPONSE['$.[0].revision']"
- "$HISTORY['initialize'].$RESPONSE['$.[0].revision']"
- "$RESPONSE['$.[0].revision']"
- "$HISTORY['initialize'].$RESPONSE['$.[0].status.revision']"
- "$HISTORY['initialize'].$RESPONSE['$.[0].status.revision']"
- "$HISTORY['initialize'].$RESPONSE['$.[0].status.revision']"
- "$RESPONSE['$.[0].status.revision']"
$.[*].status.bucket:
- mop
- mop
- mop
- mop
$.[3].data.b: 5
skip: Not implemented.
- name: verify_initial_documents_preserved_after_update
desc: Verify initial documents count and revisions preserved after update
GET: /api/v1.0/revisions/$HISTORY['initialize'].$RESPONSE['$.[0].revision']/documents
GET: /api/v1.0/revisions/$HISTORY['initialize'].$RESPONSE['$.[0].status.revision']/documents
status: 200
response_multidoc_jsonpaths:
$.[*].metadata.name:
@ -139,17 +137,16 @@ tests:
- region-1234
- site-1234
$.[*].status.revision:
- "$HISTORY['initialize'].$RESPONSE['$.[0].revision']"
- "$HISTORY['initialize'].$RESPONSE['$.[0].revision']"
- "$HISTORY['initialize'].$RESPONSE['$.[0].revision']"
- "$HISTORY['initialize'].$RESPONSE['$.[0].revision']"
- "$HISTORY['initialize'].$RESPONSE['$.[0].status.revision']"
- "$HISTORY['initialize'].$RESPONSE['$.[0].status.revision']"
- "$HISTORY['initialize'].$RESPONSE['$.[0].status.revision']"
- "$HISTORY['initialize'].$RESPONSE['$.[0].status.revision']"
$.[*].status.bucket:
- mop
- mop
- mop
- mop
$.[3].data.b: 4
skip: Not implemented.
- name: delete_document
desc: Delete a single document

View File

@ -21,7 +21,7 @@ from deckhand.tests.unit import base
BASE_EXPECTED_FIELDS = ("created_at", "updated_at", "deleted_at", "deleted")
DOCUMENT_EXPECTED_FIELDS = BASE_EXPECTED_FIELDS + (
"id", "schema", "name", "metadata", "data", "revision_id", "bucket_id")
REVISION_EXPECTED_FIELDS = ("id", "documents", "validation_policies", "tags")
REVISION_EXPECTED_FIELDS = ("id", "documents", "tags")
# TODO(fmontei): Move this into a separate module called `fixtures`.
@ -54,7 +54,7 @@ class DocumentFixture(object):
class TestDbBase(base.DeckhandWithDBTestCase):
def create_documents(self, bucket_name, documents,
validation_policies=None):
validation_policies=None, do_validation=True):
if not validation_policies:
validation_policies = []
@ -66,9 +66,10 @@ class TestDbBase(base.DeckhandWithDBTestCase):
docs = db_api.documents_create(
bucket_name, documents, validation_policies)
for idx, doc in enumerate(docs):
self.validate_document(expected=documents[idx], actual=doc)
self.assertEqual(bucket_name, doc['bucket_id'])
if do_validation:
for idx, doc in enumerate(docs):
self.validate_document(expected=documents[idx], actual=doc)
self.assertEqual(bucket_name, doc['bucket_id'])
return docs

View File

@ -32,20 +32,46 @@ class TestRevisions(base.TestDbBase):
self.assertEqual(1, len(revisions))
self.assertEqual(4, len(revisions[0]['documents']))
def test_create_many_update_one(self):
documents = [base.DocumentFixture.get_minimal_fixture()
for _ in range(4)]
bucket_name = test_utils.rand_name('bucket')
created_documents = self.create_documents(bucket_name, documents)
# Update the last document.
documents[-1]['data'] = {'foo': 'bar'}
updated_documents = self.create_documents(
bucket_name, documents, do_validation=False)
self.assertEqual(1, len(updated_documents))
self.assertEqual(created_documents[-1]['bucket_id'],
updated_documents[0]['bucket_id'])
self.assertNotEqual(created_documents[-1]['revision_id'],
updated_documents[0]['revision_id'])
revision_documents = self.list_revision_documents(
updated_documents[0]['revision_id'])
self.assertEqual(4, len(revision_documents))
self.assertEqual(created_documents[:-1] + updated_documents,
revision_documents)
def test_list_with_validation_policies(self):
documents = [base.DocumentFixture.get_minimal_fixture()
for _ in range(4)]
vp_factory = factories.ValidationPolicyFactory()
validation_policy = vp_factory.gen(types.DECKHAND_SCHEMA_VALIDATION,
'success')
documents.extend([validation_policy])
bucket_name = test_utils.rand_name('bucket')
self.create_documents(bucket_name, documents, [validation_policy])
self.create_documents(bucket_name, documents)
revisions = self.list_revisions()
self.assertIsInstance(revisions, list)
self.assertEqual(1, len(revisions))
self.assertEqual(4, len(revisions[0]['documents']))
self.assertEqual(1, len(revisions[0]['validation_policies']))
self.assertEqual(5, len(revisions[0]['documents']))
self.assertEqual(types.VALIDATION_POLICY_SCHEMA,
revisions[0]['documents'][-1]['schema'])
def test_delete_all(self):
all_created_documents = []