Initial implementation of buckets

This commit adds endpoints for:

  * DELETE /revisions
  * PUT /bucket/{{bucket_name}}/revisions

Included in this commit:

  * Initial DB code for buckets
  * Initial API code for API buckets
  * Refactored unit tests to work with buckets
  * Passing *some* functional tests for:
    - revision-crud-success-single-bucket (*all*)
    - document-crud-success-single-bucket (*some*)
  * Corrected document view for list and corrected
    loads in MultidocJsonpaths for test_gabbi to not
    fix up the response body

Change-Id: Idf941591d24804b77441ab84259f8b7063c88a33
This commit is contained in:
Felipe Monteiro 2017-08-20 16:47:39 -04:00
parent e62e9e9f40
commit c19309f347
32 changed files with 513 additions and 238 deletions

View File

@ -1,5 +1,7 @@
Alan Meadows <alan.meadows@gmail.com>
Anthony Lin <anthony.jclin@gmail.com>
Felipe Monteiro <felipe.monteiro@att.com>
Felipe Monteiro <fmontei@users.noreply.github.com>
Mark Burnett <mark.m.burnett@gmail.com>
Pete Birley <pete@port.direct>
Scott Hussey <sh8121@att.com>

View File

@ -19,8 +19,9 @@ from oslo_config import cfg
from oslo_log import log as logging
from deckhand.conf import config
from deckhand.control import base as api_base
from deckhand.control import documents
from deckhand.control import base
from deckhand.control import buckets
from deckhand.control import middleware
from deckhand.control import revision_documents
from deckhand.control import revisions
from deckhand.control import secrets
@ -54,9 +55,29 @@ def __setup_logging():
def __setup_db():
    # Recreate the schema from scratch on startup: drop any existing
    # tables, then create them again so the API starts from a clean DB.
    # NOTE(review): destructive on every start -- confirm this is intended
    # outside of development/test environments.
    db_api.drop_db()
    db_api.setup_db()
def _get_routing_map():
ROUTING_MAP = {
'/api/v1.0/bucket/.+/documents': ['PUT'],
'/api/v1.0/revisions': ['GET', 'DELETE'],
'/api/v1.0/revisions/.+': ['GET'],
'/api/v1.0/revisions/documents': ['GET']
}
for route in ROUTING_MAP.keys():
# Denote the start of the regex with "^".
route_re = '^.*' + route
# Debite the end of the regex with "$". Allow for an optional "/" at
# the end of each request uri.
route_re = route_re + '[/]{0,1}$'
ROUTING_MAP[route_re] = ROUTING_MAP.pop(route)
return ROUTING_MAP
def start_api(state_manager=None):
"""Main entry point for initializing the Deckhand API service.
@ -65,14 +86,17 @@ def start_api(state_manager=None):
__setup_logging()
__setup_db()
control_api = falcon.API(request_type=api_base.DeckhandRequest)
control_api = falcon.API(
request_type=base.DeckhandRequest,
middleware=[middleware.ContextMiddleware(_get_routing_map())])
v1_0_routes = [
('documents', documents.DocumentsResource()),
('bucket/{bucket_name}/documents', buckets.BucketsResource()),
('revisions', revisions.RevisionsResource()),
('revisions/{revision_id}', revisions.RevisionsResource()),
('revisions/{revision_id}/documents',
revision_documents.RevisionDocumentsResource()),
# TODO(fmontei): remove in follow-up commit.
('secrets', secrets.SecretsResource())
]

View File

@ -28,18 +28,16 @@ from deckhand import errors as deckhand_errors
LOG = logging.getLogger(__name__)
class DocumentsResource(api_base.BaseResource):
"""API resource for realizing CRUD endpoints for Documents."""
class BucketsResource(api_base.BaseResource):
"""API resource for realizing CRUD operations for buckets."""
def on_post(self, req, resp):
"""Create a document. Accepts YAML data only."""
if req.content_type != 'application/x-yaml':
LOG.warning('Requires application/yaml payload.')
view_builder = document_view.ViewBuilder()
def on_put(self, req, resp, bucket_name=None):
document_data = req.stream.read(req.content_length or 0)
try:
documents = [d for d in yaml.safe_load_all(document_data)]
documents = list(yaml.safe_load_all(document_data))
except yaml.YAMLError as e:
error_msg = ("Could not parse the document into YAML data. "
"Details: %s." % e)
@ -51,22 +49,19 @@ class DocumentsResource(api_base.BaseResource):
try:
validation_policies = document_validation.DocumentValidation(
documents).validate_all()
except (deckhand_errors.InvalidDocumentFormat,
deckhand_errors.UnknownDocumentFormat) as e:
except (deckhand_errors.InvalidDocumentFormat) as e:
return self.return_error(resp, falcon.HTTP_400, message=e)
try:
created_documents = db_api.documents_create(
documents, validation_policies)
bucket_name, documents, validation_policies)
except db_exc.DBDuplicateEntry as e:
return self.return_error(resp, falcon.HTTP_409, message=e)
raise falcon.HTTPConflict()
except Exception as e:
return self.return_error(resp, falcon.HTTP_500, message=e)
raise falcon.HTTPInternalServerError()
if created_documents:
resp.status = falcon.HTTP_201
resp.append_header('Content-Type', 'application/x-yaml')
resp_body = document_view.ViewBuilder().list(created_documents)
resp.body = self.to_yaml_body(resp_body)
else:
resp.status = falcon.HTTP_204
resp.body = self.to_yaml_body(
self.view_builder.list(created_documents))
resp.status = falcon.HTTP_200
resp.append_header('Content-Type', 'application/x-yaml')

View File

@ -12,8 +12,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
import string
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
def to_camel_case(s):
return (s[0].lower() + string.capwords(s, sep='_').replace('_', '')[1:]
@ -29,3 +35,29 @@ class ViewBuilder(object):
# TODO(fmontei): Use a config-based url for the base url below.
base_url = 'https://deckhand/api/v1.0/%s/%s'
return base_url % (self._collection_name, revision.get('id'))
def sanitize_params(allowed_params):
    """Sanitize query string parameters passed to an HTTP request.

    Overrides the ``params`` attribute in the ``req`` object with the
    sanitized params. Invalid parameters are ignored.

    :param allowed_params: The request's query string parameters.
    """
    def decorator(func):
        @functools.wraps(func)
        def wrapper(self, req, *func_args, **func_kwargs):
            current_params = req.params or {}
            # Keep only whitelisted keys; everything else is dropped.
            sanitized = {key: val for key, val in current_params.items()
                         if key in allowed_params}
            # The sanitized dict is injected as an extra positional
            # argument, immediately after any existing positional args.
            return func(self, req, *(func_args + (sanitized,)),
                        **func_kwargs)
        return wrapper
    return decorator

View File

@ -0,0 +1,40 @@
# Copyright 2017 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import falcon
class ContextMiddleware(object):
    """Falcon middleware that enforces per-route allowed HTTP methods."""

    def __init__(self, routing_map):
        # Map of URI regex pattern -> list of allowed HTTP method names.
        self.routing_map = routing_map

    def process_request(self, req, resp):
        """Reject requests whose method is not allowed for their route.

        Raises ``falcon.HTTPMethodNotAllowed`` when the matched route
        does not permit the request's method, or (with an empty allowed
        list) when no route pattern matches the request URI at all.
        """
        matched = False
        for pattern, allowed in self.routing_map.items():
            if re.match(pattern, req.uri):
                if req.method not in allowed:
                    raise falcon.HTTPMethodNotAllowed(allowed)
                matched = True
                break

        if not matched:
            raise falcon.HTTPMethodNotAllowed([])

View File

@ -15,14 +15,21 @@
import falcon
from deckhand.control import base as api_base
from deckhand.control import common
from deckhand.control.views import document as document_view
from deckhand.db.sqlalchemy import api as db_api
from deckhand import errors
class RevisionDocumentsResource(api_base.BaseResource):
"""API resource for realizing CRUD endpoints for Document Revisions."""
"""API resource for realizing CRUD endpoints for revision documents."""
def on_get(self, req, resp, revision_id):
view_builder = document_view.ViewBuilder()
@common.sanitize_params([
'schema', 'metadata.name', 'metadata.layeringDefinition.abstract',
'metadata.layeringDefinition.layer', 'metadata.label'])
def on_get(self, req, resp, sanitized_params, revision_id):
"""Returns all documents for a `revision_id`.
Returns a multi-document YAML response containing all the documents
@ -30,12 +37,12 @@ class RevisionDocumentsResource(api_base.BaseResource):
documents will be as originally posted with no substitutions or
layering applied.
"""
params = req.params
try:
documents = db_api.revision_get_documents(revision_id, **params)
except errors.RevisionNotFound as e:
return self.return_error(resp, falcon.HTTP_404, message=e)
documents = db_api.revision_get_documents(
revision_id, **sanitized_params)
except errors.RevisionNotFound:
raise falcon.HTTPNotFound()
resp.status = falcon.HTTP_200
resp.append_header('Content-Type', 'application/x-yaml')
resp.body = self.to_yaml_body(documents)
resp.body = self.to_yaml_body(self.view_builder.list(documents))

View File

@ -23,6 +23,8 @@ from deckhand import errors
class RevisionsResource(api_base.BaseResource):
"""API resource for realizing CRUD operations for revisions."""
view_builder = revision_view.ViewBuilder()
def on_get(self, req, resp, revision_id=None):
"""Returns list of existing revisions.
@ -43,18 +45,23 @@ class RevisionsResource(api_base.BaseResource):
"""
try:
revision = db_api.revision_get(revision_id)
except errors.RevisionNotFound as e:
return self.return_error(resp, falcon.HTTP_404, message=e)
except errors.RevisionNotFound:
raise falcon.HTTPNotFound()
revision_resp = revision_view.ViewBuilder().show(revision)
revision_resp = self.view_builder.show(revision)
resp.status = falcon.HTTP_200
resp.append_header('Content-Type', 'application/x-yaml')
resp.body = self.to_yaml_body(revision_resp)
def _list_revisions(self, req, resp):
revisions = db_api.revision_get_all()
revisions_resp = revision_view.ViewBuilder().list(revisions)
revisions_resp = self.view_builder.list(revisions)
resp.status = falcon.HTTP_200
resp.append_header('Content-Type', 'application/x-yaml')
resp.body = self.to_yaml_body(revisions_resp)
def on_delete(self, req, resp):
db_api.revision_delete_all()
resp.append_header('Content-Type', 'application/x-yaml')
resp.status = falcon.HTTP_204

View File

@ -21,14 +21,18 @@ class ViewBuilder(common.ViewBuilder):
_collection_name = 'documents'
def list(self, documents):
resp_body = {
'documents': []
}
resp_list = []
# TODO(fmontei): Convert these IDs to URLs instead once URL conversion
# is implemented.
for document in documents:
resp_body.setdefault('revision_id', document['revision_id'])
resp_body['documents'].append(document['id'])
attrs = ['id', 'metadata', 'data', 'schema']
if document['deleted']:
attrs.append('deleted')
return resp_body
resp_obj = {x: document[x] for x in attrs}
resp_obj.setdefault('status', {})
resp_obj['status']['bucket'] = document['bucket_id']
resp_obj['status']['revision'] = document['revision_id']
resp_list.append(resp_obj)
return resp_list

View File

@ -62,7 +62,6 @@ class ViewBuilder(common.ViewBuilder):
'id': revision.get('id'),
'createdAt': revision.get('created_at'),
'url': self._gen_url(revision),
# TODO(fmontei): Not yet implemented.
'validationPolicies': validation_policies,
'status': success_status
}

View File

@ -87,7 +87,8 @@ def drop_db():
models.unregister_models(get_engine())
def documents_create(documents, validation_policies, session=None):
def documents_create(bucket_name, documents, validation_policies,
session=None):
session = session or get_session()
documents_created = _documents_create(documents, session)
@ -95,9 +96,12 @@ def documents_create(documents, validation_policies, session=None):
all_docs_created = documents_created + val_policies_created
if all_docs_created:
bucket = bucket_get_or_create(bucket_name)
revision = revision_create()
for doc in all_docs_created:
with session.begin():
doc['bucket_id'] = bucket['name']
doc['revision_id'] = revision['id']
doc.save(session=session)
@ -128,9 +132,7 @@ def _documents_create(values_list, session=None):
return False
def _get_model(schema):
if schema == types.LAYERING_POLICY_SCHEMA:
return models.LayeringPolicy()
elif schema == types.VALIDATION_POLICY_SCHEMA:
if schema == types.VALIDATION_POLICY_SCHEMA:
return models.ValidationPolicy()
else:
return models.Document()
@ -149,7 +151,7 @@ def _documents_create(values_list, session=None):
existing_document = document_get(
raw_dict=True,
**{c: values[c] for c in filters if c != 'revision_id'})
except db_exception.DBError:
except errors.DocumentNotFound:
# Ignore bad data at this point. Allow creation to bubble up the
# error related to bad data.
existing_document = None
@ -169,8 +171,36 @@ def _documents_create(values_list, session=None):
def document_get(session=None, raw_dict=False, **filters):
session = session or get_session()
document = session.query(models.Document).filter_by(**filters).first()
return document.to_dict(raw_dict=raw_dict) if document else {}
if 'document_id' in filters:
filters['id'] = filters.pop('document_id')
try:
document = session.query(models.Document)\
.filter_by(**filters)\
.one()
except sa_orm.exc.NoResultFound:
raise errors.DocumentNotFound(document=filters)
return document.to_dict(raw_dict=raw_dict)
####################
def bucket_get_or_create(bucket_name, session=None):
    """Retrieve the bucket named ``bucket_name``, creating it if absent.

    :param bucket_name: Unique name of the bucket to look up or create.
    :param session: Optional DB session; a new one is acquired if omitted.
    :returns: Dictionary representation of the bucket model.
    """
    session = session or get_session()

    try:
        bucket = session.query(models.Bucket)\
            .filter_by(name=bucket_name)\
            .one()
    except sa_orm.exc.NoResultFound:
        # No bucket with this name yet -- create and persist it inside
        # a transaction so concurrent creators don't half-commit.
        bucket = models.Bucket()
        with session.begin():
            bucket.update({'name': bucket_name})
            bucket.save(session=session)

    return bucket.to_dict()
####################
@ -178,6 +208,7 @@ def document_get(session=None, raw_dict=False, **filters):
def revision_create(session=None):
session = session or get_session()
revision = models.Revision()
with session.begin():
revision.save(session=session)
@ -193,12 +224,13 @@ def revision_get(revision_id, session=None):
session = session or get_session()
try:
revision = session.query(models.Revision).filter_by(
id=revision_id).one().to_dict()
revision = session.query(models.Revision)\
.filter_by(id=revision_id)\
.one()
except sa_orm.exc.NoResultFound:
raise errors.RevisionNotFound(revision=revision_id)
return revision
return revision.to_dict()
def revision_get_all(session=None):
@ -208,27 +240,43 @@ def revision_get_all(session=None):
return [r.to_dict() for r in revisions]
def revision_delete_all(session=None):
    """Delete all revisions.

    :param session: Optional DB session; a new one is acquired if omitted.
    """
    session = session or get_session()
    # NOTE(review): bulk query-level delete bypasses ORM cascade logic and
    # does not expire in-session objects (synchronize_session=False);
    # dependent document rows presumably rely on the FK ``ondelete``
    # behavior at the DB level -- confirm.
    session.query(models.Revision)\
        .delete(synchronize_session=False)
def revision_get_documents(revision_id, session=None, **filters):
"""Return the documents that match filters for the specified `revision_id`.
Deleted documents are not included unless deleted=True is provided in
``filters``.
:raises: RevisionNotFound if the revision was not found.
"""
session = session or get_session()
try:
revision = session.query(models.Revision).filter_by(
id=revision_id).one().to_dict()
revision = session.query(models.Revision)\
.filter_by(id=revision_id)\
.one()\
.to_dict()
except sa_orm.exc.NoResultFound:
raise errors.RevisionNotFound(revision=revision_id)
if 'deleted' not in filters:
filters.update({'deleted': False})
filtered_documents = _filter_revision_documents(
revision['documents'], **filters)
return filtered_documents
def _filter_revision_documents(documents, **filters):
"""Return the list of documents that match filters.
:returns: list of documents that match specified filters.
:returns: List of documents that match specified filters.
"""
# TODO(fmontei): Implement this as an sqlalchemy query.
filtered_documents = []
@ -240,7 +288,7 @@ def _filter_revision_documents(documents, **filters):
actual_val = utils.multi_getattr(filter_key, document)
if (isinstance(actual_val, bool)
and isinstance(filter_val, six.text_type)):
and isinstance(filter_val, six.string_types)):
try:
filter_val = ast.literal_eval(filter_val.title())
except ValueError:

View File

@ -52,7 +52,7 @@ class DeckhandBase(models.ModelBase, models.TimestampMixin):
deleted_at = Column(DateTime, nullable=True)
deleted = Column(Boolean, nullable=False, default=False)
def delete(self, session=None):
def safe_delete(self, session=None):
"""Delete this object."""
self.deleted = True
self.deleted_at = timeutils.utcnow()
@ -81,14 +81,14 @@ class DeckhandBase(models.ModelBase, models.TimestampMixin):
if 'deleted_at' not in d:
d.setdefault('deleted_at', None)
for k in ["created_at", "updated_at", "deleted_at", "deleted"]:
for k in ["created_at", "updated_at", "deleted_at"]:
if k in d and d[k]:
d[k] = d[k].isoformat()
# NOTE(fmontei): ``metadata`` is reserved by the DB, so ``_metadata``
# must be used to store document metadata information in the DB.
if not raw_dict and '_metadata' in self.keys():
d['metadata'] = d['_metadata']
d['metadata'] = d.pop('_metadata')
return d
@ -100,6 +100,13 @@ class DeckhandBase(models.ModelBase, models.TimestampMixin):
return schema.UniqueConstraint(*fields, name=constraint_name)
class Bucket(BASE, DeckhandBase):
__tablename__ = 'buckets'
name = Column(String(36), primary_key=True)
documents = relationship("Document")
class Revision(BASE, DeckhandBase):
__tablename__ = 'revisions'
@ -129,9 +136,15 @@ class DocumentMixin(object):
_metadata = Column(oslo_types.JsonEncodedDict(), nullable=False)
data = Column(oslo_types.JsonEncodedDict(), nullable=False)
@declarative.declared_attr
def bucket_id(cls):
return Column(Integer, ForeignKey('buckets.name', ondelete='CASCADE'),
nullable=False)
@declarative.declared_attr
def revision_id(cls):
return Column(Integer, ForeignKey('revisions.id'), nullable=False)
return Column(Integer, ForeignKey('revisions.id', ondelete='CASCADE'),
nullable=False)
class Document(BASE, DeckhandBase, DocumentMixin):
@ -143,18 +156,6 @@ class Document(BASE, DeckhandBase, DocumentMixin):
default=lambda: str(uuid.uuid4()))
class LayeringPolicy(BASE, DeckhandBase, DocumentMixin):
# NOTE(fmontei): Only one layering policy can exist per revision, so
# enforce this constraint at the DB level.
UNIQUE_CONSTRAINTS = ('revision_id',)
__tablename__ = 'layering_policies'
__table_args__ = (DeckhandBase.gen_unqiue_contraint(*UNIQUE_CONSTRAINTS),)
id = Column(String(36), primary_key=True,
default=lambda: str(uuid.uuid4()))
class ValidationPolicy(BASE, DeckhandBase, DocumentMixin):
UNIQUE_CONSTRAINTS = ('schema', 'name', 'revision_id')
@ -167,13 +168,13 @@ class ValidationPolicy(BASE, DeckhandBase, DocumentMixin):
def register_models(engine):
"""Create database tables for all models with the given engine."""
models = [Document, Revision, LayeringPolicy, ValidationPolicy]
models = [Bucket, Document, Revision, ValidationPolicy]
for model in models:
model.metadata.create_all(engine)
def unregister_models(engine):
"""Drop database tables for all models with the given engine."""
models = [Document, Revision, LayeringPolicy, ValidationPolicy]
models = [Bucket, Document, Revision, ValidationPolicy]
for model in models:
model.metadata.drop_all(engine)

View File

@ -17,8 +17,8 @@ schema = {
'properties': {
'schema': {
'type': 'string',
# Currently supported versions include v1 only.
'pattern': '^([A-Za-z]+\/[A-Za-z]+\/v[1]{1}\.[0]{1})$'
# Currently supported versions include v1/v1.0 only.
'pattern': '^([A-Za-z]+\/[A-Za-z]+\/v[1]{1}(\.[0]{1}){0,1})$'
},
'metadata': {
'type': 'object',

View File

@ -17,14 +17,14 @@ schema = {
'properties': {
'schema': {
'type': 'string',
'pattern': '^(deckhand/CertificateKey/v[1]{1}\.[0]{1})$'
'pattern': '^(deckhand/CertificateKey/v[1]{1}(\.[0]{1}){0,1})$'
},
'metadata': {
'type': 'object',
'properties': {
'schema': {
'type': 'string',
'pattern': '^(metadata/Document/v[1]{1}\.[0]{1})$',
'pattern': '^(metadata/Document/v[1]{1}(\.[0]{1}){0,1})$',
},
'name': {'type': 'string'},
'storagePolicy': {

View File

@ -17,14 +17,14 @@ schema = {
'properties': {
'schema': {
'type': 'string',
'pattern': '^(deckhand/Certificate/v[1]{1}\.[0]{1})$'
'pattern': '^(deckhand/Certificate/v[1]{1}(\.[0]{1}){0,1})$'
},
'metadata': {
'type': 'object',
'properties': {
'schema': {
'type': 'string',
'pattern': '^(metadata/Document/v[1]{1}\.[0]{1})$',
'pattern': '^(metadata/Document/v[1]{1}(\.[0]{1}){0,1})$',
},
'name': {'type': 'string'},
'storagePolicy': {

View File

@ -20,14 +20,14 @@ schema = {
'properties': {
'schema': {
'type': 'string',
'pattern': '^(deckhand/DataSchema/v[1]{1}\.[0]{1})$'
'pattern': '^(deckhand/DataSchema/v[1]{1}(\.[0]{1}){0,1})$'
},
'metadata': {
'type': 'object',
'properties': {
'schema': {
'type': 'string',
'pattern': '^(metadata/Control/v[1]{1}\.[0]{1})$'
'pattern': '^(metadata/Control/v[1]{1}(\.[0]{1}){0,1})$'
},
'name': {'type': 'string'},
# Labels are optional.

View File

@ -44,14 +44,14 @@ schema = {
'properties': {
'schema': {
'type': 'string',
'pattern': '^([A-Za-z]+/[A-Za-z]+/v[1]{1}\.[0]{1})$'
'pattern': '^([A-Za-z]+/[A-Za-z]+/v[1]{1}(\.[0]{1}){0,1})$'
},
'metadata': {
'type': 'object',
'properties': {
'schema': {
'type': 'string',
'pattern': '^(metadata/Document/v[1]{1}\.[0]{1})$'
'pattern': '^(metadata/Document/v[1]{1}(\.[0]{1}){0,1})$'
},
'name': {'type': 'string'},
'labels': {'type': 'object'},
@ -78,7 +78,7 @@ schema = {
}
},
'additionalProperties': False,
'required': ['layer', 'abstract']
'required': ['layer']
},
# "substitutions" is optional.
'substitutions': {

View File

@ -17,14 +17,14 @@ schema = {
'properties': {
'schema': {
'type': 'string',
'pattern': '^(deckhand/LayeringPolicy/v[1]{1}\.[0]{1})$'
'pattern': '^(deckhand/LayeringPolicy/v[1]{1}(\.[0]{1}){0,1})$'
},
'metadata': {
'type': 'object',
'properties': {
'schema': {
'type': 'string',
'pattern': '^(metadata/Control/v[1]{1}\.[0]{1})$'
'pattern': '^(metadata/Control/v[1]{1}(\.[0]{1}){0,1})$'
},
'name': {'type': 'string'}
},

View File

@ -17,14 +17,14 @@ schema = {
'properties': {
'schema': {
'type': 'string',
'pattern': '^(deckhand/Passphrase/v[1]{1}\.[0]{1})$'
'pattern': '^(deckhand/Passphrase/v[1]{1}(\.[0]{1}){0,1})$'
},
'metadata': {
'type': 'object',
'properties': {
'schema': {
'type': 'string',
'pattern': '^(metadata/Document/v[1]{1}\.[0]{1})$',
'pattern': '^(metadata/Document/v[1]{1}(\.[0]{1}){0,1})$',
},
'name': {'type': 'string'},
'storagePolicy': {

View File

@ -17,14 +17,14 @@ schema = {
'properties': {
'schema': {
'type': 'string',
'pattern': '^(deckhand/ValidationPolicy/v[1]{1}\.[0]{1})$'
'pattern': '^(deckhand/ValidationPolicy/v[1]{1}(\.[0]{1}){0,1})$'
},
'metadata': {
'type': 'object',
'properties': {
'schema': {
'type': 'string',
'pattern': '^(metadata/Control/v[1]{1}\.[0]{1})$'
'pattern': '^(metadata/Control/v[1]{1}(\.[0]{1}){0,1})$'
},
'name': {'type': 'string'}
},

View File

@ -105,6 +105,11 @@ class UnsupportedActionMethod(DeckhandException):
code = 400
class DocumentNotFound(DeckhandException):
msg_fmt = ("The requested document %(document)s was not found.")
code = 404
class RevisionNotFound(DeckhandException):
msg_fmt = ("The requested revision %(revision)s was not found.")
code = 403
code = 404

View File

@ -43,65 +43,60 @@ tests:
desc: Begin testing from known state.
DELETE: /api/v1.0/revisions
status: 204
skip: Not implemented.
- name: initialize
desc: Create initial documents
PUT: /api/v1.0/bucket/mop/documents
status: 201
status: 200
data: <@resources/design-doc-layering-sample.yaml
skip: Not implemented.
- name: verify_initial
desc: Verify initial document count and revisions
GET: /api/v1.0/revisions/$RESPONSE['$.documents[0].revision']/documents
GET: /api/v1.0/revisions/$RESPONSE['$.[0].status.revision']/documents
status: 200
response_multidoc_jsonpaths:
$.documents[*].metadata.name:
$.[*].metadata.name:
- layering-policy
- global-1234
- region-1234
- site-1234
$.documents[*].status.revision:
- "$RESPONSE['$.documents[0].revision']"
- "$RESPONSE['$.documents[0].revision']"
- "$RESPONSE['$.documents[0].revision']"
- "$RESPONSE['$.documents[0].revision']"
$.documents[*].status.bucket:
$.[*].status.revision:
- "$RESPONSE['$.[0].status.revision']"
- "$RESPONSE['$.[0].status.revision']"
- "$RESPONSE['$.[0].status.revision']"
- "$RESPONSE['$.[0].status.revision']"
$.[*].status.bucket:
- mop
- mop
- mop
- mop
skip: Not implemented.
- name: ignore_duplicate
desc: Push a duplicate bucket of documents
PUT: /api/v1.0/bucket/mop/documents
status: 200
data: <@resources/design-doc-layering-sample.yaml
skip: Not implemented.
- name: verify_ignore
desc: Verify duplicate documents were ignored
GET: /api/v1.0/revisions/$RESPONSE['$.documents[0].revision']/documents
GET: /api/v1.0/revisions/$HISTORY['initialize'].$RESPONSE['$.[0].status.revision']/documents
status: 200
response_multidoc_jsonpaths:
$.documents[*].metadata.name:
$.[*].metadata.name:
- layering-policy
- global-1234
- region-1234
- site-1234
$.documents[*].status.revision:
- "$HISTORY['initialize'].$RESPONSE['$.documents[0].revision']"
- "$HISTORY['initialize'].$RESPONSE['$.documents[0].revision']"
- "$HISTORY['initialize'].$RESPONSE['$.documents[0].revision']"
- "$HISTORY['initialize'].$RESPONSE['$.documents[0].revision']"
$.documents[*].status.bucket:
$.[*].status.revision:
- "$HISTORY['initialize'].$RESPONSE['$.[0].status.revision']"
- "$HISTORY['initialize'].$RESPONSE['$.[0].status.revision']"
- "$HISTORY['initialize'].$RESPONSE['$.[0].status.revision']"
- "$HISTORY['initialize'].$RESPONSE['$.[0].status.revision']"
$.[*].status.bucket:
- mop
- mop
- mop
- mop
skip: Not implemented.
- name: update_single_document
desc: Update a single document, ignore other documents in the bucket
@ -112,48 +107,48 @@ tests:
- name: verify_update
desc: Verify updated document count and revisions
GET: /api/v1.0/revisions/$RESPONSE['$.documents[0].revision']/documents
GET: /api/v1.0/revisions/$RESPONSE['$.[0].revision']/documents
status: 200
response_multidoc_jsonpaths:
$.documents[*].metadata.name:
$.[*].metadata.name:
- layering-policy
- global-1234
- region-1234
- site-1234
$.documents[*].status.revision:
- "$HISTORY['initialize'].$RESPONSE['$.documents[0].revision']"
- "$HISTORY['initialize'].$RESPONSE['$.documents[0].revision']"
- "$HISTORY['initialize'].$RESPONSE['$.documents[0].revision']"
- "$RESPONSE['$.documents[0].revision']"
$.documents[*].status.bucket:
$.[*].status.revision:
- "$HISTORY['initialize'].$RESPONSE['$.[0].revision']"
- "$HISTORY['initialize'].$RESPONSE['$.[0].revision']"
- "$HISTORY['initialize'].$RESPONSE['$.[0].revision']"
- "$RESPONSE['$.[0].revision']"
$.[*].status.bucket:
- mop
- mop
- mop
- mop
$.documents[3].data.b: 5
$.[3].data.b: 5
skip: Not implemented.
- name: verify_initial_documents_preserved_after_update
desc: Verify initial documents count and revisions preserved after update
GET: /api/v1.0/revisions/$HISTORY['initialize'].$RESPONSE['$.documents[0].revision']/documents
GET: /api/v1.0/revisions/$HISTORY['initialize'].$RESPONSE['$.[0].revision']/documents
status: 200
response_multidoc_jsonpaths:
$.documents[*].metadata.name:
$.[*].metadata.name:
- layering-policy
- global-1234
- region-1234
- site-1234
$.documents[*].status.revision:
- "$HISTORY['initialize'].$RESPONSE['$.documents[0].revision']"
- "$HISTORY['initialize'].$RESPONSE['$.documents[0].revision']"
- "$HISTORY['initialize'].$RESPONSE['$.documents[0].revision']"
- "$HISTORY['initialize'].$RESPONSE['$.documents[0].revision']"
$.documents[*].status.bucket:
$.[*].status.revision:
- "$HISTORY['initialize'].$RESPONSE['$.[0].revision']"
- "$HISTORY['initialize'].$RESPONSE['$.[0].revision']"
- "$HISTORY['initialize'].$RESPONSE['$.[0].revision']"
- "$HISTORY['initialize'].$RESPONSE['$.[0].revision']"
$.[*].status.bucket:
- mop
- mop
- mop
- mop
$.documents[3].data.b: 4
$.[3].data.b: 4
skip: Not implemented.
- name: delete_document
@ -165,66 +160,66 @@ tests:
- name: verify_delete
desc: Verify document deletion
GET: /api/v1.0/revisions/$RESPONSE['$.documents[0].revision']/documents
GET: /api/v1.0/revisions/$RESPONSE['$.[0].revision']/documents
status: 200
response_multidoc_jsonpaths:
$.documents[*].status.revision:
- "$HISTORY['initialize'].$RESPONSE['$.documents[0].revision']"
- "$HISTORY['initialize'].$RESPONSE['$.documents[0].revision']"
- "$HISTORY['update_with_ignore'].$RESPONSE['$.documents[0].revision']"
$.documents[*].metadata.name:
$.[*].status.revision:
- "$HISTORY['initialize'].$RESPONSE['$.[0].revision']"
- "$HISTORY['initialize'].$RESPONSE['$.[0].revision']"
- "$HISTORY['update_with_ignore'].$RESPONSE['$.[0].revision']"
$.[*].metadata.name:
- layering-policy
- global-1234
- site-1234
$.documents[*].status.bucket:
$.[*].status.bucket:
- mop
- mop
- mop
$.documents[2].data.b: 5
$.[2].data.b: 5
skip: Not implemented.
- name: verify_initial_documents_preserved_after_delete
desc: Verify initial documents count and revisions
GET: /api/v1.0/revisions/$HISTORY['initialize'].$RESPONSE['$.documents[0].revision']/documents
GET: /api/v1.0/revisions/$HISTORY['initialize'].$RESPONSE['$.[0].revision']/documents
status: 200
response_multidoc_jsonpaths:
$.documents[*].metadata.name:
$.[*].metadata.name:
- layering-policy
- global-1234
- region-1234
- site-1234
$.documents[*].status.revision:
- "$HISTORY['initialize'].$RESPONSE['$.documents[0].revision']"
- "$HISTORY['initialize'].$RESPONSE['$.documents[0].revision']"
- "$HISTORY['initialize'].$RESPONSE['$.documents[0].revision']"
- "$HISTORY['initialize'].$RESPONSE['$.documents[0].revision']"
$.documents[*].status.bucket:
$.[*].status.revision:
- "$HISTORY['initialize'].$RESPONSE['$.[0].revision']"
- "$HISTORY['initialize'].$RESPONSE['$.[0].revision']"
- "$HISTORY['initialize'].$RESPONSE['$.[0].revision']"
- "$HISTORY['initialize'].$RESPONSE['$.[0].revision']"
$.[*].status.bucket:
- mop
- mop
- mop
- mop
$.documents[3].data.b: 4
$.[3].data.b: 4
skip: Not implemented.
- name: verify_updated_documents_preserved_after_delete
desc: Verify updated documents count and revisions preserved after delete
GET: /api/v1.0/revisions/$HISTORY['update_with_ignore'].$RESPONSE['$.documents[0].revision']/documents
GET: /api/v1.0/revisions/$HISTORY['update_with_ignore'].$RESPONSE['$.[0].revision']/documents
status: 200
response_multidoc_jsonpaths:
$.documents[*].metadata.name:
$.[*].metadata.name:
- layering-policy
- global-1234
- region-1234
- site-1234
$.documents[*].status.revision:
- "$HISTORY['initialize'].$RESPONSE['$.documents[0].revision']"
- "$HISTORY['initialize'].$RESPONSE['$.documents[0].revision']"
- "$HISTORY['initialize'].$RESPONSE['$.documents[0].revision']"
- "$HISTORY['update_with_ignore'].$RESPONSE['$.documents[0].revision']"
$.documents[*].status.bucket:
$.[*].status.revision:
- "$HISTORY['initialize'].$RESPONSE['$.[0].revision']"
- "$HISTORY['initialize'].$RESPONSE['$.[0].revision']"
- "$HISTORY['initialize'].$RESPONSE['$.[0].revision']"
- "$HISTORY['update_with_ignore'].$RESPONSE['$.[0].revision']"
$.[*].status.bucket:
- mop
- mop
- mop
- mop
$.documents[3].data.b: 5
$.[3].data.b: 5
skip: Not implemented.

View File

@ -1,13 +0,0 @@
defaults:
request_headers:
content-type: application/x-yaml
response_headers:
content-type: application/x-yaml
tests:
- name: placeholder
desc: |
There must be at least one passing test to make the test harness happy.
This should be removed as soon as there is a passing functional test.
GET: /api/v1.0/revisions
status: 200

View File

@ -0,0 +1,60 @@
# Tests the following:
#
# 1) Creation of a single document
# 2) Implicit creation of a revision
# 3) Verify revision was created
# 4) Delete the revision
# 5) Verify the revision was deleted
defaults:
request_headers:
content-type: application/x-yaml
response_headers:
content-type: application/x-yaml
tests:
- name: purge
desc: Begin testing from known state.
DELETE: /api/v1.0/revisions
status: 204
# Validates whether creating a revision works.
# Required parameters:
# body: Any document payload.
# Asserts that status code and response headers are correct.
- name: initialize
desc: Create initial documents
PUT: /api/v1.0/bucket/mop/documents
status: 200
data: <@resources/design-doc-layering-sample.yaml
# Validates whether revision was created.
# Required parameters:
# path: revision_id.
# Asserts that status code and response headers are correct.
- name: verify_revision_created
desc: Verify that revision was created for document above
GET: /api/v1.0/revisions/$RESPONSE['$.[0].status.revision']
status: 200
# Validates whether revision deletion works.
# Required parameters:
# path: revision_id.
- name: purge_created_revision
desc: Begin testing from known state.
DELETE: /api/v1.0/revisions
status: 204
# Validates whether revision was deleted.
# Required parameters:
# path: revision_id.
# Asserts that 404 is thrown when trying to retrieve deleted revision.
- name: verify_revision_deleted
desc: Verify that the revision was deleted
GET: /api/v1.0/revisions/$HISTORY['initialize'].$RESPONSE['$.[0].status.revision']
status: 404
response_headers:
# Deckhand exceptions return the following content-type header by
# default. TODO(fmontei): Override that later.
content-type: 'application/json; charset=UTF-8'

View File

@ -40,7 +40,7 @@ class MultidocJsonpaths(gabbi.handlers.jsonhandler.JSONHandler):
@staticmethod
def loads(string):
return {'documents': list(yaml.safe_load_all(string))}
return list(yaml.safe_load_all(string))
def load_tests(loader, tests, pattern):

View File

@ -15,8 +15,8 @@
import mock
from deckhand.control import api
from deckhand.control import base as api_base
from deckhand.control import documents
from deckhand.control import base
from deckhand.control import buckets
from deckhand.control import revision_documents
from deckhand.control import revisions
from deckhand.control import secrets
@ -27,7 +27,7 @@ class TestApi(test_base.DeckhandTestCase):
def setUp(self):
super(TestApi, self).setUp()
for resource in (documents, revision_documents, revisions, secrets):
for resource in (buckets, revision_documents, revisions, secrets):
resource_name = resource.__name__.split('.')[-1]
resource_obj = mock.patch.object(
resource, '%sResource' % resource_name.title().replace(
@ -45,9 +45,10 @@ class TestApi(test_base.DeckhandTestCase):
self.assertEqual(mock_falcon_api, result)
mock_falcon.API.assert_called_once_with(
request_type=api_base.DeckhandRequest)
request_type=base.DeckhandRequest, middleware=[mock.ANY])
mock_falcon_api.add_route.assert_has_calls([
mock.call('/api/v1.0/documents', self.documents_resource()),
mock.call('/api/v1.0/bucket/{bucket_name}/documents',
self.buckets_resource()),
mock.call('/api/v1.0/revisions', self.revisions_resource()),
mock.call('/api/v1.0/revisions/{revision_id}',
self.revisions_resource()),

View File

@ -20,7 +20,7 @@ from deckhand.tests.unit import base
BASE_EXPECTED_FIELDS = ("created_at", "updated_at", "deleted_at", "deleted")
DOCUMENT_EXPECTED_FIELDS = BASE_EXPECTED_FIELDS + (
"id", "schema", "name", "metadata", "data", "revision_id")
"id", "schema", "name", "metadata", "data", "revision_id", "bucket_id")
REVISION_EXPECTED_FIELDS = BASE_EXPECTED_FIELDS + (
"id", "documents", "validation_policies")
@ -54,7 +54,8 @@ class DocumentFixture(object):
class TestDbBase(base.DeckhandWithDBTestCase):
def _create_documents(self, documents, validation_policies=None):
def create_documents(self, bucket_name, documents,
validation_policies=None):
if not validation_policies:
validation_policies = []
@ -63,28 +64,41 @@ class TestDbBase(base.DeckhandWithDBTestCase):
if not isinstance(validation_policies, list):
validation_policies = [validation_policies]
docs = db_api.documents_create(documents, validation_policies)
docs = db_api.documents_create(
bucket_name, documents, validation_policies)
for idx, doc in enumerate(docs):
self._validate_document(expected=documents[idx], actual=doc)
self.validate_document(expected=documents[idx], actual=doc)
self.assertEqual(bucket_name, doc['bucket_id'])
return docs
def _get_document(self, **fields):
def show_document(self, do_validation=True, **fields):
doc = db_api.document_get(**fields)
self._validate_document(actual=doc)
if do_validation:
self.validate_document(actual=doc)
return doc
def _get_revision(self, revision_id):
def delete_document(self, document_id):
return db_api.document_delete(document_id)
def show_revision(self, revision_id):
revision = db_api.revision_get(revision_id)
self._validate_revision(revision)
self.validate_revision(revision)
return revision
def _get_revision_documents(self, revision_id, **filters):
def delete_revisions(self):
return db_api.revision_delete_all()
def list_revision_documents(self, revision_id, **filters):
documents = db_api.revision_get_documents(revision_id, **filters)
for document in documents:
self._validate_document(document)
self.validate_document(document)
return documents
def _list_revisions(self):
def list_revisions(self):
return db_api.revision_get_all()
def _validate_object(self, obj):
@ -95,7 +109,7 @@ class TestDbBase(base.DeckhandWithDBTestCase):
else:
self.assertIsInstance(obj[attr], bool)
def _validate_document(self, actual, expected=None, is_deleted=False):
def validate_document(self, actual, expected=None, is_deleted=False):
self._validate_object(actual)
# Validate that the document has all expected fields and is a dict.
@ -113,7 +127,7 @@ class TestDbBase(base.DeckhandWithDBTestCase):
for key, val in expected.items():
self.assertEqual(val, actual[key])
def _validate_revision(self, revision):
def validate_revision(self, revision):
self._validate_object(revision)
for attr in REVISION_EXPECTED_FIELDS:

View File

@ -12,64 +12,71 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from deckhand.tests import test_utils
from deckhand.tests.unit.db import base
class TestDocuments(base.TestDbBase):
def test_create_and_get_document(self):
def test_create_and_show_bucket(self):
payload = base.DocumentFixture.get_minimal_fixture()
documents = self._create_documents(payload)
bucket_name = test_utils.rand_name('bucket')
documents = self.create_documents(bucket_name, payload)
self.assertIsInstance(documents, list)
self.assertEqual(1, len(documents))
retrieved_document = self._get_document(id=documents[0]['id'])
retrieved_document = self.show_document(id=documents[0]['id'])
self.assertEqual(documents[0], retrieved_document)
def test_create_document_again_with_no_changes(self):
def test_create_document_conflict(self):
payload = base.DocumentFixture.get_minimal_fixture()
self._create_documents(payload)
documents = self._create_documents(payload)
bucket_name = test_utils.rand_name('bucket')
self.create_documents(bucket_name, payload)
documents = self.create_documents(bucket_name, payload)
self.assertIsInstance(documents, list)
self.assertEmpty(documents)
def test_create_document_and_get_revision(self):
def test_create_document_and_show_revision(self):
payload = base.DocumentFixture.get_minimal_fixture()
documents = self._create_documents(payload)
bucket_name = test_utils.rand_name('bucket')
documents = self.create_documents(bucket_name, payload)
self.assertIsInstance(documents, list)
self.assertEqual(1, len(documents))
for document in documents:
revision = self._get_revision(document['revision_id'])
self._validate_revision(revision)
revision = self.show_revision(document['revision_id'])
self.validate_revision(revision)
self.assertEqual(document['revision_id'], revision['id'])
def test_get_documents_by_revision_id(self):
def test_list_documents_by_revision_id(self):
payload = base.DocumentFixture.get_minimal_fixture()
documents = self._create_documents(payload)
bucket_name = test_utils.rand_name('bucket')
documents = self.create_documents(bucket_name, payload)
revision = self._get_revision(documents[0]['revision_id'])
revision = self.show_revision(documents[0]['revision_id'])
self.assertEqual(1, len(revision['documents']))
self.assertEqual(documents[0], revision['documents'][0])
def test_get_multiple_documents_by_revision_id(self):
def test_list_multiple_documents_by_revision_id(self):
payload = base.DocumentFixture.get_minimal_multi_fixture(count=3)
documents = self._create_documents(payload)
bucket_name = test_utils.rand_name('bucket')
documents = self.create_documents(bucket_name, payload)
self.assertIsInstance(documents, list)
self.assertEqual(3, len(documents))
for document in documents:
revision = self._get_revision(document['revision_id'])
self._validate_revision(revision)
revision = self.show_revision(document['revision_id'])
self.validate_revision(revision)
self.assertEqual(document['revision_id'], revision['id'])
def test_get_documents_by_revision_id_and_filters(self):
def test_list_documents_by_revision_id_and_filters(self):
payload = base.DocumentFixture.get_minimal_fixture()
document = self._create_documents(payload)[0]
bucket_name = test_utils.rand_name('bucket')
document = self.create_documents(bucket_name, payload)[0]
filters = {
'schema': document['schema'],
'metadata.name': document['metadata']['name'],
@ -80,7 +87,7 @@ class TestDocuments(base.TestDbBase):
'metadata.label': document['metadata']['label']
}
documents = self._get_revision_documents(
documents = self.list_revision_documents(
document['revision_id'], **filters)
self.assertEqual(1, len(documents))
self.assertEqual(document, documents[0])

View File

@ -12,6 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from deckhand import errors
from deckhand.tests import test_utils
from deckhand.tests.unit.db import base
@ -19,7 +21,8 @@ class TestDocumentsNegative(base.TestDbBase):
def test_get_documents_by_revision_id_and_wrong_filters(self):
payload = base.DocumentFixture.get_minimal_fixture()
document = self._create_documents(payload)[0]
bucket_name = test_utils.rand_name('bucket')
document = self.create_documents(bucket_name, payload)[0]
filters = {
'schema': 'fake_schema',
'metadata.name': 'fake_meta_name',
@ -29,11 +32,17 @@ class TestDocumentsNegative(base.TestDbBase):
'metadata.label': 'fake_label'
}
documents = self._get_revision_documents(
documents = self.list_revision_documents(
document['revision_id'], **filters)
self.assertEmpty(documents)
for filter_key, filter_val in filters.items():
documents = self._get_revision_documents(
documents = self.list_revision_documents(
document['revision_id'], filter_key=filter_val)
self.assertEmpty(documents)
def test_delete_document_invalid_id(self):
    """Looking up a document by a nonexistent ID raises DocumentNotFound."""
    nonexistent_id = test_utils.rand_uuid_hex()
    with self.assertRaises(errors.DocumentNotFound):
        # Skip field validation: the lookup is expected to fail outright.
        self.show_document(do_validation=False, document_id=nonexistent_id)

View File

@ -12,7 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from deckhand import errors
from deckhand import factories
from deckhand.tests import test_utils
from deckhand.tests.unit.db import base
from deckhand import types
@ -22,9 +24,10 @@ class TestRevisions(base.TestDbBase):
def test_list(self):
documents = [base.DocumentFixture.get_minimal_fixture()
for _ in range(4)]
self._create_documents(documents)
bucket_name = test_utils.rand_name('bucket')
self.create_documents(bucket_name, documents)
revisions = self._list_revisions()
revisions = self.list_revisions()
self.assertIsInstance(revisions, list)
self.assertEqual(1, len(revisions))
self.assertEqual(4, len(revisions[0]['documents']))
@ -35,10 +38,40 @@ class TestRevisions(base.TestDbBase):
vp_factory = factories.ValidationPolicyFactory()
validation_policy = vp_factory.gen(types.DECKHAND_SCHEMA_VALIDATION,
'success')
self._create_documents(documents, [validation_policy])
bucket_name = test_utils.rand_name('bucket')
self.create_documents(bucket_name, documents, [validation_policy])
revisions = self._list_revisions()
revisions = self.list_revisions()
self.assertIsInstance(revisions, list)
self.assertEqual(1, len(revisions))
self.assertEqual(4, len(revisions[0]['documents']))
self.assertEqual(1, len(revisions[0]['validation_policies']))
def test_delete_all(self):
    """Verify that ``delete_revisions`` purges all revisions and documents.

    Creates documents across three distinct buckets (each implicitly
    creating a revision), deletes all revisions, then asserts that every
    revision and every created document is gone.
    """
    all_created_documents = []
    all_revision_ids = []

    for _ in range(3):
        document_payload = [base.DocumentFixture.get_minimal_fixture()
                            for _ in range(3)]
        bucket_name = test_utils.rand_name('bucket')
        created_documents = self.create_documents(
            bucket_name, document_payload)
        all_created_documents.extend(created_documents)
        revision_id = created_documents[0]['revision_id']
        all_revision_ids.append(revision_id)

    self.delete_revisions()

    # Validate that all revisions were deleted.
    for revision_id in all_revision_ids:
        error_re = 'The requested revision %s was not found.' % revision_id
        self.assertRaisesRegex(errors.RevisionNotFound, error_re,
                               self.show_revision, revision_id)

    # Validate that the documents (children) from *every* bucket were
    # deleted — iterate the accumulated list, not just the documents
    # created in the last loop iteration.
    for doc in all_created_documents:
        filters = {'id': doc['id']}
        error_re = 'The requested document %s was not found.' % filters
        self.assertRaisesRegex(errors.DocumentNotFound, error_re,
                               self.show_document, **filters)

View File

@ -66,7 +66,6 @@ class TestDocumentValidationNegative(
self._read_data('sample_document')
properties_to_remove = self.BASIC_ATTRS + (
'metadata.layeringDefinition',
'metadata.layeringDefinition.abstract',
'metadata.layeringDefinition.layer',
'metadata.layeringDefinition.actions.0.method',
'metadata.layeringDefinition.actions.0.path',

View File

@ -12,10 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_utils import uuidutils
from deckhand.control.views import document
from deckhand import factories
from deckhand.tests import test_utils
from deckhand.tests.unit.db import base
@ -31,17 +30,19 @@ class TestRevisionViews(base.TestDbBase):
# created specified by `count`.
payload = [base.DocumentFixture.get_minimal_fixture()
for _ in range(count)]
created_documents = self._create_documents(payload)
bucket_name = test_utils.rand_name('bucket')
created_documents = self.create_documents(bucket_name, payload)
document_view = self.view_builder.list(created_documents)
expected_attrs = ('revision_id', 'documents')
for attr in expected_attrs:
self.assertIn(attr, document_view)
self.assertIsInstance(document_view, list)
self.assertEqual(count, len(document_view))
self.assertTrue(uuidutils.is_uuid_like(document_view['revision_id']))
self.assertEqual(count, len(document_view['documents']))
for doc_id in document_view['documents']:
self.assertTrue(uuidutils.is_uuid_like(doc_id))
expected_attrs = ('id', 'status', 'metadata', 'data', 'schema')
for idx in range(count):
for attr in expected_attrs:
self.assertIn(attr, document_view[idx])
for attr in ('bucket', 'revision'):
self.assertIn(attr, document_view[idx]['status'])
def test_create_single_document(self):
self._test_document_creation_view(1)

View File

@ -29,8 +29,9 @@ class TestRevisionViews(base.TestDbBase):
def test_list_revisions_with_multiple_documents(self):
payload = [base.DocumentFixture.get_minimal_fixture()
for _ in range(4)]
self._create_documents(payload)
revisions = self._list_revisions()
bucket_name = test_utils.rand_name('bucket')
self.create_documents(bucket_name, payload)
revisions = self.list_revisions()
revisions_view = self.view_builder.list(revisions)
expected_attrs = ('results', 'count')
@ -51,8 +52,9 @@ class TestRevisionViews(base.TestDbBase):
payload = [base.DocumentFixture.get_minimal_fixture()
for _ in range(doc_count)]
self._create_documents(payload)
revisions = self._list_revisions()
bucket_name = test_utils.rand_name('bucket')
self.create_documents(bucket_name, payload)
revisions = self.list_revisions()
revisions_view = self.view_builder.list(revisions)
expected_attrs = ('results', 'count')
@ -71,13 +73,14 @@ class TestRevisionViews(base.TestDbBase):
def test_show_revision(self):
payload = [base.DocumentFixture.get_minimal_fixture()
for _ in range(4)]
documents = self._create_documents(payload)
bucket_name = test_utils.rand_name('bucket')
documents = self.create_documents(bucket_name, payload)
# Validate that each document points to the same revision.
revision_ids = set([d['revision_id'] for d in documents])
self.assertEqual(1, len(revision_ids))
revision = self._get_revision(documents[0]['revision_id'])
revision = self.show_revision(documents[0]['revision_id'])
revision_view = self.view_builder.show(revision)
expected_attrs = ('id', 'url', 'createdAt', 'validationPolicies',
@ -96,9 +99,10 @@ class TestRevisionViews(base.TestDbBase):
validation_policy = self.factory.gen(types.DECKHAND_SCHEMA_VALIDATION,
status='success')
payload.append(validation_policy)
documents = self._create_documents(payload)
bucket_name = test_utils.rand_name('bucket')
documents = self.create_documents(bucket_name, payload)
revision = self._get_revision(documents[0]['revision_id'])
revision = self.show_revision(documents[0]['revision_id'])
revision_view = self.view_builder.show(revision)
expected_attrs = ('id', 'url', 'createdAt', 'validationPolicies',
@ -123,9 +127,10 @@ class TestRevisionViews(base.TestDbBase):
validation_policy = self.factory.gen(types.DECKHAND_SCHEMA_VALIDATION,
status='failed')
payload.append(validation_policy)
documents = self._create_documents(payload)
bucket_name = test_utils.rand_name('bucket')
documents = self.create_documents(bucket_name, payload)
revision = self._get_revision(documents[0]['revision_id'])
revision = self.show_revision(documents[0]['revision_id'])
revision_view = self.view_builder.show(revision)
expected_attrs = ('id', 'url', 'createdAt', 'validationPolicies',