Add PostgreSQL compatibility to Deckhand

Deckhand is not currently fully compatible with PostgreSQL, as it
uses SQLite for all of its testing, including functional testing.
Since PostgreSQL will be used in production, Deckhand must support
it in addition to SQLite, which is still needed for unit testing.

This commit alters the functional testing script to use PostgreSQL
and makes the necessary back-end changes to support it.

Included in this commit:

  - alters tools/functional-tests.sh so that it uses PostgreSQL
    as the database connection
  - changes the primary key of the Bucket DB model from a String
    to an Integer (a consolidated sketch of the resulting models
    follows this list)
  - updates the foreign key on the Document DB model to point to
    the new primary key
  - updates the necessary integration logic so that the bucket name
    is still known to the Document DB model and returned in the
    appropriate response bodies
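
For reference, the net effect of the model changes is summarized in the
simplified sketch below. It assumes a bare declarative base and keeps only
the columns relevant to this change; the real models also mix in
DeckhandBase and define additional columns.

# Minimal sketch of the reworked models, assuming a plain declarative base.
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import relationship

BASE = declarative_base()


class Bucket(BASE):
    __tablename__ = 'buckets'

    # Integer surrogate key replaces the old String(36) primary key.
    id = Column(Integer, primary_key=True)
    name = Column(String(36), unique=True)
    documents = relationship("Document", backref="bucket")


class Document(BASE):
    __tablename__ = 'documents'

    id = Column(Integer, primary_key=True)
    # The foreign key now references buckets.id instead of buckets.name.
    bucket_id = Column(Integer, ForeignKey('buckets.id', ondelete='CASCADE'),
                       nullable=False)

    @hybrid_property
    def bucket_name(self):
        # The bucket name remains reachable through the 'bucket' backref.
        if hasattr(self, 'bucket') and self.bucket:
            return self.bucket.name
        return None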

Change-Id: I7bc806fb18f7b47c13978dcd806d422a573a06b3
Felipe Monteiro 2017-09-18 20:11:32 +01:00
parent 0cd5d45706
commit af0bfd813d
7 changed files with 35 additions and 28 deletions

View File

@@ -34,7 +34,7 @@ class ViewBuilder(common.ViewBuilder):
         # need to return bucket_id and revision_id.
         if len(documents) == 1 and documents[0]['deleted']:
             resp_obj = {'status': {}}
-            resp_obj['status']['bucket'] = documents[0]['bucket_id']
+            resp_obj['status']['bucket'] = documents[0]['bucket_name']
             resp_obj['status']['revision'] = documents[0]['revision_id']
             return [resp_obj]
@@ -47,7 +47,7 @@ class ViewBuilder(common.ViewBuilder):
             resp_obj = {x: document[x] for x in attrs}
             resp_obj.setdefault('status', {})
-            resp_obj['status']['bucket'] = document['bucket_id']
+            resp_obj['status']['bucket'] = document['bucket_name']
             resp_obj['status']['revision'] = document['revision_id']
             resp_list.append(resp_obj)
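
For illustration, the 'status' portion of each response entry built above now
reports the bucket by name; the values here are hypothetical:

# Hypothetical entry appended to resp_list after this change.
resp_obj = {
    'status': {
        'bucket': 'mop',   # document['bucket_name'] rather than the integer FK
        'revision': 1,     # document['revision_id']
    },
}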

View File

@@ -37,7 +37,7 @@ class ViewBuilder(common.ViewBuilder):
             body['tags'].update([t['tag'] for t in revision['tags']])
             body['buckets'].update(
-                [d['bucket_id'] for d in rev_documents])
+                [d['bucket_name'] for d in rev_documents])
         body['tags'] = sorted(body['tags'])
         body['buckets'] = sorted(body['buckets'])
@@ -77,7 +77,8 @@ class ViewBuilder(common.ViewBuilder):
         for tag in revision['tags']:
             tags.setdefault(tag['tag'], {'name': tag['tag']})
-        buckets = sorted(set([d['bucket_id'] for d in revision['documents']]))
+        buckets = sorted(
+            set([d['bucket_name'] for d in revision['documents']]))
         return {
             'id': revision.get('id'),

View File

@@ -114,7 +114,7 @@ def documents_create(bucket_name, documents, session=None):
     # `documents`: the difference between the former and the latter.
     document_history = [(d['schema'], d['name'])
                         for d in revision_get_documents(
-                            bucket_id=bucket_name)]
+                            bucket_name=bucket_name)]
     documents_to_delete = [
         h for h in document_history if h not in
         [(d['schema'], d['metadata']['name']) for d in documents]]
@@ -136,7 +136,7 @@ def documents_create(bucket_name, documents, session=None):
             doc['name'] = d[1]
             doc['data'] = {}
             doc['_metadata'] = {}
-            doc['bucket_id'] = bucket['name']
+            doc['bucket_id'] = bucket['id']
             doc['revision_id'] = revision['id']
             # Save and mark the document as `deleted` in the database.
@@ -151,9 +151,10 @@ def documents_create(bucket_name, documents, session=None):
            [(d['schema'], d['name']) for d in documents_to_create])
        for doc in documents_to_create:
            with session.begin():
-                doc['bucket_id'] = bucket['name']
+                doc['bucket_id'] = bucket['id']
                 doc['revision_id'] = revision['id']
                 doc.save(session=session)
     # NOTE(fmontei): The orig_revision_id is not copied into the
     # revision_id for each created document, because the revision_id here
     # should reference the just-created revision. In case the user needs
@@ -200,12 +201,12 @@ def _documents_create(bucket_name, values_list, session=None):
         # If the document already exists in another bucket, raise an error.
         # Ignore redundant validation policies as they are allowed to exist
         # in multiple buckets.
-        if (existing_document['bucket_id'] != bucket_name and
+        if (existing_document['bucket_name'] != bucket_name and
                 existing_document['schema'] != types.VALIDATION_POLICY_SCHEMA):
             raise errors.DocumentExists(
                 schema=existing_document['schema'],
                 name=existing_document['name'],
-                bucket=existing_document['bucket_id'])
+                bucket=existing_document['bucket_name'])
         if not _document_changed(existing_document):
             # Since the document has not changed, reference the original

View File

@@ -19,6 +19,7 @@ from sqlalchemy import Boolean
 from sqlalchemy import Column
 from sqlalchemy import DateTime
 from sqlalchemy.ext import declarative
+from sqlalchemy.ext.hybrid import hybrid_property
 from sqlalchemy import ForeignKey
 from sqlalchemy import Integer
 from sqlalchemy.orm import relationship
@@ -64,10 +65,8 @@ class DeckhandBase(models.ModelBase, models.TimestampMixin):
     def items(self):
         return self.__dict__.items()
-    def to_dict(self, raw_dict=False):
+    def to_dict(self):
         """Convert the object into dictionary format.
-
-        :param raw_dict: Renames the key "_metadata" to "metadata".
         """
         d = self.__dict__.copy()
         # Remove private state instance, as it is not serializable and causes
@@ -93,8 +92,9 @@ def gen_unique_constraint(table_name, *fields):
 class Bucket(BASE, DeckhandBase):
     __tablename__ = 'buckets'
-    name = Column(String(36), primary_key=True)
-    documents = relationship("Document")
+    id = Column(Integer, primary_key=True)
+    name = Column(String(36), unique=True)
+    documents = relationship("Document", backref="bucket")
 class Revision(BASE, DeckhandBase):
@@ -141,8 +141,7 @@ class Document(BASE, DeckhandBase):
     _metadata = Column(oslo_types.JsonEncodedDict(), nullable=False)
     data = Column(oslo_types.JsonEncodedDict(), nullable=True)
     is_secret = Column(Boolean, nullable=False, default=False)
-    bucket_id = Column(Integer, ForeignKey('buckets.name', ondelete='CASCADE'),
+    bucket_id = Column(Integer, ForeignKey('buckets.id', ondelete='CASCADE'),
                        nullable=False)
     revision_id = Column(
         Integer, ForeignKey('revisions.id', ondelete='CASCADE'),
@@ -160,11 +159,23 @@ class Document(BASE, DeckhandBase):
         Integer, ForeignKey('revisions.id', ondelete='CASCADE'),
         nullable=True)
+    @hybrid_property
+    def bucket_name(self):
+        if hasattr(self, 'bucket') and self.bucket:
+            return self.bucket.name
+        return None
     def to_dict(self, raw_dict=False):
+        """Convert the object into dictionary format.
+
+        :param raw_dict: Renames the key "_metadata" to "metadata".
+        """
         d = super(Document, self).to_dict()
+        d['bucket_name'] = self.bucket_name
         # NOTE(fmontei): ``metadata`` is reserved by the DB, so ``_metadata``
         # must be used to store document metadata information in the DB.
-        if not raw_dict and '_metadata' in self.keys():
+        if not raw_dict:
             d['metadata'] = d.pop('_metadata')
         return d
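
A brief usage sketch (reusing the simplified BASE/Bucket/Document models from
the sketch near the top of this page and an in-memory SQLite engine) of how
the backref and hybrid property resolve the bucket name while only the integer
foreign key is stored on the document row:

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

# Assumes the simplified BASE/Bucket/Document models sketched earlier.
engine = create_engine('sqlite://')
BASE.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

bucket = Bucket(name='mop')
doc = Document(bucket=bucket)    # backref wires doc.bucket / bucket.documents
session.add_all([bucket, doc])
session.commit()

print(doc.bucket_id)             # integer FK written to the documents table
print(doc.bucket_name)           # 'mop', resolved through the relationship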

View File

@@ -69,7 +69,7 @@ class TestDbBase(base.DeckhandWithDBTestCase):
         if do_validation:
             for idx, doc in enumerate(docs):
                 self.validate_document(expected=documents[idx], actual=doc)
-                self.assertEqual(bucket_name, doc['bucket_id'])
+                self.assertEqual(bucket_name, doc['bucket_name'])
         return docs

View File

@@ -18,6 +18,8 @@ stevedore>=1.20.0 # Apache-2.0
 jsonschema!=2.5.0,<3.0.0,>=2.0.0 # MIT
 python-keystoneclient>=3.8.0 # Apache-2.0
 keystonemiddleware>=4.12.0 # Apache-2.0
+psycopg2==2.7.3.1
+uwsgi==2.0.15
 oslo.cache>=1.5.0 # Apache-2.0
 oslo.concurrency>=3.8.0 # Apache-2.0
@@ -33,4 +35,3 @@ oslo.serialization!=2.19.1,>=1.10.0 # Apache-2.0
 oslo.utils>=3.20.0 # Apache-2.0
 python-barbicanclient>=4.0.0 # Apache-2.0
-uwsgi==2.0.15

View File

@@ -34,17 +34,12 @@ POSTGRES_IP=$(
     --format='{{ .NetworkSettings.Networks.bridge.IPAddress }}' \
     $POSTGRES_ID
 )
-POSTGRES_PORT=$(
-    sudo docker inspect \
-        --format='{{(index (index .NetworkSettings.Ports "5432/tcp") 0).HostPort}}' \
-        $POSTGRES_ID
-)
 log_section Creating config file
 CONF_DIR=$(mktemp -d)
 export DECKHAND_TEST_URL=http://localhost:9000
-export DATABASE_URL=postgres://deckhand:password@$POSTGRES_IP:$POSTGRES_PORT/deckhand
+export DATABASE_URL=postgresql+psycopg2://deckhand:password@$POSTGRES_IP:5432/deckhand
 # Used by Deckhand's initialization script to search for config files.
 export OS_DECKHAND_CONFIG_DIR=$CONF_DIR
@@ -61,9 +56,7 @@ use_stderr = true
 [barbican]
 [database]
-# XXX For now, connection to postgres is not setup.
-#connection = $DATABASE_URL
-connection = sqlite://
+connection = $DATABASE_URL
 [keystone_authtoken]
 EOCONF
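
Note that the new DATABASE_URL uses SQLAlchemy's dialect+driver form,
postgresql+psycopg2, so the same string can be handed directly to
SQLAlchemy/oslo.db. A minimal sketch with hypothetical credentials and host:

from sqlalchemy import create_engine

# Hypothetical credentials/host mirroring the script above; the
# 'postgresql+psycopg2' prefix selects the psycopg2 driver pinned in
# requirements.txt. create_engine() is lazy and only connects on first use.
url = 'postgresql+psycopg2://deckhand:password@127.0.0.1:5432/deckhand'
engine = create_engine(url)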