Allow unit tests to be run against in-memory sqlite
Recently JSONB replaced a back-end agnostic data type for the "data" column in the Document model. This made it necessary to drop support for running Deckhand unit tests with any other database store. However, this arrangement is undesirable, as a user shouldn't need to have postgresql installed just to kick off unit tests. So, this PS re-adds support for running unit tests via an in-memory sqlite database. To run unit tests with sqlite: tox -e py35 Unit tests still run against postgresql via: tox -e py35-postgresql Both jobs are executed in CICD already. This PS also updates the remaining DB columns to use JSONB if postgresql is enabled; else fallback columns are used for testing with sqlite. This is a necessary change to make the column data types consistent. Change-Id: I951f2f04fd013d635bb7653a238ff1eb3725b5e1
This commit is contained in:
parent
e4abca1cd7
commit
b0c2f1c4e2
|
@ -2,6 +2,6 @@
|
||||||
test_command=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} \
|
test_command=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} \
|
||||||
OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} \
|
OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} \
|
||||||
OS_TEST_TIMEOUT=${OS_TEST_TIMEOUT:-60} \
|
OS_TEST_TIMEOUT=${OS_TEST_TIMEOUT:-60} \
|
||||||
${PYTHON:-python} -m subunit.run discover -t ./ ${OS_TEST_PATH:-./deckhand/tests} $LISTOPT $IDOPTION
|
${PYTHON:-python} -m subunit.run discover -t ./ ${OS_TEST_PATH:-./deckhand/tests/unit} $LISTOPT $IDOPTION
|
||||||
test_id_option=--load-list $IDFILE
|
test_id_option=--load-list $IDFILE
|
||||||
test_list_option=--list
|
test_list_option=--list
|
||||||
|
|
|
@ -64,7 +64,7 @@ def init_application():
|
||||||
paste_file)
|
paste_file)
|
||||||
|
|
||||||
db_api.drop_db()
|
db_api.drop_db()
|
||||||
db_api.setup_db()
|
db_api.setup_db(CONF.database.connection)
|
||||||
|
|
||||||
app = deploy.loadapp('config:%s' % paste_file, name='deckhand_api')
|
app = deploy.loadapp('config:%s' % paste_file, name='deckhand_api')
|
||||||
return app
|
return app
|
||||||
|
|
|
@ -82,10 +82,8 @@ def drop_db():
|
||||||
models.unregister_models(get_engine())
|
models.unregister_models(get_engine())
|
||||||
|
|
||||||
|
|
||||||
def setup_db():
|
def setup_db(connection_string):
|
||||||
# Ensure the DB doesn't exist before creation.
|
models.register_models(get_engine(), connection_string)
|
||||||
drop_db()
|
|
||||||
models.register_models(get_engine())
|
|
||||||
|
|
||||||
|
|
||||||
def raw_query(query, **kwargs):
|
def raw_query(query, **kwargs):
|
||||||
|
@ -831,6 +829,9 @@ def revision_tag_create(revision_id, tag, data=None, session=None):
|
||||||
session = session or get_session()
|
session = session or get_session()
|
||||||
tag_model = models.RevisionTag()
|
tag_model = models.RevisionTag()
|
||||||
|
|
||||||
|
if data is None:
|
||||||
|
data = {}
|
||||||
|
|
||||||
if data and not isinstance(data, dict):
|
if data and not isinstance(data, dict):
|
||||||
raise errors.RevisionTagBadFormat(data=data)
|
raise errors.RevisionTagBadFormat(data=data)
|
||||||
|
|
||||||
|
|
|
@ -12,25 +12,31 @@
|
||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
import sys
|
||||||
|
|
||||||
from oslo_db.sqlalchemy import models
|
from oslo_db.sqlalchemy import models
|
||||||
from oslo_db.sqlalchemy import types as oslo_types
|
from oslo_db.sqlalchemy import types as oslo_types
|
||||||
|
from oslo_log import log as logging
|
||||||
from oslo_utils import timeutils
|
from oslo_utils import timeutils
|
||||||
from sqlalchemy import Boolean
|
from sqlalchemy import Boolean
|
||||||
from sqlalchemy import Column
|
from sqlalchemy import Column
|
||||||
from sqlalchemy import DateTime
|
from sqlalchemy import DateTime
|
||||||
from sqlalchemy.dialects.postgresql import JSONB
|
from sqlalchemy.dialects.postgresql import JSONB
|
||||||
|
|
||||||
from sqlalchemy.ext import declarative
|
from sqlalchemy.ext import declarative
|
||||||
from sqlalchemy.ext.hybrid import hybrid_property
|
from sqlalchemy.ext.hybrid import hybrid_property
|
||||||
from sqlalchemy import ForeignKey
|
from sqlalchemy import ForeignKey
|
||||||
from sqlalchemy import Integer
|
from sqlalchemy import Integer
|
||||||
from sqlalchemy.orm import relationship
|
from sqlalchemy.orm import relationship
|
||||||
from sqlalchemy import String
|
from sqlalchemy import String
|
||||||
|
from sqlalchemy.types import PickleType
|
||||||
from sqlalchemy import UniqueConstraint
|
from sqlalchemy import UniqueConstraint
|
||||||
|
|
||||||
|
LOG = logging.getLogger(__name__)
|
||||||
|
|
||||||
# Declarative base class which maintains a catalog of classes and tables
|
# Declarative base class which maintains a catalog of classes and tables
|
||||||
# relative to that base.
|
# relative to that base.
|
||||||
BASE = declarative.declarative_base()
|
BASE = None
|
||||||
|
|
||||||
|
|
||||||
class DeckhandBase(models.ModelBase, models.TimestampMixin):
|
class DeckhandBase(models.ModelBase, models.TimestampMixin):
|
||||||
|
@ -83,6 +89,14 @@ class DeckhandBase(models.ModelBase, models.TimestampMixin):
|
||||||
return d
|
return d
|
||||||
|
|
||||||
|
|
||||||
|
def __build_tables(blob_type_obj, blob_type_list):
|
||||||
|
global BASE
|
||||||
|
|
||||||
|
if BASE:
|
||||||
|
return
|
||||||
|
|
||||||
|
BASE = declarative.declarative_base()
|
||||||
|
|
||||||
class Bucket(BASE, DeckhandBase):
|
class Bucket(BASE, DeckhandBase):
|
||||||
__tablename__ = 'buckets'
|
__tablename__ = 'buckets'
|
||||||
|
|
||||||
|
@ -90,6 +104,15 @@ class Bucket(BASE, DeckhandBase):
|
||||||
name = Column(String(36), unique=True)
|
name = Column(String(36), unique=True)
|
||||||
documents = relationship("Document", backref="bucket")
|
documents = relationship("Document", backref="bucket")
|
||||||
|
|
||||||
|
class RevisionTag(BASE, DeckhandBase):
|
||||||
|
__tablename__ = 'revision_tags'
|
||||||
|
|
||||||
|
id = Column(Integer, primary_key=True)
|
||||||
|
tag = Column(String(64), nullable=False)
|
||||||
|
data = Column(blob_type_obj, nullable=True, default={})
|
||||||
|
revision_id = Column(
|
||||||
|
Integer, ForeignKey('revisions.id', ondelete='CASCADE'),
|
||||||
|
nullable=False)
|
||||||
|
|
||||||
class Revision(BASE, DeckhandBase):
|
class Revision(BASE, DeckhandBase):
|
||||||
__tablename__ = 'revisions'
|
__tablename__ = 'revisions'
|
||||||
|
@ -97,8 +120,8 @@ class Revision(BASE, DeckhandBase):
|
||||||
id = Column(Integer, primary_key=True)
|
id = Column(Integer, primary_key=True)
|
||||||
# `primaryjoin` used below for sqlalchemy to distinguish between
|
# `primaryjoin` used below for sqlalchemy to distinguish between
|
||||||
# `Document.revision_id` and `Document.orig_revision_id`.
|
# `Document.revision_id` and `Document.orig_revision_id`.
|
||||||
documents = relationship("Document",
|
documents = relationship(
|
||||||
primaryjoin="Revision.id==Document.revision_id")
|
"Document", primaryjoin="Revision.id==Document.revision_id")
|
||||||
tags = relationship("RevisionTag")
|
tags = relationship("RevisionTag")
|
||||||
validations = relationship("Validation")
|
validations = relationship("Validation")
|
||||||
|
|
||||||
|
@ -108,18 +131,6 @@ class Revision(BASE, DeckhandBase):
|
||||||
d['tags'] = [tag.to_dict() for tag in self.tags]
|
d['tags'] = [tag.to_dict() for tag in self.tags]
|
||||||
return d
|
return d
|
||||||
|
|
||||||
|
|
||||||
class RevisionTag(BASE, DeckhandBase):
|
|
||||||
__tablename__ = 'revision_tags'
|
|
||||||
|
|
||||||
id = Column(Integer, primary_key=True)
|
|
||||||
tag = Column(String(64), nullable=False)
|
|
||||||
data = Column(oslo_types.JsonEncodedDict(), nullable=True, default={})
|
|
||||||
revision_id = Column(
|
|
||||||
Integer, ForeignKey('revisions.id', ondelete='CASCADE'),
|
|
||||||
nullable=False)
|
|
||||||
|
|
||||||
|
|
||||||
class Document(BASE, DeckhandBase):
|
class Document(BASE, DeckhandBase):
|
||||||
UNIQUE_CONSTRAINTS = ('schema', 'name', 'revision_id')
|
UNIQUE_CONSTRAINTS = ('schema', 'name', 'revision_id')
|
||||||
__tablename__ = 'documents'
|
__tablename__ = 'documents'
|
||||||
|
@ -129,24 +140,25 @@ class Document(BASE, DeckhandBase):
|
||||||
schema = Column(String(64), nullable=False)
|
schema = Column(String(64), nullable=False)
|
||||||
# NOTE(fmontei): ``metadata`` is reserved by the DB, so ``_metadata``
|
# NOTE(fmontei): ``metadata`` is reserved by the DB, so ``_metadata``
|
||||||
# must be used to store document metadata information in the DB.
|
# must be used to store document metadata information in the DB.
|
||||||
_metadata = Column(oslo_types.JsonEncodedDict(), nullable=False)
|
_metadata = Column(blob_type_obj, nullable=False)
|
||||||
data = Column(JSONB, nullable=True)
|
data = Column(blob_type_obj, nullable=True)
|
||||||
data_hash = Column(String, nullable=False)
|
data_hash = Column(String, nullable=False)
|
||||||
metadata_hash = Column(String, nullable=False)
|
metadata_hash = Column(String, nullable=False)
|
||||||
bucket_id = Column(Integer, ForeignKey('buckets.id', ondelete='CASCADE'),
|
bucket_id = Column(Integer, ForeignKey('buckets.id',
|
||||||
|
ondelete='CASCADE'),
|
||||||
nullable=False)
|
nullable=False)
|
||||||
revision_id = Column(
|
revision_id = Column(
|
||||||
Integer, ForeignKey('revisions.id', ondelete='CASCADE'),
|
Integer, ForeignKey('revisions.id', ondelete='CASCADE'),
|
||||||
nullable=False)
|
nullable=False)
|
||||||
# Used for documents that haven't changed across revisions but still have
|
# Used for documents that haven't changed across revisions but still
|
||||||
# been carried over into newer revisions. This is necessary in order to
|
# have been carried over into newer revisions. This is necessary in
|
||||||
# roll back to previous revisions or to generate a revision diff. Without
|
# order to roll back to previous revisions or to generate a revision
|
||||||
# recording all the documents that were PUT in a revision, this is rather
|
# diff. Without recording all the documents that were PUT in a
|
||||||
# difficult. By using `orig_revision_id` it is therefore possible to
|
# revision, this is rather difficult. By using `orig_revision_id` it is
|
||||||
# maintain the correct revision history -- that is, remembering the exact
|
# therefore possible to maintain the correct revision history -- that
|
||||||
# revision a document was created in -- while still being able to roll
|
# is, remembering the exact revision a document was created in -- while
|
||||||
# back to all the documents that exist in a specific revision or generate
|
# still being able to roll back to all the documents that exist in a
|
||||||
# an accurate revision diff report.
|
# specific revision or generate an accurate revision diff report.
|
||||||
orig_revision_id = Column(
|
orig_revision_id = Column(
|
||||||
Integer, ForeignKey('revisions.id', ondelete='CASCADE'),
|
Integer, ForeignKey('revisions.id', ondelete='CASCADE'),
|
||||||
nullable=True)
|
nullable=True)
|
||||||
|
@ -175,29 +187,49 @@ class Document(BASE, DeckhandBase):
|
||||||
|
|
||||||
return d
|
return d
|
||||||
|
|
||||||
|
|
||||||
class Validation(BASE, DeckhandBase):
|
class Validation(BASE, DeckhandBase):
|
||||||
__tablename__ = 'validations'
|
__tablename__ = 'validations'
|
||||||
|
|
||||||
id = Column(Integer, primary_key=True)
|
id = Column(Integer, primary_key=True)
|
||||||
name = Column(String(64), nullable=False)
|
name = Column(String(64), nullable=False)
|
||||||
status = Column(String(8), nullable=False)
|
status = Column(String(8), nullable=False)
|
||||||
validator = Column(oslo_types.JsonEncodedDict(), nullable=False)
|
validator = Column(blob_type_obj, nullable=False)
|
||||||
errors = Column(oslo_types.JsonEncodedList(), nullable=False, default=[])
|
errors = Column(blob_type_list, nullable=False, default=[])
|
||||||
revision_id = Column(
|
revision_id = Column(
|
||||||
Integer, ForeignKey('revisions.id', ondelete='CASCADE'),
|
Integer, ForeignKey('revisions.id', ondelete='CASCADE'),
|
||||||
nullable=False)
|
nullable=False)
|
||||||
|
|
||||||
|
this_module = sys.modules[__name__]
|
||||||
|
tables = [Bucket, Document, Revision, RevisionTag, Validation]
|
||||||
|
for table in tables:
|
||||||
|
setattr(this_module, table.__name__, table)
|
||||||
|
|
||||||
|
|
||||||
|
def register_models(engine, connection_string):
|
||||||
|
blob_types = ((JSONB, JSONB) if 'postgresql' in connection_string
|
||||||
|
else (PickleType, oslo_types.JsonEncodedList()))
|
||||||
|
|
||||||
|
LOG.debug('Instantiating DB tables using %s, %s as the column type for '
|
||||||
|
'dictionaries, lists.', *blob_types)
|
||||||
|
|
||||||
def register_models(engine):
|
|
||||||
"""Create database tables for all models with the given engine."""
|
"""Create database tables for all models with the given engine."""
|
||||||
models = [Bucket, Document, Revision, RevisionTag, Validation]
|
__build_tables(*blob_types)
|
||||||
for model in models:
|
|
||||||
|
this_module = sys.modules[__name__]
|
||||||
|
models = ['Bucket', 'Document', 'RevisionTag', 'Revision', 'Validation']
|
||||||
|
|
||||||
|
for model_name in models:
|
||||||
|
if hasattr(this_module, model_name):
|
||||||
|
model = getattr(this_module, model_name)
|
||||||
model.metadata.create_all(engine)
|
model.metadata.create_all(engine)
|
||||||
|
|
||||||
|
|
||||||
def unregister_models(engine):
|
def unregister_models(engine):
|
||||||
"""Drop database tables for all models with the given engine."""
|
"""Drop database tables for all models with the given engine."""
|
||||||
models = [Bucket, Document, Revision, RevisionTag, Validation]
|
this_module = sys.modules[__name__]
|
||||||
for model in models:
|
models = ['Bucket', 'Document', 'RevisionTag', 'Revision', 'Validation']
|
||||||
|
|
||||||
|
for model_name in models:
|
||||||
|
if hasattr(this_module, model_name):
|
||||||
|
model = getattr(this_module, model_name)
|
||||||
model.metadata.drop_all(engine)
|
model.metadata.drop_all(engine)
|
||||||
|
|
|
@ -105,10 +105,8 @@ class DeckhandWithDBTestCase(DeckhandTestCase):
|
||||||
|
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
super(DeckhandWithDBTestCase, self).setUp()
|
super(DeckhandWithDBTestCase, self).setUp()
|
||||||
if 'PIFPAF_URL' not in os.environ:
|
|
||||||
raise RuntimeError('Unit tests must be run using `pifpaf run '
|
|
||||||
'postgresql`.')
|
|
||||||
self.override_config(
|
self.override_config(
|
||||||
'connection', os.environ['PIFPAF_URL'], group='database')
|
'connection', os.environ.get('PIFPAF_URL', 'sqlite://'),
|
||||||
db_api.setup_db()
|
group='database')
|
||||||
|
db_api.setup_db(CONF.database.connection)
|
||||||
self.addCleanup(db_api.drop_db)
|
self.addCleanup(db_api.drop_db)
|
||||||
|
|
|
@ -65,11 +65,12 @@ class TestApi(test_base.DeckhandTestCase):
|
||||||
@mock.patch.object(api, 'policy', autospec=True)
|
@mock.patch.object(api, 'policy', autospec=True)
|
||||||
@mock.patch.object(api, 'db_api', autospec=True)
|
@mock.patch.object(api, 'db_api', autospec=True)
|
||||||
@mock.patch.object(api, 'logging', autospec=True)
|
@mock.patch.object(api, 'logging', autospec=True)
|
||||||
@mock.patch.object(api, 'CONF', autospec=True)
|
|
||||||
@mock.patch('deckhand.service.falcon', autospec=True)
|
@mock.patch('deckhand.service.falcon', autospec=True)
|
||||||
def test_init_application(self, mock_falcon, mock_config, mock_logging,
|
def test_init_application(self, mock_falcon, mock_logging,
|
||||||
mock_db_api, _):
|
mock_db_api, _):
|
||||||
mock_falcon_api = mock_falcon.API.return_value
|
mock_falcon_api = mock_falcon.API.return_value
|
||||||
|
self.override_config(
|
||||||
|
'connection', mock.sentinel.db_connection, group='database')
|
||||||
|
|
||||||
api.init_application()
|
api.init_application()
|
||||||
|
|
||||||
|
@ -105,4 +106,5 @@ class TestApi(test_base.DeckhandTestCase):
|
||||||
], any_order=True)
|
], any_order=True)
|
||||||
|
|
||||||
mock_db_api.drop_db.assert_called_once_with()
|
mock_db_api.drop_db.assert_called_once_with()
|
||||||
mock_db_api.setup_db.assert_called_once_with()
|
mock_db_api.setup_db.assert_called_once_with(
|
||||||
|
str(mock.sentinel.db_connection))
|
||||||
|
|
12
tox.ini
12
tox.ini
|
@ -1,5 +1,5 @@
|
||||||
[tox]
|
[tox]
|
||||||
envlist = py{35,27},pep8,bandit,docs
|
envlist = py{35,27}-{postgresql,},pep8,bandit,docs
|
||||||
|
|
||||||
[testenv]
|
[testenv]
|
||||||
usedevelop = True
|
usedevelop = True
|
||||||
|
@ -19,11 +19,21 @@ commands =
|
||||||
rm -Rf .testrepository/times.dbm
|
rm -Rf .testrepository/times.dbm
|
||||||
|
|
||||||
[testenv:py27]
|
[testenv:py27]
|
||||||
|
commands =
|
||||||
|
{[testenv]commands}
|
||||||
|
ostestr '{posargs}'
|
||||||
|
|
||||||
|
[testenv:py27-postgresql]
|
||||||
commands =
|
commands =
|
||||||
{[testenv]commands}
|
{[testenv]commands}
|
||||||
{toxinidir}/tools/run_pifpaf.sh '{posargs}'
|
{toxinidir}/tools/run_pifpaf.sh '{posargs}'
|
||||||
|
|
||||||
[testenv:py35]
|
[testenv:py35]
|
||||||
|
commands =
|
||||||
|
{[testenv]commands}
|
||||||
|
ostestr '{posargs}'
|
||||||
|
|
||||||
|
[testenv:py35-postgresql]
|
||||||
commands =
|
commands =
|
||||||
{[testenv]commands}
|
{[testenv]commands}
|
||||||
{toxinidir}/tools/run_pifpaf.sh '{posargs}'
|
{toxinidir}/tools/run_pifpaf.sh '{posargs}'
|
||||||
|
|
Loading…
Reference in New Issue