From a483ec3c4d2fc71ff30fb1dab002fa586543eee6 Mon Sep 17 00:00:00 2001
From: Felipe Monteiro
Date: Wed, 25 Jul 2018 18:35:28 +0100
Subject: [PATCH] Implement rendered documents caching

This implements a rendered documents cache that is keyed by revision ID.
This means that repeated requests to render the documents of the same
revision ID will be significantly faster, as the results are returned
from the cache rather than re-rendered.

Change-Id: Ie92f55a9234d038683ba1fcad76710d968ed67ab
---
 deckhand/conf/config.py                  | 25 ++++++++++-
 deckhand/control/revision_documents.py   | 13 +++---
 deckhand/engine/__init__.py              | 17 +++++++
 deckhand/engine/cache.py                 | 49 ++++++++++++++++++++
 deckhand/engine/layering.py              |  2 +-
 deckhand/engine/render.py                | 45 +++++++++++++++++++
 deckhand/tests/deckhand.conf.test        |  3 ++
 deckhand/tests/unit/base.py              |  6 +++
 deckhand/tests/unit/engine/test_cache.py | 57 ++++++++++++++++++++++++
 9 files changed, 206 insertions(+), 11 deletions(-)
 create mode 100644 deckhand/engine/cache.py
 create mode 100644 deckhand/engine/render.py
 create mode 100644 deckhand/tests/unit/engine/test_cache.py

diff --git a/deckhand/conf/config.py b/deckhand/conf/config.py
index f92af7e4..1a43c89c 100644
--- a/deckhand/conf/config.py
+++ b/deckhand/conf/config.py
@@ -39,6 +39,26 @@ barbican_opts = [
 ]
 
 
+engine_group = cfg.OptGroup(
+    name='engine',
+    title='Engine Options',
+    help="Engine options for configuring behavior specific to Deckhand's "
+         "engine.")
+
+
+engine_opts = [
+    # TODO(felipemonteiro): This is better off being removed because the same
+    # effect can be achieved through per-test gabbi fixtures that clean up
+    # the cache between tests.
+    cfg.BoolOpt('enable_cache', default=True,
+                help="Whether to enable document rendering caching. Useful "
+                     "for testing to avoid cross-test caching conflicts."),
+    cfg.IntOpt('cache_timeout', default=3600,
+               help="How long (in seconds) document rendering results should "
+                    "remain cached in memory."),
+]
+
+
 jsonpath_group = cfg.OptGroup(
     name='jsonpath',
     title='JSONPath Options',
@@ -47,8 +67,8 @@ jsonpath_opts = [
     cfg.IntOpt('cache_timeout', default=3600,
-               help="How long JSONPath lookup results should remain cached "
-                    "in memory.")
+               help="How long (in seconds) JSONPath lookup results should "
+                    "remain cached in memory.")
 ]
@@ -65,6 +85,7 @@ default_opts = [
 def register_opts(conf):
     conf.register_group(barbican_group)
     conf.register_opts(barbican_opts, group=barbican_group)
+    conf.register_opts(engine_opts, group=engine_group)
     conf.register_opts(jsonpath_opts, group=jsonpath_group)
     conf.register_opts(default_opts)
     ks_loading.register_auth_conf_options(conf, group='keystone_authtoken')
diff --git a/deckhand/control/revision_documents.py b/deckhand/control/revision_documents.py
index 265fd64d..580beaad 100644
--- a/deckhand/control/revision_documents.py
+++ b/deckhand/control/revision_documents.py
@@ -27,8 +27,8 @@ from deckhand.control import base as api_base
 from deckhand.control import common
 from deckhand.control.views import document as document_view
 from deckhand.db.sqlalchemy import api as db_api
+from deckhand import engine
 from deckhand.engine import document_validation
-from deckhand.engine import layering
 from deckhand.engine import secrets_manager
 from deckhand import errors
 from deckhand import policy
@@ -119,13 +119,10 @@ class RenderedDocumentsResource(api_base.BaseResource):
         documents = document_wrapper.DocumentDict.from_list(data)
         encryption_sources = self._resolve_encrypted_data(documents)
         try:
-            # NOTE(fmontei): `validate` is False because documents have already
-            # been pre-validated during ingestion. Documents are post-validated
-            # below, regardless.
-            document_layering = layering.DocumentLayering(
-                documents, encryption_sources=encryption_sources,
-                validate=False)
-            rendered_documents = document_layering.render()
+            rendered_documents = engine.render(
+                revision_id,
+                documents,
+                encryption_sources=encryption_sources)
         except (errors.BarbicanClientException,
                 errors.BarbicanServerException,
                 errors.InvalidDocumentLayer,
diff --git a/deckhand/engine/__init__.py b/deckhand/engine/__init__.py
index e69de29b..db31ec50 100644
--- a/deckhand/engine/__init__.py
+++ b/deckhand/engine/__init__.py
@@ -0,0 +1,17 @@
+# Copyright 2018 AT&T Intellectual Property. All other rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from deckhand.engine.render import render
+
+__all__ = ('render',)
diff --git a/deckhand/engine/cache.py b/deckhand/engine/cache.py
new file mode 100644
index 00000000..e218378e
--- /dev/null
+++ b/deckhand/engine/cache.py
@@ -0,0 +1,49 @@
+# Copyright 2018 AT&T Intellectual Property. All other rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from beaker.cache import CacheManager
+from beaker.util import parse_cache_config_options
+from oslo_log import log as logging
+
+from deckhand.conf import config
+from deckhand.engine import layering
+
+CONF = config.CONF
+LOG = logging.getLogger(__name__)
+
+_CACHE_OPTS = {
+    'cache.type': 'memory',
+    'expire': CONF.engine.cache_timeout,
+}
+_CACHE = CacheManager(**parse_cache_config_options(_CACHE_OPTS))
+_DOCUMENT_RENDERING_CACHE = _CACHE.get_cache('rendered_documents_cache')
+
+
+def lookup_by_revision_id(revision_id, documents, **kwargs):
+    """Look up rendered documents by ``revision_id``."""
+
+    def do_render():
+        """Perform document rendering for the revision."""
+        document_layering = layering.DocumentLayering(documents, **kwargs)
+        return document_layering.render()
+
+    if CONF.engine.enable_cache:
+        return _DOCUMENT_RENDERING_CACHE.get(key=revision_id,
+                                             createfunc=do_render)
+    else:
+        return do_render()
+
+
+def invalidate():
+    _DOCUMENT_RENDERING_CACHE.clear()
diff --git a/deckhand/engine/layering.py b/deckhand/engine/layering.py
index 1c3177c4..39bee4d4 100644
--- a/deckhand/engine/layering.py
+++ b/deckhand/engine/layering.py
@@ -405,7 +405,7 @@ class DocumentLayering(object):
             contained in the destination document's data section to the
             actual unecrypted data. If encrypting data with Barbican, the
             reference will be a Barbican secret reference.
-        :type encryption_sources: List[dict]
+        :type encryption_sources: dict
 
         :raises LayeringPolicyNotFound: If no LayeringPolicy was found among
             list of ``documents``.
diff --git a/deckhand/engine/render.py b/deckhand/engine/render.py
new file mode 100644
index 00000000..a84a2f2f
--- /dev/null
+++ b/deckhand/engine/render.py
@@ -0,0 +1,45 @@
+# Copyright 2018 AT&T Intellectual Property. All other rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from deckhand.engine import cache
+
+__all__ = ('render',)
+
+
+def render(revision_id, documents, encryption_sources=None):
+    """Render the documents for ``revision_id`` from the raw ``documents``.
+
+    :param revision_id: Revision ID used as the cache key.
+    :type revision_id: int
+    :param documents: List of raw documents corresponding to ``revision_id``
+        to render.
+    :type documents: List[dict]
+    :param encryption_sources: A dictionary that maps the reference
+        contained in the destination document's data section to the
+        actual unencrypted data. If encrypting data with Barbican, the
+        reference will be a Barbican secret reference.
+    :type encryption_sources: dict
+    :returns: Rendered documents for ``revision_id``.
+    :rtype: List[dict]
+
+    """
+
+    # NOTE(felipemonteiro): `validate` is False because documents have
+    # already been pre-validated during ingestion. Documents are
+    # post-validated by the caller, regardless.
+    return cache.lookup_by_revision_id(
+        revision_id,
+        documents,
+        encryption_sources=encryption_sources,
+        validate=False)
diff --git a/deckhand/tests/deckhand.conf.test b/deckhand/tests/deckhand.conf.test
index 782d14fd..b91469c0 100644
--- a/deckhand/tests/deckhand.conf.test
+++ b/deckhand/tests/deckhand.conf.test
@@ -12,6 +12,9 @@ policy_file = policy.yaml
 [database]
 connection = ${AIRSHIP_DECKHAND_DATABASE_URL}
 
+[engine]
+enable_cache = false
+
 [keystone_authtoken]
 # NOTE(fmontei): Values taken from clouds.yaml. Values only used for
 # integration testing.
diff --git a/deckhand/tests/unit/base.py b/deckhand/tests/unit/base.py
index c3fd3bc9..0f949e11 100644
--- a/deckhand/tests/unit/base.py
+++ b/deckhand/tests/unit/base.py
@@ -24,6 +24,7 @@ import testtools
 
 from deckhand.conf import config  # noqa: Calls register_opts(CONF)
 from deckhand.db.sqlalchemy import api as db_api
+from deckhand.engine import cache
 from deckhand.tests.unit import fixtures as dh_fixtures
 
 CONF = cfg.CONF
@@ -41,6 +42,11 @@ class DeckhandTestCase(testtools.TestCase):
         self.useFixture(dh_fixtures.ConfPatcher(
             development_mode=True, group=None))
 
+    def tearDown(self):
+        # Clear the cache between tests.
+        cache.invalidate()
+        super(DeckhandTestCase, self).tearDown()
+
     def override_config(self, name, override, group=None):
         CONF.set_override(name, override, group)
         self.addCleanup(CONF.clear_override, name, group)
diff --git a/deckhand/tests/unit/engine/test_cache.py b/deckhand/tests/unit/engine/test_cache.py
new file mode 100644
index 00000000..a9c63871
--- /dev/null
+++ b/deckhand/tests/unit/engine/test_cache.py
@@ -0,0 +1,57 @@
+# Copyright 2018 AT&T Intellectual Property. All other rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import testtools
+
+from deckhand.engine import cache
+from deckhand import factories
+from deckhand.tests.unit import base as test_base
+
+
+class RenderedDocumentsCacheTest(test_base.DeckhandTestCase):
+
+    def test_lookup_by_revision_id_cache(self):
+        """Validate that ``lookup_by_revision_id`` caching works.
+
+        Passing in None in lieu of the actual documents proves that:
+
+        * If the payload is already in the cache, no error is raised: the
+          cache is hit, so no further processing occurs; otherwise a method
+          would be called on `None`.
+        * If the payload is not in the cache, then, per the logic above, a
+          method is called on `None`, raising AttributeError.
+        """
+
+        document_factory = factories.DocumentFactory(1, [1])
+        documents = document_factory.gen_test({})
+
+        # Validate that the first lookup renders and returns the payload.
+        rendered_documents = cache.lookup_by_revision_id(1, documents)
+        self.assertIsInstance(rendered_documents, list)
+
+        # Validate that the cache actually works.
+        next_rendered_documents = cache.lookup_by_revision_id(1, None)
+        self.assertEqual(rendered_documents, next_rendered_documents)
+
+        # No documents are passed in and revision ID 2 isn't cached, so
+        # expect this to raise.
+        with testtools.ExpectedException(AttributeError):
+            cache.lookup_by_revision_id(2, None)
+
+        # Invalidate the cache and ensure the original data isn't there.
+        cache.invalidate()
+
+        # The cache won't be hit this time, so expect AttributeError.
+        with testtools.ExpectedException(AttributeError):
+            cache.lookup_by_revision_id(1, None)
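
Illustrative usage (a minimal sketch, not part of the patch itself): the
snippet below exercises the new cached rendering path, reusing the document
factory from the new unit test. The revision ID is arbitrary, and Deckhand's
configuration is assumed to be registered, as it is in DeckhandTestCase.

    from deckhand import engine
    from deckhand import factories
    from deckhand.engine import cache

    # Generate a trivial set of raw documents, using the same helper the new
    # unit test uses; in the API layer these come from the database instead.
    documents = factories.DocumentFactory(1, [1]).gen_test({})

    # The first call renders the documents via DocumentLayering and stores
    # the result in the in-memory Beaker cache, keyed by the revision ID.
    rendered = engine.render(1, documents)

    # A second call for the same revision ID is served from the cache, so
    # layering is not re-run.
    assert engine.render(1, documents) == rendered

    # Clearing the cache forces the next call to re-render; the new
    # DeckhandTestCase.tearDown() relies on this between unit tests.
    cache.invalidate()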