barbican-9.1.0.dev50/0000775000175000017500000000000013616500640014411 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/apiary.apib0000664000175000017500000000306213616500636016541 0ustar sahidsahid00000000000000HOST: https://dfw.barbican.api.rackspacecloud.com/v1/

--- Barbican API v1 ---
---
Barbican is a REST-based key management service. It is built with
[OpenStack](https://www.openstack.org/) in mind, but can be used outside an
OpenStack implementation. More information can be found on
[OpenDev](https://opendev.org/openstack/barbican).
---

-- Secrets Resource
The following is a description of the resources dealing with generic secrets.
These can be encryption keys or anything else a user wants to store in a
secure, auditable manner.
--

Allows a user to list all secrets in a tenant. Note: the actual secret
payload is not included in this listing; a user must make a separate call for
the secret details to view the secret itself.
GET /secrets
< 200
< Content-Type: application/json
{
    "name": "AES key",
    "algorithm": "AES",
    "cypher_type": "CBC",
    "bit_length": 256,
    "content_types": {
        "default": "text/plain"
    },
    "expiration": "2013-05-08T16:21:38.134160",
    "id": "2eb5a8d8-2202-4f46-b64d-89e26eb25487",
    "mime_type": "text/plain"
}

Allows a user to create a new secret. This call expects the user to provide a
secret. To have the API generate a secret, see the provisioning API.
POST /secrets
> Content-Type: application/json
{
    "name": "AES key",
    "algorithm": "AES",
    "cypher_type": "CBC",
    "bit_length": 256,
    "mime_type": "text/plain",
    "payload": "c2VjcmV0LWtleS1tYXRlcmlhbA=="
}
< 201
< Content-Type: application/json
{
    "secret_ref": "/v1/secrets/2eb5a8d8-2202-4f46-b64d-89e26eb25487"
}
barbican-9.1.0.dev50/babel.cfg0000664000175000017500000000002013616500636016134 0ustar sahidsahid00000000000000[python: **.py]
barbican-9.1.0.dev50/bindep.txt0000664000175000017500000000107713616500636016425 0ustar sahidsahid00000000000000# See openstack-infra/project-config:jenkins/data/bindep-fallback.txt
# This is used by bindep: sudo [apt-get | yum] install $(bindep -b)

mozilla-nss-devel [platform:rpm]
nss-devel [platform:rpm]
libnss3-dev [platform:dpkg]
gettext [test]

# Required for the Dogtag plugin
389-ds-base [platform:rpm]
pki-ca [platform:rpm]
pki-kra [platform:rpm]

# Required for the cryptography lib
libssl-dev [platform:dpkg]
openssl-devel [platform:rpm]

# librsvg2 is needed for sphinxcontrib-svg2pdfconverter in docs builds.
librsvg2-tools [doc platform:rpm]
librsvg2-bin [doc platform:dpkg]
barbican-9.1.0.dev50/tox.ini0000664000175000017500000001200313616500636015725 0ustar sahidsahid00000000000000[tox]
minversion = 2.0
envlist = py36,py37,pep8,docs
skipsdist = True

[testenv]
basepython = python3
setenv = PYTHON=coverage run --source barbican --parallel-mode
usedevelop = True
install_command = pip install -c{env:UPPER_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master} -U {opts} {packages}
deps = -r{toxinidir}/requirements.txt
       -r{toxinidir}/test-requirements.txt
commands =
    oslo-config-generator --config-file etc/oslo-config-generator/barbican.conf --output-file etc/barbican/barbican.conf
    /usr/bin/find . 
-type f -name "*.py[c|o]" -delete rm -f .testrepository/times.dbm coverage erase stestr run {posargs} coverage combine coverage html -d cover coverage xml -o cover/coverage.xml coverage report -m whitelist_externals = rm [testenv:cover] deps = {[testenv]deps} diff_cover commands = coverage erase stestr run {posargs} coverage combine coverage html -d cover coverage xml -o cover/coverage.xml diff-cover --fail-under 100 --compare-branch master cover/coverage.xml [testenv:releasenotes] deps = -r{toxinidir}/doc/requirements.txt commands = rm -rf releasenotes/build sphinx-build -a -E -W -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html [testenv:pep8] sitepackages = False commands = doc8 {posargs} flake8 {posargs} # Run security linter bandit -r barbican -x tests -n5 -s B105 [testenv:genconfig] whitelist_externals = bash envdir = {toxworkdir}/pep8 commands = oslo-config-generator --config-file etc/oslo-config-generator/barbican.conf [testenv:venv] commands = {posargs} [testenv:debug] commands = oslo_debug_helper -t barbican/tests {posargs} [testenv:py3pep8] # This hack is in place to allow us to run py3 based flake8 # without installing barbican. install_command = /bin/echo {packages} commands = pip install "hacking>=0.10.0,<0.11" flake8 barbican setup.py [testenv:docs] # This environment is called from CI scripts to test and publish # the main docs to https://docs.openstack.org/barbican description = Build main documentation deps = -r{toxinidir}/doc/requirements.txt commands= rm -rf doc/build doc/build/doctrees sphinx-build -W -b html doc/source doc/build/html whitelist_externals = rm [testenv:pdf-docs] deps = {[testenv:docs]deps} whitelist_externals = make commands = sphinx-build -W -b latex doc/source doc/build/pdf make -C doc/build/pdf [testenv:api-guide] # This environment is called from CI scripts to test and publish # the API Guide to docs.openstack.org. deps = -r{toxinidir}/doc/requirements.txt commands = rm -rf api-guide/build sphinx-build -W -b html -d api-guide/build/doctrees api-guide/source api-guide/build/html [testenv:all-docs] description = Build all documentation deps = -r{toxinidir}/doc/requirements.txt commands= {[testenv:docs]commands} {[testenv:api-guide]commands} {[testenv:releasenotes]commands} whitelist_externals = rm [testenv:functional] # This tox env is purely to make local test development easier # Note: This requires local running instances of Barbican and Keystone # TODO(dmend): remove --serial and uncomment coverage commands deps = -r{toxinidir}/test-requirements.txt setenv = OS_TEST_PATH={toxinidir}/functionaltests commands = /usr/bin/find . -type f -name "*.py[c|o]" -delete stestr run --serial --slowest {posargs} # coverage combine # coverage html -d cover # coverage xml -o cover/coverage.xml passenv = KMIP_PLUGIN_ENABLED VAULT_PLUGIN_ENABLED PKCS11_PLUGIN_ENABLED [testenv:cmd] # This tox env is purely to make local test development easier # Note: This requires local running instances of Barbican and Keystone deps = -r{toxinidir}/test-requirements.txt setenv = OS_TEST_PATH={toxinidir}/barbican/cmd/functionaltests commands = /usr/bin/find . 
-type f -name "*.py[c|o]" -delete
    stestr run {posargs}
    coverage combine
    coverage html -d cover
    coverage xml -o cover/coverage.xml

[doc8]
ignore = D001
ignore-path = .venv,.git,.tox,.tmp,*barbican/locale*,*lib/python*,barbican.egg*,doc/build,releasenotes/*,doc/source/contributor/api

[flake8]
filename = *.py,app.wsgi
exclude = .git,.idea,.tox,bin,dist,debian,rpmbuild,tools,*.egg-info,*.eggs,contrib,
          *docs/target,*.egg,build

[testenv:bandit]
deps = -r{toxinidir}/test-requirements.txt
commands = bandit -r barbican -x tests -n5

[testenv:bindep]
# Do not install any requirements. We want this to be fast and work even if
# system dependencies are missing, since it's used to tell you what system
# dependencies are missing! This also means that bindep must be installed
# separately, outside of the requirements files.
deps = bindep
commands = bindep test

[testenv:genpolicy]
envdir = {toxworkdir}/pep8
commands = oslopolicy-sample-generator --config-file=etc/oslo-config-generator/policy.conf

[hacking]
local-check-factory = barbican.hacking.checks.factory

[testenv:lower-constraints]
deps =
    -c{toxinidir}/lower-constraints.txt
    -r{toxinidir}/test-requirements.txt
    -r{toxinidir}/requirements.txt
barbican-9.1.0.dev50/README.rst0000664000175000017500000001120713616500636016106 0ustar sahidsahid00000000000000========================
Team and repository tags
========================

.. image:: https://governance.openstack.org/tc/badges/barbican.svg
    :target: https://governance.openstack.org/tc/reference/tags/index.html

.. Change things from this point on

Barbican
========

Barbican is a REST API designed for the secure storage, provisioning and
management of secrets. It is aimed at being useful for all environments,
including large ephemeral Clouds.

Barbican is an OpenStack project developed by the Barbican Project Team with
support from Rackspace Hosting, EMC, Ericsson, Johns Hopkins University, HP,
Red Hat, Cisco Systems, and many more.

The full documentation can be found on the `Barbican Developer Documentation
Site <https://docs.openstack.org/barbican/latest/>`_.

If you have a technical question, you can ask it at `Ask OpenStack
<https://ask.openstack.org/>`_ with the `barbican` tag. To file a bug, use our
bug tracker on `OpenStack Storyboard <https://storyboard.openstack.org/>`_.

Release notes for the project can be found at
https://docs.openstack.org/releasenotes/barbican.

For development questions or discussion, use the OpenStack-discuss mailing
list at `openstack-discuss@lists.openstack.org` and let us know what you
think, just add `[barbican]` to the subject. You can also join our IRC channel
`#openstack-barbican` on Freenode.

Barbican began as part of a set of applications that make up the CloudKeep
ecosystem. The other systems are:

* Postern - a Go-based agent that provides access to secrets from the
  Barbican API.
* Palisade - an AngularJS-based web UI for the Barbican API.
* `Python-barbicanclient
  <https://opendev.org/openstack/python-barbicanclient>`_ - a convenient
  Python-based library to interact with the Barbican API.

Getting Started
---------------

Please visit our `Users, Developers and Operators documentation
<https://docs.openstack.org/barbican/latest/>`_ for details.

Why Should You Use Barbican?
----------------------------

The current state of key management is atrocious. While Windows does have
some decent options through the use of the Data Protection API (DPAPI) and
Active Directory, Linux lacks a cohesive story around how to manage keys for
application use. Barbican was designed to solve this problem. The system was
motivated by internal Rackspace needs, requirements from
`OpenStack <https://www.openstack.org/>`_ and a realization that the current
state of the art could use some help.
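For example, once a Barbican endpoint is available, an application can store
and retrieve a secret with just a few lines of Python. The sketch below uses
the python-barbicanclient library mentioned above; the Keystone endpoint and
credentials are placeholders, so adjust them for your deployment::

    from barbicanclient import client
    from keystoneauth1 import identity
    from keystoneauth1 import session

    # Authenticate against Keystone (placeholder credentials).
    auth = identity.Password(auth_url='http://localhost:5000/v3',
                             username='demo', password='demo-password',
                             project_name='demo',
                             user_domain_id='default',
                             project_domain_id='default')
    barbican = client.Client(session=session.Session(auth=auth))

    # Store an opaque secret; store() returns its reference URL.
    secret = barbican.secrets.create(name='AES key',
                                     payload='s3cr3t-key-material')
    secret_ref = secret.store()

    # Retrieve it later by reference.
    print(barbican.secrets.get(secret_ref).payload)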
Barbican will handle many types of secrets, including:

* **Symmetric Keys** - Used to perform reversible encryption of data at rest,
  typically using the AES algorithm set. This type of key is required to
  enable features like encrypted Swift containers and Cinder volumes,
  encrypted Cloud Backups, etc.
* **Asymmetric Keys** - Asymmetric key pairs (sometimes referred to as public
  / private keys) are used in many scenarios where communication between
  untrusted parties is desired. The most common case is with SSL/TLS
  certificates, but they are also used in solutions like SSH keys, S/MIME
  (mail) encryption and digital signatures.
* **Raw Secrets** - Barbican stores secrets as a base64-encoded block of data
  (encrypted, naturally). Clients can use the API to store any secrets in any
  format they desire. The Postern agent is capable of presenting these
  secrets in various formats to ease integration.

For the symmetric and asymmetric key types, Barbican supports full life cycle
management including provisioning, expiration, reporting, etc. A plugin
system allows for multiple certificate authority support (including public
and private CAs).

Design Goals
------------

1. Provide a central secret-store capable of distributing secret / keying
   material to all types of deployments including ephemeral Cloud instances.
2. Support reasonable compliance regimes through reporting and auditability.
3. Application adoption costs should be minimal or non-existent.
4. Build a community and ecosystem by being open-source and extensible.
5. Improve security through sane defaults and centralized management of
   policies for all secrets.
6. Provide an out-of-band communication mechanism to notify and protect
   sensitive assets.
barbican-9.1.0.dev50/setup.cfg0000664000175000017500000000741713616500640016243 0ustar sahidsahid00000000000000[metadata]
name = barbican
summary = OpenStack Secure Key Management
description = Service for storing sensitive client information for OpenStack
description-file = README.rst
author = OpenStack
author-email = openstack-discuss@lists.openstack.org
home-page = https://docs.openstack.org/barbican/latest/
classifier =
    Environment :: OpenStack
    Intended Audience :: Information Technology
    Intended Audience :: System Administrators
    License :: OSI Approved :: Apache Software License
    Operating System :: POSIX :: Linux
    Programming Language :: Python
    Programming Language :: Python :: 3
    Programming Language :: Python :: 3.6
    Programming Language :: Python :: 3.7

[files]
data_files =
    etc/barbican = etc/barbican/barbican-api-paste.ini
packages =
    barbican

[extras]
dogtag =
    dogtag-pki>=10.3.5.1 # LGPLv3+

[entry_points]
oslo.policy.enforcer =
    barbican = barbican.common.policy:get_enforcer
oslo.policy.policies =
    barbican = barbican.common.policies:list_rules
console_scripts =
    barbican-manage = barbican.cmd.barbican_manage:main
    barbican-db-manage = barbican.cmd.db_manage:main
    barbican-keystone-listener = barbican.cmd.keystone_listener:main
    barbican-worker = barbican.cmd.worker:main
    pkcs11-kek-rewrap = barbican.cmd.pkcs11_kek_rewrap:main
    pkcs11-key-generation = barbican.cmd.pkcs11_key_generation:main
    barbican-retry = barbican.cmd.retry_scheduler:main
    barbican-status = barbican.cmd.status:main
wsgi_scripts =
    barbican-wsgi-api = barbican.api.app:get_api_wsgi_script
barbican.secretstore.plugin =
    store_crypto = barbican.plugin.store_crypto:StoreCryptoAdapterPlugin
    dogtag_crypto = barbican.plugin.dogtag:DogtagKRAPlugin
    kmip_plugin = barbican.plugin.kmip_secret_store:KMIPSecretStore
    vault_plugin = 
barbican.plugin.vault_secret_store:VaultSecretStore barbican.crypto.plugin = p11_crypto = barbican.plugin.crypto.p11_crypto:P11CryptoPlugin simple_crypto = barbican.plugin.crypto.simple_crypto:SimpleCryptoPlugin barbican.certificate.plugin = simple_certificate = barbican.plugin.simple_certificate_manager:SimpleCertificatePlugin snakeoil_ca = barbican.plugin.snakeoil_ca:SnakeoilCACertificatePlugin symantec = barbican.plugin.symantec:SymantecCertificatePlugin dogtag = barbican.plugin.dogtag:DogtagCAPlugin barbican.certificate.event.plugin = simple_certificate_event = barbican.plugin.simple_certificate_manager:SimpleCertificateEventPlugin barbican.test.crypto.plugin = test_crypto = barbican.tests.crypto.test_plugin:TestCryptoPlugin oslo.config.opts = barbican.common.config = barbican.common.config:list_opts barbican.plugin.secret_store = barbican.plugin.interface.secret_store:list_opts barbican.plugin.crypto = barbican.plugin.crypto.manager:list_opts barbican.plugin.crypto.simple = barbican.plugin.crypto.simple_crypto:list_opts barbican.plugin.dogtag = barbican.plugin.dogtag_config_opts:list_opts barbican.plugin.crypto.p11 = barbican.plugin.crypto.p11_crypto:list_opts barbican.plugin.secret_store.kmip = barbican.plugin.kmip_secret_store:list_opts barbican.plugin.secret_store.vault = barbican.plugin.vault_secret_store:list_opts barbican.certificate.plugin = barbican.plugin.interface.certificate_manager:list_opts barbican.certificate.plugin.snakeoil = barbican.plugin.snakeoil_ca:list_opts oslo.config.opts.defaults = barbican.common.config = barbican.common.config:set_middleware_defaults [build_apiguide] all_files = 1 build-dir = api-guide/build source-dir = api-guide/source [egg_info] tag_build = tag_date = 0 tag_svn_revision = 0 [compile_catalog] directory = barbican/locale domain = barbican [update_catalog] domain = barbican output_dir = barbican/locale input_file = barbican/locale/barbican.pot [extract_messages] keywords = _ gettext ngettext l_ lazy_gettext mapping_file = babel.cfg output_file = barbican/locale/barbican.pot [wheel] universal = 1 barbican-9.1.0.dev50/barbican/0000775000175000017500000000000013616500640016152 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/objects/0000775000175000017500000000000013616500640017603 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/objects/container_acl_user.py0000664000175000017500000000341413616500636024023 0ustar sahidsahid00000000000000# Copyright 2018 Fujitsu. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
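# Illustrative usage sketch (not part of the original module; assumes `acl`
# is an existing ContainerACL and `session` an open database session):
#
#     acl_user = ContainerACLUser(acl_id=acl.id, user_id='some-user-id')
#     acl_user.create(session=session)   # persists via the ACL-user repo
#     acl_user.delete(session=session)   # removes the entry by its id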
from oslo_versionedobjects import base as object_base from barbican.common import exception from barbican import i18n as u from barbican.model import models from barbican.model import repositories as repos from barbican.objects import base from barbican.objects import fields @object_base.VersionedObjectRegistry.register class ContainerACLUser(base.BarbicanObject, base.BarbicanPersistentObject, object_base.VersionedObjectDictCompat): fields = { 'acl_id': fields.StringField(), 'user_id': fields.StringField(), 'status': fields.StringField(nullable=True, default=base.States.ACTIVE) } db_model = models.ContainerACLUser db_repo = repos.get_container_acl_user_repository() def _validate_fields(self, change_fields): if change_fields.get('user_id') is None: msg = u._("Must supply non-None {0} argument for ContainerACLUser " "entry.") raise exception.MissingArgumentError(msg.format("user_id")) def delete(self, session): entity_id = self.id self.db_repo.delete_entity_by_id( entity_id=entity_id, external_project_id=None, session=session) barbican-9.1.0.dev50/barbican/objects/container.py0000664000175000017500000001357513616500636022157 0ustar sahidsahid00000000000000# Copyright 2018 Fujitsu. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_versionedobjects import base as object_base from barbican.model import models from barbican.model import repositories as repos from barbican.objects import base from barbican.objects import container_secret as con_se from barbican.objects import fields TYPE_VALUE = ['generic', 'rsa', 'dsa', 'certificate'] @object_base.VersionedObjectRegistry.register class Container(base.BarbicanObject, base.BarbicanPersistentObject, object_base.VersionedObjectDictCompat): fields = { 'name': fields.StringField(nullable=True, default=None), 'type': fields.EnumField(nullable=True, valid_values=TYPE_VALUE), 'project_id': fields.StringField(nullable=True, default=None), 'creator_id': fields.StringField(nullable=True, default=None), 'consumers': fields.ListOfObjectsField('ContainerConsumerMetadatum', default=list()), 'container_secrets': fields.ListOfObjectsField('ContainerSecret', default=list()), 'container_acls': fields.ListOfObjectsField('ContainerACL', default=list()), 'project': fields.ObjectField('Project', nullable=True, default=None) } db_model = models.Container db_repo = repos.get_container_repository() synthetic_fields = ['consumers', 'container_secrets', 'container_acls', 'project'] def __init__(self, context=None, parsed_request=None, **kwargs): super(Container, self).__init__(context=context, **kwargs) if parsed_request: self.name = parsed_request.get('name') self.type = parsed_request.get('type') self.status = base.States.ACTIVE self.creator_id = parsed_request.get('creator_id') secret_refs = parsed_request.get('secret_refs') if secret_refs: for secret_ref in parsed_request.get('secret_refs'): container_secret = con_se.ContainerSecret() container_secret.name = secret_ref.get('name') secret_id = secret_ref.get('secret_ref') if secret_id.endswith('/'): secret_id = 
secret_id.rsplit('/', 2)[1] elif '/' in secret_id: secret_id = secret_id.rsplit('/', 1)[1] else: secret_id = secret_id container_secret.secret_id = secret_id self.container_secrets.append(container_secret) def _get_db_entity(self, data=None): return self.db_model(parsed_request=data, check_exc=False) def _attach_container_secret(self, container_secrets, container_id, session): if container_secrets: for container_secret in container_secrets: container_secret.container_id = container_id if container_secret.id is None: self.container_secrets.append(container_secret.create( session=session)) else: self.container_secrets.append(container_secret.save( session=session)) def _attach_consumers(self, consumers, container_id, session): if consumers: for consumer in consumers: consumer.container_id = container_id if consumer.id is None: self.consumers.append(consumer.create(session=session)) else: self.consumers.append(consumer.save(session=session)) def create(self, session=None): fields = self.obj_get_changes() super(Container, self).create(session=session) if 'container_secrets' in fields: self._attach_container_secret( fields['container_secrets'], container_id=self.id, session=session) if 'consumers' in fields: self._attach_consumers(fields['consumers'], container_id=self.id, session=session) def save(self, session=None): fields = self.obj_get_changes() super(Container, self).save(session=session) if 'consumers' in fields: self._attach_consumers(fields['consumers'], container_id=self.id, session=session) @classmethod def get_by_create_date(cls, external_project_id, offset_arg=None, limit_arg=None, name_arg=None, suppress_exception=False, session=None): entities_db, offset, limit, total = cls.db_repo.get_by_create_date( external_project_id, offset_arg, limit_arg, name_arg, suppress_exception, session ) entities_obj = [cls()._from_db_object(entity_db) for entity_db in entities_db] return entities_obj, offset, limit, total @classmethod def get_container_by_id(cls, entity_id, suppress_exception=False, session=None): entity_db = cls.db_repo.get_container_by_id(entity_id, suppress_exception, session) return cls()._from_db_object(entity_db) barbican-9.1.0.dev50/barbican/objects/container_consumer_meta.py0000664000175000017500000001304113616500636025064 0ustar sahidsahid00000000000000# Copyright 2018 Fujitsu. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
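# Illustrative usage sketch (values are placeholders; assumes `container` is
# a Container OVO already loaded from the database; not part of the original
# module):
#
#     consumer = ContainerConsumerMetadatum(
#         container_id=container.id,
#         project_id='project-uuid',
#         name='service-x',
#         URL='https://service-x.example/resource/123')
#     # Idempotently registers the consumer; a duplicate is logged and the
#     # soft-deleted row, if any, is revived (see create_or_update_from).
#     ContainerConsumerMetadatum.create_or_update_from(consumer, container)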
from oslo_db import exception as db_exc
from oslo_utils import timeutils
from oslo_versionedobjects import base as object_base

from barbican.common import utils
from barbican.model import models
from barbican.model import repositories as repos
from barbican.objects import base
from barbican.objects import fields

LOG = utils.getLogger(__name__)


@object_base.VersionedObjectRegistry.register
class ContainerConsumerMetadatum(base.BarbicanObject,
                                 base.BarbicanPersistentObject,
                                 object_base.VersionedObjectDictCompat):
    fields = {
        'container_id': fields.StringField(nullable=False),
        'project_id': fields.StringField(nullable=True, default=None),
        'name': fields.StringField(nullable=True, default=None),
        'URL': fields.StringField(nullable=True, default=None),
        'data_hash': fields.StringField(nullable=True, default=None)
    }

    db_model = models.ContainerConsumerMetadatum
    db_repo = repos.get_container_consumer_repository()

    @classmethod
    def get_by_container_id(cls, container_id, offset_arg=None,
                            limit_arg=None, suppress_exception=False,
                            session=None):
        entities_db, offset, limit, total = \
            cls.db_repo.get_by_container_id(
                container_id, offset_arg, limit_arg,
                suppress_exception, session)
        entities = [cls()._from_db_object(entity_db)
                    for entity_db in entities_db]
        return entities, offset, limit, total

    @classmethod
    def get_by_values(cls, container_id, name, URL, suppress_exception=False,
                      show_deleted=False, session=None):
        consumer_db = cls.db_repo.get_by_values(container_id,
                                                name,
                                                URL,
                                                suppress_exception,
                                                show_deleted,
                                                session)
        return cls()._from_db_object(consumer_db)

    @classmethod
    def create_or_update_from_model(cls, new_consumer,
                                    container, session=None):
        """Create or update a consumer entry for a container.

        :param new_consumer: an instance of ContainerConsumerMetadatum model
        :param container: an instance of Container OVO
        :param session: a database session
        :return: None

        It is used while converting from model to OVO. It will be removed
        once the Container resource is implemented as an OVO.
        """
        session = cls.get_session(session=session)
        try:
            container.updated_at = timeutils.utcnow()
            container.save(session=session)
            new_consumer.save(session=session)
        except db_exc.DBDuplicateEntry:
            session.rollback()  # We know consumer already exists.
            # This operation is idempotent, so log this and move on
            LOG.debug("Consumer %s with URL %s already exists for "
                      "container %s, continuing...", new_consumer.name,
                      new_consumer.URL, new_consumer.container_id)
            # Get the existing entry and reuse it by clearing the deleted
            # flags
            existing_consumer = cls.get_by_values(
                new_consumer.container_id, new_consumer.name,
                new_consumer.URL, show_deleted=True)
            existing_consumer.deleted = False
            existing_consumer.deleted_at = None
            # We are not concerned about timing here -- set only, no reads
            existing_consumer.save(session=session)

    @classmethod
    def create_or_update_from(cls, new_consumer, container, session=None):
        """Create or update a consumer entry for a container.

        :param new_consumer: an instance of ContainerConsumerMetadatum OVO
        :param container: an instance of Container OVO
        :param session: a database session
        :return: None
        """
        session = cls.get_session(session=session)
        try:
            container.updated_at = timeutils.utcnow()
            container.consumers.append(new_consumer)
            container.save(session=session)
        except db_exc.DBDuplicateEntry:
            session.rollback()  # We know consumer already exists.
# This operation is idempotent, so log this and move on LOG.debug("Consumer %s with URL %s already exists for " "container %s, continuing...", new_consumer.name, new_consumer.URL, new_consumer.container_id) # Get the existing entry and reuse it by clearing the deleted flags existing_consumer = cls.get_by_values( new_consumer.container_id, new_consumer.name, new_consumer.URL, show_deleted=True) existing_consumer.deleted = False existing_consumer.deleted_at = None # We are not concerned about timing here -- set only, no reads existing_consumer.save(session=session) barbican-9.1.0.dev50/barbican/objects/transport_key.py0000664000175000017500000000511413616500636023067 0ustar sahidsahid00000000000000# Copyright 2018 Fujitsu. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_versionedobjects import base as object_base from barbican.common import exception from barbican import i18n as u from barbican.model import models from barbican.model import repositories as repo from barbican.objects import base from barbican.objects import fields @object_base.VersionedObjectRegistry.register class TransportKey(base.BarbicanObject, base.BarbicanPersistentObject, object_base.VersionedObjectDictCompat): fields = { 'plugin_name': fields.StringField(), 'transport_key': fields.StringField(), 'status': fields.StringField(nullable=True, default=base.States.ACTIVE) } db_model = models.TransportKey db_repo = repo.get_transport_key_repository() def _validate_fields(self, change_fields): msg = u._("Must supply non-None {0} argument for TransportKey entry.") if change_fields.get('plugin_name') is None: raise exception.MissingArgumentError(msg.format("plugin_name")) if change_fields.get('transport_key') is None: raise exception.MissingArgumentError(msg.format("transport_key")) @classmethod def get_by_create_date(cls, plugin_name=None, offset_arg=None, limit_arg=None, suppress_exception=False, session=None): transport_keys_db, offset, limit, total = \ cls.db_repo.get_by_create_date(plugin_name, offset_arg, limit_arg, suppress_exception, session) transport_keys_obj = [cls()._from_db_object(transport_key) for transport_key in transport_keys_db] return transport_keys_obj, offset, limit, total @classmethod def get_latest_transport_key(cls, plugin_name, suppress_exception=False, session=None): transport_key_db = cls.db_repo.get_latest_transport_key( plugin_name, suppress_exception, session) return cls()._from_db_object(transport_key_db) barbican-9.1.0.dev50/barbican/objects/secret_acl.py0000664000175000017500000001420013616500636022263 0ustar sahidsahid00000000000000# Copyright 2018 Fujitsu. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the
# License for the specific language governing permissions and limitations
# under the License.

from oslo_utils import timeutils
from oslo_versionedobjects import base as object_base

from barbican.common import exception
from barbican import i18n as u
from barbican.model import models
from barbican.model import repositories as repos
from barbican.objects import base
from barbican.objects import fields
from barbican.objects import secret_acl_user


@object_base.VersionedObjectRegistry.register
class SecretACL(base.BarbicanObject, base.BarbicanPersistentObject,
                object_base.VersionedObjectDictCompat):
    fields = {
        'secret_id': fields.StringField(),
        'operation': fields.StringField(),
        'project_access': fields.BooleanField(default=True),
        'acl_users': fields.ListOfObjectsField('SecretACLUser',
                                               default=list()),
        'status': fields.StringField(nullable=True,
                                     default=base.States.ACTIVE)
    }

    db_model = models.SecretACL
    db_repo = repos.get_secret_acl_repository()
    synthetic_fields = ['acl_users']

    def _validate_fields(self, change_fields):
        msg = u._("Must supply non-None {0} argument for SecretACL entry.")
        if change_fields.get('secret_id') is None:
            raise exception.MissingArgumentError(msg.format("secret_id"))
        if change_fields.get('operation') is None:
            raise exception.MissingArgumentError(msg.format("operation"))

    def _get_db_entity(self, user_ids=None):
        return self.db_model(user_ids=user_ids, check_exc=False)

    def create(self, session=None, user_ids=None):
        change_fields = self._get_changed_persistent_fields()
        self._validate_fields(change_fields)
        db_entity = self._get_db_entity(user_ids=user_ids)
        db_entity.update(change_fields)
        db_entity = self.db_repo.create_from(db_entity, session=session)
        self._from_db_object(db_entity)

    def delete(self, session):
        entity_id = self.id
        self.db_repo.delete_entity_by_id(
            entity_id=entity_id, external_project_id=None, session=session)

    @classmethod
    def get_by_secret_id(cls, secret_id, session=None):
        secret_acls_db = cls.db_repo.get_by_secret_id(
            secret_id, session=session)
        secret_acls_obj = [cls()._from_db_object(secret_acl_db)
                           for secret_acl_db in secret_acls_db]
        return secret_acls_obj

    @classmethod
    def create_or_replace_from_model(cls, secret, secret_acl,
                                     user_ids=None, session=None):
        """Create or replace Secret and SecretACL

        :param secret: an instance of Secret model
        :param secret_acl: an instance of SecretACL object
        :param user_ids: ids of the users that should have access to the
                         secret
        :param session: a database session

        It is used while converting from model to OVO. It will be removed
        once the Secret resource is implemented as an OVO.
""" secret.updated_at = timeutils.utcnow() secret_acl.updated_at = timeutils.utcnow() secret.save(session=session) if secret_acl.id: secret_acl.save(session=session) else: secret_acl.create(session=session) cls._create_or_replace_acl_users(secret_acl=secret_acl, user_ids=user_ids, session=session) @classmethod def _create_or_replace_acl_users(cls, secret_acl, user_ids, session=None): """Create or replace acl_user :param secret_acl: an instance of OVO :param user_ids: id of users :param session: a session to connect with database """ if user_ids is None: return user_ids = set(user_ids) now = timeutils.utcnow() secret_acl.updated_at = now for acl_user in secret_acl.acl_users: if acl_user.user_id in user_ids: # input user_id already exists acl_user.updated_at = now acl_user.save(session=session) user_ids.remove(acl_user.user_id) else: acl_user.delete(session=session) for user_id in user_ids: acl_user = secret_acl_user.SecretACLUser(acl_id=secret_acl.id, user_id=user_id) acl_user.create(session=session) if secret_acl.id: secret_acl.save(session=session) else: secret_acl.create(session=session) @classmethod def create_or_replace_from(cls, secret, secret_acl, user_ids=None): # TODO(namnh): # I will update this function after Secret resource is implemented. pass @classmethod def delete_acls_for_secret_model(cls, secret, session=None): """Delete acl in Secret :param secret: an instance of Secret model :param session: a session to connect with database Used during converting Model to OVO. It will be removed in the near future. """ cls.db_repo.delete_acls_for_secret(secret, session) @classmethod def delete_acls_for_secret(cls, secret, session=None): """Delete acl in a secret. :param secret: an instance of Secret OVO :param session: a session to connect with database This function will be using after Secret resource is implemented OVO. """ session = cls.get_session(session=session) for entity in secret.secret_acls: entity.delete(session=session) @classmethod def get_count(cls, secret_id, session=None): return cls.db_repo.get_count(secret_id, session=session) barbican-9.1.0.dev50/barbican/objects/order.py0000664000175000017500000000751413616500636021304 0ustar sahidsahid00000000000000# Copyright 2018 Fujitsu. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_versionedobjects import base as object_base

from barbican.model import models
from barbican.model import repositories as repos
from barbican.objects import base
from barbican.objects import fields


class OrderType(object):
    KEY = 'key'
    ASYMMETRIC = 'asymmetric'
    CERTIFICATE = 'certificate'

    @classmethod
    def is_valid(cls, order_type):
        """Tests if an order type is a valid one."""
        # Compare against the declared type values; a membership test on
        # cls.__dict__ would match attribute names ('KEY'), never 'key'.
        return order_type in (cls.KEY, cls.ASYMMETRIC, cls.CERTIFICATE)


class OrderStatus(object):
    def __init__(self, id, message):
        self.id = id
        self.message = message


@object_base.VersionedObjectRegistry.register
class Order(base.BarbicanObject, base.BarbicanPersistentObject,
            object_base.VersionedObjectDictCompat):
    """This class represents Order object"""
    fields = {
        'type': fields.StringField(default='key'),
        'project_id': fields.StringField(),
        'error_status_code': fields.StringField(nullable=True, default=None),
        'error_reason': fields.StringField(nullable=True, default=None),
        'meta': fields.JsonField(nullable=True, default=None),
        'secret_id': fields.StringField(nullable=True, default=None),
        'container_id': fields.StringField(nullable=True, default=None),
        'sub_status': fields.StringField(nullable=True, default=None),
        'sub_status_message': fields.StringField(nullable=True, default=None),
        'creator_id': fields.StringField(nullable=True, default=None),
        'order_plugin_metadata': fields.DictOfObjectsField(
            'OrderPluginMetadatum', nullable=True, default=dict()),
        'order_barbican_metadata': fields.DictOfObjectsField(
            'OrderBarbicanMetadatum', nullable=True, default=dict())
    }

    db_model = models.Order
    db_repo = repos.get_order_repository()
    synthetic_fields = ['order_plugin_metadata', 'order_barbican_metadata']

    @classmethod
    def get_by_create_date(cls, external_project_id, offset_arg=None,
                           limit_arg=None, meta_arg=None,
                           suppress_exception=False, session=None):
        """Returns a list of orders.

        The list is ordered by the date they were created at and paged
        based on the offset and limit fields.

        :param external_project_id: The keystone id for the project.
        :param offset_arg: The entity number where the query result should
                           start.
        :param limit_arg: The maximum amount of entities in the result set.
        :param meta_arg: Optional meta field used to filter results.
        :param suppress_exception: Whether NoResultFound exceptions should be
                                   suppressed.
        :param session: SQLAlchemy session object.
        :returns: Tuple consisting of (list_of_entities, offset, limit,
                  total).
        """
        entities_db, offset, limit, total = cls.db_repo.get_by_create_date(
            external_project_id,
            offset_arg=offset_arg,
            limit_arg=limit_arg,
            meta_arg=meta_arg,
            suppress_exception=suppress_exception,
            session=session
        )
        entities = [cls()._from_db_object(entity_db)
                    for entity_db in entities_db]
        return entities, offset, limit, total
barbican-9.1.0.dev50/barbican/objects/secret_acl_user.py0000664000175000017500000000336513616500636023327 0ustar sahidsahid00000000000000# Copyright 2018 Fujitsu.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License. 
from oslo_versionedobjects import base as object_base from barbican.common import exception from barbican import i18n as u from barbican.model import models from barbican.model import repositories as repos from barbican.objects import base from barbican.objects import fields @object_base.VersionedObjectRegistry.register class SecretACLUser(base.BarbicanObject, base.BarbicanPersistentObject, object_base.VersionedObjectDictCompat): fields = { 'acl_id': fields.StringField(), 'user_id': fields.StringField(), 'status': fields.StringField(nullable=True, default=base.States.ACTIVE) } db_model = models.SecretACLUser db_repo = repos.get_secret_acl_user_repository() def _validate_fields(self, change_fields): if change_fields.get('user_id') is None: msg = u._( "Must supply non-None {0} argument for SecretACLUser entry.") raise exception.MissingArgumentError(msg.format("user_id")) def delete(self, session): entity_id = self.id self.db_repo.delete_entity_by_id( entity_id=entity_id, external_project_id=None, session=session) barbican-9.1.0.dev50/barbican/objects/container_secret.py0000664000175000017500000000311113616500636023505 0ustar sahidsahid00000000000000# Copyright 2018 Fujitsu. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_versionedobjects import base as object_base from barbican.model import models from barbican.model import repositories as repos from barbican.objects import base from barbican.objects import fields @object_base.VersionedObjectRegistry.register class ContainerSecret(base.BarbicanObject, base.BarbicanPersistentObject, object_base.VersionedObjectDictCompat): fields = { 'name': fields.StringField(nullable=True, default=None), 'container_id': fields.StringField(), 'secret_id': fields.StringField(), } db_model = models.ContainerSecret db_repo = repos.get_container_secret_repository() def create(self, session=None): change_fields = self._get_changed_persistent_fields() self._validate_fields(change_fields) db_entity = self._get_db_entity() db_entity.update(change_fields) db_entity = self.db_repo.create_from(db_entity, session=session) return self._from_db_object(db_entity) barbican-9.1.0.dev50/barbican/objects/order_plugin_metadatum.py0000664000175000017500000000242713616500636024721 0ustar sahidsahid00000000000000# Copyright 2018 Fujitsu. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
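# Illustrative usage sketch (key/value pair is a placeholder; not part of the
# original module). Each metadatum is one key/value pair tied to an order,
# persisted like any other OVO here:
#
#     metadatum = OrderPluginMetadatum(order_id=order.id,
#                                      key='plugin_name',
#                                      value='p11_crypto')
#     metadatum.create(session=session)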
from oslo_versionedobjects import base as object_base from barbican.model import models from barbican.model import repositories as repos from barbican.objects import base from barbican.objects import fields @object_base.VersionedObjectRegistry.register class OrderPluginMetadatum(base.BarbicanObject, base.BarbicanPersistentObject, object_base.VersionedObjectDictCompat): """This class represents OrderPluginMetadatum object""" fields = { 'order_id': fields.StringField(), 'key': fields.StringField(), 'value': fields.StringField() } db_model = models.OrderPluginMetadatum db_repo = repos.get_order_plugin_meta_repository() barbican-9.1.0.dev50/barbican/objects/order_barbican_metadatum.py0000664000175000017500000000247613616500636025170 0ustar sahidsahid00000000000000# Copyright 2018 Fujitsu. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_versionedobjects import base as object_base from barbican.model import models from barbican.model import repositories as repos from barbican.objects import base from barbican.objects import fields @object_base.VersionedObjectRegistry.register class OrderBarbicanMetadatum(base.BarbicanObject, base.BarbicanPersistentObject, object_base.VersionedObjectDictCompat): """This class represents OrderBarbicanMetadatum object""" fields = { 'order_id': fields.StringField(), 'key': fields.StringField(), 'value': fields.StringField() } db_model = models.OrderBarbicanMetadatum db_repo = repos.get_order_barbican_meta_repository() barbican-9.1.0.dev50/barbican/objects/kekdatum.py0000664000175000017500000000373513616500636021777 0ustar sahidsahid00000000000000# Copyright 2018 Fujitsu. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
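# Illustrative usage sketch (assumes `project` is a Project OVO and
# 'simple_crypto' is the configured crypto plugin, as listed in setup.cfg;
# not part of the original module):
#
#     kek = KEKDatum.find_or_create_kek_datum(project, 'simple_crypto')
#     # Returns the project's KEK entry for that plugin, creating a new
#     # datum via the repository when none exists yet.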
from oslo_versionedobjects import base as object_base from barbican.model import models from barbican.model import repositories as repo from barbican.objects import base from barbican.objects import fields @object_base.VersionedObjectRegistry.register class KEKDatum(base.BarbicanObject, base.BarbicanPersistentObject, object_base.VersionedObjectDictCompat): fields = { 'plugin_name': fields.StringField(nullable=True, default=None), 'kek_label': fields.StringField(nullable=True, default=None), 'project_id': fields.StringField(nullable=True, default=None), 'active': fields.BooleanField(default=True), 'bind_completed': fields.BooleanField(default=False), 'algorithm': fields.StringField(nullable=True, default=None), 'bit_length': fields.IntegerField(nullable=True, default=None), 'mode': fields.StringField(nullable=True, default=None), 'plugin_meta': fields.StringField(nullable=True, default=None) } db_model = models.KEKDatum db_repo = repo.get_kek_datum_repository() @classmethod def find_or_create_kek_datum(cls, project, plugin_name, suppress_exception=False, session=None): kek_datum_db = cls.db_repo.find_or_create_kek_datum( project, plugin_name, suppress_exception, session) return cls()._from_db_object(kek_datum_db) barbican-9.1.0.dev50/barbican/objects/project.py0000664000175000017500000000270513616500636021634 0ustar sahidsahid00000000000000# Copyright 2018 Fujitsu. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_versionedobjects import base as object_base from barbican.model import models from barbican.model import repositories as repo from barbican.objects import base from barbican.objects import fields @object_base.VersionedObjectRegistry.register class Project(base.BarbicanObject, base.BarbicanPersistentObject, object_base.VersionedObjectDictCompat): fields = { 'external_id': fields.StringField(nullable=True, default=None), } db_model = models.Project db_repo = repo.get_project_repository() @classmethod def find_by_external_project_id(cls, external_project_id, suppress_exception=False, session=None): project_db = cls.db_repo.find_by_external_project_id( external_project_id, suppress_exception, session) return cls()._from_db_object(project_db) barbican-9.1.0.dev50/barbican/objects/container_acl.py0000664000175000017500000001533313616500636022770 0ustar sahidsahid00000000000000# Copyright 2018 Fujitsu. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
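# Illustrative usage sketch (operation value and user ids are placeholders;
# not part of the original module):
#
#     acl = ContainerACL(container_id=container.id,
#                        operation='read',
#                        project_access=False)
#     # Persists the container and the ACL together, then reconciles the
#     # per-user entries against the given list (see create_or_replace_from
#     # below).
#     ContainerACL.create_or_replace_from(container, acl,
#                                         user_ids=['user-a', 'user-b'])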
from oslo_utils import timeutils
from oslo_versionedobjects import base as object_base

from barbican.common import exception
from barbican import i18n as u
from barbican.model import models
from barbican.model import repositories as repo
from barbican.objects import base
from barbican.objects import container_acl_user
from barbican.objects import fields


@object_base.VersionedObjectRegistry.register
class ContainerACL(base.BarbicanObject, base.BarbicanPersistentObject,
                   object_base.VersionedObjectDictCompat):
    fields = {
        'container_id': fields.StringField(),
        'operation': fields.StringField(),
        'project_access': fields.BooleanField(default=True),
        'acl_users': fields.ListOfObjectsField('ContainerACLUser',
                                               default=list()),
        'status': fields.StringField(nullable=True,
                                     default=base.States.ACTIVE)
    }

    db_model = models.ContainerACL
    db_repo = repo.get_container_acl_repository()
    synthetic_fields = ['acl_users']

    def _validate_fields(self, change_fields):
        msg = u._("Must supply non-None {0} argument for ContainerACL entry.")
        if change_fields.get('container_id') is None:
            raise exception.MissingArgumentError(msg.format("container_id"))
        if change_fields.get('operation') is None:
            raise exception.MissingArgumentError(msg.format("operation"))

    def _get_db_entity(self, user_ids=None):
        return self.db_model(user_ids=user_ids, check_exc=False)

    def create(self, session=None, user_ids=None):
        change_fields = self._get_changed_persistent_fields()
        self._validate_fields(change_fields)
        db_entity = self._get_db_entity(user_ids=user_ids)
        db_entity.update(change_fields)
        db_entity = self.db_repo.create_from(db_entity, session=session)
        self._from_db_object(db_entity)

    def delete(self, session):
        entity_id = self.id
        self.db_repo.delete_entity_by_id(
            entity_id=entity_id, external_project_id=None, session=session)

    @classmethod
    def get_by_container_id(cls, container_id, session=None):
        entities_db = cls.db_repo.get_by_container_id(container_id, session)
        entities = [cls()._from_db_object(entity_db)
                    for entity_db in entities_db]
        return entities

    @classmethod
    def create_or_replace_from(cls, container, container_acl,
                               user_ids=None, session=None):
        """Create or replace Container and ContainerACL

        :param container: an instance of Container object
        :param container_acl: an instance of ContainerACL object
        :param user_ids: ids of the users that should have access to the
                         container
        :param session: a database session
        """
        session = cls.get_session(session)
        container.updated_at = timeutils.utcnow()
        container.save(session=session)
        container_acl.updated_at = timeutils.utcnow()
        if container_acl.id is None:
            container_acl.create(session=session, user_ids=user_ids)
        else:
            container_acl.save(session=session)
        cls._create_or_replace_acl_users(container_acl=container_acl,
                                         user_ids=user_ids,
                                         session=session)

    @classmethod
    def create_or_replace_from_model(cls, container, container_acl,
                                     user_ids=None, session=None):
        """Create or replace Container and ContainerACL

        :param container: an instance of Container model
        :param container_acl: an instance of ContainerACL object
        :param user_ids: ids of the users that should have access to the
                         container
        :param session: a database session

        It is used while converting from model to OVO. It will be removed
        once the Container resource is implemented as an OVO.
""" session = cls.get_session(session) container.updated_at = timeutils.utcnow() container.save(session=session) now = timeutils.utcnow() container_acl.updated_at = now if container_acl.id is None: container_acl.create(session=session, user_ids=user_ids) else: container_acl.save(session=session) cls._create_or_replace_acl_users(container_acl=container_acl, user_ids=user_ids, session=session) @classmethod def _create_or_replace_acl_users(cls, container_acl, user_ids, session=None): if user_ids is None: return user_ids = set(user_ids) now = timeutils.utcnow() session = session or cls.get_session(session) container_acl.updated_at = now for acl_user in container_acl.acl_users: if acl_user.user_id in user_ids: # input user_id already exists acl_user.updated_at = now acl_user.save(session=session) user_ids.remove(acl_user.user_id) else: acl_user.delete(session=session) for user_id in user_ids: acl_user = container_acl_user.ContainerACLUser( acl_id=container_acl.id, user_id=user_id) acl_user.create(session=session) if container_acl.id: container_acl.save(session=session) else: container_acl.create(session=session) @classmethod def get_count(cls, container_id, session=None): query = cls.db_repo.get_count(container_id, session) return query @classmethod def delete_acls_for_container(cls, container, session=None): # TODO(namnh) # After Container resource is implemented, This function # will be updated source code being used. session = cls.get_session(session=session) for entity in container.container_acls: entity.delete(session=session) @classmethod def delete_acls_for_container_model(cls, container, session=None): """Delete ACLs in Container Used during converting Model to OVO, it will be removed in near future. :param container: instance of Container model :param session: connection to database """ cls.db_repo.delete_acls_for_container(container, session) barbican-9.1.0.dev50/barbican/objects/encrypted_datum.py0000664000175000017500000000320613616500636023352 0ustar sahidsahid00000000000000# Copyright 2018 Fujitsu. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_versionedobjects import base as object_base from barbican.model import models from barbican.model import repositories as repo from barbican.objects import base from barbican.objects import fields @object_base.VersionedObjectRegistry.register class EncryptedDatum(base.BarbicanObject, base.BarbicanPersistentObject, object_base.VersionedObjectDictCompat): fields = { 'content_type': fields.StringField(nullable=True, default=None), 'secret_id': fields.StringField(), 'kek_id': fields.StringField(), 'cypher_text': fields.StringField(nullable=True, default=None), 'kek_meta_extended': fields.StringField(nullable=True, default=None), 'kek_meta_project': fields.ObjectField('KEKDatum', nullable=True, default=None), 'status': fields.StringField(nullable=True, default=base.States.ACTIVE) } db_model = models.EncryptedDatum db_repo = repo.get_encrypted_datum_repository() synthetic_fields = ['kek_meta_project'] barbican-9.1.0.dev50/barbican/objects/secret_consumer_metadatum.py0000664000175000017500000001371013616500636025425 0ustar sahidsahid00000000000000# Copyright (c) 2019 Red Hat, inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_db import exception as db_exc from oslo_utils import timeutils from oslo_versionedobjects import base as object_base from barbican.common import utils from barbican.model import models from barbican.model import repositories as repos from barbican.objects import base from barbican.objects import fields LOG = utils.getLogger(__name__) @object_base.VersionedObjectRegistry.register class SecretConsumerMetadatum(base.BarbicanObject, base.BarbicanPersistentObject, object_base.VersionedObjectDictCompat): fields = { 'secret_id': fields.StringField(nullable=False), 'project_id': fields.StringField(nullable=False, default=None), 'service': fields.StringField(nullable=True, default=None), 'resource_type': fields.StringField(nullable=True, default=None), 'resource_id': fields.StringField(nullable=True, default=None), } db_model = models.SecretConsumerMetadatum db_repo = repos.get_secret_consumer_repository() @classmethod def get_by_secret_id(cls, secret_id, offset_arg=None, limit_arg=None, suppress_exception=False, session=None): entities_db, offset, limit, total = \ cls.db_repo.get_by_secret_id( secret_id, offset_arg, limit_arg, suppress_exception, session) entities = [cls()._from_db_object(entity_db) for entity_db in entities_db] return entities, offset, limit, total @classmethod def get_by_resource_id(cls, resource_id, offset_arg=None, limit_arg=None, suppress_exception=False, session=None): entities_db, offset, limit, total = \ cls.db_repo.get_by_resource_id( resource_id, offset_arg, limit_arg, suppress_exception, session) entities = [cls()._from_db_object(entity_db) for entity_db in entities_db] return entities, offset, limit, total @classmethod def get_by_values(cls, secret_id, resource_id, suppress_exception=False, show_deleted=False, session=None): consumer_db = cls.db_repo.get_by_values(secret_id, resource_id, suppress_exception, show_deleted, session) return 
cls()._from_db_object(consumer_db) @classmethod def create_or_update_from_model(cls, new_consumer, secret, session=None): """Create or update a secret consumer :param new_consumer: an instance of the SecretConsumerMetadatum model :param secret: an instance of the Secret OVO :param session: a database session :return: None Used while converting from the model to an OVO; it will be removed once the Secret resource is implemented as an OVO. """ session = cls.get_session(session=session) try: secret.updated_at = timeutils.utcnow() secret.save(session=session) new_consumer.save(session=session) except db_exc.DBDuplicateEntry: session.rollback() # We know the consumer already exists. # This operation is idempotent, so log this and move on LOG.debug( "Consumer with resource_id %s already exists for secret %s...", new_consumer.resource_id, new_consumer.secret_id ) # Get the existing entry and reuse it by clearing the deleted flags existing_consumer = cls.get_by_values( new_consumer.secret_id, new_consumer.resource_id, show_deleted=True ) existing_consumer.deleted = False existing_consumer.deleted_at = None # We are not concerned about timing here -- set only, no reads existing_consumer.save(session=session) @classmethod def create_or_update_from(cls, new_consumer, secret, session=None): """Create or update a secret consumer :param new_consumer: an instance of the SecretConsumerMetadatum OVO :param secret: an instance of the Secret OVO :param session: a database session :return: None """ session = cls.get_session(session=session) try: secret.updated_at = timeutils.utcnow() secret.consumers.append(new_consumer) secret.save(session=session) except db_exc.DBDuplicateEntry: session.rollback() # We know the consumer already exists. # This operation is idempotent, so log this and move on LOG.debug( "Consumer with resource_id %s already exists for secret %s...", new_consumer.resource_id, new_consumer.secret_id ) # Get the existing entry and reuse it by clearing the deleted flags existing_consumer = cls.get_by_values( new_consumer.secret_id, new_consumer.resource_id, show_deleted=True ) existing_consumer.deleted = False existing_consumer.deleted_at = None # We are not concerned about timing here -- set only, no reads existing_consumer.save(session=session) barbican-9.1.0.dev50/barbican/objects/secret_user_metadatum.py0000664000175000017500000000340513616500636024550 0ustar sahidsahid00000000000000# Copyright 2018 Fujitsu. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License.
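# ---------------------------------------------------------------------------
# Illustrative sketch (not part of Barbican): the idempotent "create, or
# revive on duplicate" pattern used by SecretConsumerMetadatum's
# create_or_update_from() above, shown against a generic SQLAlchemy session.
# `session`, `consumer` and `find_existing` are hypothetical placeholders;
# DBDuplicateEntry is the real oslo.db exception raised when a unique
# constraint is violated.
#
#     from oslo_db import exception as db_exc
#
#     def register_consumer(session, consumer, find_existing):
#         try:
#             session.add(consumer)
#             session.flush()
#         except db_exc.DBDuplicateEntry:
#             session.rollback()
#             # The row already exists, so clear its soft-delete flags
#             # instead of failing; this keeps the call idempotent.
#             existing = find_existing(show_deleted=True)
#             existing.deleted = False
#             existing.deleted_at = None
#             session.add(existing)
#             session.flush()
# ---------------------------------------------------------------------------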
from oslo_versionedobjects import base as object_base from barbican.model import models from barbican.model import repositories as repo from barbican.objects import base from barbican.objects import fields @object_base.VersionedObjectRegistry.register class SecretUserMetadatum(base.BarbicanObject, base.BarbicanPersistentObject, object_base.VersionedObjectDictCompat): fields = { 'key': fields.StringField(), 'value': fields.StringField(), 'secret_id': fields.StringField(), } db_model = models.SecretStoreMetadatum db_repo = repo.get_secret_user_meta_repository() @classmethod def create_replace_user_metadata(cls, secret_id, metadata): cls.db_repo.create_replace_user_metadata(secret_id, metadata) @classmethod def get_metadata_for_secret(cls, secret_id): return cls.db_repo.get_metadata_for_secret(secret_id) @classmethod def create_replace_user_metadatum(cls, secret_id, key, value): cls.db_repo.create_replace_user_metadatum(secret_id, key, value) @classmethod def delete_metadatum(cls, secret_id, key): cls.db_repo.delete_metadatum(secret_id, key) barbican-9.1.0.dev50/barbican/objects/secret.py0000664000175000017500000000773513616500636021463 0ustar sahidsahid00000000000000# Copyright 2018 Fujitsu. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_versionedobjects import base as object_base from barbican.common import utils from barbican.model import models from barbican.model import repositories as repo from barbican.objects import base from barbican.objects import fields @object_base.VersionedObjectRegistry.register class Secret(base.BarbicanObject, base.BarbicanPersistentObject, object_base.VersionedObjectDictCompat): """This class represents Secret object""" fields = { 'name': fields.StringField(nullable=True), 'secret_type': fields.StringField(nullable=True, default=utils.SECRET_TYPE_OPAQUE), 'expiration': fields.DateTimeField(nullable=True, default=None), 'algorithm': fields.StringField(nullable=True, default=None), 'bit_length': fields.IntegerField(nullable=True, default=None), 'mode': fields.StringField(nullable=True, default=None), 'creator_id': fields.StringField(nullable=True, default=None), 'project_id': fields.StringField(nullable=True, default=None), 'encrypted_data': fields.ListOfObjectsField('EncryptedDatum', default=list(), nullable=True), 'secret_acls': fields.ListOfObjectsField('SecretACL', default=list(), nullable=True), 'secret_store_metadata': fields.DictOfObjectsField('SecretStoreMetadatum', default=dict(), nullable=True), 'secret_user_metadata': fields.DictOfObjectsField( 'SecretUserMetadatum', default=dict(), nullable=True), 'consumers': fields.ListOfObjectsField('SecretConsumerMetadatum', default=list(), nullable=True), 'status': fields.StringField(nullable=True, default=base.States.ACTIVE) } db_model = models.Secret db_repo = repo.get_secret_repository() synthetic_fields = ['encrypted_data', 'secret_acls', 'secret_store_metadata', 'secret_user_metadata', 'consumers'] @classmethod def get_secret_list(cls, external_project_id, offset_arg=None, limit_arg=None, name=None, alg=None, 
mode=None, bits=0, secret_type=None, suppress_exception=False, session=None, acl_only=None, user_id=None, created=None, updated=None, expiration=None, sort=None): secrets_db, offset, limit, total = cls.db_repo.get_secret_list( external_project_id, offset_arg, limit_arg, name, alg, mode, bits, secret_type, suppress_exception, session, acl_only, user_id, created, updated, expiration, sort) secrets_object = [cls()._from_db_object(secret_db) for secret_db in secrets_db] return secrets_object, offset, limit, total @classmethod def get_secret_by_id(cls, entity_id, suppress_exception=False, session=None): secret_db = cls.db_repo.get_secret_by_id( entity_id, suppress_exception, session) return cls()._from_db_object(secret_db) barbican-9.1.0.dev50/barbican/objects/base.py0000664000175000017500000001673613616500636021105 0ustar sahidsahid00000000000000# Copyright 2018 Fujitsu. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Barbican common internal objects model""" from oslo_versionedobjects import base as object_base from barbican.model import repositories as repos from barbican.objects import fields class States(object): PENDING = 'PENDING' ACTIVE = 'ACTIVE' ERROR = 'ERROR' @classmethod def is_valid(cls, state_to_test): """Tests if a state is a valid one.""" return state_to_test in cls.__dict__ class BarbicanPersistentObject(object): fields = { 'id': fields.StringField(nullable=True, default=None), 'created_at': fields.DateTimeField(nullable=True, tzinfo_aware=False), 'updated_at': fields.DateTimeField(nullable=True, tzinfo_aware=False), 'deleted_at': fields.DateTimeField(nullable=True, tzinfo_aware=False), 'deleted': fields.BooleanField(nullable=True), 'status': fields.StringField(nullable=True, default=States.PENDING) } class BarbicanObject(object_base.VersionedObject): # Version 1.0: Initial version VERSION = '1.0' OBJ_PROJECT_NAMESPACE = 'barbican' synthetic_fields = [] # NOTE(namnh): The db_model and db_repo class variables must be overridden # by sub-classes. # For example, the Secret object has to have "db_model = models.Secret" # and "db_repo = repo.get_secret_repository()". db_model = None db_repo = repos.BaseRepo() def __init__(self, context=None, **kwargs): super(BarbicanObject, self).__init__(context=context, **kwargs) self.dict_fields = None self.obj_set_defaults() def set_attribute(self, name_attr, value_attr=None): setattr(self, name_attr, value_attr) def _get_db_entity(self): return self.db_model(check_exc=False) def _get_changed_persistent_fields(self): change_fields = self.obj_get_changes() for field in self.synthetic_fields: if field in change_fields: del change_fields[field] return change_fields def _validate_fields(self, change_fields): """Validate fields before creating a model Verifies fields before a model is saved to the database. Sub-classes should override this method when they need validation.
""" pass def _from_db_object(self, db_entity): if db_entity is None: return None for field in self.fields: if field not in self.synthetic_fields: self.set_attribute(field, db_entity[field]) self.load_synthetic_db_fields(db_entity=db_entity) self.dict_fields = db_entity.to_dict_fields() self.obj_reset_changes() return self def to_dict_fields(self): return self.dict_fields def register_value(self, data=None, **kwargs): data = data or dict() data.update(kwargs) for key, value in data.items(): setattr(self, key, value) @classmethod def is_synthetic(cls, field): return field in cls.synthetic_fields @classmethod def load_object(cls, db_entity): obj = cls() obj._from_db_object(db_entity=db_entity) return obj def load_synthetic_db_fields(self, db_entity): """Load synthetic database field. :param db_entity: Database model :return: None """ for field in self.synthetic_fields: objclasses = object_base.VersionedObjectRegistry.obj_classes( ).get(self.fields[field].objname) objclass = objclasses[0] synth_db_objs = db_entity.get(field, None) # NOTE(namnh): synth_db_objs can be list, dict, empty list if isinstance(self.fields[field], fields.DictOfObjectsField): dict_entity_object = {key: objclass.load_object(value) for key, value in synth_db_objs.items()} setattr(self, field, dict_entity_object) elif isinstance(self.fields[field], fields.ListOfObjectsField): entities_object = [objclass.load_object(entity) for entity in synth_db_objs] setattr(self, field, entities_object) else: # At this moment, this field is an ObjectField. entity_object = objclass.load_object(synth_db_objs) setattr(self, field, entity_object) self.obj_reset_changes([fields]) def create(self, session=None): change_fields = self._get_changed_persistent_fields() self._validate_fields(change_fields) db_entity = self._get_db_entity() db_entity.update(change_fields) db_entity = self.db_repo.create_from(db_entity, session=session) self._from_db_object(db_entity) def save(self, session=None): """To update new values to a row in database.""" change_fields = self._get_changed_persistent_fields() self.db_repo.update_from(self.db_model, self.id, change_fields, session=session) self.obj_reset_changes() def delete(self, session): raise NotImplementedError() @classmethod def get(cls, entity_id, external_project_id=None, force_show_deleted=False, suppress_exception=False, session=None): """Get an entity or raise if it does not exist""" db_entity = cls.db_repo.get( entity_id, external_project_id=external_project_id, force_show_deleted=force_show_deleted, suppress_exception=suppress_exception, session=session) if db_entity: return cls()._from_db_object(db_entity) else: return None @classmethod def get_session(cls, session=None): return session or repos.get_session() @classmethod def delete_entity_by_id(cls, entity_id, external_project_id, session=None): cls.db_repo.delete_entity_by_id( entity_id, external_project_id, session=session) @classmethod def get_project_entities(cls, project_id, session=None): """Gets entities associated with a given project.""" entities_db = cls.db_repo.get_project_entities(project_id, session=session) entities_object = [cls()._from_db_object(entity_db) for entity_db in entities_db] if entities_db else [] return entities_object @classmethod def get_count(cls, project_id, session=None): """Gets count of entities associated with a given project""" return cls.db_repo.get_count(project_id, session=session) @classmethod def delete_project_entities(cls, project_id, suppress_exception=False, session=None): """Deletes entities for a 
given project.""" cls.db_repo.delete_project_entities( project_id, suppress_exception=suppress_exception, session=session) barbican-9.1.0.dev50/barbican/objects/fields.py0000664000175000017500000000667613616500636021447 0ustar sahidsahid00000000000000# Copyright 2018 Fujitsu. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_serialization import jsonutils as json from oslo_versionedobjects import fields import six # Import field errors from oslo.versionedobjects KeyTypeError = fields.KeyTypeError ElementTypeError = fields.ElementTypeError # Import fields from oslo.versionedobjects BooleanField = fields.BooleanField UnspecifiedDefault = fields.UnspecifiedDefault IntegerField = fields.IntegerField NonNegativeIntegerField = fields.NonNegativeIntegerField UUIDField = fields.UUIDField FloatField = fields.FloatField NonNegativeFloatField = fields.NonNegativeFloatField StringField = fields.StringField SensitiveStringField = fields.SensitiveStringField EnumField = fields.EnumField DateTimeField = fields.DateTimeField DictOfStringsField = fields.DictOfStringsField DictOfNullableStringsField = fields.DictOfNullableStringsField DictOfIntegersField = fields.DictOfIntegersField ListOfStringsField = fields.ListOfStringsField SetOfIntegersField = fields.SetOfIntegersField ListOfSetsOfIntegersField = fields.ListOfSetsOfIntegersField ListOfDictOfNullableStringsField = fields.ListOfDictOfNullableStringsField DictProxyField = fields.DictProxyField ObjectField = fields.ObjectField ListOfObjectsField = fields.ListOfObjectsField VersionPredicateField = fields.VersionPredicateField FlexibleBooleanField = fields.FlexibleBooleanField DictOfListOfStringsField = fields.DictOfListOfStringsField IPAddressField = fields.IPAddressField IPV4AddressField = fields.IPV4AddressField IPV6AddressField = fields.IPV6AddressField IPV4AndV6AddressField = fields.IPV4AndV6AddressField IPNetworkField = fields.IPNetworkField IPV4NetworkField = fields.IPV4NetworkField IPV6NetworkField = fields.IPV6NetworkField AutoTypedField = fields.AutoTypedField BaseEnumField = fields.BaseEnumField MACAddressField = fields.MACAddressField ListOfIntegersField = fields.ListOfIntegersField PCIAddressField = fields.PCIAddressField Enum = fields.Enum Field = fields.Field FieldType = fields.FieldType Set = fields.Set Dict = fields.Dict List = fields.List Object = fields.Object IPAddress = fields.IPAddress IPV4Address = fields.IPV4Address IPV6Address = fields.IPV6Address IPNetwork = fields.IPNetwork IPV4Network = fields.IPV4Network IPV6Network = fields.IPV6Network class Json(FieldType): def coerce(self, obj, attr, value): if isinstance(value, six.string_types): loaded = json.loads(value) return loaded return value def from_primitive(self, obj, attr, value): return self.coerce(obj, attr, value) def to_primitive(self, obj, attr, value): return json.dumps(value) class DictOfObjectsField(AutoTypedField): def __init__(self, objtype, subclasses=False, **kwargs): self.AUTO_TYPE = Dict(Object(objtype, subclasses)) self.objname = objtype super(DictOfObjectsField, 
self).__init__(**kwargs) class JsonField(AutoTypedField): AUTO_TYPE = Json() barbican-9.1.0.dev50/barbican/objects/__init__.py0000664000175000017500000000571413616500636021730 0ustar sahidsahid00000000000000# Copyright 2018 Fujitsu. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from barbican.objects import base from barbican.objects import container from barbican.objects import container_acl from barbican.objects import container_consumer_meta from barbican.objects import container_secret from barbican.objects import encrypted_datum from barbican.objects import kekdatum from barbican.objects import order from barbican.objects import order_barbican_metadatum from barbican.objects import order_plugin_metadatum from barbican.objects import order_retry_task from barbican.objects import project from barbican.objects import project_quotas from barbican.objects import project_secret_store from barbican.objects import secret from barbican.objects import secret_acl from barbican.objects import secret_consumer_metadatum from barbican.objects import secret_store_metadatum from barbican.objects import secret_stores from barbican.objects import secret_user_metadatum from barbican.objects import transport_key States = base.States BarbicanObject = base.BarbicanObject Container = container.Container ContainerACL = container_acl.ContainerACL ContainerConsumerMetadatum = container_consumer_meta.ContainerConsumerMetadatum ContainerSecret = container_secret.ContainerSecret EncryptedDatum = encrypted_datum.EncryptedDatum Order = order.Order OrderBarbicanMetadatum = order_barbican_metadatum.OrderBarbicanMetadatum OrderPluginMetadatum = order_plugin_metadatum.OrderPluginMetadatum OrderRetryTask = order_retry_task.OrderRetryTask Project = project.Project ProjectQuotas = project_quotas.ProjectQuotas ProjectSecretStore = project_secret_store.ProjectSecretStore TransportKey = transport_key.TransportKey KEKDatum = kekdatum.KEKDatum Secret = secret.Secret SecretACL = secret_acl.SecretACL SecretStores = secret_stores.SecretStores SecretUserMetadatum = secret_user_metadatum.SecretUserMetadatum SecretStoreMetadatum = secret_store_metadatum.SecretStoreMetadatum SecretConsumerMetadatum = secret_consumer_metadatum.SecretConsumerMetadatum __all__ = ( States, BarbicanObject, Container, ContainerACL, ContainerConsumerMetadatum, ContainerSecret, EncryptedDatum, Order, OrderBarbicanMetadatum, OrderPluginMetadatum, OrderRetryTask, Project, ProjectQuotas, ProjectSecretStore, KEKDatum, Secret, SecretACL, SecretStores, SecretUserMetadatum, SecretStoreMetadatum, SecretConsumerMetadatum, TransportKey, ) barbican-9.1.0.dev50/barbican/objects/project_quotas.py0000664000175000017500000001005213616500636023222 0ustar sahidsahid00000000000000# Copyright 2018 Fujitsu. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_versionedobjects import base as object_base from barbican.common import exception from barbican import i18n as u from barbican.model import models from barbican.model import repositories as repos from barbican.objects import base from barbican.objects import fields @object_base.VersionedObjectRegistry.register class ProjectQuotas(base.BarbicanObject, base.BarbicanPersistentObject, object_base.VersionedObjectDictCompat): fields = { 'project_id': fields.StringField(nullable=True), 'secrets': fields.IntegerField(nullable=True, default=None), 'containers': fields.IntegerField(nullable=True, default=None), 'consumers': fields.IntegerField(nullable=True, default=None), 'orders': fields.IntegerField(nullable=True, default=None), 'cas': fields.IntegerField(nullable=True, default=None), 'project': fields.ObjectField('Project', nullable=True, default=None), } db_model = models.ProjectQuotas db_repo = repos.get_project_quotas_repository() synthetic_fields = ['project'] def _validate_fields(self, change_fields): msg = u._("Must supply non-None {0} argument for ProjectQuotas entry.") if not change_fields.get('project_id'): raise exception.MissingArgumentError(msg.format("project_id")) def _get_db_entity(self, parsed_project_quotas=None): return self.db_model(parsed_project_quotas=parsed_project_quotas, check_exc=False) def create(self, session=None, parsed_project_quotas=None): change_fields = self._get_changed_persistent_fields() self._validate_fields(change_fields) db_entity = self._get_db_entity( parsed_project_quotas=parsed_project_quotas) db_entity.update(change_fields) db_entity = self.db_repo.create_from(db_entity, session=session) self._from_db_object(db_entity) @classmethod def get_by_create_date(cls, offset_arg=None, limit_arg=None, suppress_exception=False, session=None): entities_db, offset, limit, total = \ cls.db_repo.get_by_create_date(offset_arg, limit_arg, suppress_exception, session) entities = [cls()._from_db_object(entity_db) for entity_db in entities_db] return entities, offset, limit, total @classmethod def create_or_update_by_project_id(cls, project_id, parsed_project_quotas, session=None): cls.db_repo.create_or_update_by_project_id(project_id, parsed_project_quotas, session) @classmethod def get_by_external_project_id(cls, external_project_id, suppress_exception=False, session=None): entity_db = cls.db_repo. \ get_by_external_project_id(external_project_id, suppress_exception, session) return cls()._from_db_object(entity_db) @classmethod def delete_by_external_project_id(cls, external_project_id, suppress_exception=False, session=None): cls.db_repo.delete_by_external_project_id(external_project_id, suppress_exception, session) barbican-9.1.0.dev50/barbican/objects/secret_store_metadatum.py0000664000175000017500000000423113616500636024724 0ustar sahidsahid00000000000000# Copyright 2018 Fujitsu. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_utils import timeutils from oslo_versionedobjects import base as object_base from barbican.common import exception from barbican import i18n as u from barbican.model import models from barbican.model import repositories as repo from barbican.objects import base from barbican.objects import fields @object_base.VersionedObjectRegistry.register class SecretStoreMetadatum(base.BarbicanObject, base.BarbicanPersistentObject, object_base.VersionedObjectDictCompat): fields = { 'key': fields.StringField(), 'value': fields.StringField(), 'secret_id': fields.StringField() } db_model = models.SecretStoreMetadatum db_repo = repo.get_secret_meta_repository() def _validate_fields(self, change_fields): msg = u._("Must supply non-None {0} argument " "for SecretStoreMetadatum entry.") if change_fields.get('key') is None: raise exception.MissingArgumentError(msg.format('key')) if change_fields.get('value') is None: raise exception.MissingArgumentError(msg.format('value')) @classmethod def save(cls, metadata, secret_obj): """Saves the specified metadata for the secret.""" now = timeutils.utcnow() for k, v in metadata.items(): meta_obj = cls(key=k, value=v) meta_obj.updated_at = now meta_obj.secret_id = secret_obj.id meta_obj.create() @classmethod def get_metadata_for_secret(cls, secret_id): return cls.db_repo.get_metadata_for_secret(secret_id) barbican-9.1.0.dev50/barbican/objects/project_secret_store.py0000664000175000017500000000610013616500636024406 0ustar sahidsahid00000000000000# Copyright 2018 Fujitsu. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
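# ---------------------------------------------------------------------------
# Illustrative sketch (assumes a configured Barbican database session and
# repositories; `some_secret` is a hypothetical existing Secret): how the
# _validate_fields() hook defined by SecretStoreMetadatum above is exercised.
# BarbicanObject.create() collects the changed fields and calls
# _validate_fields() before touching the repository, so a missing key or
# value fails fast instead of surfacing as a database error.
#
#     meta = SecretStoreMetadatum(key='region', value='east')
#     meta.secret_id = some_secret.id
#     meta.create()       # validation passes; the row is inserted
#
#     bad = SecretStoreMetadatum(value='east')
#     bad.create()        # raises exception.MissingArgumentError for "key"
# ---------------------------------------------------------------------------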
from oslo_versionedobjects import base as object_base from barbican.common import exception from barbican import i18n as u from barbican.model import models from barbican.model import repositories as repos from barbican.objects import base from barbican.objects import fields @object_base.VersionedObjectRegistry.register class ProjectSecretStore(base.BarbicanObject, base.BarbicanPersistentObject, object_base.VersionedObjectDictCompat): fields = { 'secret_store_id': fields.StringField(nullable=True, default=None), 'project_id': fields.StringField(nullable=True, default=None), 'secret_store': fields.ObjectField('SecretStores', nullable=True, default=None), 'project': fields.ObjectField('Project', nullable=True, default=None), 'status': fields.StringField(nullable=True, default=base.States.ACTIVE) } db_model = models.ProjectSecretStore db_repo = repos.get_project_secret_store_repository() synthetic_fields = ['secret_store', 'project'] def _validate_fields(self, change_fields): msg = u._("Must supply non-None {0} argument for ProjectSecretStore " " entry.") if not change_fields.get('project_id'): raise exception.MissingArgumentError(msg.format("project_id")) if not change_fields.get('secret_store_id'): raise exception.MissingArgumentError(msg.format("secret_store_id")) @classmethod def get_secret_store_for_project(cls, project_id, external_project_id, suppress_exception=False, session=None): pss_db = cls.db_repo.get_secret_store_for_project( project_id, external_project_id, suppress_exception, session) return cls()._from_db_object(pss_db) @classmethod def create_or_update_for_project(cls, project_id, secret_store_id, session=None): pss_db = cls.db_repo.create_or_update_for_project(project_id, secret_store_id, session) return cls()._from_db_object(pss_db) @classmethod def get_count_by_secret_store(cls, secret_store_id, session=None): number_pss = cls.db_repo.get_count_by_secret_store(secret_store_id, session) return number_pss barbican-9.1.0.dev50/barbican/objects/order_retry_task.py0000664000175000017500000000375213616500636023553 0ustar sahidsahid00000000000000# Copyright 2018 Fujitsu. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
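# ---------------------------------------------------------------------------
# Illustrative sketch (not part of Barbican): behaviour of the custom
# JsonField declared in barbican/objects/fields.py and used below for
# retry_args/retry_kwargs. Json.coerce() parses a JSON string into a Python
# value and passes already-decoded values through; to_primitive() serializes
# back to a JSON string.
#
#     from barbican.objects import fields
#
#     field = fields.JsonField()
#     field.coerce(None, 'retry_args', '["secret-id", 2]')
#         # -> ['secret-id', 2]
#     field.coerce(None, 'retry_args', {'max': 3})
#         # -> {'max': 3}  (non-strings pass through unchanged)
#     field.to_primitive(None, 'retry_args', ['secret-id', 2])
#         # -> '["secret-id", 2]'
# ---------------------------------------------------------------------------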
from oslo_versionedobjects import base as object_base from barbican.common import utils from barbican.model import models from barbican.model import repositories as repos from barbican.objects import base from barbican.objects import fields class OrderRetryTask(base.BarbicanObject, base.BarbicanPersistentObject, object_base.VersionedObjectDictCompat): fields = { 'id': fields.StringField(default=utils.generate_uuid()), 'order_id': fields.StringField(), 'retry_task': fields.StringField(), 'retry_at': fields.DateTimeField(nullable=True, default=None), 'retry_args': fields.JsonField(), 'retry_kwargs': fields.JsonField(), 'retry_count': fields.IntegerField(default=0) } db_model = models.OrderRetryTask db_repo = repos.get_order_retry_tasks_repository() @classmethod def get_by_create_date(cls, only_at_or_before_this_date=None, offset_arg=None, limit_arg=None, suppress_exception=False, session=None): entities_db, offset, limit, total = cls.db_repo.get_by_create_date( only_at_or_before_this_date, offset_arg, limit_arg, suppress_exception, session) entities = [cls()._from_db_object(entity_db) for entity_db in entities_db] return entities, offset, limit, total barbican-9.1.0.dev50/barbican/objects/secret_stores.py0000664000175000017500000000406413616500636023052 0ustar sahidsahid00000000000000# Copyright 2018 Fujitsu. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_versionedobjects import base as object_base from barbican.common import exception from barbican import i18n as u from barbican.model import models from barbican.model import repositories as repos from barbican.objects import base from barbican.objects import fields @object_base.VersionedObjectRegistry.register class SecretStores(base.BarbicanObject, base.BarbicanPersistentObject, object_base.VersionedObjectDictCompat): fields = { 'store_plugin': fields.StringField(), 'crypto_plugin': fields.StringField(nullable=True), 'global_default': fields.BooleanField(default=False), 'name': fields.StringField(), 'status': fields.StringField(nullable=True, default=base.States.ACTIVE) } db_model = models.SecretStores db_repo = repos.get_secret_stores_repository() def _validate_fields(self, change_fields): msg = u._("Must supply non-Blank {0} argument for SecretStores entry.") if not change_fields.get('name'): raise exception.MissingArgumentError(msg.format("name")) if not change_fields.get('store_plugin'): raise exception.MissingArgumentError(msg.format("store_plugin")) @classmethod def get_all(cls, session=None): secret_stores_db = cls.db_repo.get_all(session) secret_stores_obj = [cls()._from_db_object(secret_store_db) for secret_store_db in secret_stores_db] return secret_stores_obj barbican-9.1.0.dev50/barbican/model/0000775000175000017500000000000013616500640017252 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/model/models.py0000664000175000017500000015553013616500636021125 0ustar sahidsahid00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ Defines database models for Barbican """ import hashlib from oslo_serialization import jsonutils as json from oslo_utils import timeutils import six import sqlalchemy as sa from sqlalchemy.ext import compiler from sqlalchemy.ext import declarative from sqlalchemy import orm from sqlalchemy.orm import collections as col from sqlalchemy import types as sql_types from barbican.common import exception from barbican.common import utils from barbican import i18n as u BASE = declarative.declarative_base() ERROR_REASON_LENGTH = 255 SUB_STATUS_LENGTH = 36 SUB_STATUS_MESSAGE_LENGTH = 255 # Allowed entity states class States(object): PENDING = 'PENDING' ACTIVE = 'ACTIVE' ERROR = 'ERROR' @classmethod def is_valid(cls, state_to_test): """Tests if a state is a valid one.""" return state_to_test in cls.__dict__ class OrderType(object): KEY = 'key' ASYMMETRIC = 'asymmetric' CERTIFICATE = 'certificate' @classmethod def is_valid(cls, order_type): """Tests if a order type is a valid one.""" return order_type in cls.__dict__ class OrderStatus(object): def __init__(self, id, message): self.id = id self.message = message @compiler.compiles(sa.BigInteger, 'sqlite') def compile_big_int_sqlite(type_, compiler, **kw): return 'INTEGER' class JsonBlob(sql_types.TypeDecorator): """JsonBlob is custom type for fields which need to store JSON text.""" impl = sa.Text def process_bind_param(self, value, dialect): if value is not None: return json.dumps(value) return value def process_result_value(self, value, dialect): if value is not None: return json.loads(value) return value class ModelBase(object): """Base class for Nova and Barbican Models.""" __table_args__ = {'mysql_engine': 'InnoDB'} __table_initialized__ = False __protected_attributes__ = { "created_at", "updated_at", "deleted_at", "deleted"} id = sa.Column(sa.String(36), primary_key=True, default=utils.generate_uuid) created_at = sa.Column(sa.DateTime, default=timeutils.utcnow, nullable=False) updated_at = sa.Column(sa.DateTime, default=timeutils.utcnow, nullable=False, onupdate=timeutils.utcnow) deleted_at = sa.Column(sa.DateTime) deleted = sa.Column(sa.Boolean, nullable=False, default=False) status = sa.Column(sa.String(20), nullable=False, default=States.PENDING) def save(self, session=None): """Save this object.""" # import api here to prevent circular dependency problem import barbican.model.repositories session = session or barbican.model.repositories.get_session() # if model is being created ensure that created/updated are the same if self.id is None: self.created_at = timeutils.utcnow() self.updated_at = self.created_at session.add(self) session.flush() def delete(self, session=None): """Delete this object.""" import barbican.model.repositories session = session or barbican.model.repositories.get_session() self._do_delete_children(session) session.delete(self) def get(self, key, default=None): return getattr(self, key, default) def _do_delete_children(self, session): """Sub-class hook: delete children 
relationships.""" pass def update(self, values): """dict.update() behaviour.""" for k, v in values.items(): self[k] = v def __setitem__(self, key, value): setattr(self, key, value) def __getitem__(self, key): return getattr(self, key) def __iter__(self): self._i = iter(orm.object_mapper(self).sa.Columns) return self def next(self): n = next(self._i).name return n, getattr(self, n) def keys(self): return self.__dict__.keys() def values(self): return self.__dict__.values() def items(self): return self.__dict__.items() def to_dict(self): return self.__dict__.copy() def to_dict_fields(self): """Returns a dictionary of just the db fields of this entity.""" if self.created_at: created_at = self.created_at.isoformat() else: created_at = self.created_at if self.updated_at: updated_at = self.updated_at.isoformat() else: updated_at = self.updated_at dict_fields = { 'created': created_at, 'updated': updated_at, 'status': self.status } if self.deleted_at: dict_fields['deleted_at'] = self.deleted_at.isoformat() if self.deleted: dict_fields['deleted'] = True dict_fields.update(self._do_extra_dict_fields()) return dict_fields def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return {} def _iso_to_datetime(self, expiration): """Convert ISO formatted string to datetime.""" if isinstance(expiration, six.string_types): expiration_iso = timeutils.parse_isotime(expiration.strip()) expiration = timeutils.normalize_time(expiration_iso) return expiration class SoftDeleteMixIn(object): """Mix-in class that adds soft delete functionality.""" def delete(self, session=None): """Delete this object.""" import barbican.model.repositories session = session or barbican.model.repositories.get_session() self.deleted = True self.deleted_at = timeutils.utcnow() self.save(session=session) self._do_delete_children(session) class ContainerSecret(BASE, SoftDeleteMixIn, ModelBase): """Represents an association between a Container and a Secret.""" __tablename__ = 'container_secret' name = sa.Column(sa.String(255), nullable=True) container_id = sa.Column( sa.String(36), sa.ForeignKey('containers.id'), index=True, nullable=False) secret_id = sa.Column( sa.String(36), sa.ForeignKey('secrets.id'), index=True, nullable=False) # Eager load this relationship via 'lazy=False'. container = orm.relationship( 'Container', backref=orm.backref('container_secrets', lazy=False, primaryjoin="and_(ContainerSecret.container_id == " "Container.id, ContainerSecret.deleted!=True)")) secrets = orm.relationship( 'Secret', backref=orm.backref('container_secrets', primaryjoin="and_(ContainerSecret.secret_id == " "Secret.id, ContainerSecret.deleted!=True)")) __table_args__ = (sa.UniqueConstraint('container_id', 'secret_id', 'name', name='_container_secret_name_uc'),) def __init__(self, check_exc=True): super(ContainerSecret, self).__init__() class Project(BASE, SoftDeleteMixIn, ModelBase): """Represents a Project in the datastore. Projects are users that wish to store secret information within Barbican. 
""" __tablename__ = 'projects' external_id = sa.Column(sa.String(255), unique=True) orders = orm.relationship("Order", backref="project") secrets = orm.relationship("Secret", backref="project") keks = orm.relationship("KEKDatum", backref="project") containers = orm.relationship("Container", backref="project") cas = orm.relationship("ProjectCertificateAuthority", backref="project") project_quotas = orm.relationship("ProjectQuotas", backref="project") def __init__(self, check_exc=True): super(Project, self).__init__() def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return {'external_id': self.external_id} class Secret(BASE, SoftDeleteMixIn, ModelBase): """Represents a Secret in the datastore. Secrets are any information Projects wish to store within Barbican, though the actual encrypted data is stored in one or more EncryptedData entities on behalf of a Secret. """ __tablename__ = 'secrets' name = sa.Column(sa.String(255)) secret_type = sa.Column(sa.String(255), server_default=utils.SECRET_TYPE_OPAQUE) expiration = sa.Column(sa.DateTime, default=None) algorithm = sa.Column(sa.String(255)) bit_length = sa.Column(sa.Integer) mode = sa.Column(sa.String(255)) creator_id = sa.Column(sa.String(255)) project_id = sa.Column( sa.String(36), sa.ForeignKey('projects.id', name='secrets_project_fk'), index=True, nullable=False) # TODO(jwood): Performance - Consider avoiding full load of all # datum attributes here. This is only being done to support the # building of the list of supported content types when secret # metadata is retrieved. # See barbican.api.resources.py::SecretsResource.on_get() # Eager load this relationship via 'lazy=False'. encrypted_data = orm.relationship("EncryptedDatum", lazy=False) secret_store_metadata = orm.relationship( "SecretStoreMetadatum", collection_class=col.attribute_mapped_collection('key'), backref="secret", cascade="all, delete-orphan") secret_user_metadata = orm.relationship( "SecretUserMetadatum", collection_class=col.attribute_mapped_collection('key'), backref="secret", cascade="all, delete-orphan") consumers = orm.relationship( "SecretConsumerMetadatum", backref="secret", cascade="all, delete-orphan") def __init__(self, parsed_request=None, check_exc=True): """Creates secret from a dict.""" super(Secret, self).__init__() if parsed_request: self.name = parsed_request.get('name') self.secret_type = parsed_request.get( 'secret_type', utils.SECRET_TYPE_OPAQUE) expiration = self._iso_to_datetime(parsed_request.get ('expiration')) self.expiration = expiration self.algorithm = parsed_request.get('algorithm') self.bit_length = parsed_request.get('bit_length') self.mode = parsed_request.get('mode') self.creator_id = parsed_request.get('creator_id') self.project_id = parsed_request.get('project_id') self.status = States.ACTIVE def _do_delete_children(self, session): """Sub-class hook: delete children relationships.""" for k, v in self.secret_store_metadata.items(): v.delete(session) for k, v in self.secret_user_metadata.items(): v.delete(session) for datum in self.encrypted_data: datum.delete(session) for secret_ref in self.container_secrets: session.delete(secret_ref) for secret_acl in self.secret_acls: session.delete(secret_acl) def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" if self.expiration: expiration = self.expiration.isoformat() else: expiration = self.expiration return { 'secret_id': self.id, 'name': self.name, 'secret_type': self.secret_type, 'expiration': expiration, 'algorithm': 
self.algorithm, 'bit_length': self.bit_length, 'mode': self.mode, 'creator_id': self.creator_id, # TODO(redrobot): Uncomment this after adding microversions # "consumers": [ # { # "service": consumer.service, # "resource_type": consumer.resource_type, # "resource_id": consumer.resource_id, # } for consumer in self.consumers if not consumer.deleted # ], } class SecretStoreMetadatum(BASE, SoftDeleteMixIn, ModelBase): """Represents Secret Store metadatum for a single key-value pair.""" __tablename__ = "secret_store_metadata" key = sa.Column(sa.String(255), nullable=False) value = sa.Column(sa.String(255), nullable=False) secret_id = sa.Column( sa.String(36), sa.ForeignKey('secrets.id'), index=True, nullable=False) def __init__(self, key=None, value=None, check_exc=True): super(SecretStoreMetadatum, self).__init__() msg = u._("Must supply non-None {0} argument " "for SecretStoreMetadatum entry.") if key is None and check_exc: raise exception.MissingArgumentError(msg.format("key")) self.key = key if value is None and check_exc: raise exception.MissingArgumentError(msg.format("value")) self.value = value def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return { 'key': self.key, 'value': self.value } class SecretUserMetadatum(BASE, SoftDeleteMixIn, ModelBase): """Represents Secret user metadatum for a single key-value pair.""" __tablename__ = "secret_user_metadata" key = sa.Column(sa.String(255), nullable=False) value = sa.Column(sa.String(255), nullable=False) secret_id = sa.Column( sa.String(36), sa.ForeignKey('secrets.id'), index=True, nullable=False) __table_args__ = (sa.UniqueConstraint('secret_id', 'key', name='_secret_key_uc'),) def __init__(self, key=None, value=None, check_exc=True): super(SecretUserMetadatum, self).__init__() msg = u._("Must supply non-None {0} argument " "for SecretUserMetadatum entry.") if key is None and check_exc: raise exception.MissingArgumentError(msg.format("key")) self.key = key if value is None and check_exc: raise exception.MissingArgumentError(msg.format("value")) self.value = value def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return { 'key': self.key, 'value': self.value } class EncryptedDatum(BASE, SoftDeleteMixIn, ModelBase): """Represents the encrypted data for a Secret.""" __tablename__ = 'encrypted_data' content_type = sa.Column(sa.String(255)) secret_id = sa.Column( sa.String(36), sa.ForeignKey('secrets.id'), index=True, nullable=False) kek_id = sa.Column( sa.String(36), sa.ForeignKey('kek_data.id'), index=True, nullable=False) # TODO(jwood) Why LargeBinary on Postgres (BYTEA) not work correctly? cypher_text = sa.Column(sa.Text) kek_meta_extended = sa.Column(sa.Text) # Eager load this relationship via 'lazy=False'. kek_meta_project = orm.relationship("KEKDatum", lazy=False) def __init__(self, secret=None, kek_datum=None, check_exc=True): """Creates encrypted datum from a secret and KEK metadata.""" super(EncryptedDatum, self).__init__() if secret: self.secret_id = secret.id if kek_datum: self.kek_id = kek_datum.id self.kek_meta_project = kek_datum self.status = States.ACTIVE def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return {'content_type': self.content_type} class KEKDatum(BASE, SoftDeleteMixIn, ModelBase): """Key encryption key (KEK) metadata model. Represents the key encryption key (KEK) metadata associated with a process used to encrypt/decrypt secret information. 
When a secret is encrypted, in addition to the cypher text, the Barbican encryption process produces a KEK metadata object. The cypher text is stored via the EncryptedDatum model above, whereas the metadata is stored within this model. Decryption processes utilize this KEK metadata to decrypt the associated cypher text. Note that this model is intended to be agnostic to the specific means used to encrypt/decrypt the secret information, so please do not place vendor-specific attributes here. Note as well that each Project will have at most one 'active=True' KEKDatum instance at a time, representing the most recent KEK metadata instance to use for encryption processes performed on behalf of the Project. KEKDatum instances that are 'active=False' are associated with previously used encryption processes for the Project, which should eventually be rotated and deleted along with the Project's active KEKDatum. """ __tablename__ = 'kek_data' plugin_name = sa.Column(sa.String(255), nullable=False) kek_label = sa.Column(sa.String(255)) project_id = sa.Column( sa.String(36), sa.ForeignKey('projects.id', name='kek_data_project_fk'), index=True, nullable=False) active = sa.Column(sa.Boolean, nullable=False, default=True) bind_completed = sa.Column(sa.Boolean, nullable=False, default=False) algorithm = sa.Column(sa.String(255)) bit_length = sa.Column(sa.Integer) mode = sa.Column(sa.String(255)) plugin_meta = sa.Column(sa.Text) def __init__(self, check_exc=True): super(KEKDatum, self).__init__() def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return {'algorithm': self.algorithm} class Order(BASE, SoftDeleteMixIn, ModelBase): """Represents an Order in the datastore. Orders are requests for Barbican to generate secrets, ranging from symmetric and asymmetric keys to automated requests to Certificate Authorities to generate SSL certificates.
""" __tablename__ = 'orders' type = sa.Column(sa.String(255), nullable=False, default='key') project_id = sa.Column( sa.String(36), sa.ForeignKey('projects.id', name='orders_project_fk'), index=True, nullable=False) error_status_code = sa.Column(sa.String(16)) error_reason = sa.Column(sa.String(ERROR_REASON_LENGTH)) meta = sa.Column(JsonBlob(), nullable=True) secret_id = sa.Column(sa.String(36), sa.ForeignKey('secrets.id'), index=True, nullable=True) container_id = sa.Column(sa.String(36), sa.ForeignKey('containers.id'), index=True, nullable=True) sub_status = sa.Column(sa.String(SUB_STATUS_LENGTH), nullable=True) sub_status_message = sa.Column(sa.String(SUB_STATUS_MESSAGE_LENGTH), nullable=True) creator_id = sa.Column(sa.String(255)) order_plugin_metadata = orm.relationship( "OrderPluginMetadatum", collection_class=col.attribute_mapped_collection('key'), backref="order", cascade="all, delete-orphan") order_barbican_metadata = orm.relationship( "OrderBarbicanMetadatum", collection_class=col.attribute_mapped_collection('key'), backref="order", cascade="all, delete-orphan") def __init__(self, parsed_request=None, check_exc=True): """Creates a Order entity from a dict.""" super(Order, self).__init__() if parsed_request: self.type = parsed_request.get('type') self.meta = parsed_request.get('meta') self.status = States.ACTIVE self.sub_status = parsed_request.get('sub_status') self.sub_status_message = parsed_request.get( 'sub_status_message') self.creator_id = parsed_request.get('creator_id') def set_error_reason_safely(self, error_reason_raw): """Ensure error reason does not raise database attribute exceptions.""" self.error_reason = error_reason_raw[:ERROR_REASON_LENGTH] def set_sub_status_safely(self, sub_status_raw): """Ensure sub-status does not raise database attribute exceptions.""" self.sub_status = sub_status_raw[:SUB_STATUS_LENGTH] def set_sub_status_message_safely(self, sub_status_message_raw): """Ensure status message doesn't raise database attrib. exceptions.""" self.sub_status_message = sub_status_message_raw[ :SUB_STATUS_MESSAGE_LENGTH ] def _do_delete_children(self, session): """Sub-class hook: delete children relationships.""" for k, v in self.order_plugin_metadata.items(): v.delete(session) for k, v in self.order_barbican_metadata.items(): v.delete(session) def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" ret = { 'type': self.type, 'meta': self.meta, 'order_id': self.id } if self.secret_id: ret['secret_id'] = self.secret_id if self.container_id: ret['container_id'] = self.container_id if self.error_status_code: ret['error_status_code'] = self.error_status_code if self.error_reason: ret['error_reason'] = self.error_reason if self.sub_status: ret['sub_status'] = self.sub_status if self.sub_status_message: ret['sub_status_message'] = self.sub_status_message if self.creator_id: ret['creator_id'] = self.creator_id return ret class OrderPluginMetadatum(BASE, SoftDeleteMixIn, ModelBase): """Represents Order plugin metadatum for a single key-value pair. This entity is used to store plugin-specific metadata on behalf of an Order instance. 
""" __tablename__ = "order_plugin_metadata" order_id = sa.Column(sa.String(36), sa.ForeignKey('orders.id'), index=True, nullable=False) key = sa.Column(sa.String(255), nullable=False) value = sa.Column(sa.String(255), nullable=False) def __init__(self, key=None, value=None, check_exc=True): super(OrderPluginMetadatum, self).__init__() msg = u._("Must supply non-None {0} argument " "for OrderPluginMetadatum entry.") if key is None and check_exc: raise exception.MissingArgumentError(msg.format("key")) self.key = key if value is None and check_exc: raise exception.MissingArgumentError(msg.format("value")) self.value = value def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return {'key': self.key, 'value': self.value} class OrderBarbicanMetadatum(BASE, SoftDeleteMixIn, ModelBase): """Represents Order barbican metadatum for a single key-value pair. This entity is used to store barbican-specific metadata on behalf of an Order instance. This is data that is stored by the server to help process the order through its life cycle, but which is not in the original request. """ __tablename__ = "order_barbican_metadata" order_id = sa.Column(sa.String(36), sa.ForeignKey('orders.id'), index=True, nullable=False) key = sa.Column(sa.String(255), nullable=False) value = sa.Column(sa.Text, nullable=False) def __init__(self, key=None, value=None, check_exc=True): super(OrderBarbicanMetadatum, self).__init__() msg = u._("Must supply non-None {0} argument " "for OrderBarbicanMetadatum entry.") if key is None and check_exc: raise exception.MissingArgumentError(msg.format("key")) self.key = key if value is None and check_exc: raise exception.MissingArgumentError(msg.format("value")) self.value = value def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return {'key': self.key, 'value': self.value} class OrderRetryTask(BASE, SoftDeleteMixIn, ModelBase): __tablename__ = "order_retry_tasks" __table_args__ = {"mysql_engine": "InnoDB"} __table_initialized__ = False id = sa.Column( sa.String(36), primary_key=True, default=utils.generate_uuid, ) order_id = sa.Column( sa.String(36), sa.ForeignKey("orders.id"), index=True, nullable=False, ) retry_task = sa.Column(sa.Text, nullable=False) retry_at = sa.Column(sa.DateTime, default=None, nullable=False) retry_args = sa.Column(JsonBlob(), nullable=False) retry_kwargs = sa.Column(JsonBlob(), nullable=False) retry_count = sa.Column(sa.Integer, nullable=False, default=0) def __index__(self, check_exc): super(OrderRetryTask, self).__init__() class Container(BASE, SoftDeleteMixIn, ModelBase): """Represents a Container for Secrets in the datastore. Containers store secret references. Containers are owned by Projects. Containers can be generic or have a predefined type. Predefined typed containers allow users to store structured key relationship inside Barbican. 
""" __tablename__ = 'containers' name = sa.Column(sa.String(255)) type = sa.Column(sa.Enum('generic', 'rsa', 'dsa', 'certificate', name='container_types')) project_id = sa.Column( sa.String(36), sa.ForeignKey('projects.id', name='containers_project_fk'), index=True, nullable=False) consumers = sa.orm.relationship("ContainerConsumerMetadatum") creator_id = sa.Column(sa.String(255)) def __init__(self, parsed_request=None, check_exc=True): """Creates a Container entity from a dict.""" super(Container, self).__init__() if parsed_request: self.name = parsed_request.get('name') self.type = parsed_request.get('type') self.status = States.ACTIVE self.creator_id = parsed_request.get('creator_id') secret_refs = parsed_request.get('secret_refs') if secret_refs: for secret_ref in parsed_request.get('secret_refs'): container_secret = ContainerSecret() container_secret.name = secret_ref.get('name') # TODO(hgedikli) move this into a common location # TODO(hgedikli) validate provided url # TODO(hgedikli) parse out secret_id with regex secret_id = secret_ref.get('secret_ref') if secret_id.endswith('/'): secret_id = secret_id.rsplit('/', 2)[1] elif '/' in secret_id: secret_id = secret_id.rsplit('/', 1)[1] else: secret_id = secret_id container_secret.secret_id = secret_id self.container_secrets.append(container_secret) def _do_delete_children(self, session): """Sub-class hook: delete children relationships.""" for container_secret in self.container_secrets: session.delete(container_secret) for container_acl in self.container_acls: session.delete(container_acl) def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return {'container_id': self.id, 'name': self.name, 'type': self.type, 'creator_id': self.creator_id, 'secret_refs': [ { 'secret_id': container_secret.secret_id, 'name': container_secret.name if hasattr(container_secret, 'name') else None } for container_secret in self.container_secrets], 'consumers': [ { 'name': consumer.name, 'URL': consumer.URL } for consumer in self.consumers if not consumer.deleted ]} class ContainerConsumerMetadatum(BASE, SoftDeleteMixIn, ModelBase): """Stores Consumer Registrations for Containers in the datastore. Services can register interest in Containers. Services will provide a type and a URL for the object that is using the Container. """ __tablename__ = 'container_consumer_metadata' container_id = sa.Column(sa.String(36), sa.ForeignKey('containers.id'), index=True, nullable=False) project_id = sa.Column(sa.String(36), sa.ForeignKey('projects.id'), index=True, nullable=True) name = sa.Column(sa.String(36)) URL = sa.Column(sa.String(255)) data_hash = sa.Column(sa.CHAR(64)) __table_args__ = ( sa.UniqueConstraint('data_hash', name='_consumer_hashed_container_name_url_uc'), sa.Index('values_index', 'container_id', 'name', 'URL') ) def __init__(self, container_id=None, project_id=None, parsed_request=None, check_exc=True): """Registers a Consumer to a Container.""" super(ContainerConsumerMetadatum, self).__init__() # TODO(john-wood-w) This class should really be immutable due to the # data_hash attribute. if container_id and parsed_request: self.container_id = container_id self.project_id = project_id self.name = parsed_request.get('name') self.URL = parsed_request.get('URL') hash_text = ''.join((self.container_id, self.name, self.URL)) self.data_hash = hashlib.sha256(hash_text. 
encode('utf-8')).hexdigest() self.status = States.ACTIVE def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return {'name': self.name, 'URL': self.URL} class TransportKey(BASE, SoftDeleteMixIn, ModelBase): """Transport Key model for wrapping secrets in transit Represents the transport key used for wrapping secrets in transit to/from clients when storing/retrieving secrets. """ __tablename__ = 'transport_keys' plugin_name = sa.Column(sa.String(255), nullable=False) transport_key = sa.Column(sa.Text, nullable=False) def __init__(self, plugin_name=None, transport_key=None, check_exc=True): """Creates transport key entity.""" super(TransportKey, self).__init__() msg = u._("Must supply non-None {0} argument for TransportKey entry.") if plugin_name is None and check_exc: raise exception.MissingArgumentError(msg.format("plugin_name")) self.plugin_name = plugin_name if transport_key is None and check_exc: raise exception.MissingArgumentError(msg.format("transport_key")) self.transport_key = transport_key self.status = States.ACTIVE def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return {'transport_key_id': self.id, 'plugin_name': self.plugin_name} class CertificateAuthority(BASE, ModelBase): """CertificateAuthority model to specify the CAs available to Barbican Represents the CAs available for certificate issuance to Barbican. """ __tablename__ = 'certificate_authorities' plugin_name = sa.Column(sa.String(255), nullable=False) plugin_ca_id = sa.Column(sa.Text, nullable=False) expiration = sa.Column(sa.DateTime, default=None) creator_id = sa.Column(sa.String(255), nullable=True) project_id = sa.Column( sa.String(36), sa.ForeignKey('projects.id', name='cas_project_fk'), nullable=True) ca_meta = orm.relationship( 'CertificateAuthorityMetadatum', collection_class=col.attribute_mapped_collection('key'), backref="ca", cascade="all, delete-orphan" ) def __init__(self, parsed_ca_in=None, check_exc=True): """Creates certificate authority entity.""" super(CertificateAuthority, self).__init__() msg = u._("Must supply Non-None {0} argument " "for CertificateAuthority entry.") parsed_ca = dict(parsed_ca_in) plugin_name = parsed_ca.pop('plugin_name', None) if plugin_name is None: raise exception.MissingArgumentError(msg.format("plugin_name")) self.plugin_name = plugin_name plugin_ca_id = parsed_ca.pop('plugin_ca_id', None) if plugin_ca_id is None: raise exception.MissingArgumentError(msg.format("plugin_ca_id")) self.plugin_ca_id = plugin_ca_id expiration = parsed_ca.pop('expiration', None) self.expiration = self._iso_to_datetime(expiration) creator_id = parsed_ca.pop('creator_id', None) if creator_id is not None: self.creator_id = creator_id project_id = parsed_ca.pop('project_id', None) if project_id is not None: self.project_id = project_id for key in parsed_ca: meta = CertificateAuthorityMetadatum(key, parsed_ca[key]) self.ca_meta[key] = meta self.status = States.ACTIVE def _do_delete_children(self, session): """Sub-class hook: delete children relationships.""" for k, v in self.ca_meta.items(): v.delete(session) def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" if self.expiration: expiration = self.expiration.isoformat() else: expiration = None return { 'ca_id': self.id, 'plugin_name': self.plugin_name, 'plugin_ca_id': self.plugin_ca_id, 'expiration': expiration, 'meta': [ { meta['key']: meta['value'] } for key, meta in self.ca_meta.items() ] } class CertificateAuthorityMetadatum(BASE, ModelBase): 
"""Represents CA metadatum for a single key-value pair.""" __tablename__ = "certificate_authority_metadata" key = sa.Column(sa.String(255), index=True, nullable=False) value = sa.Column(sa.Text, nullable=False) ca_id = sa.Column( sa.String(36), sa.ForeignKey('certificate_authorities.id'), index=True, nullable=False) __table_args__ = (sa.UniqueConstraint( 'ca_id', 'key', name='_certificate_authority_metadatum_uc'),) def __init__(self, key=None, value=None, check_exc=True): super(CertificateAuthorityMetadatum, self).__init__() msg = u._("Must supply non-None {0} argument " "for CertificateAuthorityMetadatum entry.") if key is None and check_exc: raise exception.MissingArgumentError(msg.format("key")) self.key = key if value is None and check_exc: raise exception.MissingArgumentError(msg.format("value")) self.value = value def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return { 'key': self.key, 'value': self.value } class ProjectCertificateAuthority(BASE, ModelBase): """Stores CAs available for a project. Admins can define a set of CAs that are available for use in a particular project. There can be multiple entries for any given project. """ __tablename__ = 'project_certificate_authorities' project_id = sa.Column(sa.String(36), sa.ForeignKey('projects.id'), index=True, nullable=False) ca_id = sa.Column(sa.String(36), sa.ForeignKey('certificate_authorities.id'), index=True, nullable=False) ca = orm.relationship("CertificateAuthority", backref="project_cas") __table_args__ = (sa.UniqueConstraint( 'project_id', 'ca_id', name='_project_certificate_authority_uc'),) def __init__(self, project_id=None, ca_id=None, check_exc=True): """Registers a Consumer to a Container.""" super(ProjectCertificateAuthority, self).__init__() msg = u._("Must supply non-None {0} argument " "for ProjectCertificateAuthority entry.") if project_id is None and check_exc: raise exception.MissingArgumentError(msg.format("project_id")) self.project_id = project_id if ca_id is None and check_exc: raise exception.MissingArgumentError(msg.format("ca_id")) self.ca_id = ca_id self.status = States.ACTIVE def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return {'project_id': self.project_id, 'ca_id': self.ca_id} class PreferredCertificateAuthority(BASE, ModelBase): """Stores preferred CAs for any project. Admins can define a set of CAs available for issuance requests for any project in the ProjectCertificateAuthority table.. 
""" __tablename__ = 'preferred_certificate_authorities' project_id = sa.Column(sa.String(36), sa.ForeignKey('projects.id'), index=True, unique=True, nullable=False) ca_id = sa.Column(sa.String(36), sa.ForeignKey( 'certificate_authorities.id', name='preferred_certificate_authorities_fk'), index=True, nullable=False) project = orm.relationship('Project', backref=orm.backref('preferred_ca'), uselist=False) ca = orm.relationship('CertificateAuthority', backref=orm.backref('preferred_ca')) def __init__(self, project_id=None, ca_id=None, check_exc=True): """Registers a Consumer to a Container.""" super(PreferredCertificateAuthority, self).__init__() msg = u._("Must supply non-None {0} argument " "for PreferredCertificateAuthority entry.") if project_id is None and check_exc: raise exception.MissingArgumentError(msg.format("project_id")) self.project_id = project_id if ca_id is None and check_exc: raise exception.MissingArgumentError(msg.format("ca_id")) self.ca_id = ca_id self.status = States.ACTIVE def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return {'project_id': self.project_id, 'ca_id': self.ca_id} class SecretACL(BASE, ModelBase): """Stores Access Control List (ACL) for a secret. Class to define whitelist of user ids who are allowed specific operation on a secret. List of user ids is defined via SecretACLUser via acl_users association. Creator_only flag helps in making a secret private for non-admin project users who may have access otherwise. SecretACL deletes are not soft-deletes. """ __tablename__ = 'secret_acls' secret_id = sa.Column(sa.String(36), sa.ForeignKey('secrets.id'), index=True, nullable=False) operation = sa.Column(sa.String(255), nullable=False) project_access = sa.Column(sa.Boolean, nullable=False, default=True) secret = orm.relationship( 'Secret', backref=orm.backref('secret_acls', lazy=False)) acl_users = orm.relationship( 'SecretACLUser', backref=orm.backref('secret_acl', lazy=False), cascade="all, delete-orphan") __table_args__ = (sa.UniqueConstraint( 'secret_id', 'operation', name='_secret_acl_operation_uc'),) def __init__(self, secret_id=None, operation=None, project_access=None, user_ids=None, check_exc=True): """Creates secret ACL entity.""" super(SecretACL, self).__init__() msg = u._("Must supply non-None {0} argument for SecretACL entry.") if secret_id is None and check_exc: raise exception.MissingArgumentError(msg.format("secret_id")) self.secret_id = secret_id if operation is None and check_exc: raise exception.MissingArgumentError(msg.format("operation")) self.operation = operation if project_access is not None: self.project_access = project_access self.status = States.ACTIVE if user_ids is not None and isinstance(user_ids, list): userids = set(user_ids) # remove duplicate if any for user_id in userids: acl_user = SecretACLUser(self.id, user_id) self.acl_users.append(acl_user) def _do_delete_children(self, session): """Sub-class hook: delete children relationships.""" for acl_user in self.acl_users: acl_user.delete(session) def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields. Adds non-deleted acl related users from relationship if there. """ users = [acl_user.user_id for acl_user in self.acl_users if not acl_user.deleted] fields = {'acl_id': self.id, 'secret_id': self.secret_id, 'operation': self.operation, 'project_access': self.project_access} if users: fields['users'] = users return fields class ContainerACL(BASE, ModelBase): """Stores Access Control List (ACL) for a container. 
    Defines the whitelist of user ids that are allowed to perform a specific
    operation on a container. The list of user ids is stored in
    ContainerACLUser entries reached via the acl_users association. The
    project_access flag helps make a container private for non-admin project
    users who might otherwise have access. ContainerACL deletes are not
    soft-deletes.
    """
    __tablename__ = 'container_acls'

    container_id = sa.Column(sa.String(36), sa.ForeignKey('containers.id'),
                             index=True, nullable=False)
    operation = sa.Column(sa.String(255), nullable=False)
    project_access = sa.Column(sa.Boolean, nullable=False, default=True)

    container = orm.relationship(
        'Container', backref=orm.backref('container_acls', lazy=False))

    acl_users = orm.relationship(
        'ContainerACLUser',
        backref=orm.backref('container_acl', lazy=False),
        cascade="all, delete-orphan")

    __table_args__ = (sa.UniqueConstraint(
        'container_id', 'operation', name='_container_acl_operation_uc'),)

    def __init__(self, container_id=None, operation=None, project_access=None,
                 user_ids=None, check_exc=True):
        """Creates container ACL entity."""
        super(ContainerACL, self).__init__()

        msg = u._("Must supply non-None {0} argument for ContainerACL entry.")

        if container_id is None and check_exc:
            raise exception.MissingArgumentError(msg.format("container_id"))
        self.container_id = container_id

        if operation is None and check_exc:
            raise exception.MissingArgumentError(msg.format("operation"))
        self.operation = operation

        if project_access is not None:
            self.project_access = project_access
        self.status = States.ACTIVE

        if user_ids is not None and isinstance(user_ids, list):
            userids = set(user_ids)  # remove duplicates, if any
            for user_id in userids:
                acl_user = ContainerACLUser(self.id, user_id)
                self.acl_users.append(acl_user)

    def _do_delete_children(self, session):
        """Sub-class hook: delete children relationships."""
        for acl_user in self.acl_users:
            acl_user.delete(session)

    def _do_extra_dict_fields(self):
        """Sub-class hook method: return dict of fields.

        Adds the non-deleted ACL users from the relationship, if any.
        """
        users = [acl_user.user_id for acl_user in self.acl_users
                 if not acl_user.deleted]
        fields = {'acl_id': self.id,
                  'container_id': self.container_id,
                  'operation': self.operation,
                  'project_access': self.project_access}
        if users:
            fields['users'] = users
        return fields


class SecretACLUser(BASE, ModelBase):
    """Stores a user id for a secret ACL.

    Provides a way to store the list of users associated with a specific ACL
    operation. SecretACLUser deletes are not soft-deletes.
    """
    __tablename__ = 'secret_acl_users'

    acl_id = sa.Column(sa.String(36), sa.ForeignKey('secret_acls.id'),
                       index=True, nullable=False)
    user_id = sa.Column(sa.String(255), nullable=False)

    __table_args__ = (sa.UniqueConstraint(
        'acl_id', 'user_id', name='_secret_acl_user_uc'),)

    def __init__(self, acl_id=None, user_id=None, check_exc=True):
        """Creates secret ACL user entity."""
        super(SecretACLUser, self).__init__()

        msg = u._("Must supply non-None {0} argument "
                  "for SecretACLUser entry.")

        self.acl_id = acl_id
        if user_id is None and check_exc:
            raise exception.MissingArgumentError(msg.format("user_id"))
        self.user_id = user_id
        self.status = States.ACTIVE

    def _do_extra_dict_fields(self):
        """Sub-class hook method: return dict of fields."""
        return {'acl_id': self.acl_id,
                'user_id': self.user_id}


class ContainerACLUser(BASE, ModelBase):
    """Stores a user id for a container ACL.

    Provides a way to store the list of users associated with a specific ACL
    operation. ContainerACLUser deletes are not soft-deletes.
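    ContainerACLUser rows are normally created through the parent ACL; a
    minimal, illustrative sketch (ids below are hypothetical)::

        acl = ContainerACL(container_id='<container-uuid>', operation='read',
                           project_access=False,
                           user_ids=['user-a', 'user-b'])
        # acl.acl_users now holds one ContainerACLUser per unique user id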
""" __tablename__ = 'container_acl_users' acl_id = sa.Column(sa.String(36), sa.ForeignKey('container_acls.id'), index=True, nullable=False) user_id = sa.Column(sa.String(255), nullable=False) __table_args__ = (sa.UniqueConstraint( 'acl_id', 'user_id', name='_container_acl_user_uc'),) def __init__(self, acl_id=None, user_id=None, check_exc=True): """Creates container ACL user entity.""" super(ContainerACLUser, self).__init__() msg = u._("Must supply non-None {0} argument for ContainerACLUser " "entry.") self.acl_id = acl_id if user_id is None and check_exc: raise exception.MissingArgumentError(msg.format("user_id")) self.user_id = user_id self.status = States.ACTIVE def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return {'acl_id': self.acl_id, 'user_id': self.user_id} class ProjectQuotas(BASE, ModelBase): """Stores Project Quotas. Class to define project specific resource quotas. Project quota deletes are not soft-deletes. """ __tablename__ = 'project_quotas' project_id = sa.Column( sa.String(36), sa.ForeignKey('projects.id', name='project_quotas_fk'), index=True, nullable=False) secrets = sa.Column(sa.Integer, nullable=True) orders = sa.Column(sa.Integer, nullable=True) containers = sa.Column(sa.Integer, nullable=True) consumers = sa.Column(sa.Integer, nullable=True) cas = sa.Column(sa.Integer, nullable=True) def __init__(self, project_id=None, parsed_project_quotas=None, check_exc=True): """Creates Project Quotas entity from a project and a dict. :param project_id: the internal id of the project with quotas :param parsed_project_quotas: a dict with the keys matching the resources for which quotas are to be set, and the values containing the quota value to be set for this project and that resource. :return: None """ super(ProjectQuotas, self).__init__() msg = u._("Must supply non-None {0} argument for ProjectQuotas entry.") if project_id is None and check_exc: raise exception.MissingArgumentError(msg.format("project_id")) self.project_id = project_id if parsed_project_quotas is None: self.secrets = None self.orders = None self.containers = None self.consumers = None self.cas = None else: self.secrets = parsed_project_quotas.get('secrets') self.orders = parsed_project_quotas.get('orders') self.containers = parsed_project_quotas.get('containers') self.consumers = parsed_project_quotas.get('consumers') self.cas = parsed_project_quotas.get('cas') def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" ret = { 'project_id': self.project_id, } if self.secrets: ret['secrets'] = self.secrets if self.orders: ret['orders'] = self.orders if self.containers: ret['containers'] = self.containers if self.consumers: ret['consumers'] = self.consumers if self.cas: ret['cas'] = self.cas return ret class SecretStores(BASE, ModelBase): """List of secret stores defined via service configuration. This class provides a list of secret stores entities with their respective secret store plugin and crypto plugin names. SecretStores deletes are NOT soft-deletes. 
""" __tablename__ = 'secret_stores' store_plugin = sa.Column(sa.String(255), nullable=False) crypto_plugin = sa.Column(sa.String(255), nullable=True) global_default = sa.Column(sa.Boolean, nullable=False, default=False) name = sa.Column(sa.String(255), nullable=False) __table_args__ = (sa.UniqueConstraint( 'store_plugin', 'crypto_plugin', name='_secret_stores_plugin_names_uc'), sa.UniqueConstraint('name', name='_secret_stores_name_uc'),) def __init__(self, name=None, store_plugin=None, crypto_plugin=None, global_default=None, check_exc=True): """Creates secret store entity.""" super(SecretStores, self).__init__() msg = u._("Must supply non-Blank {0} argument for SecretStores entry.") if not name and check_exc: raise exception.MissingArgumentError(msg.format("name")) if not store_plugin and check_exc: raise exception.MissingArgumentError(msg.format("store_plugin")) self.store_plugin = store_plugin self.name = name self.crypto_plugin = crypto_plugin if global_default is not None: self.global_default = global_default self.status = States.ACTIVE def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return {'secret_store_id': self.id, 'store_plugin': self.store_plugin, 'crypto_plugin': self.crypto_plugin, 'global_default': self.global_default, 'name': self.name} class ProjectSecretStore(BASE, ModelBase): """Stores secret store to be used for new project secrets. This class maintains secret store and project mapping so that new project secret entries uses it as plugin backend. ProjectSecretStores deletes are NOT soft-deletes. """ __tablename__ = 'project_secret_store' secret_store_id = sa.Column(sa.String(36), sa.ForeignKey('secret_stores.id'), index=True, nullable=False) project_id = sa.Column(sa.String(36), sa.ForeignKey('projects.id'), index=True, nullable=False) secret_store = orm.relationship("SecretStores", backref="project_store") project = orm.relationship('Project', backref=orm.backref('preferred_secret_store')) __table_args__ = (sa.UniqueConstraint( 'project_id', name='_project_secret_store_project_uc'),) def __init__(self, project_id=None, secret_store_id=None, check_exc=True): """Creates project secret store mapping entity.""" super(ProjectSecretStore, self).__init__() msg = u._("Must supply non-None {0} argument for ProjectSecretStore " " entry.") if not project_id and check_exc: raise exception.MissingArgumentError(msg.format("project_id")) self.project_id = project_id if not secret_store_id and check_exc: raise exception.MissingArgumentError(msg.format("secret_store_id")) self.secret_store_id = secret_store_id self.status = States.ACTIVE def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return {'secret_store_id': self.secret_store_id, 'project_id': self.project_id} class SecretConsumerMetadatum(BASE, SoftDeleteMixIn, ModelBase): """Stores Consumer Registrations for Secrets in the datastore. Services can register interest in Secrets. Services will provide a resource type and a resource id for the object that is using the Secret. 
""" __tablename__ = "secret_consumer_metadata" secret_id = sa.Column( sa.String(36), sa.ForeignKey("secrets.id"), index=True, nullable=False ) project_id = sa.Column( sa.String(36), sa.ForeignKey("projects.id"), index=True, nullable=True ) service = sa.Column(sa.String(255), nullable=False) resource_type = sa.Column(sa.String(255), nullable=False) resource_id = sa.Column(sa.String(36), index=True, nullable=False) __table_args__ = ( sa.UniqueConstraint( "secret_id", "resource_id", name="_secret_consumer_resource_uc" ), ) def __init__(self, secret_id=None, project_id=None, service=None, resource_type=None, resource_id=None, check_exc=True): """Registers a Consumer to a Secret.""" super(SecretConsumerMetadatum, self).__init__() msg = u._("Must supply non-None {0} argument " "for SecretConsumerMetadatum entry.") if secret_id is None and check_exc: raise exception.MissingArgumentError(msg.format("secret_id")) if project_id is None and check_exc: raise exception.MissingArgumentError(msg.format("project_id")) if service is None and check_exc: raise exception.MissingArgumentError(msg.format("service")) if resource_type is None and check_exc: raise exception.MissingArgumentError(msg.format("resource_type")) if resource_id is None and check_exc: raise exception.MissingArgumentError(msg.format("resource_id")) self.secret_id = secret_id self.project_id = project_id self.service = service self.resource_type = resource_type self.resource_id = resource_id self.status = States.ACTIVE def _do_extra_dict_fields(self): """Sub-class hook method: return dict of fields.""" return { "service": self.service, "resource_type": self.resource_type, "resource_id": self.resource_id, } barbican-9.1.0.dev50/barbican/model/migration/0000775000175000017500000000000013616500640021243 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic.ini0000664000175000017500000000241113616500636023343 0ustar sahidsahid00000000000000# A generic, single database configuration [alembic] # path to migration scripts script_location = %(here)s/alembic_migrations # template used to generate migration files # file_template = %%(rev)s_%%(slug)s # set to 'true' to run the environment during # the 'revision' command, regardless of autogenerate # revision_environment = false # default to an empty string because the Barbican migration process will # extract the correct value and set it programmatically before alembic is fully # invoked. sqlalchemy.url = #sqlalchemy.url = driver://user:pass@localhost/dbname #sqlalchemy.url = sqlite:///barbican.sqlite #sqlalchemy.url = sqlite:////var/lib/barbican/barbican.sqlite #sqlalchemy.url = postgresql+psycopg2://postgres:postgres@localhost:5432/barbican_api # Logging configuration [loggers] keys = alembic #keys = root,sqlalchemy,alembic [handlers] keys = console [formatters] keys = generic [logger_root] level = DEBUG handlers = console qualname = [logger_sqlalchemy] level = DEBUG handlers = qualname = sqlalchemy.engine [logger_alembic] level = INFO handlers = qualname = alembic [handler_console] class = StreamHandler args = (sys.stderr,) level = NOTSET formatter = generic [formatter_generic] format = %(levelname)-5.5s [%(name)s] %(message)s datefmt = %H:%M:%S barbican-9.1.0.dev50/barbican/model/migration/__init__.py0000664000175000017500000000000013616500636023347 0ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/model/migration/commands.py0000664000175000017500000000547513616500636023436 0ustar sahidsahid00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ Interface to the Alembic migration process and environment. Concepts in this file are based on Quantum's Alembic approach. Available Alembic commands are detailed here: https://alembic.readthedocs.org/en/latest/api.html#module-alembic.command """ import os from alembic import command as alembic_command from alembic import config as alembic_config from barbican.common import config from barbican.common import utils LOG = utils.getLogger(__name__) CONF = config.CONF def init_config(sql_url=None): """Initialize and return the Alembic configuration.""" sqlalchemy_url = sql_url or CONF.sql_connection if not sqlalchemy_url: raise RuntimeError("Please specify a SQLAlchemy-friendly URL to " "connect to the proper database, either through " "the CLI or the configuration file.") if sqlalchemy_url and 'sqlite' in sqlalchemy_url: LOG.warning('!!! Limited support for migration commands using' ' sqlite databases; This operation may not succeed.') config = alembic_config.Config( os.path.join(os.path.dirname(__file__), 'alembic.ini') ) config.barbican_sqlalchemy_url = sqlalchemy_url config.set_main_option('script_location', 'barbican.model.migration:alembic_migrations') return config def upgrade(to_version='head', sql_url=None): """Upgrade to the specified version.""" alembic_cfg = init_config(sql_url) alembic_command.upgrade(alembic_cfg, to_version) def history(verbose, sql_url=None): alembic_cfg = init_config(sql_url) alembic_command.history(alembic_cfg, verbose=verbose) def current(verbose, sql_url=None): alembic_cfg = init_config(sql_url) alembic_command.current(alembic_cfg, verbose=verbose) def stamp(to_version='head', sql_url=None): """Stamp the specified version, with no migration performed.""" alembic_cfg = init_config(sql_url) alembic_command.stamp(alembic_cfg, to_version) def generate(autogenerate=True, message='generate changes', sql_url=None): """Generate a version file.""" alembic_cfg = init_config(sql_url) alembic_command.revision(alembic_cfg, message=message, autogenerate=autogenerate) barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/0000775000175000017500000000000013616500640025073 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/env.py0000664000175000017500000000553513616500636026252 0ustar sahidsahid00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
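# For context: the migration helpers in commands.py above are usually driven
# through the barbican-manage CLI, but they can also be invoked directly. A
# minimal, illustrative sketch (the sqlite URL below is hypothetical):
#
#     from barbican.model.migration import commands
#     commands.upgrade(to_version='head',
#                      sql_url='sqlite:////var/lib/barbican/barbican.sqlite')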
from __future__ import with_statement

from alembic import context
from oslo_db.sqlalchemy import session

from barbican.model import models

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
# Note that the 'config' instance is not available for unit testing.
try:
    config = context.config
except Exception:
    config = None

# WARNING! The following was autogenerated by Alembic as part of setting up
# the initial environment. Unfortunately it also **clobbers** the logging
# for the rest of this application, so please do not use it!
# Interpret the config file for Python logging.
# This line sets up loggers basically.
# fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = models.BASE.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def get_sqlalchemy_url():
    return (config.barbican_sqlalchemy_url or
            config.get_main_option("sqlalchemy.url"))


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL and not an Engine, though an
    Engine is acceptable here as well. By skipping the Engine creation we
    don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the script
    output.
    """
    context.configure(url=get_sqlalchemy_url())

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine and associate a connection
    with the context.
    """
    engine = session.create_engine(
        get_sqlalchemy_url())

    connection = engine.connect()
    context.configure(
        connection=connection,
        target_metadata=target_metadata
    )

    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        connection.close()


if config:
    if context.is_offline_mode():
        run_migrations_offline()
    else:
        run_migrations_online()
barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/container_init_ops.py0000664000175000017500000000577013616500636031345 0ustar sahidsahid00000000000000# Copyright 2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#

# Initial operations for agent management extension
# This module only manages the 'agents' table.
Binding tables are created # in the modules for relevant resources from alembic import op import sqlalchemy as sa def upgrade(): op.create_table( 'containers', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('name', sa.String(length=255), nullable=True), sa.Column('type', sa.Enum('generic', 'rsa', 'dsa', 'certificate', name='container_types'), nullable=True), sa.Column('tenant_id', sa.String(length=36), nullable=False), sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'],), sa.PrimaryKeyConstraint('id') ) op.create_table( 'container_consumer_metadata', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('name', sa.String(length=255), nullable=True), sa.Column('container_id', sa.String(length=36), nullable=False), sa.Column('URL', sa.String(length=255), nullable=True), sa.Column('data_hash', sa.CHAR(64), nullable=True), sa.ForeignKeyConstraint(['container_id'], ['containers.id'],), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('data_hash', name='_consumer_hashed_container_name_url_uc'), sa.Index('values_index', 'container_id', 'name', 'URL') ) op.create_table( 'container_secret', sa.Column('name', sa.String(length=255), nullable=True), sa.Column('container_id', sa.String(length=36), nullable=False), sa.Column('secret_id', sa.String(length=36), nullable=False), sa.ForeignKeyConstraint(['container_id'], ['containers.id'],), sa.ForeignKeyConstraint(['secret_id'], ['secrets.id'],) ) barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/kek_init_ops.py0000664000175000017500000000366613616500636030143 0ustar sahidsahid00000000000000# Copyright 2015 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # # Initial operations for agent management extension # This module only manages the 'agents' table. 
Binding tables are created # in the modules for relevant resources from alembic import op import sqlalchemy as sa def upgrade(): op.create_table( 'kek_data', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('plugin_name', sa.String(length=255), nullable=False), sa.Column('kek_label', sa.String(length=255), nullable=True), sa.Column('tenant_id', sa.String(length=36), nullable=False), sa.Column('active', sa.Boolean(), nullable=False), sa.Column('bind_completed', sa.Boolean(), nullable=False), sa.Column('algorithm', sa.String(length=255), nullable=True), sa.Column('bit_length', sa.Integer(), nullable=True), sa.Column('mode', sa.String(length=255), nullable=True), sa.Column('plugin_meta', sa.Text(), nullable=True), sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'],), sa.PrimaryKeyConstraint('id') ) barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/secrets_init_ops.py0000664000175000017500000000402413616500636031026 0ustar sahidsahid00000000000000# Copyright 2015 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # # Initial operations for agent management extension # This module only manages the 'agents' table. 
Binding tables are created # in the modules for relevant resources from alembic import op import sqlalchemy as sa def upgrade(): op.create_table( 'secrets', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('name', sa.String(length=255), nullable=True), sa.Column('expiration', sa.DateTime(), nullable=True), sa.Column('algorithm', sa.String(length=255), nullable=True), sa.Column('bit_length', sa.Integer(), nullable=True), sa.Column('mode', sa.String(length=255), nullable=True), sa.PrimaryKeyConstraint('id') ) op.create_table( 'tenant_secret', sa.Column('tenant_id', sa.String(length=36), nullable=False), sa.Column('secret_id', sa.String(length=36), nullable=False), sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'],), sa.ForeignKeyConstraint(['secret_id'], ['secrets.id'],), sa.UniqueConstraint('tenant_id', 'secret_id', name='_tenant_secret_uc') ) barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/0000775000175000017500000000000013616500640026743 5ustar sahidsahid00000000000000././@LongLink0000000000000000000000000000015700000000000011220 Lustar 00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/0f8c192a061f_add_secret_consumers.pybarbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/0f8c192a061f_add_secret_co0000664000175000017500000000456213616500636033272 0ustar sahidsahid00000000000000# Copyright 2019 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # """Add Secret Consumers table Revision ID: 0f8c192a061f Revises: 39cf2e645cba Create Date: 2019-08-19 12:03:08.567230 """ # revision identifiers, used by Alembic. 
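# The upgrade() below guards its create_table call with
# ctx.dialect.has_table() so that re-running the migration is harmless. A
# rough, illustrative equivalent using the SQLAlchemy inspector API (not
# what this module actually uses):
#
#     import sqlalchemy as sa
#     inspector = sa.inspect(con.engine)
#     table_exists = ("secret_consumer_metadata"
#                     in inspector.get_table_names())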
revision = "0f8c192a061f" down_revision = "39cf2e645cba" from alembic import op import sqlalchemy as sa def upgrade(): ctx = op.get_context() con = op.get_bind() table_exists = ctx.dialect.has_table(con.engine, "secret_consumer_metadata") if not table_exists: op.create_table( "secret_consumer_metadata", # ModelBase sa.Column("id", sa.String(length=36), nullable=False), sa.Column("created_at", sa.DateTime(), nullable=False), sa.Column("updated_at", sa.DateTime(), nullable=False), sa.Column("deleted_at", sa.DateTime(), nullable=True), sa.Column("deleted", sa.Boolean(), nullable=False), sa.Column("status", sa.String(length=20), nullable=False), # SecretConsumerMetadatum sa.Column("secret_id", sa.String(36), nullable=False), sa.Column("project_id", sa.String(36), nullable=False), sa.Column("service", sa.String(255), nullable=False), sa.Column("resource_type", sa.String(255), nullable=False), sa.Column("resource_id", sa.String(36), nullable=False), # Constraints and Indexes sa.PrimaryKeyConstraint("id"), sa.ForeignKeyConstraint(["secret_id"], ["secrets.id"]), sa.UniqueConstraint( "secret_id", "resource_id", name="_secret_consumer_resource_uc" ), sa.Index("ix_secret_consumer_metadata_secret_id", "secret_id"), sa.Index("ix_secret_consumer_metadata_resource_id", "resource_id"), ) ././@LongLink0000000000000000000000000000017500000000000011220 Lustar 00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/39cf2e645cba_model_for_multiple_backend_support.pybarbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/39cf2e645cba_model_for_mul0000664000175000017500000000632613616500636033505 0ustar sahidsahid00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Model for multiple backend support Revision ID: 39cf2e645cba Revises: d2780d5aa510 Create Date: 2016-07-29 16:45:22.953811 """ # revision identifiers, used by Alembic. 
revision = '39cf2e645cba' down_revision = 'd2780d5aa510' from alembic import op import sqlalchemy as sa def upgrade(): ctx = op.get_context() con = op.get_bind() table_exists = ctx.dialect.has_table(con.engine, 'secret_stores') if not table_exists: op.create_table( 'secret_stores', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('store_plugin', sa.String(length=255), nullable=False), sa.Column('crypto_plugin', sa.String(length=255), nullable=True), sa.Column('global_default', sa.Boolean(), nullable=False, default=False), sa.Column('name', sa.String(length=255), nullable=False), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('store_plugin', 'crypto_plugin', name='_secret_stores_plugin_names_uc'), sa.UniqueConstraint('name', name='_secret_stores_name_uc') ) table_exists = ctx.dialect.has_table(con.engine, 'project_secret_store') if not table_exists: op.create_table( 'project_secret_store', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('project_id', sa.String(length=36), nullable=False), sa.Column('secret_store_id', sa.String(length=36), nullable=False), sa.ForeignKeyConstraint(['project_id'], ['projects.id'],), sa.ForeignKeyConstraint( ['secret_store_id'], ['secret_stores.id'],), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('project_id', name='_project_secret_store_project_uc') ) op.create_index(op.f('ix_project_secret_store_project_id'), 'project_secret_store', ['project_id'], unique=True) ././@LongLink0000000000000000000000000000017500000000000011220 Lustar 00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/13d127569afa_create_secret_store_metadata_table.pybarbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/13d127569afa_create_secret0000664000175000017500000000357013616500636033325 0ustar sahidsahid00000000000000# Copyright (c) 2014 The Johns Hopkins University/Applied Physics Laboratory # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """create_secret_store_metadata_table Revision ID: 13d127569afa Revises: juno Create Date: 2014-04-24 13:15:41.858266 """ # revision identifiers, used by Alembic. 
revision = '13d127569afa' down_revision = 'juno' from alembic import op import sqlalchemy as sa def upgrade(): ctx = op.get_context() con = op.get_bind() table_exists = ctx.dialect.has_table(con.engine, 'secret_store_metadata') if not table_exists: op.create_table( 'secret_store_metadata', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('secret_id', sa.String(length=36), nullable=False), sa.Column('key', sa.String(length=255), nullable=False), sa.Column('value', sa.String(length=255), nullable=False), sa.ForeignKeyConstraint(['secret_id'], ['secrets.id'],), sa.PrimaryKeyConstraint('id'), ) ././@LongLink0000000000000000000000000000017600000000000011221 Lustar 00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/1a7cf79559e3_new_secret_and_container_acl_tables.pybarbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/1a7cf79559e3_new_secret_an0000664000175000017500000001321213616500636033342 0ustar sahidsahid00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """New secret and container ACL tables Revision ID: 1a7cf79559e3 Revises: 1c0f328bfce0 Create Date: 2015-04-01 13:31:04.292754 """ # revision identifiers, used by Alembic. 
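# To see where a given database sits relative to revisions such as this one,
# the wrappers in commands.py can be used; an illustrative sketch (the URL
# below is hypothetical):
#
#     from barbican.model.migration import commands
#     commands.current(verbose=True, sql_url='postgresql+psycopg2://...')
#     commands.history(verbose=True)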
revision = '1a7cf79559e3' down_revision = '1c0f328bfce0' from alembic import op import sqlalchemy as sa def upgrade(): ctx = op.get_context() con = op.get_bind() table_exists = ctx.dialect.has_table(con.engine, 'secret_acls') if not table_exists: op.create_table( 'secret_acls', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('secret_id', sa.String(length=36), nullable=False), sa.Column('operation', sa.String(length=255), nullable=False), sa.Column('creator_only', sa.Boolean(), nullable=False), sa.ForeignKeyConstraint(['secret_id'], ['secrets.id'],), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('secret_id', 'operation', name='_secret_acl_operation_uc') ) op.create_index(op.f('ix_secret_acls_secret_id'), 'secret_acls', ['secret_id'], unique=False) table_exists = ctx.dialect.has_table(con.engine, 'container_acls') if not table_exists: op.create_table( 'container_acls', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('container_id', sa.String(length=36), nullable=False), sa.Column('operation', sa.String(length=255), nullable=False), sa.Column('creator_only', sa.Boolean(), nullable=False), sa.ForeignKeyConstraint(['container_id'], ['containers.id'],), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('container_id', 'operation', name='_container_acl_operation_uc') ) op.create_index(op.f('ix_container_acls_container_id'), 'container_acls', ['container_id'], unique=False) table_exists = ctx.dialect.has_table(con.engine, 'secret_acl_users') if not table_exists: op.create_table( 'secret_acl_users', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('acl_id', sa.String(length=36), nullable=False), sa.Column('user_id', sa.String(length=255), nullable=False), sa.ForeignKeyConstraint(['acl_id'], ['secret_acls.id'],), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('acl_id', 'user_id', name='_secret_acl_user_uc') ) op.create_index(op.f('ix_secret_acl_users_acl_id'), 'secret_acl_users', ['acl_id'], unique=False) table_exists = ctx.dialect.has_table(con.engine, 'container_acl_users') if not table_exists: op.create_table( 'container_acl_users', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('acl_id', sa.String(length=36), nullable=False), sa.Column('user_id', sa.String(length=255), nullable=False), sa.ForeignKeyConstraint(['acl_id'], ['container_acls.id'],), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('acl_id', 'user_id', 
name='_container_acl_user_uc') ) op.create_index(op.f('ix_container_acl_users_acl_id'), 'container_acl_users', ['acl_id'], unique=False) op.add_column(u'containers', sa.Column('creator_id', sa.String(length=255), nullable=True)) op.add_column(u'orders', sa.Column('creator_id', sa.String(length=255), nullable=True)) op.add_column(u'secrets', sa.Column('creator_id', sa.String(length=255), nullable=True)) ././@LongLink0000000000000000000000000000017500000000000011220 Lustar 00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/10220ccbe7fa_remove_transport_keys_column_from_.pybarbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/10220ccbe7fa_remove_transp0000664000175000017500000000157713616500636033530 0ustar sahidsahid00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Remove transport keys column from project quotas table Revision ID: 10220ccbe7fa Revises: 3c3b04040bfe Create Date: 2015-09-09 09:10:23.812681 """ # revision identifiers, used by Alembic. revision = '10220ccbe7fa' down_revision = '3c3b04040bfe' from alembic import op def upgrade(): op.drop_column('project_quotas', 'transport_keys') ././@LongLink0000000000000000000000000000016300000000000011215 Lustar 00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/dce488646127_add_secret_user_metadata.pybarbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/dce488646127_add_secret_us0000664000175000017500000000366313616500636033260 0ustar sahidsahid00000000000000# Copyright (c) 2015 IBM # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """add-secret-user-metadata Revision ID: dce488646127 Revises: 39a96e67e990 Create Date: 2016-02-09 04:52:03.975486 """ # revision identifiers, used by Alembic. 
revision = 'dce488646127' down_revision = '39a96e67e990' from alembic import op import sqlalchemy as sa def upgrade(): ctx = op.get_context() con = op.get_bind() table_exists = ctx.dialect.has_table(con.engine, 'secret_user_metadata') if not table_exists: op.create_table( 'secret_user_metadata', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('key', sa.String(length=255), nullable=False), sa.Column('value', sa.String(length=255), nullable=False), sa.Column('secret_id', sa.String(length=36), nullable=False), sa.ForeignKeyConstraint(['secret_id'], ['secrets.id'],), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('secret_id', 'key', name='_secret_key_uc') ) ././@LongLink0000000000000000000000000000017100000000000011214 Lustar 00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/30dba269cc64_update_order_retry_tasks_table.pybarbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/30dba269cc64_update_order_0000664000175000017500000000341013616500636033377 0ustar sahidsahid00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Update order_retry_tasks table Revision ID: 30dba269cc64 Revises: 3041b53b95d7 Create Date: 2015-04-01 17:53:25.447919 """ # revision identifiers, used by Alembic. revision = '30dba269cc64' down_revision = '3041b53b95d7' from oslo_utils import timeutils from alembic import op from barbican.model import models as m import sqlalchemy as sa def upgrade(): op.add_column( 'order_retry_tasks', sa.Column( 'created_at', sa.DateTime(), nullable=False, server_default=str(timeutils.utcnow()))) op.add_column( 'order_retry_tasks', sa.Column( 'deleted', sa.Boolean(), nullable=False, server_default='0')) op.add_column( 'order_retry_tasks', sa.Column('deleted_at', sa.DateTime(), nullable=True)) op.add_column( 'order_retry_tasks', sa.Column( 'status', sa.String(length=20), nullable=False, server_default=m.States.PENDING)) op.add_column( 'order_retry_tasks', sa.Column( 'updated_at', sa.DateTime(), nullable=False, server_default=str(timeutils.utcnow()))) ././@LongLink0000000000000000000000000000020200000000000011207 Lustar 00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/3041b53b95d7_remove_size_limits_on_meta_table_values.pybarbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/3041b53b95d7_remove_size_l0000664000175000017500000000204713616500636033274 0ustar sahidsahid00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Remove size limits on meta table values Revision ID: 3041b53b95d7 Revises: 1a7cf79559e3 Create Date: 2015-04-08 15:43:32.852529 """ # revision identifiers, used by Alembic. revision = '3041b53b95d7' down_revision = '1a7cf79559e3' from alembic import op import sqlalchemy as sa def upgrade(): op.alter_column( 'order_barbican_metadata', 'value', type_=sa.Text() ) op.alter_column( 'certificate_authority_metadata', 'value', type_=sa.Text() ) ././@LongLink0000000000000000000000000000017100000000000011214 Lustar 00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/2843d6469f25_add_sub_status_info_for_orders.pybarbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/2843d6469f25_add_sub_statu0000664000175000017500000000214113616500636033204 0ustar sahidsahid00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """add sub status info for orders Revision ID: 2843d6469f25 Revises: 2ab3f5371bde Create Date: 2014-09-16 12:31:15.181380 """ # revision identifiers, used by Alembic. revision = '2843d6469f25' down_revision = '2ab3f5371bde' from alembic import op import sqlalchemy as sa def upgrade(): op.add_column('orders', sa.Column('sub_status', sa.String(length=36), nullable=True)) op.add_column('orders', sa.Column('sub_status_message', sa.String(length=255), nullable=True)) ././@LongLink0000000000000000000000000000017500000000000011220 Lustar 00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/2ab3f5371bde_dsa_in_container_type_modelbase_to.pybarbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/2ab3f5371bde_dsa_in_contai0000664000175000017500000000331513616500636033437 0ustar sahidsahid00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """dsa in container type modelbase_to Revision ID: 2ab3f5371bde Revises: 4070806f6972 Create Date: 2014-09-02 12:11:43.524247 """ # revision identifiers, used by Alembic. 
revision = '2ab3f5371bde' down_revision = '4070806f6972' from alembic import op import sqlalchemy as sa def upgrade(): op.add_column('container_secret', sa.Column('created_at', sa.DateTime(), nullable=False)) op.add_column('container_secret', sa.Column('deleted', sa.Boolean(), nullable=False)) op.add_column('container_secret', sa.Column('deleted_at', sa.DateTime(), nullable=True)) op.add_column('container_secret', sa.Column('id', sa.String(length=36), nullable=False)) op.add_column('container_secret', sa.Column('status', sa.String(length=20), nullable=False)) op.add_column('container_secret', sa.Column('updated_at', sa.DateTime(), nullable=False)) op.create_primary_key('pk_container_secret', 'container_secret', ['id']) op.create_unique_constraint( '_container_secret_name_uc', 'container_secret', ['container_id', 'secret_id', 'name'] ) ././@LongLink0000000000000000000000000000020000000000000011205 Lustar 00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/3c3b04040bfe_add_owning_project_and_creator_to_cas.pybarbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/3c3b04040bfe_add_owning_pr0000664000175000017500000000240713616500636033367 0ustar sahidsahid00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """add owning project and creator to CAs Revision ID: 3c3b04040bfe Revises: 156cd9933643 Create Date: 2015-09-04 12:22:22.745824 """ # revision identifiers, used by Alembic. revision = '3c3b04040bfe' down_revision = '156cd9933643' from alembic import op import sqlalchemy as sa def upgrade(): op.add_column('certificate_authorities', sa.Column('creator_id', sa.String(length=255), nullable=True)) op.add_column('certificate_authorities', sa.Column('project_id', sa.String(length=36), nullable=True)) op.create_foreign_key('cas_project_fk', 'certificate_authorities', 'projects', ['project_id'], ['id']) ././@LongLink0000000000000000000000000000020000000000000011205 Lustar 00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/4070806f6972_add_orders_plugin_metadata_table_and_.pybarbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/4070806f6972_add_orders_pl0000664000175000017500000000342613616500636033107 0ustar sahidsahid00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Add orders plugin metadata table and relationships Revision ID: 4070806f6972 Revises: 47b69e523451 Create Date: 2014-08-21 14:06:48.237701 """ # revision identifiers, used by Alembic. 
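# The upgrade below is guarded by a has_table() check, so re-running it
# against a database that already has the table is a no-op. Roughly
# equivalent DDL when the table is missing (a sketch; the real statements
# come from SQLAlchemy):
#
#     CREATE TABLE order_plugin_metadata (
#         id VARCHAR(36) NOT NULL,
#         order_id VARCHAR(36) NOT NULL REFERENCES orders (id),
#         `key` VARCHAR(255) NOT NULL,
#         value VARCHAR(255) NOT NULL,
#         -- ... plus the usual created_at/updated_at/deleted/status columns
#         PRIMARY KEY (id)
#     );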
revision = '4070806f6972' down_revision = '47b69e523451' from alembic import op import sqlalchemy as sa def upgrade(): ctx = op.get_context() con = op.get_bind() table_exists = ctx.dialect.has_table(con.engine, 'order_plugin_metadata') if not table_exists: op.create_table( 'order_plugin_metadata', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('order_id', sa.String(length=36), nullable=False), sa.Column('key', sa.String(length=255), nullable=False), sa.Column('value', sa.String(length=255), nullable=False), sa.ForeignKeyConstraint(['order_id'], ['orders.id'],), sa.PrimaryKeyConstraint('id'), ) ././@LongLink0000000000000000000000000000016200000000000011214 Lustar 00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/2d21598e7e70_added_ca_related_tables.pybarbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/2d21598e7e70_added_ca_rela0000664000175000017500000001177313616500636033161 0ustar sahidsahid00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Added CA related tables Revision ID: 2d21598e7e70 Revises: 3d36a26b88af Create Date: 2015-03-11 15:47:32.292944 """ # revision identifiers, used by Alembic. 
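# Relationship sketch for the four tables created below (arrows point from
# the table holding the foreign key to the referenced table):
#
#     project_certificate_authorities   --> projects
#     project_certificate_authorities   --> certificate_authorities
#     certificate_authority_metadata    --> certificate_authorities
#     preferred_certificate_authorities --> projects
#     preferred_certificate_authorities --> certificate_authorities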
revision = '2d21598e7e70' down_revision = '3d36a26b88af' from alembic import op import sqlalchemy as sa def upgrade(): ctx = op.get_context() con = op.get_bind() table_exists = ctx.dialect.has_table(con.engine, 'certificate_authorities') if not table_exists: op.create_table( 'certificate_authorities', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('plugin_name', sa.String(length=255), nullable=False), sa.Column('plugin_ca_id', sa.Text(), nullable=False), sa.Column('expiration', sa.DateTime(), nullable=True), sa.PrimaryKeyConstraint('id') ) table_exists = ctx.dialect.has_table( con.engine, 'project_certificate_authorities') if not table_exists: op.create_table( 'project_certificate_authorities', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('project_id', sa.String(length=36), nullable=False), sa.Column('ca_id', sa.String(length=36), nullable=False), sa.ForeignKeyConstraint(['ca_id'], ['certificate_authorities.id'],), sa.ForeignKeyConstraint(['project_id'], ['projects.id'],), sa.PrimaryKeyConstraint('id', 'project_id', 'ca_id'), sa.UniqueConstraint('project_id', 'ca_id', name='_project_certificate_authority_uc') ) table_exists = ctx.dialect.has_table( con.engine, 'certificate_authority_metadata') if not table_exists: op.create_table( 'certificate_authority_metadata', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('key', sa.String(length=255), nullable=False), sa.Column('value', sa.String(length=255), nullable=False), sa.Column('ca_id', sa.String(length=36), nullable=False), sa.ForeignKeyConstraint(['ca_id'], ['certificate_authorities.id'],), sa.PrimaryKeyConstraint('id', 'key', 'ca_id'), sa.UniqueConstraint('ca_id', 'key', name='_certificate_authority_metadatum_uc') ) table_exists = ctx.dialect.has_table( con.engine, 'preferred_certificate_authorities') if not table_exists: op.create_table( 'preferred_certificate_authorities', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('project_id', sa.String(length=36), nullable=False), sa.Column('ca_id', sa.String(length=36), nullable=True), sa.ForeignKeyConstraint(['ca_id'], ['certificate_authorities.id'],), sa.ForeignKeyConstraint(['project_id'], ['projects.id'],), sa.PrimaryKeyConstraint('id', 'project_id'), sa.UniqueConstraint('project_id') ) ././@LongLink0000000000000000000000000000020100000000000011206 Lustar 
00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/256da65e0c5f_change_keystone_id_for_external_id_in_.pybarbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/256da65e0c5f_change_keysto0000664000175000017500000000174613616500636033425 0ustar sahidsahid00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Change keystone_id for external_id in Project model Revision ID: 256da65e0c5f Revises: 795737bb3c3 Create Date: 2014-12-22 03:55:29.072375 """ # revision identifiers, used by Alembic. revision = '256da65e0c5f' down_revision = '795737bb3c3' from alembic import op import sqlalchemy as sa def upgrade(): op.alter_column('projects', 'keystone_id', type_=sa.String(36), new_column_name='external_id') ././@LongLink0000000000000000000000000000020300000000000011210 Lustar 00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/161f8aceb687_fill_project_id_to_secrets_where_missing.pybarbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/161f8aceb687_fill_project_0000664000175000017500000000444413616500636033421 0ustar sahidsahid00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """fill project_id to secrets where missing Revision ID: 161f8aceb687 Revises: 1bc885808c76 Create Date: 2015-06-22 15:58:03.131256 """ # revision identifiers, used by Alembic. revision = '161f8aceb687' down_revision = '1bc885808c76' from alembic import op import sqlalchemy as sa def _get_database_metadata(): con = op.get_bind() metadata = sa.MetaData(bind=con) metadata.reflect() return metadata def _drop_constraint(ctx, name, table): if ctx.dialect.name == 'mysql': # MySQL won't allow some operations with constraints in place op.drop_constraint(name, table, type_='foreignkey') def _create_constraint(ctx, name, tableone, tabletwo, columnone, columntwo): if ctx.dialect.name == 'mysql': # Recreate foreign key constraint op.create_foreign_key(name, tableone, tabletwo, columnone, columntwo) def upgrade(): metadata = _get_database_metadata() # Get relevant tables secrets = metadata.tables['secrets'] project_secret = metadata.tables['project_secret'] # Add project_id to the secrets op.execute(secrets.update(). values({'project_id': project_secret.c.project_id}). where(secrets.c.id == project_secret.c.secret_id). 
where(secrets.c.project_id == None) # noqa ) # Need to drop foreign key constraint before mysql will allow changes ctx = op.get_context() _drop_constraint(ctx, 'secrets_project_fk', 'secrets') # make project_id no longer nullable op.alter_column('secrets', 'project_id', type_=sa.String(36), nullable=False) # Create foreign key constraint again _create_constraint(ctx, 'secrets_project_fk', 'secrets', 'projects', ['project_id'], ['id']) ././@LongLink0000000000000000000000000000016200000000000011214 Lustar 00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/46b98cde536_add_project_quotas_table.pybarbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/46b98cde536_add_project_qu0000664000175000017500000000427513616500636033430 0ustar sahidsahid00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Add project quotas table Revision ID: 46b98cde536 Revises: kilo Create Date: 2015-08-28 17:42:35.057103 """ # revision identifiers, used by Alembic. revision = '46b98cde536' down_revision = 'kilo' from alembic import op import sqlalchemy as sa def upgrade(): ctx = op.get_context() con = op.get_bind() table_exists = ctx.dialect.has_table(con.engine, 'project_quotas') if not table_exists: op.create_table( 'project_quotas', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('project_id', sa.String(length=36), nullable=False), sa.Column('secrets', sa.Integer(), nullable=True), sa.Column('orders', sa.Integer(), nullable=True), sa.Column('containers', sa.Integer(), nullable=True), sa.Column('transport_keys', sa.Integer(), nullable=True), sa.Column('consumers', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['project_id'], ['projects.id'], name='project_quotas_fk'), sa.PrimaryKeyConstraint('id'), mysql_engine='InnoDB') op.create_index( op.f('ix_project_quotas_project_id'), 'project_quotas', ['project_id'], unique=False) barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/kilo_release.py0000664000175000017500000000156413616500636031766 0ustar sahidsahid00000000000000# Copyright 2015 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License.
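# Marker revisions such as the one below make no schema changes; they only
# pin a release name into the Alembic history, so the revision chain around
# this point reads:
#
#     1bece815014f -> kilo -> 46b98cde536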
# """kilo Revision ID: kilo Revises: 1bece815014f Create Date: 2015-08-26 00:00:00.000000 """ # revision identifiers, used by Alembic. revision = 'kilo' down_revision = '1bece815014f' def upgrade(): """A no-op migration for marking the Kilo release.""" pass ././@LongLink0000000000000000000000000000017500000000000011220 Lustar 00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/1c0f328bfce0_fixing_composite_primary_keys_and_.pybarbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/1c0f328bfce0_fixing_compos0000664000175000017500000001100713616500636033504 0ustar sahidsahid00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Fixing composite primary keys and adding indexes to foreign key Revision ID: 1c0f328bfce0 Revises: 3d36a26b88af Create Date: 2015-03-04 17:09:41.479708 """ # revision identifiers, used by Alembic. revision = '1c0f328bfce0' down_revision = '2d21598e7e70' from alembic import op import sqlalchemy as sa def _drop_constraint(ctx, name, table): if ctx.dialect.name == 'mysql': # MySQL won't allow some operations with constraints in place op.drop_constraint(name, table, type_='foreignkey') def upgrade(): op.create_index(op.f('ix_certificate_authority_metadata_ca_id'), 'certificate_authority_metadata', ['ca_id'], unique=False) op.create_index(op.f('ix_certificate_authority_metadata_key'), 'certificate_authority_metadata', ['key'], unique=False) op.create_index(op.f('ix_container_consumer_metadata_container_id'), 'container_consumer_metadata', ['container_id'], unique=False) op.create_index(op.f('ix_container_secret_container_id'), 'container_secret', ['container_id'], unique=False) op.create_index(op.f('ix_container_secret_secret_id'), 'container_secret', ['secret_id'], unique=False) op.create_index(op.f('ix_containers_project_id'), 'containers', ['project_id'], unique=False) op.create_index(op.f('ix_encrypted_data_kek_id'), 'encrypted_data', ['kek_id'], unique=False) op.create_index(op.f('ix_encrypted_data_secret_id'), 'encrypted_data', ['secret_id'], unique=False) op.create_index(op.f('ix_kek_data_project_id'), 'kek_data', ['project_id'], unique=False) op.create_index(op.f('ix_order_barbican_metadata_order_id'), 'order_barbican_metadata', ['order_id'], unique=False) op.create_index(op.f('ix_order_plugin_metadata_order_id'), 'order_plugin_metadata', ['order_id'], unique=False) op.create_index(op.f('ix_order_retry_tasks_order_id'), 'order_retry_tasks', ['order_id'], unique=False) op.create_index(op.f('ix_orders_container_id'), 'orders', ['container_id'], unique=False) op.create_index(op.f('ix_orders_project_id'), 'orders', ['project_id'], unique=False) op.create_index(op.f('ix_orders_secret_id'), 'orders', ['secret_id'], unique=False) ctx = op.get_context() _drop_constraint(ctx, 'preferred_certificate_authorities_ibfk_1', 'preferred_certificate_authorities') op.alter_column('preferred_certificate_authorities', 'ca_id', existing_type=sa.VARCHAR(length=36), nullable=False) 
op.create_foreign_key('preferred_certificate_authorities_fk', 'preferred_certificate_authorities', 'certificate_authorities', ['ca_id'], ['id']) op.create_index(op.f('ix_preferred_certificate_authorities_ca_id'), 'preferred_certificate_authorities', ['ca_id'], unique=False) op.create_index(op.f('ix_preferred_certificate_authorities_project_id'), 'preferred_certificate_authorities', ['project_id'], unique=True) op.create_index(op.f('ix_project_certificate_authorities_ca_id'), 'project_certificate_authorities', ['ca_id'], unique=False) op.create_index(op.f('ix_project_certificate_authorities_project_id'), 'project_certificate_authorities', ['project_id'], unique=False) op.create_index(op.f('ix_project_secret_project_id'), 'project_secret', ['project_id'], unique=False) op.create_index(op.f('ix_project_secret_secret_id'), 'project_secret', ['secret_id'], unique=False) op.create_index(op.f('ix_secret_store_metadata_secret_id'), 'secret_store_metadata', ['secret_id'], unique=False) ././@LongLink0000000000000000000000000000017400000000000011217 Lustar 00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/3d36a26b88af_add_order_barbican_metadata_table.pybarbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/3d36a26b88af_add_order_bar0000664000175000017500000000340713616500636033347 0ustar sahidsahid00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Add OrderBarbicanMetadata table Revision ID: 3d36a26b88af Revises: 443d6f4a69ac Create Date: 2015-02-20 12:27:08.155647 """ # revision identifiers, used by Alembic. revision = '3d36a26b88af' down_revision = '443d6f4a69ac' from alembic import op import sqlalchemy as sa def upgrade(): ctx = op.get_context() con = op.get_bind() table_exists = ctx.dialect.has_table(con.engine, 'order_barbican_metadata') if not table_exists: op.create_table( 'order_barbican_metadata', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('order_id', sa.String(length=36), nullable=False), sa.Column('key', sa.String(length=255), nullable=False), sa.Column('value', sa.String(length=255), nullable=False), sa.ForeignKeyConstraint(['order_id'], ['orders.id'], ), sa.PrimaryKeyConstraint('id') ) ././@LongLink0000000000000000000000000000017700000000000011222 Lustar 00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/443d6f4a69ac_added_secret_type_column_to_secrets_.pybarbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/443d6f4a69ac_added_secret_0000664000175000017500000000173213616500636033345 0ustar sahidsahid00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """added secret type column to secrets table Revision ID: 443d6f4a69ac Revises: aa2cf96a1d5 Create Date: 2015-02-16 12:35:12.876413 """ # revision identifiers, used by Alembic. revision = '443d6f4a69ac' down_revision = 'aa2cf96a1d5' from alembic import op import sqlalchemy as sa def upgrade(): op.add_column('secrets', sa.Column('secret_type', sa.String(length=255), nullable=False, server_default="opaque")) ././@LongLink0000000000000000000000000000015400000000000011215 Lustar 00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/aa2cf96a1d5_add_orderretrytask.pybarbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/aa2cf96a1d5_add_orderretry0000664000175000017500000000265513616500636033602 0ustar sahidsahid00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Add OrderRetryTask Revision ID: aa2cf96a1d5 Revises: 256da65e0c5f Create Date: 2015-01-19 10:27:19.179196 """ # revision identifiers, used by Alembic. revision = "aa2cf96a1d5" down_revision = "256da65e0c5f" from alembic import op import sqlalchemy as sa def upgrade(): op.create_table( "order_retry_tasks", sa.Column("id", sa.String(length=36), nullable=False), sa.Column("order_id", sa.String(length=36), nullable=False), sa.Column("retry_task", sa.Text(), nullable=False), sa.Column("retry_at", sa.DateTime(), nullable=False), sa.Column("retry_args", sa.Text(), nullable=False), sa.Column("retry_kwargs", sa.Text(), nullable=False), sa.Column("retry_count", sa.Integer(), nullable=False), sa.ForeignKeyConstraint(["order_id"], ["orders.id"]), sa.PrimaryKeyConstraint("id"), mysql_engine="InnoDB" ) ././@LongLink0000000000000000000000000000017500000000000011220 Lustar 00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/6a4457517a3_rename_acl_creator_only_to_project_.pybarbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/6a4457517a3_rename_acl_cre0000664000175000017500000000304113616500636033203 0ustar sahidsahid00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
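# Net effect of the upgrade below for each of the two ACL tables, roughly
# (a sketch; the real statements are issued through Alembic's
# alter_column() and execute()):
#
#     ALTER TABLE secret_acls CHANGE creator_only project_access BOOLEAN;
#     UPDATE secret_acls SET project_access = NOT project_access;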
"""rename ACL creator_only to project_access Revision ID: 6a4457517a3 Revises: 30dba269cc64 Create Date: 2015-06-03 11:54:55.187875 """ # revision identifiers, used by Alembic. revision = '6a4457517a3' down_revision = '30dba269cc64' from alembic import op import sqlalchemy as sa def upgrade(): op.alter_column('secret_acls', 'creator_only', existing_type=sa.BOOLEAN(), new_column_name='project_access') # reverse existing flag value as project_access is negation of creator_only op.execute('UPDATE secret_acls SET project_access = NOT project_access', execution_options={'autocommit': True}) op.alter_column('container_acls', 'creator_only', existing_type=sa.BOOLEAN(), new_column_name='project_access') # reverse existing flag value as project_access is negation of creator_only op.execute('UPDATE container_acls SET project_access = NOT project_access', execution_options={'autocommit': True}) ././@LongLink0000000000000000000000000000015400000000000011215 Lustar 00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/d2780d5aa510_change_url_length.pybarbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/d2780d5aa510_change_url_le0000664000175000017500000000166013616500636033272 0ustar sahidsahid00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """change_url_length Revision ID: d2780d5aa510 Revises: dce488646127 Create Date: 2016-03-11 09:39:32.593231 """ # revision identifiers, used by Alembic. revision = 'd2780d5aa510' down_revision = 'dce488646127' from alembic import op import sqlalchemy as sa def upgrade(): op.alter_column( 'container_consumer_metadata', 'URL', type_=sa.String(length=255) ) ././@LongLink0000000000000000000000000000016400000000000011216 Lustar 00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/795737bb3c3_change_tenants_to_projects.pybarbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/795737bb3c3_change_tenants0000664000175000017500000000612213616500636033337 0ustar sahidsahid00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Change tenants to projects Revision ID: 795737bb3c3 Revises: 254495565185 Create Date: 2014-12-09 15:58:35.535032 """ # revision identifiers, used by Alembic. 
revision = '795737bb3c3' down_revision = '254495565185' from alembic import op import sqlalchemy as sa def _drop_constraint(ctx, con, table, fk_name_to_try): if ctx.dialect.name == 'mysql': # MySQL creates different default names for foreign key constraints op.drop_constraint(fk_name_to_try, table, type_='foreignkey') def _change_fk_to_project(ctx, con, table, fk_old, fk_new): _drop_constraint(ctx, con, table, fk_old) op.alter_column(table, 'tenant_id', type_=sa.String(36), new_column_name='project_id') op.create_foreign_key(fk_new, table, 'projects', ['project_id'], ['id']) def upgrade(): # project_secret table ctx = op.get_context() con = op.get_bind() # ---- Update tenant_secret table to project_secret: _drop_constraint(ctx, con, 'tenant_secret', 'tenant_secret_ibfk_1') _drop_constraint(ctx, con, 'tenant_secret', 'tenant_secret_ibfk_2') op.drop_constraint('_tenant_secret_uc', 'tenant_secret', type_='unique') op.rename_table('tenant_secret', 'project_secret') op.alter_column('project_secret', 'tenant_id', type_=sa.String(36), new_column_name='project_id') op.create_unique_constraint('_project_secret_uc', 'project_secret', ['project_id', 'secret_id']) # ---- Update tenants table to projects: op.rename_table('tenants', 'projects') # re-create the foreign key constraints with explicit names. op.create_foreign_key('project_secret_project_fk', 'project_secret', 'projects', ['project_id'], ['id']) op.create_foreign_key('project_secret_secret_fk', 'project_secret', 'secrets', ['secret_id'], ['id']) # ---- Update containers table: _change_fk_to_project( ctx, con, 'containers', 'containers_ibfk_1', 'containers_project_fk') # ---- Update kek_data table: _change_fk_to_project( ctx, con, 'kek_data', 'kek_data_ibfk_1', 'kek_data_project_fk') # ---- Update orders table: _change_fk_to_project( ctx, con, 'orders', 'orders_ibfk_2', 'orders_project_fk') op.create_foreign_key('orders_ibfk_2', 'orders', 'containers', ['container_id'], ['id']) ././@LongLink0000000000000000000000000000020000000000000011205 Lustar 00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/1e86c18af2dd_add_new_columns_type_meta_containerid.pybarbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/1e86c18af2dd_add_new_colum0000664000175000017500000000224313616500636033454 0ustar sahidsahid00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """add new columns type meta containerId Revision ID: 1e86c18af2dd Revises: 13d127569afa Create Date: 2014-06-04 09:53:27.116054 """ # revision identifiers, used by Alembic. 
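# Roughly equivalent DDL for the upgrade below (a sketch; generated by
# SQLAlchemy in practice):
#
#     ALTER TABLE orders ADD COLUMN container_id VARCHAR(36) NULL;
#     ALTER TABLE orders ADD COLUMN meta TEXT NULL;
#     ALTER TABLE orders ADD COLUMN type VARCHAR(255) NULL;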
revision = '1e86c18af2dd' down_revision = '13d127569afa' from alembic import op import sqlalchemy as sa def upgrade(): op.add_column('orders', sa.Column('container_id', sa.String(length=36), nullable=True)) op.add_column('orders', sa.Column('meta', sa.Text, nullable=True)) op.add_column('orders', sa.Column('type', sa.String(length=255), nullable=True)) ././@LongLink0000000000000000000000000000016500000000000011217 Lustar 00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/1bece815014f_remove_projectsecret_table.pybarbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/1bece815014f_remove_projec0000664000175000017500000000152013616500636033420 0ustar sahidsahid00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """remove ProjectSecret table Revision ID: 1bece815014f Revises: 161f8aceb687 Create Date: 2015-06-23 16:17:50.805295 """ # revision identifiers, used by Alembic. revision = '1bece815014f' down_revision = '161f8aceb687' from alembic import op def upgrade(): op.drop_table('project_secret') ././@LongLink0000000000000000000000000000017700000000000011222 Lustar 00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/254495565185_removing_redundant_fields_from_order.pybarbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/254495565185_removing_redu0000664000175000017500000000213713616500636033075 0ustar sahidsahid00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """removing redundant fields from order Revision ID: 254495565185 Revises: 2843d6469f25 Create Date: 2014-09-16 12:09:23.716390 """ # revision identifiers, used by Alembic. 
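# The upgrade below is one DROP COLUMN per field, roughly:
#
#     ALTER TABLE orders DROP COLUMN secret_mode;
#
# and likewise for the other five secret_* columns.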
revision = '254495565185' down_revision = '2843d6469f25' from alembic import op def upgrade(): op.drop_column('orders', 'secret_mode') op.drop_column('orders', 'secret_algorithm') op.drop_column('orders', 'secret_bit_length') op.drop_column('orders', 'secret_expiration') op.drop_column('orders', 'secret_payload_content_type') op.drop_column('orders', 'secret_name') ././@LongLink0000000000000000000000000000016400000000000011216 Lustar 00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/1bc885808c76_add_project_id_to_secrets.pybarbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/1bc885808c76_add_project_i0000664000175000017500000000222713616500636033236 0ustar sahidsahid00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Add project id to Secrets Revision ID: 1bc885808c76 Revises: 6a4457517a3 Create Date: 2015-04-24 13:53:29.926426 """ # revision identifiers, used by Alembic. revision = '1bc885808c76' down_revision = '6a4457517a3' from alembic import op import sqlalchemy as sa def upgrade(): op.add_column('secrets', sa.Column('project_id', sa.String(length=36), nullable=True)) op.create_index(op.f('ix_secrets_project_id'), 'secrets', ['project_id'], unique=False) op.create_foreign_key('secrets_project_fk', 'secrets', 'projects', ['project_id'], ['id']) ././@LongLink0000000000000000000000000000016300000000000011215 Lustar 00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/cd4106a1a0_add_cert_to_container_type.pybarbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/cd4106a1a0_add_cert_to_con0000664000175000017500000000174413616500636033433 0ustar sahidsahid00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """add-cert-to-container-type Revision ID: cd4106a1a0 Revises: 1e86c18af2dd Create Date: 2014-06-10 15:07:25.084173 """ # revision identifiers, used by Alembic. 
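# On MySQL the alter below comes out roughly as the following (a sketch;
# other dialects handle sa.Enum differently, e.g. PostgreSQL uses a named
# enum type):
#
#     ALTER TABLE containers
#         MODIFY type ENUM('generic', 'rsa', 'dsa', 'certificate');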
revision = 'cd4106a1a0' down_revision = '1e86c18af2dd' from alembic import op import sqlalchemy as sa def upgrade(): enum_type = sa.Enum( 'generic', 'rsa', 'dsa', 'certificate', name='container_types') op.alter_column('containers', 'type', type_=enum_type) ././@LongLink0000000000000000000000000000016200000000000011214 Lustar 00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/39a96e67e990_add_missing_constraints.pybarbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/39a96e67e990_add_missing_c0000664000175000017500000000317213616500636033245 0ustar sahidsahid00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Add missing constraints Revision ID: 39a96e67e990 Revises: 4ecde3a3a72a Create Date: 2016-01-26 13:18:06.113621 """ # revision identifiers, used by Alembic. revision = '39a96e67e990' down_revision = '4ecde3a3a72a' from alembic import op import sqlalchemy as sa def upgrade(): # Add missing projects table external_id uniqueness constraint. op.create_unique_constraint('uc_projects_external_ids', 'projects', ['external_id']) # Add missing default for secret_acls' project_access. op.alter_column('secret_acls', 'project_access', server_default=sa.sql.expression.true(), existing_type=sa.Boolean, existing_server_default=None, existing_nullable=False) # Add missing default for container_acls' project_access. op.alter_column('container_acls', 'project_access', server_default=sa.sql.expression.true(), existing_type=sa.Boolean, existing_server_default=None, existing_nullable=False) ././@LongLink0000000000000000000000000000015200000000000011213 Lustar 00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/1a0c2cdafb38_initial_version.pybarbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/1a0c2cdafb38_initial_versi0000664000175000017500000000141313616500636033554 0ustar sahidsahid00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """create test table Revision ID: 1a0c2cdafb38 Revises: None Create Date: 2013-06-17 16:42:13.634746 """ # revision identifiers, used by Alembic. revision = '1a0c2cdafb38' down_revision = None def upgrade(): pass barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/juno_initial.py0000664000175000017500000000274513616500636032016 0ustar sahidsahid00000000000000# Copyright 2015 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License.
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # """juno_initial Revision ID: juno Revises: 1a0c2cdafb38 """ # revision identifiers, used by Alembic. revision = 'juno' down_revision = '1a0c2cdafb38' from barbican.model.migration.alembic_migrations import container_init_ops from barbican.model.migration.alembic_migrations import encrypted_init_ops from barbican.model.migration.alembic_migrations import kek_init_ops from barbican.model.migration.alembic_migrations import order_ops from barbican.model.migration.alembic_migrations import projects_init_ops from barbican.model.migration.alembic_migrations import secrets_init_ops from barbican.model.migration.alembic_migrations import transport_keys_init_ops def upgrade(): projects_init_ops.upgrade() secrets_init_ops.upgrade() container_init_ops.upgrade() kek_init_ops.upgrade() encrypted_init_ops.upgrade() order_ops.upgrade() transport_keys_init_ops.upgrade() ././@LongLink0000000000000000000000000000017700000000000011222 Lustar 00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/156cd9933643_add_project_column_to_consumer_table.pybarbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/156cd9933643_add_project_c0000664000175000017500000000244113616500636033143 0ustar sahidsahid00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Add project column to consumer table Revision ID: 156cd9933643 Revises: 46b98cde536 Create Date: 2015-08-28 20:53:23.205128 """ # revision identifiers, used by Alembic. revision = '156cd9933643' down_revision = '46b98cde536' from alembic import op import sqlalchemy as sa def upgrade(): op.add_column( 'container_consumer_metadata', sa.Column('project_id', sa.String(length=36), nullable=True)) op.create_index( op.f('ix_container_consumer_metadata_project_id'), 'container_consumer_metadata', ['project_id'], unique=False) op.create_foreign_key( None, 'container_consumer_metadata', 'projects', ['project_id'], ['id']) ././@LongLink0000000000000000000000000000020100000000000011206 Lustar 00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/4ecde3a3a72a_add_cas_column_to_project_quotas_table.pybarbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/4ecde3a3a72a_add_cas_colum0000664000175000017500000000166413616500636033505 0ustar sahidsahid00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License.
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Add cas column to project quotas table Revision ID: 4ecde3a3a72a Revises: 10220ccbe7fa Create Date: 2015-09-09 09:40:08.540064 """ # revision identifiers, used by Alembic. revision = '4ecde3a3a72a' down_revision = '10220ccbe7fa' from alembic import op import sqlalchemy as sa def upgrade(): op.add_column( 'project_quotas', sa.Column('cas', sa.Integer(), nullable=True)) ././@LongLink0000000000000000000000000000017600000000000011221 Lustar 00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/47b69e523451_made_plugin_names_in_kek_datum_non_.pybarbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/versions/47b69e523451_made_plugin_n0000664000175000017500000000167213616500636033166 0ustar sahidsahid00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Made plugin names in kek datum non nullable Revision ID: 47b69e523451 Revises: cd4106a1a0 Create Date: 2014-06-16 14:05:45.428226 """ # revision identifiers, used by Alembic. revision = '47b69e523451' down_revision = 'cd4106a1a0' from alembic import op import sqlalchemy as sa def upgrade(): op.alter_column('kek_data', 'plugin_name', type_=sa.String(255), nullable=False) barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/transport_keys_init_ops.py0000664000175000017500000000265613616500636032446 0ustar sahidsahid00000000000000# Copyright 2015 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # # Initial operations for the 'transport_keys' table.
from alembic import op import sqlalchemy as sa def upgrade(): op.create_table( 'transport_keys', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('plugin_name', sa.String(length=255), nullable=False), sa.Column('transport_key', sa.Text(), nullable=True), sa.PrimaryKeyConstraint('id') ) barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/encrypted_init_ops.py0000664000175000017500000000336713616500636031364 0ustar sahidsahid00000000000000# Copyright 2015 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # # Initial operations for the 'encrypted_data' table. from alembic import op import sqlalchemy as sa def upgrade(): op.create_table( 'encrypted_data', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('content_type', sa.String(length=255), nullable=True), sa.Column('secret_id', sa.String(length=36), nullable=False), sa.Column('kek_id', sa.String(length=36), nullable=False), sa.Column('cypher_text', sa.Text(), nullable=True), sa.Column('kek_meta_extended', sa.Text(), nullable=True), sa.ForeignKeyConstraint(['secret_id'], ['secrets.id'],), sa.ForeignKeyConstraint(['kek_id'], ['kek_data.id'],), sa.PrimaryKeyConstraint('id') ) barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/order_ops.py0000664000175000017500000000424713616500636027455 0ustar sahidsahid00000000000000# Copyright 2015 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # # Initial operations for the 'orders' table.
from alembic import op import sqlalchemy as sa def upgrade(): op.create_table( 'orders', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('tenant_id', sa.String(length=36), nullable=False), sa.Column('error_status_code', sa.String(length=16), nullable=True), sa.Column('error_reason', sa.String(length=255), nullable=True), sa.Column('secret_id', sa.String(length=36), nullable=True), sa.Column('secret_mode', sa.String(length=255), nullable=True), sa.Column('secret_algorithm', sa.String(length=255), nullable=True), sa.Column('secret_bit_length', sa.String(length=255), nullable=True), sa.Column('secret_expiration', sa.String(length=255), nullable=True), sa.Column('secret_payload_content_type', sa.String(length=255), nullable=True), sa.Column('secret_name', sa.String(length=255), nullable=True), sa.ForeignKeyConstraint(['secret_id'], ['secrets.id'], ), sa.ForeignKeyConstraint(['tenant_id'], ['tenants.id'], ), sa.PrimaryKeyConstraint('id') ) barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/__init__.py0000664000175000017500000000000013616500636027197 0ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/README0000664000175000017500000000004713616500636025761 0ustar sahidsahid00000000000000Generic single-database configuration. barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/projects_init_ops.py0000664000175000017500000000255013616500636031211 0ustar sahidsahid00000000000000# Copyright 2015 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # # Initial operations for the 'tenants' table, later renamed to 'projects'. from alembic import op import sqlalchemy as sa def upgrade(): op.create_table( 'tenants', sa.Column('id', sa.String(length=36), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('deleted_at', sa.DateTime(), nullable=True), sa.Column('deleted', sa.Boolean(), nullable=False), sa.Column('status', sa.String(length=20), nullable=False), sa.Column('keystone_id', sa.String(length=255), nullable=True), sa.PrimaryKeyConstraint('id') ) barbican-9.1.0.dev50/barbican/model/migration/alembic_migrations/script.py.mako0000664000175000017500000000172313616500636027707 0ustar sahidsahid00000000000000# Copyright ${create_date.year} OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License.
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # """${message} Revision ID: ${up_revision} Revises: ${down_revision} Create Date: ${create_date} """ # revision identifiers, used by Alembic. revision = ${repr(up_revision)} down_revision = ${repr(down_revision)} from alembic import op import sqlalchemy as sa ${imports if imports else ""} def upgrade(): ${upgrades if upgrades else "pass"} barbican-9.1.0.dev50/barbican/model/sync.py0000664000175000017500000000404313616500636020606 0ustar sahidsahid00000000000000# Copyright (c) 2018 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from barbican.common import config from barbican.model import repositories as repo from oslo_log import log # Import and configure logging. CONF = config.CONF log.setup(CONF, 'barbican') LOG = log.getLogger(__name__) def sync_secret_stores(sql_url, verbose, log_file): """Command to sync the secret_stores table with the configuration in barbican.conf. :param sql_url: SQL connection string to connect to a database :param verbose: If True, log and print more information :param log_file: If set, override the configured log_file """ if verbose: # The verbose flag prints out log events to the screen, otherwise # the log events will only go to the log file CONF.set_override('debug', True) if log_file: CONF.set_override('log_file', log_file) LOG.info("Syncing the secret_stores table with barbican.conf") log.setup(CONF, 'barbican') try: if sql_url: CONF.set_override('sql_connection', sql_url) repo.setup_database_engine_and_factory( initialize_secret_stores=True) repo.commit() except Exception as ex: LOG.exception('Failed to sync secret_stores table.') repo.rollback() raise ex finally: if verbose: CONF.clear_override('debug') if log_file: CONF.clear_override('log_file') repo.clear() if sql_url: CONF.clear_override('sql_connection') log.setup(CONF, 'barbican') # reset the overrides barbican-9.1.0.dev50/barbican/model/repositories.py0000664000175000017500000030516213616500636022367 0ustar sahidsahid00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License.
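# A sketch of the per-request lifecycle that the module-level helpers below
# are designed for (the actual wiring lives in the API and worker layers,
# not in this module):
#
#     start()
#     try:
#         ...                  # repository work
#         commit()
#     except Exception:
#         rollback()
#         raise
#     finally:
#         clear()              # release the scoped session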
""" Defines interface for DB access that Resource controllers may reference TODO: The top part of this file was 'borrowed' from Glance, but seems quite intense for sqlalchemy, and maybe could be simplified. """ import logging import re import sys import time from oslo_db import exception as db_exc from oslo_db.sqlalchemy import session from oslo_utils import timeutils from oslo_utils import uuidutils import sqlalchemy from sqlalchemy import func as sa_func from sqlalchemy import or_ import sqlalchemy.orm as sa_orm from barbican.common import config from barbican.common import exception from barbican.common import utils from barbican import i18n as u from barbican.model.migration import commands from barbican.model import models LOG = utils.getLogger(__name__) _ENGINE = None _SESSION_FACTORY = None BASE = models.BASE sa_logger = None # Singleton repository references, instantiated via get_xxxx_repository() # functions below. Please keep this list in alphabetical order. _CA_REPOSITORY = None _CONTAINER_ACL_USER_REPOSITORY = None _CONTAINER_ACL_REPOSITORY = None _CONTAINER_CONSUMER_REPOSITORY = None _CONTAINER_REPOSITORY = None _CONTAINER_SECRET_REPOSITORY = None _ENCRYPTED_DATUM_REPOSITORY = None _KEK_DATUM_REPOSITORY = None _ORDER_PLUGIN_META_REPOSITORY = None _ORDER_BARBICAN_META_REPOSITORY = None _ORDER_REPOSITORY = None _ORDER_RETRY_TASK_REPOSITORY = None _PREFERRED_CA_REPOSITORY = None _PROJECT_REPOSITORY = None _PROJECT_CA_REPOSITORY = None _PROJECT_QUOTAS_REPOSITORY = None _SECRET_ACL_USER_REPOSITORY = None _SECRET_ACL_REPOSITORY = None _SECRET_META_REPOSITORY = None _SECRET_USER_META_REPOSITORY = None _SECRET_REPOSITORY = None _TRANSPORT_KEY_REPOSITORY = None _SECRET_STORES_REPOSITORY = None _PROJECT_SECRET_STORE_REPOSITORY = None _SECRET_CONSUMER_REPOSITORY = None CONF = config.CONF def hard_reset(): """Performs a hard reset of database resources, used for unit testing.""" # TODO(jvrbanac): Remove this as soon as we improve our unit testing # to not require this. global _ENGINE, _SESSION_FACTORY if _ENGINE: _ENGINE.dispose() _ENGINE = None _SESSION_FACTORY = None # Make sure we reinitialize the engine and session factory setup_database_engine_and_factory() def setup_database_engine_and_factory(initialize_secret_stores=False): global sa_logger, _SESSION_FACTORY, _ENGINE LOG.info('Setting up database engine and session factory') if CONF.debug: sa_logger = logging.getLogger('sqlalchemy.engine') sa_logger.setLevel(logging.DEBUG) if CONF.sql_pool_logging: pool_logger = logging.getLogger('sqlalchemy.pool') pool_logger.setLevel(logging.DEBUG) _ENGINE = _get_engine(_ENGINE) # Utilize SQLAlchemy's scoped_session to ensure that we only have one # session instance per thread. session_maker = sa_orm.sessionmaker(bind=_ENGINE) _SESSION_FACTORY = sqlalchemy.orm.scoped_session(session_maker) if initialize_secret_stores: _initialize_secret_stores_data() def start(): """Start for read-write requests placeholder Typically performed at the start of a request cycle, say for POST or PUT requests. """ pass def start_read_only(): """Start for read-only requests placeholder Typically performed at the start of a request cycle, say for GET or HEAD requests. """ pass def commit(): """Commit session state so far to the database. Typically performed at the end of a request cycle. """ get_session().commit() def rollback(): """Rollback session state so far. Typically performed when the request cycle raises an Exception. """ get_session().rollback() def clear(): """Dispose of this session, releases db resources. 
Typically performed at the end of a request cycle, after a commit() or rollback(). """ if _SESSION_FACTORY: # not initialized in some unit test _SESSION_FACTORY.remove() def get_session(): """Helper method to grab session.""" return _SESSION_FACTORY() def _get_engine(engine): if not engine: connection = CONF.sql_connection if not connection: raise exception.BarbicanException( u._('No SQL connection configured')) # TODO(jfwood): # connection_dict = sqlalchemy.engine.url.make_url(_CONNECTION) engine_args = { 'idle_timeout': CONF.sql_idle_timeout} if CONF.sql_pool_size: engine_args['max_pool_size'] = CONF.sql_pool_size if CONF.sql_pool_max_overflow: engine_args['max_overflow'] = CONF.sql_pool_max_overflow db_connection = None try: engine = _create_engine(connection, **engine_args) db_connection = engine.connect() except Exception as err: msg = u._("Error configuring registry database with supplied " "sql_connection. Got error: {error}").format(error=err) LOG.exception(msg) raise exception.BarbicanException(msg) finally: if db_connection: db_connection.close() if CONF.db_auto_create: meta = sqlalchemy.MetaData() meta.reflect(bind=engine) tables = meta.tables _auto_generate_tables(engine, tables) else: LOG.info('Not auto-creating barbican registry DB') return engine def model_query(model, *args, **kwargs): """Query helper for simpler session usage.""" session = kwargs.get('session') query = session.query(model, *args) return query def _initialize_secret_stores_data(): """Initializes secret stores data in database. This logic is executed only when database engine and factory is built. Secret store get_manager internally reads secret store plugin configuration from service configuration and saves it in secret_stores table in database. """ if utils.is_multiple_backends_enabled(): from barbican.plugin.interface import secret_store secret_store.get_manager() def is_db_connection_error(args): """Return True if error in connecting to db.""" # NOTE(adam_g): This is currently MySQL specific and needs to be extended # to support Postgres and others. conn_err_codes = ('2002', '2003', '2006') for err_code in conn_err_codes: if args.find(err_code) != -1: return True return False def _create_engine(connection, **engine_args): LOG.debug('Sql connection: please check "sql_connection" property in ' 'barbican configuration file; Args: %s', engine_args) engine = session.create_engine(connection, **engine_args) # TODO(jfwood): if 'mysql' in connection_dict.drivername: # TODO(jfwood): sqlalchemy.event.listen(_ENGINE, 'checkout', # TODO(jfwood): ping_listener) # Wrap the engine's connect method with a retry decorator. engine.connect = wrap_db_error(engine.connect) return engine def _auto_generate_tables(engine, tables): if tables and 'alembic_version' in tables: # Upgrade the database to the latest version. LOG.info('Updating schema to latest version') commands.upgrade() else: # Create database tables from our models. LOG.info('Auto-creating barbican registry DB') models.BASE.metadata.create_all(engine) # Sync the alembic version 'head' with current models. commands.stamp() def wrap_db_error(f): """Retry DB connection. Copied from nova and modified.""" def _wrap(*args, **kwargs): try: return f(*args, **kwargs) except sqlalchemy.exc.OperationalError as e: if not is_db_connection_error(e.args[0]): raise remaining_attempts = CONF.sql_max_retries while True: LOG.warning('SQL connection failed. 
%d attempts left.', remaining_attempts) remaining_attempts -= 1 time.sleep(CONF.sql_retry_interval) try: return f(*args, **kwargs) except sqlalchemy.exc.OperationalError as e: if (remaining_attempts <= 0 or not is_db_connection_error(e.args[0])): raise except sqlalchemy.exc.DBAPIError: raise except sqlalchemy.exc.DBAPIError: raise _wrap.__name__ = f.__name__ return _wrap def clean_paging_values(offset_arg=0, limit_arg=CONF.default_limit_paging): """Cleans and safely limits raw paging offset/limit values.""" offset_arg = offset_arg or 0 limit_arg = limit_arg or CONF.default_limit_paging try: offset = int(offset_arg) if offset < 0: offset = 0 if offset > sys.maxsize: offset = 0 except ValueError: offset = 0 try: limit = int(limit_arg) if limit < 1: limit = 1 if limit > CONF.max_limit_paging: limit = CONF.max_limit_paging except ValueError: limit = CONF.default_limit_paging LOG.debug("Clean paging values limit=%(limit)s, offset=%(offset)s" % {'limit': limit, 'offset': offset}) return offset, limit def delete_all_project_resources(project_id): """Logic to cleanup all project resources. This cleanup uses same alchemy session to perform all db operations as a transaction and will commit only when all db operations are performed without error. """ session = get_session() container_repo = get_container_repository() container_repo.delete_project_entities( project_id, suppress_exception=False, session=session) # secret children SecretStoreMetadatum, EncryptedDatum # and container_secrets are deleted as part of secret delete secret_repo = get_secret_repository() secret_repo.delete_project_entities( project_id, suppress_exception=False, session=session) kek_repo = get_kek_datum_repository() kek_repo.delete_project_entities( project_id, suppress_exception=False, session=session) project_repo = get_project_repository() project_repo.delete_project_entities( project_id, suppress_exception=False, session=session) class BaseRepo(object): """Base repository for the barbican entities. This class provides template methods that allow sub-classes to hook specific functionality as needed. Clients access instances of this class via singletons, therefore implementations should be stateless aside from configuration. """ def get_session(self, session=None): LOG.debug("Getting session...") return session or get_session() def get(self, entity_id, external_project_id=None, force_show_deleted=False, suppress_exception=False, session=None): """Get an entity or raise if it does not exist.""" session = self.get_session(session) try: query = self._do_build_get_query(entity_id, external_project_id, session) # filter out deleted entities if requested if not force_show_deleted: query = query.filter_by(deleted=False) entity = query.one() except sa_orm.exc.NoResultFound: LOG.exception("Not found for %s", entity_id) entity = None if not suppress_exception: _raise_entity_not_found(self._do_entity_name(), entity_id) return entity def create_from(self, entity, session=None): """Sub-class hook: create from entity.""" if not entity: msg = u._( "Must supply non-None {entity_name}." ).format(entity_name=self._do_entity_name()) raise exception.Invalid(msg) if entity.id: msg = u._( "Must supply {entity_name} with id=None (i.e. new entity)." ).format(entity_name=self._do_entity_name()) raise exception.Invalid(msg) LOG.debug("Begin create from...") session = self.get_session(session) start = time.time() # DEBUG # Validate the attributes before we go any further. 
From my # (unknown Glance developer) investigation, the @validates # decorator does not validate # on new records, only on existing records, which is, well, # idiotic. self._do_validate(entity.to_dict()) try: LOG.debug("Saving entity...") entity.save(session=session) except db_exc.DBDuplicateEntry as e: session.rollback() LOG.exception('Problem saving entity for create') error_msg = re.sub('[()]', '', str(e.args)) raise exception.ConstraintCheck(error=error_msg) LOG.debug('Elapsed repo ' 'create secret:%s', (time.time() - start)) # DEBUG return entity def update_from(self, model_class, entity_id, values, session=None): # Updating the primary key is not allowed. if 'id' in values: raise Exception('Cannot update id') LOG.debug('Begin update from ...') session = self.get_session(session=session) query = session.query(model_class) query = query.filter_by(id=entity_id) try: LOG.debug('Updating value ...') entity = query.one() except sa_orm.exc.NoResultFound: raise exception.NotFound('DB Entity with id {0} not ' 'found'.format(entity_id)) self._update_values(entity, values) entity.save() def save(self, entity): """Saves the state of the entity.""" entity.updated_at = timeutils.utcnow() # Validate the attributes before we go any further. From my # (unknown Glance developer) investigation, the @validates # decorator does not validate # on new records, only on existing records, which is, well, # idiotic. self._do_validate(entity.to_dict()) entity.save() def delete_entity_by_id(self, entity_id, external_project_id, session=None): """Remove the entity by its ID.""" session = self.get_session(session) entity = self.get(entity_id=entity_id, external_project_id=external_project_id, session=session) entity.delete(session=session) def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "Entity" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" return None def _do_convert_values(self, values): """Sub-class hook: convert text-based values to target types This is specifically for database values. """ pass def _do_validate(self, values): """Sub-class hook: validate values. Validates the incoming data and raises an Invalid exception if anything is out of order. :param values: Mapping of entity metadata to check """ status = values.get('status', None) if not status: # TODO(jfwood): I18n this! msg = u._("{entity_name} status is required.").format( entity_name=self._do_entity_name()) raise exception.Invalid(msg) if not models.States.is_valid(status): msg = u._("Invalid status '{status}' for {entity_name}.").format( status=status, entity_name=self._do_entity_name()) raise exception.Invalid(msg) return values def _update_values(self, entity_ref, values): for k in values: if getattr(entity_ref, k) != values[k]: setattr(entity_ref, k, values[k]) def _build_get_project_entities_query(self, project_id, session): """Sub-class hook: build a query to retrieve entities for a project. :param project_id: id of barbican project entity :param session: existing db session reference. :returns: A query object for getting all project related entities This will filter out deleted entities, if present. """ msg = u._( "{entity_name} is missing query build method for get " "project entities.").format( entity_name=self._do_entity_name()) raise NotImplementedError(msg) def get_project_entities(self, project_id, session=None): """Gets entities associated with a given project. :param project_id: id of barbican project entity :param session: existing db session reference.
If None, gets session. :returns: list of matching entities found otherwise returns empty list if no entity exists for a given project. Sub-class should implement `_build_get_project_entities_query` function to delete related entities otherwise it would raise NotImplementedError on its usage. """ session = self.get_session(session) query = self._build_get_project_entities_query(project_id, session) if query: return query.all() else: return [] def get_count(self, project_id, session=None): """Gets count of entities associated with a given project :param project_id: id of barbican project entity :param session: existing db session reference. If None, gets session. :return: an number 0 or greater Sub-class should implement `_build_get_project_entities_query` function to delete related entities otherwise it would raise NotImplementedError on its usage. """ session = self.get_session(session) query = self._build_get_project_entities_query(project_id, session) if query: return query.count() else: return 0 def delete_project_entities(self, project_id, suppress_exception=False, session=None): """Deletes entities for a given project. :param project_id: id of barbican project entity :param suppress_exception: Pass True if want to suppress exception :param session: existing db session reference. If None, gets session. Sub-class should implement `_build_get_project_entities_query` function to delete related entities otherwise it would raise NotImplementedError on its usage. """ session = self.get_session(session) query = self._build_get_project_entities_query(project_id, session=session) try: # query cannot be None as related repo class is expected to # implement it otherwise error is raised in build query call for entity in query: # Its a soft delete so its more like entity update entity.delete(session=session) except sqlalchemy.exc.SQLAlchemyError: LOG.exception('Problem finding project related entity to delete') if not suppress_exception: raise exception.BarbicanException(u._('Error deleting project ' 'entities for ' 'project_id=%s'), project_id) class ProjectRepo(BaseRepo): """Repository for the Project entity.""" def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "Project" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" return session.query(models.Project).filter_by(id=entity_id) def _do_validate(self, values): """Sub-class hook: validate values.""" pass def find_by_external_project_id(self, external_project_id, suppress_exception=False, session=None): session = self.get_session(session) try: query = session.query(models.Project) query = query.filter_by(external_id=external_project_id) entity = query.one() except sa_orm.exc.NoResultFound: entity = None if not suppress_exception: LOG.exception("Problem getting Project %s", external_project_id) raise exception.NotFound(u._( "No {entity_name} found with keystone-ID {id}").format( entity_name=self._do_entity_name(), id=external_project_id)) return entity def _build_get_project_entities_query(self, project_id, session): """Builds query for retrieving project for given id.""" query = session.query(models.Project) return query.filter_by(id=project_id).filter_by(deleted=False) class SecretRepo(BaseRepo): """Repository for the Secret entity.""" def get_secret_list(self, external_project_id, offset_arg=None, limit_arg=None, name=None, alg=None, mode=None, bits=0, secret_type=None, suppress_exception=False, session=None, acl_only=None, 
user_id=None, created=None, updated=None, expiration=None, sort=None): """Returns a list of secrets The list is scoped to secrets that are associated with the external_project_id (e.g. Keystone Project ID), and filtered using any provided filters. """ offset, limit = clean_paging_values(offset_arg, limit_arg) session = self.get_session(session) utcnow = timeutils.utcnow() query = session.query(models.Secret) query = query.filter_by(deleted=False) if name: query = query.filter(models.Secret.name.like(name)) if alg: query = query.filter(models.Secret.algorithm.like(alg)) if mode: query = query.filter(models.Secret.mode.like(mode)) if bits > 0: query = query.filter(models.Secret.bit_length == bits) if secret_type: query = query.filter(models.Secret.secret_type == secret_type) if created: query = self._build_date_filter_query(query, 'created_at', created) if updated: query = self._build_date_filter_query(query, 'updated_at', updated) if expiration: query = self._build_date_filter_query( query, 'expiration', expiration ) else: # Exclude expired secrets by default; an explicit 'expiration' # filter above takes precedence over this default. query = query.filter(or_(models.Secret.expiration.is_(None), models.Secret.expiration > utcnow)) if sort: query = self._build_sort_filter_query(query, sort) if acl_only and acl_only.lower() == 'true' and user_id: query = query.join(models.SecretACL) query = query.join(models.SecretACLUser) query = query.filter(models.SecretACLUser.user_id == user_id) else: query = query.join(models.Project) query = query.filter( models.Project.external_id == external_project_id) total = query.count() end_offset = offset + limit LOG.debug('Retrieving from %s to %s', offset, end_offset) query = query.limit(limit).offset(offset) entities = query.all() LOG.debug('Number entities retrieved: %s out of %s', len(entities), total) if total <= 0 and not suppress_exception: _raise_no_entities_found(self._do_entity_name()) return entities, offset, limit, total def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "Secret" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" utcnow = timeutils.utcnow() expiration_filter = or_(models.Secret.expiration.is_(None), models.Secret.expiration > utcnow) query = session.query(models.Secret) query = query.filter_by(id=entity_id, deleted=False) query = query.filter(expiration_filter) query = query.join(models.Project) query = query.filter(models.Project.external_id == external_project_id) return query def _do_validate(self, values): """Sub-class hook: validate values.""" pass def _build_get_project_entities_query(self, project_id, session): """Builds query for retrieving Secrets associated with a given project :param project_id: id of barbican project entity :param session: existing db session reference.
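Example (illustrative sketch; uses the get_secret_repository()
accessor referenced earlier in this module)::

    secret_repo = get_secret_repository()
    # Only non-deleted secrets that have not yet expired are
    # returned for the project.
    live_secrets = secret_repo.get_project_entities(project.id)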
""" utcnow = timeutils.utcnow() expiration_filter = or_(models.Secret.expiration.is_(None), models.Secret.expiration > utcnow) query = session.query(models.Secret).filter_by(deleted=False) query = query.filter(models.Secret.project_id == project_id) query = query.filter(expiration_filter) return query def _build_date_filter_query(self, query, attribute, date_filters): """Parses date_filters to apply each filter to the given query :param query: query object to apply filters to :param attribute: name of the model attribute to be filtered :param date_filters: comma separated string of date filters to apply """ parse = timeutils.parse_isotime for filter in date_filters.split(','): if filter.startswith('lte:'): isotime = filter[4:] query = query.filter(or_( getattr(models.Secret, attribute) < parse(isotime), getattr(models.Secret, attribute) == parse(isotime)) ) elif filter.startswith('lt:'): isotime = filter[3:] query = query.filter( getattr(models.Secret, attribute) < parse(isotime) ) elif filter.startswith('gte:'): isotime = filter[4:] query = query.filter(or_( getattr(models.Secret, attribute) > parse(isotime), getattr(models.Secret, attribute) == parse(isotime)) ) elif filter.startswith('gt:'): isotime = filter[3:] query = query.filter( getattr(models.Secret, attribute) > parse(isotime) ) else: query = query.filter( getattr(models.Secret, attribute) == parse(filter) ) return query def _build_sort_filter_query(self, query, sort_filters): """Parses sort_filters to order the query""" key_to_column_map = { 'created': 'created_at', 'updated': 'updated_at' } ordering = list() for sort in sort_filters.split(','): if ':' in sort: key, direction = sort.split(':') else: key, direction = sort, 'asc' ordering.append( getattr( getattr(models.Secret, key_to_column_map.get(key, key)), direction )() ) return query.order_by(*ordering) def get_secret_by_id(self, entity_id, suppress_exception=False, session=None): """Gets secret by its entity id without project id check.""" session = self.get_session(session) try: utcnow = timeutils.utcnow() expiration_filter = or_(models.Secret.expiration.is_(None), models.Secret.expiration > utcnow) query = session.query(models.Secret) query = query.filter_by(id=entity_id, deleted=False) query = query.filter(expiration_filter) entity = query.one() except sa_orm.exc.NoResultFound: entity = None if not suppress_exception: LOG.exception("Problem getting secret %s", entity_id) raise exception.NotFound(u._( "No secret found with secret-ID {id}").format( entity_name=self._do_entity_name(), id=entity_id)) return entity class EncryptedDatumRepo(BaseRepo): """Repository for the EncryptedDatum entity Stores encrypted information on behalf of a Secret. """ def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "EncryptedDatum" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" return session.query(models.EncryptedDatum).filter_by(id=entity_id) def _do_validate(self, values): """Sub-class hook: validate values.""" pass class SecretStoreMetadatumRepo(BaseRepo): """Repository for the SecretStoreMetadatum entity Stores key/value information on behalf of a Secret. """ def save(self, metadata, secret_model): """Saves the specified metadata for the secret. :raises NotFound if entity does not exist. 
""" now = timeutils.utcnow() for k, v in metadata.items(): meta_model = models.SecretStoreMetadatum(k, v) meta_model.updated_at = now meta_model.secret = secret_model meta_model.save() def get_metadata_for_secret(self, secret_id): """Returns a dict of SecretStoreMetadatum instances.""" session = get_session() query = session.query(models.SecretStoreMetadatum) query = query.filter_by(deleted=False) query = query.filter( models.SecretStoreMetadatum.secret_id == secret_id) metadata = query.all() return {m.key: m.value for m in metadata} def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "SecretStoreMetadatum" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" query = session.query(models.SecretStoreMetadatum) return query.filter_by(id=entity_id) def _do_validate(self, values): """Sub-class hook: validate values.""" pass class SecretUserMetadatumRepo(BaseRepo): """Repository for the SecretUserMetadatum entity Stores key/value information on behalf of a Secret. """ def create_replace_user_metadata(self, secret_id, metadata): """Creates or replaces the specified metadata for the secret.""" now = timeutils.utcnow() session = get_session() query = session.query(models.SecretUserMetadatum) query = query.filter_by(secret_id=secret_id) query.delete() for k, v in metadata.items(): meta_model = models.SecretUserMetadatum(k, v) meta_model.secret_id = secret_id meta_model.updated_at = now meta_model.save(session=session) def get_metadata_for_secret(self, secret_id): """Returns a dict of SecretUserMetadatum instances.""" session = get_session() query = session.query(models.SecretUserMetadatum) query = query.filter_by(deleted=False) query = query.filter( models.SecretUserMetadatum.secret_id == secret_id) metadata = query.all() return {m.key: m.value for m in metadata} def create_replace_user_metadatum(self, secret_id, key, value): now = timeutils.utcnow() session = get_session() query = session.query(models.SecretUserMetadatum) query = query.filter_by(secret_id=secret_id) query = query.filter_by(key=key) query.delete() meta_model = models.SecretUserMetadatum(key, value) meta_model.secret_id = secret_id meta_model.updated_at = now meta_model.save(session=session) def delete_metadatum(self, secret_id, key): """Removes a key from a SecretUserMetadatum instances.""" session = get_session() query = session.query(models.SecretUserMetadatum) query = query.filter_by(secret_id=secret_id) query = query.filter_by(key=key) query.delete() def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "SecretUserMetadatum" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" query = session.query(models.SecretUserMetadatum) return query.filter_by(id=entity_id) def _do_validate(self, values): """Sub-class hook: validate values.""" pass class KEKDatumRepo(BaseRepo): """Repository for the KEKDatum entity Stores key encryption key (KEK) metadata used by crypto plugins to encrypt/decrypt secrets. 
""" def find_or_create_kek_datum(self, project, plugin_name, suppress_exception=False, session=None): """Find or create a KEK datum instance.""" if not plugin_name: raise exception.BarbicanException( u._('Tried to register crypto plugin with null or empty ' 'name.')) kek_datum = None session = self.get_session(session) query = session.query(models.KEKDatum) query = query.filter_by(project_id=project.id, plugin_name=plugin_name, active=True, deleted=False) query = query.order_by(models.KEKDatum.created_at) kek_datums = query.all() if not kek_datums: kek_datum = models.KEKDatum() kek_datum.kek_label = "project-{0}-key-{1}".format( project.external_id, uuidutils.generate_uuid()) kek_datum.project_id = project.id kek_datum.plugin_name = plugin_name kek_datum.status = models.States.ACTIVE self.save(kek_datum) else: kek_datum = kek_datums.pop() # (alee) There should be only one active KEKDatum. # Due to a race condition with many threads or # many barbican processes, its possible to have # multiple active KEKDatum. The code below makes # all the extra KEKDatum inactive # See LP#1726378 for kd in kek_datums: LOG.debug( "Multiple active KEKDatum found for %s." "Setting %s to be inactive.", project.external_id, kd.kek_label) kd.active = False self.save(kd) return kek_datum def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "KEKDatum" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" return session.query(models.KEKDatum).filter_by(id=entity_id) def _do_validate(self, values): """Sub-class hook: validate values.""" pass def _build_get_project_entities_query(self, project_id, session): """Builds query for retrieving KEK Datum instance(s). The returned KEK Datum instance(s) are related to a given project. :param project_id: id of barbican project entity :param session: existing db session reference. """ return session.query(models.KEKDatum).filter_by( project_id=project_id).filter_by(deleted=False) class OrderRepo(BaseRepo): """Repository for the Order entity.""" def get_by_create_date(self, external_project_id, offset_arg=None, limit_arg=None, meta_arg=None, suppress_exception=False, session=None): """Returns a list of orders The list is ordered by the date they were created at and paged based on the offset and limit fields. :param external_project_id: The keystone id for the project. :param offset_arg: The entity number where the query result should start. :param limit_arg: The maximum amount of entities in the result set. :param meta_arg: Optional meta field used to filter results. :param suppress_exception: Whether NoResultFound exceptions should be suppressed. :param session: SQLAlchemy session object. :returns: Tuple consisting of (list_of_entities, offset, limit, total). 
""" offset, limit = clean_paging_values(offset_arg, limit_arg) session = self.get_session(session) query = session.query(models.Order) query = query.order_by(models.Order.created_at) query = query.filter_by(deleted=False) if meta_arg: query = query.filter(models.Order.meta.contains(meta_arg)) query = query.join(models.Project, models.Order.project) query = query.filter(models.Project.external_id == external_project_id) start = offset end = offset + limit LOG.debug('Retrieving from %s to %s', start, end) total = query.count() entities = query.offset(start).limit(limit).all() LOG.debug('Number entities retrieved: %s out of %s', len(entities), total ) if total <= 0 and not suppress_exception: _raise_no_entities_found(self._do_entity_name()) return entities, offset, limit, total def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "Order" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" query = session.query(models.Order) query = query.filter_by(id=entity_id, deleted=False) query = query.join(models.Project, models.Order.project) query = query.filter(models.Project.external_id == external_project_id) return query def _do_validate(self, values): """Sub-class hook: validate values.""" pass def _build_get_project_entities_query(self, project_id, session): """Builds query for retrieving orders related to given project. :param project_id: id of barbican project entity :param session: existing db session reference. """ return session.query(models.Order).filter_by( project_id=project_id).filter_by(deleted=False) class OrderPluginMetadatumRepo(BaseRepo): """Repository for the OrderPluginMetadatum entity Stores key/value plugin information on behalf of an Order. """ def save(self, metadata, order_model): """Saves the specified metadata for the order. :raises NotFound if entity does not exist. """ now = timeutils.utcnow() session = get_session() for k, v in metadata.items(): meta_model = models.OrderPluginMetadatum(k, v) meta_model.updated_at = now meta_model.order = order_model meta_model.save(session=session) def get_metadata_for_order(self, order_id): """Returns a dict of OrderPluginMetadatum instances.""" session = get_session() try: query = session.query(models.OrderPluginMetadatum) query = query.filter_by(deleted=False) query = query.filter( models.OrderPluginMetadatum.order_id == order_id) metadata = query.all() except sa_orm.exc.NoResultFound: metadata = {} return {m.key: m.value for m in metadata} def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "OrderPluginMetadatum" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" query = session.query(models.OrderPluginMetadatum) return query.filter_by(id=entity_id) def _do_validate(self, values): """Sub-class hook: validate values.""" pass class OrderBarbicanMetadatumRepo(BaseRepo): """Repository for the OrderBarbicanMetadatum entity Stores key/value plugin information on behalf of a Order. """ def save(self, metadata, order_model): """Saves the specified metadata for the order. :raises NotFound if entity does not exist. 
""" now = timeutils.utcnow() session = get_session() for k, v in metadata.items(): meta_model = models.OrderBarbicanMetadatum(k, v) meta_model.updated_at = now meta_model.order = order_model meta_model.save(session=session) def get_metadata_for_order(self, order_id): """Returns a dict of OrderBarbicanMetadatum instances.""" session = get_session() try: query = session.query(models.OrderBarbicanMetadatum) query = query.filter_by(deleted=False) query = query.filter( models.OrderBarbicanMetadatum.order_id == order_id) metadata = query.all() except sa_orm.exc.NoResultFound: metadata = {} return {m.key: m.value for m in metadata} def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "OrderBarbicanMetadatum" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" query = session.query(models.OrderBarbicanMetadatum) return query.filter_by(id=entity_id) def _do_validate(self, values): """Sub-class hook: validate values.""" pass class OrderRetryTaskRepo(BaseRepo): """Repository for the OrderRetryTask entity.""" def get_by_create_date( self, only_at_or_before_this_date=None, offset_arg=None, limit_arg=None, suppress_exception=False, session=None): """Returns a list of order retry task entities The list is ordered by the date they were created at and paged based on the offset and limit fields. :param only_at_or_before_this_date: If specified, only entities at or before this date are returned. :param offset_arg: The entity number where the query result should start. :param limit_arg: The maximum amount of entities in the result set. :param suppress_exception: Whether NoResultFound exceptions should be suppressed. :param session: SQLAlchemy session object. :returns: Tuple consisting of (list_of_entities, offset, limit, total). """ offset, limit = clean_paging_values(offset_arg, limit_arg) session = self.get_session(session) query = session.query(models.OrderRetryTask) query = query.order_by(models.OrderRetryTask.created_at) query = query.filter_by(deleted=False) if only_at_or_before_this_date: query = query.filter( models.OrderRetryTask.retry_at <= only_at_or_before_this_date) start = offset end = offset + limit LOG.debug('Retrieving from %s to %s', start, end) total = query.count() entities = query.offset(start).limit(limit).all() LOG.debug('Number entities retrieved: %s out of %s', len(entities), total ) if total <= 0 and not suppress_exception: _raise_no_entities_found(self._do_entity_name()) return entities, offset, limit, total def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "OrderRetryTask" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" query = session.query(models.OrderRetryTask) query = query.filter_by(id=entity_id, deleted=False) return query def _do_validate(self, values): """Sub-class hook: validate values.""" pass class ContainerRepo(BaseRepo): """Repository for the Container entity.""" def get_by_create_date(self, external_project_id, offset_arg=None, limit_arg=None, name_arg=None, type_arg=None, suppress_exception=False, session=None): """Returns a list of containers The list is ordered by the date they were created at and paged based on the offset and limit fields. The external_project_id is external-to-Barbican value assigned to the project by Keystone. 
""" offset, limit = clean_paging_values(offset_arg, limit_arg) session = self.get_session(session) query = session.query(models.Container) query = query.order_by(models.Container.created_at) query = query.filter_by(deleted=False) if name_arg: query = query.filter(models.Container.name.like(name_arg)) if type_arg: query = query.filter(models.Container.type == type_arg) query = query.join(models.Project, models.Container.project) query = query.filter(models.Project.external_id == external_project_id) start = offset end = offset + limit LOG.debug('Retrieving from %s to %s', start, end) total = query.count() entities = query.offset(start).limit(limit).all() LOG.debug('Number entities retrieved: %s out of %s', len(entities), total ) if total <= 0 and not suppress_exception: _raise_no_entities_found(self._do_entity_name()) return entities, offset, limit, total def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "Container" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" query = session.query(models.Container) query = query.filter_by(id=entity_id, deleted=False) query = query.join(models.Project, models.Container.project) query = query.filter(models.Project.external_id == external_project_id) return query def _do_validate(self, values): """Sub-class hook: validate values.""" pass def _build_get_project_entities_query(self, project_id, session): """Builds query for retrieving container related to given project. :param project_id: id of barbican project entity :param session: existing db session reference. """ return session.query(models.Container).filter_by( deleted=False).filter_by(project_id=project_id) def get_container_by_id(self, entity_id, suppress_exception=False, session=None): """Gets container by its entity id without project id check.""" session = self.get_session(session) try: query = session.query(models.Container) query = query.filter_by(id=entity_id, deleted=False) entity = query.one() except sa_orm.exc.NoResultFound: entity = None if not suppress_exception: LOG.exception("Problem getting container %s", entity_id) raise exception.NotFound(u._( "No container found with container-ID {id}").format( entity_name=self._do_entity_name(), id=entity_id)) return entity class ContainerSecretRepo(BaseRepo): """Repository for the ContainerSecret entity.""" def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "ContainerSecret" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" return session.query(models.ContainerSecret ).filter_by(id=entity_id) def _do_validate(self, values): """Sub-class hook: validate values.""" pass class ContainerConsumerRepo(BaseRepo): """Repository for the Service entity.""" def get_by_container_id(self, container_id, offset_arg=None, limit_arg=None, suppress_exception=False, session=None): """Returns a list of Consumers The list is ordered by the date they were created at and paged based on the offset and limit fields. 
""" offset, limit = clean_paging_values(offset_arg, limit_arg) session = self.get_session(session) query = session.query(models.ContainerConsumerMetadatum) query = query.order_by(models.ContainerConsumerMetadatum.name) query = query.filter_by(deleted=False) query = query.filter( models.ContainerConsumerMetadatum.container_id == container_id ) start = offset end = offset + limit LOG.debug('Retrieving from %s to %s', start, end) total = query.count() entities = query.offset(start).limit(limit).all() LOG.debug('Number entities retrieved: %s out of %s', len(entities), total ) if total <= 0 and not suppress_exception: _raise_no_entities_found(self._do_entity_name()) return entities, offset, limit, total def get_by_values(self, container_id, name, URL, suppress_exception=False, show_deleted=False, session=None): session = self.get_session(session) try: query = session.query(models.ContainerConsumerMetadatum) query = query.filter_by( container_id=container_id, name=name, URL=URL) if not show_deleted: query.filter_by(deleted=False) consumer = query.one() except sa_orm.exc.NoResultFound: consumer = None if not suppress_exception: raise exception.NotFound( u._("Could not find {entity_name}").format( entity_name=self._do_entity_name())) return consumer def create_or_update_from(self, new_consumer, container, session=None): session = self.get_session(session) try: container.updated_at = timeutils.utcnow() container.consumers.append(new_consumer) container.save(session=session) except db_exc.DBDuplicateEntry: session.rollback() # We know consumer already exists. # This operation is idempotent, so log this and move on LOG.debug("Consumer %s with URL %s already exists for " "container %s, continuing...", new_consumer.name, new_consumer.URL, new_consumer.container_id) # Get the existing entry and reuse it by clearing the deleted flags existing_consumer = self.get_by_values( new_consumer.container_id, new_consumer.name, new_consumer.URL, show_deleted=True) existing_consumer.deleted = False existing_consumer.deleted_at = None # We are not concerned about timing here -- set only, no reads existing_consumer.save() def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "ContainerConsumer" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" query = session.query(models.ContainerConsumerMetadatum) return query.filter_by(id=entity_id, deleted=False) def _do_validate(self, values): """Sub-class hook: validate values.""" pass def _build_get_project_entities_query(self, project_id, session): """Builds query for retrieving consumers associated with given project :param project_id: id of barbican project entity :param session: existing db session reference. """ query = session.query( models.ContainerConsumerMetadatum).filter_by(deleted=False) query = query.filter( models.ContainerConsumerMetadatum.project_id == project_id) return query class TransportKeyRepo(BaseRepo): """Repository for the TransportKey entity Stores transport keys for wrapping the secret data to/from a barbican client. """ def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "TransportKey" def get_by_create_date(self, plugin_name=None, offset_arg=None, limit_arg=None, suppress_exception=False, session=None): """Returns a list of transport keys The list is ordered from latest created first. The search accepts plugin_id as an optional parameter for the search. 
""" offset, limit = clean_paging_values(offset_arg, limit_arg) session = self.get_session(session) query = session.query(models.TransportKey) query = query.order_by(models.TransportKey.created_at) if plugin_name is not None: query = session.query(models.TransportKey) query = query.filter_by(deleted=False, plugin_name=plugin_name) else: query = query.filter_by(deleted=False) start = offset end = offset + limit LOG.debug('Retrieving from %s to %s', start, end) total = query.count() entities = query.offset(start).limit(limit).all() LOG.debug('Number of entities retrieved: %s out of %s', len(entities), total) if total <= 0 and not suppress_exception: _raise_no_entities_found(self._do_entity_name()) return entities, offset, limit, total def get_latest_transport_key(self, plugin_name, suppress_exception=False, session=None): """Returns the latest transport key for a given plugin.""" entity, offset, limit, total = self.get_by_create_date( plugin_name, offset_arg=0, limit_arg=1, suppress_exception=suppress_exception, session=session) return entity def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" return session.query(models.TransportKey).filter_by(id=entity_id) def _do_validate(self, values): """Sub-class hook: validate values.""" pass class CertificateAuthorityRepo(BaseRepo): """Repository for the CertificateAuthority entity. CertificateAuthority entries are not soft delete. So there is no need to have deleted=False filter in queries. """ def get_by_create_date(self, offset_arg=None, limit_arg=None, plugin_name=None, plugin_ca_id=None, suppress_exception=False, session=None, show_expired=False, project_id=None, restrict_to_project_cas=False): """Returns a list of certificate authorities The returned certificate authorities are ordered by the date they were created and paged based on the offset and limit fields. 
""" offset, limit = clean_paging_values(offset_arg, limit_arg) session = self.get_session(session) if restrict_to_project_cas: # get both subCAs which have been defined for your project # (cas for which the ca.project_id == project_id) AND # project_cas which are defined for your project # (pca.project_id = project_id) query1 = session.query(models.CertificateAuthority) query1 = query1.filter( models.CertificateAuthority.project_id == project_id) query2 = session.query(models.CertificateAuthority) query2 = query2.join(models.ProjectCertificateAuthority) query2 = query2.filter( models.ProjectCertificateAuthority.project_id == project_id) query = query1.union(query2) else: # get both subcas that have been defined for your project # (cas for which ca.project_id == project_id) AND # all top-level CAs (ca.project_id == None) query = session.query(models.CertificateAuthority) query = query.filter(or_( models.CertificateAuthority.project_id == project_id, models.CertificateAuthority.project_id.is_(None) )) query = query.order_by(models.CertificateAuthority.created_at) query = query.filter_by(deleted=False) if not show_expired: utcnow = timeutils.utcnow() query = query.filter(or_( models.CertificateAuthority.expiration.is_(None), models.CertificateAuthority.expiration > utcnow)) if plugin_name: query = query.filter( models.CertificateAuthority.plugin_name.like(plugin_name)) if plugin_ca_id: query = query.filter( models.CertificateAuthority.plugin_ca_id.like(plugin_ca_id)) start = offset end = offset + limit LOG.debug('Retrieving from %s to %s', start, end) total = query.count() entities = query.offset(start).limit(limit).all() LOG.debug('Number entities retrieved: %s out of %s', len(entities), total ) if total <= 0 and not suppress_exception: _raise_no_entities_found(self._do_entity_name()) return entities, offset, limit, total def update_entity(self, old_ca, parsed_ca_in, session=None): """Updates CA entry and its sub-entries.""" parsed_ca = dict(parsed_ca_in) # these fields cannot be modified parsed_ca.pop('plugin_name', None) parsed_ca.pop('plugin_ca_id', None) expiration = parsed_ca.pop('expiration', None) expiration_iso = timeutils.parse_isotime(expiration.strip()) new_expiration = timeutils.normalize_time(expiration_iso) session = self.get_session(session) query = session.query(models.CertificateAuthority).filter_by( id=old_ca.id, deleted=False) entity = query.one() entity.expiration = new_expiration for k, v in entity.ca_meta.items(): if k not in parsed_ca.keys(): v.delete(session) for key in parsed_ca: if key not in entity.ca_meta.keys(): meta = models.CertificateAuthorityMetadatum( key, parsed_ca[key]) entity.ca_meta[key] = meta else: entity.ca_meta[key].value = parsed_ca[key] entity.save() return entity def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "CertificateAuthority" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" utcnow = timeutils.utcnow() # TODO(jfwood): Performance? Is the many-to-many join needed? 
expiration_filter = or_( models.CertificateAuthority.expiration.is_(None), models.CertificateAuthority.expiration > utcnow) query = session.query(models.CertificateAuthority) query = query.filter_by(id=entity_id, deleted=False) query = query.filter(expiration_filter) return query def _do_validate(self, values): """Sub-class hook: validate values.""" pass def _build_get_project_entities_query(self, project_id, session): """Builds query for retrieving CA related to given project. :param project_id: id of barbican project entity :param session: existing db session reference. """ return session.query(models.CertificateAuthority).filter_by( project_id=project_id).filter_by(deleted=False) class CertificateAuthorityMetadatumRepo(BaseRepo): """Repository for the CertificateAuthorityMetadatum entity Stores key/value information on behalf of a CA. """ def save(self, metadata, ca_model): """Saves the specified metadata for the CA. :raises NotFound if entity does not exist. """ now = timeutils.utcnow() session = get_session() for k, v in metadata.items(): meta_model = models.CertificateAuthorityMetadatum(k, v) meta_model.updated_at = now meta_model.ca = ca_model meta_model.save(session=session) def get_metadata_for_certificate_authority(self, ca_id): """Returns a dict of CertificateAuthorityMetadatum instances.""" session = get_session() try: query = session.query(models.CertificateAuthorityMetadatum) query = query.filter_by(deleted=False) query = query.filter( models.CertificateAuthorityMetadatum.ca_id == ca_id) metadata = query.all() except sa_orm.exc.NoResultFound: metadata = dict() return {m.key: m.value for m in metadata} def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "CertificateAuthorityMetadatum" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" query = session.query(models.CertificateAuthorityMetadatum) return query.filter_by(id=entity_id) def _do_validate(self, values): """Sub-class hook: validate values.""" pass class ProjectCertificateAuthorityRepo(BaseRepo): """Repository for the ProjectCertificateAuthority entity. ProjectCertificateAuthority entries are not soft delete. So there is no need to have deleted=False filter in queries. """ def get_by_create_date(self, offset_arg=None, limit_arg=None, project_id=None, ca_id=None, suppress_exception=False, session=None): """Returns a list of project CAs The returned project CAs are ordered by the date they were created and paged based on the offset and limit fields.
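Example (illustrative; get_project_ca_repository() is assumed from
the _PROJECT_CA_REPOSITORY singleton)::

    project_ca_repo = get_project_ca_repository()
    entries, offset, limit, total = project_ca_repo.get_by_create_date(
        project_id=project.id, suppress_exception=True)
    # Both the project_id and ca_id filters are applied with SQL LIKE.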
""" offset, limit = clean_paging_values(offset_arg, limit_arg) session = self.get_session(session) query = session.query(models.ProjectCertificateAuthority) query = query.order_by(models.ProjectCertificateAuthority.created_at) query = query.filter_by(deleted=False) if project_id: query = query.filter( models.ProjectCertificateAuthority.project_id.like(project_id)) if ca_id: query = query.filter( models.ProjectCertificateAuthority.ca_id.like(ca_id)) start = offset end = offset + limit LOG.debug('Retrieving from %s to %s', start, end) total = query.count() entities = query.offset(start).limit(limit).all() LOG.debug('Number entities retrieved: %s out of %s', len(entities), total ) if total <= 0 and not suppress_exception: _raise_no_entities_found(self._do_entity_name()) return entities, offset, limit, total def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "ProjectCertificateAuthority" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" return session.query(models.ProjectCertificateAuthority).filter_by( id=entity_id) def _do_validate(self, values): """Sub-class hook: validate values.""" pass def _build_get_project_entities_query(self, project_id, session): """Builds query for retrieving CA related to given project. :param project_id: id of barbican project entity :param session: existing db session reference. """ return session.query(models.ProjectCertificateAuthority).filter_by( project_id=project_id) class PreferredCertificateAuthorityRepo(BaseRepo): """Repository for the PreferredCertificateAuthority entity. PreferredCertificateAuthority entries are not soft delete. So there is no need to have deleted=False filter in queries. """ def get_by_create_date(self, offset_arg=None, limit_arg=None, project_id=None, ca_id=None, suppress_exception=False, session=None): """Returns a list of preferred CAs The returned CAs are ordered by the date they were created and paged based on the offset and limit fields. """ offset, limit = clean_paging_values(offset_arg, limit_arg) session = self.get_session(session) query = session.query(models.PreferredCertificateAuthority) query = query.order_by(models.PreferredCertificateAuthority.created_at) if project_id: query = query.filter( models.PreferredCertificateAuthority.project_id.like( project_id)) if ca_id: query = query.filter( models.PreferredCertificateAuthority.ca_id.like(ca_id)) start = offset end = offset + limit LOG.debug('Retrieving from %s to %s', start, end) total = query.count() entities = query.offset(start).limit(limit).all() LOG.debug('Number entities retrieved: %s out of %s', len(entities), total ) if total <= 0 and not suppress_exception: _raise_no_entities_found(self._do_entity_name()) return entities, offset, limit, total def create_or_update_by_project_id(self, project_id, ca_id, session=None): """Create or update preferred CA for a project by project_id. :param project_id: ID of project whose preferred CA will be saved :param ca_id: ID of preferred CA :param session: SQLAlchemy session object. 
:return: None """ session = self.get_session(session) query = session.query(models.PreferredCertificateAuthority) query = query.filter_by(project_id=project_id) try: entity = query.one() except sa_orm.exc.NoResultFound: self.create_from( models.PreferredCertificateAuthority(project_id, ca_id), session=session) else: entity.ca_id = ca_id entity.save(session) def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "PreferredCertificateAuthority" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" return session.query(models.PreferredCertificateAuthority).filter_by( id=entity_id) def _do_validate(self, values): """Sub-class hook: validate values.""" pass def _build_get_project_entities_query(self, project_id, session): """Builds query for retrieving preferred CA related to given project. :param project_id: id of barbican project entity :param session: existing db session reference. """ return session.query(models.PreferredCertificateAuthority).filter_by( project_id=project_id) class SecretACLRepo(BaseRepo): """Repository for the SecretACL entity. There is no need for SecretACLUserRepo as none of logic access SecretACLUser (ACL user data) directly. Its always derived from SecretACL relationship. SecretACL and SecretACLUser data is not soft delete. So there is no need to have deleted=False filter in queries. """ def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "SecretACL" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" query = session.query(models.SecretACL) query = query.filter_by(id=entity_id) return query def _do_validate(self, values): """Sub-class hook: validate values.""" pass def get_by_secret_id(self, secret_id, session=None): """Return list of secret ACLs by secret id.""" session = self.get_session(session) query = session.query(models.SecretACL) query = query.filter_by(secret_id=secret_id) return query.all() def create_or_replace_from(self, secret, secret_acl, user_ids=None, session=None): session = self.get_session(session) secret.updated_at = timeutils.utcnow() secret_acl.updated_at = timeutils.utcnow() secret.secret_acls.append(secret_acl) secret.save(session=session) self._create_or_replace_acl_users(secret_acl, user_ids, session=session) def _create_or_replace_acl_users(self, secret_acl, user_ids, session=None): """Creates or updates secret acl user based on input user_ids list. user_ids is expected to be list of ids (enforced by schema validation). Input user ids should have complete list of acl users. It does not apply partial update of user ids. If user_ids is None, no change is made in acl user data. If user_ids list is not None, then following change is made. For existing acl users, just update timestamp if user_id is present in input user ids list. Otherwise, remove existing acl user entries. Then add the remaining input user ids as new acl user db entries. 
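Example (illustrative; the user IDs are hypothetical)::

    # Existing acl users: {'alice', 'bob'}
    # user_ids passed in:  ['bob', 'carol']
    # Result: 'alice' is removed, 'bob' keeps its entry with a
    # refreshed timestamp, and a new entry is created for 'carol'.
    # Passing user_ids=None leaves the existing entries untouched;
    # passing user_ids=[] removes them all.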
""" if user_ids is None: return user_ids = set(user_ids) now = timeutils.utcnow() session = self.get_session(session) secret_acl.updated_at = now for acl_user in secret_acl.acl_users: if acl_user.user_id in user_ids: # input user_id already exists acl_user.updated_at = now user_ids.remove(acl_user.user_id) else: acl_user.delete(session) for user_id in user_ids: acl_user = models.SecretACLUser(secret_acl.id, user_id) secret_acl.acl_users.append(acl_user) secret_acl.save(session=session) def get_count(self, secret_id, session=None): """Gets count of existing secret ACL(s) for a given secret.""" session = self.get_session(session) query = session.query(sa_func.count(models.SecretACL.id)) query = query.filter(models.SecretACL.secret_id == secret_id) return query.scalar() def delete_acls_for_secret(self, secret, session=None): session = self.get_session(session) for entity in secret.secret_acls: entity.delete(session=session) class SecretACLUserRepo(BaseRepo): """Repository for the SecretACLUser entity.""" def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "SecretACLUser" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" query = session.query(models.SecretACLUser) query = query.filter_by(id=entity_id) return query def _do_validate(self, values): """Sub-class hook: validate values.""" pass class ContainerACLRepo(BaseRepo): """Repository for the ContainerACL entity. There is no need for ContainerACLUserRepo as none of logic access ContainerACLUser (ACL user data) directly. Its always derived from ContainerACL relationship. ContainerACL and ContainerACLUser data is not soft delete. So there is no need to have deleted=False filter in queries. """ def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "ContainerACL" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" query = session.query(models.ContainerACL) query = query.filter_by(id=entity_id) return query def _do_validate(self, values): """Sub-class hook: validate values.""" pass def get_by_container_id(self, container_id, session=None): """Return list of container ACLs by container id.""" session = self.get_session(session) query = session.query(models.ContainerACL) query = query.filter_by(container_id=container_id) return query.all() def create_or_replace_from(self, container, container_acl, user_ids=None, session=None): session = self.get_session(session) container.updated_at = timeutils.utcnow() container_acl.updated_at = timeutils.utcnow() container.container_acls.append(container_acl) container.save(session=session) self._create_or_replace_acl_users(container_acl, user_ids, session) def _create_or_replace_acl_users(self, container_acl, user_ids, session=None): """Creates or updates container acl user based on input user_ids list. user_ids is expected to be list of ids (enforced by schema validation). Input user ids should have complete list of acl users. It does not apply partial update of user ids. If user_ids is None, no change is made in acl user data. If user_ids list is not None, then following change is made. For existing acl users, just update timestamp if user_id is present in input user ids list. Otherwise, remove existing acl user entries. Then add the remaining input user ids as new acl user db entries. 
""" if user_ids is None: return user_ids = set(user_ids) now = timeutils.utcnow() session = self.get_session(session) container_acl.updated_at = now for acl_user in container_acl.acl_users: if acl_user.user_id in user_ids: # input user_id already exists acl_user.updated_at = now user_ids.remove(acl_user.user_id) else: acl_user.delete(session) for user_id in user_ids: acl_user = models.ContainerACLUser(container_acl.id, user_id) container_acl.acl_users.append(acl_user) container_acl.save(session=session) def get_count(self, container_id, session=None): """Gets count of existing container ACL(s) for a given container.""" session = self.get_session(session) query = session.query(sa_func.count(models.ContainerACL.id)) query = query.filter(models.ContainerACL.container_id == container_id) return query.scalar() def delete_acls_for_container(self, container, session=None): session = self.get_session(session) for entity in container.container_acls: entity.delete(session=session) class ContainerACLUserRepo(BaseRepo): """Repository for ContainerACLUser entity.""" def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "ContainerACLUser" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" query = session.query(models.ContainerACLUser) query = query.filter_by(id=entity_id) return query def _do_validate(self, values): """Sub-class hook: validate values.""" pass class ProjectQuotasRepo(BaseRepo): """Repository for the ProjectQuotas entity.""" def _do_entity_name(self): """Sub-class hook: return entity name, such as for debugging.""" return "ProjectQuotas" def _do_build_get_query(self, entity_id, external_project_id, session): """Sub-class hook: build a retrieve query.""" return session.query(models.ProjectQuotas).filter_by(id=entity_id) def _do_validate(self, values): """Sub-class hook: validate values.""" pass def get_by_create_date(self, offset_arg=None, limit_arg=None, suppress_exception=False, session=None): """Returns a list of ProjectQuotas The list is ordered by the date they were created at and paged based on the offset and limit fields. :param offset_arg: The entity number where the query result should start. :param limit_arg: The maximum amount of entities in the result set. :param suppress_exception: Whether NoResultFound exceptions should be suppressed. :param session: SQLAlchemy session object. :raises NotFound: if no quota config is found for the project :returns: Tuple consisting of (list_of_entities, offset, limit, total). """ offset, limit = clean_paging_values(offset_arg, limit_arg) session = self.get_session(session) query = session.query(models.ProjectQuotas) query = query.order_by(models.ProjectQuotas.created_at) query = query.join(models.Project, models.ProjectQuotas.project) start = offset end = offset + limit LOG.debug('Retrieving from %s to %s', start, end) total = query.count() entities = query.offset(start).limit(limit).all() LOG.debug('Number entities retrieved: %s out of %s', len(entities), total) if total <= 0 and not suppress_exception: _raise_no_entities_found(self._do_entity_name()) return entities, offset, limit, total def create_or_update_by_project_id(self, project_id, parsed_project_quotas, session=None): """Create or update Project Quotas config for a project by project_id. :param project_id: ID of project whose quota config will be saved :param parsed_project_quotas: Python dict with quota definition :param session: SQLAlchemy session object. 
        :return: None
        """
        session = self.get_session(session)
        query = session.query(models.ProjectQuotas)
        query = query.filter_by(project_id=project_id)
        try:
            entity = query.one()
        except sa_orm.exc.NoResultFound:
            self.create_from(
                models.ProjectQuotas(project_id, parsed_project_quotas),
                session=session)
        else:
            self._update_values(entity, parsed_project_quotas)
            entity.save(session)

    def get_by_external_project_id(self, external_project_id,
                                   suppress_exception=False, session=None):
        """Return configured Project Quotas for a project by project_id.

        :param external_project_id: external ID of project to get quotas for
        :param suppress_exception: when True, NotFound is not raised
        :param session: SQLAlchemy session object.
        :raises NotFound: if no quota config is found for the project
        :return: None or the ProjectQuotas entity for the project
        """
        session = self.get_session(session)
        query = session.query(models.ProjectQuotas)
        query = query.join(models.Project, models.ProjectQuotas.project)
        query = query.filter(
            models.Project.external_id == external_project_id)
        try:
            entity = query.one()
        except sa_orm.exc.NoResultFound:
            if suppress_exception:
                return None
            else:
                _raise_no_entities_found(self._do_entity_name())
        return entity

    def delete_by_external_project_id(self, external_project_id,
                                      suppress_exception=False,
                                      session=None):
        """Remove configured Project Quotas for a project by project_id.

        :param external_project_id: external ID of project to delete quotas
                                    for
        :param suppress_exception: when True, NotFound is not raised
        :param session: SQLAlchemy session object.
        :raises NotFound: if no quota config is found for the project
        :return: None
        """
        session = self.get_session(session)
        query = session.query(models.ProjectQuotas)
        query = query.join(models.Project, models.ProjectQuotas.project)
        query = query.filter(
            models.Project.external_id == external_project_id)
        try:
            entity = query.one()
        except sa_orm.exc.NoResultFound:
            if suppress_exception:
                return
            else:
                _raise_no_entities_found(self._do_entity_name())
        entity.delete(session=session)


class SecretStoresRepo(BaseRepo):
    """Repository for the SecretStores entity.

    SecretStores entries are not soft deleted, so there is no need for a
    deleted=False filter in queries.
    """

    def get_all(self, session=None):
        """Get the list of available secret stores.

        The status value is not used when building the list, as only
        ACTIVE entries are maintained; no other state is used or needed
        here.

        :param session: SQLAlchemy session object.
        :return: list of SecretStores entities
        """
        session = self.get_session(session)
        query = session.query(models.SecretStores)
        # Note: the result of order_by() must be assigned back to the
        # query, otherwise the ordering would be silently dropped.
        query = query.order_by(models.SecretStores.created_at.asc())
        return query.all()

    def _do_entity_name(self):
        """Sub-class hook: return entity name, such as for debugging."""
        return "SecretStores"

    def _do_build_get_query(self, entity_id, external_project_id, session):
        """Sub-class hook: build a retrieve query."""
        return session.query(models.SecretStores).filter_by(
            id=entity_id)

    def _do_validate(self, values):
        """Sub-class hook: validate values."""
        pass


class ProjectSecretStoreRepo(BaseRepo):
    """Repository for the ProjectSecretStore entity.

    ProjectSecretStore entries are not soft deleted, so there is no need
    for a deleted=False filter in queries.
    """

    def get_secret_store_for_project(self, project_id, external_project_id,
                                     suppress_exception=False, session=None):
        """Returns the preferred secret store for a project, if set.
        :param project_id: ID of project whose preferred secret store is set
        :param external_project_id: external ID of project whose preferred
               secret store is set
        :param suppress_exception: when True, NotFound is not raised
        :param session: SQLAlchemy session object.

        The lookup is done by external project id when one is provided;
        otherwise the barbican project identifier is used. If no preferred
        secret store is defined and suppress_exception=False, an exception
        is raised. If suppress_exception is True, None is returned instead
        when no preferred secret store is found for the project.
        """
        session = self.get_session(session)
        if external_project_id is None:
            query = session.query(models.ProjectSecretStore).filter_by(
                project_id=project_id)
        else:
            query = session.query(models.ProjectSecretStore)
            query = query.join(models.Project,
                               models.ProjectSecretStore.project)
            query = query.filter(
                models.Project.external_id == external_project_id)
        try:
            entity = query.one()
        except sa_orm.exc.NoResultFound:
            LOG.info("No preferred secret store found for project = %s",
                     project_id)
            entity = None
            if not suppress_exception:
                _raise_entity_not_found(self._do_entity_name(), project_id)
        return entity

    def create_or_update_for_project(self, project_id, secret_store_id,
                                     session=None):
        """Create or update the preferred secret store for a project.

        :param project_id: ID of project whose preferred secret store is set
        :param secret_store_id: ID of secret store
        :param session: SQLAlchemy session object.
        :return: the ProjectSecretStore entity that was created or updated

        If no preferred secret store is set for the given project, a new
        preferred secret store setting is created for that project. If a
        secret store setting already exists for the project, it is updated
        with the given secret store id.
        """
        session = self.get_session(session)
        try:
            entity = self.get_secret_store_for_project(project_id, None,
                                                       session=session)
        except exception.NotFound:
            entity = self.create_from(
                models.ProjectSecretStore(project_id, secret_store_id),
                session=session)
        else:
            entity.secret_store_id = secret_store_id
            entity.save(session)
        return entity

    def get_count_by_secret_store(self, secret_store_id, session=None):
        """Gets the count of projects mapped to a given secret store.

        :param secret_store_id: id of secret stores entity
        :param session: existing db session reference. If None, gets session.
        :return: a number 0 or greater

        This method provides the count of projects that are currently set
        to use the input secret store as their preferred store. It is used
        when an existing secret store configuration is removed, to validate
        that no projects still use it as their preferred secret store.
        """
        session = self.get_session(session)
        query = session.query(models.ProjectSecretStore).filter_by(
            secret_store_id=secret_store_id)
        return query.count()

    def _do_entity_name(self):
        """Sub-class hook: return entity name, such as for debugging."""
        return "ProjectSecretStore"

    def _do_build_get_query(self, entity_id, external_project_id, session):
        """Sub-class hook: build a retrieve query."""
        return session.query(models.ProjectSecretStore).filter_by(
            id=entity_id)

    def _do_validate(self, values):
        """Sub-class hook: validate values."""
        pass

    def _build_get_project_entities_query(self, project_id, session):
        """Builds query for the preferred secret store of a given project.

        :param project_id: id of barbican project entity
        :param session: existing db session reference.
""" return session.query(models.ProjectSecretStore).filter_by( project_id=project_id) class SecretConsumerRepo(BaseRepo): """Repository for the SecretConsumer entity.""" def get_by_secret_id(self, secret_id, offset_arg=None, limit_arg=None, suppress_exception=False, session=None): """Returns a list of SecretConsumers for a specific secret_id The list is ordered by the date they were created at and paged based on the offset and limit fields. """ offset, limit = clean_paging_values(offset_arg, limit_arg) session = self.get_session(session) query = session.query(models.SecretConsumerMetadatum) query = query.order_by(models.SecretConsumerMetadatum.created_at) query = query.filter_by(deleted=False) query = query.filter( models.SecretConsumerMetadatum.secret_id == secret_id ) start = offset end = offset + limit LOG.debug('Retrieving from %s to %s', start, end) total = query.count() entities = query.offset(start).limit(limit).all() LOG.debug('Number entities retrieved: %s out of %s', len(entities), total ) if total <= 0 and not suppress_exception: _raise_no_entities_found(self._do_entity_name()) return entities, offset, limit, total def get_by_resource_id(self, resource_id, offset_arg=None, limit_arg=None, suppress_exception=False, session=None): """Returns a list of SecretConsumers for a specific resource_id The list is ordered by the date they were created at and paged based on the offset and limit fields. """ offset, limit = clean_paging_values(offset_arg, limit_arg) session = self.get_session(session) query = session.query(models.SecretConsumerMetadatum) query = query.order_by(models.SecretConsumerMetadatum.created_at) query = query.filter_by(deleted=False) query = query.filter( models.SecretConsumerMetadatum.resource_id == resource_id ) start = offset end = offset + limit LOG.debug('Retrieving from %s to %s', start, end) total = query.count() entities = query.offset(start).limit(limit).all() LOG.debug('Number entities retrieved: %s out of %s', len(entities), total ) if total <= 0 and not suppress_exception: _raise_no_entities_found(self._do_entity_name()) return entities, offset, limit, total def get_by_values(self, secret_id, resource_id, suppress_exception=False, show_deleted=False, session=None): session = self.get_session(session) try: query = session.query(models.SecretConsumerMetadatum) query = query.filter_by( secret_id=secret_id, resource_id=resource_id, ) if not show_deleted: query.filter_by(deleted=False) consumer = query.one() except sa_orm.exc.NoResultFound: consumer = None if not suppress_exception: raise exception.NotFound( u._("Could not find {entity_name}").format( entity_name=self._do_entity_name())) return consumer def create_or_update_from(self, new_consumer, secret, session=None): session = self.get_session(session) try: secret.updated_at = timeutils.utcnow() secret.consumers.append(new_consumer) secret.save(session=session) except db_exc.DBDuplicateEntry: session.rollback() # We know consumer already exists. 
            # This operation is idempotent, so log this and move on
            LOG.debug(
                "Consumer with resource_id %s already exists for secret %s...",
                new_consumer.resource_id, new_consumer.secret_id
            )
            # Get the existing entry and reuse it by clearing the deleted
            # flags
            existing_consumer = self.get_by_values(
                new_consumer.secret_id, new_consumer.resource_id,
                show_deleted=True
            )
            existing_consumer.deleted = False
            existing_consumer.deleted_at = None
            # We are not concerned about timing here -- set only, no reads
            existing_consumer.save()

    def _do_entity_name(self):
        """Sub-class hook: return entity name, such as for debugging."""
        return "SecretConsumer"

    def _do_build_get_query(self, entity_id, external_project_id, session):
        """Sub-class hook: build a retrieve query."""
        query = session.query(models.SecretConsumerMetadatum)
        return query.filter_by(id=entity_id, deleted=False)

    def _do_validate(self, values):
        """Sub-class hook: validate values."""
        pass

    def _build_get_project_entities_query(self, project_id, session):
        """Builds query for retrieving consumers associated with given project

        :param project_id: id of barbican project entity
        :param session: existing db session reference.
        """
        query = session.query(
            models.SecretConsumerMetadatum).filter_by(deleted=False)
        query = query.filter(
            models.SecretConsumerMetadatum.project_id == project_id)

        return query


def get_ca_repository():
    """Returns a singleton Certificate Authority repository instance."""
    global _CA_REPOSITORY
    return _get_repository(_CA_REPOSITORY, CertificateAuthorityRepo)


def get_container_acl_repository():
    """Returns a singleton Container ACL repository instance."""
    global _CONTAINER_ACL_REPOSITORY
    return _get_repository(_CONTAINER_ACL_REPOSITORY, ContainerACLRepo)


def get_container_consumer_repository():
    """Returns a singleton Container Consumer repository instance."""
    global _CONTAINER_CONSUMER_REPOSITORY
    return _get_repository(_CONTAINER_CONSUMER_REPOSITORY,
                           ContainerConsumerRepo)


def get_container_repository():
    """Returns a singleton Container repository instance."""
    global _CONTAINER_REPOSITORY
    return _get_repository(_CONTAINER_REPOSITORY, ContainerRepo)


def get_container_secret_repository():
    """Returns a singleton Container-Secret repository instance."""
    global _CONTAINER_SECRET_REPOSITORY
    return _get_repository(_CONTAINER_SECRET_REPOSITORY, ContainerSecretRepo)


def get_container_acl_user_repository():
    """Returns a singleton Container-ACL-User repository instance."""
    global _CONTAINER_ACL_USER_REPOSITORY
    return _get_repository(_CONTAINER_ACL_USER_REPOSITORY,
                           ContainerACLUserRepo)


def get_encrypted_datum_repository():
    """Returns a singleton Encrypted Datum repository instance."""
    global _ENCRYPTED_DATUM_REPOSITORY
    return _get_repository(_ENCRYPTED_DATUM_REPOSITORY, EncryptedDatumRepo)


def get_kek_datum_repository():
    """Returns a singleton KEK Datum repository instance."""
    global _KEK_DATUM_REPOSITORY
    return _get_repository(_KEK_DATUM_REPOSITORY, KEKDatumRepo)


def get_order_plugin_meta_repository():
    """Returns a singleton Order-Plugin meta repository instance."""
    global _ORDER_PLUGIN_META_REPOSITORY
    return _get_repository(_ORDER_PLUGIN_META_REPOSITORY,
                           OrderPluginMetadatumRepo)


def get_order_barbican_meta_repository():
    """Returns a singleton Order-Barbican meta repository instance."""
    global _ORDER_BARBICAN_META_REPOSITORY
    return _get_repository(_ORDER_BARBICAN_META_REPOSITORY,
                           OrderBarbicanMetadatumRepo)


def get_order_repository():
    """Returns a singleton Order repository instance."""
    global _ORDER_REPOSITORY
    return _get_repository(_ORDER_REPOSITORY, OrderRepo)


def get_order_retry_tasks_repository():
    """Returns a singleton OrderRetryTask repository instance."""
    global _ORDER_RETRY_TASK_REPOSITORY
    return _get_repository(_ORDER_RETRY_TASK_REPOSITORY,
                           OrderRetryTaskRepo)


def get_preferred_ca_repository():
    """Returns a singleton Preferred Certificate Authority repository instance."""
    global _PREFERRED_CA_REPOSITORY
    return _get_repository(_PREFERRED_CA_REPOSITORY,
                           PreferredCertificateAuthorityRepo)


def get_project_repository():
    """Returns a singleton Project repository instance."""
    global _PROJECT_REPOSITORY
    return _get_repository(_PROJECT_REPOSITORY, ProjectRepo)


def get_project_ca_repository():
    """Returns a singleton Project Certificate Authority repository instance."""
    global _PROJECT_CA_REPOSITORY
    return _get_repository(_PROJECT_CA_REPOSITORY,
                           ProjectCertificateAuthorityRepo)


def get_project_quotas_repository():
    """Returns a singleton Project Quotas repository instance."""
    global _PROJECT_QUOTAS_REPOSITORY
    return _get_repository(_PROJECT_QUOTAS_REPOSITORY, ProjectQuotasRepo)


def get_secret_acl_repository():
    """Returns a singleton Secret ACL repository instance."""
    global _SECRET_ACL_REPOSITORY
    return _get_repository(_SECRET_ACL_REPOSITORY, SecretACLRepo)


def get_secret_acl_user_repository():
    """Returns a singleton Secret-ACL-User repository instance."""
    global _SECRET_ACL_USER_REPOSITORY
    return _get_repository(_SECRET_ACL_USER_REPOSITORY, SecretACLUserRepo)


def get_secret_meta_repository():
    """Returns a singleton Secret meta repository instance."""
    global _SECRET_META_REPOSITORY
    return _get_repository(_SECRET_META_REPOSITORY, SecretStoreMetadatumRepo)


def get_secret_user_meta_repository():
    """Returns a singleton Secret user meta repository instance."""
    global _SECRET_USER_META_REPOSITORY
    return _get_repository(_SECRET_USER_META_REPOSITORY,
                           SecretUserMetadatumRepo)


def get_secret_repository():
    """Returns a singleton Secret repository instance."""
    global _SECRET_REPOSITORY
    return _get_repository(_SECRET_REPOSITORY, SecretRepo)


def get_transport_key_repository():
    """Returns a singleton Transport Key repository instance."""
    global _TRANSPORT_KEY_REPOSITORY
    return _get_repository(_TRANSPORT_KEY_REPOSITORY, TransportKeyRepo)


def get_secret_stores_repository():
    """Returns a singleton Secret Stores repository instance."""
    global _SECRET_STORES_REPOSITORY
    return _get_repository(_SECRET_STORES_REPOSITORY, SecretStoresRepo)


def get_project_secret_store_repository():
    """Returns a singleton Project Secret Store repository instance."""
    global _PROJECT_SECRET_STORE_REPOSITORY
    return _get_repository(_PROJECT_SECRET_STORE_REPOSITORY,
                           ProjectSecretStoreRepo)


def get_secret_consumer_repository():
    """Returns a singleton Secret Consumer repository instance."""
    global _SECRET_CONSUMER_REPOSITORY
    return _get_repository(_SECRET_CONSUMER_REPOSITORY, SecretConsumerRepo)


def _get_repository(global_ref, repo_class):
    if not global_ref:
        global_ref = repo_class()
    return global_ref


def _raise_entity_not_found(entity_name, entity_id):
    raise exception.NotFound(u._("No {entity} found with ID {id}").format(
        entity=entity_name, id=entity_id))


def _raise_entity_id_not_found(entity_id):
    raise exception.NotFound(u._("Entity ID {entity_id} not "
                                 "found").format(entity_id=entity_id))


def _raise_no_entities_found(entity_name):
    raise exception.NotFound(
        u._("No entities of type {entity_name} found").format(
            entity_name=entity_name))
barbican-9.1.0.dev50/barbican/model/__init__.py0000664000175000017500000000000013616500636021356 0ustar
sahidsahid00000000000000barbican-9.1.0.dev50/barbican/model/clean.py0000664000175000017500000003567013616500636020726 0ustar sahidsahid00000000000000# Copyright (c) 2016 IBM # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from barbican.common import config from barbican.model import models from barbican.model import repositories as repo from oslo_log import log from oslo_utils import timeutils from sqlalchemy import sql as sa_sql import datetime # Import and configure logging. CONF = config.CONF log.setup(CONF, 'barbican') LOG = log.getLogger(__name__) def cleanup_unassociated_projects(): """Clean up unassociated projects. This looks for projects that have no children entries on the dependent tables and removes them. """ LOG.debug("Cleaning up unassociated projects") session = repo.get_session() project_children_tables = [models.Order, models.KEKDatum, models.SecretConsumerMetadatum, models.Secret, models.ContainerConsumerMetadatum, models.Container, models.PreferredCertificateAuthority, models.CertificateAuthority, models.ProjectCertificateAuthority, models.ProjectQuotas] children_names = map(lambda child: child.__name__, project_children_tables) LOG.debug("Children tables for Project table being checked: %s", str(children_names)) sub_query = session.query(models.Project.id) for model in project_children_tables: sub_query = sub_query.outerjoin(model, models.Project.id == model.project_id) sub_query = sub_query.filter(model.id == None) # nopep8 sub_query = sub_query.subquery() sub_query = sa_sql.select([sub_query]) query = session.query(models.Project) query = query.filter(models.Project.id.in_(sub_query)) delete_count = query.delete(synchronize_session='fetch') LOG.info("Cleaned up %(delete_count)s entries for " "%(project_name)s", {'delete_count': str(delete_count), 'project_name': models.Project.__name__}) return delete_count def cleanup_parent_with_no_child(parent_model, child_model, threshold_date=None): """Clean up soft deletions in parent that do not have references in child. Before running this function, the child table should be cleaned of soft deletions. This function left outer joins the parent and child tables and finds the parent entries that do not have a foreign key reference in the child table. Then the results are filtered by soft deletions and are cleaned up. 
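    The statement this builds is roughly the following (an illustrative
    sketch, not the exact SQL emitted by SQLAlchemy)::

        DELETE FROM parent WHERE parent.id IN (
            SELECT parent.id FROM parent
            LEFT OUTER JOIN child ON parent.id = child.parent_id
            WHERE child.id IS NULL
        ) AND parent.deleted [AND parent.deleted_at <= threshold_date]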
    :param parent_model: table class for parent
    :param child_model: table class for child which restricts parent deletion
    :param threshold_date: soft deletions older than this date will be removed
    :returns: total number of entries removed from database
    """
    LOG.debug("Cleaning soft deletes for %(parent_name)s without "
              "a child in %(child_name)s",
              {'parent_name': parent_model.__name__,
               'child_name': child_model.__name__})
    session = repo.get_session()
    sub_query = session.query(parent_model.id)
    sub_query = sub_query.outerjoin(child_model)
    sub_query = sub_query.filter(child_model.id == None)  # nopep8
    sub_query = sub_query.subquery()
    sub_query = sa_sql.select([sub_query])
    query = session.query(parent_model)
    query = query.filter(parent_model.id.in_(sub_query))
    query = query.filter(parent_model.deleted)
    if threshold_date:
        query = query.filter(parent_model.deleted_at <= threshold_date)
    delete_count = query.delete(synchronize_session='fetch')
    LOG.info("Cleaned up %(delete_count)s entries for %(parent_name)s "
             "with no children in %(child_name)s",
             {'delete_count': delete_count,
              'parent_name': parent_model.__name__,
              'child_name': child_model.__name__})
    return delete_count


def cleanup_softdeletes(model, threshold_date=None):
    """Remove soft deletions from a table.

    :param model: table class to remove soft deletions
    :param threshold_date: soft deletions older than this date will be removed
    :returns: total number of entries removed from the database
    """
    LOG.debug("Cleaning soft deletes: %s", model.__name__)
    session = repo.get_session()
    query = session.query(model)
    query = query.filter_by(deleted=True)
    if threshold_date:
        query = query.filter(model.deleted_at <= threshold_date)
    delete_count = query.delete()
    LOG.info("Cleaned up %(delete_count)s entries for %(model_name)s",
             {'delete_count': delete_count,
              'model_name': model.__name__})
    return delete_count


def cleanup_all(threshold_date=None):
    """Clean up the main soft deletable resources.

    This function invokes the individual cleanup calls in a
    dependency-aware order.
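    Child tables are cleaned before their parents are checked (for example,
    the Order metadata and retry tables before Order itself, and the Secret
    metadata and datum tables before Secret), so that the
    parent-with-no-child checks run against already-cleaned child tables.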
:param threshold_date: soft deletions older than this date will be removed :returns: total number of entries removed from the database """ LOG.debug("Cleaning up soft deletions where deletion date" " is older than %s", str(threshold_date)) total = 0 total += cleanup_softdeletes(models.TransportKey, threshold_date=threshold_date) total += cleanup_softdeletes(models.OrderBarbicanMetadatum, threshold_date=threshold_date) total += cleanup_softdeletes(models.OrderRetryTask, threshold_date=threshold_date) total += cleanup_softdeletes(models.OrderPluginMetadatum, threshold_date=threshold_date) total += cleanup_parent_with_no_child(models.Order, models.OrderRetryTask, threshold_date=threshold_date) total += cleanup_softdeletes(models.EncryptedDatum, threshold_date=threshold_date) total += cleanup_softdeletes(models.SecretUserMetadatum, threshold_date=threshold_date) total += cleanup_softdeletes(models.SecretStoreMetadatum, threshold_date=threshold_date) total += cleanup_softdeletes(models.ContainerSecret, threshold_date=threshold_date) total += cleanup_softdeletes(models.SecretConsumerMetadatum, threshold_date=threshold_date) total += cleanup_parent_with_no_child(models.Secret, models.Order, threshold_date=threshold_date) total += cleanup_softdeletes(models.ContainerConsumerMetadatum, threshold_date=threshold_date) total += cleanup_parent_with_no_child(models.Container, models.Order, threshold_date=threshold_date) total += cleanup_softdeletes(models.KEKDatum, threshold_date=threshold_date) # TODO(edtubill) Clean up projects that were soft deleted by # the keystone listener LOG.info("Cleaned up %s soft deleted entries", total) return total def _soft_delete_expired_secrets(threshold_date): """Soft delete expired secrets. :param threshold_date: secrets that have expired past this date will be soft deleted :returns: total number of secrets that were soft deleted """ current_time = timeutils.utcnow() session = repo.get_session() query = session.query(models.Secret.id) query = query.filter(~models.Secret.deleted) query = query.filter( models.Secret.expiration <= threshold_date ) update_count = query.update( { models.Secret.deleted: True, models.Secret.deleted_at: current_time }, synchronize_session='fetch') return update_count def _hard_delete_acls_for_soft_deleted_secrets(): """Remove acl entries for secrets that have been soft deleted. Removes entries in SecretACL and SecretACLUser which are for secrets that have been soft deleted. 
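    The ACL user rows are removed first and the ACL rows second, since
    SecretACLUser entries reference SecretACL entries. Each step selects the
    ids to purge via an IN (...) subquery joined against the soft-deleted
    secrets, roughly as follows (a sketch only; actual table names come from
    the model definitions)::

        DELETE FROM secret_acl_users WHERE id IN (
            SELECT secret_acl_users.id FROM secret_acl_users
            JOIN secret_acls ... JOIN secrets ...
            WHERE secrets.deleted
        )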
""" session = repo.get_session() acl_user_sub_query = session.query(models.SecretACLUser.id) acl_user_sub_query = acl_user_sub_query.join(models.SecretACL) acl_user_sub_query = acl_user_sub_query.join(models.Secret) acl_user_sub_query = acl_user_sub_query.filter(models.Secret.deleted) acl_user_sub_query = acl_user_sub_query.subquery() acl_user_sub_query = sa_sql.select([acl_user_sub_query]) acl_user_query = session.query(models.SecretACLUser) acl_user_query = acl_user_query.filter( models.SecretACLUser.id.in_(acl_user_sub_query)) acl_total = acl_user_query.delete(synchronize_session='fetch') acl_sub_query = session.query(models.SecretACL.id) acl_sub_query = acl_sub_query.join(models.Secret) acl_sub_query = acl_sub_query.filter(models.Secret.deleted) acl_sub_query = acl_sub_query.subquery() acl_sub_query = sa_sql.select([acl_sub_query]) acl_query = session.query(models.SecretACL) acl_query = acl_query.filter( models.SecretACL.id.in_(acl_sub_query)) acl_total += acl_query.delete(synchronize_session='fetch') return acl_total def _soft_delete_expired_secret_children(threshold_date): """Soft delete the children tables of expired secrets. Soft deletes the children tables and hard deletes the ACL children tables of the expired secrets. :param threshold_date: threshold date for secret expiration :returns: returns a pair for number of soft delete children and deleted ACLs """ current_time = timeutils.utcnow() secret_children = [models.SecretStoreMetadatum, models.SecretUserMetadatum, models.EncryptedDatum, models.ContainerSecret] children_names = map(lambda child: child.__name__, secret_children) LOG.debug("Children tables for Secret table being checked: %s", str(children_names)) session = repo.get_session() update_count = 0 for table in secret_children: # Go through children and soft delete them sub_query = session.query(table.id) sub_query = sub_query.join(models.Secret) sub_query = sub_query.filter( models.Secret.expiration <= threshold_date ) sub_query = sub_query.subquery() sub_query = sa_sql.select([sub_query]) query = session.query(table) query = query.filter(table.id.in_(sub_query)) current_update_count = query.update( { table.deleted: True, table.deleted_at: current_time }, synchronize_session='fetch') update_count += current_update_count session.flush() acl_total = _hard_delete_acls_for_soft_deleted_secrets() return update_count, acl_total def soft_delete_expired_secrets(threshold_date): """Soft deletes secrets that are past expiration date. The expired secrets and its children are marked for deletion. ACLs are soft deleted and then purged from the database. :param threshold_date: secrets that have expired past this date will be soft deleted :returns: the sum of soft deleted entries and hard deleted acl entries """ # Note: sqllite does not support multiple table updates so # several db updates are used instead LOG.debug('Soft deleting expired secrets older than: %s', str(threshold_date)) update_count = _soft_delete_expired_secrets(threshold_date) children_count, acl_total = _soft_delete_expired_secret_children( threshold_date) update_count += children_count LOG.info("Soft deleted %(update_count)s entries due to secret " "expiration and %(acl_total)s secret acl entries " "were removed from the database", {'update_count': update_count, 'acl_total': acl_total}) return update_count + acl_total def clean_command(sql_url, min_num_days, do_clean_unassociated_projects, do_soft_delete_expired_secrets, verbose, log_file): """Clean command to clean up the database. 
:param sql_url: sql connection string to connect to a database :param min_num_days: clean up soft deletions older than this date :param do_clean_unassociated_projects: If True, clean up unassociated projects :param do_soft_delete_expired_secrets: If True, soft delete secrets that have expired :param verbose: If True, log and print more information :param log_file: If set, override the log_file configured """ if verbose: # The verbose flag prints out log events to the screen, otherwise # the log events will only go to the log file CONF.set_override('debug', True) if log_file: CONF.set_override('log_file', log_file) LOG.info("Cleaning up soft deletions in the barbican database") log.setup(CONF, 'barbican') cleanup_total = 0 current_time = timeutils.utcnow() stop_watch = timeutils.StopWatch() stop_watch.start() try: if sql_url: CONF.set_override('sql_connection', sql_url) repo.setup_database_engine_and_factory() if do_clean_unassociated_projects: cleanup_total += cleanup_unassociated_projects() if do_soft_delete_expired_secrets: cleanup_total += soft_delete_expired_secrets( threshold_date=current_time) threshold_date = None if min_num_days >= 0: threshold_date = current_time - datetime.timedelta( days=min_num_days) else: threshold_date = current_time cleanup_total += cleanup_all(threshold_date=threshold_date) repo.commit() except Exception as ex: LOG.exception('Failed to clean up soft deletions in database.') repo.rollback() cleanup_total = 0 # rollback happened, no entries affected raise ex finally: stop_watch.stop() elapsed_time = stop_watch.elapsed() if verbose: CONF.clear_override('debug') if log_file: CONF.clear_override('log_file') repo.clear() if sql_url: CONF.clear_override('sql_connection') log.setup(CONF, 'barbican') # reset the overrides LOG.info("Cleaning of database affected %s entries", cleanup_total) LOG.info('DB clean up finished in %s seconds', elapsed_time) barbican-9.1.0.dev50/barbican/queue/0000775000175000017500000000000013616500640017276 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/queue/retry_scheduler.py0000664000175000017500000001342213616500636023062 0ustar sahidsahid00000000000000# Copyright (c) 2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ Retry/scheduler classes and logic. """ import datetime import random from oslo_service import periodic_task from oslo_service import service from barbican.common import config from barbican.common import utils from barbican.model import models from barbican.model import repositories from barbican.queue import client as async_client LOG = utils.getLogger(__name__) CONF = config.CONF def _compute_next_periodic_interval(): periodic_interval = ( CONF.retry_scheduler.periodic_interval_max_seconds ) # Return +- 20% of interval. return random.uniform(0.8 * periodic_interval, # nosec 1.2 * periodic_interval) class PeriodicServer(service.Service): """Server to process retry and scheduled tasks. 
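    (Scheduling note: each pass is rescheduled with a jittered delay. For
    example, with periodic_interval_max_seconds = 60, a hypothetical
    setting, _compute_next_periodic_interval() above returns a uniform
    random delay between 48 and 72 seconds, i.e. +/- 20%.)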
    This server is an Oslo periodic-task service (see
    https://docs.openstack.org/oslo.service/latest/reference/periodic_task.html).
    On a periodic basis, this server checks for tasks that need to be
    retried, and then sends them up to the RPC queue for later processing
    by a worker node.
    """
    def __init__(self, queue_resource=None):
        super(PeriodicServer, self).__init__()

        # Setting up db engine to avoid lazy initialization
        repositories.setup_database_engine_and_factory()

        # Connect to the worker queue, to send retry RPC tasks to it later.
        self.queue = queue_resource or async_client.TaskClient()

        # Start up the periodic task retry scheduler.
        periodic_interval = (
            CONF.retry_scheduler.periodic_interval_max_seconds
        )
        self.tg.add_dynamic_timer(
            self._check_retry_tasks,
            initial_delay=CONF.retry_scheduler.initial_delay_seconds,
            periodic_interval_max=periodic_interval)

        self.order_retry_repo = repositories.get_order_retry_tasks_repository()

    def start(self):
        LOG.info("Starting the PeriodicServer")
        super(PeriodicServer, self).start()

    def stop(self, graceful=True):
        LOG.info("Halting the PeriodicServer")
        super(PeriodicServer, self).stop(graceful=graceful)

    @periodic_task.periodic_task
    def _check_retry_tasks(self):
        """Periodically check to see if tasks need to be scheduled.

        :return: Return the number of seconds to wait before invoking this
                 method again.
        """
        total_tasks_processed = 0
        try:
            total_tasks_processed = self._process_retry_tasks()
        except Exception:
            LOG.exception("Problem seen processing scheduled retry tasks")

        # Return the next delay before this method is invoked again.
        check_again_in_seconds = _compute_next_periodic_interval()
        LOG.info("Done processing '%(total)s' tasks, will check again in "
                 "'%(next)s' seconds.",
                 {
                     'total': total_tasks_processed,
                     'next': check_again_in_seconds
                 })
        return check_again_in_seconds

    def _process_retry_tasks(self):
        """Scan for and then re-queue tasks that are ready to retry."""
        LOG.info("Processing scheduled retry tasks:")

        # Retrieve tasks to retry.
        entities, total = self._retrieve_tasks()

        # Create RPC tasks for each retry task found.
        for task in entities:
            self._enqueue_task(task)

        return total

    def _retrieve_tasks(self):
        """Retrieve a list of tasks to retry."""
        repositories.start()
        try:
            entities, _, _, total = self.order_retry_repo.get_by_create_date(
                only_at_or_before_this_date=datetime.datetime.utcnow(),
                suppress_exception=True)
        finally:
            repositories.clear()

        return entities, total

    def _enqueue_task(self, task):
        """Re-enqueue the specified task."""
        retry_task_name = 'N/A'
        retry_args = 'N/A'
        retry_kwargs = 'N/A'

        # Start a new isolated database transaction just for this task.
        repositories.start()
        try:
            # Invoke queue client to place retried RPC task on queue.
            retry_task_name = task.retry_task
            retry_args = task.retry_args
            retry_kwargs = task.retry_kwargs
            retry_method = getattr(self.queue, retry_task_name)
            retry_method(*retry_args, **retry_kwargs)

            # Remove the retry record from the queue.
task.status = models.States.ACTIVE self.order_retry_repo.delete_entity_by_id(task.id, None) repositories.commit() LOG.debug( "(Enqueued method '{0}' with args '{1}' and " "kwargs '{2}')".format( retry_task_name, retry_args, retry_kwargs)) except Exception: LOG.exception("Problem enqueuing method '%(name)s' with args " "'%(args)s' and kwargs '%(kwargs)s'.", { 'name': retry_task_name, 'args': retry_args, 'kwargs': retry_kwargs } ) repositories.rollback() finally: repositories.clear() barbican-9.1.0.dev50/barbican/queue/client.py0000664000175000017500000000633113616500636021136 0ustar sahidsahid00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ Client-side (i.e. API side) classes and logic. """ from barbican.common import utils from barbican import i18n as u from barbican import queue from barbican.queue import server LOG = utils.getLogger(__name__) class TaskClient(object): """API-side client interface to asynchronous queuing services. The class delegates calls to the oslo_messaging RPC framework. """ def __init__(self): super(TaskClient, self).__init__() # Establish either an asynchronous messaging/queuing client # interface (via Oslo's RPC messaging) or else allow for # synchronously invoking worker processes in support of a # standalone single-node mode for Barbican. self._client = queue.get_client() or _DirectTaskInvokerClient() def process_type_order(self, order_id, project_id, request_id): """Process TypeOrder.""" self._cast('process_type_order', order_id=order_id, project_id=project_id, request_id=request_id) def check_certificate_status(self, order_id, project_id, request_id): """Check the status of a certificate order.""" self._cast('check_certificate_status', order_id=order_id, project_id=project_id, request_id=request_id) def _cast(self, name, **kwargs): """Asynchronous call handler. Barbican probably only needs casts. :param name: Method name to invoke. :param kwargs: Arguments for the method invocation. :return: """ return self._client.cast({}, name, **kwargs) def _call(self, name, **kwargs): """Synchronous call handler. Barbican probably *never* uses calls.""" return self._client.call({}, name, **kwargs) class _DirectTaskInvokerClient(object): """Allows for direct invocation of queue.server Tasks. This class supports a standalone single-node mode of operation for Barbican, whereby typically asynchronous requests to Barbican are handled synchronously. 
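    For example (a sketch of the standalone flow; order_id and the other
    arguments are hypothetical values)::

        client = TaskClient()  # queue.get_client() returned None
        client.process_type_order(order_id, project_id, request_id)
        # -> _DirectTaskInvokerClient.cast({}, 'process_type_order', ...)
        # -> server.Tasks().process_type_order({}, ...) runs inline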
""" def __init__(self): super(_DirectTaskInvokerClient, self).__init__() self._tasks = server.Tasks() def cast(self, context, method_name, **kwargs): try: getattr(self._tasks, method_name)(context, **kwargs) except Exception: LOG.exception( u._(">>>>> Task exception seen for synchronous task " "invocation, so handling exception to mimic " "asynchronous behavior.")) def call(self, context, method_name, **kwargs): raise ValueError("No support for call() client methods.") barbican-9.1.0.dev50/barbican/queue/keystone_listener.py0000664000175000017500000001525513616500636023433 0ustar sahidsahid00000000000000# Copyright (c) 2014 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ Server-side (i.e. worker side) Keystone notification related classes and logic. """ import oslo_messaging from oslo_service import service from barbican.common import utils from barbican.model import repositories from barbican import queue from barbican.tasks import keystone_consumer LOG = utils.getLogger(__name__) class NotificationTask(object): """Task which exposes the API for consuming priority based notifications. The Oslo notification framework delivers notifications based on priority to matching callback APIs as defined in its notification listener endpoint list. Currently from Keystone perspective, `info` API is sufficient as Keystone send notifications at `info` priority ONLY. Other priority level APIs (warn, error, critical, audit, debug) are not needed here. """ def __init__(self, conf): self.conf = conf repositories.setup_database_engine_and_factory() def info(self, ctxt, publisher_id, event_type, payload, metadata): """Receives notification at info level.""" return self.process_event(ctxt, publisher_id, event_type, payload, metadata) def process_event(self, ctxt, publisher_id, event_type, payload, metadata): """Process Keystone Event based on event_type and payload data. Parses notification data to identify if the event is related to delete project or not. In case of delete project event, it passes project_id to KeystoneEventConsumer logic for further processing. Barbican service is not interested in other events so in that case it just returns None as acknowledgment. Messaging server considers message is acknowledged when either return value is `oslo_messaging.NotificationResult.HANDLED` or None. In case of successful processing of notification, the returned value is `oslo_messaging.NotificationResult.HANDLED` In case of notification processing error, the value returned is oslo_messaging.NotificationResult.REQUEUE when transport supports this feature otherwise `oslo_messaging.NotificationResult.HANDLED` is returned. 
""" LOG.debug("Input keystone event publisher_id = %s", publisher_id) LOG.debug("Input keystone event payload = %s", payload) LOG.debug("Input keystone event type = %s", event_type) LOG.debug("Input keystone event metadata = %s", metadata) project_id = self._parse_payload_for_project_id(payload) resource_type, operation_type = self._parse_event_type(event_type) LOG.debug('Keystone Event: resource type={0}, operation type={1}, ' 'keystone id={2}'.format(resource_type, operation_type, project_id)) if (project_id and resource_type == 'project' and operation_type == 'deleted'): task = keystone_consumer.KeystoneEventConsumer() try: task.process(project_id=project_id, resource_type=resource_type, operation_type=operation_type) return oslo_messaging.NotificationResult.HANDLED except Exception: # No need to log message here as task process method has # already logged it # TODO(john-wood-w) This really should be retried on a # schedule and really only if the database is down, not # for any exception otherwise tasks will be re-queued # repeatedly. Revisit as part of the retry task work later. if self.conf.keystone_notifications.allow_requeue: return oslo_messaging.NotificationResult.REQUEUE else: return oslo_messaging.NotificationResult.HANDLED return None # in case event is not project delete def _parse_event_type(self, event_type): """Parses event type provided as part of notification. Parses to identify what operation is performed and on which Keystone resource. A few event type sample values are provided below:: identity.project.deleted identity.role.created identity.domain.updated identity.authenticate """ resource_type = None operation_type = None if event_type: type_list = event_type.split('.') # 2 is min. number of dot delimiters expected in event_type value. if len(type_list) > 2: resource_type = type_list[-2].lower() operation_type = type_list[-1].lower() return resource_type, operation_type def _parse_payload_for_project_id(self, payload_s): """Gets project resource identifier from payload Sample payload is provided below:: {'resource_info': u'2b99a94ad02741978e613fb52dd1f4cd'} """ if payload_s: return payload_s.get('resource_info') class MessageServer(NotificationTask, service.Service): """Server to retrieve messages from queue used by Keystone. This is used to send public notifications for openstack service consumption. This server is an Oslo notification server that exposes set of standard APIs for events consumption based on event priority. Some of messaging server configuration needs to match with Keystone deployment notification configuration e.g. exchange name, topic name """ def __init__(self, conf): pool_size = conf.keystone_notifications.thread_pool_size NotificationTask.__init__(self, conf) service.Service.__init__(self, threads=pool_size) self.target = queue.get_notification_target() self._msg_server = queue.get_notification_server(targets=[self.target], endpoints=[self]) def start(self): self._msg_server.start() super(MessageServer, self).start() def stop(self): super(MessageServer, self).stop() self._msg_server.stop() queue.cleanup() barbican-9.1.0.dev50/barbican/queue/__init__.py0000664000175000017500000000761013616500636021420 0ustar sahidsahid00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Queue objects for Barbican
"""
import oslo_messaging as messaging
from oslo_messaging.notify import dispatcher as notify_dispatcher
from oslo_messaging.notify import listener
from oslo_messaging.rpc import dispatcher

from barbican.common import config
from barbican.common import exception

# Keep this constant in one place in case it needs to be changed later
KS_NOTIFICATIONS_GRP_NAME = config.KS_NOTIFICATIONS_GRP_NAME

CONF = config.CONF

TRANSPORT = None
IS_SERVER_SIDE = True

ALLOWED_EXMODS = [
    exception.__name__,
]


def get_allowed_exmods():
    return ALLOWED_EXMODS


def init(conf, is_server_side=True):
    global TRANSPORT, IS_SERVER_SIDE
    exmods = get_allowed_exmods()
    IS_SERVER_SIDE = is_server_side
    TRANSPORT = messaging.get_rpc_transport(conf,
                                            allowed_remote_exmods=exmods)


def is_server_side():
    return IS_SERVER_SIDE


def cleanup():
    global TRANSPORT
    TRANSPORT.cleanup()
    TRANSPORT = None


def get_target():
    return messaging.Target(topic=CONF.queue.topic,
                            namespace=CONF.queue.namespace,
                            version=CONF.queue.version,
                            server=CONF.queue.server_name)


def get_client(target=None, version_cap=None, serializer=None):
    if not CONF.queue.enable:
        return None

    queue_target = target or get_target()
    return messaging.RPCClient(TRANSPORT,
                               target=queue_target,
                               version_cap=version_cap,
                               serializer=serializer)


def get_server(target, endpoints, serializer=None):
    access_policy = dispatcher.DefaultRPCAccessPolicy
    return messaging.get_rpc_server(TRANSPORT,
                                    target,
                                    endpoints,
                                    executor='eventlet',
                                    serializer=serializer,
                                    access_policy=access_policy)


def get_notification_target():
    conf_opts = getattr(CONF, KS_NOTIFICATIONS_GRP_NAME)
    return messaging.Target(exchange=conf_opts.control_exchange,
                            topic=conf_opts.topic,
                            version=conf_opts.version,
                            fanout=True)


def get_notification_server(targets, endpoints, serializer=None):
    """Retrieve a notification server.

    This notification server uses the same transport configuration as used
    by other barbican functionality, such as async order processing. The
    assumption is that the messaging infrastructure is going to be shared
    (the same) among the different barbican features.
    """
    allow_requeue = getattr(getattr(CONF, KS_NOTIFICATIONS_GRP_NAME),
                            'allow_requeue')
    pool_name = getattr(getattr(CONF, KS_NOTIFICATIONS_GRP_NAME),
                        'pool_name')
    TRANSPORT._require_driver_features(requeue=allow_requeue)
    dispatcher = notify_dispatcher.NotificationDispatcher(endpoints,
                                                          serializer)
    # We don't want a blocking executor, so use eventlet as the executor
    # choice.
    return listener.NotificationServer(TRANSPORT,
                                       targets,
                                       dispatcher,
                                       executor='eventlet',
                                       pool=pool_name,
                                       allow_requeue=allow_requeue)
barbican-9.1.0.dev50/barbican/queue/server.py0000664000175000017500000002225213616500636021166 0ustar sahidsahid00000000000000# Copyright (c) 2013-2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Server-side (i.e. worker side) classes and logic.
"""
import datetime
import functools

try:
    import newrelic.agent
    from newrelic.api import application
    newrelic_loaded = True
except ImportError:
    newrelic_loaded = False

from oslo_service import service

from barbican.common import utils
from barbican.model import models
from barbican.model import repositories
from barbican import queue
from barbican.tasks import common
from barbican.tasks import resources

if newrelic_loaded:
    newrelic.agent.initialize('/etc/newrelic/newrelic.ini')

LOG = utils.getLogger(__name__)

# Maps the common/shared RetryTasks (returned from lower-level business logic
# and plugin processing) to top-level RPC tasks in the Tasks class below.
MAP_RETRY_TASKS = {
    common.RetryTasks.INVOKE_CERT_STATUS_CHECK_TASK:
        'check_certificate_status'
}


def find_function_name(func, if_no_name=None):
    """Returns pretty-formatted function name."""
    return getattr(func, '__name__', if_no_name)


def retryable_order(fn):
    """Provides retry/scheduling support to Order-related tasks."""
    @functools.wraps(fn)
    def wrapper(method_self, *args, **kwargs):
        result = fn(method_self, *args, **kwargs)
        retry_rpc_method = schedule_order_retry_tasks(
            fn, result, *args, **kwargs)
        if retry_rpc_method:
            LOG.info("Scheduled RPC method for retry: '%s'",
                     retry_rpc_method)
        else:
            LOG.info("Task '%s' did not have to be retried",
                     find_function_name(fn, if_no_name='???'))
    return wrapper


def transactional(fn):
    """Provides request-scoped database transaction support to tasks."""
    @functools.wraps(fn)
    def wrapper(*args, **kwargs):
        fn_name = find_function_name(fn, if_no_name='???')

        if not queue.is_server_side():
            # Non-server mode directly invokes tasks.
            fn(*args, **kwargs)
            LOG.info("Completed worker task: '%s'", fn_name)
        else:
            # Manage session/transaction.
            try:
                fn(*args, **kwargs)
                repositories.commit()
                LOG.info("Completed worker task (post-commit): '%s'",
                         fn_name)
            except Exception:
                """NOTE: Wrapped functions must process with care!

                Exceptions that reach here will revert the entire
                transaction, including any updates made to entities such as
                setting error codes and error messages.
                """
                LOG.exception("Problem seen processing worker task: '%s'",
                              fn_name)
                repositories.rollback()
            finally:
                repositories.clear()
    return wrapper


def monitored(fn):  # pragma: no cover
    """Provides monitoring capabilities for task methods."""
    # TODO(jvrbanac): Figure out how we should test third-party monitoring

    # Support NewRelic Monitoring
    if newrelic_loaded:
        # Create a NewRelic app instance
        app = application.application_instance()

        def newrelic_wrapper(*args, **kwargs):
            # Resolve the real name, since decorators wrap the method
            if len(args) > 0 and hasattr(args[0], fn.__name__):
                cls = type(args[0])
                task_name = '{0}:{1}.{2}'.format(
                    cls.__module__,
                    cls.__name__,
                    fn.__name__
                )
            else:
                task_name = newrelic.agent.callable_name(fn)

            # Execute task under a monitored context
            with newrelic.agent.BackgroundTask(app, task_name):
                fn(*args, **kwargs)

        return newrelic_wrapper
    return fn


def schedule_order_retry_tasks(
        invoked_task, retry_result, context, *args, **kwargs):
    """Schedules an Order-related task for retry.
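    For example (a sketch): if a task returns a result whose retry_task is
    common.RetryTasks.INVOKE_SAME_TASK with retry_msec == 60000, an
    OrderRetryTask row is stored so that the same RPC method is re-invoked
    for that order roughly one minute later.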
:param invoked_task: The RPC method that was just invoked. :param retry_result: A :class:`FollowOnProcessingStatusDTO` if follow-on processing (such as retrying this or another task) is required, otherwise None indicates no such follow-on processing is required. :param context: Queue context, not used. :param order_id: ID of the Order entity the task to retry is for. :param args: List of arguments passed in to the just-invoked task. :param kwargs: Dict of arguments passed in to the just-invoked task. :return: Returns the RPC task method scheduled for a retry, None if no RPC task was scheduled. """ retry_rpc_method = None order_id = kwargs.get('order_id') if not retry_result or not order_id: pass elif common.RetryTasks.INVOKE_SAME_TASK == retry_result.retry_task: if invoked_task: retry_rpc_method = find_function_name(invoked_task) else: retry_rpc_method = MAP_RETRY_TASKS.get(retry_result.retry_task) if retry_rpc_method: LOG.debug( 'Scheduling RPC method for retry: {0}'.format(retry_rpc_method)) date_to_retry_at = datetime.datetime.utcnow() + datetime.timedelta( milliseconds=retry_result.retry_msec) retry_model = models.OrderRetryTask() retry_model.order_id = order_id retry_model.retry_task = retry_rpc_method retry_model.retry_at = date_to_retry_at retry_model.retry_args = args retry_model.retry_kwargs = kwargs retry_model.retry_count = 0 retry_repo = repositories.get_order_retry_tasks_repository() retry_repo.create_from(retry_model) return retry_rpc_method class Tasks(object): """Tasks that can be invoked asynchronously in Barbican. Only place task methods and implementations on this class, as they can be called directly from the client side for non-asynchronous standalone single-node operation. If a new method is added that can be retried, please also add its method name to MAP_RETRY_TASKS above. The TaskServer class below extends this class to implement a worker-side server utilizing Oslo messaging's RPC server. This RPC server can invoke methods on itself, which include the methods in this class. """ @monitored @transactional @retryable_order def process_type_order(self, context, order_id, project_id, request_id): """Process TypeOrder.""" message = "Processing type order: order ID is '%(order)s' and " \ "request ID is '%(request)s'" LOG.info(message, {'order': order_id, 'request': request_id}) return resources.BeginTypeOrder().process_and_suppress_exceptions( order_id, project_id) @monitored @transactional @retryable_order def check_certificate_status(self, context, order_id, project_id, request_id): """Check the status of a certificate order.""" message = "Processing check certificate status on order: " \ "order ID is '%(order)s' and request ID is '%(request)s'" LOG.info(message, {'order': order_id, 'request': request_id}) check_cert_order = resources.CheckCertificateStatusOrder() return check_cert_order.process_and_suppress_exceptions( order_id, project_id) class TaskServer(Tasks, service.Service): """Server to process asynchronous tasking from Barbican API nodes. This server is an Oslo service that exposes task methods that can be invoked from the Barbican API nodes. It delegates to an Oslo RPC messaging server to invoke methods asynchronously on this class. Since this class also extends the Tasks class above, its task-based methods are hence available to the RPC messaging server. 
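    For example (a sketch of the retry round-trip): the PeriodicServer in
    retry_scheduler.py reads a due OrderRetryTask row and calls the matching
    TaskClient method, which casts an RPC message; this server then
    dispatches that message back to the corresponding Tasks method above.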
""" def __init__(self): super(TaskServer, self).__init__() # Setting up db engine to avoid lazy initialization repositories.setup_database_engine_and_factory() # This property must be defined for the 'endpoints' specified below, # as the oslo_messaging RPC server will ask for it. self.target = queue.get_target() # Create an oslo RPC server, that calls back on to this class # instance to invoke tasks, such as 'process_order()' on the # extended Tasks class above. self._server = queue.get_server(target=self.target, endpoints=[self]) def start(self): LOG.info("Starting the TaskServer") self._server.start() super(TaskServer, self).start() def stop(self): LOG.info("Halting the TaskServer") super(TaskServer, self).stop() self._server.stop() barbican-9.1.0.dev50/barbican/api/0000775000175000017500000000000013616500640016723 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/api/app.wsgi0000664000175000017500000000164013616500636020404 0ustar sahidsahid00000000000000# -*- mode: python -*- # # Copyright 2016 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Use this file for deploying the API under mod_wsgi. See http://pecan.readthedocs.org/en/latest/deployment.html for details. NOTE(mtreinish): This wsgi script is deprecated since the wsgi app is now exposed as an entrypoint via barbican-wsgi-api """ from barbican.api import app application = app.get_api_wsgi_script() barbican-9.1.0.dev50/barbican/api/app.py0000664000175000017500000000676213616500636020075 0ustar sahidsahid00000000000000# Copyright (c) 2013-2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
""" API application handler for Barbican """ import os from paste import deploy import pecan try: import newrelic.agent newrelic_loaded = True except ImportError: newrelic_loaded = False from oslo_log import log from barbican.api.controllers import versions from barbican.api import hooks from barbican.common import config from barbican.model import repositories from barbican import queue CONF = config.CONF if newrelic_loaded: newrelic.agent.initialize( os.environ.get('NEW_RELIC_CONFIG_FILE', '/etc/newrelic/newrelic.ini'), os.environ.get('NEW_RELIC_ENVIRONMENT') ) def build_wsgi_app(controller=None, transactional=False): """WSGI application creation helper :param controller: Overrides default application controller :param transactional: Adds transaction hook for all requests """ request_hooks = [hooks.JSONErrorHook()] if transactional: request_hooks.append(hooks.BarbicanTransactionHook()) if newrelic_loaded: request_hooks.insert(0, hooks.NewRelicHook()) # Create WSGI app wsgi_app = pecan.Pecan( controller or versions.AVAILABLE_VERSIONS[versions.DEFAULT_VERSION](), hooks=request_hooks, force_canonical=False ) # clear the session created in controller initialization 60 repositories.clear() return wsgi_app def main_app(func): def _wrapper(global_config, **local_conf): # Queuing initialization queue.init(CONF, is_server_side=False) # Configure oslo logging and configuration services. log.setup(CONF, 'barbican') LOG = log.getLogger(__name__) config.setup_remote_pydev_debug() # Initializing the database engine and session factory before the app # starts ensures we don't lose requests due to lazy initialization of # db connections. try: repositories.setup_database_engine_and_factory( initialize_secret_stores=True ) repositories.commit() except Exception: LOG.exception('Failed to sync secret_stores table.') repositories.rollback() raise wsgi_app = func(global_config, **local_conf) if newrelic_loaded: wsgi_app = newrelic.agent.WSGIApplicationWrapper(wsgi_app) LOG.info('Barbican app created and initialized') return wsgi_app return _wrapper @main_app def create_main_app(global_config, **local_conf): """uWSGI factory method for the Barbican-API application.""" # Setup app with transactional hook enabled return build_wsgi_app(versions.V1Controller(), transactional=True) def create_version_app(global_config, **local_conf): wsgi_app = pecan.make_app(versions.VersionsController()) return wsgi_app def get_api_wsgi_script(): conf = '/etc/barbican/barbican-api-paste.ini' application = deploy.loadapp('config:%s' % conf) return application barbican-9.1.0.dev50/barbican/api/__init__.py0000664000175000017500000001073513616500636021047 0ustar sahidsahid00000000000000# Copyright (c) 2013-2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
""" API handler for Barbican """ import pkgutil import six from oslo_policy import policy from oslo_serialization import jsonutils as json import pecan from barbican.common import config from barbican.common import exception from barbican.common import utils from barbican import i18n as u LOG = utils.getLogger(__name__) CONF = config.CONF class ApiResource(object): """Base class for API resources.""" pass def load_body(req, resp=None, validator=None): """Helper function for loading an HTTP request body from JSON. This body is placed into into a Python dictionary. :param req: The HTTP request instance to load the body from. :param resp: The HTTP response instance. :param validator: The JSON validator to enforce. :return: A dict of values from the JSON request. """ try: body = req.body_file.read(CONF.max_allowed_request_size_in_bytes) req.body_file.seek(0) except IOError: LOG.exception("Problem reading request JSON stream.") pecan.abort(500, u._('Read Error')) try: # TODO(jwood): Investigate how to get UTF8 format via openstack # jsonutils: # parsed_body = json.loads(raw_json, 'utf-8') parsed_body = json.loads(body) strip_whitespace(parsed_body) except ValueError: LOG.exception("Problem loading request JSON.") pecan.abort(400, u._('Malformed JSON')) if validator: try: parsed_body = validator.validate(parsed_body) except exception.BarbicanHTTPException as e: LOG.exception(six.text_type(e)) pecan.abort(e.status_code, e.client_message) return parsed_body def generate_safe_exception_message(operation_name, excep): """Generates an exception message that is 'safe' for clients to consume. A 'safe' message is one that doesn't contain sensitive information that could be used for (say) cryptographic attacks on Barbican. That generally means that em.CryptoXxxx should be captured here and with a simple message created on behalf of them. :param operation_name: Name of attempted operation, with a 'Verb noun' format (e.g. 'Create Secret). :param excep: The Exception instance that halted the operation. :return: (status, message) where 'status' is one of the webob.exc.HTTP_xxx codes, and 'message' is the sanitized message associated with the error. """ message = None reason = None status = 500 try: raise excep except policy.PolicyNotAuthorized: message = u._( '{operation} attempt not allowed - ' 'please review your ' 'user/project privileges').format(operation=operation_name) status = 403 except exception.BarbicanHTTPException as http_exception: reason = http_exception.client_message status = http_exception.status_code except Exception: message = u._('{operation} failure seen - please contact site ' 'administrator.').format(operation=operation_name) if reason: message = u._('{operation} issue seen - {reason}.').format( operation=operation_name, reason=reason) return status, message @pkgutil.simplegeneric def get_items(obj): """This is used to get items from either a list or a dictionary. 
While false generator is need to process scalar object """ while False: yield None @get_items.register(dict) def _json_object(obj): return obj.items() @get_items.register(list) def _json_array(obj): return enumerate(obj) def strip_whitespace(json_data): """Recursively trim values from the object passed in using get_items().""" for key, value in get_items(json_data): if hasattr(value, 'strip'): json_data[key] = value.strip() else: strip_whitespace(value) barbican-9.1.0.dev50/barbican/api/controllers/0000775000175000017500000000000013616500640021271 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/api/controllers/acls.py0000664000175000017500000003660213616500636022601 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import pecan import six from barbican import api from barbican.api import controllers from barbican.common import hrefs from barbican.common import utils from barbican.common import validators from barbican import i18n as u from barbican.model import models from barbican.model import repositories as repo LOG = utils.getLogger(__name__) def _convert_acl_to_response_format(acl, acls_dict): fields = acl.to_dict_fields() operation = fields['operation'] acl_data = {} # dict for each acl operation data acl_data['project-access'] = fields['project_access'] acl_data['users'] = fields.get('users', []) acl_data['created'] = fields['created'] acl_data['updated'] = fields['updated'] acls_dict[operation] = acl_data DEFAULT_ACL = {'read': {'project-access': True}} class SecretACLsController(controllers.ACLMixin): """Handles SecretACL requests by a given secret id.""" def __init__(self, secret): self.secret = secret self.secret_project_id = self.secret.project.external_id self.acl_repo = repo.get_secret_acl_repository() self.validator = validators.ACLValidator() def get_acl_tuple(self, req, **kwargs): d = {'project_id': self.secret_project_id, 'creator_id': self.secret.creator_id} return 'secret', d @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('SecretACL(s) retrieval')) @controllers.enforce_rbac('secret_acls:get') def on_get(self, external_project_id, **kw): LOG.debug('Start secret ACL on_get ' 'for secret-ID %s:', self.secret.id) return self._return_acl_list_response(self.secret.id) @index.when(method='PATCH', template='json') @controllers.handle_exceptions(u._('SecretACL(s) Update')) @controllers.enforce_rbac('secret_acls:put_patch') @controllers.enforce_content_types(['application/json']) def on_patch(self, external_project_id, **kwargs): """Handles update of existing secret acl requests. At least one secret ACL needs to exist for update to proceed. In update, multiple operation ACL payload can be specified as mentioned in sample below. A specific ACL can be updated by its own id via SecretACLController patch request. 
{ "read":{ "users":[ "5ecb18f341894e94baca9e8c7b6a824a", "20b63d71f90848cf827ee48074f213b7", "c7753f8da8dc4fbea75730ab0b6e0ef4" ] }, "write":{ "users":[ "5ecb18f341894e94baca9e8c7b6a824a" ], "project-access":true } } """ data = api.load_body(pecan.request, validator=self.validator) LOG.debug('Start on_patch...%s', data) existing_acls_map = {acl.operation: acl for acl in self.secret.secret_acls} for operation in six.moves.filter(lambda x: data.get(x), validators.ACL_OPERATIONS): project_access = data[operation].get('project-access') user_ids = data[operation].get('users') s_acl = None if operation in existing_acls_map: # update if matching acl exists s_acl = existing_acls_map[operation] if project_access is not None: s_acl.project_access = project_access else: s_acl = models.SecretACL(self.secret.id, operation=operation, project_access=project_access) self.acl_repo.create_or_replace_from(self.secret, secret_acl=s_acl, user_ids=user_ids) acl_ref = '{0}/acl'.format( hrefs.convert_secret_to_href(self.secret.id)) return {'acl_ref': acl_ref} @index.when(method='PUT', template='json') @controllers.handle_exceptions(u._('SecretACL(s) Update')) @controllers.enforce_rbac('secret_acls:put_patch') @controllers.enforce_content_types(['application/json']) def on_put(self, external_project_id, **kwargs): """Handles update of existing secret acl requests. Replaces existing secret ACL(s) with input ACL(s) data. Existing ACL operation not specified in input are removed as part of update. For missing project-access in ACL, true is used as default. In update, multiple operation ACL payload can be specified as mentioned in sample below. A specific ACL can be updated by its own id via SecretACLController patch request. { "read":{ "users":[ "5ecb18f341894e94baca9e8c7b6a824a", "20b63d71f90848cf827ee48074f213b7", "c7753f8da8dc4fbea75730ab0b6e0ef4" ] }, "write":{ "users":[ "5ecb18f341894e94baca9e8c7b6a824a" ], "project-access":false } } Every secret, by default, has an implicit ACL in case client has not defined an explicit ACL. That default ACL definition, DEFAULT_ACL, signifies that a secret by default has project based access i.e. client with necessary roles on secret project can access the secret. That's why when ACL is added to a secret, it always returns 200 (and not 201) indicating existence of implicit ACL on a secret. """ data = api.load_body(pecan.request, validator=self.validator) LOG.debug('Start on_put...%s', data) existing_acls_map = {acl.operation: acl for acl in self.secret.secret_acls} for operation in six.moves.filter(lambda x: data.get(x), validators.ACL_OPERATIONS): project_access = data[operation].get('project-access', True) user_ids = data[operation].get('users', []) s_acl = None if operation in existing_acls_map: # update if matching acl exists s_acl = existing_acls_map.pop(operation) s_acl.project_access = project_access else: s_acl = models.SecretACL(self.secret.id, operation=operation, project_access=project_access) self.acl_repo.create_or_replace_from(self.secret, secret_acl=s_acl, user_ids=user_ids) # delete remaining existing acls as they are not present in input. 
for acl in existing_acls_map.values(): self.acl_repo.delete_entity_by_id(entity_id=acl.id, external_project_id=None) acl_ref = '{0}/acl'.format( hrefs.convert_secret_to_href(self.secret.id)) return {'acl_ref': acl_ref} @index.when(method='DELETE', template='json') @controllers.handle_exceptions(u._('SecretACL(s) deletion')) @controllers.enforce_rbac('secret_acls:delete') def on_delete(self, external_project_id, **kwargs): count = self.acl_repo.get_count(self.secret.id) if count > 0: self.acl_repo.delete_acls_for_secret(self.secret) def _return_acl_list_response(self, secret_id): result = self.acl_repo.get_by_secret_id(secret_id) acls_data = {} if result: for acl in result: _convert_acl_to_response_format(acl, acls_data) if not acls_data: acls_data = DEFAULT_ACL.copy() return acls_data class ContainerACLsController(controllers.ACLMixin): """Handles ContainerACL requests by a given container id.""" def __init__(self, container): self.container = container self.container_id = container.id self.acl_repo = repo.get_container_acl_repository() self.container_repo = repo.get_container_repository() self.validator = validators.ACLValidator() self.container_project_id = container.project.external_id def get_acl_tuple(self, req, **kwargs): d = {'project_id': self.container_project_id, 'creator_id': self.container.creator_id} return 'container', d @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('ContainerACL(s) retrieval')) @controllers.enforce_rbac('container_acls:get') def on_get(self, external_project_id, **kw): LOG.debug('Start container ACL on_get ' 'for container-ID %s:', self.container_id) return self._return_acl_list_response(self.container.id) @index.when(method='PATCH', template='json') @controllers.handle_exceptions(u._('ContainerACL(s) Update')) @controllers.enforce_rbac('container_acls:put_patch') @controllers.enforce_content_types(['application/json']) def on_patch(self, external_project_id, **kwargs): """Handles update of existing container acl requests. At least one container ACL needs to exist for update to proceed. In update, multiple operation ACL payload can be specified as mentioned in sample below. A specific ACL can be updated by its own id via ContainerACLController patch request. 
{ "read":{ "users":[ "5ecb18f341894e94baca9e8c7b6a824a", "20b63d71f90848cf827ee48074f213b7", "c7753f8da8dc4fbea75730ab0b6e0ef4" ] }, "write":{ "users":[ "5ecb18f341894e94baca9e8c7b6a824a" ], "project-access":false } } """ data = api.load_body(pecan.request, validator=self.validator) LOG.debug('Start ContainerACLsController on_patch...%s', data) existing_acls_map = {acl.operation: acl for acl in self.container.container_acls} for operation in six.moves.filter(lambda x: data.get(x), validators.ACL_OPERATIONS): project_access = data[operation].get('project-access') user_ids = data[operation].get('users') if operation in existing_acls_map: # update if matching acl exists c_acl = existing_acls_map[operation] if project_access is not None: c_acl.project_access = project_access else: c_acl = models.ContainerACL(self.container.id, operation=operation, project_access=project_access) self.acl_repo.create_or_replace_from(self.container, container_acl=c_acl, user_ids=user_ids) acl_ref = '{0}/acl'.format( hrefs.convert_container_to_href(self.container.id)) return {'acl_ref': acl_ref} @index.when(method='PUT', template='json') @controllers.handle_exceptions(u._('ContainerACL(s) Update')) @controllers.enforce_rbac('container_acls:put_patch') @controllers.enforce_content_types(['application/json']) def on_put(self, external_project_id, **kwargs): """Handles update of existing container acl requests. Replaces existing container ACL(s) with input ACL(s) data. Existing ACL operation not specified in input are removed as part of update. For missing project-access in ACL, true is used as default. In update, multiple operation ACL payload can be specified as mentioned in sample below. A specific ACL can be updated by its own id via ContainerACLController patch request. { "read":{ "users":[ "5ecb18f341894e94baca9e8c7b6a824a", "20b63d71f90848cf827ee48074f213b7", "c7753f8da8dc4fbea75730ab0b6e0ef4" ] }, "write":{ "users":[ "5ecb18f341894e94baca9e8c7b6a824a" ], "project-access":false } } Every container, by default, has an implicit ACL in case client has not defined an explicit ACL. That default ACL definition, DEFAULT_ACL, signifies that a container by default has project based access i.e. client with necessary roles on container project can access the container. That's why when ACL is added to a container, it always returns 200 (and not 201) indicating existence of implicit ACL on a container. """ data = api.load_body(pecan.request, validator=self.validator) LOG.debug('Start ContainerACLsController on_put...%s', data) existing_acls_map = {acl.operation: acl for acl in self.container.container_acls} for operation in six.moves.filter(lambda x: data.get(x), validators.ACL_OPERATIONS): project_access = data[operation].get('project-access', True) user_ids = data[operation].get('users', []) if operation in existing_acls_map: # update if matching acl exists c_acl = existing_acls_map.pop(operation) c_acl.project_access = project_access else: c_acl = models.ContainerACL(self.container.id, operation=operation, project_access=project_access) self.acl_repo.create_or_replace_from(self.container, container_acl=c_acl, user_ids=user_ids) # delete remaining existing acls as they are not present in input. 
for acl in existing_acls_map.values(): self.acl_repo.delete_entity_by_id(entity_id=acl.id, external_project_id=None) acl_ref = '{0}/acl'.format( hrefs.convert_container_to_href(self.container.id)) return {'acl_ref': acl_ref} @index.when(method='DELETE', template='json') @controllers.handle_exceptions(u._('ContainerACL(s) deletion')) @controllers.enforce_rbac('container_acls:delete') def on_delete(self, external_project_id, **kwargs): count = self.acl_repo.get_count(self.container_id) if count > 0: self.acl_repo.delete_acls_for_container(self.container) def _return_acl_list_response(self, container_id): result = self.acl_repo.get_by_container_id(container_id) acls_data = {} if result: for acl in result: _convert_acl_to_response_format(acl, acls_data) if not acls_data: acls_data = DEFAULT_ACL.copy() return acls_data barbican-9.1.0.dev50/barbican/api/controllers/secrets.py0000664000175000017500000004422013616500636023322 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_utils import timeutils import pecan from six.moves.urllib import parse from barbican import api from barbican.api import controllers from barbican.api.controllers import acls from barbican.api.controllers import consumers from barbican.api.controllers import secretmeta from barbican.common import accept from barbican.common import exception from barbican.common import hrefs from barbican.common import quota from barbican.common import resources as res from barbican.common import utils from barbican.common import validators from barbican import i18n as u from barbican.model import models from barbican.model import repositories as repo from barbican.plugin import resources as plugin from barbican.plugin import util as putil LOG = utils.getLogger(__name__) def _secret_not_found(): """Throw exception indicating secret not found.""" pecan.abort(404, u._('Secret not found.')) def _invalid_secret_id(): """Throw exception indicating secret id is invalid.""" pecan.abort(404, u._('Not Found. Provided secret id is invalid.')) def _secret_payload_not_found(): """Throw exception indicating secret's payload is not found.""" pecan.abort(404, u._('Not Found. Sorry but your secret has no payload.')) def _secret_already_has_data(): """Throw exception that the secret already has data.""" pecan.abort(409, u._("Secret already has data, cannot modify it.")) def _bad_query_string_parameters(): pecan.abort(400, u._("URI provided invalid query string parameters.")) def _request_has_twsk_but_no_transport_key_id(): """Throw exception for bad wrapping parameters. Throw exception if transport key wrapped session key has been provided, but the transport key id has not. 
""" pecan.abort(400, u._('Transport key wrapped session key has been ' 'provided to wrap secrets for retrieval, but the ' 'transport key id has not been provided.')) class SecretController(controllers.ACLMixin): """Handles Secret retrieval and deletion requests.""" def __init__(self, secret): LOG.debug('=== Creating SecretController ===') self.secret = secret self.consumers = consumers.SecretConsumersController(secret.id) self.consumer_repo = repo.get_secret_consumer_repository() self.transport_key_repo = repo.get_transport_key_repository() def get_acl_tuple(self, req, **kwargs): d = self.get_acl_dict_for_user(req, self.secret.secret_acls) d['project_id'] = self.secret.project.external_id d['creator_id'] = self.secret.creator_id return 'secret', d @pecan.expose() def _lookup(self, sub_resource, *remainder): if sub_resource == 'acl': return acls.SecretACLsController(self.secret), remainder elif sub_resource == 'metadata': if len(remainder) == 0 or remainder == ('',): return secretmeta.SecretMetadataController(self.secret), \ remainder else: request_method = pecan.request.method allowed_methods = ['GET', 'PUT', 'DELETE'] if request_method in allowed_methods: return secretmeta.SecretMetadatumController(self.secret), \ remainder else: # methods cannot be handled at controller level pecan.abort(405) else: # only 'acl' and 'metadata' as sub-resource is supported pecan.abort(404) @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET') @utils.allow_all_content_types @controllers.handle_exceptions(u._('Secret retrieval')) @controllers.enforce_rbac('secret:get') def on_get(self, external_project_id, **kwargs): if controllers.is_json_request_accept(pecan.request): resp = self._on_get_secret_metadata(self.secret, **kwargs) LOG.info('Retrieved secret metadata for project: %s', external_project_id) return resp else: LOG.warning('Decrypted secret %s requested using deprecated ' 'API call.', self.secret.id) return self._on_get_secret_payload(self.secret, external_project_id, **kwargs) def _on_get_secret_metadata(self, secret, **kwargs): """GET Metadata-only for a secret.""" pecan.override_template('json', 'application/json') secret_fields = putil.mime_types.augment_fields_with_content_types( secret) transport_key_id = self._get_transport_key_id_if_needed( kwargs.get('transport_key_needed'), secret) if transport_key_id: secret_fields['transport_key_id'] = transport_key_id return hrefs.convert_to_hrefs(secret_fields) def _get_transport_key_id_if_needed(self, transport_key_needed, secret): if transport_key_needed and transport_key_needed.lower() == 'true': return plugin.get_transport_key_id_for_retrieval(secret) return None def _on_get_secret_payload(self, secret, external_project_id, **kwargs): """GET actual payload containing the secret.""" # With ACL support, the user token project does not have to be same as # project associated with secret. The lookup project_id needs to be # derived from the secret's data considering authorization is already # done. 
external_project_id = secret.project.external_id project = res.get_or_create_project(external_project_id) # default to application/octet-stream if there is no Accept header if (type(pecan.request.accept) is accept.NoHeaderType or not pecan.request.accept.header_value): accept_header = 'application/octet-stream' else: accept_header = pecan.request.accept.header_value pecan.override_template('', accept_header) # check if payload exists before proceeding if not secret.encrypted_data and not secret.secret_store_metadata: _secret_payload_not_found() twsk = kwargs.get('trans_wrapped_session_key', None) transport_key = None if twsk: transport_key = self._get_transport_key( kwargs.get('transport_key_id', None)) return plugin.get_secret(accept_header, secret, project, twsk, transport_key) def _get_transport_key(self, transport_key_id): if transport_key_id is None: _request_has_twsk_but_no_transport_key_id() transport_key_model = self.transport_key_repo.get( entity_id=transport_key_id, suppress_exception=True) return transport_key_model.transport_key @pecan.expose() @utils.allow_all_content_types @controllers.handle_exceptions(u._('Secret payload retrieval')) @controllers.enforce_rbac('secret:decrypt') def payload(self, external_project_id, **kwargs): if pecan.request.method != 'GET': pecan.abort(405) resp = self._on_get_secret_payload(self.secret, external_project_id, **kwargs) LOG.info('Retrieved secret payload for project: %s', external_project_id) return resp @index.when(method='PUT') @utils.allow_all_content_types @controllers.handle_exceptions(u._('Secret update')) @controllers.enforce_rbac('secret:put') @controllers.enforce_content_types(['application/octet-stream', 'text/plain']) def on_put(self, external_project_id, **kwargs): if (not pecan.request.content_type or pecan.request.content_type == 'application/json'): pecan.abort( 415, u._("Content-Type of '{content_type}' is not supported for " "PUT.").format(content_type=pecan.request.content_type) ) transport_key_id = kwargs.get('transport_key_id') payload = pecan.request.body if not payload: raise exception.NoDataToProcess() if validators.secret_too_big(payload): raise exception.LimitExceeded() if self.secret.encrypted_data or self.secret.secret_store_metadata: _secret_already_has_data() project_model = res.get_or_create_project(external_project_id) content_type = pecan.request.content_type content_encoding = pecan.request.headers.get('Content-Encoding') plugin.store_secret( unencrypted_raw=payload, content_type_raw=content_type, content_encoding=content_encoding, secret_model=self.secret, project_model=project_model, transport_key_id=transport_key_id) LOG.info('Updated secret for project: %s', external_project_id) @index.when(method='DELETE') @utils.allow_all_content_types @controllers.handle_exceptions(u._('Secret deletion')) @controllers.enforce_rbac('secret:delete') def on_delete(self, external_project_id, **kwargs): secret_consumers = self.consumer_repo.get_by_secret_id( self.secret.id, suppress_exception=True ) plugin.delete_secret(self.secret, external_project_id) LOG.info('Deleted secret for project: %s', external_project_id) for consumer in secret_consumers[0]: try: self.consumer_repo.delete_entity_by_id( consumer.id, external_project_id) except exception.NotFound: # nosec pass class SecretsController(controllers.ACLMixin): """Handles Secret creation requests.""" def __init__(self): LOG.debug('Creating SecretsController') self.validator = validators.NewSecretValidator() self.secret_repo = repo.get_secret_repository() 
        self.quota_enforcer = quota.QuotaEnforcer('secrets', self.secret_repo)

    def _is_valid_date_filter(self, date_filter):
        filters = date_filter.split(',')
        sorted_filters = dict()
        try:
            for filter in filters:
                if filter.startswith('gt:'):
                    if sorted_filters.get('gt') or sorted_filters.get('gte'):
                        return False
                    sorted_filters['gt'] = timeutils.parse_isotime(filter[3:])
                elif filter.startswith('gte:'):
                    if sorted_filters.get('gt') or sorted_filters.get(
                            'gte') or sorted_filters.get('eq'):
                        return False
                    sorted_filters['gte'] = timeutils.parse_isotime(filter[4:])
                elif filter.startswith('lt:'):
                    if sorted_filters.get('lt') or sorted_filters.get('lte'):
                        return False
                    sorted_filters['lt'] = timeutils.parse_isotime(filter[3:])
                elif filter.startswith('lte:'):
                    if sorted_filters.get('lt') or sorted_filters.get(
                            'lte') or sorted_filters.get('eq'):
                        return False
                    sorted_filters['lte'] = timeutils.parse_isotime(filter[4:])
                elif sorted_filters.get('eq') or sorted_filters.get(
                        'gte') or sorted_filters.get('lte'):
                    return False
                else:
                    sorted_filters['eq'] = timeutils.parse_isotime(filter)
        except ValueError:
            return False
        return True

    def _is_valid_sorting(self, sorting):
        allowed_keys = ['algorithm', 'bit_length', 'created', 'expiration',
                        'mode', 'name', 'secret_type', 'status', 'updated']
        allowed_directions = ['asc', 'desc']
        sorted_keys = dict()
        for sort in sorting.split(','):
            if ':' in sort:
                try:
                    key, direction = sort.split(':')
                except ValueError:
                    return False
            else:
                key, direction = sort, 'asc'
            if key not in allowed_keys or direction not in allowed_directions:
                return False
            if sorted_keys.get(key):
                return False
            else:
                sorted_keys[key] = direction
        return True

    @pecan.expose()
    def _lookup(self, secret_id, *remainder):
        # NOTE(jaosorior): It's worth noting that even though this section
        # actually does a lookup in the database regardless of the RBAC policy
        # check, the execution only gets here if authentication of the user
        # was previously successful.

        if not utils.validate_id_is_uuid(secret_id):
            _invalid_secret_id()
        secret = self.secret_repo.get_secret_by_id(
            entity_id=secret_id, suppress_exception=True)
        if not secret:
            _secret_not_found()

        return SecretController(secret), remainder

    @pecan.expose(generic=True)
    def index(self, **kwargs):
        pecan.abort(405)  # HTTP 405 Method Not Allowed as default

    @index.when(method='GET', template='json')
    @controllers.handle_exceptions(u._('Secret(s) retrieval'))
    @controllers.enforce_rbac('secrets:get')
    def on_get(self, external_project_id, **kw):
        def secret_fields(field):
            return putil.mime_types.augment_fields_with_content_types(field)

        LOG.debug('Start secrets on_get '
                  'for project-ID %s:', external_project_id)

        name = kw.get('name', '')
        if name:
            name = parse.unquote_plus(name)

        bits = kw.get('bits', 0)
        try:
            bits = int(bits)
        except ValueError:
            # as per Github issue 171, if bits is invalid then
            # the default should be used.
            bits = 0

        for date_filter in 'created', 'updated', 'expiration':
            if kw.get(date_filter) and not self._is_valid_date_filter(
                    kw.get(date_filter)):
                _bad_query_string_parameters()
        if kw.get('sort') and not self._is_valid_sorting(kw.get('sort')):
            _bad_query_string_parameters()

        ctxt = controllers._get_barbican_context(pecan.request)
        user_id = None
        if ctxt:
            user_id = ctxt.user

        result = self.secret_repo.get_secret_list(
            external_project_id,
            offset_arg=kw.get('offset', 0),
            limit_arg=kw.get('limit'),
            name=name,
            alg=kw.get('alg'),
            mode=kw.get('mode'),
            bits=bits,
            secret_type=kw.get('secret_type'),
            suppress_exception=True,
            acl_only=kw.get('acl_only'),
            user_id=user_id,
            created=kw.get('created'),
            updated=kw.get('updated'),
            expiration=kw.get('expiration'),
            sort=kw.get('sort')
        )

        secrets, offset, limit, total = result

        if not secrets:
            secrets_resp_overall = {'secrets': [], 'total': total}
        else:
            secrets_resp = [
                hrefs.convert_to_hrefs(secret_fields(s)) for s in secrets
            ]
            secrets_resp_overall = hrefs.add_nav_hrefs(
                'secrets', offset, limit, total,
                {'secrets': secrets_resp}
            )
            secrets_resp_overall.update({'total': total})

        LOG.info('Retrieved secret list for project: %s',
                 external_project_id)
        return secrets_resp_overall

    @index.when(method='POST', template='json')
    @controllers.handle_exceptions(u._('Secret creation'))
    @controllers.enforce_rbac('secrets:post')
    @controllers.enforce_content_types(['application/json'])
    def on_post(self, external_project_id, **kwargs):
        LOG.debug('Start on_post for project-ID %s:...', external_project_id)

        data = api.load_body(pecan.request, validator=self.validator)
        project = res.get_or_create_project(external_project_id)
        self.quota_enforcer.enforce(project)

        transport_key_needed = data.get('transport_key_needed',
                                        'false').lower() == 'true'
        ctxt = controllers._get_barbican_context(pecan.request)
        if ctxt:  # in authenticated pipeline case, always use auth token user
            data['creator_id'] = ctxt.user

        secret_model = models.Secret(data)

        new_secret, transport_key_model = plugin.store_secret(
            unencrypted_raw=data.get('payload'),
            content_type_raw=data.get('payload_content_type',
                                      'application/octet-stream'),
            content_encoding=data.get('payload_content_encoding'),
            secret_model=secret_model,
            project_model=project,
            transport_key_needed=transport_key_needed,
            transport_key_id=data.get('transport_key_id'))

        url = hrefs.convert_secret_to_href(new_secret.id)
        LOG.debug('URI to secret is %s', url)

        pecan.response.status = 201
        pecan.response.headers['Location'] = url

        LOG.info('Created a secret for project: %s', external_project_id)
        if transport_key_model is not None:
            tkey_url = hrefs.convert_transport_key_to_href(
                transport_key_model.id)
            return {'secret_ref': url, 'transport_key_ref': tkey_url}
        else:
            return {'secret_ref': url}
barbican-9.1.0.dev50/barbican/api/controllers/secretstores.py0000664000175000017500000001776613616500636024376 0ustar sahidsahid00000000000000# (c) Copyright 2015-2016 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
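# Client-side sketch of the SecretsController flow above, using the
# `requests` library; the endpoint URL and token are assumptions, while
# the body and response shapes follow on_post/on_get in secrets.py.
import json

import requests

BARBICAN = 'http://localhost:9311/v1'             # assumed local endpoint
HEADERS = {'X-Auth-Token': 'REPLACE-WITH-TOKEN',  # assumed Keystone token
           'Content-Type': 'application/json'}

secret = {
    'name': 'AES key',
    'algorithm': 'aes',
    'bit_length': 256,
    'mode': 'cbc',
    'payload': 'dGhpcyBpcyBub3QgYSByZWFsIGtleQ==',  # sample base64 bytes
    'payload_content_type': 'application/octet-stream',
    'payload_content_encoding': 'base64',
}

resp = requests.post(BARBICAN + '/secrets',
                     headers=HEADERS, data=json.dumps(secret))
print(resp.status_code)               # 201; Location header holds the href
print(resp.json()['secret_ref'])

# Listing honors the sort/date-filter grammar validated above, e.g.:
listing = requests.get(BARBICAN + '/secrets', headers=HEADERS,
                       params={'sort': 'created:desc', 'limit': 5})
print(listing.json()['total'])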
import pecan from barbican.api import controllers from barbican.common import hrefs from barbican.common import resources as res from barbican.common import utils from barbican import i18n as u from barbican.model import repositories as repo from barbican.plugin.util import multiple_backends LOG = utils.getLogger(__name__) def _secret_store_not_found(): """Throw exception indicating secret store not found.""" pecan.abort(404, u._('Not Found. Secret store not found.')) def _preferred_secret_store_not_found(): """Throw exception indicating preferred secret store not found.""" pecan.abort(404, u._('Not Found. No preferred secret store defined for ' 'this project.')) def _multiple_backends_not_enabled(): """Throw exception indicating multiple backends support is not enabled.""" pecan.abort(404, u._('Not Found. Multiple backends support is not enabled ' 'in service configuration.')) def convert_secret_store_to_response_format(secret_store): data = secret_store.to_dict_fields() data['secret_store_plugin'] = data.pop('store_plugin') data['secret_store_ref'] = hrefs.convert_secret_stores_to_href( data['secret_store_id']) # no need to pass store id as secret_store_ref is returned data.pop('secret_store_id', None) return data class PreferredSecretStoreController(controllers.ACLMixin): """Handles preferred secret store set/removal requests.""" def __init__(self, secret_store): LOG.debug('=== Creating PreferredSecretStoreController ===') self.secret_store = secret_store self.proj_store_repo = repo.get_project_secret_store_repository() @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='DELETE', template='json') @controllers.handle_exceptions(u._('Removing preferred secret store')) @controllers.enforce_rbac('secretstore_preferred:delete') def on_delete(self, external_project_id, **kw): LOG.debug('Start: Remove project preferred secret-store for store' ' id %s', self.secret_store.id) project = res.get_or_create_project(external_project_id) project_store = self.proj_store_repo.get_secret_store_for_project( project.id, None, suppress_exception=True) if project_store is None: _preferred_secret_store_not_found() self.proj_store_repo.delete_entity_by_id( entity_id=project_store.id, external_project_id=external_project_id) pecan.response.status = 204 @index.when(method='POST', template='json') @controllers.handle_exceptions(u._('Setting preferred secret store')) @controllers.enforce_rbac('secretstore_preferred:post') def on_post(self, external_project_id, **kwargs): LOG.debug('Start: Set project preferred secret-store for store ' 'id %s', self.secret_store.id) project = res.get_or_create_project(external_project_id) self.proj_store_repo.create_or_update_for_project(project.id, self.secret_store.id) pecan.response.status = 204 class SecretStoreController(controllers.ACLMixin): """Handles secret store retrieval requests.""" def __init__(self, secret_store): LOG.debug('=== Creating SecretStoreController ===') self.secret_store = secret_store @pecan.expose() def _lookup(self, action, *remainder): if (action == 'preferred'): return PreferredSecretStoreController(self.secret_store), remainder else: pecan.abort(405) @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('Secret store retrieval')) @controllers.enforce_rbac('secretstore:get') def on_get(self, external_project_id): LOG.debug("== Getting 
secret store for %s", self.secret_store.id) return convert_secret_store_to_response_format(self.secret_store) class SecretStoresController(controllers.ACLMixin): """Handles secret-stores list requests.""" def __init__(self): LOG.debug('Creating SecretStoresController') self.secret_stores_repo = repo.get_secret_stores_repository() self.proj_store_repo = repo.get_project_secret_store_repository() def __getattr__(self, name): route_table = { 'global-default': self.get_global_default, 'preferred': self.get_preferred, } if name in route_table: return route_table[name] raise AttributeError @pecan.expose() def _lookup(self, secret_store_id, *remainder): if not utils.is_multiple_backends_enabled(): _multiple_backends_not_enabled() secret_store = self.secret_stores_repo.get(entity_id=secret_store_id, suppress_exception=True) if not secret_store: _secret_store_not_found() return SecretStoreController(secret_store), remainder @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('List available secret stores')) @controllers.enforce_rbac('secretstores:get') def on_get(self, external_project_id, **kw): LOG.debug('Start SecretStoresController on_get: listing secret ' 'stores') if not utils.is_multiple_backends_enabled(): _multiple_backends_not_enabled() res.get_or_create_project(external_project_id) secret_stores = self.secret_stores_repo.get_all() resp_list = [] for store in secret_stores: item = convert_secret_store_to_response_format(store) resp_list.append(item) resp = {'secret_stores': resp_list} return resp @pecan.expose(generic=True, template='json') @controllers.handle_exceptions(u._('Retrieve global default secret store')) @controllers.enforce_rbac('secretstores:get_global_default') def get_global_default(self, external_project_id, **kw): LOG.debug('Start secret-stores get global default secret store') if not utils.is_multiple_backends_enabled(): _multiple_backends_not_enabled() res.get_or_create_project(external_project_id) store = multiple_backends.get_global_default_secret_store() return convert_secret_store_to_response_format(store) @pecan.expose(generic=True, template='json') @controllers.handle_exceptions(u._('Retrieve project preferred store')) @controllers.enforce_rbac('secretstores:get_preferred') def get_preferred(self, external_project_id, **kw): LOG.debug('Start secret-stores get preferred secret store') if not utils.is_multiple_backends_enabled(): _multiple_backends_not_enabled() project = res.get_or_create_project(external_project_id) project_store = self.proj_store_repo.get_secret_store_for_project( project.id, None, suppress_exception=True) if project_store is None: _preferred_secret_store_not_found() return convert_secret_store_to_response_format( project_store.secret_store) barbican-9.1.0.dev50/barbican/api/controllers/secretmeta.py0000664000175000017500000001650513616500636024013 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
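# Client-side sketch of the secret-stores calls handled above; endpoint
# and token are assumptions, and multiple secret store backends must be
# enabled in the service configuration or these return 404.
import requests

STORES = 'http://localhost:9311/v1/secret-stores'  # assumed endpoint
HEADERS = {'X-Auth-Token': 'REPLACE-WITH-TOKEN'}   # assumed Keystone token

stores = requests.get(STORES, headers=HEADERS).json()['secret_stores']
default = requests.get(STORES + '/global-default', headers=HEADERS).json()
print(default['secret_store_ref'])

# Mark the first listed backend as this project's preferred store;
# PreferredSecretStoreController.on_post responds with 204 on success.
resp = requests.post(stores[0]['secret_store_ref'] + '/preferred',
                     headers=HEADERS)
print(resp.status_code)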
import collections import pecan from barbican import api from barbican.api import controllers from barbican.common import hrefs from barbican.common import utils from barbican.common import validators from barbican import i18n as u from barbican.model import repositories as repo LOG = utils.getLogger(__name__) def _secret_metadata_not_found(): """Throw exception indicating secret metadata not found.""" pecan.abort(404, u._('Secret metadata not found.')) class SecretMetadataController(controllers.ACLMixin): """Handles SecretMetadata requests by a given secret id.""" def __init__(self, secret): LOG.debug('=== Creating SecretMetadataController ===') self.secret = secret self.secret_project_id = self.secret.project.external_id self.secret_repo = repo.get_secret_repository() self.user_meta_repo = repo.get_secret_user_meta_repository() self.metadata_validator = validators.NewSecretMetadataValidator() self.metadatum_validator = validators.NewSecretMetadatumValidator() @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @utils.allow_all_content_types @controllers.handle_exceptions(u._('Secret metadata retrieval')) @controllers.enforce_rbac('secret_meta:get') def on_get(self, external_project_id, **kwargs): """Handles retrieval of existing secret metadata requests.""" LOG.debug('Start secret metadata on_get ' 'for secret-ID %s:', self.secret.id) resp = self.user_meta_repo.get_metadata_for_secret(self.secret.id) pecan.response.status = 200 return {"metadata": resp} @index.when(method='PUT', template='json') @controllers.handle_exceptions(u._('Secret metadata creation')) @controllers.enforce_rbac('secret_meta:put') @controllers.enforce_content_types(['application/json']) def on_put(self, external_project_id, **kwargs): """Handles creation/update of secret metadata.""" data = api.load_body(pecan.request, validator=self.metadata_validator) LOG.debug('Start secret metadata on_put...%s', data) self.user_meta_repo.create_replace_user_metadata(self.secret.id, data) url = hrefs.convert_user_meta_to_href(self.secret.id) LOG.debug('URI to secret metadata is %s', url) pecan.response.status = 201 return {'metadata_ref': url} @index.when(method='POST', template='json') @controllers.handle_exceptions(u._('Secret metadatum creation')) @controllers.enforce_rbac('secret_meta:post') @controllers.enforce_content_types(['application/json']) def on_post(self, external_project_id, **kwargs): """Handles creation of secret metadatum.""" data = api.load_body(pecan.request, validator=self.metadatum_validator) key = data.get('key') value = data.get('value') metadata = self.user_meta_repo.get_metadata_for_secret(self.secret.id) if key in metadata: pecan.abort(409, u._('Conflict. 
Key in request is already in the ' 'secret metadata')) LOG.debug('Start secret metadatum on_post...%s', metadata) self.user_meta_repo.create_replace_user_metadatum(self.secret.id, key, value) url = hrefs.convert_user_meta_to_href(self.secret.id) LOG.debug('URI to secret metadata is %s', url) pecan.response.status = 201 return {'metadata_ref': url + "/%s {key: %s, value:%s}" % (key, key, value)} class SecretMetadatumController(controllers.ACLMixin): def __init__(self, secret): LOG.debug('=== Creating SecretMetadatumController ===') self.user_meta_repo = repo.get_secret_user_meta_repository() self.secret = secret self.metadatum_validator = validators.NewSecretMetadatumValidator() @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('Secret metadatum retrieval')) @controllers.enforce_rbac('secret_meta:get') def on_get(self, external_project_id, remainder, **kwargs): """Handles retrieval of existing secret metadatum.""" LOG.debug('Start secret metadatum on_get ' 'for secret-ID %s:', self.secret.id) metadata = self.user_meta_repo.get_metadata_for_secret(self.secret.id) if remainder in metadata: pecan.response.status = 200 pair = {'key': remainder, 'value': metadata[remainder]} return collections.OrderedDict(sorted(pair.items())) else: _secret_metadata_not_found() @index.when(method='PUT', template='json') @utils.allow_all_content_types @controllers.handle_exceptions(u._('Secret metadatum update')) @controllers.enforce_rbac('secret_meta:put') @controllers.enforce_content_types(['application/json']) def on_put(self, external_project_id, remainder, **kwargs): """Handles update of existing secret metadatum.""" metadata = self.user_meta_repo.get_metadata_for_secret(self.secret.id) data = api.load_body(pecan.request, validator=self.metadatum_validator) key = data.get('key') value = data.get('value') if remainder not in metadata: _secret_metadata_not_found() elif remainder != key: msg = 'Key in request data does not match key in the ' 'request url.' pecan.abort(409, msg) else: LOG.debug('Start secret metadatum on_put...%s', metadata) self.user_meta_repo.create_replace_user_metadatum(self.secret.id, key, value) pecan.response.status = 200 pair = {'key': key, 'value': value} return collections.OrderedDict(sorted(pair.items())) @index.when(method='DELETE', template='json') @controllers.handle_exceptions(u._('Secret metadatum removal')) @controllers.enforce_rbac('secret_meta:delete') def on_delete(self, external_project_id, remainder, **kwargs): """Handles removal of existing secret metadatum.""" self.user_meta_repo.delete_metadatum(self.secret.id, remainder) msg = 'Deleted secret metadatum: %s for secret %s' % (remainder, self.secret.id) pecan.response.status = 204 LOG.info(msg) barbican-9.1.0.dev50/barbican/api/controllers/orders.py0000664000175000017500000001675113616500636023160 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
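# Client-side sketch of the secret metadata calls handled above; the
# secret href and token are assumptions, and the body shapes follow the
# metadata/metadatum validators referenced in secretmeta.py.
import json

import requests

SECRET = 'http://localhost:9311/v1/secrets/<secret-uuid>'  # assumed href
HEADERS = {'X-Auth-Token': 'REPLACE-WITH-TOKEN',
           'Content-Type': 'application/json'}

# PUT replaces the whole metadata set and returns 201 with metadata_ref.
meta = {'metadata': {'description': 'contract-123', 'geolocation': 'dc-1'}}
resp = requests.put(SECRET + '/metadata',
                    headers=HEADERS, data=json.dumps(meta))
print(resp.status_code, resp.json().get('metadata_ref'))

# POST adds a single metadatum; a duplicate key yields the 409 above.
item = {'key': 'owner', 'value': 'payments-team'}
resp = requests.post(SECRET + '/metadata',
                     headers=HEADERS, data=json.dumps(item))

# GET of one key routes to SecretMetadatumController.on_get.
print(requests.get(SECRET + '/metadata/owner', headers=HEADERS).json())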
import pecan from barbican import api from barbican.api import controllers from barbican.common import hrefs from barbican.common import quota from barbican.common import resources as res from barbican.common import utils from barbican.common import validators from barbican import i18n as u from barbican.model import models from barbican.model import repositories as repo from barbican.queue import client as async_client LOG = utils.getLogger(__name__) _DEPRECATION_MSG = '%s has been deprecated in the Newton release. ' \ 'It will be removed in the Pike release.' def _order_not_found(): """Throw exception indicating order not found.""" pecan.abort(404, u._('Order not found.')) def _secret_not_in_order(): """Throw exception that secret info is not available in the order.""" pecan.abort(400, u._("Secret metadata expected but not received.")) def _order_update_not_supported(): """Throw exception that PUT operation is not supported for orders.""" pecan.abort(405, u._("Order update is not supported.")) def _order_cannot_be_updated_if_not_pending(order_status): """Throw exception that order cannot be updated if not PENDING.""" pecan.abort(400, u._("Only PENDING orders can be updated. Order is in the" "{0} state.").format(order_status)) def order_cannot_modify_order_type(): """Throw exception that order type cannot be modified.""" pecan.abort(400, u._("Cannot modify order type.")) class OrderController(controllers.ACLMixin): """Handles Order retrieval and deletion requests.""" def __init__(self, order, queue_resource=None): self.order = order self.order_repo = repo.get_order_repository() self.queue = queue_resource or async_client.TaskClient() self.type_order_validator = validators.TypeOrderValidator() @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('Order retrieval')) @controllers.enforce_rbac('order:get') def on_get(self, external_project_id): return hrefs.convert_to_hrefs(self.order.to_dict_fields()) @index.when(method='DELETE') @utils.allow_all_content_types @controllers.handle_exceptions(u._('Order deletion')) @controllers.enforce_rbac('order:delete') def on_delete(self, external_project_id, **kwargs): self.order_repo.delete_entity_by_id( entity_id=self.order.id, external_project_id=external_project_id) class OrdersController(controllers.ACLMixin): """Handles Order requests for Secret creation.""" def __init__(self, queue_resource=None): LOG.debug('Creating OrdersController') self.order_repo = repo.get_order_repository() self.queue = queue_resource or async_client.TaskClient() self.type_order_validator = validators.TypeOrderValidator() self.quota_enforcer = quota.QuotaEnforcer('orders', self.order_repo) @pecan.expose() def _lookup(self, order_id, *remainder): # NOTE(jaosorior): It's worth noting that even though this section # actually does a lookup in the database regardless of the RBAC policy # check, the execution only gets here if authentication of the user was # previously successful. 
ctx = controllers._get_barbican_context(pecan.request) order = self.order_repo.get(entity_id=order_id, external_project_id=ctx.project_id, suppress_exception=True) if not order: _order_not_found() return OrderController(order, self.order_repo), remainder @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('Order(s) retrieval')) @controllers.enforce_rbac('orders:get') def on_get(self, external_project_id, **kw): LOG.debug('Start orders on_get ' 'for project-ID %s:', external_project_id) result = self.order_repo.get_by_create_date( external_project_id, offset_arg=kw.get('offset', 0), limit_arg=kw.get('limit', None), meta_arg=kw.get('meta', None), suppress_exception=True) orders, offset, limit, total = result if not orders: orders_resp_overall = {'orders': [], 'total': total} else: orders_resp = [ hrefs.convert_to_hrefs(o.to_dict_fields()) for o in orders ] orders_resp_overall = hrefs.add_nav_hrefs('orders', offset, limit, total, {'orders': orders_resp}) orders_resp_overall.update({'total': total}) return orders_resp_overall @index.when(method='PUT', template='json') @controllers.handle_exceptions(u._('Order update')) @controllers.enforce_rbac('orders:put') def on_put(self, external_project_id, **kwargs): _order_update_not_supported() @index.when(method='POST', template='json') @controllers.handle_exceptions(u._('Order creation')) @controllers.enforce_rbac('orders:post') @controllers.enforce_content_types(['application/json']) def on_post(self, external_project_id, **kwargs): project = res.get_or_create_project(external_project_id) body = api.load_body(pecan.request, validator=self.type_order_validator) order_type = body.get('type') order_meta = body.get('meta') request_type = order_meta.get('request_type') LOG.debug('Processing order type %(order_type)s,' ' request type %(request_type)s' % {'order_type': order_type, 'request_type': request_type}) self.quota_enforcer.enforce(project) new_order = models.Order() new_order.meta = body.get('meta') new_order.type = order_type new_order.project_id = project.id request_id = None ctxt = controllers._get_barbican_context(pecan.request) if ctxt: new_order.creator_id = ctxt.user request_id = ctxt.request_id self.order_repo.create_from(new_order) # Grab our id before commit due to obj expiration from sqlalchemy order_id = new_order.id # Force commit to avoid async issues with the workers repo.commit() self.queue.process_type_order(order_id=order_id, project_id=external_project_id, request_id=request_id) url = hrefs.convert_order_to_href(order_id) pecan.response.status = 202 pecan.response.headers['Location'] = url return {'order_ref': url} barbican-9.1.0.dev50/barbican/api/controllers/transportkeys.py0000664000175000017500000001373113616500636024605 0ustar sahidsahid00000000000000# Copyright (c) 2014 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
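# Client-side sketch of the asynchronous order flow handled above;
# endpoint and token are assumptions. on_post enqueues the order for the
# worker in server.py and answers 202 with an order_ref to poll.
import json
import time

import requests

BARBICAN = 'http://localhost:9311/v1'             # assumed local endpoint
HEADERS = {'X-Auth-Token': 'REPLACE-WITH-TOKEN',  # assumed Keystone token
           'Content-Type': 'application/json'}

order = {
    'type': 'key',
    'meta': {
        'name': 'generated AES key',
        'algorithm': 'aes',
        'bit_length': 256,
        'mode': 'cbc',
        'payload_content_type': 'application/octet-stream',
    },
}

resp = requests.post(BARBICAN + '/orders',
                     headers=HEADERS, data=json.dumps(order))
print(resp.status_code)                    # 202: accepted for processing
order_ref = resp.json()['order_ref']

# Poll until the worker finishes; a completed order reports status
# ACTIVE and carries a secret_ref pointing at the generated secret.
while requests.get(order_ref, headers=HEADERS).json()['status'] != 'ACTIVE':
    time.sleep(1)
print(requests.get(order_ref, headers=HEADERS).json()['secret_ref'])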
import pecan from six.moves.urllib import parse from barbican import api from barbican.api import controllers from barbican.common import exception from barbican.common import hrefs from barbican.common import utils from barbican.common import validators from barbican import i18n as u from barbican.model import models from barbican.model import repositories as repo LOG = utils.getLogger(__name__) def _transport_key_not_found(): """Throw exception indicating transport key not found.""" pecan.abort(404, u._('Not Found. Transport Key not found.')) def _invalid_transport_key_id(): """Throw exception indicating transport key id is invalid.""" pecan.abort(404, u._('Not Found. Provided transport key id is invalid.')) class TransportKeyController(controllers.ACLMixin): """Handles transport key retrieval requests.""" def __init__(self, transport_key_id, transport_key_repo=None): LOG.debug('=== Creating TransportKeyController ===') self.transport_key_id = transport_key_id self.repo = transport_key_repo or repo.TransportKeyRepo() @pecan.expose(generic=True) def index(self, external_project_id, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET') @controllers.handle_exceptions(u._('Transport Key retrieval')) @controllers.enforce_rbac('transport_key:get') def on_get(self, external_project_id): LOG.debug("== Getting transport key for %s", external_project_id) transport_key = self.repo.get(entity_id=self.transport_key_id) if not transport_key: _transport_key_not_found() pecan.override_template('json', 'application/json') return transport_key @index.when(method='DELETE') @controllers.handle_exceptions(u._('Transport Key deletion')) @controllers.enforce_rbac('transport_key:delete') def on_delete(self, external_project_id, **kwargs): LOG.debug("== Deleting transport key ===") try: self.repo.delete_entity_by_id( entity_id=self.transport_key_id, external_project_id=external_project_id) # TODO(alee) response should be 204 on success # pecan.response.status = 204 except exception.NotFound: LOG.exception('Problem deleting transport_key') _transport_key_not_found() class TransportKeysController(controllers.ACLMixin): """Handles transport key list requests.""" def __init__(self, transport_key_repo=None): LOG.debug('Creating TransportKeyController') self.repo = transport_key_repo or repo.TransportKeyRepo() self.validator = validators.NewTransportKeyValidator() @pecan.expose() def _lookup(self, transport_key_id, *remainder): if not utils.validate_id_is_uuid(transport_key_id): _invalid_transport_key_id() return TransportKeyController(transport_key_id, self.repo), remainder @pecan.expose(generic=True) def index(self, external_project_id, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('Transport Key(s) retrieval')) @controllers.enforce_rbac('transport_keys:get') def on_get(self, external_project_id, **kw): LOG.debug('Start transport_keys on_get') plugin_name = kw.get('plugin_name', None) if plugin_name is not None: plugin_name = parse.unquote_plus(plugin_name) result = self.repo.get_by_create_date( plugin_name=plugin_name, offset_arg=kw.get('offset', 0), limit_arg=kw.get('limit', None), suppress_exception=True ) transport_keys, offset, limit, total = result if not transport_keys: transport_keys_resp_overall = {'transport_keys': [], 'total': total} else: transport_keys_resp = [ hrefs.convert_transport_key_to_href(s.id) for s in transport_keys ] transport_keys_resp_overall = 
hrefs.add_nav_hrefs( 'transport_keys', offset, limit, total, {'transport_keys': transport_keys_resp} ) transport_keys_resp_overall.update({'total': total}) return transport_keys_resp_overall @index.when(method='POST', template='json') @controllers.handle_exceptions(u._('Transport Key Creation')) @controllers.enforce_rbac('transport_keys:post') @controllers.enforce_content_types(['application/json']) def on_post(self, external_project_id, **kwargs): LOG.debug('Start transport_keys on_post') # TODO(alee) POST should determine the plugin name and call the # relevant get_transport_key() call. We will implement this once # we figure out how the plugins will be enumerated. data = api.load_body(pecan.request, validator=self.validator) new_key = models.TransportKey(data.get('plugin_name'), data.get('transport_key')) self.repo.create_from(new_key) url = hrefs.convert_transport_key_to_href(new_key.id) LOG.debug('URI to transport key is %s', url) pecan.response.status = 201 pecan.response.headers['Location'] = url return {'transport_key_ref': url} barbican-9.1.0.dev50/barbican/api/controllers/quotas.py0000664000175000017500000001163413616500636023171 0ustar sahidsahid00000000000000# Copyright (c) 2015 Cisco Systems # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import pecan from barbican import api from barbican.api import controllers from barbican.common import exception from barbican.common import quota from barbican.common import resources as res from barbican.common import utils from barbican.common import validators from barbican import i18n as u LOG = utils.getLogger(__name__) def _project_quotas_not_found(): """Throw exception indicating project quotas not found.""" pecan.abort(404, u._('Project quotas not found.')) class QuotasController(controllers.ACLMixin): """Handles quota retrieval requests.""" def __init__(self): LOG.debug('=== Creating QuotasController ===') self.quota_driver = quota.QuotaDriver() @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('Quotas')) @controllers.enforce_rbac('quotas:get') def on_get(self, external_project_id, **kwargs): LOG.debug('=== QuotasController GET ===') # make sure project exists res.get_or_create_project(external_project_id) resp = self.quota_driver.get_quotas(external_project_id) return resp class ProjectQuotasController(controllers.ACLMixin): """Handles project quota requests.""" def __init__(self, project_id): LOG.debug('=== Creating ProjectQuotasController ===') self.passed_project_id = project_id self.validator = validators.ProjectQuotaValidator() self.quota_driver = quota.QuotaDriver() @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('Project Quotas')) @controllers.enforce_rbac('project_quotas:get') def on_get(self, external_project_id, **kwargs): LOG.debug('=== 
ProjectQuotasController GET ===') resp = self.quota_driver.get_project_quotas(self.passed_project_id) if resp: return resp else: _project_quotas_not_found() @index.when(method='PUT', template='json') @controllers.handle_exceptions(u._('Project Quotas')) @controllers.enforce_rbac('project_quotas:put') @controllers.enforce_content_types(['application/json']) def on_put(self, external_project_id, **kwargs): LOG.debug('=== ProjectQuotasController PUT ===') if not pecan.request.body: raise exception.NoDataToProcess() api.load_body(pecan.request, validator=self.validator) self.quota_driver.set_project_quotas(self.passed_project_id, kwargs['project_quotas']) LOG.info('Put Project Quotas') pecan.response.status = 204 @index.when(method='DELETE', template='json') @utils.allow_all_content_types @controllers.handle_exceptions(u._('Project Quotas')) @controllers.enforce_rbac('project_quotas:delete') def on_delete(self, external_project_id, **kwargs): LOG.debug('=== ProjectQuotasController DELETE ===') try: self.quota_driver.delete_project_quotas(self.passed_project_id) except exception.NotFound: LOG.info('Delete Project Quotas - Project not found') _project_quotas_not_found() else: LOG.info('Delete Project Quotas') pecan.response.status = 204 class ProjectsQuotasController(controllers.ACLMixin): """Handles projects quota retrieval requests.""" def __init__(self): LOG.debug('=== Creating ProjectsQuotaController ===') self.quota_driver = quota.QuotaDriver() @pecan.expose() def _lookup(self, project_id, *remainder): return ProjectQuotasController(project_id), remainder @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('Project Quotas')) @controllers.enforce_rbac('project_quotas:get') def on_get(self, external_project_id, **kwargs): resp = self.quota_driver.get_project_quotas_list( offset_arg=kwargs.get('offset', 0), limit_arg=kwargs.get('limit', None) ) return resp barbican-9.1.0.dev50/barbican/api/controllers/versions.py0000664000175000017500000001317713616500636023531 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
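# An illustrative client for the quota controllers above; the endpoint,
# token and project id are assumptions, and the quota names mirror the
# documented project-quotas API rather than anything defined in this file.
import requests

BARBICAN = 'http://localhost:9311/v1'
HEADERS = {'X-Auth-Token': '<keystone-token>',
           'Content-Type': 'application/json'}

# QuotasController.on_get(): effective quotas for the caller's own project.
print(requests.get(BARBICAN + '/quotas', headers=HEADERS).json())

# ProjectQuotasController.on_put(): set explicit quotas for one project.
# The body must carry a 'project_quotas' object (on_put reads
# kwargs['project_quotas']); success is signalled by a bare 204.
resp = requests.put(BARBICAN + '/project-quotas/<project-id>',
                    headers=HEADERS,
                    json={'project_quotas': {'secrets': 500,
                                             'orders': 100,
                                             'containers': 100}})
assert resp.status_code == 204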
import pecan from six.moves.urllib import parse from barbican.api import controllers from barbican.api.controllers import containers from barbican.api.controllers import orders from barbican.api.controllers import quotas from barbican.api.controllers import secrets from barbican.api.controllers import secretstores from barbican.api.controllers import transportkeys from barbican.common import utils from barbican import i18n as u from barbican import version LOG = utils.getLogger(__name__) MIME_TYPE_JSON = 'application/json' MIME_TYPE_JSON_HOME = 'application/json-home' MEDIA_TYPE_JSON = 'application/vnd.openstack.key-manager-%s+json' def _version_not_found(): """Throw exception indicating version not found.""" pecan.abort(404, u._("The version you requested wasn't found")) def _get_versioned_url(version): if version[-1] != '/': version += '/' # If host_href is not set in barbican conf, then derive it from request url host_part = utils.get_base_url_from_request() if host_part[-1] != '/': host_part += '/' return parse.urljoin(host_part, version) class BaseVersionController(object): """Base class for the version-specific controllers""" @classmethod def get_version_info(cls, request): return { 'id': cls.version_id, 'status': 'stable', 'updated': cls.last_updated, 'links': [ { 'rel': 'self', 'href': _get_versioned_url(cls.version_string), }, { 'rel': 'describedby', 'type': 'text/html', 'href': 'https://docs.openstack.org/' } ], 'media-types': [ { 'base': MIME_TYPE_JSON, 'type': MEDIA_TYPE_JSON % cls.version_string } ] } class V1Controller(BaseVersionController): """Root controller for the v1 API""" version_string = 'v1' # NOTE(jaosorior): We might start using decimals in the future, meanwhile # this is the same as the version string. version_id = 'v1' last_updated = '2015-04-28T00:00:00Z' def __init__(self): LOG.debug('=== Creating V1Controller ===') self.secrets = secrets.SecretsController() self.orders = orders.OrdersController() self.containers = containers.ContainersController() self.transport_keys = transportkeys.TransportKeysController() self.quotas = quotas.QuotasController() setattr(self, 'project-quotas', quotas.ProjectsQuotasController()) setattr(self, 'secret-stores', secretstores.SecretStoresController()) @pecan.expose(generic=True) def index(self): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @utils.allow_certain_content_types(MIME_TYPE_JSON, MIME_TYPE_JSON_HOME) @controllers.handle_exceptions(u._('Version retrieval')) def on_get(self): pecan.core.override_template('json') return {'version': self.get_version_info(pecan.request)} AVAILABLE_VERSIONS = { V1Controller.version_string: V1Controller, } DEFAULT_VERSION = V1Controller.version_string class VersionsController(object): def __init__(self): LOG.debug('=== Creating VersionsController ===') @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @utils.allow_certain_content_types(MIME_TYPE_JSON, MIME_TYPE_JSON_HOME) def on_get(self, **kwargs): """The list of versions is dependent on the context.""" self._redirect_to_default_json_home_if_needed(pecan.request) if 'build' in kwargs: return {'build': version.__version__} versions_info = [version_class.get_version_info(pecan.request) for version_class in AVAILABLE_VERSIONS.values()] version_output = { 'versions': { 'values': versions_info } } # Since we are returning all the versions available, the proper status # code is Multiple 
Choices (300) pecan.response.status = 300 return version_output def _redirect_to_default_json_home_if_needed(self, request): if self._mime_best_match(request.accept) == MIME_TYPE_JSON_HOME: url = _get_versioned_url(DEFAULT_VERSION) LOG.debug("Redirecting Request to " + url) # NOTE(jaosorior): This issues an "external" redirect because of # two reasons: # * This module doesn't require authorization, and accessing # specific version info needs that. # * The resource is a separate app_factory and won't be found # internally pecan.redirect(url, request=request) def _mime_best_match(self, accept): if not accept: return MIME_TYPE_JSON SUPPORTED_TYPES = [MIME_TYPE_JSON, MIME_TYPE_JSON_HOME] return accept.best_match(SUPPORTED_TYPES) barbican-9.1.0.dev50/barbican/api/controllers/consumers.py0000664000175000017500000003520313616500636023671 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import pecan from barbican import api from barbican.api import controllers from barbican.common import exception from barbican.common import hrefs from barbican.common import quota from barbican.common import resources as res from barbican.common import utils from barbican.common import validators from barbican import i18n as u from barbican.model import models from barbican.model import repositories as repo LOG = utils.getLogger(__name__) def _consumer_not_found(): """Throw exception indicating consumer not found.""" pecan.abort(404, u._('Consumer not found.')) def _consumer_ownership_mismatch(): """Throw exception indicating the user does not own this consumer.""" pecan.abort(403, u._('Not Allowed. Sorry, only the creator of a consumer ' 'can delete it.')) def _invalid_consumer_id(): """Throw exception indicating consumer id is invalid.""" pecan.abort(404, u._('Not Found. 
Provided consumer id is invalid.')) class ContainerConsumerController(controllers.ACLMixin): """Handles Container Consumer entity retrieval and deletion requests""" def __init__(self, consumer_id): self.consumer_id = consumer_id self.consumer_repo = repo.get_container_consumer_repository() self.validator = validators.ContainerConsumerValidator() @pecan.expose(generic=True) def index(self): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('ContainerConsumer retrieval')) @controllers.enforce_rbac('consumer:get') def on_get(self, external_project_id): consumer = self.consumer_repo.get( entity_id=self.consumer_id, suppress_exception=True) if not consumer: _consumer_not_found() dict_fields = consumer.to_dict_fields() LOG.info('Retrieved a consumer for project: %s', external_project_id) return hrefs.convert_to_hrefs( hrefs.convert_to_hrefs(dict_fields) ) class ContainerConsumersController(controllers.ACLMixin): """Handles Container Consumer creation requests""" def __init__(self, container_id): self.container_id = container_id self.consumer_repo = repo.get_container_consumer_repository() self.container_repo = repo.get_container_repository() self.project_repo = repo.get_project_repository() self.validator = validators.ContainerConsumerValidator() self.quota_enforcer = quota.QuotaEnforcer('consumers', self.consumer_repo) @pecan.expose() def _lookup(self, consumer_id, *remainder): if not utils.validate_id_is_uuid(consumer_id): _invalid_consumer_id() return ContainerConsumerController(consumer_id), remainder @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('ContainerConsumer(s) retrieval')) @controllers.enforce_rbac('consumers:get') def on_get(self, external_project_id, **kw): LOG.debug('Start consumers on_get ' 'for container-ID %s:', self.container_id) result = self.consumer_repo.get_by_container_id( self.container_id, offset_arg=kw.get('offset', 0), limit_arg=kw.get('limit'), suppress_exception=True ) consumers, offset, limit, total = result if not consumers: resp_ctrs_overall = {'consumers': [], 'total': total} else: resp_ctrs = [ hrefs.convert_to_hrefs(c.to_dict_fields()) for c in consumers ] consumer_path = "containers/{container_id}/consumers".format( container_id=self.container_id) resp_ctrs_overall = hrefs.add_nav_hrefs( consumer_path, offset, limit, total, {'consumers': resp_ctrs} ) resp_ctrs_overall.update({'total': total}) LOG.info('Retrieved a container consumer list for project: %s', external_project_id) return resp_ctrs_overall @index.when(method='POST', template='json') @controllers.handle_exceptions(u._('ContainerConsumer creation')) @controllers.enforce_rbac('consumers:post') @controllers.enforce_content_types(['application/json']) def on_post(self, external_project_id, **kwargs): project = res.get_or_create_project(external_project_id) data = api.load_body(pecan.request, validator=self.validator) LOG.debug('Start on_post...%s', data) container = self._get_container(self.container_id) self.quota_enforcer.enforce(project) new_consumer = models.ContainerConsumerMetadatum(self.container_id, project.id, data) self.consumer_repo.create_or_update_from(new_consumer, container) url = hrefs.convert_consumer_to_href(new_consumer.container_id) pecan.response.headers['Location'] = url LOG.info('Created a container consumer for project: %s', external_project_id)
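# The response body is the parent container's representation (built by
# _return_container_data() below); the consumer itself is only exposed
# through the Location header set above.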
return self._return_container_data(self.container_id) @index.when(method='DELETE', template='json') @controllers.handle_exceptions(u._('ContainerConsumer deletion')) @controllers.enforce_rbac('consumers:delete') @controllers.enforce_content_types(['application/json']) def on_delete(self, external_project_id, **kwargs): data = api.load_body(pecan.request, validator=self.validator) LOG.debug('Start on_delete...%s', data) project = self.project_repo.find_by_external_project_id( external_project_id, suppress_exception=True) if not project: _consumer_not_found() consumer = self.consumer_repo.get_by_values( self.container_id, data["name"], data["URL"], suppress_exception=True ) if not consumer: _consumer_not_found() LOG.debug("Found container consumer: %s", consumer) container = self._get_container(self.container_id) owner_of_consumer = consumer.project_id == project.id owner_of_container = container.project.external_id \ == external_project_id if not owner_of_consumer and not owner_of_container: _consumer_ownership_mismatch() try: self.consumer_repo.delete_entity_by_id(consumer.id, external_project_id) except exception.NotFound: LOG.exception('Problem deleting container consumer') _consumer_not_found() ret_data = self._return_container_data(self.container_id) LOG.info('Deleted a container consumer for project: %s', external_project_id) return ret_data def _get_container(self, container_id): container = self.container_repo.get_container_by_id( container_id, suppress_exception=True) if not container: controllers.containers.container_not_found() return container def _return_container_data(self, container_id): container = self._get_container(container_id) dict_fields = container.to_dict_fields() for secret_ref in dict_fields['secret_refs']: hrefs.convert_to_hrefs(secret_ref) # TODO(john-wood-w) Why two calls to convert_to_hrefs()? 
return hrefs.convert_to_hrefs( hrefs.convert_to_hrefs(dict_fields) ) class SecretConsumerController(controllers.ACLMixin): """Handles Secret Consumer entity retrieval and deletion requests""" def __init__(self, consumer_id): self.consumer_id = consumer_id self.consumer_repo = repo.get_secret_consumer_repository() self.validator = validators.SecretConsumerValidator() @pecan.expose(generic=True) def index(self): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('SecretConsumer retrieval')) @controllers.enforce_rbac('consumer:get') def on_get(self, external_project_id): consumer = self.consumer_repo.get( entity_id=self.consumer_id, suppress_exception=True) if not consumer: _consumer_not_found() dict_fields = consumer.to_dict_fields() LOG.info('Retrieved a secret consumer for project: %s', external_project_id) return hrefs.convert_to_hrefs( hrefs.convert_to_hrefs(dict_fields) ) class SecretConsumersController(controllers.ACLMixin): """Handles Secret Consumer creation requests""" def __init__(self, secret_id): self.secret_id = secret_id self.consumer_repo = repo.get_secret_consumer_repository() self.secret_repo = repo.get_secret_repository() self.project_repo = repo.get_project_repository() self.validator = validators.SecretConsumerValidator() self.quota_enforcer = quota.QuotaEnforcer('consumers', self.consumer_repo) @pecan.expose() def _lookup(self, consumer_id, *remainder): if not utils.validate_id_is_uuid(consumer_id): _invalid_consumer_id() return SecretConsumerController(consumer_id), remainder @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('SecretConsumer(s) retrieval')) @controllers.enforce_rbac('consumers:get') def on_get(self, external_project_id, **kw): LOG.debug('Start consumers on_get ' 'for secret-ID %s:', self.secret_id) result = self.consumer_repo.get_by_secret_id( self.secret_id, offset_arg=kw.get('offset', 0), limit_arg=kw.get('limit'), suppress_exception=True ) consumers, offset, limit, total = result if not consumers: resp_ctrs_overall = {'consumers': [], 'total': total} else: resp_ctrs = [ hrefs.convert_to_hrefs(c.to_dict_fields()) for c in consumers ] consumer_path = "secrets/{secret_id}/consumers".format( secret_id=self.secret_id) resp_ctrs_overall = hrefs.add_nav_hrefs( consumer_path, offset, limit, total, {'consumers': resp_ctrs} ) resp_ctrs_overall.update({'total': total}) LOG.info('Retrieved a consumer list for project: %s', external_project_id) return resp_ctrs_overall @index.when(method='POST', template='json') @controllers.handle_exceptions(u._('SecretConsumer creation')) @controllers.enforce_rbac('consumers:post') @controllers.enforce_content_types(['application/json']) def on_post(self, external_project_id, **kwargs): project = res.get_or_create_project(external_project_id) data = api.load_body(pecan.request, validator=self.validator) LOG.debug('Start on_post...%s', data) secret = self._get_secret(self.secret_id) self.quota_enforcer.enforce(project) new_consumer = models.SecretConsumerMetadatum( self.secret_id, project.id, data["service"], data["resource_type"], data["resource_id"], ) self.consumer_repo.create_or_update_from(new_consumer, secret) url = hrefs.convert_consumer_to_href(new_consumer.secret_id) pecan.response.headers['Location'] = url LOG.info('Created a consumer for project: %s', external_project_id) return
self._return_secret_data(self.secret_id) @index.when(method='DELETE', template='json') @controllers.handle_exceptions(u._('SecretConsumer deletion')) @controllers.enforce_rbac('consumers:delete') @controllers.enforce_content_types(['application/json']) def on_delete(self, external_project_id, **kwargs): data = api.load_body(pecan.request, validator=self.validator) LOG.debug('Start on_delete...%s', data) project = self.project_repo.find_by_external_project_id( external_project_id, suppress_exception=True) if not project: _consumer_not_found() consumer = self.consumer_repo.get_by_values( self.secret_id, data["resource_id"], suppress_exception=True ) if not consumer: _consumer_not_found() LOG.debug("Found consumer: %s", consumer) secret = self._get_secret(self.secret_id) owner_of_consumer = consumer.project_id == project.id owner_of_secret = secret.project.external_id \ == external_project_id if not owner_of_consumer and not owner_of_secret: _consumer_ownership_mismatch() try: self.consumer_repo.delete_entity_by_id(consumer.id, external_project_id) except exception.NotFound: LOG.exception('Problem deleting consumer') _consumer_not_found() ret_data = self._return_secret_data(self.secret_id) LOG.info('Deleted a consumer for project: %s', external_project_id) return ret_data def _get_secret(self, secret_id): secret = self.secret_repo.get_secret_by_id( secret_id, suppress_exception=True) if not secret: controllers.secrets.secret_not_found() return secret def _return_secret_data(self, secret_id): secret = self._get_secret(secret_id) dict_fields = secret.to_dict_fields() return hrefs.convert_to_hrefs( hrefs.convert_to_hrefs(dict_fields) ) barbican-9.1.0.dev50/barbican/api/controllers/__init__.py0000664000175000017500000001715713616500636023422 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import collections from oslo_policy import policy import pecan from webob import exc from barbican import api from barbican.common import accept from barbican.common import utils from barbican import i18n as u LOG = utils.getLogger(__name__) def is_json_request_accept(req): """Test if http request 'accept' header configured for JSON response. :param req: HTTP request :return: True if need to return JSON response. 
""" return ( type(req.accept) is accept.NoHeaderType or type(req.accept) is accept.ValidHeaderType and ( req.accept.header_value == 'application/json' or req.accept.header_value == '*/*' ) ) def _get_barbican_context(req): if 'barbican.context' in req.environ: return req.environ['barbican.context'] else: return None def _do_enforce_rbac(inst, req, action_name, ctx, **kwargs): """Enforce RBAC based on 'request' information.""" if action_name and ctx: # Enforce special case: secret GET decryption if 'secret:get' == action_name and not is_json_request_accept(req): action_name = 'secret:decrypt' # Override to perform special rules target_name, target_data = inst.get_acl_tuple(req, **kwargs) policy_dict = {} if target_name and target_data: policy_dict['target'] = {target_name: target_data} policy_dict.update(kwargs) # Enforce access controls. if ctx.policy_enforcer: ctx.policy_enforcer.authorize(action_name, flatten(policy_dict), ctx, do_raise=True) def enforce_rbac(action_name='default'): """Decorator handling RBAC enforcement on behalf of REST verb methods.""" def rbac_decorator(fn): def enforcer(inst, *args, **kwargs): # Enforce RBAC rules. # context placed here by context.py # middleware ctx = _get_barbican_context(pecan.request) external_project_id = None if ctx: external_project_id = ctx.project_id _do_enforce_rbac(inst, pecan.request, action_name, ctx, **kwargs) # insert external_project_id as the first arg to the guarded method args = list(args) args.insert(0, external_project_id) # Execute guarded method now. return fn(inst, *args, **kwargs) return enforcer return rbac_decorator def handle_exceptions(operation_name=u._('System')): """Decorator handling generic exceptions from REST methods.""" def exceptions_decorator(fn): def handler(inst, *args, **kwargs): try: return fn(inst, *args, **kwargs) except exc.HTTPError: LOG.exception('Webob error seen') raise # Already converted to Webob exception, just reraise # In case PolicyNotAuthorized, we do not want to expose payload by # logging exception, so just LOG.error except policy.PolicyNotAuthorized as pna: status, message = api.generate_safe_exception_message( operation_name, pna) LOG.error(message) pecan.abort(status, message) except Exception as e: # In case intervening modules have disabled logging. LOG.logger.disabled = False status, message = api.generate_safe_exception_message( operation_name, e) LOG.exception(message) pecan.abort(status, message) return handler return exceptions_decorator def _do_enforce_content_types(pecan_req, valid_content_types): """Content type enforcement Check to see that content type in the request is one of the valid types passed in by our caller. """ if pecan_req.content_type not in valid_content_types: m = u._( "Unexpected content type. Expected content types " "are: {expected}" ).format( expected=valid_content_types ) pecan.abort(415, m) def enforce_content_types(valid_content_types=[]): """Decorator handling content type enforcement on behalf of REST verbs.""" def content_types_decorator(fn): def content_types_enforcer(inst, *args, **kwargs): _do_enforce_content_types(pecan.request, valid_content_types) return fn(inst, *args, **kwargs) return content_types_enforcer return content_types_decorator def flatten(d, parent_key=''): """Flatten a nested dictionary Converts a dictionary with nested values to a single level flat dictionary, with dotted notation for each key. """ items = [] for k, v in d.items(): new_key = parent_key + '.' 
+ k if parent_key else k if isinstance(v, collections.MutableMapping): items.extend(flatten(v, new_key).items()) else: items.append((new_key, v)) return dict(items) class ACLMixin(object): def get_acl_tuple(self, req, **kwargs): return None, None def get_acl_dict_for_user(self, req, acl_list): """Get acl operation found for token user in acl list. Token user is looked into users list present for each acl operation. If there is a match, it means that ACL data is applicable for policy logic. Policy logic requires data as dictionary so this method capture acl's operation, project_access data in that format. For operation value, matching ACL record's operation is stored in dict as key and value both. project_access flag is intended to make secret/container private for a given operation. It doesn't require user match. So its captured in dict format where key is prefixed with related operation and flag is used as its value. Then for acl related policy logic, this acl dict data is combined with target entity (secret or container) creator_id and project id. The whole dict serves as target in policy enforcement logic i.e. right hand side of policy rule. Following is sample outcome where secret or container has ACL defined and token user is among the ACL users defined for 'read' and 'list' operation. {'read': 'read', 'list': 'list', 'read_project_access': True, 'list_project_access': True } Its possible that ACLs are defined without any user, they just have project_access flag set. This means only creator can read or list ACL entities. In that case, dictionary output can be as follows. {'read_project_access': False, 'list_project_access': False } """ ctxt = _get_barbican_context(req) if not ctxt: return {} acl_dict = {acl.operation: acl.operation for acl in acl_list if ctxt.user in acl.to_dict_fields().get('users', [])} co_dict = {'%s_project_access' % acl.operation: acl.project_access for acl in acl_list if acl.project_access is not None} acl_dict.update(co_dict) return acl_dict barbican-9.1.0.dev50/barbican/api/controllers/containers.py0000664000175000017500000003056713616500636024030 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import pecan from barbican import api from barbican.api import controllers from barbican.api.controllers import acls from barbican.api.controllers import consumers from barbican.common import exception from barbican.common import hrefs from barbican.common import quota from barbican.common import resources as res from barbican.common import utils from barbican.common import validators from barbican import i18n as u from barbican.model import models from barbican.model import repositories as repo LOG = utils.getLogger(__name__) CONTAINER_GET = 'container:get' def container_not_found(): """Throw exception indicating container not found.""" pecan.abort(404, u._('Secrets container not found.')) def invalid_container_id(): """Throw exception indicating container id is invalid.""" pecan.abort(404, u._('Not Found. 
Provided container id is invalid.')) class ContainerController(controllers.ACLMixin): """Handles Container entity retrieval and deletion requests.""" def __init__(self, container): self.container = container self.container_id = container.id self.consumer_repo = repo.get_container_consumer_repository() self.container_repo = repo.get_container_repository() self.validator = validators.ContainerValidator() self.consumers = consumers.ContainerConsumersController( self.container_id) self.acl = acls.ContainerACLsController(self.container) def get_acl_tuple(self, req, **kwargs): d = self.get_acl_dict_for_user(req, self.container.container_acls) d['project_id'] = self.container.project.external_id d['creator_id'] = self.container.creator_id return 'container', d @pecan.expose(generic=True, template='json') def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('Container retrieval')) @controllers.enforce_rbac(CONTAINER_GET) def on_get(self, external_project_id): dict_fields = self.container.to_dict_fields() for secret_ref in dict_fields['secret_refs']: hrefs.convert_to_hrefs(secret_ref) LOG.info('Retrieved container for project: %s', external_project_id) return hrefs.convert_to_hrefs( hrefs.convert_to_hrefs(dict_fields) ) @index.when(method='DELETE') @utils.allow_all_content_types @controllers.handle_exceptions(u._('Container deletion')) @controllers.enforce_rbac('container:delete') def on_delete(self, external_project_id, **kwargs): container_consumers = self.consumer_repo.get_by_container_id( self.container_id, suppress_exception=True ) try: self.container_repo.delete_entity_by_id( entity_id=self.container_id, external_project_id=external_project_id ) except exception.NotFound: LOG.exception('Problem deleting container') container_not_found() LOG.info('Deleted container for project: %s', external_project_id) for consumer in container_consumers[0]: try: self.consumer_repo.delete_entity_by_id( consumer.id, external_project_id) except exception.NotFound: # nosec pass class ContainersController(controllers.ACLMixin): """Handles Container creation requests.""" def __init__(self): self.consumer_repo = repo.get_container_consumer_repository() self.container_repo = repo.get_container_repository() self.secret_repo = repo.get_secret_repository() self.validator = validators.ContainerValidator() self.quota_enforcer = quota.QuotaEnforcer('containers', self.container_repo) @pecan.expose() def _lookup(self, container_id, *remainder): if not utils.validate_id_is_uuid(container_id): invalid_container_id() container = self.container_repo.get_container_by_id( entity_id=container_id, suppress_exception=True) if not container: container_not_found() if len(remainder) > 0 and remainder[0] == 'secrets': return ContainersSecretsController(container), () return ContainerController(container), remainder @pecan.expose(generic=True, template='json') def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='GET', template='json') @controllers.handle_exceptions(u._('Containers(s) retrieval')) @controllers.enforce_rbac('containers:get') def on_get(self, project_id, **kw): LOG.debug('Start containers on_get for project-ID %s:', project_id) result = self.container_repo.get_by_create_date( project_id, offset_arg=kw.get('offset', 0), limit_arg=kw.get('limit', None), name_arg=kw.get('name', None), type_arg=kw.get('type', None), suppress_exception=True ) containers, offset, limit, 
total = result if not containers: resp_ctrs_overall = {'containers': [], 'total': total} else: resp_ctrs = [ hrefs.convert_to_hrefs(c.to_dict_fields()) for c in containers ] for ctr in resp_ctrs: for secret_ref in ctr.get('secret_refs', []): hrefs.convert_to_hrefs(secret_ref) resp_ctrs_overall = hrefs.add_nav_hrefs( 'containers', offset, limit, total, {'containers': resp_ctrs} ) resp_ctrs_overall.update({'total': total}) LOG.info('Retrieved container list for project: %s', project_id) return resp_ctrs_overall @index.when(method='POST', template='json') @controllers.handle_exceptions(u._('Container creation')) @controllers.enforce_rbac('containers:post') @controllers.enforce_content_types(['application/json']) def on_post(self, external_project_id, **kwargs): project = res.get_or_create_project(external_project_id) data = api.load_body(pecan.request, validator=self.validator) ctxt = controllers._get_barbican_context(pecan.request) if ctxt: # in authenticated pipleline case, always use auth token user data['creator_id'] = ctxt.user self.quota_enforcer.enforce(project) LOG.debug('Start on_post...%s', data) new_container = models.Container(data) new_container.project_id = project.id # TODO(hgedikli): performance optimizations for secret_ref in new_container.container_secrets: secret = self.secret_repo.get( entity_id=secret_ref.secret_id, external_project_id=external_project_id, suppress_exception=True) if not secret: # This only partially localizes the error message and # doesn't localize secret_ref.name. pecan.abort( 404, u._("Secret provided for '{secret_name}' doesn't " "exist.").format(secret_name=secret_ref.name) ) self.container_repo.create_from(new_container) url = hrefs.convert_container_to_href(new_container.id) pecan.response.status = 201 pecan.response.headers['Location'] = url LOG.info('Created a container for project: %s', external_project_id) return {'container_ref': url} class ContainersSecretsController(controllers.ACLMixin): """Handles ContainerSecret creation and deletion requests.""" def __init__(self, container): LOG.debug('=== Creating ContainerSecretsController ===') self.container = container self.container_secret_repo = repo.get_container_secret_repository() self.secret_repo = repo.get_secret_repository() self.validator = validators.ContainerSecretValidator() @pecan.expose(generic=True) def index(self, **kwargs): pecan.abort(405) # HTTP 405 Method Not Allowed as default @index.when(method='POST', template='json') @controllers.handle_exceptions(u._('Container Secret creation')) @controllers.enforce_rbac('container_secret:post') @controllers.enforce_content_types(['application/json']) def on_post(self, external_project_id, **kwargs): """Handles adding an existing secret to an existing container.""" if self.container.type != 'generic': pecan.abort(400, u._("Only 'generic' containers can be modified.")) data = api.load_body(pecan.request, validator=self.validator) name = data.get('name') secret_ref = data.get('secret_ref') secret_id = hrefs.get_secret_id_from_ref(secret_ref) secret = self.secret_repo.get( entity_id=secret_id, external_project_id=external_project_id, suppress_exception=True) if not secret: pecan.abort(404, u._("Secret provided doesn't exist.")) found_container_secrets = list( filter(lambda cs: cs.secret_id == secret_id and cs.name == name, self.container.container_secrets) ) if found_container_secrets: pecan.abort(409, u._('Conflict. A secret with that name and ID is ' 'already stored in this container. 
The same ' 'secret can exist in a container as long as ' 'the name is unique.')) LOG.debug('Start container secret on_post...%s', secret_ref) new_container_secret = models.ContainerSecret() new_container_secret.container_id = self.container.id new_container_secret.name = name new_container_secret.secret_id = secret_id self.container_secret_repo.save(new_container_secret) url = hrefs.convert_container_to_href(self.container.id) LOG.debug('URI to container is %s', url) pecan.response.status = 201 pecan.response.headers['Location'] = url LOG.info('Created a container secret for project: %s', external_project_id) return {'container_ref': url} @index.when(method='DELETE') @utils.allow_all_content_types @controllers.handle_exceptions(u._('Container Secret deletion')) @controllers.enforce_rbac('container_secret:delete') def on_delete(self, external_project_id, **kwargs): """Handles removing a secret reference from an existing container.""" data = api.load_body(pecan.request, validator=self.validator) name = data.get('name') secret_ref = data.get('secret_ref') secret_id = hrefs.get_secret_id_from_ref(secret_ref) secret = self.secret_repo.get( entity_id=secret_id, external_project_id=external_project_id, suppress_exception=True) if not secret: pecan.abort(404, u._("Secret '{secret_name}' with reference " "'{secret_ref}' doesn't exist.").format( secret_name=name, secret_ref=secret_ref)) found_container_secrets = list( filter(lambda cs: cs.secret_id == secret_id and cs.name == name, self.container.container_secrets) ) if not found_container_secrets: pecan.abort(404, u._('Secret provided is not in the container')) for container_secret in found_container_secrets: self.container_secret_repo.delete_entity_by_id( container_secret.id, external_project_id) pecan.response.status = 204 LOG.info('Deleted container secret for project: %s', external_project_id) barbican-9.1.0.dev50/barbican/api/hooks.py0000664000175000017500000000335213616500636020430 0ustar sahidsahid00000000000000# Copyright (c) 2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
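# Before the Pecan request hooks below, an illustrative client for the
# container controllers above. The endpoint, token and the pre-existing
# secret reference are assumptions.
import requests

BARBICAN = 'http://localhost:9311/v1'
HEADERS = {'X-Auth-Token': '<keystone-token>',
           'Content-Type': 'application/json'}
secret_ref = BARBICAN + '/secrets/<secret-id>'  # assumed existing secret

# ContainersController.on_post(): 201 plus a container_ref on success; an
# unknown secret_ref is rejected with the 404 raised in the loop above.
resp = requests.post(BARBICAN + '/containers', headers=HEADERS,
                     json={'type': 'generic',
                           'name': 'example container',
                           'secret_refs': [{'name': 'key',
                                            'secret_ref': secret_ref}]})
assert resp.status_code == 201
container_ref = resp.json()['container_ref']

# ContainersSecretsController.on_post(): only 'generic' containers may be
# modified, and re-adding the same (name, secret id) pair yields a 409.
requests.post(container_ref + '/secrets', headers=HEADERS,
              json={'name': 'key-2', 'secret_ref': secret_ref})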
import pecan import webob from oslo_serialization import jsonutils as json try: import newrelic.agent newrelic_loaded = True except ImportError: newrelic_loaded = False from barbican.model import repositories class JSONErrorHook(pecan.hooks.PecanHook): def on_error(self, state, exc): if isinstance(exc, webob.exc.HTTPError): exc.body = json.dump_as_bytes({ 'code': exc.status_int, 'title': exc.title, 'description': exc.detail }) state.response.content_type = "application/json" return exc.body class BarbicanTransactionHook(pecan.hooks.TransactionHook): """Custom hook for Barbican transactions.""" def __init__(self): super(BarbicanTransactionHook, self).__init__( start=repositories.start, start_ro=repositories.start_read_only, commit=repositories.commit, rollback=repositories.rollback, clear=repositories.clear ) class NewRelicHook(pecan.hooks.PecanHook): def on_error(self, state, exc): if newrelic_loaded: newrelic.agent.record_exception() barbican-9.1.0.dev50/barbican/api/middleware/0000775000175000017500000000000013616500640021040 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/api/middleware/context.py0000664000175000017500000001150113616500636023101 0ustar sahidsahid00000000000000# Copyright 2011-2012 OpenStack LLC. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import webob.exc from barbican.api import middleware as mw from barbican.common import config from barbican.common import utils import barbican.context from barbican import i18n as u LOG = utils.getLogger(__name__) CONF = config.CONF class BaseContextMiddleware(mw.Middleware): def process_request(self, req): request_id = req.headers.get('x-openstack-request-id') if not request_id: request_id = 'req-' + utils.generate_uuid() setattr(req, 'request_id', request_id) def process_response(self, resp): resp.headers['x-openstack-request-id'] = resp.request.request_id LOG.info('Processed request: %(status)s - %(method)s %(url)s', {"status": resp.status, "method": resp.request.method, "url": resp.request.url}) return resp class ContextMiddleware(BaseContextMiddleware): def __init__(self, app): super(ContextMiddleware, self).__init__(app) def process_request(self, req): """Convert authentication information into a request context Generate a barbican.context.RequestContext object from the available authentication headers and store on the 'context' attribute of the req object. :param req: wsgi request object that will be given the context object :raises webob.exc.HTTPUnauthorized: when value of the X-Identity-Status header is not 'Confirmed' and anonymous access is disallowed """ super(ContextMiddleware, self).process_request(req) if req.headers.get('X-Identity-Status') == 'Confirmed': req.context = self._get_authenticated_context(req) elif CONF.allow_anonymous_access: req.context = self._get_anonymous_context() LOG.debug("==== Inserted barbican unauth " "request context: %s ====", req.context.to_dict()) else: raise webob.exc.HTTPUnauthorized() # Ensure that down wind mw.Middleware/app can see this context. 
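# (controllers._get_barbican_context() and the RBAC decorators later read
# back this same 'barbican.context' environ key.)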
req.environ['barbican.context'] = req.context def _get_anonymous_context(self): kwargs = { 'user': None, 'tenant': None, 'is_admin': False, 'read_only': True, } return barbican.context.RequestContext(**kwargs) def _get_authenticated_context(self, req): ctx = barbican.context.RequestContext.from_environ(req.environ) if ctx.project_id is None: LOG.debug("X_PROJECT_ID not found in request") return webob.exc.HTTPUnauthorized() ctx.is_admin = CONF.admin_role.strip().lower() in ctx.roles return ctx class UnauthenticatedContextMiddleware(BaseContextMiddleware): def _get_project_id_from_header(self, req): project_id = req.headers.get('X-Project-Id') if not project_id: accept_header = req.headers.get('Accept') if not accept_header: req.headers['Accept'] = 'text/plain' raise webob.exc.HTTPBadRequest(detail=u._('Missing X-Project-Id')) return project_id def process_request(self, req): """Create a context without an authorized user.""" super(UnauthenticatedContextMiddleware, self).process_request(req) project_id = self._get_project_id_from_header(req) config_admin_role = CONF.admin_role.strip().lower() roles_header = req.headers.get('X-Roles', '') roles = [r.strip().lower() for r in roles_header.split(',') if r] # If a role wasn't specified we default to admin if not roles: roles = [config_admin_role] kwargs = { 'user': req.headers.get('X-User-Id'), 'domain': req.headers.get('X-Domain-Id'), 'user_domain': req.headers.get('X-User-Domain-Id'), 'project_domain': req.headers.get('X-Project-Domain-Id'), 'project_id': project_id, 'roles': roles, 'is_admin': config_admin_role in roles, 'request_id': req.request_id } context = barbican.context.RequestContext(**kwargs) req.environ['barbican.context'] = context barbican-9.1.0.dev50/barbican/api/middleware/__init__.py0000664000175000017500000000564213616500636023165 0ustar sahidsahid00000000000000# Copyright (c) 2013-2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ Barbican middleware modules. """ import sys import webob.dec from barbican.common import utils LOG = utils.getLogger(__name__) class Middleware(object): """Base WSGI middleware wrapper These classes require an application to be initialized that will be called next. By default the middleware will simply call its wrapped app, or you can override __call__ to customize its behavior. """ def __init__(self, application): self.application = application @classmethod def factory(cls, global_conf, **local_conf): def filter(app): return cls(app) return filter def process_request(self, req): """Called on each request. If this returns None, the next application down the stack will be executed. If it returns a response then that response will be returned and execution will stop here. 
""" return None def process_response(self, response): """Do whatever you'd like to the response.""" return response @webob.dec.wsgify def __call__(self, req): response = self.process_request(req) if response: return response response = req.get_response(self.application) response.request = req return self.process_response(response) # Brought over from an OpenStack project class Debug(Middleware): """Debug helper class This class can be inserted into any WSGI application chain to get information about the request and response. """ @webob.dec.wsgify def __call__(self, req): LOG.debug(("*" * 40) + " REQUEST ENVIRON") for key, value in req.environ.items(): LOG.debug('%s=%s', key, value) LOG.debug(' ') resp = req.get_response(self.application) LOG.debug(("*" * 40) + " RESPONSE HEADERS") for (key, value) in resp.headers.items(): LOG.debug('%s=%s', key, value) LOG.debug(' ') resp.app_iter = self.print_generator(resp.app_iter) return resp @staticmethod def print_generator(app_iter): """Iterator that prints the contents of a wrapper string iterator.""" LOG.debug(("*" * 40) + " BODY") for part in app_iter: sys.stdout.write(part) sys.stdout.flush() yield part LOG.debug(' ') barbican-9.1.0.dev50/barbican/api/middleware/simple.py0000664000175000017500000000211213616500636022704 0ustar sahidsahid00000000000000# Copyright 2011 OpenStack LLC. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ A filter middleware that just outputs to logs, for instructive/sample purposes only. """ from barbican.api import middleware from barbican.common import utils LOG = utils.getLogger(__name__) class SimpleFilter(middleware.Middleware): def __init__(self, app): super(SimpleFilter, self).__init__(app) def process_request(self, req): """Just announce we have been called.""" LOG.debug("Calling SimpleFilter") return None barbican-9.1.0.dev50/barbican/i18n.py0000664000175000017500000000144013616500636017307 0ustar sahidsahid00000000000000# Copyright 2010-2011 OpenStack LLC. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import oslo_i18n as i18n _translators = i18n.TranslatorFactory(domain='barbican') # The translation function using the well-known name "_" _ = _translators.primary barbican-9.1.0.dev50/barbican/tasks/0000775000175000017500000000000013616500640017277 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/tasks/common.py0000664000175000017500000000576313616500636021161 0ustar sahidsahid00000000000000# Copyright (c) 2015 Rackspace, Inc. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ Tasking related information that is shared/common across modules. """ from barbican import i18n as u RETRY_MSEC_DEFAULT = 60 * 1000 class RetryTasks(object): """Defines tasks that can be retried/scheduled. RPC tasks invoked from the queue are handled via methods on barbican.queue.server.Tasks. These calls in turn delegate to the 'process()' method of BaseTask sub-classes. These calls in turn delegate to business logic and plugins via modules in this barbican.tasks package. This class defines a common mechanism for the business logic and plugins to indicate what RPC tasks need to be retried in a way that the Tasks class can interpret as high level RPC tasks to enqueue later. In particular the following generic options are available: INVOKE_SAME_TASK - Invoke this same task later NO_ACTION_REQUIRED - To retry/scheduling actions are required The following task/context-specific actions are available: INVOKE_CERT_STATUS_CHECK_TASK - Check certificate status later """ INVOKE_SAME_TASK = "Invoke Same Task Again Later" NO_ACTION_REQUIRED = "No Retry/Schedule Actions Are Needed" INVOKE_CERT_STATUS_CHECK_TASK = "Check Certificate Status Later" class FollowOnProcessingStatusDTO(object): """Follow On Processing status data transfer object (DTO). An object of this type is optionally returned by the BaseTask.handle_processing() method defined below, and is used to guide follow on processing and to provide status feedback to clients. """ def __init__( self, status=u._('Unknown'), status_message=u._('Unknown'), retry_task=RetryTasks.NO_ACTION_REQUIRED, retry_msec=RETRY_MSEC_DEFAULT ): """Creates a new FollowOnProcessingStatusDTO. :param status: Status for cert order :param status_message: Message to explain status type. :param retry_msec: Number of milliseconds to wait for retry :param retry_task: Task to retry, one of :class:`RetryTasks` """ self.status = status self.status_message = status_message self.retry_task = retry_task if not retry_msec: self.retry_msec = 0 else: self.retry_msec = max(int(retry_msec), 0) def is_follow_on_needed(self): if self.retry_task: return RetryTasks.NO_ACTION_REQUIRED != self.retry_task else: return False barbican-9.1.0.dev50/barbican/tasks/certificate_resources.py0000664000175000017500000005401713616500636024241 0ustar sahidsahid00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
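# An illustrative use of the retry contract defined in tasks/common.py
# above; schedule_retry() is a hypothetical stand-in for the re-enqueueing
# that barbican.queue.server.Tasks performs, not a Barbican API.
from barbican.tasks import common

def schedule_retry(task, delay_msec):
    """Hypothetical helper standing in for the queue layer."""
    print('would retry %r in %d msec' % (task, delay_msec))

result = common.FollowOnProcessingStatusDTO(
    status='PENDING',
    status_message='CA has not issued the certificate yet',
    retry_task=common.RetryTasks.INVOKE_CERT_STATUS_CHECK_TASK,
    retry_msec=5 * 60 * 1000)       # ask to be re-checked in five minutes

if result.is_follow_on_needed():
    schedule_retry(result.retry_task, delay_msec=result.retry_msec)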
from ldap3.utils.dn import parse_dn from OpenSSL import crypto from barbican.common import exception as excep from barbican.common import hrefs from barbican.common import resources as res import barbican.common.utils as utils from barbican.model import models from barbican.model import repositories as repos from barbican.plugin.interface import certificate_manager as cert from barbican.plugin import resources as plugin from barbican.tasks import common LOG = utils.getLogger(__name__) # Order sub-status definitions ORDER_STATUS_REQUEST_PENDING = models.OrderStatus( "cert_request_pending", "Request has been submitted to the CA. " "Waiting for certificate to be generated" ) ORDER_STATUS_CERT_GENERATED = models.OrderStatus( "cert_generated", "Certificate has been generated" ) ORDER_STATUS_DATA_INVALID = models.OrderStatus( "cert_data_invalid", "CA rejected request data as invalid" ) ORDER_STATUS_CA_UNAVAIL_FOR_ISSUE = models.OrderStatus( "cert_ca_unavail_for_issue", "Unable to submit certificate request. CA unavailable" ) ORDER_STATUS_INVALID_OPERATION = models.OrderStatus( "cert_invalid_operation", "CA returned invalid operation" ) ORDER_STATUS_INTERNAL_ERROR = models.OrderStatus( "cert_internal_error", "Internal error during certificate operations" ) ORDER_STATUS_CA_UNAVAIL_FOR_CHECK = models.OrderStatus( "cert_ca_unavail_for_status_check", "Unable to get certificate request status. CA unavailable." ) def refresh_certificate_resources(): # Before CA operations can be performed, the CA table must be populated cert.CertificatePluginManager().refresh_ca_table() def issue_certificate_request(order_model, project_model, result_follow_on): """Create the initial order with CA. Note that this method may be called more than once if retries are required. Barbican metadata is used to store intermediate information, including selected plugins by name, to support such retries. :param: order_model - order associated with this cert request :param: project_model - project associated with this request :param: result_follow_on - A :class:`FollowOnProcessingStatusDTO` instance instantiated by the client that this function may optionally update with information on how to process this task into the future. :returns: container_model - container with the relevant cert if the request has been completed. None otherwise """ plugin_meta = _get_plugin_meta(order_model) barbican_meta = _get_barbican_meta(order_model) # TODO(john-wood-w) We need to de-conflict barbican_meta (stored with order # and not shown to plugins) with barbican_meta_dto (shared with plugins). # As a minimum we should change the name of the DTO to something like # 'extended_meta_dto' or some such. barbican_meta_for_plugins_dto = cert.BarbicanMetaDTO() # refresh the CA table. This is mostly a no-op unless the entries # for a plugin are expired. cert.CertificatePluginManager().refresh_ca_table() cert_plugin = _get_cert_plugin(barbican_meta, barbican_meta_for_plugins_dto, order_model, project_model) barbican_meta['plugin_name'] = utils.generate_fullname_for(cert_plugin) # Generate CSR if needed. request_type = order_model.meta.get(cert.REQUEST_TYPE) if request_type == cert.CertificateRequestType.STORED_KEY_REQUEST: csr = barbican_meta.get('generated_csr') if csr is None: # TODO(alee) Fix this to be a non-project specific call once # the ACL patches go in. 
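            # NOTE: the generated CSR is cached in barbican_meta under
            # 'generated_csr' (and persisted below via
            # _save_barbican_metadata), so a retried invocation of this task
            # re-submits the same CSR instead of signing a new one.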
csr = _generate_csr_from_private_key(order_model, project_model) barbican_meta['generated_csr'] = csr barbican_meta_for_plugins_dto.generated_csr = csr result = cert_plugin.issue_certificate_request( order_model.id, order_model.meta, plugin_meta, barbican_meta_for_plugins_dto) # Save plugin and barbican metadata for this order. _save_plugin_metadata(order_model, plugin_meta) _save_barbican_metadata(order_model, barbican_meta) # Handle result return _handle_task_result( result, result_follow_on, order_model, project_model, request_type, unavailable_status=ORDER_STATUS_CA_UNAVAIL_FOR_ISSUE) def _get_cert_plugin(barbican_meta, barbican_meta_for_plugins_dto, order_model, project_model): cert_plugin_name = barbican_meta.get('plugin_name') if cert_plugin_name: return cert.CertificatePluginManager().get_plugin_by_name( cert_plugin_name) ca_id = _get_ca_id(order_model.meta, project_model.id) if ca_id: ca = repos.get_ca_repository().get(ca_id) barbican_meta_for_plugins_dto.plugin_ca_id = ca.plugin_ca_id return cert.CertificatePluginManager().get_plugin_by_name( ca.plugin_name) else: return cert.CertificatePluginManager().get_plugin(order_model.meta) def check_certificate_request(order_model, project_model, result_follow_on): """Check the status of a certificate request with the CA. Note that this method may be called more than once if retries are required. Barbican metadata is used to store intermediate information, including selected plugins by name, to support such retries. :param: order_model - order associated with this cert request :param: project_model - project associated with this request :param: result_follow_on - A :class:`FollowOnProcessingStatusDTO` instance instantiated by the client that this function may optionally update with information on how to process this task into the future. :returns: container_model - container with the relevant cert if the request has been completed. None otherwise. """ plugin_meta = _get_plugin_meta(order_model) barbican_meta = _get_barbican_meta(order_model) # TODO(john-wood-w) See note above about DTO's name. 
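    # Route the status check back to the plugin that issued the original
    # request: issue_certificate_request() saved its full name into the
    # order's barbican metadata under 'plugin_name'.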
barbican_meta_for_plugins_dto = cert.BarbicanMetaDTO() cert_plugin = cert.CertificatePluginManager().get_plugin_by_name( barbican_meta.get('plugin_name')) result = cert_plugin.check_certificate_status( order_model.id, order_model.meta, plugin_meta, barbican_meta_for_plugins_dto) # Save plugin order plugin state _save_plugin_metadata(order_model, plugin_meta) request_type = order_model.meta.get(cert.REQUEST_TYPE) return _handle_task_result( result, result_follow_on, order_model, project_model, request_type, unavailable_status=ORDER_STATUS_CA_UNAVAIL_FOR_CHECK) def create_subordinate_ca(project_model, name, description, subject_dn, parent_ca_ref, creator_id): """Create a subordinate CA :param name - name of the subordinate CA :param: description - description of the subordinate CA :param: subject_dn - subject DN of the subordinate CA :param: parent_ca_ref - Barbican URL reference to the parent CA :param: creator_id - id for creator of the subordinate CA :return: :class models.CertificateAuthority model object for new sub CA """ # check that the parent ref exists and is accessible parent_ca_id = hrefs.get_ca_id_from_ref(parent_ca_ref) ca_repo = repos.get_ca_repository() parent_ca = ca_repo.get(entity_id=parent_ca_id, suppress_exception=True) if not parent_ca: raise excep.InvalidParentCA(parent_ca_ref=parent_ca_ref) # Parent CA must be a base CA or a subCA owned by this project if (parent_ca.project_id is not None and parent_ca.project_id != project_model.id): raise excep.UnauthorizedSubCA() # get the parent plugin, raises CertPluginNotFound if missing cert_plugin = cert.CertificatePluginManager().get_plugin_by_name( parent_ca.plugin_name) # confirm that the plugin supports creating subordinate CAs if not cert_plugin.supports_create_ca(): raise excep.SubCAsNotSupported() # make call to create the subordinate ca create_ca_dto = cert.CACreateDTO( name=name, description=description, subject_dn=subject_dn, parent_ca_id=parent_ca.plugin_ca_id) new_ca_dict = cert_plugin.create_ca(create_ca_dto) if not new_ca_dict: raise excep.SubCANotCreated(name=name) # create and store the subordinate CA as a new certificate authority object new_ca_dict['plugin_name'] = parent_ca.plugin_name new_ca_dict['creator_id'] = creator_id new_ca_dict['project_id'] = project_model.id new_ca = models.CertificateAuthority(new_ca_dict) ca_repo.create_from(new_ca) return new_ca def delete_subordinate_ca(external_project_id, ca): """Deletes a subordinate CA and any related artifacts :param external_project_id: external project ID :param ca: class:`models.CertificateAuthority` to be deleted :return: None """ # TODO(alee) See if the checks below can be moved to the RBAC code # Check that this CA is a subCA if ca.project_id is None: raise excep.CannotDeleteBaseCA() # Check that the user's project owns this subCA project = res.get_or_create_project(external_project_id) if ca.project_id != project.id: raise excep.UnauthorizedSubCA() project_ca_repo = repos.get_project_ca_repository() (project_cas, _, _, _) = project_ca_repo.get_by_create_date( project_id=project.id, ca_id=ca.id, suppress_exception=True) preferred_ca_repo = repos.get_preferred_ca_repository() (preferred_cas, _, _, _) = preferred_ca_repo.get_by_create_date( project_id=project.id, ca_id=ca.id, suppress_exception=True) # Can not delete a project preferred CA, if other project CAs exist. One # of those needs to be designated as the preferred CA first. 
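    # In other words, the preferred CA may only be deleted once it is the
    # project's last remaining project CA (see is_last_project_ca() below).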
if project_cas and preferred_cas and not is_last_project_ca(project.id): raise excep.CannotDeletePreferredCA() # Remove the CA as preferred if preferred_cas: preferred_ca_repo.delete_entity_by_id(preferred_cas[0].id, external_project_id) # Remove the CA from project list if project_cas: project_ca_repo.delete_entity_by_id(project_cas[0].id, external_project_id) # Delete the CA entry from plugin cert_plugin = cert.CertificatePluginManager().get_plugin_by_name( ca.plugin_name) cert_plugin.delete_ca(ca.plugin_ca_id) # Finally, delete the CA entity from the CA repository ca_repo = repos.get_ca_repository() ca_repo.delete_entity_by_id( entity_id=ca.id, external_project_id=external_project_id) def is_last_project_ca(project_id): """Returns True iff project has exactly one project CA :param project_id: internal project ID :return: Boolean """ project_ca_repo = repos.get_project_ca_repository() _, _, _, total = project_ca_repo.get_by_create_date( project_id=project_id, suppress_exception=True ) return total == 1 def _handle_task_result(result, result_follow_on, order_model, project_model, request_type, unavailable_status): if cert.CertificateStatus.WAITING_FOR_CA == result.status: _update_result_follow_on( result_follow_on, order_status=ORDER_STATUS_REQUEST_PENDING, retry_task=common.RetryTasks.INVOKE_CERT_STATUS_CHECK_TASK, retry_msec=result.retry_msec) elif cert.CertificateStatus.CERTIFICATE_GENERATED == result.status: _update_result_follow_on( result_follow_on, order_status=ORDER_STATUS_CERT_GENERATED) container_model = _save_secrets(result, project_model, request_type, order_model) return container_model elif cert.CertificateStatus.CLIENT_DATA_ISSUE_SEEN == result.status: raise cert.CertificateStatusClientDataIssue(result.status_message) elif cert.CertificateStatus.CA_UNAVAILABLE_FOR_REQUEST == result.status: _update_result_follow_on( result_follow_on, order_status=unavailable_status, retry_task=common.RetryTasks.INVOKE_SAME_TASK, retry_msec=cert.ERROR_RETRY_MSEC) _notify_ca_unavailable(order_model, result) elif cert.CertificateStatus.INVALID_OPERATION == result.status: raise cert.CertificateStatusInvalidOperation(result.status_message) else: raise cert.CertificateStatusNotSupported(result.status) return None def _add_private_key_to_generated_cert_container(container_id, order_model, project_model): keypair_container_id, keypair_container = _get_container_from_order_meta( order_model, project_model) private_key_id = None for cs in keypair_container.container_secrets: if cs.name == 'private_key': private_key_id = cs.secret_id new_consec_assoc = models.ContainerSecret() new_consec_assoc.name = 'private_key' new_consec_assoc.container_id = container_id new_consec_assoc.secret_id = private_key_id container_secret_repo = repos.get_container_secret_repository() container_secret_repo.create_from(new_consec_assoc) def modify_certificate_request(order_model, updated_meta): """Update the order with CA.""" # TODO(chellygel): Add the modify certificate request logic. 
LOG.debug('in modify_certificate_request') raise NotImplementedError # pragma: no cover def get_global_preferred_ca(): project = res.get_or_create_global_preferred_project() preferred_ca_repository = repos.get_preferred_ca_repository() cas = preferred_ca_repository.get_project_entities(project.id) if not cas: return None else: return cas[0] def get_project_preferred_ca_id(project_id): """Compute the preferred CA ID for a project First priority: a preferred CA is defined for the project Second priority: a preferred CA is defined globally Else: None """ preferred_ca_repository = repos.get_preferred_ca_repository() cas, offset, limit, total = preferred_ca_repository.get_by_create_date( project_id=project_id, suppress_exception=True) if total > 0: return cas[0].ca_id global_ca = get_global_preferred_ca() if global_ca: return global_ca.ca_id def _get_ca_id(order_meta, project_id): ca_id = order_meta.get(cert.CA_ID) if ca_id: return ca_id return get_project_preferred_ca_id(project_id) def _update_result_follow_on( result_follow_on, order_status=None, retry_task=common.RetryTasks.NO_ACTION_REQUIRED, retry_msec=common.RETRY_MSEC_DEFAULT): if order_status: result_follow_on.status = order_status.id result_follow_on.status_message = order_status.message result_follow_on.retry_task = retry_task if retry_msec and retry_msec >= 0: result_follow_on.retry_msec = retry_msec def _get_plugin_meta(order_model): if order_model: order_plugin_meta_repo = repos.get_order_plugin_meta_repository() return order_plugin_meta_repo.get_metadata_for_order(order_model.id) else: return {} def _get_barbican_meta(order_model): if order_model: order_barbican_meta_repo = repos.get_order_barbican_meta_repository() return order_barbican_meta_repo.get_metadata_for_order(order_model.id) else: return {} def _generate_csr_from_private_key(order_model, project_model): """Generate a CSR from the private key. :param: order_model - order for the request :param: project_model - project for this request :return: CSR (certificate signing request) in PEM format :raise: :class:`StoredKeyPrivateKeyNotFound` if private key not found :class:`StoredKeyContainerNotFound` if container not found """ container_id, container = _get_container_from_order_meta(order_model, project_model) if not container: raise excep.StoredKeyContainerNotFound(container_id) passphrase = None private_key = None for cs in container.container_secrets: secret_repo = repos.get_secret_repository() if cs.name == 'private_key': private_key_model = secret_repo.get( cs.secret_id, project_model.external_id) private_key = plugin.get_secret( 'application/pkcs8', private_key_model, project_model) elif cs.name == 'private_key_passphrase': passphrase_model = secret_repo.get( cs.secret_id, project_model.external_id) passphrase = plugin.get_secret( 'text/plain;charset=utf-8', passphrase_model, project_model) passphrase = str(passphrase) if not private_key: raise excep.StoredKeyPrivateKeyNotFound(container.id) if passphrase is None: pkey = crypto.load_privatekey( crypto.FILETYPE_PEM, private_key ) else: pkey = crypto.load_privatekey( crypto.FILETYPE_PEM, private_key, passphrase.encode('utf-8') ) subject_name = order_model.meta.get('subject_dn') subject_name_dns = parse_dn(subject_name) extensions = order_model.meta.get('extensions', None) req = crypto.X509Req() subj = req.get_subject() # Note: must iterate over the DNs in reverse order, or the resulting # subject name will be reversed. 
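    # Illustrative example (hypothetical input): for
    #     subject_dn = "CN=server.example.com, O=Example, C=US"
    # ldap3's parse_dn() yields (type, value, separator) triples:
    #     [('CN', 'server.example.com', ','),
    #      ('O', 'Example', ','),
    #      ('C', 'US', '')]
    # so applying them in reverse sets C, then O, then CN, and the X509
    # subject renders in the original order.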
for ava in reversed(subject_name_dns): key, val, extra = ava setattr(subj, key.upper(), val) req.set_pubkey(pkey) if extensions: # TODO(alee-3) We need code here to parse the encoded extensions and # convert them into X509Extension objects. This code will also be # used in the validation code. Commenting out for now till we figure # out how to do this. # req.add_extensions(extensions) pass req.sign(pkey, 'sha256') csr = crypto.dump_certificate_request(crypto.FILETYPE_PEM, req) return csr def _get_container_from_order_meta(order_model, project_model): container_ref = order_model.meta.get('container_ref') # extract container_id as the last part of the URL container_id = hrefs.get_container_id_from_ref(container_ref) container_repo = repos.get_container_repository() container = container_repo.get(container_id, project_model.external_id, suppress_exception=True) return container_id, container def _notify_ca_unavailable(order_model, result): """Notify observer(s) that the CA was unavailable at this time.""" cert.get_event_plugin_manager().notify_ca_is_unavailable( order_model.project_id, hrefs.convert_order_to_href(order_model.id), result.status_message, result.retry_msec) def _save_plugin_metadata(order_model, plugin_meta): """Add plugin metadata to an order.""" if not isinstance(plugin_meta, dict): plugin_meta = {} order_plugin_meta_repo = repos.get_order_plugin_meta_repository() order_plugin_meta_repo.save(plugin_meta, order_model) def _save_barbican_metadata(order_model, barbican_meta): """Add barbican metadata to an order.""" if not isinstance(barbican_meta, dict): barbican_meta = {} order_barbican_meta_repo = repos.get_order_barbican_meta_repository() order_barbican_meta_repo.save(barbican_meta, order_model) def _save_secrets(result, project_model, request_type, order_model): cert_secret_model, transport_key_model = plugin.store_secret( unencrypted_raw=result.certificate, content_type_raw='application/octet-stream', content_encoding='base64', secret_model=models.Secret(), project_model=project_model) # save the certificate chain as a secret. 
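    # When present, the chain is stored just like the certificate above: a
    # base64-encoded 'application/octet-stream' secret that is linked into
    # the 'certificate' typed container created below.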
if result.intermediates: intermediates_secret_model, transport_key_model = plugin.store_secret( unencrypted_raw=result.intermediates, content_type_raw='application/octet-stream', content_encoding='base64', secret_model=models.Secret(), project_model=project_model ) else: intermediates_secret_model = None container_model = models.Container() container_model.type = "certificate" container_model.status = models.States.ACTIVE container_model.project_id = project_model.id container_repo = repos.get_container_repository() container_repo.create_from(container_model) # create container_secret for certificate new_consec_assoc = models.ContainerSecret() new_consec_assoc.name = 'certificate' new_consec_assoc.container_id = container_model.id new_consec_assoc.secret_id = cert_secret_model.id container_secret_repo = repos.get_container_secret_repository() container_secret_repo.create_from(new_consec_assoc) if intermediates_secret_model: # create container_secret for intermediate certs new_consec_assoc = models.ContainerSecret() new_consec_assoc.name = 'intermediates' new_consec_assoc.container_id = container_model.id new_consec_assoc.secret_id = intermediates_secret_model.id container_secret_repo.create_from(new_consec_assoc) if request_type == cert.CertificateRequestType.STORED_KEY_REQUEST: _add_private_key_to_generated_cert_container(container_model.id, order_model, project_model) return container_model barbican-9.1.0.dev50/barbican/tasks/keystone_consumer.py0000664000175000017500000001176513616500636023444 0ustar sahidsahid00000000000000# Copyright (c) 2014 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ Server-side Keystone notification payload processing logic. """ from barbican.common import utils from barbican import i18n as u from barbican.model import repositories as rep from barbican.tasks import resources LOG = utils.getLogger(__name__) class KeystoneEventConsumer(resources.BaseTask): """Event consumer listening for notifications sent by Keystone deployment. Currently this processes only Keystone project delete event. """ def get_name(self): return u._('Project cleanup via Keystone notifications') def __init__(self, db_start=rep.start, db_commit=rep.commit, db_rollback=rep.rollback, db_clear=rep.clear): LOG.debug('Creating KeystoneEventConsumer task processor') self.db_start = db_start self.db_commit = db_commit self.db_rollback = db_rollback self.db_clear = db_clear def process(self, *args, **kwargs): try: self.db_start() super(KeystoneEventConsumer, self).process(*args, **kwargs) self.db_commit() except Exception: """Exceptions that reach here needs to revert the entire transaction. No need to log error message as its already done earlier. 
""" self.db_rollback() raise finally: self.db_clear() def retrieve_entity(self, project_id, resource_type=None, operation_type=None): project_repo = rep.get_project_repository() return project_repo.find_by_external_project_id( external_project_id=project_id, suppress_exception=True) def handle_processing(self, barbican_project, *args, **kwargs): self.handle_cleanup(barbican_project, *args, **kwargs) def handle_error(self, project, status, message, exception, project_id=None, resource_type=None, operation_type=None): LOG.error( 'Error processing Keystone event, project_id=%(project_id)s, ' 'event resource=%(resource)s, event operation=%(operation)s, ' 'status=%(status)s, error message=%(message)s', { 'project_id': project.project_id, 'resource': resource_type, 'operation': operation_type, 'status': status, 'message': message }) def handle_success(self, project, result, project_id=None, resource_type=None, operation_type=None): # Note: The processing 'result' argument can be ignored as 'result' # only pertains to long-running tasks. See the documentation for # BaseTask for more details. LOG.info( 'Successfully handled Keystone event, ' 'project_id=%(project_id)s, event resource=%(resource)s, ' 'event operation=%(operation)s', { 'project_id': project_id, 'resource': resource_type, 'operation': operation_type } ) def handle_cleanup(self, project, project_id=None, resource_type=None, operation_type=None): """Cleans up Barbican resources needed for Keystone project delete. :param project: Barbican project entity which is retrieved by project id available in Keystone notification. :param project_id: project identifier as present in Keystone notification. :param resource_type: type of resource updated as part of Keystone notification e.g. Keystone project, domain, user etc. :param operation_type: type of operation (created, updated, deleted etc.) performed on Keystone resource. """ if project is None: LOG.info('No action is needed as there are no Barbican resources ' 'present for Keystone project_id=%s', project_id) return # barbican entities use projects table 'id' field as foreign key. # Delete apis are using that id to lookup related entities and not # keystone project id which requires additional project table join. project_id = project.id rep.delete_all_project_resources(project_id) # reached here means there is no error so log the successful # cleanup log entry. LOG.info('Successfully completed Barbican resources cleanup for ' 'Keystone project_id=%s', project_id) barbican-9.1.0.dev50/barbican/tasks/__init__.py0000664000175000017500000000000013616500636021403 0ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/tasks/resources.py0000664000175000017500000003714513616500636021702 0ustar sahidsahid00000000000000# Copyright (c) 2013-2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ Task resources for the Barbican API. 
""" import abc import six from barbican import api from barbican.common import utils from barbican import i18n as u from barbican.model import models from barbican.model import repositories as rep from barbican.plugin import resources as plugin from barbican.tasks import certificate_resources as cert from barbican.tasks import common LOG = utils.getLogger(__name__) @six.add_metaclass(abc.ABCMeta) class BaseTask(object): """Base asynchronous task.""" @abc.abstractmethod def get_name(self): """Localized task name A hook method to return a short localized name for this task. The returned name in the form 'u.('Verb Noun')'. For example: u._('Create Secret') """ def process_and_suppress_exceptions(self, *args, **kwargs): """Invokes the process() template method, suppressing all exceptions. TODO(john-wood-w) This method suppresses exceptions for flows that do not want to rollback database modifications in reaction to such exceptions, as this could also rollback the marking of the entity (eg. order) in the ERROR status via the handle_error() call below. For Liberty, we might want to consider a workflow manager instead of these process_xxxx() method as shown here: https://gist.github.com/jfwood/a8130265b0db3c793ec8 :param args: List of arguments passed in from the client. :param kwargs: Dict of arguments passed in from the client. :return: Returns :class:`FollowOnProcessingStatusDTO` if follow-on processing (such as retrying this or another task) is required, otherwise a None return indicates that no follow-on processing is required. """ try: return self.process(*args, **kwargs) except Exception: LOG.exception("Suppressing exception while trying to " "process task '%s'.", self.get_name()) def process(self, *args, **kwargs): """A template method for all asynchronous tasks. This method should not be overridden by sub-classes. Rather the abstract methods below should be overridden. :param args: List of arguments passed in from the client. :param kwargs: Dict of arguments passed in from the client. :return: Returns :class:`FollowOnProcessingStatusDTO` if follow-on processing (such as retrying this or another task) is required, otherwise a None return indicates that no follow-on processing is required. """ name = self.get_name() result = None # Retrieve the target entity (such as an models.Order instance). try: entity = self.retrieve_entity(*args, **kwargs) except Exception: # Serious error! LOG.exception("Could not retrieve information needed to " "process task '%s'.", name) raise # Process the target entity. try: result = self.handle_processing(entity, *args, **kwargs) except Exception as e_orig: LOG.exception("Could not perform processing for task '%s'.", name) # Handle failure to process entity. try: status, message = api.generate_safe_exception_message(name, e_orig) self.handle_error(entity, status, message, e_orig, *args, **kwargs) except Exception: LOG.exception("Problem handling an error for task '%s', " "raising original exception.", name) raise e_orig # Handle successful conclusion of processing. try: self.handle_success(entity, result, *args, **kwargs) except Exception: LOG.exception("Could not process after successfully " "executing task '%s'.", name) raise return result @abc.abstractmethod def retrieve_entity(self, *args, **kwargs): """A hook method to retrieve an entity for processing. :param args: List of arguments passed in from the client. :param kwargs: Dict of arguments passed in from the client. :return: Entity instance to process in subsequent hook methods. 
""" @abc.abstractmethod def handle_processing(self, entity, *args, **kwargs): """A hook method to handle processing on behalf of an entity. :param args: List of arguments passed in from the client. :param kwargs: Dict of arguments passed in from the client. :return: None if no follow on processing is needed for this task, otherwise a :class:`FollowOnProcessingStatusDTO` instance with information on how to process this task into the future. """ @abc.abstractmethod def handle_error(self, entity, status, message, exception, *args, **kwargs): """A hook method to deal with errors seen during processing. This method could be used to mark entity as being in error, and/or to record an error cause. :param entity: Entity retrieved from _retrieve_entity() above. :param status: Status code for exception. :param message: Reason/message for the exception. :param exception: Exception raised from handle_processing() above. :param args: List of arguments passed in from the client. :param kwargs: Dict of arguments passed in from the client. :return: None """ @abc.abstractmethod def handle_success(self, entity, result, *args, **kwargs): """A hook method to post-process after successful entity processing. This method could be used to mark entity as being active, or to add information/references to the entity. :param entity: Entity retrieved from _retrieve_entity() above. :param result: A :class:`FollowOnProcessingStatusDTO` instance representing processing result status, None implies that no follow on processing is required. :param args: List of arguments passed in from the client. :param kwargs: Dict of arguments passed in from the client. :return: None """ class _OrderTaskHelper(object): """Supports order-related BaseTask operations. BaseTask sub-classes can delegate to an instance of this class to perform common order-related operations. """ def __init__(self): self.order_repo = rep.get_order_repository() def retrieve_entity(self, order_id, external_project_id, *args, **kwargs): """Retrieve an order entity by its PK ID.""" return self.order_repo.get( entity_id=order_id, external_project_id=external_project_id) def handle_error(self, order, status, message, exception, *args, **kwargs): """Stamp the order entity as terminated due to an error.""" order.status = models.States.ERROR order.error_status_code = status order.set_error_reason_safely(message) self.order_repo.save(order) def handle_success(self, order, result, *args, **kwargs): """Handle if the order entity is terminated or else long running. The 'result' argument (if present) indicates if a order should now be terminated due to it being completed, or else should be held in the PENDING state due to follow on workflow processing. If 'result' is not provided, the order is presumed completed. 
""" is_follow_on_needed = False sub_status = None sub_status_message = None if result: is_follow_on_needed = result.is_follow_on_needed() sub_status = result.status sub_status_message = result.status_message if not is_follow_on_needed: order.status = models.States.ACTIVE else: order.status = models.States.PENDING if sub_status: order.set_sub_status_safely(sub_status) if sub_status_message: order.set_sub_status_message_safely(sub_status_message) self.order_repo.save(order) class BeginTypeOrder(BaseTask): """Handles beginning processing of a TypeOrder.""" def get_name(self): return u._('Process TypeOrder') def __init__(self): super(BeginTypeOrder, self).__init__() LOG.debug('Creating BeginTypeOrder task processor') self.project_repo = rep.get_project_repository() self.helper = _OrderTaskHelper() def retrieve_entity(self, *args, **kwargs): return self.helper.retrieve_entity(*args, **kwargs) def handle_processing(self, order, *args, **kwargs): return self.handle_order(order) def handle_order(self, order): """Handle secret creation using meta info. If type is key create secret if type is asymmetric create secrets create containers if type is certificate TBD :param order: Order to process. :return: None if no follow on processing is needed for this task, otherwise a :class:`FollowOnProcessingStatusDTO` instance with information on how to process this task into the future. """ result_follow_on = common.FollowOnProcessingStatusDTO() order_info = order.to_dict_fields() order_type = order_info.get('type') meta_info = order_info.get('meta') if order_info.get('creator_id'): meta_info.setdefault('creator_id', order_info.get('creator_id')) # Retrieve the project. project = self.project_repo.get(order.project_id) if order_type == models.OrderType.KEY: # Create Secret new_secret = plugin.generate_secret( meta_info, meta_info.get('payload_content_type', 'application/octet-stream'), project ) order.secret_id = new_secret.id LOG.debug("...done creating keys order's secret.") elif order_type == models.OrderType.ASYMMETRIC: # Create asymmetric Secret new_container = plugin.generate_asymmetric_secret( meta_info, meta_info.get('payload_content_type', 'application/octet-stream'), project) order.container_id = new_container.id LOG.debug("...done creating asymmetric order's secret.") elif order_type == models.OrderType.CERTIFICATE: # Request a certificate new_container = cert.issue_certificate_request( order, project, result_follow_on) if new_container: order.container_id = new_container.id LOG.debug("...done requesting a certificate.") else: raise NotImplementedError( u._('Order type "{order_type}" not implemented.').format( order_type=order_type)) return result_follow_on def handle_error(self, order, status, message, exception, *args, **kwargs): self.helper.handle_error( order, status, message, exception, *args, **kwargs) def handle_success(self, order, result, *args, **kwargs): self.helper.handle_success( order, result, *args, **kwargs) class UpdateOrder(BaseTask): """Handles updating an order.""" def get_name(self): return u._('Update Order') def __init__(self): super(UpdateOrder, self).__init__() LOG.debug('Creating UpdateOrder task processor') self.helper = _OrderTaskHelper() def retrieve_entity(self, *args, **kwargs): return self.helper.retrieve_entity(*args, **kwargs) def handle_processing( self, order, order_id, external_project_id, updated_meta): self.handle_order(order, updated_meta) def handle_order(self, order, updated_meta): """Handle Order Update :param order: Order to update. 
""" order_info = order.to_dict_fields() order_type = order_info.get('type') if order_type == models.OrderType.CERTIFICATE: # Update a certificate request cert.modify_certificate_request(order, updated_meta) LOG.debug("...done updating a certificate order.") else: raise NotImplementedError( u._('Order type "{order_type}" not implemented.').format( order_type=order_type)) LOG.debug("...done updating order.") def handle_error(self, order, status, message, exception, *args, **kwargs): self.helper.handle_error( order, status, message, exception, *args, **kwargs) def handle_success(self, order, result, *args, **kwargs): self.helper.handle_success( order, result, *args, **kwargs) class CheckCertificateStatusOrder(BaseTask): """Handles checking the status of a certificate order.""" def get_name(self): return u._('Check Certificate Order Status') def __init__(self): LOG.debug('Creating CheckCertificateStatusOrder task processor') self.project_repo = rep.get_project_repository() self.helper = _OrderTaskHelper() def retrieve_entity(self, *args, **kwargs): return self.helper.retrieve_entity(*args, **kwargs) def handle_processing(self, order, *args, **kwargs): return self.handle_order(order) def handle_order(self, order): """Handle checking the status of a certificate order. :param order: Order to process. :return: None if no follow on processing is needed for this task, otherwise a :class:`FollowOnProcessingStatusDTO` instance with information on how to process this task into the future. """ result_follow_on = common.FollowOnProcessingStatusDTO() order_info = order.to_dict_fields() order_type = order_info.get('type') # Retrieve the project. project = self.project_repo.get(order.project_id) if order_type != models.OrderType.CERTIFICATE: raise NotImplementedError( u._('Order type "{order_type}" not supported.').format( order_type=order_type)) # Request a certificate new_container = cert.check_certificate_request( order, project, result_follow_on) if new_container: order.container_id = new_container.id LOG.debug("...done checking status of a certificate order.") return result_follow_on def handle_error(self, order, status, message, exception, *args, **kwargs): self.helper.handle_error( order, status, message, exception, *args, **kwargs) def handle_success(self, order, result, *args, **kwargs): self.helper.handle_success( order, result, *args, **kwargs) barbican-9.1.0.dev50/barbican/version.py0000664000175000017500000000135613616500636020223 0ustar sahidsahid00000000000000# Copyright 2010-2011 OpenStack LLC. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import pbr.version version_info = pbr.version.VersionInfo('barbican') __version__ = version_info.release_string() barbican-9.1.0.dev50/barbican/common/0000775000175000017500000000000013616500640017442 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/common/accept.py0000664000175000017500000000224713616500636021265 0ustar sahidsahid00000000000000# Copyright 2018 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from webob import acceptparse if hasattr(acceptparse, 'create_accept_header'): # WebOb >= 1.8.0 NoHeaderType = getattr(acceptparse, 'AcceptNoHeader') ValidHeaderType = getattr(acceptparse, 'AcceptValidHeader') create_accept_header = getattr(acceptparse, 'create_accept_header') else: # WebOb < 1.8.0 NoHeaderType = getattr(acceptparse, 'MIMENilAccept') ValidHeaderType = getattr(acceptparse, 'MIMEAccept') def create_accept_header(header_value): if not header_value: return NoHeaderType() else: return ValidHeaderType(header_value) barbican-9.1.0.dev50/barbican/common/exception.py0000664000175000017500000003235213616500636022024 0ustar sahidsahid00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ Barbican exception subclasses """ from barbican import i18n as u _FATAL_EXCEPTION_FORMAT_ERRORS = False class BarbicanException(Exception): """Base Barbican Exception To correctly use this class, inherit from it and define a 'message' property. That message will get printf'd with the keyword arguments provided to the constructor. """ message = u._("An unknown exception occurred") def __init__(self, message_arg=None, *args, **kwargs): if not message_arg: message_arg = self.message try: self.message = message_arg % kwargs except Exception as e: if _FATAL_EXCEPTION_FORMAT_ERRORS: raise e else: # at least get the core message out if something happened pass super(BarbicanException, self).__init__(self.message) class BarbicanHTTPException(BarbicanException): """Base Barbican Exception to handle HTTP responses To correctly use this class, inherit from it and define the following properties: - message: The message that will be displayed in the server log. - client_message: The message that will actually be outputted to the client. - status_code: The HTTP status code that should be returned. The default status code is 500. 
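
    Example subclass (illustrative only, not an exception Barbican defines):

        class ExampleServiceBusy(BarbicanHTTPException):
            message = u._("Service busy: %(reason)s")
            client_message = u._("Service is temporarily unavailable.")
            status_code = 503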
""" client_message = u._("failure seen - please contact site administrator.") status_code = 500 def __init__(self, message_arg=None, client_message=None, *args, **kwargs): if not client_message: client_message = self.client_message try: self.client_message = client_message % kwargs except Exception as e: if _FATAL_EXCEPTION_FORMAT_ERRORS: raise e else: # at least get the core message out if something happened pass super(BarbicanHTTPException, self).__init__( message_arg, self.client_message, *args, **kwargs) class MissingArgumentError(BarbicanException): message = u._("Missing required argument.") class MissingMetadataField(BarbicanHTTPException): message = u._("Missing required metadata field for %(required)s") client_message = message status_code = 400 class InvalidMetadataRequest(BarbicanHTTPException): message = u._("Invalid Metadata. Keys and Values must be Strings.") client_message = message status_code = 400 class InvalidMetadataKey(BarbicanHTTPException): message = u._("Invalid Key. Key must be URL safe.") client_message = message status_code = 400 class InvalidSubjectDN(BarbicanHTTPException): message = u._("Invalid subject DN: %(subject_dn)s") client_message = message status_code = 400 class InvalidContainer(BarbicanHTTPException): message = u._("Invalid container: %(reason)s") client_message = message status_code = 400 class InvalidExtensionsData(BarbicanHTTPException): message = u._("Invalid extensions data.") client_message = message status_code = 400 class InvalidCMCData(BarbicanHTTPException): message = u._("Invalid CMC Data") client_message = message status_code = 400 class InvalidPKCS10Data(BarbicanHTTPException): message = u._("Invalid PKCS10 Data: %(reason)s") client_message = message status_code = 400 class InvalidCertificateRequestType(BarbicanHTTPException): message = u._("Invalid Certificate Request Type") client_message = message status_code = 400 class CertificateExtensionsNotSupported(BarbicanHTTPException): message = u._("Extensions are not yet supported. " "Specify a valid profile instead.") client_message = message status_code = 400 class FullCMCNotSupported(BarbicanHTTPException): message = u._("Full CMC Requests are not yet supported.") client_message = message status_code = 400 class NotFound(BarbicanException): message = u._("An object with the specified identifier was not found.") class ConstraintCheck(BarbicanException): message = u._("A defined SQL constraint check failed: %(error)s") class NotSupported(BarbicanException): message = u._("Operation is not supported.") class Invalid(BarbicanException): message = u._("Data supplied was not valid.") class NoDataToProcess(BarbicanHTTPException): message = u._("No data supplied to process.") client_message = message status_code = 400 class LimitExceeded(BarbicanHTTPException): message = u._("The request returned a 413 Request Entity Too Large. This " "generally means that rate limiting or a quota threshold " "was breached.") client_message = u._("Provided information too large to process") status_code = 413 def __init__(self, *args, **kwargs): super(LimitExceeded, self).__init__(*args, **kwargs) self.retry_after = (int(kwargs['retry']) if kwargs.get('retry') else None) class InvalidObject(BarbicanHTTPException): status_code = 400 def __init__(self, *args, **kwargs): self.invalid_property = kwargs.get('property') self.message = u._("Failed to validate JSON information: ") self.client_message = u._("Provided object does not match " "schema '{schema}': " "{reason}. 
Invalid property: " "'{property}'").format(*args, **kwargs) self.message = self.message + self.client_message super(InvalidObject, self).__init__(*args, **kwargs) class PayloadDecodingError(BarbicanHTTPException): status_code = 400 message = u._("Error while attempting to decode payload.") client_message = u._("Unable to decode request data.") class UnsupportedField(BarbicanHTTPException): message = u._("No support for value set on field '%(field)s' on " "schema '%(schema)s': %(reason)s") client_message = u._("Provided field value is not supported") status_code = 400 def __init__(self, *args, **kwargs): super(UnsupportedField, self).__init__(*args, **kwargs) self.invalid_field = kwargs.get('field') class FeatureNotImplemented(BarbicanException): message = u._("Feature not implemented for value set on field " "'%(field)s' on " "schema '%(schema)s': %(reason)s") def __init__(self, *args, **kwargs): super(FeatureNotImplemented, self).__init__(*args, **kwargs) self.invalid_field = kwargs.get('field') class StoredKeyContainerNotFound(BarbicanException): message = u._("Container %(container_id)s does not exist for stored " "key certificate generation.") class StoredKeyPrivateKeyNotFound(BarbicanException): message = u._("Container %(container_id)s does not reference a private " "key needed for stored key certificate generation.") class ProvidedTransportKeyNotFound(BarbicanHTTPException): message = u._("Provided Transport key %(transport_key_id)s " "could not be found") client_message = u._("Provided transport key was not found.") status_code = 400 class InvalidCAID(BarbicanHTTPException): message = u._("Invalid CA_ID: %(ca_id)s") client_message = u._("The ca_id provided in the request is invalid") status_code = 400 class CANotDefinedForProject(BarbicanHTTPException): message = u._("CA specified by ca_id %(ca_id)s not defined for project: " "%(project_id)s") client_message = u._("The ca_id provided in the request is not defined " "for this project") status_code = 403 class QuotaReached(BarbicanHTTPException): message = u._("Quota reached for project %(external_project_id)s. 
Only " "%(quota)s %(resource_type)s are allowed.") client_message = u._("Creation not allowed because a quota has " "been reached") status_code = 403 def __init__(self, *args, **kwargs): super(QuotaReached, self).__init__(*args, **kwargs) self.external_project_id = kwargs.get('external_project_id') self.quota = kwargs.get('quota') self.resource_type = kwargs.get('resource_type') class InvalidParentCA(BarbicanHTTPException): message = u._("Invalid Parent CA: %(parent_ca_ref)s") client_message = message status_code = 400 class SubCAsNotSupported(BarbicanHTTPException): message = u._("Plugin does not support generation of subordinate CAs") client_message = message status_code = 400 class SubCANotCreated(BarbicanHTTPException): message = u._("Errors in creating subordinate CA: %(name)") client_message = message class CannotDeleteBaseCA(BarbicanHTTPException): message = u._("Only subordinate CAs can be deleted.") status_code = 403 class UnauthorizedSubCA(BarbicanHTTPException): message = u._("Subordinate CA is not owned by this project") client_message = message status_code = 403 class CannotDeletePreferredCA(BarbicanHTTPException): message = u._("A new project preferred CA must be set " "before this one can be deleted.") status_code = 409 class BadSubCACreationRequest(BarbicanHTTPException): message = u._("Errors returned by CA when attempting to " "create subordinate CA: %(reason)s") client_message = message status_code = 400 class SubCACreationErrors(BarbicanHTTPException): message = u._("Errors returned by CA when attempting to create " "subordinate CA: %(reason)s") client_message = message class SubCADeletionErrors(BarbicanHTTPException): message = u._("Errors returned by CA when attempting to delete " "subordinate CA: %(reason)s") client_message = message class PKCS11Exception(BarbicanException): message = u._("There was an error with the PKCS#11 library.") class P11CryptoPluginKeyException(PKCS11Exception): message = u._("More than one key found for label") class P11CryptoPluginException(PKCS11Exception): message = u._("General exception") class P11CryptoKeyHandleException(PKCS11Exception): message = u._("No key handle was found") class P11CryptoTokenException(PKCS11Exception): message = u._("No token was found in slot %(slot_id)s") class MultipleStorePreferredPluginMissing(BarbicanException): """Raised when a preferred plugin is missing in service configuration.""" def __init__(self, store_name): super(MultipleStorePreferredPluginMissing, self).__init__( u._("Preferred Secret Store plugin '{store_name}' is not " "currently set in service configuration. This is probably a " "server misconfiguration.").format( store_name=store_name) ) self.store_name = store_name class MultipleStorePluginStillInUse(BarbicanException): """Raised when a used plugin is missing in service configuration.""" def __init__(self, store_name): super(MultipleStorePluginStillInUse, self).__init__( u._("Secret Store plugin '{store_name}' is still in use and can " "not be removed. Its missing in service configuration. 
This is" " probably a server misconfiguration.").format( store_name=store_name) ) self.store_name = store_name class MultipleSecretStoreLookupFailed(BarbicanException): """Raised when a plugin lookup suffix is missing during config read.""" def __init__(self): msg = u._("Plugin lookup property 'stores_lookup_suffix' is not " "defined in service configuration") super(MultipleSecretStoreLookupFailed, self).__init__(msg) class MultipleStoreIncorrectGlobalDefault(BarbicanException): """Raised when a global default for only one plugin is not set to True.""" def __init__(self, occurrence): msg = None if occurrence > 1: msg = u._("There are {count} plugins with global default as " "True in service configuration. Only one plugin can have" " this as True").format(count=occurrence) else: msg = u._("There is no plugin defined with global default as True." " One of plugin must be identified as global default") super(MultipleStoreIncorrectGlobalDefault, self).__init__(msg) class MultipleStorePluginValueMissing(BarbicanException): """Raised when a store plugin value is missing in service configuration.""" def __init__(self, section_name): super(MultipleStorePluginValueMissing, self).__init__( u._("In section '{0}', secret_store_plugin value is missing" ).format(section_name) ) self.section_name = section_name barbican-9.1.0.dev50/barbican/common/policy.py0000664000175000017500000000226013616500636021320 0ustar sahidsahid00000000000000# Copyright 2011-2012 OpenStack LLC. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_policy import policy from barbican.common import config from barbican.common import policies CONF = config.CONF ENFORCER = None def reset(): global ENFORCER if ENFORCER: ENFORCER.clear() ENFORCER = None def init(): global ENFORCER global saved_file_rules if not ENFORCER: ENFORCER = policy.Enforcer(CONF) register_rules(ENFORCER) ENFORCER.load_rules() def register_rules(enforcer): enforcer.register_defaults(policies.list_rules()) def get_enforcer(): init() return ENFORCER barbican-9.1.0.dev50/barbican/common/utils.py0000664000175000017500000001701213616500636021162 0ustar sahidsahid00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ Common utilities for Barbican. 
""" import collections import importlib import mimetypes import uuid from oslo_log import log from oslo_utils import uuidutils import pecan import re import six from six.moves.urllib import parse from barbican.common import config from barbican import i18n as u CONF = config.CONF # Current API version API_VERSION = 'v1' # Added here to remove cyclic dependency. # In barbican.model.models module SecretType.OPAQUE was imported from # barbican.plugin.interface.secret_store which introduces a cyclic dependency # if `secret_store` plugin needs to use db model classes. So moving shared # value to another common python module which is already imported in both. SECRET_TYPE_OPAQUE = "opaque" # nosec def _do_allow_certain_content_types(func, content_types_list=[]): # Allows you to bypass pecan's content-type restrictions cfg = pecan.util._cfg(func) cfg.setdefault('content_types', {}) cfg['content_types'].update((value, '') for value in content_types_list) return func def allow_certain_content_types(*content_types_list): def _wrapper(func): return _do_allow_certain_content_types(func, content_types_list) return _wrapper def allow_all_content_types(f): return _do_allow_certain_content_types(f, mimetypes.types_map.values()) def get_base_url_from_request(): """Derive base url from wsgi request if CONF.host_href is not set Use host.href as base URL if its set in barbican.conf. If its not set, then derives value from wsgi request. WSGI request uses HOST header or HTTP_X_FORWARDED_FOR header (in case of proxy) for host + port part of its url. Proxies can also set HTTP_X_FORWARDED_PROTO header for indicating http vs https. Some of unit tests does not have pecan context that's why using request attr check on pecan instance. """ if not CONF.host_href and hasattr(pecan.request, 'application_url'): p_url = parse.urlsplit(pecan.request.application_url) # Pecan does not handle X_FORWARDED_PROTO yet, so we need to # handle it ourselves. see lp#1445290 scheme = pecan.request.environ.get('HTTP_X_FORWARDED_PROTO', 'http') # Pecan does not handle url reconstruction according to # https://www.python.org/dev/peps/pep-0333/#url-reconstruction netloc = pecan.request.environ.get('HTTP_HOST', p_url.netloc) # FIXME: implement SERVER_NAME lookup if HTTP_HOST is not set if p_url.path: # Remove the version from the path to extract the base path base_path = re.sub('/v[0-9\.]+$', '', p_url.path) base_url = '%s://%s%s' % (scheme, netloc, base_path) else: base_url = '%s://%s' % (scheme, netloc) return base_url else: # when host_href is set or flow is not within wsgi request context return CONF.host_href def hostname_for_refs(resource=None): """Return the HATEOAS-style return URI reference for this service.""" base_url = get_base_url_from_request() ref = ['{base}/{version}'.format(base=base_url, version=API_VERSION)] if resource: ref.append('/' + resource) return ''.join(ref) # Return a logger instance. # Note: Centralize access to the logger to avoid the dreaded # 'ArgsAlreadyParsedError: arguments already parsed: cannot # register CLI option' # error. def getLogger(name): return log.getLogger(name) def get_accepted_encodings(req): """Returns a list of client acceptable encodings sorted by q value. For details see: http://tools.ietf.org/html/rfc2616#section-14.3 :param req: request object :returns: list of client acceptable encodings sorted by q value. 
""" header = req.get_header('Accept-Encoding') return get_accepted_encodings_direct(header) def get_accepted_encodings_direct(content_encoding_header): """Returns a list of client acceptable encodings sorted by q value. For details see: http://tools.ietf.org/html/rfc2616#section-14.3 :param req: request object :returns: list of client acceptable encodings sorted by q value. """ if content_encoding_header is None: return None Encoding = collections.namedtuple('Encoding', ['coding', 'quality']) encodings = list() for enc in content_encoding_header.split(','): if ';' in enc: coding, qvalue = enc.split(';') try: qvalue = qvalue.split('=')[1] quality = float(qvalue.strip()) except ValueError: # can't convert quality to float return None if quality > 1.0 or quality < 0.0: # quality is outside valid range return None if quality > 0.0: encodings.append(Encoding(coding.strip(), quality)) else: encodings.append(Encoding(enc.strip(), 1)) # Sort the encodings by quality encodings = sorted(encodings, key=lambda e: e.quality, reverse=True) return [encoding.coding for encoding in encodings] def generate_fullname_for(instance): """Produce a fully qualified class name for the specified instance. :param instance: The instance to generate information from. :return: A string providing the package.module information for the instance. :raises: ValueError if the given instance is null """ if not instance: raise ValueError(u._("Cannot generate a fullname for a null instance")) module = type(instance).__module__ class_name = type(instance).__name__ if module is None or module == six.moves.builtins.__name__: return class_name return "{module}.{class_name}".format(module=module, class_name=class_name) def get_class_for(module_name, class_name): """Create a Python class from its text-specified components.""" # Load the module via name, raising ImportError if module cannot be # loaded. python_module = importlib.import_module(module_name) # Load and return the resolved Python class, raising AttributeError if # class cannot be found. return getattr(python_module, class_name) def generate_uuid(): return uuidutils.generate_uuid() def is_multiple_backends_enabled(): try: secretstore_conf = config.get_module_config('secretstore') except KeyError: # Ensure module is initialized from barbican.plugin.interface import secret_store # nopep8 secretstore_conf = config.get_module_config('secretstore') return secretstore_conf.secretstore.enable_multiple_secret_stores def validate_id_is_uuid(input_id, version=4): """Validates provided id is uuid4 format value. Returns true when provided id is a valid version 4 uuid otherwise returns False. This validation is to be used only for ids which are generated by barbican (e.g. not for keystone project_id) """ try: value = uuid.UUID(input_id, version=version) except Exception: return False return str(value) == input_id barbican-9.1.0.dev50/barbican/common/config.py0000664000175000017500000004022413616500636021270 0ustar sahidsahid00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. 
# See the License for the specific language governing permissions and # limitations under the License. """ Configuration setup for Barbican. """ import logging import os from oslo_config import cfg from oslo_log import log from oslo_middleware import cors from oslo_service import _options from barbican import i18n as u import barbican.version MAX_BYTES_REQUEST_INPUT_ACCEPTED = 15000 DEFAULT_MAX_SECRET_BYTES = 10000 KS_NOTIFICATIONS_GRP_NAME = 'keystone_notifications' context_opts = [ cfg.StrOpt('admin_role', default='admin', help=u._('Role used to identify an authenticated user as ' 'administrator.')), cfg.BoolOpt('allow_anonymous_access', default=False, help=u._('Allow unauthenticated users to access the API with ' 'read-only privileges. This only applies when using ' 'ContextMiddleware.')), ] common_opts = [ cfg.IntOpt('max_allowed_request_size_in_bytes', default=MAX_BYTES_REQUEST_INPUT_ACCEPTED, help=u._("Maximum allowed http request size against the " "barbican-api.")), cfg.IntOpt('max_allowed_secret_in_bytes', default=DEFAULT_MAX_SECRET_BYTES, help=u._("Maximum allowed secret size in bytes.")), ] host_opts = [ cfg.StrOpt('host_href', default='http://localhost:9311', help=u._("Host name, for use in HATEOAS-style references. " "Note: Typically this would be the load balanced " "endpoint that clients would use to communicate back " "with this service. If a deployment wants to derive " "host from the wsgi request instead then make this " "blank. Blank is needed to override the default " "config value which is 'http://localhost:9311'")), ] db_opts = [ cfg.StrOpt('sql_connection', default="sqlite:///barbican.sqlite", secret=True, help=u._("SQLAlchemy connection string for the reference " "implementation registry server. Any valid " "SQLAlchemy connection string is fine. See: " "http://www.sqlalchemy.org/docs/05/reference/" "sqlalchemy/connections.html#sqlalchemy." "create_engine. Note: For absolute addresses, use " "'////' slashes after 'sqlite:'.")), cfg.IntOpt('sql_idle_timeout', default=3600, help=u._("Period in seconds after which SQLAlchemy should " "reestablish its connection to the database. MySQL " "uses a default `wait_timeout` of 8 hours, after " "which it will drop idle connections. This can result " "in 'MySQL Gone Away' exceptions. If you notice this, " "you can lower this value to ensure that SQLAlchemy " "reconnects before MySQL can drop the connection.")), cfg.IntOpt('sql_max_retries', default=60, help=u._("Maximum number of database connection retries " "during startup. Set to -1 to specify an infinite " "retry count.")), cfg.IntOpt('sql_retry_interval', default=1, help=u._("Interval between retries of opening a SQL " "connection.")), cfg.BoolOpt('db_auto_create', default=True, help=u._("Create the Barbican database on service startup.")), cfg.IntOpt('max_limit_paging', default=100, help=u._("Maximum page size for the 'limit' paging URL " "parameter.")), cfg.IntOpt('default_limit_paging', default=10, help=u._("Default page size for the 'limit' paging URL " "parameter.")), cfg.StrOpt('sql_pool_class', default="QueuePool", help=u._("Accepts a class imported from the sqlalchemy.pool " "module, and handles the details of building the " "pool for you. If commented out, SQLAlchemy will " "select based on the database dialect. Other options " "are QueuePool (for SQLAlchemy-managed connections) " "and NullPool (to disable SQLAlchemy management of " "connections). 
See http://docs.sqlalchemy.org/en/" "latest/core/pooling.html for more details")), cfg.BoolOpt('sql_pool_logging', default=False, help=u._("Show SQLAlchemy pool-related debugging output in " "logs (sets DEBUG log level output) if specified.")), cfg.IntOpt('sql_pool_size', default=5, help=u._("Size of pool used by SQLAlchemy. This is the largest " "number of connections that will be kept persistently " "in the pool. Can be set to 0 to indicate no size " "limit. To disable pooling, use a NullPool with " "sql_pool_class instead. Comment out to allow " "SQLAlchemy to select the default.")), cfg.IntOpt('sql_pool_max_overflow', default=10, help=u._("The maximum overflow size of the pool used by " "SQLAlchemy. When the number of checked-out " "connections reaches the size set in sql_pool_size, " "additional connections will be returned up to this " "limit. It follows then that the total number of " "simultaneous connections the pool will allow is " "sql_pool_size + sql_pool_max_overflow. Can be set " "to -1 to indicate no overflow limit, so no limit " "will be placed on the total number of concurrent " "connections. Comment out to allow SQLAlchemy to " "select the default.")), ] retry_opt_group = cfg.OptGroup(name='retry_scheduler', title='Retry/Scheduler Options') retry_opts = [ cfg.FloatOpt( 'initial_delay_seconds', default=10.0, help=u._('Seconds (float) to wait before starting retry scheduler')), cfg.FloatOpt( 'periodic_interval_max_seconds', default=10.0, help=u._('Seconds (float) to wait between periodic schedule events')), ] queue_opt_group = cfg.OptGroup(name='queue', title='Queue Application Options') queue_opts = [ cfg.BoolOpt('enable', default=False, help=u._('True enables queuing, False invokes ' 'workers synchronously')), cfg.StrOpt('namespace', default='barbican', help=u._('Queue namespace')), cfg.StrOpt('topic', default='barbican.workers', help=u._('Queue topic name')), cfg.StrOpt('version', default='1.1', help=u._('Version of tasks invoked via queue')), cfg.StrOpt('server_name', default='barbican.queue', help=u._('Server name for RPC task processing server')), cfg.IntOpt('asynchronous_workers', default=1, help=u._('Number of asynchronous worker processes')), ] ks_queue_opt_group = cfg.OptGroup(name=KS_NOTIFICATIONS_GRP_NAME, title='Keystone Notification Options') ks_queue_opts = [ cfg.BoolOpt('enable', default=False, help=u._('True enables keystone notification listener ' 'functionality.')), cfg.StrOpt('control_exchange', default='keystone', help=u._('The default exchange under which topics are scoped. ' 'May be overridden by an exchange name specified in ' 'the transport_url option.')), cfg.StrOpt('topic', default='notifications', help=u._("Keystone notification queue topic name. This name " "needs to match one of the values mentioned in the " "Keystone deployment's 'notification_topics' " "configuration, e.g. " "notification_topics=notifications, " "barbican_notifications. " "Multiple servers may listen on a topic and messages " "will be dispatched to one of the servers in a " "round-robin fashion. That's why the Barbican service " "should have its own dedicated notification queue so " "that it receives all of the Keystone notifications. " "Alternatively if the chosen oslo.messaging backend " "supports listener pooling (for example rabbitmq), " "setting a non-default 'pool_name' option " "should be preferred.")), cfg.StrOpt('pool_name', help=u._("Pool name for notifications listener. 
" "Setting this to a distinctive value will allow " "barbican notifications listener to receive its own " "copy of all messages from the topic without " "without interfering with other services listening " "on the same topic. This feature is supported only " "by some oslo.messaging backends " "(in particilar by rabbitmq) and for those it is " "preferrable to use it instead of separate " "notification topic for barbican.")), cfg.BoolOpt('allow_requeue', default=False, help=u._('True enables requeue feature in case of notification' ' processing error. Enable this only when underlying ' 'transport supports this feature.')), cfg.StrOpt('version', default='1.0', help=u._('Version of tasks invoked via notifications')), cfg.IntOpt('thread_pool_size', default=10, help=u._('Define the number of max threads to be used for ' 'notification server processing functionality.')), ] quota_opt_group = cfg.OptGroup(name='quotas', title='Quota Options') quota_opts = [ cfg.IntOpt('quota_secrets', default=-1, help=u._('Number of secrets allowed per project')), cfg.IntOpt('quota_orders', default=-1, help=u._('Number of orders allowed per project')), cfg.IntOpt('quota_containers', default=-1, help=u._('Number of containers allowed per project')), cfg.IntOpt('quota_consumers', default=-1, help=u._('Number of consumers allowed per project')), cfg.IntOpt('quota_cas', default=-1, help=u._('Number of CAs allowed per project')) ] def list_opts(): yield None, context_opts yield None, common_opts yield None, host_opts yield None, db_opts yield None, _options.eventlet_backdoor_opts yield retry_opt_group, retry_opts yield queue_opt_group, queue_opts yield ks_queue_opt_group, ks_queue_opts yield quota_opt_group, quota_opts # Flag to indicate barbican configuration is already parsed once or not _CONFIG_PARSED_ONCE = False def parse_args(conf, args=None, usage=None, default_config_files=None): global _CONFIG_PARSED_ONCE conf(args=args if args else [], project='barbican', prog='barbican', version=barbican.version.__version__, usage=usage, default_config_files=default_config_files) conf.pydev_debug_host = os.environ.get('PYDEV_DEBUG_HOST') conf.pydev_debug_port = os.environ.get('PYDEV_DEBUG_PORT') # Assign cfg.CONF handle to parsed barbican configuration once at startup # only. No need to keep re-assigning it with separate plugin conf usage if not _CONFIG_PARSED_ONCE: cfg.CONF = conf _CONFIG_PARSED_ONCE = True def new_config(): conf = cfg.ConfigOpts() log.register_options(conf) conf.register_opts(context_opts) conf.register_opts(common_opts) conf.register_opts(host_opts) conf.register_opts(db_opts) conf.register_opts(_options.eventlet_backdoor_opts) conf.register_opts(_options.periodic_opts) conf.register_opts(_options.ssl_opts, "ssl") conf.register_group(retry_opt_group) conf.register_opts(retry_opts, group=retry_opt_group) conf.register_group(queue_opt_group) conf.register_opts(queue_opts, group=queue_opt_group) conf.register_group(ks_queue_opt_group) conf.register_opts(ks_queue_opts, group=ks_queue_opt_group) conf.register_group(quota_opt_group) conf.register_opts(quota_opts, group=quota_opt_group) # Update default values from libraries that carry their own oslo.config # initialization and configuration. 
set_middleware_defaults() return conf def setup_remote_pydev_debug(): """Required setup for remote debugging.""" if CONF.pydev_debug_host and CONF.pydev_debug_port: try: try: from pydev import pydevd except ImportError: import pydevd pydevd.settrace(CONF.pydev_debug_host, port=int(CONF.pydev_debug_port), stdoutToServer=True, stderrToServer=True) except Exception: LOG.exception('Unable to join debugger, please ' 'make sure that the debugger process is ' 'listening on debug-host \'%(debug-host)s\' ' 'debug-port \'%(debug-port)s\'.', {'debug-host': CONF.pydev_debug_host, 'debug-port': CONF.pydev_debug_port}) raise def set_middleware_defaults(): """Update default configuration options for oslo.middleware.""" cors.set_defaults( allow_headers=['X-Auth-Token', 'X-Openstack-Request-Id', 'X-Project-Id', 'X-Identity-Status', 'X-User-Id', 'X-Storage-Token', 'X-Domain-Id', 'X-User-Domain-Id', 'X-Project-Domain-Id', 'X-Roles'], expose_headers=['X-Auth-Token', 'X-Openstack-Request-Id', 'X-Project-Id', 'X-Identity-Status', 'X-User-Id', 'X-Storage-Token', 'X-Domain-Id', 'X-User-Domain-Id', 'X-Project-Domain-Id', 'X-Roles'], allow_methods=['GET', 'PUT', 'POST', 'DELETE', 'PATCH'] ) CONF = new_config() LOG = logging.getLogger(__name__) parse_args(CONF) # Adding global scope dict for all different configs created in various # modules. In barbican, each plugin module creates its own *new* config # instance so it's error-prone to share/access config values across modules # as these module imports introduce a cyclic dependency. To avoid this, each # plugin can set this dict after its own config instance is created and parsed. _CONFIGS = {} def set_module_config(name, module_conf): """Each plugin can set its own conf instance with its group name.""" _CONFIGS[name] = module_conf def get_module_config(name): """Get handle to plugin specific config instance by its group name.""" return _CONFIGS[name] barbican-9.1.0.dev50/barbican/common/__init__.py0000664000175000017500000000000013616500636021546 0ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/common/policies/0000775000175000017500000000000013616500640021251 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/common/policies/acls.py0000664000175000017500000000730613616500636022560 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_policy import policy # FIXME(hrybacki): Repetitive check strings: Port to simpler checks # - secret_acls:delete, secret_acls:put_patch # - container_acls:delete container_acls:put_patch rules = [ policy.DocumentedRuleDefault( name='secret_acls:get', check_str='rule:all_but_audit and rule:secret_project_match', scope_types=[], description='Retrieve the ACL settings for a given secret. '
'If no ACL is defined for that secret, then the default ACL ' 'is returned.', operations=[ { 'path': '/v1/secrets/{secret-id}/acl', 'method': 'GET' }, ] ), policy.DocumentedRuleDefault( name='secret_acls:delete', check_str='rule:secret_project_admin or rule:secret_project_creator', scope_types=[], description='Delete the ACL settings for a given secret.', operations=[ { 'path': '/v1/secrets/{secret-id}/acl', 'method': 'DELETE' }, ] ), policy.DocumentedRuleDefault( name='secret_acls:put_patch', check_str='rule:secret_project_admin or rule:secret_project_creator', scope_types=[], description='Create new, replace, or update an existing ACL for a ' + 'given secret.', operations=[ { 'path': '/v1/secrets/{secret-id}/acl', 'method': 'PUT' }, { 'path': '/v1/secrets/{secret-id}/acl', 'method': 'PATCH' }, ] ), policy.DocumentedRuleDefault( name='container_acls:get', check_str='rule:all_but_audit and rule:container_project_match', scope_types=[], description='Retrieve the ACL settings for a given container.', operations=[ { 'path': '/v1/containers/{container-id}/acl', 'method': 'GET' } ] ), policy.DocumentedRuleDefault( name='container_acls:delete', check_str='rule:container_project_admin or ' + 'rule:container_project_creator', scope_types=[], description='Delete ACL for a given container. No content is returned ' 'in the case of successful deletion.', operations=[ { 'path': '/v1/containers/{container-id}/acl', 'method': 'DELETE' } ] ), policy.DocumentedRuleDefault( name='container_acls:put_patch', check_str='rule:container_project_admin or ' + 'rule:container_project_creator', scope_types=[], description='Create new or replace an existing ACL for a given ' 'container.', operations=[ { 'path': '/v1/containers/{container-id}/acl', 'method': 'PUT' }, { 'path': '/v1/containers/{container-id}/acl', 'method': 'PATCH' } ] ), ] def list_rules(): return rules barbican-9.1.0.dev50/barbican/common/policies/secrets.py0000664000175000017500000000565013616500636023306 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
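# A minimal sketch -- not Barbican code, and the target keys below are
# illustrative -- of how rules such as those in this module are consumed
# through oslo.policy: the enforcer is loaded with the registered defaults
# and then asked to authorize one named action against a target/credentials
# pair:
#
#     from oslo_config import cfg
#     from oslo_policy import policy as oslo_policy
#     from barbican.common import policies
#
#     enforcer = oslo_policy.Enforcer(cfg.CONF)
#     enforcer.register_defaults(policies.list_rules())
#     allowed = enforcer.authorize(
#         'secret:get',
#         {'target.secret.project_id': 'project-1'},
#         {'roles': ['observer'], 'project_id': 'project-1'})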
from oslo_policy import policy rules = [ policy.DocumentedRuleDefault( name='secret:decrypt', check_str='rule:secret_decrypt_non_private_read or ' + 'rule:secret_project_creator or ' + 'rule:secret_project_admin or rule:secret_acl_read', scope_types=[], description="Retrieve a secret's payload.", operations=[ { 'path': '/v1/secrets/{uuid}/payload', 'method': 'GET' } ] ), policy.DocumentedRuleDefault( name='secret:get', check_str='rule:secret_non_private_read or ' + 'rule:secret_project_creator or ' + 'rule:secret_project_admin or rule:secret_acl_read', scope_types=[], description="Retrieves a secret's metadata.", operations=[ { 'path': '/v1/secrets/{secret-id}', 'method': 'GET' } ] ), policy.DocumentedRuleDefault( name='secret:put', check_str='rule:admin_or_creator and rule:secret_project_match', scope_types=[], description='Add the payload to an existing metadata-only secret.', operations=[ { 'path': '/v1/secrets/{secret-id}', 'method': 'PUT' } ] ), policy.DocumentedRuleDefault( name='secret:delete', check_str='rule:secret_project_admin or ' + 'rule:secret_project_creator', scope_types=[], description='Delete a secret by uuid.', operations=[ { 'path': '/v1/secrets/{secret-id}', 'method': 'DELETE' } ] ), policy.DocumentedRuleDefault( name='secrets:post', check_str='rule:admin_or_creator', scope_types=[], description='Creates a Secret entity.', operations=[ { 'path': '/v1/secrets', 'method': 'POST' } ] ), policy.DocumentedRuleDefault( name='secrets:get', check_str='rule:all_but_audit', scope_types=[], description="Lists a project's secrets.", operations=[ { 'path': '/v1/secrets', 'method': 'GET' } ] ) ] def list_rules(): return rules barbican-9.1.0.dev50/barbican/common/policies/secretstores.py0000664000175000017500000000565213616500636024365 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
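# The secret-stores API governed by these rules is only meaningful when
# multiple secret store backends are enabled. A hedged barbican.conf
# sketch (the option and section names below are assumptions drawn from
# the secretstore plugin configuration, not from this module):
#
#     [secretstore]
#     enable_multiple_secret_stores = true
#     stores_lookup_suffix = software, pkcs11
#
#     [secretstore:software]
#     secret_store_plugin = store_crypto
#     crypto_plugin = simple_crypto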
from oslo_policy import policy rules = [ policy.DocumentedRuleDefault( name='secretstores:get', check_str='rule:admin', scope_types=[], description='Get list of available secret store backends.', operations=[ { 'path': '/v1/secret-stores', 'method': 'GET' } ] ), policy.DocumentedRuleDefault( name='secretstores:get_global_default', check_str='rule:admin', scope_types=[], description='Get a reference to the secret store that is used as ' + 'default secret store backend for the deployment.', operations=[ { 'path': '/v1/secret-stores/global-default', 'method': 'GET' } ] ), policy.DocumentedRuleDefault( name='secretstores:get_preferred', check_str='rule:admin', scope_types=[], description='Get a reference to the preferred secret store if ' + 'assigned previously.', operations=[ { 'path': '/v1/secret-stores/preferred', 'method': 'GET' } ] ), policy.DocumentedRuleDefault( name='secretstore_preferred:post', check_str='rule:admin', scope_types=[], description='Set a secret store backend to be preferred store ' + 'backend for their project.', operations=[ { 'path': '/v1/secret-stores/{ss-id}/preferred', 'method': 'POST' } ] ), policy.DocumentedRuleDefault( name='secretstore_preferred:delete', check_str='rule:admin', scope_types=[], description='Remove preferred secret store backend setting for ' + 'their project.', operations=[ { 'path': '/v1/secret-stores/{ss-id}/preferred', 'method': 'DELETE' } ] ), policy.DocumentedRuleDefault( name='secretstore:get', check_str='rule:admin', scope_types=[], description='Get details of secret store by its ID.', operations=[ { 'path': '/v1/secret-stores/{ss-id}', 'method': 'GET' } ] ), ] def list_rules(): return rules barbican-9.1.0.dev50/barbican/common/policies/secretmeta.py0000664000175000017500000000513113616500636023764 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_policy import policy rules = [ policy.DocumentedRuleDefault( name='secret_meta:get', check_str='rule:all_but_audit', scope_types=[], description='metadata/: Lists a secrets user-defined metadata. 
|| ' + 'metadata/{key}: Retrieves a secrets user-added metadata.', operations=[ { 'path': '/v1/secrets/{secret-id}/metadata', 'method': 'GET' }, { 'path': '/v1/secrets/{secret-id}/metadata/{meta-key}', 'method': 'GET' } ] ), policy.DocumentedRuleDefault( name='secret_meta:post', check_str='rule:admin_or_creator', scope_types=[], description='Adds a new key/value pair to the secrets user-defined ' + 'metadata.', operations=[ { 'path': '/v1/secrets/{secret-id}/metadata/{meta-key}', 'method': 'POST' } ] ), policy.DocumentedRuleDefault( name='secret_meta:put', check_str='rule:admin_or_creator', scope_types=[], description='metadata/: Sets the user-defined metadata for a secret ' + '|| metadata/{key}: Updates an existing key/value pair ' + 'in the secrets user-defined metadata.', operations=[ { 'path': '/v1/secrets/{secret-id}/metadata', 'method': 'PUT' }, { 'path': '/v1/secrets/{secret-id}/metadata/{meta-key}', 'method': 'PUT' } ] ), policy.DocumentedRuleDefault( name='secret_meta:delete', check_str='rule:admin_or_creator', scope_types=[], description='Delete secret user-defined metadata by key.', operations=[ { 'path': '/v1/secrets/{secret-id}/metadata/{meta-key}', 'method': 'DELETE' } ] ), ] def list_rules(): return rules barbican-9.1.0.dev50/barbican/common/policies/orders.py0000664000175000017500000000427313616500636023134 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_policy import policy rules = [ policy.DocumentedRuleDefault( name='orders:get', check_str='rule:all_but_audit', scope_types=[], description='Gets list of all orders associated with a project.', operations=[ { 'path': '/v1/orders', 'method': 'GET' } ] ), policy.DocumentedRuleDefault( name='orders:post', check_str='rule:admin_or_creator', scope_types=[], description='Creates an order.', operations=[ { 'path': '/v1/orders', 'method': 'POST' } ] ), policy.DocumentedRuleDefault( name='orders:put', check_str='rule:admin_or_creator', scope_types=[], description='Unsupported method for the orders API.', operations=[ { 'path': '/v1/orders', 'method': 'PUT' } ] ), policy.DocumentedRuleDefault( name='order:get', check_str='rule:all_users', scope_types=[], description='Retrieves an orders metadata.', operations=[ { 'path': '/v1/orders/{order-id}', 'method': 'GET' } ] ), policy.DocumentedRuleDefault( name='order:delete', check_str='rule:admin', scope_types=[], description='Deletes an order.', operations=[ { 'path': '/v1/orders/{order-id}', 'method': 'DELETE' } ], ) ] def list_rules(): return rules barbican-9.1.0.dev50/barbican/common/policies/transportkeys.py0000664000175000017500000000365413616500636024570 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_policy import policy rules = [ policy.DocumentedRuleDefault( name='transport_key:get', check_str='rule:all_users', scope_types=[], description='Get a specific transport key.', operations=[ { 'path': '/v1/transport_keys/{key-id}', 'method': 'GET' } ] ), policy.DocumentedRuleDefault( name='transport_key:delete', check_str='rule:admin', scope_types=[], description='Delete a specific transport key.', operations=[ { 'path': '/v1/transport_keys/{key-id}', 'method': 'DELETE' } ] ), policy.DocumentedRuleDefault( name='transport_keys:get', check_str='rule:all_users', scope_types=[], description='Get a list of all transport keys.', operations=[ { 'path': '/v1/transport_keys', 'method': 'GET' } ] ), policy.DocumentedRuleDefault( name='transport_keys:post', check_str='rule:admin', scope_types=[], description='Create a new transport key.', operations=[ { 'path': '/v1/transport_keys', 'method': 'POST' } ] ), ] def list_rules(): return rules barbican-9.1.0.dev50/barbican/common/policies/quotas.py0000664000175000017500000000430013616500636023141 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_policy import policy rules = [ policy.DocumentedRuleDefault( name='quotas:get', check_str='rule:all_users', scope_types=[], description='List quotas for the project the user belongs to.', operations=[ { 'path': '/v1/quotas', 'method': 'GET' } ] ), policy.DocumentedRuleDefault( name='project_quotas:get', check_str='rule:service_admin', scope_types=[], description='List quotas for the specified project.', operations=[ { 'path': '/v1/project-quotas', 'method': 'GET' }, { 'path': '/v1/project-quotas/{uuid}', 'method': 'GET' } ] ), policy.DocumentedRuleDefault( name='project_quotas:put', check_str='rule:service_admin', scope_types=[], description='Create or update the configured project quotas for ' 'the project with the specified UUID.', operations=[ { 'path': '/v1/project-quotas/{uuid}', 'method': 'PUT' } ] ), policy.DocumentedRuleDefault( name='project_quotas:delete', check_str='rule:service_admin', scope_types=[], description='Delete the project quotas configuration for the ' 'project with the requested UUID.', operations=[ { 'path': '/v1/project-quotas/{uuid}', 'method': 'DELETE' } ] ), ] def list_rules(): return rules barbican-9.1.0.dev50/barbican/common/policies/base.py0000664000175000017500000000711013616500636022541 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_policy import policy rules = [ policy.RuleDefault( name='admin', check_str='role:admin'), policy.RuleDefault( name='observer', check_str='role:observer'), policy.RuleDefault( name='creator', check_str='role:creator'), policy.RuleDefault( name='audit', check_str='role:audit'), policy.RuleDefault( name='service_admin', check_str='role:key-manager:service-admin'), policy.RuleDefault( name='admin_or_creator', check_str='rule:admin or rule:creator'), policy.RuleDefault( name='all_but_audit', check_str='rule:admin or rule:observer or rule:creator'), policy.RuleDefault( name='all_users', check_str='rule:admin or rule:observer or rule:creator or ' + 'rule:audit or rule:service_admin'), policy.RuleDefault( name='secret_project_match', check_str='project_id:%(target.secret.project_id)s'), policy.RuleDefault( name='secret_acl_read', check_str="'read':%(target.secret.read)s"), policy.RuleDefault( name='secret_private_read', check_str="'False':%(target.secret.read_project_access)s"), policy.RuleDefault( name='secret_creator_user', check_str="user_id:%(target.secret.creator_id)s"), policy.RuleDefault( name='container_project_match', check_str="project_id:%(target.container.project_id)s"), policy.RuleDefault( name='container_acl_read', check_str="'read':%(target.container.read)s"), policy.RuleDefault( name='container_private_read', check_str="'False':%(target.container.read_project_access)s"), policy.RuleDefault( name='container_creator_user', check_str="user_id:%(target.container.creator_id)s"), policy.RuleDefault( name='secret_non_private_read', check_str="rule:all_users and rule:secret_project_match and not " + "rule:secret_private_read"), policy.RuleDefault( name='secret_decrypt_non_private_read', check_str="rule:all_but_audit and rule:secret_project_match and not " + "rule:secret_private_read"), policy.RuleDefault( name='container_non_private_read', check_str="rule:all_users and rule:container_project_match and not " + "rule:container_private_read"), policy.RuleDefault( name='secret_project_admin', check_str="rule:admin and rule:secret_project_match"), policy.RuleDefault( name='secret_project_creator', check_str="rule:creator and rule:secret_project_match and " + "rule:secret_creator_user"), policy.RuleDefault( name='container_project_admin', check_str="rule:admin and rule:container_project_match"), policy.RuleDefault( name='container_project_creator', check_str="rule:creator and rule:container_project_match and " + "rule:container_creator_user"), ] def list_rules(): return rules barbican-9.1.0.dev50/barbican/common/policies/consumers.py0000664000175000017500000000607113616500636023652 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. from oslo_policy import policy # FIXME(hrybacki): Note that the GET rules have the same check strings. # The POST/DELETE rules also share the check strings. # These can probably be turned into constants in base rules = [ policy.DocumentedRuleDefault( name='consumer:get', check_str='rule:admin or rule:observer or rule:creator or ' + 'rule:audit or rule:container_non_private_read or ' + 'rule:container_project_creator or ' + 'rule:container_project_admin or rule:container_acl_read', scope_types=[], description='List a specific consumer for a given container.', operations=[ { 'path': '/v1/containers/{container-id}/consumers/' + '{consumer-id}', 'method': 'GET' } ] ), policy.DocumentedRuleDefault( name='consumers:get', check_str='rule:admin or rule:observer or rule:creator or ' + 'rule:audit or rule:container_non_private_read or ' + 'rule:container_project_creator or ' + 'rule:container_project_admin or rule:container_acl_read', scope_types=[], description="List a container's consumers.", operations=[ { 'path': '/v1/containers/{container-id}/consumers', 'method': 'GET' } ] ), policy.DocumentedRuleDefault( name='consumers:post', check_str='rule:admin or rule:container_non_private_read or ' + 'rule:container_project_creator or ' + 'rule:container_project_admin or rule:container_acl_read', scope_types=[], description='Creates a consumer.', operations=[ { 'path': '/v1/containers/{container-id}/consumers', 'method': 'POST' } ] ), policy.DocumentedRuleDefault( name='consumers:delete', check_str='rule:admin or rule:container_non_private_read or ' + 'rule:container_project_creator or ' + 'rule:container_project_admin or rule:container_acl_read', scope_types=[], description='Deletes a consumer.', operations=[ { 'path': '/v1/containers/{container-id}/consumers/' + '{consumer-id}', 'method': 'DELETE' } ] ), ] def list_rules(): return rules barbican-9.1.0.dev50/barbican/common/policies/__init__.py0000664000175000017500000000256713616500636023371 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
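# list_rules() below chains every per-resource rule list into the single
# iterable that oslo.policy tooling consumes. A hedged sketch of the
# wiring (the actual stanza lives in setup.cfg, not in this module) and of
# generating a sample policy file from it:
#
#     [entry_points]
#     oslo.policy.policies =
#         barbican = barbican.common.policies:list_rules
#
#     $ oslopolicy-sample-generator --namespace barbican \
#           --output-file policy.yaml.sample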
import itertools from barbican.common.policies import acls from barbican.common.policies import base from barbican.common.policies import consumers from barbican.common.policies import containers from barbican.common.policies import orders from barbican.common.policies import quotas from barbican.common.policies import secretmeta from barbican.common.policies import secrets from barbican.common.policies import secretstores from barbican.common.policies import transportkeys def list_rules(): return itertools.chain( base.list_rules(), acls.list_rules(), consumers.list_rules(), containers.list_rules(), orders.list_rules(), quotas.list_rules(), secretmeta.list_rules(), secrets.list_rules(), secretstores.list_rules(), transportkeys.list_rules(), ) barbican-9.1.0.dev50/barbican/common/policies/containers.py0000664000175000017500000000552213616500636024001 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_policy import policy rules = [ policy.DocumentedRuleDefault( name='containers:post', check_str='rule:admin_or_creator', scope_types=[], description='Creates a container.', operations=[ { 'path': '/v1/containers', 'method': 'POST' } ] ), policy.DocumentedRuleDefault( name='containers:get', check_str='rule:all_but_audit', scope_types=[], description='Lists a projects containers.', operations=[ { 'path': '/v1/containers', 'method': 'GET' } ] ), policy.DocumentedRuleDefault( name='container:get', check_str='rule:container_non_private_read or ' + 'rule:container_project_creator or ' + 'rule:container_project_admin or ' + 'rule:container_acl_read', scope_types=[], description='Retrieves a single container.', operations=[ { 'path': '/v1/containers/{container-id}', 'method': 'GET' } ] ), policy.DocumentedRuleDefault( name='container:delete', check_str='rule:container_project_admin or ' + 'rule:container_project_creator', scope_types=[], description='Deletes a container.', operations=[ { 'path': '/v1/containers/{uuid}', 'method': 'DELETE' } ] ), policy.DocumentedRuleDefault( name='container_secret:post', check_str='rule:admin', scope_types=[], description='Add a secret to an existing container.', operations=[ { 'path': '/v1/containers/{container-id}/secrets', 'method': 'POST' } ] ), policy.DocumentedRuleDefault( name='container_secret:delete', check_str='rule:admin', scope_types=[], description='Remove a secret from a container.', operations=[ { 'path': '/v1/containers/{container-id}/secrets/{secret-id}', 'method': 'DELETE' } ] ), ] def list_rules(): return rules barbican-9.1.0.dev50/barbican/common/validators.py0000664000175000017500000011256513616500636022203 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ API JSON validators. """ import abc import base64 import re import jsonschema as schema from ldap3.core import exceptions as ldap_exceptions from ldap3.utils.dn import parse_dn from OpenSSL import crypto from oslo_utils import timeutils import six from barbican.api import controllers from barbican.common import config from barbican.common import exception from barbican.common import hrefs from barbican.common import utils from barbican import i18n as u from barbican.model import models from barbican.model import repositories as repo from barbican.plugin.interface import secret_store from barbican.plugin.util import mime_types DEFAULT_MAX_SECRET_BYTES = config.DEFAULT_MAX_SECRET_BYTES LOG = utils.getLogger(__name__) CONF = config.CONF MYSQL_SMALL_INT_MAX = 32767 ACL_OPERATIONS = ['read', 'write', 'delete', 'list'] def secret_too_big(data): if isinstance(data, six.text_type): return len(data.encode('UTF-8')) > CONF.max_allowed_secret_in_bytes else: return len(data) > CONF.max_allowed_secret_in_bytes def get_invalid_property(validation_error): # we are interested in the second item which is the failed propertyName. if validation_error.schema_path and len(validation_error.schema_path) > 1: return validation_error.schema_path[1] def validate_stored_key_rsa_container(project_id, container_ref, req): try: container_id = hrefs.get_container_id_from_ref(container_ref) except Exception: reason = u._("Bad Container Reference {ref}").format( ref=container_ref ) raise exception.InvalidContainer(reason=reason) container_repo = repo.get_container_repository() container = container_repo.get_container_by_id(entity_id=container_id, suppress_exception=True) if not container: reason = u._("Container Not Found") raise exception.InvalidContainer(reason=reason) if container.type != 'rsa': reason = u._("Container Wrong Type") raise exception.InvalidContainer(reason=reason) ctxt = controllers._get_barbican_context(req) inst = controllers.containers.ContainerController(container) controllers._do_enforce_rbac(inst, req, controllers.containers.CONTAINER_GET, ctxt) @six.add_metaclass(abc.ABCMeta) class ValidatorBase(object): """Base class for validators.""" name = '' @abc.abstractmethod def validate(self, json_data, parent_schema=None): """Validate the input JSON. :param json_data: JSON to validate against this class' internal schema. :param parent_schema: Name of the parent schema to this schema. :returns: dict -- JSON content, post-validation and : normalization/defaulting. :raises: schema.ValidationError on schema violations. """ def _full_name(self, parent_schema=None): """Validator schema name accessor Returns the full schema name for this validator, including parent name. """ schema_name = self.name if parent_schema: schema_name = u._( "{schema_name}' within '{parent_schema_name}").format( schema_name=self.name, parent_schema_name=parent_schema) return schema_name def _assert_schema_is_valid(self, json_data, schema_name): """Assert that the JSON structure is valid for the given schema. :raises: InvalidObject exception if the data is not schema compliant. 
""" try: schema.validate(json_data, self.schema) except schema.ValidationError as e: raise exception.InvalidObject(schema=schema_name, reason=e.message, property=get_invalid_property(e)) def _assert_validity(self, valid_condition, schema_name, message, property): """Assert that a certain condition is met. :raises: InvalidObject exception if the condition is not met. """ if not valid_condition: raise exception.InvalidObject(schema=schema_name, reason=message, property=property) class NewSecretValidator(ValidatorBase): """Validate a new secret.""" def __init__(self): self.name = 'Secret' # TODO(jfwood): Get the list of mime_types from the crypto plugins? self.schema = { "type": "object", "properties": { "name": {"type": ["string", "null"], "maxLength": 255}, "algorithm": {"type": "string", "maxLength": 255}, "mode": {"type": "string", "maxLength": 255}, "bit_length": { "type": "integer", "minimum": 1, "maximum": MYSQL_SMALL_INT_MAX }, "expiration": {"type": "string", "maxLength": 255}, "payload": {"type": "string"}, "secret_type": { "type": "string", "maxLength": 80, "enum": [secret_store.SecretType.SYMMETRIC, secret_store.SecretType.PASSPHRASE, secret_store.SecretType.PRIVATE, secret_store.SecretType.PUBLIC, secret_store.SecretType.CERTIFICATE, secret_store.SecretType.OPAQUE] }, "payload_content_type": { "type": ["string", "null"], "maxLength": 255 }, "payload_content_encoding": { "type": "string", "maxLength": 255, "enum": [ "base64" ] }, "transport_key_needed": { "type": "string", "enum": ["true", "false"] }, "transport_key_id": {"type": "string"}, }, } def validate(self, json_data, parent_schema=None): """Validate the input JSON for the schema for secrets.""" schema_name = self._full_name(parent_schema) self._assert_schema_is_valid(json_data, schema_name) json_data['name'] = self._extract_name(json_data) expiration = self._extract_expiration(json_data, schema_name) self._assert_expiration_is_valid(expiration, schema_name) json_data['expiration'] = expiration content_type = json_data.get('payload_content_type') if 'payload' in json_data: content_encoding = json_data.get('payload_content_encoding') self._validate_content_parameters(content_type, content_encoding, schema_name) payload = self._extract_payload(json_data) self._assert_validity(payload, schema_name, u._("If 'payload' specified, must be non " "empty"), "payload") self._validate_payload_by_content_encoding(content_encoding, payload, schema_name) json_data['payload'] = payload elif 'payload_content_type' in json_data: # parent_schema would be populated if it comes from an order. 
self._assert_validity(parent_schema is not None, schema_name, u._("payload must be provided when " "payload_content_type is specified"), "payload") if content_type: self._assert_validity( mime_types.is_supported(content_type), schema_name, u._("payload_content_type is not one of {supported}" ).format(supported=mime_types.SUPPORTED), "payload_content_type") return json_data def _extract_name(self, json_data): """Extracts and returns the name from the JSON data.""" name = json_data.get('name') if isinstance(name, six.string_types): return name.strip() return None def _extract_expiration(self, json_data, schema_name): """Extracts and returns the expiration date from the JSON data.""" expiration = None expiration_raw = json_data.get('expiration') if expiration_raw and expiration_raw.strip(): try: expiration_tz = timeutils.parse_isotime(expiration_raw.strip()) expiration = timeutils.normalize_time(expiration_tz) except ValueError: LOG.exception("Problem parsing expiration date") raise exception.InvalidObject( schema=schema_name, reason=u._("Invalid date for 'expiration'"), property="expiration") return expiration def _assert_expiration_is_valid(self, expiration, schema_name): """Asserts that the given expiration date is valid. Expiration dates must be in the future, not the past. """ if expiration: # Verify not already expired. utcnow = timeutils.utcnow() self._assert_validity(expiration > utcnow, schema_name, u._("'expiration' is before current time"), "expiration") def _validate_content_parameters(self, content_type, content_encoding, schema_name): """Content parameter validator. Check that the content_type, content_encoding and the parameters that they affect are valid. """ self._assert_validity( content_type is not None, schema_name, u._("If 'payload' is supplied, 'payload_content_type' must also " "be supplied."), "payload_content_type") self._assert_validity( mime_types.is_supported(content_type), schema_name, u._("payload_content_type is not one of {supported}" ).format(supported=mime_types.SUPPORTED), "payload_content_type") self._assert_validity( mime_types.is_content_type_with_encoding_supported( content_type, content_encoding), schema_name, u._("payload_content_encoding is not one of {supported}").format( supported=mime_types.get_supported_encodings(content_type)), "payload_content_encoding") def _validate_payload_by_content_encoding(self, payload_content_encoding, payload, schema_name): if payload_content_encoding == 'base64': try: base64.b64decode(payload) except Exception: LOG.exception("Problem parsing payload") raise exception.InvalidObject( schema=schema_name, reason=u._("Invalid payload for payload_content_encoding"), property="payload") def _extract_payload(self, json_data): """Extracts and returns the payload from the JSON data. 
:raises: LimitExceeded if the payload is too big """ payload = json_data.get('payload', '') if secret_too_big(payload): raise exception.LimitExceeded() return payload.strip() class NewSecretMetadataValidator(ValidatorBase): """Validate new secret metadata.""" def __init__(self): self.name = 'SecretMetadata' self.schema = { "type": "object", "$schema": "http://json-schema.org/draft-03/schema", "properties": { "metadata": {"type": "object", "required": True}, } } def validate(self, json_data, parent_schema=None): """Validate the input JSON for the schema for secret metadata.""" schema_name = self._full_name(parent_schema) self._assert_schema_is_valid(json_data, schema_name) return self._extract_metadata(json_data) def _extract_metadata(self, json_data): """Extracts and returns the metadata from the JSON data.""" metadata = json_data['metadata'] # Iterate over a copy of the keys, since the dict may be mutated # below; mutating a dict while iterating it raises RuntimeError. for key in list(metadata.keys()): # make sure key is a string and url-safe. if not isinstance(key, six.string_types): raise exception.InvalidMetadataRequest() self._check_string_url_safe(key) # make sure value is a string. value = metadata[key] if not isinstance(value, six.string_types): raise exception.InvalidMetadataRequest() # If key is not lowercase, then change it if not key.islower(): del metadata[key] metadata[key.lower()] = value return metadata def _check_string_url_safe(self, string): """Checks if string can be part of a URL.""" if not re.match("^[A-Za-z0-9_-]*$", string): raise exception.InvalidMetadataKey() class NewSecretMetadatumValidator(ValidatorBase): """Validate new secret metadatum.""" def __init__(self): self.name = 'SecretMetadatum' self.schema = { "type": "object", "$schema": "http://json-schema.org/draft-03/schema", "properties": { "key": { "type": "string", "maxLength": 255, "required": True }, "value": { "type": "string", "maxLength": 255, "required": True }, }, "additionalProperties": False } def validate(self, json_data, parent_schema=None): """Validate the input JSON for the schema for secret metadata.""" schema_name = self._full_name(parent_schema) self._assert_schema_is_valid(json_data, schema_name) key = self._extract_key(json_data) value = self._extract_value(json_data) return {"key": key, "value": value} def _extract_key(self, json_data): """Extracts and returns the key from the JSON data.""" key = json_data['key'] self._check_string_url_safe(key) key = key.lower() return key def _extract_value(self, json_data): """Extracts and returns the value from the JSON data.""" value = json_data['value'] return value def _check_string_url_safe(self, string): """Checks if string can be part of a URL.""" if not re.match("^[A-Za-z0-9_-]*$", string): raise exception.InvalidMetadataKey() class CACommonHelpersMixin(object): def _validate_subject_dn_data(self, subject_dn): """Confirm that the subject_dn contains valid data Validate that the subject_dn string parses without error If not, raise InvalidSubjectDN """ try: parse_dn(subject_dn) except ldap_exceptions.LDAPInvalidDnError: raise exception.InvalidSubjectDN(subject_dn=subject_dn) # TODO(atiwari) - Split this validator module and unit tests # into smaller modules class TypeOrderValidator(ValidatorBase, CACommonHelpersMixin): """Validate a new typed order.""" def __init__(self): self.name = 'Order' self.schema = { "type": "object", "$schema": "http://json-schema.org/draft-03/schema", "properties": { "meta": { "type": "object", "required": True }, "type": { "type": "string", "required": True, "enum": ['key', 'asymmetric', 'certificate'] } } } def validate(self, json_data, 
parent_schema=None): schema_name = self._full_name(parent_schema) self._assert_schema_is_valid(json_data, schema_name) order_type = json_data.get('type').lower() if order_type == models.OrderType.CERTIFICATE: certificate_meta = json_data.get('meta') self._validate_certificate_meta(certificate_meta, schema_name) elif order_type == models.OrderType.ASYMMETRIC: asymmetric_meta = json_data.get('meta') self._validate_asymmetric_meta(asymmetric_meta, schema_name) elif order_type == models.OrderType.KEY: key_meta = json_data.get('meta') self._validate_key_meta(key_meta, schema_name) else: self._raise_feature_not_implemented(order_type, schema_name) return json_data def _validate_key_meta(self, key_meta, schema_name): """Validation specific to meta for key type order.""" secret_validator = NewSecretValidator() secret_validator.validate(key_meta, parent_schema=self.name) self._assert_validity(key_meta.get('payload') is None, schema_name, u._("'payload' not allowed " "for key type order"), "meta") # Validation secret generation related fields. # TODO(jfwood): Invoke the crypto plugin for this purpose self._validate_meta_parameters(key_meta, "key", schema_name) def _validate_asymmetric_meta(self, asymmetric_meta, schema_name): """Validation specific to meta for asymmetric type order.""" # Validate secret metadata. secret_validator = NewSecretValidator() secret_validator.validate(asymmetric_meta, parent_schema=self.name) self._assert_validity(asymmetric_meta.get('payload') is None, schema_name, u._("'payload' not allowed " "for asymmetric type order"), "meta") self._validate_meta_parameters(asymmetric_meta, "asymmetric key", schema_name) def _get_required_metadata_value(self, metadata, key): data = metadata.get(key, None) if data is None: raise exception.MissingMetadataField(required=key) return data def _validate_certificate_meta(self, certificate_meta, schema_name): """Validation specific to meta for certificate type order.""" self._assert_validity(certificate_meta.get('payload') is None, schema_name, u._("'payload' not allowed " "for certificate type order"), "meta") if 'profile' in certificate_meta: if 'ca_id' not in certificate_meta: raise exception.MissingMetadataField(required='ca_id') jump_table = { 'simple-cmc': self._validate_simple_cmc_request, 'full-cmc': self._validate_full_cmc_request, 'stored-key': self._validate_stored_key_request, 'custom': self._validate_custom_request } request_type = certificate_meta.get("request_type", "custom") if request_type not in jump_table: raise exception.InvalidCertificateRequestType(request_type) jump_table[request_type](certificate_meta) def _validate_simple_cmc_request(self, certificate_meta): """Validates simple CMC (which are PKCS10 requests).""" request_data = self._get_required_metadata_value( certificate_meta, "request_data") self._validate_pkcs10_data(request_data) def _validate_full_cmc_request(self, certificate_meta): """Validate full CMC request. 
:param certificate_meta: request data from the order :raises: FullCMCNotSupported """ raise exception.FullCMCNotSupported() def _validate_stored_key_request(self, certificate_meta): """Validate stored-key cert request.""" self._get_required_metadata_value( certificate_meta, "container_ref") subject_dn = self._get_required_metadata_value( certificate_meta, "subject_dn") self._validate_subject_dn_data(subject_dn) # container will be validated by validate_stored_key_rsa_container() extensions = certificate_meta.get("extensions", None) if extensions: self._validate_extensions_data(extensions) def _validate_custom_request(self, certificate_meta): """Validate custom data request We cannot do any validation here because the request parameters are custom. Validation will be done by the plugin. We may choose to select the relevant plugin and call the supports() method to raise validation errors. """ pass def _validate_pkcs10_data(self, request_data): """Confirm that the request_data is valid base64 encoded PKCS#10. Base64 decode the request, if it fails raise PayloadDecodingError. Then parse data into the ASN.1 structure defined by PKCS10 and verify the signing information. If parsing of verifying fails, raise InvalidPKCS10Data. """ try: csr_pem = base64.b64decode(request_data) except Exception: raise exception.PayloadDecodingError() try: csr = crypto.load_certificate_request(crypto.FILETYPE_PEM, csr_pem) except Exception: reason = u._("Bad format") raise exception.InvalidPKCS10Data(reason=reason) try: pubkey = csr.get_pubkey() csr.verify(pubkey) except Exception: reason = u._("Signing key incorrect") raise exception.InvalidPKCS10Data(reason=reason) def _validate_full_cmc_data(self, request_data): """Confirm that request_data is valid Full CMC data.""" """ TODO(alee-3) complete this function Parse data into the ASN.1 structure defined for full CMC. If parsing fails, raise InvalidCMCData """ pass def _validate_extensions_data(self, extensions): """Confirm that the extensions data is valid. :param extensions: base 64 encoded ASN.1 string of extension data :raises: CertificateExtensionsNotSupported """ """ TODO(alee-3) complete this function Parse the extensions data into the correct ASN.1 structure. If the parsing fails, throw InvalidExtensionsData. For now, fail this validation because extensions parsing is not supported. 
""" raise exception.CertificateExtensionsNotSupported() def _validate_meta_parameters(self, meta, order_type, schema_name): self._assert_validity(meta.get('algorithm'), schema_name, u._("'algorithm' is required field " "for {0} type order").format(order_type), "meta") self._assert_validity(meta.get('bit_length'), schema_name, u._("'bit_length' is required field " "for {0} type order").format(order_type), "meta") self._validate_bit_length(meta, schema_name) def _extract_expiration(self, json_data, schema_name): """Extracts and returns the expiration date from the JSON data.""" expiration = None expiration_raw = json_data.get('expiration', None) if expiration_raw and expiration_raw.strip(): try: expiration_tz = timeutils.parse_isotime(expiration_raw) expiration = timeutils.normalize_time(expiration_tz) except ValueError: LOG.exception("Problem parsing expiration date") raise exception.InvalidObject(schema=schema_name, reason=u._("Invalid date " "for 'expiration'"), property="expiration") return expiration def _validate_bit_length(self, meta, schema_name): bit_length = int(meta.get('bit_length')) if bit_length % 8 != 0: raise exception.UnsupportedField(field="bit_length", schema=schema_name, reason=u._("Must be a" " positive integer" " that is a" " multiple of 8")) def _raise_feature_not_implemented(self, order_type, schema_name): raise exception.FeatureNotImplemented(field='type', schema=schema_name, reason=u._("Feature not " "implemented for " "'{0}' order type") .format(order_type)) class ACLValidator(ValidatorBase): """Validate ACL(s).""" def __init__(self): self.name = 'ACL' self.schema = { "$schema": "http://json-schema.org/draft-04/schema#", "definitions": { "acl_defintion": { "type": "object", "properties": { "users": { "type": "array", "items": [ {"type": "string", "maxLength": 255} ] }, "project-access": {"type": "boolean"} }, "additionalProperties": False } }, "type": "object", "properties": { "read": {"$ref": "#/definitions/acl_defintion"}, }, "additionalProperties": False } def validate(self, json_data, parent_schema=None): schema_name = self._full_name(parent_schema) self._assert_schema_is_valid(json_data, schema_name) return json_data class ContainerConsumerValidator(ValidatorBase): """Validate a Consumer.""" def __init__(self): self.name = 'Consumer' self.schema = { "type": "object", "properties": { "URL": {"type": "string", "minLength": 1}, "name": {"type": "string", "maxLength": 255, "minLength": 1} }, "required": ["name", "URL"] } def validate(self, json_data, parent_schema=None): schema_name = self._full_name(parent_schema) self._assert_schema_is_valid(json_data, schema_name) return json_data class ContainerSecretValidator(ValidatorBase): """Validate a Container Secret.""" def __init__(self): self.name = 'ContainerSecret' self.schema = { "type": "object", "properties": { "name": {"type": "string", "maxLength": 255}, "secret_ref": {"type": "string", "minLength": 1} }, "required": ["secret_ref"] } def validate(self, json_data, parent_schema=None): schema_name = self._full_name(parent_schema) self._assert_schema_is_valid(json_data, schema_name) return json_data class ContainerValidator(ValidatorBase): """Validator for all types of Container.""" def __init__(self): self.name = 'Container' self.schema = { "type": "object", "properties": { "name": {"type": ["string", "null"], "maxLength": 255}, "type": { "type": "string", # TODO(hgedikli): move this to a common location "enum": ["generic", "rsa", "certificate"] }, "secret_refs": { "type": "array", "items": { "type": "object", 
"required": ["secret_ref"], "properties": { "name": { "type": ["string", "null"], "maxLength": 255 }, "secret_ref": {"type": "string", "minLength": 1} } } } }, "required": ["type"] } def validate(self, json_data, parent_schema=None): schema_name = self._full_name(parent_schema) self._assert_schema_is_valid(json_data, schema_name) container_type = json_data.get('type') secret_refs = json_data.get('secret_refs') if not secret_refs: return json_data secret_refs_names = set(secret_ref.get('name', '') for secret_ref in secret_refs) self._assert_validity( len(secret_refs_names) == len(secret_refs), schema_name, u._("Duplicate reference names are not allowed"), "secret_refs") # The combination of container_id and secret_id is expected to be # primary key for container_secret so same secret id (ref) cannot be # used within a container secret_ids = set(self._get_secret_id_from_ref(secret_ref) for secret_ref in secret_refs) self._assert_validity( len(secret_ids) == len(secret_refs), schema_name, u._("Duplicate secret ids are not allowed"), "secret_refs") # Ensure that our secret refs are valid relative to our config, no # spoofing allowed! req_host_href = utils.get_base_url_from_request() for secret_ref in secret_refs: if not secret_ref.get('secret_ref').startswith(req_host_href): raise exception.UnsupportedField( field='secret_ref', schema=schema_name, reason=u._( "Secret_ref does not match the configured hostname, " "please try again" ) ) if container_type == 'rsa': self._validate_rsa(secret_refs_names, schema_name) elif container_type == 'certificate': self._validate_certificate(secret_refs_names, schema_name) return json_data def _validate_rsa(self, secret_refs_names, schema_name): required_names = {'public_key', 'private_key'} optional_names = {'private_key_passphrase'} contains_unsupported_names = self._contains_unsupported_names( secret_refs_names, required_names | optional_names) self._assert_validity( not contains_unsupported_names, schema_name, u._("only 'private_key', 'public_key' and " "'private_key_passphrase' reference names are " "allowed for RSA type"), "secret_refs") self._assert_validity( self._has_minimum_required(secret_refs_names, required_names), schema_name, u._("The minimum required reference names are 'public_key' and" "'private_key' for RSA type"), "secret_refs") def _validate_certificate(self, secret_refs_names, schema_name): required_names = {'certificate'} optional_names = {'private_key', 'private_key_passphrase', 'intermediates'} contains_unsupported_names = self._contains_unsupported_names( secret_refs_names, required_names.union(optional_names)) self._assert_validity( not contains_unsupported_names, schema_name, u._("only 'private_key', 'certificate' , " "'private_key_passphrase', or 'intermediates' " "reference names are allowed for Certificate type"), "secret_refs") self._assert_validity( self._has_minimum_required(secret_refs_names, required_names), schema_name, u._("The minimum required reference name is 'certificate' " "for Certificate type"), "secret_refs") def _contains_unsupported_names(self, secret_refs_names, supported_names): if secret_refs_names.difference(supported_names): return True return False def _has_minimum_required(self, secret_refs_names, required_names): if required_names.issubset(secret_refs_names): return True return False def _get_secret_id_from_ref(self, secret_ref): secret_id = secret_ref.get('secret_ref') if secret_id.endswith('/'): secret_id = secret_id.rsplit('/', 2)[1] elif '/' in secret_id: secret_id = secret_id.rsplit('/', 1)[1] 
return secret_id class NewTransportKeyValidator(ValidatorBase): """Validate a new transport key.""" def __init__(self): self.name = 'Transport Key' self.schema = { "type": "object", "properties": { "plugin_name": {"type": "string"}, "transport_key": {"type": "string"}, }, } def validate(self, json_data, parent_schema=None): schema_name = self._full_name(parent_schema) self._assert_schema_is_valid(json_data, schema_name) plugin_name = json_data.get('plugin_name', '').strip() self._assert_validity(plugin_name, schema_name, u._("plugin_name must be provided"), "plugin_name") json_data['plugin_name'] = plugin_name transport_key = json_data.get('transport_key', '').strip() self._assert_validity(transport_key, schema_name, u._("transport_key must be provided"), "transport_key") json_data['transport_key'] = transport_key return json_data class ProjectQuotaValidator(ValidatorBase): """Validate a new project quota.""" def __init__(self): self.name = 'Project Quota' self.schema = { 'type': 'object', 'properties': { 'project_quotas': { 'type': 'object', 'properties': { 'secrets': {'type': 'integer'}, 'orders': {'type': 'integer'}, 'containers': {'type': 'integer'}, 'consumers': {'type': 'integer'}, 'cas': {'type': 'integer'} }, 'additionalProperties': False, } }, 'required': ['project_quotas'], 'additionalProperties': False } def validate(self, json_data, parent_schema=None): schema_name = self._full_name(parent_schema) self._assert_schema_is_valid(json_data, schema_name) return json_data class NewCAValidator(ValidatorBase, CACommonHelpersMixin): """Validate new CA(s).""" def __init__(self): self.name = 'CA' self.schema = { 'type': 'object', 'properties': { 'name': {'type': 'string', "minLength": 1}, 'subject_dn': {'type': 'string', "minLength": 1}, 'parent_ca_ref': {'type': 'string', "minLength": 1}, 'description': {'type': 'string'}, }, 'required': ['name', 'subject_dn', 'parent_ca_ref'], 'additionalProperties': False } def validate(self, json_data, parent_schema=None): schema_name = self._full_name(parent_schema) self._assert_schema_is_valid(json_data, schema_name) subject_dn = json_data['subject_dn'] self._validate_subject_dn_data(subject_dn) return json_data class SecretConsumerValidator(ValidatorBase): """Validate a new Secret Consumer.""" def __init__(self): self.name = "Secret Consumer" self.schema = { "type": "object", "properties": { "service": { "type": "string", "maxLength": 255, "minLength": 1, }, "resource_type": { "type": "string", "maxLength": 255, "minLength": 1, }, "resource_id": {"type": "string", "minLength": 1}, }, "required": ["service", "resource_type", "resource_id"], } def validate(self, json_data, parent_schema=None): schema_name = self._full_name(parent_schema) self._assert_schema_is_valid(json_data, schema_name) return json_data barbican-9.1.0.dev50/barbican/common/quota.py0000664000175000017500000001701013616500636021151 0ustar sahidsahid00000000000000# Copyright (c) 2015 Cisco Systems # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
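# Illustrative sketch (hypothetical helper, not part of the original
# module): the quota values handled below follow a simple convention --
# any negative value means "unlimited", zero means "disabled", and a
# positive value is a hard cap enforced against current resource counts.
def _describe_quota(value):
    """Describe a configured quota value per the convention above."""
    if value < 0:
        return 'unlimited'
    if value == 0:
        return 'disabled'
    return 'limited to {}'.format(value)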
from barbican.common import config from barbican.common import exception from barbican.common import hrefs from barbican.common import resources as res from barbican.model import repositories as repo # All negative values will be treated as unlimited UNLIMITED_VALUE = -1 DISABLED_VALUE = 0 CONF = config.CONF class QuotaDriver(object): """Driver to enforce quotas and obtain quota information.""" def __init__(self): self.repo = repo.get_project_quotas_repository() def _get_resources(self): """List of resources that can be constrained by a quota""" return ['secrets', 'orders', 'containers', 'consumers', 'cas'] def _get_defaults(self): """Return list of default quotas""" quotas = { 'secrets': CONF.quotas.quota_secrets, 'orders': CONF.quotas.quota_orders, 'containers': CONF.quotas.quota_containers, 'consumers': CONF.quotas.quota_consumers, 'cas': CONF.quotas.quota_cas } return quotas def _extract_project_quotas(self, project_quotas_model): """Convert project quotas model to Python dict :param project_quotas_model: Model containing quota information :return: Python dict containing quota information """ resp_quotas = {} for resource in self._get_resources(): resp_quotas[resource] = getattr(project_quotas_model, resource) return resp_quotas def _compute_effective_quotas(self, configured_quotas): """Merge configured and default quota information When a quota value is not set, use the default value :param configured_quotas: configured quota values :return: effective quotas """ default_quotas = self._get_defaults() resp_quotas = dict(configured_quotas) for resource, quota in resp_quotas.items(): if quota is None: resp_quotas[resource] = default_quotas[resource] return resp_quotas def get_effective_quotas(self, external_project_id): """Collect and return the effective quotas for a project :param external_project_id: external ID of current project :return: dict with effective quotas """ try: retrieved_project_quotas = self.repo.get_by_external_project_id( external_project_id) except exception.NotFound: resp_quotas = self._get_defaults() else: resp_quotas = self._compute_effective_quotas( self._extract_project_quotas(retrieved_project_quotas)) return resp_quotas def is_unlimited_value(self, v): """A helper method to check for unlimited value.""" return v <= UNLIMITED_VALUE def is_disabled_value(self, v): """A helper method to check for disabled value.""" return v == DISABLED_VALUE def set_project_quotas(self, external_project_id, parsed_project_quotas): """Create a new database entry, or update existing one :param external_project_id: ID of project whose quotas are to be set :param parsed_project_quotas: quota values to save in database :return: None """ project = res.get_or_create_project(external_project_id) self.repo.create_or_update_by_project_id(project.id, parsed_project_quotas) # commit to DB to avoid async issues if the enforcer is called from # another thread repo.commit() def get_project_quotas(self, external_project_id): """Retrieve configured quota information from database :param external_project_id: ID of project for whose values are wanted :return: the values """ try: retrieved_project_quotas = self.repo.get_by_external_project_id( external_project_id) except exception.NotFound: return None resp_quotas = self._extract_project_quotas(retrieved_project_quotas) resp = {'project_quotas': resp_quotas} return resp def get_project_quotas_list(self, offset_arg=None, limit_arg=None): """Return a dict and list of all configured quota information :return: a dict and list of a page of quota config 
info """ retrieved_project_quotas, offset, limit, total =\ self.repo.get_by_create_date(offset_arg=offset_arg, limit_arg=limit_arg, suppress_exception=True) resp_quotas = [] for quotas in retrieved_project_quotas: list_item = {'project_id': quotas.project.external_id, 'project_quotas': self._extract_project_quotas(quotas)} resp_quotas.append(list_item) resp = {'project_quotas': resp_quotas} resp_overall = hrefs.add_nav_hrefs( 'project_quotas', offset, limit, total, resp) resp_overall.update({'total': total}) return resp_overall def delete_project_quotas(self, external_project_id): """Remove configured quota information from database :param external_project_id: ID of project whose quotas will be deleted :raises NotFound: if project has no configured values :return: None """ self.repo.delete_by_external_project_id(external_project_id) def get_quotas(self, external_project_id): """Get the effective quotas for a project Effective quotas are based on both configured and default values :param external_project_id: ID of project for which to get quotas :return: dict of effective quota values """ resp_quotas = self.get_effective_quotas(external_project_id) resp = {'quotas': resp_quotas} return resp class QuotaEnforcer(object): """Checks quotas limits and current resource usage levels""" def __init__(self, resource_type, resource_repo): self.quota_driver = QuotaDriver() self.resource_type = resource_type self.resource_repo = resource_repo def enforce(self, project): """Enforce the quota limit for the resource :param project: the project object corresponding to the sender :raises QuotaReached: exception raised if quota forbids request :return: None """ quotas = self.quota_driver.get_effective_quotas(project.external_id) quota = quotas[self.resource_type] reached = False count = 0 if self.quota_driver.is_unlimited_value(quota): pass elif self.quota_driver.is_disabled_value(quota): reached = True else: count = self.resource_repo.get_count(project.id) if count >= quota: reached = True if reached: raise exception.QuotaReached( external_project_id=project.external_id, resource_type=self.resource_type, quota=quota) barbican-9.1.0.dev50/barbican/common/resources.py0000664000175000017500000000367513616500636022046 0ustar sahidsahid00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ Shared business logic. """ from barbican.common import exception from barbican.common import utils from barbican.model import models from barbican.model import repositories LOG = utils.getLogger(__name__) GLOBAL_PREFERRED_PROJECT_ID = "GLOBAL_PREFERRED" def get_or_create_global_preferred_project(): return get_or_create_project(GLOBAL_PREFERRED_PROJECT_ID) def get_or_create_project(project_id): """Returns project with matching project_id. Creates it if it does not exist. :param project_id: The external-to-Barbican ID for this project. :param project_repo: Project repository. 
:return: Project model instance """ project_repo = repositories.get_project_repository() project = project_repo.find_by_external_project_id(project_id, suppress_exception=True) if not project: LOG.debug('Creating project for %s', project_id) project = models.Project() project.external_id = project_id project.status = models.States.ACTIVE try: project_repo.create_from(project) except exception.ConstraintCheck: # catch race condition for when another thread just created one project = project_repo.find_by_external_project_id( project_id, suppress_exception=False) return project barbican-9.1.0.dev50/barbican/common/hrefs.py0000664000175000017500000001514413616500636021135 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from barbican.common import utils def convert_resource_id_to_href(resource_slug, resource_id): """Convert the resource ID to a HATEOAS-style href with resource slug.""" if resource_id: resource = '{slug}/{id}'.format(slug=resource_slug, id=resource_id) else: resource = '{slug}/????'.format(slug=resource_slug) return utils.hostname_for_refs(resource=resource) def convert_secret_to_href(secret_id): """Convert the secret IDs to a HATEOAS-style href.""" return convert_resource_id_to_href('secrets', secret_id) def convert_order_to_href(order_id): """Convert the order IDs to a HATEOAS-style href.""" return convert_resource_id_to_href('orders', order_id) def convert_container_to_href(container_id): """Convert the container IDs to a HATEOAS-style href.""" return convert_resource_id_to_href('containers', container_id) def convert_transport_key_to_href(transport_key_id): """Convert the transport key IDs to a HATEOAS-style href.""" return convert_resource_id_to_href('transport_keys', transport_key_id) def convert_consumer_to_href(consumer_id): """Convert the consumer ID to a HATEOAS-style href.""" return convert_resource_id_to_href('consumers', consumer_id) + '/consumers' def convert_user_meta_to_href(secret_id): """Convert the consumer ID to a HATEOAS-style href.""" return convert_resource_id_to_href('secrets', secret_id) + '/metadata' def convert_certificate_authority_to_href(ca_id): """Convert the ca ID to a HATEOAS-style href.""" return convert_resource_id_to_href('cas', ca_id) def convert_secret_stores_to_href(secret_store_id): """Convert the secret-store ID to a HATEOAS-style href.""" return convert_resource_id_to_href('secret-stores', secret_store_id) # TODO(hgedikli) handle list of fields in here def convert_to_hrefs(fields): """Convert id's within a fields dict to HATEOAS-style hrefs.""" if 'secret_id' in fields: fields['secret_ref'] = convert_secret_to_href(fields['secret_id']) del fields['secret_id'] if 'order_id' in fields: fields['order_ref'] = convert_order_to_href(fields['order_id']) del fields['order_id'] if 'container_id' in fields: fields['container_ref'] = convert_container_to_href( fields['container_id']) del fields['container_id'] if 'transport_key_id' in fields: fields['transport_key_ref'] = convert_transport_key_to_href( fields['transport_key_id']) del 
fields['transport_key_id'] return fields def convert_list_to_href(resources_name, offset, limit): """Supports pretty output of paged-list hrefs. Convert the offset/limit info to a HATEOAS-style href suitable for use in a list navigation paging interface. """ resource = '{0}?limit={1}&offset={2}'.format(resources_name, limit, offset) return utils.hostname_for_refs(resource=resource) def previous_href(resources_name, offset, limit): """Supports pretty output of previous-page hrefs. Create a HATEOAS-style 'previous' href suitable for use in a list navigation paging interface, assuming the provided values are the currently viewed page. """ offset = max(0, offset - limit) return convert_list_to_href(resources_name, offset, limit) def next_href(resources_name, offset, limit): """Supports pretty output of next-page hrefs. Create a HATEOAS-style 'next' href suitable for use in a list navigation paging interface, assuming the provided values are the currently viewed page. """ offset = offset + limit return convert_list_to_href(resources_name, offset, limit) def add_nav_hrefs(resources_name, offset, limit, total_elements, data): """Adds next and/or previous hrefs to paged list responses. :param resources_name: Name of api resource :param offset: Element number (i.e. index) where current page starts :param limit: Max amount of elements listed on current page :param total_elements: Total number of elements :returns: augmented dictionary with next and/or previous hrefs """ if offset > 0: data.update({'previous': previous_href(resources_name, offset, limit)}) if total_elements > (offset + limit): data.update({'next': next_href(resources_name, offset, limit)}) return data def get_container_id_from_ref(container_ref): """Parse a container reference and return the container ID. TODO(Dave) Add some extra checking for valid prefix The container ID is the right-most element of the URL :param container_ref: HTTP reference of container :return: a string containing the ID of the container """ container_id = container_ref.rsplit('/', 1)[1] return container_id def get_secret_id_from_ref(secret_ref): """Parse a secret reference and return the secret ID :param secret_ref: HTTP reference of secret :return: a string containing the ID of the secret """ secret_id = secret_ref.rsplit('/', 1)[1] return secret_id def get_secrets_id_from_refs(secret_refs): """Parse a list of secret references and return the list of secret IDs :param secret_refs: a list of HTTP references of secrets :return: a list of strings containing the IDs of the secrets """ if secret_refs is None: return None secret_ids = [] for secret_ref in secret_refs: secret_id = secret_ref.get('secret_ref') if secret_id.endswith('/'): secret_id = secret_id.rsplit('/', 2)[1] elif '/' in secret_id: secret_id = secret_id.rsplit('/', 1)[1] secret_ids.append(secret_id) return secret_ids def get_ca_id_from_ref(ca_ref): """Parse a ca_ref and return the CA ID :param ca_ref: HTTP reference of the CA :return: a string containing the ID of the CA """ ca_id = ca_ref.rsplit('/', 1)[1] return ca_id barbican-9.1.0.dev50/barbican/plugin/0000775000175000017500000000000013616500640017450 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/plugin/util/0000775000175000017500000000000013616500640020425 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/plugin/util/utils.py0000664000175000017500000000376113616500636022153 0ustar sahidsahid00000000000000# Copyright (c) 2015 Rackspace, Inc.
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ Utilities to support plugins and plugin managers. """ from barbican.common import utils LOG = utils.getLogger(__name__) def instantiate_plugins(extension_manager, invoke_args=(), invoke_kwargs={}): """Attempt to create each plugin managed by a stevedore manager. While we could have let the stevedore 'extension_manager' create our plugins by passing 'invoke_on_load=True' to its initializer, its logic handles and suppresses any root cause exceptions emanating from the plugins' initializers. This function allows those exceptions to be exposed. :param extension_manager: A :class:`NamedExtensionManager` instance that has already processed the configured plugins, but has not yet created instances of these plugins. :param invoke_args: Arguments to pass to the new plugin instance. :param invoke_kwargs: Keyword arguments to pass to the new plugin instance. """ for ext in extension_manager.extensions: if not ext.obj: try: plugin_instance = ext.plugin(*invoke_args, **invoke_kwargs) except Exception: LOG.logger.disabled = False # Ensure not suppressing logs. LOG.exception("Problem seen creating plugin: '%s'", ext.name) else: ext.obj = plugin_instance def get_active_plugins(extension_manager): return [ext.obj for ext in extension_manager.extensions if ext.obj] barbican-9.1.0.dev50/barbican/plugin/util/mime_types.py0000664000175000017500000001301313616500636023155 0ustar sahidsahid00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ Barbican defined mime-types """ import six from barbican.common import utils # Supported content types # Note: These types may be provided by clients. PLAIN_TEXT = ['text/plain', 'text/plain;charset=utf-8', 'text/plain; charset=utf-8'] PLAIN_TEXT_CHARSETS = ['utf-8'] BINARY = ['application/octet-stream', 'application/pkcs8'] SUPPORTED = PLAIN_TEXT + BINARY # Normalizes client types to internal types. INTERNAL_CTYPES = {'text/plain': 'text/plain', 'text/plain;charset=utf-8': 'text/plain', 'text/plain; charset=utf-8': 'text/plain', 'application/octet-stream': 'application/octet-stream', 'application/pkcs8': 'application/pkcs8', 'application/aes': 'application/aes'} # Maps mime-types used to specify secret data formats to the types that can # be requested for secrets via GET calls. # Note: Raw client types are converted into the 'INTERNAL_CTYPES' types # which are then used as the keys to the 'CTYPES_MAPPINGS' below. 
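# For example, a secret stored with a client-supplied content type of # 'text/plain; charset=utf-8' is normalized to 'text/plain' above, so a # subsequent GET advertises {'content_types': {'default': 'text/plain'}}.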
CTYPES_PLAIN = {'default': 'text/plain'} CTYPES_BINARY = {'default': 'application/octet-stream'} CTYPES_PKCS8 = {'default': 'application/pkcs8'} CTYPES_AES = {'default': 'application/aes'} CTYPES_MAPPINGS = {'text/plain': CTYPES_PLAIN, 'application/octet-stream': CTYPES_BINARY, 'application/pkcs8': CTYPES_PKCS8, 'application/aes': CTYPES_AES} # Supported encodings ENCODINGS = ['base64'] # Maps normalized content-types to supported encoding(s) CTYPES_TO_ENCODINGS = {'text/plain': None, 'application/octet-stream': ['base64', 'binary'], 'application/pkcs8': ['base64', 'binary'], 'application/aes': None} def normalize_content_type(mime_type): """Normalize the supplied content-type to an internal form.""" stripped = list(six.moves.map(lambda x: x.strip(), mime_type.split(';'))) mime = stripped[0].lower() if len(stripped) > 1: # mime type includes charset charset_type = stripped[1].lower() if '=' not in charset_type: # charset is malformed return mime_type else: charset = list(six.moves.map(lambda x: x.strip(), charset_type.split('=')))[1] if charset not in PLAIN_TEXT_CHARSETS: # unsupported charset return mime_type return INTERNAL_CTYPES.get(mime, mime_type) def is_supported(mime_type): normalized_type = normalize_content_type(mime_type) return normalized_type in SUPPORTED def is_base64_encoding_supported(mime_type): if is_supported(mime_type): encodings = CTYPES_TO_ENCODINGS[INTERNAL_CTYPES[mime_type]] return encodings and ('base64' in encodings) return False def is_content_type_with_encoding_supported(content_type, content_encoding): if not is_supported(content_type): return False normalized_type = normalize_content_type(content_type) encodings = CTYPES_TO_ENCODINGS[INTERNAL_CTYPES[normalized_type]] if encodings: return content_encoding in encodings else: return content_encoding is None def get_supported_encodings(content_type): normalized_type = normalize_content_type(content_type) return CTYPES_TO_ENCODINGS[INTERNAL_CTYPES[normalized_type]] def is_base64_processing_needed(content_type, content_encoding): content_encodings = utils.get_accepted_encodings_direct(content_encoding) if content_encodings: if 'base64' not in content_encodings: return False if is_supported(content_type): encodings = CTYPES_TO_ENCODINGS[INTERNAL_CTYPES[content_type]] return encodings and 'base64' in encodings return False def use_binary_content_as_is(content_type, content_encoding): """Checks if headers are valid to allow binary content as-is.""" content_encodings = utils.get_accepted_encodings_direct(content_encoding) if content_encodings: if 'binary' not in content_encodings: return False if is_supported(content_type): encodings = CTYPES_TO_ENCODINGS[INTERNAL_CTYPES.get(content_type)] return encodings and 'binary' in encodings return INTERNAL_CTYPES.get(content_type) in BINARY def augment_fields_with_content_types(secret): """Add content-types and encodings information to a Secret's fields. Generate a dict of content types based on the data associated with the specified secret. :param secret: The models.Secret instance to add 'content_types' to. 
""" fields = secret.to_dict_fields() if not secret.secret_store_metadata: return fields content_type = secret.secret_store_metadata.get('content_type') if content_type and content_type.value in CTYPES_MAPPINGS: fields.update( {'content_types': CTYPES_MAPPINGS[content_type.value]} ) return fields barbican-9.1.0.dev50/barbican/plugin/util/translations.py0000664000175000017500000001606013616500636023530 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import serialization from OpenSSL import crypto from oslo_serialization import base64 import six from barbican import i18n as u # noqa from barbican.plugin.interface import secret_store as s from barbican.plugin.util import mime_types def normalize_before_encryption(unencrypted, content_type, content_encoding, secret_type, enforce_text_only=False): """Normalize unencrypted prior to plugin encryption processing. This normalizes the secrets before they are handed off to the SecretStore for storage. This converts all data to Base64 data. If the data is plain text then it encoded using utf-8 first and then Base64 encoded. Binary data is simply converted to Base64. :param str unencrypted: Raw payload :param str content_type: The media type for the payload :param str content_encoding: Transfer encoding :param str secret_type: The type of secret :param bool enforce_text_only: Require text content_type or base64 content_encoding :returns: Tuple containing the normalized (base64 encoded) payload and the normalized media type. """ if not unencrypted: raise s.SecretNoPayloadProvidedException() # Validate and normalize content-type. normalized_media_type = normalize_content_type(content_type) # Process plain-text type. if normalized_media_type in mime_types.PLAIN_TEXT: # normalize text to binary and then base64 encode it if six.PY3: b64payload = base64.encode_as_bytes(unencrypted) else: unencrypted_bytes = unencrypted.encode('utf-8') b64payload = base64.encode_as_bytes(unencrypted_bytes) # Process binary type. else: if not content_encoding: b64payload = base64.encode_as_bytes(unencrypted) elif content_encoding.lower() == 'base64': if not isinstance(unencrypted, six.binary_type): b64payload = unencrypted.encode('utf-8') else: b64payload = unencrypted elif enforce_text_only: # For text-based protocols (such as the one-step secret POST), # only 'base64' encoding is possible/supported. raise s.SecretContentEncodingMustBeBase64() else: # Unsupported content-encoding request. 
raise s.SecretContentEncodingNotSupportedException( content_encoding ) return b64payload, normalized_media_type def normalize_content_type(content_type): """Normalize the content type and validate that it is supported.""" normalized_mime = mime_types.normalize_content_type(content_type) if not mime_types.is_supported(normalized_mime): raise s.SecretContentTypeNotSupportedException(content_type) return normalized_mime def analyze_before_decryption(content_type): """Determine support for desired content type.""" if not mime_types.is_supported(content_type): raise s.SecretAcceptNotSupportedException(content_type) def denormalize_after_decryption(unencrypted, content_type): """Translate the decrypted data into the desired content type. This is called when the raw keys are requested by the user. The secret returned from the SecretStore is the unencrypted parameter. This 'denormalizes' the data back to its binary format. """ # Process plain-text type. if content_type in mime_types.PLAIN_TEXT: # normalize text to binary string try: unencrypted = base64.decode_as_text(unencrypted) except UnicodeDecodeError: raise s.SecretAcceptNotSupportedException(content_type) # Process binary type. elif content_type in mime_types.BINARY: unencrypted = base64.decode_as_bytes(unencrypted) else: raise s.SecretContentTypeNotSupportedException(content_type) return unencrypted def convert_pem_to_der(pem, secret_type): if secret_type == s.SecretType.PRIVATE: return _convert_private_pem_to_der(pem) elif secret_type == s.SecretType.PUBLIC: return _convert_public_pem_to_der(pem) elif secret_type == s.SecretType.CERTIFICATE: return _convert_certificate_pem_to_der(pem) else: reason = u._("Secret type can not be converted to DER") raise s.SecretGeneralException(reason=reason) def convert_der_to_pem(der, secret_type): if secret_type == s.SecretType.PRIVATE: return _convert_private_der_to_pem(der) elif secret_type == s.SecretType.PUBLIC: return _convert_public_der_to_pem(der) elif secret_type == s.SecretType.CERTIFICATE: return _convert_certificate_der_to_pem(der) else: reason = u._("Secret type can not be converted to PEM") raise s.SecretGeneralException(reason=reason) def _convert_private_pem_to_der(pem): private_key = serialization.load_pem_private_key( pem, password=None, backend=default_backend() ) der = private_key.private_bytes( encoding=serialization.Encoding.DER, format=serialization.PrivateFormat.PKCS8, encryption_algorithm=serialization.NoEncryption() ) return der def _convert_private_der_to_pem(der): private_key = serialization.load_der_private_key( der, password=None, backend=default_backend() ) pem = private_key.private_bytes( encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.PKCS8, encryption_algorithm=serialization.NoEncryption() ) return pem def _convert_public_pem_to_der(pem): public_key = serialization.load_pem_public_key( pem, backend=default_backend() ) der = public_key.public_bytes( encoding=serialization.Encoding.DER, format=serialization.PublicFormat.SubjectPublicKeyInfo ) return der def _convert_public_der_to_pem(der): public_key = serialization.load_der_public_key( der, backend=default_backend() ) pem = public_key.public_bytes( encoding=serialization.Encoding.PEM, format=serialization.PublicFormat.SubjectPublicKeyInfo ) return pem def _convert_certificate_pem_to_der(pem): cert = crypto.load_certificate(crypto.FILETYPE_PEM, pem) der = crypto.dump_certificate(crypto.FILETYPE_ASN1, cert) return der def _convert_certificate_der_to_pem(der): cert = 
crypto.load_certificate(crypto.FILETYPE_ASN1, der) pem = crypto.dump_certificate(crypto.FILETYPE_PEM, cert) return pem barbican-9.1.0.dev50/barbican/plugin/util/multiple_backends.py0000664000175000017500000003155313616500636024500 0ustar sahidsahid00000000000000# (c) Copyright 2016 Hewlett Packard Enterprise Development Company LP # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import collections from oslo_config import cfg from barbican.common import config from barbican.common import exception from barbican.common import utils from barbican import i18n as u from barbican.model import models as db_models from barbican.model import repositories as db_repos LOG = utils.getLogger(__name__) LOOKUP_PLUGINS_PREFIX = "secretstore:" def read_multiple_backends_config(): """Read and validate multiple-backend related configuration. Multiple backends configuration is read only when the multiple secret store flag is enabled. The configuration is validated to make sure that a section specific to each provided suffix exists in the service configuration. It is also validated that exactly one section has global_default = True. """ conf = config.get_module_config('secretstore') parsed_stores = None if utils.is_multiple_backends_enabled(): suffix_list = conf.secretstore.stores_lookup_suffix if not suffix_list: raise exception.MultipleSecretStoreLookupFailed() def register_options_dynamically(conf, group_name): store_opt_group = cfg.OptGroup( name=group_name, title='Plugins needed for this backend') store_opts = [ cfg.StrOpt('secret_store_plugin', help=u._('Internal name used to identify ' 'secretstore_plugin.') ), cfg.StrOpt('crypto_plugin', help=u._('Internal name used to identify ' 'crypto_plugin.') ), cfg.BoolOpt('global_default', default=False, help=u._('Flag to indicate if this plugin is ' 'global default plugin for deployment. ' 'Default is False.') ), ] conf.register_group(store_opt_group) conf.register_opts(store_opts, group=store_opt_group) group_names = [] # construct group names using those suffixes and dynamically register # oslo config options under each group name for suffix in suffix_list: group_name = LOOKUP_PLUGINS_PREFIX + suffix register_options_dynamically(conf, group_name) group_names.append(group_name) store_conf = collections.namedtuple('store_conf', ['store_plugin', 'crypto_plugin', 'global_default']) parsed_stores = [] global_default_count = 0 # Sections for the group names based on the suffix list are always found # as we are dynamically registering each group and its options.
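# For example, stores_lookup_suffix = ['software', 'kmip'] registers the # groups [secretstore:software] and [secretstore:kmip], each carrying the # secret_store_plugin, crypto_plugin and global_default options above.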
for group_name in group_names: conf_section = getattr(conf, group_name) if conf_section.global_default: global_default_count += 1 store_plugin = conf_section.secret_store_plugin if not store_plugin: raise exception.MultipleStorePluginValueMissing(conf_section) parsed_stores.append(store_conf(store_plugin, conf_section.crypto_plugin, conf_section.global_default)) if global_default_count != 1: raise exception.MultipleStoreIncorrectGlobalDefault( global_default_count) return parsed_stores def sync_secret_stores(secretstore_manager, crypto_manager=None): """Synchronize secret store plugin names between service conf and database This method reads secret and crypto store plugin name from service configuration and then synchronizes corresponding data maintained in database SecretStores table. Any new plugin name(s) added in service configuration is added as a new entry in SecretStores table. If global_default value is changed for existing plugins, then global_default flag is updated to reflect that change in database. If plugin name is removed from service configuration, then removal is possible as long as respective plugin names are NOT set as preferred secret store for a project. If it is used and plugin name is removed, then error is raised. This logic is intended to be invoked at server startup so any error raised here will result in critical failure. """ if not utils.is_multiple_backends_enabled(): return # doing local import to avoid circular dependency between manager and # current utils module from barbican.plugin.crypto import manager as cm secret_stores_repo = db_repos.get_secret_stores_repository() proj_store_repo = db_repos.get_project_secret_store_repository() if crypto_manager is None: crypto_manager = cm.get_manager() def get_friendly_name_dict(ext_manager): """Returns dict of plugin internal name and friendly name entries.""" names_dict = {} for ext in ext_manager.extensions: if ext.obj and hasattr(ext.obj, 'get_plugin_name'): names_dict[ext.name] = ext.obj.get_plugin_name() return names_dict ss_friendly_names = get_friendly_name_dict(secretstore_manager) crypto_friendly_names = get_friendly_name_dict(crypto_manager) # get existing secret stores data from database db_stores = secret_stores_repo.get_all() # read secret store data from service configuration conf_stores = [] for parsed_store in secretstore_manager.parsed_stores: crypto_plugin = parsed_store.crypto_plugin if not crypto_plugin: crypto_plugin = None if crypto_plugin: friendly_name = crypto_friendly_names.get(crypto_plugin) else: friendly_name = ss_friendly_names.get(parsed_store.store_plugin) conf_stores.append(db_models.SecretStores( name=friendly_name, store_plugin=parsed_store.store_plugin, crypto_plugin=crypto_plugin, global_default=parsed_store.global_default)) if db_stores: def fn_match(lh_store, rh_store): return (lh_store.store_plugin == rh_store.store_plugin and lh_store.crypto_plugin == rh_store.crypto_plugin) for conf_store in conf_stores: # find existing db entry for plugin using conf based plugin names db_store_match = next((db_store for db_store in db_stores if fn_match(conf_store, db_store)), None) if db_store_match: # update existing db entry if global default is changed now if db_store_match.global_default != conf_store.global_default: db_store_match.global_default = conf_store.global_default # persist flag change. 
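# (the operator may have moved global_default to a different # backend since the last service start)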
db_store_match.save() # remove the matched store from the local list after processing db_stores.remove(db_store_match) else: # new conf entry as no match found in existing entries secret_stores_repo.create_from(conf_store) # entries still present in db list are no longer configured in service # configuration, so try to remove them provided no project is using # them as its preferred secret store. for db_store in db_stores: if proj_store_repo.get_count_by_secret_store(db_store.id) == 0: secret_stores_repo.delete_entity_by_id(db_store.id, None) else: raise exception.MultipleStorePluginStillInUse(db_store.name) else: # initial setup case when there is no secret stores data in db for conf_store in conf_stores: secret_stores_repo.create_from(conf_store) def get_global_default_secret_store(): secret_store_repo = db_repos.get_secret_stores_repository() default_ss = None for secret_store in secret_store_repo.get_all(): if secret_store.global_default: default_ss = secret_store break return default_ss def get_applicable_crypto_plugins(manager, project_id, existing_plugin_name): """Get list of crypto plugins available for use. :param: manager instance of crypto manager :param: project_id project to identify preferred store if set :param: existing_plugin_name full plugin name. If a secret has an existing plugin defined, then we do not care if any preferred plugins have been defined. We will return all configured plugins as if multiple plugin support was not enabled. Subsequent code in the caller will select the plugin by name. When multiple backends support is enabled: It returns the project's preferred plugin as a list when one was set up earlier. If no project preferred plugin is set, then the plugin from the global default secret store is used. The plugin name is the 'crypto_plugin' field value on the identified secret store data. The matched plugin is returned as a list to match existing functionality. When multiple backends support is NOT enabled: In this case, it simply returns the list of all active plugins, which was the existing behavior before support for multiple backends was added. """ return _get_applicable_plugins_for_type(manager, project_id, existing_plugin_name, 'crypto_plugin') def get_applicable_store_plugins(manager, project_id, existing_plugin_name): """Get list of secret store plugins available for use. :param: manager instance of secret store manager :param: project_id project to identify preferred store if set :param: existing_plugin_name full plugin name. If a secret has an existing plugin defined, then we do not care if any preferred plugins have been defined. We will return all configured plugins as if multiple plugin support was not enabled. Subsequent code in the caller will select the plugin by name. When multiple backends support is enabled: It returns the project's preferred plugin as a list when one was set up earlier. If no project preferred plugin is set, then the plugin from the global default secret store is used. The plugin name is the 'store_plugin' field value on the identified secret store data. The matched plugin is returned as a list to match existing functionality. When multiple backends support is NOT enabled: In this case, it simply returns the list of all active plugins, which was the existing behavior before support for multiple backends was added.
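(This function and get_applicable_crypto_plugins() differ only in which SecretStores field is consulted: 'store_plugin' here versus 'crypto_plugin' there.)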
""" return _get_applicable_plugins_for_type(manager, project_id, existing_plugin_name, 'store_plugin') def _get_applicable_plugins_for_type(manager, project_id, existing_plugin_name, plugin_type_field): plugins = [] plugin_dict = {ext.name: ext.obj for ext in manager.extensions if ext.obj} if utils.is_multiple_backends_enabled() and existing_plugin_name is None: proj_store_repo = db_repos.get_project_secret_store_repository() plugin_store = proj_store_repo.get_secret_store_for_project( project_id, None, suppress_exception=True) # If project specific store is not set, then use global default one. if not plugin_store: if manager.global_default_store_dict is None: # Need to cache data as dict instead of db object to be usable # across various request sqlalchemy sessions store_dict = get_global_default_secret_store().to_dict_fields() manager.global_default_store_dict = store_dict secret_store_data = manager.global_default_store_dict else: secret_store_data = plugin_store.secret_store.to_dict_fields() applicable_plugin_name = secret_store_data[plugin_type_field] if applicable_plugin_name in plugin_dict: plugins = [plugin_dict.get(applicable_plugin_name)] elif applicable_plugin_name: # applicable_plugin_name has value raise exception.MultipleStorePreferredPluginMissing( applicable_plugin_name) else: plugins = plugin_dict.values() return plugins barbican-9.1.0.dev50/barbican/plugin/util/__init__.py0000664000175000017500000000000013616500636022531 0ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/plugin/castellan_secret_store.py0000664000175000017500000001324713616500636024565 0ustar sahidsahid00000000000000# Copyright (c) 2018 Red Hat Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import abc import six from castellan.common.objects import opaque_data from castellan import key_manager from oslo_context import context from oslo_log import log from barbican.plugin.interface import secret_store as ss LOG = log.getLogger(__name__) @six.add_metaclass(abc.ABCMeta) class CastellanSecretStore(ss.SecretStoreBase): KEY_ID = "key_id" ALG = "alg" BIT_LENGTH = "bit_length" def _set_params(self, conf): self.key_manager = key_manager.API(conf) self.context = context.get_current() @abc.abstractmethod def get_conf(self, conf): """Get plugin configuration This method is supposed to be implemented by the relevant subclass. This method reads in the config for the plugin in barbican.conf -- which should look like the way other barbican plugins are configured, and convert them to the proper oslo.config object to be passed to the keymanager API. (keymanager.API(conf) @returns oslo.config object """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def get_plugin_name(self): """Get plugin name This method is implemented by the subclass. Note that this name must be unique across the deployment. 
""" raise NotImplementedError # pragma: no cover def get_secret(self, secret_type, secret_metadata): secret_ref = secret_metadata[CastellanSecretStore.KEY_ID] try: secret = self.key_manager.get( self.context, secret_ref) return ss.SecretDTO(secret_type, secret.get_encoded(), ss.KeySpec(), secret_metadata['content_type']) except Exception as e: LOG.exception("Error retrieving secret {}: {}".format( secret_ref, six.text_type(e))) raise ss.SecretGeneralException(e) def store_secret(self, secret_dto): if not self.store_secret_supports(secret_dto.key_spec): raise ss.SecretAlgorithmNotSupportedException( secret_dto.key_spec.alg) try: secret_ref = self.key_manager.store( self.context, opaque_data.OpaqueData(secret_dto.secret) ) return {CastellanSecretStore.KEY_ID: secret_ref} except Exception as e: LOG.exception("Error storing secret: {}".format( six.text_type(e))) raise ss.SecretGeneralException(e) def delete_secret(self, secret_metadata): secret_ref = secret_metadata[CastellanSecretStore.KEY_ID] try: self.key_manager.delete( self.context, secret_ref) except KeyError: LOG.warning("Attempting to delete a non-existent secret {}".format( secret_ref)) except Exception as e: LOG.exception("Error deleting secret: {}".format( six.text_type(e))) raise ss.SecretGeneralException(e) def generate_symmetric_key(self, key_spec): if not self.generate_supports(key_spec): raise ss.SecretAlgorithmNotSupportedException( key_spec.alg) try: secret_ref = self.key_manager.create_key( self.context, key_spec.alg, key_spec.bit_length ) return {CastellanSecretStore.KEY_ID: secret_ref} except Exception as e: LOG.exception("Error generating symmetric key: {}".format( six.text_type(e))) raise ss.SecretGeneralException(e) def generate_asymmetric_key(self, key_spec): if not self.generate_supports(key_spec): raise ss.SecretAlgorithmNotSupportedException( key_spec.alg) if key_spec.passphrase: raise ss.GeneratePassphraseNotSupportedException() try: private_ref, public_ref = self.key_manager.create_key_pair( self.context, key_spec.alg, key_spec.bit_length ) private_key_metadata = { CastellanSecretStore.ALG: key_spec.alg, CastellanSecretStore.BIT_LENGTH: key_spec.bit_length, CastellanSecretStore.KEY_ID: private_ref } public_key_metadata = { CastellanSecretStore.ALG: key_spec.alg, CastellanSecretStore.BIT_LENGTH: key_spec.bit_length, CastellanSecretStore.KEY_ID: public_ref } return ss.AsymmetricKeyMetadataDTO( private_key_metadata, public_key_metadata, None ) except Exception as e: LOG.exception("Error generating asymmetric key: {}".format( six.text_type(e))) raise ss.SecretGeneralException(e) @abc.abstractmethod def store_secret_supports(self, key_spec): raise NotImplementedError # pragma: no cover @abc.abstractmethod def generate_supports(self, key_spec): raise NotImplementedError # pragma: no cover barbican-9.1.0.dev50/barbican/plugin/vault_secret_store.py0000664000175000017500000000632213616500636023746 0ustar sahidsahid00000000000000# Copyright (c) 2018 Red Hat Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
from barbican.common import config import barbican.plugin.castellan_secret_store as css from castellan.i18n import _ from castellan import options from oslo_config import cfg from oslo_log import log LOG = log.getLogger(__name__) DEFAULT_VAULT_URL = "http://127.0.0.1:8200" DEFAULT_MOUNTPOINT = "secret" vault_opt_group = cfg.OptGroup(name='vault_plugin', title='Vault Plugin') vault_opts = [ cfg.StrOpt('root_token_id', help='root token for vault'), cfg.StrOpt('approle_role_id', help='AppRole role_id for authentication with vault'), cfg.StrOpt('approle_secret_id', help='AppRole secret_id for authentication with vault'), cfg.StrOpt('kv_mountpoint', default=DEFAULT_MOUNTPOINT, help='Mountpoint of KV store in Vault to use, for example: ' '{}'.format(DEFAULT_MOUNTPOINT)), cfg.StrOpt('vault_url', default=DEFAULT_VAULT_URL, help='Use this endpoint to connect to Vault, for example: ' '"%s"' % DEFAULT_VAULT_URL), cfg.StrOpt('ssl_ca_crt_file', help='Absolute path to ca cert file'), cfg.BoolOpt('use_ssl', default=False, help=_('SSL Enabled/Disabled')), ] CONF = config.new_config() CONF.register_group(vault_opt_group) CONF.register_opts(vault_opts, group=vault_opt_group) config.parse_args(CONF) def list_opts(): yield vault_opt_group, vault_opts # pragma: no cover class VaultSecretStore(css.CastellanSecretStore): def __init__(self, conf=CONF): """Constructor - create the vault secret store.""" vault_conf = self.get_conf(conf) self._set_params(vault_conf) def get_plugin_name(self): return "VaultSecretStore" def get_conf(self, conf=CONF): """Convert secret store conf into oslo conf Returns an oslo.config() object to pass to keymanager.API(conf) """ vault_conf = cfg.ConfigOpts() options.set_defaults( vault_conf, backend='vault', vault_root_token_id=conf.vault_plugin.root_token_id, vault_approle_role_id=conf.vault_plugin.approle_role_id, vault_approle_secret_id=conf.vault_plugin.approle_secret_id, vault_kv_mountpoint=conf.vault_plugin.kv_mountpoint, vault_url=conf.vault_plugin.vault_url, vault_ssl_ca_crt_file=conf.vault_plugin.ssl_ca_crt_file, vault_use_ssl=conf.vault_plugin.use_ssl ) return vault_conf def store_secret_supports(self, key_spec): return True def generate_supports(self, key_spec): return True barbican-9.1.0.dev50/barbican/plugin/kmip_secret_store.py0000664000175000017500000006135313616500636023560 0ustar sahidsahid00000000000000# Copyright (c) 2014 Johns Hopkins University Applied Physics Laboratory # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ An implementation of the SecretStore that uses the KMIP backend. 
""" import base64 import os import ssl import stat from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import serialization from kmip.core import enums from kmip.core.factories import credentials from kmip.pie import client from kmip.pie import objects from oslo_config import cfg from oslo_log import log from barbican.common import config from barbican.common import exception from barbican import i18n as u # noqa from barbican.plugin.interface import secret_store as ss from barbican.plugin.util import translations LOG = log.getLogger(__name__) CONF = config.new_config() kmip_opt_group = cfg.OptGroup(name='kmip_plugin', title='KMIP Plugin') kmip_opts = [ cfg.StrOpt('username', help=u._('Username for authenticating with KMIP server') ), cfg.StrOpt('password', help=u._('Password for authenticating with KMIP server'), secret=True, ), cfg.StrOpt('host', default='localhost', help=u._('Address of the KMIP server') ), cfg.PortOpt('port', default=5696, help=u._('Port for the KMIP server'), ), cfg.StrOpt('ssl_version', default='PROTOCOL_TLSv1_2', help=u._('SSL version, maps to the module ssl\'s constants'), ), cfg.StrOpt('ca_certs', help=u._('File path to concatenated "certification authority" ' 'certificates'), ), cfg.StrOpt('certfile', help=u._('File path to local client certificate'), ), cfg.StrOpt('keyfile', help=u._('File path to local client certificate keyfile'), ), cfg.BoolOpt('pkcs1_only', default=False, help=u._('Only support PKCS#1 encoding of asymmetric keys'), ), cfg.StrOpt('plugin_name', help=u._('User friendly plugin name'), default='KMIP HSM'), ] CONF.register_group(kmip_opt_group) CONF.register_opts(kmip_opts, group=kmip_opt_group) config.parse_args(CONF) def list_opts(): yield kmip_opt_group, kmip_opts attribute_debug_msg = "Created attribute type %s with value %s" def convert_pem_to_der(pem_pkcs1): # cryptography adds an extra '\n' to end of PEM file # added if statement so if future version removes extra \n tests will not # break if pem_pkcs1.endswith(b'\n'): pem_pkcs1 = pem_pkcs1[:-1] # neither PyCrypto or cryptography support export in DER format with PKCS1 # encoding so doing by hand der_pkcs1_b64 = b''.join(pem_pkcs1.split(b'\n')[1:-1]) der_pkcs1 = base64.b64decode(der_pkcs1_b64) return der_pkcs1 def get_public_key_der_pkcs1(pem): """Converts PEM public key to DER PKCS1""" rsa_public = serialization.load_pem_public_key( pem, backend=default_backend()) pem_pkcs1 = rsa_public.public_bytes( encoding=serialization.Encoding.PEM, format=serialization.PublicFormat.PKCS1) return convert_pem_to_der(pem_pkcs1) def get_private_key_der_pkcs1(pem): """Converts PEM private key to DER PKCS1""" rsa_private = serialization.load_pem_private_key( pem, None, backend=default_backend()) pem_pkcs1 = rsa_private.private_bytes( encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.TraditionalOpenSSL, encryption_algorithm=serialization.NoEncryption()) return convert_pem_to_der(pem_pkcs1) class KMIPSecretStoreError(exception.BarbicanException): def __init__(self, message): super(KMIPSecretStoreError, self).__init__(message) class KMIPSecretStoreActionNotSupported(exception.BarbicanHTTPException): """Raised if no plugins are found that support the requested operation.""" client_message = u._("KMIP plugin action not support.") status_code = 400 def __init__(self, message): self.message = message super(KMIPSecretStoreActionNotSupported, self).__init__() class KMIPSecretStore(ss.SecretStoreBase): KEY_UUID = "key_uuid" VALID_BIT_LENGTHS = 
"valid_bit_lengths" KMIP_ALGORITHM_ENUM = "kmip_algorithm_enum" def __init__(self, conf=CONF): """Initializes KMIPSecretStore Creates a dictionary of mappings between SecretStore enum values and pyKMIP enum values. Initializes the KMIP client with credentials needed to connect to the KMIP server. """ super(KMIPSecretStore, self).__init__() self.valid_alg_dict = { ss.KeyAlgorithm.AES: { KMIPSecretStore.VALID_BIT_LENGTHS: [128, 192, 256], KMIPSecretStore.KMIP_ALGORITHM_ENUM: enums.CryptographicAlgorithm.AES}, ss.KeyAlgorithm.DES: { KMIPSecretStore.VALID_BIT_LENGTHS: [56], KMIPSecretStore.KMIP_ALGORITHM_ENUM: enums.CryptographicAlgorithm.DES}, ss.KeyAlgorithm.DESEDE: { KMIPSecretStore.VALID_BIT_LENGTHS: [56, 64, 112, 128, 168, 192], KMIPSecretStore.KMIP_ALGORITHM_ENUM: enums.CryptographicAlgorithm.TRIPLE_DES}, ss.KeyAlgorithm.DSA: { KMIPSecretStore.VALID_BIT_LENGTHS: [1024, 2048, 3072], KMIPSecretStore.KMIP_ALGORITHM_ENUM: enums.CryptographicAlgorithm.DSA}, ss.KeyAlgorithm.HMACSHA1: { KMIPSecretStore.VALID_BIT_LENGTHS: [], KMIPSecretStore.KMIP_ALGORITHM_ENUM: enums.CryptographicAlgorithm.HMAC_SHA1}, ss.KeyAlgorithm.HMACSHA256: { KMIPSecretStore.VALID_BIT_LENGTHS: [], KMIPSecretStore.KMIP_ALGORITHM_ENUM: enums.CryptographicAlgorithm.HMAC_SHA256}, ss.KeyAlgorithm.HMACSHA384: { KMIPSecretStore.VALID_BIT_LENGTHS: [], KMIPSecretStore.KMIP_ALGORITHM_ENUM: enums.CryptographicAlgorithm.HMAC_SHA384}, ss.KeyAlgorithm.HMACSHA512: { KMIPSecretStore.VALID_BIT_LENGTHS: [], KMIPSecretStore.KMIP_ALGORITHM_ENUM: enums.CryptographicAlgorithm.HMAC_SHA512}, ss.KeyAlgorithm.RSA: { KMIPSecretStore.VALID_BIT_LENGTHS: [1024, 2048, 3072, 4096], KMIPSecretStore.KMIP_ALGORITHM_ENUM: enums.CryptographicAlgorithm.RSA}, } self.pkcs1_only = conf.kmip_plugin.pkcs1_only if self.pkcs1_only: LOG.debug("KMIP secret store only supports PKCS#1") del self.valid_alg_dict[ss.KeyAlgorithm.DSA] self.kmip_barbican_alg_map = { enums.CryptographicAlgorithm.AES: ss.KeyAlgorithm.AES, enums.CryptographicAlgorithm.DES: ss.KeyAlgorithm.DES, enums.CryptographicAlgorithm.TRIPLE_DES: ss.KeyAlgorithm.DESEDE, enums.CryptographicAlgorithm.DSA: ss.KeyAlgorithm.DSA, enums.CryptographicAlgorithm.HMAC_SHA1: ss.KeyAlgorithm.HMACSHA1, enums.CryptographicAlgorithm.HMAC_SHA256: ss.KeyAlgorithm.HMACSHA256, enums.CryptographicAlgorithm.HMAC_SHA384: ss.KeyAlgorithm.HMACSHA384, enums.CryptographicAlgorithm.HMAC_SHA512: ss.KeyAlgorithm.HMACSHA512, enums.CryptographicAlgorithm.RSA: ss.KeyAlgorithm.RSA } self.plugin_name = conf.kmip_plugin.plugin_name if conf.kmip_plugin.keyfile is not None: self._validate_keyfile_permissions(conf.kmip_plugin.keyfile) if (conf.kmip_plugin.username is None) and ( conf.kmip_plugin.password is None): self.credential = None else: credential_type = enums.CredentialType.USERNAME_AND_PASSWORD credential_value = {'Username': conf.kmip_plugin.username, 'Password': conf.kmip_plugin.password} self.credential = ( credentials.CredentialFactory().create_credential( credential_type, credential_value)) config = conf.kmip_plugin if not getattr(ssl, config.ssl_version, None): LOG.error("The configured SSL version (%s) is not available" " on the system.", config.ssl_version) self.client = client.ProxyKmipClient( hostname=config.host, port=config.port, cert=config.certfile, key=config.keyfile, ca=config.ca_certs, ssl_version=config.ssl_version, username=config.username, password=config.password) def get_plugin_name(self): return self.plugin_name def generate_symmetric_key(self, key_spec): """Generate a symmetric key. 
Creates KMIP attribute objects based on the given KeySpec to send to the server. :param key_spec: KeySpec with symmetric algorithm and bit_length :returns: dictionary holding key_id returned by server :raises: SecretGeneralException, SecretAlgorithmNotSupportedException """ LOG.debug("Starting symmetric key generation with KMIP plugin") if not self.generate_supports(key_spec): raise ss.SecretAlgorithmNotSupportedException( key_spec.alg) if key_spec.alg.lower() not in ss.KeyAlgorithm.SYMMETRIC_ALGORITHMS: raise KMIPSecretStoreError( u._("An unsupported algorithm {algorithm} was passed to the " "'generate_symmetric_key' method").format( algorithm=key_spec.alg)) algorithm = self._get_kmip_algorithm(key_spec.alg.lower()) try: with self.client: LOG.debug("Opened connection to KMIP client for secret " "generation") uuid = self.client.create(algorithm, key_spec.bit_length) LOG.debug("SUCCESS: Symmetric key generated with " "uuid: %s", uuid) return {KMIPSecretStore.KEY_UUID: uuid} except Exception as e: LOG.exception("Error opening or writing to client") raise ss.SecretGeneralException(e) def generate_asymmetric_key(self, key_spec): """Generate an asymmetric key pair. Creates KMIP attribute objects based on the given KeySpec to send to the server. The KMIP Secret Store currently does not support protecting the private key with a passphrase. :param key_spec: KeySpec with asymmetric algorithm and bit_length :returns: AsymmetricKeyMetadataDTO with the key UUIDs :raises: SecretGeneralException, SecretAlgorithmNotSupportedException KMIPSecretStoreActionNotSupported """ LOG.debug("Starting asymmetric key generation with KMIP plugin") if not self.generate_supports(key_spec): raise ss.SecretAlgorithmNotSupportedException( key_spec.alg) if key_spec.alg.lower() not in ss.KeyAlgorithm.ASYMMETRIC_ALGORITHMS: raise ss.SecretAlgorithmNotSupportedException(key_spec.alg) if key_spec.passphrase: raise KMIPSecretStoreActionNotSupported( u._('KMIP plugin does not currently support protecting the ' 'private key with a passphrase')) algorithm = self._get_kmip_algorithm(key_spec.alg.lower()) length = key_spec.bit_length try: with self.client: LOG.debug("Opened connection to KMIP client for " "asymmetric secret generation") public_uuid, private_uuid = self.client.create_key_pair( algorithm, length) LOG.debug("SUCCESS: Asymmetric key pair generated with " "public key uuid: %(public_uuid)s and " "private key uuid: %(private_uuid)s" % {'public_uuid': public_uuid, 'private_uuid': private_uuid}) private_key_metadata = {KMIPSecretStore.KEY_UUID: private_uuid} public_key_metadata = {KMIPSecretStore.KEY_UUID: public_uuid} passphrase_metadata = None return ss.AsymmetricKeyMetadataDTO(private_key_metadata, public_key_metadata, passphrase_metadata) except Exception as e: LOG.exception("Error opening or writing to client") raise ss.SecretGeneralException(e) def store_secret(self, secret_dto): """Stores a secret To store a secret in KMIP, the attributes must be known. 
:param secret_dto: SecretDTO of the secret to be stored :returns: Dictionary holding the key_uuid assigned by KMIP :raises: SecretGeneralException, SecretAlgorithmNotSupportedException """ LOG.debug("Starting secret storage with KMIP plugin") if not self.store_secret_supports(secret_dto.key_spec): raise ss.SecretAlgorithmNotSupportedException( secret_dto.key_spec.alg) secret_type = secret_dto.type object_type, key_format_type = ( self._map_type_ss_to_kmip(secret_type)) if object_type is None: raise KMIPSecretStoreError( u._('Secret object type {object_type} is ' 'not supported').format(object_type=object_type)) secret = self._get_kmip_secret(secret_dto) try: with self.client: LOG.debug("Opened connection to KMIP client") uuid = self.client.register(secret) LOG.debug("SUCCESS: Key stored with uuid: %s", uuid) return {KMIPSecretStore.KEY_UUID: uuid} except Exception as e: LOG.exception("Error opening or writing to client") raise ss.SecretGeneralException(e) def get_secret(self, secret_type, secret_metadata): """Gets a secret :param secret_type: secret type :param secret_metadata: Dictionary of key metadata, requires: {'key_uuid': } :returns: SecretDTO of the retrieved Secret :raises: SecretGeneralException """ LOG.debug("Starting secret retrieval with KMIP plugin") uuid = str(secret_metadata[KMIPSecretStore.KEY_UUID]) try: with self.client: LOG.debug("Opened connection to KMIP client for secret " "retrieval") managed_object = self.client.get(uuid) return self._get_barbican_secret(managed_object, secret_type) except Exception as e: LOG.exception("Error opening or writing to client") raise ss.SecretGeneralException(e) def generate_supports(self, key_spec): """Key generation supported? Specifies whether the plugin supports key generation with the given key_spec. Currently, asymmetric key pair generation does not support encrypting the private key with a passphrase. Checks both the algorithm and the bit length. :param key_spec: KeySpec for secret to be generated :returns: boolean indicating if secret can be generated """ alg_dict_entry = self.valid_alg_dict.get(key_spec.alg.lower()) if alg_dict_entry: valid_bit_lengths = alg_dict_entry.get( KMIPSecretStore.VALID_BIT_LENGTHS) if (key_spec.bit_length in valid_bit_lengths or not valid_bit_lengths): return True return False def delete_secret(self, secret_metadata): """Deletes the secret whose metadata is included in the dictionary. Returns nothing if successful, raises an exception if an error occurs. :param secret_metadata: Dictionary of key metadata, requires: {'key_uuid': } :raises: SecretGeneralException """ LOG.debug("Starting secret deletion with KMIP plugin") uuid = str(secret_metadata[KMIPSecretStore.KEY_UUID]) try: with self.client: LOG.debug("Opened connection to KMIP client") self.client.destroy(uuid) except Exception as e: LOG.exception("Error opening or writing to client") raise ss.SecretGeneralException(e) def store_secret_supports(self, key_spec): """Key storage supported? Specifies whether the plugin supports storage of the secret given the attributes included in the KeySpec. For now this always returns True when the key spec's algorithm and bit length are not specified, because some secret types, like certificate, do not require an algorithm or bit length. The secret type may need to be added to this check in the future. 
:param key_spec: KeySpec of secret to be stored :returns: boolean indicating if secret can be stored """ if key_spec.alg is not None and key_spec.bit_length is not None: return self.generate_supports(key_spec) else: return True def _get_kmip_secret(self, secret_dto): """Builds a KMIP object from a SecretDTO This is needed for register calls. The Barbican object needs to be converted to KMIP object before it can be stored :param secret_dto: SecretDTO of secret to be stored :returns: KMIP object """ secret_type = secret_dto.type key_spec = secret_dto.key_spec object_type, key_format_type = ( self._map_type_ss_to_kmip(secret_type)) normalized_secret = self._normalize_secret(secret_dto.secret, secret_type) kmip_object = None if object_type == enums.ObjectType.CERTIFICATE: kmip_object = objects.X509Certificate(normalized_secret) elif object_type == enums.ObjectType.OPAQUE_DATA: opaque_type = enums.OpaqueDataType.NONE kmip_object = objects.OpaqueObject(normalized_secret, opaque_type) elif object_type == enums.ObjectType.PRIVATE_KEY: algorithm = self._get_kmip_algorithm(key_spec.alg) length = key_spec.bit_length format_type = enums.KeyFormatType.PKCS_8 kmip_object = objects.PrivateKey( algorithm, length, normalized_secret, format_type) elif object_type == enums.ObjectType.PUBLIC_KEY: algorithm = self._get_kmip_algorithm(key_spec.alg) length = key_spec.bit_length format_type = enums.KeyFormatType.X_509 kmip_object = objects.PublicKey( algorithm, length, normalized_secret, format_type) elif object_type == enums.ObjectType.SYMMETRIC_KEY: algorithm = self._get_kmip_algorithm(key_spec.alg) length = key_spec.bit_length kmip_object = objects.SymmetricKey(algorithm, length, normalized_secret) elif object_type == enums.ObjectType.SECRET_DATA: data_type = enums.SecretDataType.PASSWORD kmip_object = objects.SecretData(normalized_secret, data_type) return kmip_object def _get_kmip_algorithm(self, ss_algorithm): alg_entry = self.valid_alg_dict.get(ss_algorithm) return alg_entry.get(KMIPSecretStore.KMIP_ALGORITHM_ENUM) def _get_barbican_secret(self, managed_object, secret_type): object_type = managed_object.object_type secret = managed_object.value if (object_type == enums.ObjectType.SYMMETRIC_KEY or object_type == enums.ObjectType.PRIVATE_KEY or object_type == enums.ObjectType.PUBLIC_KEY): algorithm = self.kmip_barbican_alg_map[ managed_object.cryptographic_algorithm] length = managed_object.cryptographic_length key_spec = ss.KeySpec(algorithm, length) else: key_spec = ss.KeySpec() secret = self._denormalize_secret(secret, secret_type) secret_dto = ss.SecretDTO( secret_type, secret, key_spec, content_type=None, transport_key=None) return secret_dto def _map_type_ss_to_kmip(self, object_type): """Map SecretType to KMIP type enum Returns None if the type is not supported. 
:param object_type: SecretType enum value :returns: KMIP type enums if supported, None if not supported """ if object_type == ss.SecretType.SYMMETRIC: return enums.ObjectType.SYMMETRIC_KEY, enums.KeyFormatType.RAW elif object_type == ss.SecretType.PRIVATE: if self.pkcs1_only: return enums.ObjectType.PRIVATE_KEY, enums.KeyFormatType.PKCS_1 else: return enums.ObjectType.PRIVATE_KEY, enums.KeyFormatType.PKCS_8 elif object_type == ss.SecretType.PUBLIC: if self.pkcs1_only: return enums.ObjectType.PUBLIC_KEY, enums.KeyFormatType.PKCS_1 else: return enums.ObjectType.PUBLIC_KEY, enums.KeyFormatType.X_509 elif object_type == ss.SecretType.CERTIFICATE: return enums.ObjectType.CERTIFICATE, enums.KeyFormatType.X_509 elif object_type == ss.SecretType.PASSPHRASE: return enums.ObjectType.SECRET_DATA, enums.KeyFormatType.RAW elif object_type == ss.SecretType.OPAQUE: return enums.ObjectType.OPAQUE_DATA, enums.KeyFormatType.RAW else: return None, None def _raise_secret_general_exception(self, result): msg = u._( "Status: {status}, Reason: {reason}, " "Message: {message}" ).format( status=result.result_status, reason=result.result_reason, message=result.result_message ) LOG.error("ERROR from KMIP server: %s", msg) raise ss.SecretGeneralException(msg) def _validate_keyfile_permissions(self, path): """Check that file has permissions appropriate for a sensitive key Key files are extremely sensitive, they should be owned by the user who they relate to. They should be readable only (to avoid accidental changes). They should not be readable or writable by any other user. :raises: KMIPSecretStoreError """ expected = (stat.S_IRUSR | stat.S_IFREG) # 0o100400 st = os.stat(path) if st.st_mode != expected: raise KMIPSecretStoreError( u._('Bad key file permissions found, expected 400 ' 'for path: {file_path}').format(file_path=path) ) def _normalize_secret(self, secret, secret_type): """Normalizes secret for use by KMIP plugin""" data = base64.b64decode(secret) if (self.pkcs1_only and secret_type in [ss.SecretType.PUBLIC, ss.SecretType.PRIVATE]): if secret_type == ss.SecretType.PUBLIC: data = get_public_key_der_pkcs1(data) elif secret_type == ss.SecretType.PRIVATE: data = get_private_key_der_pkcs1(data) elif secret_type in [ss.SecretType.PUBLIC, ss.SecretType.PRIVATE, ss.SecretType.CERTIFICATE]: data = translations.convert_pem_to_der(data, secret_type) return data def _denormalize_secret(self, secret, secret_type): """Converts secret back to the format expected by Barbican core""" data = secret if secret_type in [ss.SecretType.PUBLIC, ss.SecretType.PRIVATE, ss.SecretType.CERTIFICATE]: data = translations.convert_der_to_pem(data, secret_type) return base64.b64encode(data) barbican-9.1.0.dev50/barbican/plugin/dogtag_config_opts.py0000664000175000017500000000512113616500636023665 0ustar sahidsahid00000000000000# Copyright (c) 2014 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
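# Illustrative sketch, not part of Barbican: the
# KMIPSecretStore._validate_keyfile_permissions check above requires the
# client key file to be a regular file readable only by its owner
# (st_mode == stat.S_IFREG | stat.S_IRUSR, i.e. mode 0o400). A minimal
# way to prepare a keyfile so that startup check passes; the helper name
# and path below are hypothetical.
import os
import stat


def restrict_keyfile_mode(path):
    # chmod to owner-read-only; a regular file then has exactly the
    # st_mode value the plugin expects at startup.
    os.chmod(path, stat.S_IRUSR)


# restrict_keyfile_mode('/etc/barbican/kmip-client.key')  # hypothetical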
from oslo_config import cfg from barbican.common import config from barbican import i18n as u import barbican.plugin.interface.certificate_manager as cm CONF = config.new_config() dogtag_plugin_group = cfg.OptGroup(name='dogtag_plugin', title="Dogtag Plugin Options") dogtag_plugin_opts = [ cfg.StrOpt('pem_path', default='/etc/barbican/kra_admin_cert.pem', help=u._('Path to PEM file for authentication')), cfg.StrOpt('dogtag_host', default="localhost", help=u._('Hostname for the Dogtag instance')), cfg.PortOpt('dogtag_port', default=8443, help=u._('Port for the Dogtag instance')), cfg.StrOpt('nss_db_path', default='/etc/barbican/alias', help=u._('Path to the NSS certificate database')), cfg.StrOpt('nss_password', help=u._('Password for the NSS certificate databases'), secret=True), cfg.StrOpt('simple_cmc_profile', default='caOtherCert', help=u._('Profile for simple CMC requests')), cfg.StrOpt('auto_approved_profiles', default="caServerCert", help=u._('List of automatically approved enrollment profiles')), cfg.StrOpt('ca_expiration_time', default=cm.CA_INFO_DEFAULT_EXPIRATION_DAYS, help=u._('Time in days for CA entries to expire')), cfg.StrOpt('plugin_working_dir', default='/etc/barbican/dogtag', help=u._('Working directory for Dogtag plugin')), cfg.StrOpt('plugin_name', help=u._('User friendly plugin name'), default='Dogtag KRA'), cfg.IntOpt('retries', help=u._('Retries when storing or generating secrets'), default=3) ] CONF.register_group(dogtag_plugin_group) CONF.register_opts(dogtag_plugin_opts, group=dogtag_plugin_group) config.parse_args(CONF) def list_opts(): yield dogtag_plugin_group, dogtag_plugin_opts barbican-9.1.0.dev50/barbican/plugin/interface/0000775000175000017500000000000013616500640021410 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/plugin/interface/secret_store.py0000664000175000017500000006223613616500636024501 0ustar sahidsahid00000000000000# Copyright (c) 2014 Johns Hopkins University Applied Physics Laboratory # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import abc from oslo_config import cfg import six from stevedore import named from barbican.common import config from barbican.common import exception from barbican.common import utils from barbican import i18n as u from barbican.plugin.util import multiple_backends from barbican.plugin.util import utils as plugin_utils _SECRET_STORE = None CONF = config.new_config() DEFAULT_PLUGIN_NAMESPACE = 'barbican.secretstore.plugin' DEFAULT_PLUGINS = ['store_crypto'] store_opt_group = cfg.OptGroup(name='secretstore', title='Secret Store Plugin Options') store_opts = [ cfg.StrOpt('namespace', default=DEFAULT_PLUGIN_NAMESPACE, help=u._('Extension namespace to search for plugins.') ), cfg.MultiStrOpt('enabled_secretstore_plugins', default=DEFAULT_PLUGINS, help=u._('List of secret store plugins to load.') ), cfg.BoolOpt('enable_multiple_secret_stores', default=False, help=u._('Flag to enable multiple secret store plugin' ' backend support. 
Default is False') ), cfg.ListOpt('stores_lookup_suffix', help=u._('List of suffixes to use for looking up plugins which ' 'are supported with multiple backend support.') ) ] CONF.register_group(store_opt_group) CONF.register_opts(store_opts, group=store_opt_group) config.parse_args(CONF) config.set_module_config("secretstore", CONF) def list_opts(): yield store_opt_group, store_opts class SecretStorePluginNotFound(exception.BarbicanHTTPException): """Raised when no plugins are installed.""" client_message = u._("No plugin was found that could support your request") status_code = 400 def __init__(self, plugin_name=None): if plugin_name: message = u._('Secret store plugin "{name}"' ' not found.').format(name=plugin_name) else: message = u._("Secret store plugin not found.") super(SecretStorePluginNotFound, self).__init__(message) class SecretStoreSupportedPluginNotFound(exception.BarbicanHTTPException): """Raised when no secret store supported plugin is found.""" client_message = u._("Secret store supported plugin not found.") status_code = 400 def __init__(self, key_spec): message = u._("Could not find a secret store plugin for storing " "secret with algorithm '{alg}' and bit-length " "'{len}'.").format(alg=key_spec.alg, len=key_spec.bit_length) super(SecretStoreSupportedPluginNotFound, self).__init__( message) class SecretGenerateSupportedPluginNotFound(exception.BarbicanHTTPException): """Raised when no secret generate supported plugin is found.""" client_message = u._("Secret generate supported plugin not found.") status_code = 400 def __init__(self, key_spec): message = u._("Could not find a secret store plugin for generating " "secret with algorithm '{alg}' and bit-length " "'{len}'.").format(alg=key_spec.alg, len=key_spec.bit_length) super(SecretGenerateSupportedPluginNotFound, self).__init__( message) class SecretContentTypeNotSupportedException(exception.BarbicanHTTPException): """Raised when support for payload content type is not available.""" status_code = 400 def __init__(self, content_type): super(SecretContentTypeNotSupportedException, self).__init__( u._("A Content-Type of '{content_type}' for secrets is " "not supported").format( content_type=content_type) ) self.content_type = content_type self.client_message = u._( "content-type of '{content_type}' not supported").format( content_type=content_type) class SecretContentEncodingNotSupportedException( exception.BarbicanHTTPException): """Raised when support for payload content encoding is not available.""" status_code = 400 def __init__(self, content_encoding): super(SecretContentEncodingNotSupportedException, self).__init__( u._("Secret Content-Encoding of '{content_encoding}' " "not supported").format( content_encoding=content_encoding) ) self.content_encoding = content_encoding self.client_message = u._( "content-encoding of '{content_encoding}' not supported").format( content_encoding=content_encoding) class SecretNoPayloadProvidedException(exception.BarbicanException): """Raised when secret information is not provided.""" def __init__(self): super(SecretNoPayloadProvidedException, self).__init__( u._('No secret information provided to encrypt.') ) class SecretContentEncodingMustBeBase64(exception.BarbicanHTTPException): """Raised when encoding must be base64.""" client_message = u._("Text-based binary secret payloads must " "specify a content-encoding of 'base64'") status_code = 400 def __init__(self): super(SecretContentEncodingMustBeBase64, self).__init__( u._("Encoding type must be 'base64' for text-based payloads.") ) 
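# Illustrative sketch only: a new client-facing error would follow the
# same pattern as the exception classes in this module -- subclass
# BarbicanHTTPException, set a sanitized client_message plus an HTTP
# status_code, and keep the detailed message server-side. The class below
# is a hypothetical example, not part of the Barbican API.
class SecretExampleNotSupportedException(exception.BarbicanHTTPException):
    """Hypothetical example of this module's exception pattern."""
    client_message = u._("Example operation not supported")
    status_code = 400

    def __init__(self, detail=u._('Unknown')):
        # The detailed reason stays in the server-side message; only the
        # generic client_message above is exposed to API clients.
        super(SecretExampleNotSupportedException, self).__init__(
            u._("Example operation not supported - "
                "Reason: {detail}").format(detail=detail)
        )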
class SecretGeneralException(exception.BarbicanException): """Raised when a system fault has occurred.""" def __init__(self, reason=u._('Unknown')): super(SecretGeneralException, self).__init__( u._('Problem seen during crypto processing - ' 'Reason: {reason}').format(reason=reason) ) self.reason = reason class SecretPayloadDecodingError(exception.BarbicanHTTPException): """Raised when payload could not be decoded.""" client_message = u._("Problem decoding payload") status_code = 400 def __init__(self): super(SecretPayloadDecodingError, self).__init__( u._("Problem decoding payload") ) class SecretAcceptNotSupportedException(exception.BarbicanHTTPException): """Raised when requested decrypted content-type is not available.""" client_message = u._("Wrong payload content-type") status_code = 406 def __init__(self, accept): super(SecretAcceptNotSupportedException, self).__init__( u._("Secret Accept of '{accept}' not supported").format( accept=accept) ) self.accept = accept class SecretNotFoundException(exception.BarbicanHTTPException): """Raised when secret information could not be located.""" client_message = u._("Secret not found.") status_code = 404 def __init__(self): super(SecretNotFoundException, self).__init__( u._('No secret information found')) class SecretAlgorithmNotSupportedException(exception.BarbicanHTTPException): """Raised when support for an algorithm is not available.""" client_message = u._("Requested algorithm is not supported") status_code = 400 def __init__(self, algorithm): super(SecretAlgorithmNotSupportedException, self).__init__( u._("Secret algorithm of '{algorithm}' not supported").format( algorithm=algorithm) ) self.algorithm = algorithm class GeneratePassphraseNotSupportedException(exception.BarbicanHTTPException): """Raised when generating keys encrypted by passphrase is not supported.""" client_message = ( u._("Generating keys encrypted with passphrases is not supported") ) status_code = 400 def __init__(self): super(GeneratePassphraseNotSupportedException, self).__init__( self.client_message ) class SecretStorePluginsNotConfigured(exception.BarbicanException): """Raised when there are no secret store plugins configured.""" def __init__(self): super(SecretStorePluginsNotConfigured, self).__init__( u._('No secret store plugins have been configured') ) class StorePluginNotAvailableOrMisconfigured(exception.BarbicanException): """Raised when a plugin that was previously used cannot be found.""" def __init__(self, plugin_name): super(StorePluginNotAvailableOrMisconfigured, self).__init__( u._("The requested Store Plugin {plugin_name} is not " "currently available. This is probably a server " "misconfiguration.").format( plugin_name=plugin_name) ) self.plugin_name = plugin_name class SecretType(object): """Constant to define the symmetric key type. Used by getSecret to retrieve a symmetric key. """ SYMMETRIC = "symmetric" """Constant to define the public key type. Used by getSecret to retrieve a public key. """ PUBLIC = "public" """Constant to define the private key type. Used by getSecret to retrieve a private key. """ PRIVATE = "private" """Constant to define the passphrase type. Used by getSecret to retrieve a passphrase.""" PASSPHRASE = "passphrase" # nosec """Constant to define the certificate type. Used by getSecret to retrieve a certificate.""" CERTIFICATE = "certificate" """Constant to define the opaque data type. Used by getSecret to retrieve opaque data. Opaque data can be any kind of data. 
This data type signals to Barbican to just store the information and not worry about the format or encoding. This is the default type if no type is specified by the user.""" OPAQUE = utils.SECRET_TYPE_OPAQUE class KeyAlgorithm(object): """Constant for the Diffie Hellman algorithm.""" DIFFIE_HELLMAN = "diffie_hellman" """Constant for the DSA algorithm.""" DSA = "dsa" """Constant for the RSA algorithm.""" RSA = "rsa" """Constant for the Elliptic Curve algorithm.""" EC = "ec" """Constant for the HMACSHA1 algorithm.""" HMACSHA1 = "hmacsha1" """Constant for the HMACSHA256 algorithm.""" HMACSHA256 = "hmacsha256" """Constant for the HMACSHA384 algorithm.""" HMACSHA384 = "hmacsha384" """Constant for the HMACSHA512 algorithm.""" HMACSHA512 = "hmacsha512" """List of asymmetric algorithms""" ASYMMETRIC_ALGORITHMS = [DIFFIE_HELLMAN, DSA, RSA, EC] """Constant for the AES algorithm.""" AES = "aes" """Constant for the DES algorithm.""" DES = "des" """Constant for the DESede (triple-DES) algorithm.""" DESEDE = "desede" """List of symmetric algorithms""" SYMMETRIC_ALGORITHMS = [AES, DES, DESEDE, HMACSHA1, HMACSHA256, HMACSHA384, HMACSHA512] class KeySpec(object): """This object specifies the algorithm and bit length for a key.""" def __init__(self, alg=None, bit_length=None, mode=None, passphrase=None): """Creates a new KeySpec. :param alg: algorithm for the key :param bit_length: bit length of the key :param mode: algorithm mode for the key :param passphrase: passphrase for the private_key """ self.alg = alg self.bit_length = bit_length self.mode = mode # TODO(john-wood-w) Paul, is 'mode' required? self.passphrase = passphrase class SecretDTO(object): """This object is a secret data transfer object (DTO). This object encapsulates a key and attributes about the key. The attributes include a KeySpec that contains the algorithm and bit length. The attributes also include information on the encoding of the key. """ # TODO(john-wood-w) Remove 'content_type' once secret normalization work is # completed. def __init__(self, type, secret, key_spec, content_type, transport_key=None): """Creates a new SecretDTO. The secret is stored in the secret parameter. In the future this DTO may include compression and key wrapping information. :param type: SecretType for secret :param secret: secret, as a base64-encoded string :param key_spec: KeySpec key specifications :param content_type: Content type of the secret, one of MIME types such as 'text/plain' or 'application/octet-stream' :param transport_key: presence of this parameter indicates that the secret has been encrypted using a transport key. The transport key is a base64 encoded x509 transport certificate. """ self.type = type or SecretType.OPAQUE self.secret = secret self.key_spec = key_spec self.content_type = content_type self.transport_key = transport_key class AsymmetricKeyMetadataDTO(object): """This DTO encapsulates metadata(s) for asymmetric key components. These components are private_key_meta, public_key_meta and passphrase_meta. 
""" def __init__(self, private_key_meta=None, public_key_meta=None, passphrase_meta=None): """Constructor for AsymmetricKeyMetadataDTO :param private_key_meta: private key metadata :param public_key_meta: public key metadata :param passphrase_meta: passphrase key metadata """ self.private_key_meta = private_key_meta self.public_key_meta = public_key_meta self.passphrase_meta = passphrase_meta @six.add_metaclass(abc.ABCMeta) class SecretStoreBase(object): @abc.abstractmethod def get_plugin_name(self): """Gets user friendly plugin name. This plugin name is expected to be read from config file. There will be a default defined for plugin name which can be customized in specific deployment if needed. This name needs to be unique across a deployment. """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def generate_symmetric_key(self, key_spec): """Generate a new symmetric key and store it. Generates a new symmetric key and stores it in the secret store. A dictionary is returned that contains metadata about the newly created symmetric key. The dictionary of metadata is stored by Barbican and passed into other methods to aid the plugins. This can be useful for plugins that generate a unique ID in the external data store and use it to retrieve the key in the future. The returned dictionary may be empty if the SecretStore does not require it. :param key_spec: KeySpec that contains details on the type of key to generate :returns: an optional dictionary containing metadata about the key """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def generate_asymmetric_key(self, key_spec): """Generate a new asymmetric key pair and store it. Generates a new asymmetric key pair and stores it in the secret store. An object of type AsymmetricKeyMetadataDTO will be returned containing attributes of metadata for newly created key pairs. The metadata is stored by Barbican and passed into other methods to aid the plugins. This can be useful for plugins that generate a unique ID in the external data store and use it to retrieve the key pairs in the future. :param key_spec: KeySpec that contains details on the type of key to generate :returns: An object of type AsymmetricKeyMetadataDTO containing metadata about the key pair. """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def store_secret(self, secret_dto): """Stores a key. The SecretDTO contains the bytes of the secret and properties of the secret. The SecretStore retrieves the secret bytes, stores them, and returns a dictionary of metadata about the secret. This can be useful for plugins that generate a unique ID in the external data store and use it to retrieve the secret in the future. The returned dictionary may be empty if the SecretStore does not require it. :param secret_dto: SecretDTO for secret :returns: an optional dictionary containing metadata about the secret """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def get_secret(self, secret_type, secret_metadata): """Retrieves a secret from the secret store. Retrieves a secret from the secret store and returns a SecretDTO that contains the secret. The secret_metadata parameter is the metadata returned from one of the generate or store methods. This data is used by the plugins to retrieve the key. The secret_type parameter may be useful for secret stores to know the expected format of the secret. For instance if the type is SecretDTO.PRIVATE then a PKCS8 structure is returned. 
This way secret stores do not need to manage the secret type on their own. :param secret_type: secret type :param secret_metadata: secret metadata :returns: SecretDTO that contains secret """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def generate_supports(self, key_spec): """Returns a boolean indicating if the secret type is supported. This checks if the algorithm and bit length are supported by the generate methods. This is useful to call before calling generate_symmetric_key or generate_asymmetric_key to see if the key type is supported before trying to generate it. :param key_spec: KeySpec that contains details on the algorithm and bit length :returns: boolean indicating if the algorithm is supported """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def delete_secret(self, secret_metadata): """Deletes a secret from the secret store. Deletes a secret from a secret store. It can no longer be referenced after this call. :param secret_metadata: secret_metadata """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def store_secret_supports(self, key_spec): """Returns a boolean indicating if the secret can be stored. Checks if the secret store can store the secret, given the attributes of the secret in the KeySpec. For example, some plugins may need to know the attributes in order to store the secret, but other plugins may be able to store the secret as a blob if no attributes are given. :param key_spec: KeySpec for the secret :returns: a boolean indicating if the secret can be stored """ raise NotImplementedError # pragma: no cover def get_transport_key(self): """Gets a transport key. Returns the current valid transport key associated with this plugin. The transport key is expected to be a base64 encoded x509 certificate containing a public key. Admins are responsible for deleting old keys from the database using the DELETE method on the TransportKey resource. By default, returns None. Plugins that support transport key wrapping should override this method. """ return None def is_transport_key_current(self, transport_key): """Determines if the provided transport key is the current valid key Returns true if the transport key is the current valid transport key. If the key is not valid, then barbican core will request a new transport key from the plugin. Returns False by default. Plugins that support transport key wrapping should override this method. """ return False def _enforce_extensions_configured(plugin_related_function): def _check_plugins_configured(self, *args, **kwargs): if not self.extensions: raise SecretStorePluginsNotConfigured() return plugin_related_function(self, *args, **kwargs) return _check_plugins_configured class SecretStorePluginManager(named.NamedExtensionManager): def __init__(self, conf=CONF, invoke_args=(), invoke_kwargs={}): ss_conf = config.get_module_config('secretstore') plugin_names = self._get_internal_plugin_names(ss_conf) super(SecretStorePluginManager, self).__init__( ss_conf.secretstore.namespace, plugin_names, invoke_on_load=False, # Defer creating plugins to utility below. invoke_args=invoke_args, invoke_kwds=invoke_kwargs, name_order=True # extensions sorted as per order of plugin names ) plugin_utils.instantiate_plugins(self, invoke_args, invoke_kwargs) multiple_backends.sync_secret_stores(self) @_enforce_extensions_configured def get_plugin_store(self, key_spec, plugin_name=None, transport_key_needed=False, project_id=None): """Gets a secret store plugin. 
:param: plugin_name: set to plugin_name to get specific plugin :param: key_spec: KeySpec of key that will be stored :param: transport_key_needed: set to True if a transport key is required. :returns: SecretStoreBase plugin implementation """ active_plugins = multiple_backends.get_applicable_store_plugins( self, project_id=project_id, existing_plugin_name=plugin_name) if plugin_name is not None: for plugin in active_plugins: if utils.generate_fullname_for(plugin) == plugin_name: return plugin raise SecretStorePluginNotFound(plugin_name) if not transport_key_needed: for plugin in active_plugins: if plugin.store_secret_supports(key_spec): return plugin else: for plugin in active_plugins: if (plugin.get_transport_key() is not None and plugin.store_secret_supports(key_spec)): return plugin raise SecretStoreSupportedPluginNotFound(key_spec) @_enforce_extensions_configured def get_plugin_retrieve_delete(self, plugin_name): """Gets a secret retrieve/delete plugin. If this function is being called, it is because we are trying to retrieve or delete an already stored secret. Thus, the plugin name is actually gotten from the plugin metadata that has already been stored in the database. So, in this case, if this plugin is not available, this might be due to a server misconfiguration. :returns: SecretStoreBase plugin implementation :raises: StorePluginNotAvailableOrMisconfigured: If the plugin wasn't found it's because the plugin parameters were not properly configured on the database side. """ for plugin in plugin_utils.get_active_plugins(self): if utils.generate_fullname_for(plugin) == plugin_name: return plugin raise StorePluginNotAvailableOrMisconfigured(plugin_name) @_enforce_extensions_configured def get_plugin_generate(self, key_spec, project_id=None): """Gets a secret generate plugin. :param key_spec: KeySpec that contains details on the type of key to generate :returns: SecretStoreBase plugin implementation """ active_plugins = multiple_backends.get_applicable_store_plugins( self, project_id=project_id, existing_plugin_name=None) for plugin in active_plugins: if plugin.generate_supports(key_spec): return plugin raise SecretGenerateSupportedPluginNotFound(key_spec) def _get_internal_plugin_names(self, secretstore_conf): """Gets plugin names used for loading via stevedore. When multiple secret store support is enabled, then secret store plugin names are read via updated configuration structure. If not enabled, then it reads MultiStr property in 'secretstore' config section. """ # to cache default global secret store value on first use self.global_default_store_dict = None if utils.is_multiple_backends_enabled(): self.parsed_stores = multiple_backends.\ read_multiple_backends_config() plugin_names = [store.store_plugin for store in self.parsed_stores if store.store_plugin] else: plugin_names = secretstore_conf.secretstore.\ enabled_secretstore_plugins return plugin_names def get_manager(): global _SECRET_STORE if not _SECRET_STORE: _SECRET_STORE = SecretStorePluginManager() return _SECRET_STORE barbican-9.1.0.dev50/barbican/plugin/interface/__init__.py0000664000175000017500000000000013616500636023514 0ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/plugin/interface/certificate_manager.py0000664000175000017500000007154413616500636025756 0ustar sahidsahid00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ SSL Certificate resources for Barbican. The resources here should be generic across all certificate-related implementations. Hence do not place vendor-specific content in this module. """ import abc import datetime from oslo_config import cfg from oslo_utils import encodeutils import six from stevedore import named from barbican.common import config from barbican.common import exception import barbican.common.utils as utils from barbican import i18n as u from barbican.model import models from barbican.model import repositories as repos from barbican.plugin.util import utils as plugin_utils LOG = utils.getLogger(__name__) CONF = config.new_config() # Configuration for certificate processing plugins: DEFAULT_PLUGIN_NAMESPACE = 'barbican.certificate.plugin' DEFAULT_PLUGINS = ['simple_certificate'] cert_opt_group = cfg.OptGroup(name='certificate', title='Certificate Plugin Options') cert_opts = [ cfg.StrOpt('namespace', default=DEFAULT_PLUGIN_NAMESPACE, help=u._('Extension namespace to search for plugins.') ), cfg.MultiStrOpt('enabled_certificate_plugins', default=DEFAULT_PLUGINS, help=u._('List of certificate plugins to load.') ) ] CONF.register_group(cert_opt_group) CONF.register_opts(cert_opts, group=cert_opt_group) config.parse_args(CONF) def list_opts(): yield cert_opt_group, cert_opts yield cert_event_opt_group, cert_event_opts # Configuration for certificate eventing plugins: DEFAULT_EVENT_PLUGIN_NAMESPACE = 'barbican.certificate.event.plugin' DEFAULT_EVENT_PLUGINS = ['simple_certificate_event'] cert_event_opt_group = cfg.OptGroup(name='certificate_event', title='Certificate Event Plugin Options') cert_event_opts = [ cfg.StrOpt('namespace', default=DEFAULT_EVENT_PLUGIN_NAMESPACE, help=u._('Extension namespace to search for eventing plugins.') ), cfg.MultiStrOpt('enabled_certificate_event_plugins', default=DEFAULT_EVENT_PLUGINS, help=u._('List of certificate plugins to load.') ) ] CONF.register_group(cert_event_opt_group) CONF.register_opts(cert_event_opts, group=cert_event_opt_group) ERROR_RETRY_MSEC = 300000 RETRY_MSEC = 3600000 CA_INFO_DEFAULT_EXPIRATION_DAYS = 1 CA_PLUGIN_TYPE_DOGTAG = "dogtag" CA_PLUGIN_TYPE_SYMANTEC = "symantec" # fields to distinguish CA types and subject key identifiers CA_TYPE = "ca_type" CA_SUBJECT_KEY_IDENTIFIER = "ca_subject_key_identifier" # field to get the certificate request type REQUEST_TYPE = "request_type" # fields for the ca_id, plugin_ca_id CA_ID = "ca_id" PLUGIN_CA_ID = "plugin_ca_id" # fields for ca_info dict keys INFO_NAME = "name" INFO_DESCRIPTION = "description" INFO_CA_SIGNING_CERT = "ca_signing_certificate" INFO_INTERMEDIATES = "intermediates" INFO_EXPIRATION = "expiration" # Singleton to avoid loading the CertificateEventManager plugins more than once _EVENT_PLUGIN_MANAGER = None class CertificateRequestType(object): """Constants to define the certificate request type.""" CUSTOM_REQUEST = "custom" FULL_CMC_REQUEST = "full-cmc" SIMPLE_CMC_REQUEST = "simple-cmc" STORED_KEY_REQUEST = "stored-key" class CertificatePluginNotFound(exception.BarbicanException): """Raised when no certificate plugin supporting a request is available.""" def 
__init__(self, plugin_name=None): if plugin_name: message = u._( 'Certificate plugin "{name}"' ' not found.').format(name=plugin_name) else: message = u._("Certificate plugin not found or configured.") super(CertificatePluginNotFound, self).__init__(message) class CertificatePluginNotFoundForCAID(exception.BarbicanException): """Raised when no certificate plugin is available for a CA_ID.""" def __init__(self, ca_id): message = u._( 'Certificate plugin not found for "{ca_id}".').format(ca_id=ca_id) super(CertificatePluginNotFoundForCAID, self).__init__(message) class CertificateEventPluginNotFound(exception.BarbicanException): """Raised when no certificate event plugin supports the request.""" def __init__(self, plugin_name=None): if plugin_name: message = u._( 'Certificate event plugin "{name}" ' 'not found.').format(name=plugin_name) else: message = u._("Certificate event plugin not found.") super(CertificateEventPluginNotFound, self).__init__(message) class CertificateStatusNotSupported(exception.BarbicanException): """Raised when cert status returned is unknown.""" def __init__(self, status): super(CertificateStatusNotSupported, self).__init__( u._("Certificate status of {status} not " "supported").format(status=status) ) self.status = status class CertificateGeneralException(exception.BarbicanException): """Raised when a system fault has occurred.""" def __init__(self, reason=u._('Unknown')): super(CertificateGeneralException, self).__init__( u._('Problem seen during certificate processing - ' 'Reason: {reason}').format(reason=reason) ) self.reason = reason class CertificateStatusClientDataIssue(exception.BarbicanHTTPException): """Raised when the CA has encountered an issue with request data.""" client_message = "" status_code = 400 def __init__(self, reason=u._('Unknown')): super(CertificateStatusClientDataIssue, self).__init__( u._('Problem with data in certificate request - ' 'Reason: {reason}').format(reason=reason) ) self.client_message = self.message class CertificateStatusInvalidOperation(exception.BarbicanHTTPException): """Raised when an invalid operation has been requested on an order.""" client_message = "" status_code = 400 def __init__(self, reason=u._('Unknown')): super(CertificateStatusInvalidOperation, self).__init__( u._('Invalid operation requested - ' 'Reason: {reason}').format(reason=reason) ) self.client_message = self.message @six.add_metaclass(abc.ABCMeta) class CertificateEventPluginBase(object): """Base class for certificate eventing plugins. This class is the base plugin contract for issuing certificate related events from Barbican. """ @abc.abstractmethod def notify_certificate_is_ready( self, project_id, order_ref, container_ref): """Notify that a certificate has been generated and is ready to use. :param project_id: Project ID associated with this certificate :param order_ref: HATEOAS reference URI to the submitted Barbican Order :param container_ref: HATEOAS reference URI to the Container storing the certificate :returns: None """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def notify_ca_is_unavailable( self, project_id, order_ref, error_msg, retry_in_msec): """Notify that the certificate authority (CA) isn't available. :param project_id: Project ID associated with this order :param order_ref: HATEOAS reference URI to the submitted Barbican Order :param error_msg: Error message if it is available :param retry_in_msec: Delay before attempting to talk to the CA again. If this is 0, then no attempt will be made. 
:returns: None """ raise NotImplementedError # pragma: no cover @six.add_metaclass(abc.ABCMeta) class CertificatePluginBase(object): """Base class for certificate plugins. This class is the base plugin contract for certificates. """ @abc.abstractmethod def get_default_ca_name(self): """Get the default CA name Provides a default CA name to be returned in the default get_ca_info() method. If get_ca_info() is overridden (to support multiple CAs for instance), then this method may not be called. In that case, just implement this method to return a dummy variable. If this value is used, it should be unique amongst all the CA plugins. :return: The default CA name :rtype: str """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def get_default_signing_cert(self): """Get the default CA signing cert Provides a default CA signing cert to be returned in the default get_ca_info() method. If get_ca_info() is overridden (to support multiple CAs for instance), then this method may not be called. In that case, just implement this method to return a dummy variable. :return: The default CA signing cert :rtype: str """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def get_default_intermediates(self): """Get the default CA certificate chain Provides a default CA certificate to be returned in the default get_ca_info() method. If get_ca_info() is overridden (to support multiple CAs for instance), then this method may not be called. In that case, just implement this method to return a dummy variable. :return: The default CA certificate chain :rtype: str """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def issue_certificate_request(self, order_id, order_meta, plugin_meta, barbican_meta_dto): """Create the initial order :param order_id: ID associated with the order :param order_meta: Dict of meta-data associated with the order :param plugin_meta: Plugin meta-data previously set by calls to this plugin. Plugins may also update/add information here which Barbican will persist on their behalf :param barbican_meta_dto: Data transfer object :class:`BarbicanMetaDTO` containing data added to the request by the Barbican server to provide additional context for processing, but which are not in the original request. For example, the plugin_ca_id :returns: A :class:`ResultDTO` instance containing the result populated by the plugin implementation :rtype: :class:`ResultDTO` """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def modify_certificate_request(self, order_id, order_meta, plugin_meta, barbican_meta_dto): """Update the order meta-data :param order_id: ID associated with the order :param order_meta: Dict of meta-data associated with the order :param plugin_meta: Plugin meta-data previously set by calls to this plugin. Plugins may also update/add information here which Barbican will persist on their behalf :param barbican_meta_dto: Data transfer object :class:`BarbicanMetaDTO` containing data added to the request by the Barbican server to provide additional context for processing, but which are not in the original request. 
For example, the plugin_ca_id :returns: A :class:`ResultDTO` instance containing the result populated by the plugin implementation :rtype: :class:`ResultDTO` """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def cancel_certificate_request(self, order_id, order_meta, plugin_meta, barbican_meta_dto): """Cancel the order :param order_id: ID associated with the order :param order_meta: Dict of meta-data associated with the order. :param plugin_meta: Plugin meta-data previously set by calls to this plugin. Plugins may also update/add information here which Barbican will persist on their behalf :param barbican_meta_dto: Data transfer object :class:`BarbicanMetaDTO` containing data added to the request by the Barbican server to provide additional context for processing, but which are not in the original request. For example, the plugin_ca_id :returns: A :class:`ResultDTO` instance containing the result populated by the plugin implementation :rtype: :class:`ResultDTO` """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def check_certificate_status(self, order_id, order_meta, plugin_meta, barbican_meta_dto): """Check status of the order :param order_id: ID associated with the order :param order_meta: Dict of meta-data associated with the order :param plugin_meta: Plugin meta-data previously set by calls to this plugin. Plugins may also update/add information here which Barbican will persist on their behalf :param barbican_meta_dto: Data transfer object :class:`BarbicanMetaDTO` containing data added to the request by the Barbican server to provide additional context for processing, but which are not in the original request. For example, the plugin_ca_id :returns: A :class:`ResultDTO` instance containing the result populated by the plugin implementation :rtype: :class:`ResultDTO` """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def supports(self, certificate_spec): """Returns if the plugin supports the certificate type. :param certificate_spec: Contains details on the certificate to generate the certificate order :returns: boolean indicating if the plugin supports the certificate type """ raise NotImplementedError # pragma: no cover def supported_request_types(self): """Returns the request_types supported by this plugin. :returns: a list of the Barbican-core defined request_types supported by this plugin. """ return [CertificateRequestType.CUSTOM_REQUEST] # pragma: no cover def get_ca_info(self): """Returns information about the CA(s) supported by this plugin. :returns: dictionary indexed by plugin_ca_id. Each entry consists of a dictionary of key-value pairs. 
An example dictionary containing the current supported attributes is shown below:: { "plugin_ca_id1": { INFO_NAME : "CA name", INFO_DESCRIPTION : "CA user friendly description", INFO_CA_SIGNING_CERT : "base 64 encoded signing cert", INFO_INTERMEDIATES : "base 64 encoded certificate chain", INFO_EXPIRATION : "ISO formatted UTC datetime for when this " "data will become stale" } } """ name = self.get_default_ca_name() expiration = (datetime.datetime.utcnow() + datetime.timedelta(days=CA_INFO_DEFAULT_EXPIRATION_DAYS)) default_info = { INFO_NAME: name, INFO_DESCRIPTION: "Certificate Authority - {0}".format(name), INFO_EXPIRATION: expiration.isoformat() } signing_cert = self.get_default_signing_cert() if signing_cert is not None: default_info[INFO_CA_SIGNING_CERT] = signing_cert intermediates = self.get_default_intermediates() if intermediates is not None: default_info[INFO_INTERMEDIATES] = intermediates return {name: default_info} def supports_create_ca(self): """Returns whether the plugin supports on-the-fly generation of subCAs :return: boolean, True if supported, defaults to False """ return False # pragma: no cover def create_ca(self, ca_create_dto): """Creates a subordinate CA upon request This call should only be made if a plugin returns True for supports_create_ca(). :param ca_create_dto: Data transfer object :class:`CACreateDTO` containing data required to generate a subordinate CA. This data includes the subject DN of the new CA signing certificate, a name for the new CA and a reference to the CA that will issue the new subordinate CA's signing certificate. :return: ca_info: Dictionary containing the data needed to create a models.CertificateAuthority object """ raise NotImplementedError # pragma: no cover def delete_ca(self, ca_id): """Deletes a subordinate CA Like the create_ca call, this should only be made if the plugin returns True for supports_create_ca() :param ca_id: id for the CA as specified by the plugin :return: None """ raise NotImplementedError # pragma: no cover class CACreateDTO(object): """Class that includes data needed to create a subordinate CA """ def __init__(self, name=None, description=None, subject_dn=None, parent_ca_id=None): """Creates a new CACreateDTO object. :param name: Name for the subordinate CA :param description: Description for the subordinate CA :param subject_dn: Subject DN for the new subordinate CA's signing certificate :param parent_ca_id: ID of the CA which is supposed to sign the subordinate CA's signing certificate. This is the ID as known to the plugin (not the Barbican UUID) """ self.name = name self.description = description self.subject_dn = subject_dn self.parent_ca_id = parent_ca_id class CertificateStatus(object): """Defines statuses for certificate request process. In particular: CERTIFICATE_GENERATED - Indicates a certificate was created WAITING_FOR_CA - Waiting for Certificate authority (CA) to complete order CLIENT_DATA_ISSUE_SEEN - Problem was seen with client-provided data CA_UNAVAILABLE_FOR_REQUEST - CA was not available, will try again later REQUEST_CANCELED - The client or CA cancelled this order INVALID_OPERATION - Unexpected error seen processing order """ CERTIFICATE_GENERATED = "certificate generated" WAITING_FOR_CA = "waiting for CA" CLIENT_DATA_ISSUE_SEEN = "client data issue seen" CA_UNAVAILABLE_FOR_REQUEST = "CA unavailable for request" REQUEST_CANCELED = "request canceled" INVALID_OPERATION = "invalid operation" class ResultDTO(object): """Result data transfer object (DTO). 
An object of this type is returned by most certificate plugin methods, and is used to guide follow on processing and to provide status feedback to clients. """ def __init__(self, status, status_message=None, certificate=None, intermediates=None, retry_msec=RETRY_MSEC, retry_method=None): """Creates a new ResultDTO. :param status: Status for cert order :param status_message: Message to explain status type. :param certificate: Certificate returned from CA to be stored in container :param intermediates: Intermediates to be stored in container :param retry_msec: Number of milliseconds to wait for retry :param retry_method: Method to be called for retry, if None then retry the current method """ self.status = status self.status_message = status_message self.certificate = certificate self.intermediates = intermediates self.retry_msec = int(retry_msec) self.retry_method = retry_method class BarbicanMetaDTO(object): """Barbican meta data transfer object Information needed to process a certificate request that is not specified in the original request, and written by Barbican core, that is needed by the plugin to process requests. """ def __init__(self, plugin_ca_id=None, generated_csr=None): """Creates a new BarbicanMetaDTO. :param plugin_ca_id: ca_id as known to the plugin :param generated_csr: csr generated in the stored-key case :return: BarbicanMetaDTO """ self.plugin_ca_id = plugin_ca_id self.generated_csr = generated_csr class CertificatePluginManager(named.NamedExtensionManager): def __init__(self, conf=CONF, invoke_args=(), invoke_kwargs={}): self.ca_repo = repos.get_ca_repository() super(CertificatePluginManager, self).__init__( conf.certificate.namespace, conf.certificate.enabled_certificate_plugins, invoke_on_load=False, # Defer creating plugins to utility below. invoke_args=invoke_args, invoke_kwds=invoke_kwargs ) plugin_utils.instantiate_plugins( self, invoke_args, invoke_kwargs) def get_plugin(self, certificate_spec): """Gets a supporting certificate plugin. :param certificate_spec: Contains details on the certificate to generate the certificate order :returns: CertificatePluginBase plugin implementation """ request_type = certificate_spec.get( REQUEST_TYPE, CertificateRequestType.CUSTOM_REQUEST) for plugin in plugin_utils.get_active_plugins(self): supported_request_types = plugin.supported_request_types() if request_type not in supported_request_types: continue if plugin.supports(certificate_spec): return plugin raise CertificatePluginNotFound() def get_plugin_by_name(self, plugin_name): """Gets a supporting certificate plugin. :param plugin_name: Name of the plugin to invoke :returns: CertificatePluginBase plugin implementation """ for plugin in plugin_utils.get_active_plugins(self): if utils.generate_fullname_for(plugin) == plugin_name: return plugin raise CertificatePluginNotFound(plugin_name) def get_plugin_by_ca_id(self, ca_id): """Gets a plugin based on the ca_id. 
:param ca_id: id for CA in the CertificateAuthorities table :returns: CertificatePluginBase plugin implementation """ ca = self.ca_repo.get(ca_id, suppress_exception=True) if not ca: raise CertificatePluginNotFoundForCAID(ca_id) return self.get_plugin_by_name(ca.plugin_name) def refresh_ca_table(self): """Refreshes the CertificateAuthority table.""" updates_made = False for plugin in plugin_utils.get_active_plugins(self): plugin_name = utils.generate_fullname_for(plugin) cas, offset, limit, total = self.ca_repo.get_by_create_date( plugin_name=plugin_name, suppress_exception=True) if total < 1: # if no entries are found, then the plugin has not yet been # queried or that plugin's entries have expired. # Most of the time, this will be a no-op for plugins. self.update_ca_info(plugin) updates_made = True if updates_made: # commit to DB to avoid async issues with different threads repos.commit() def update_ca_info(self, cert_plugin): """Update the CA info for a particular plugin.""" plugin_name = utils.generate_fullname_for(cert_plugin) try: new_ca_infos = cert_plugin.get_ca_info() except Exception as e: # The plugin gave an invalid CA, log and return LOG.error("ERROR getting CA from plugin: %s", encodeutils.exception_to_unicode(e)) return old_cas, offset, limit, total = self.ca_repo.get_by_create_date( plugin_name=plugin_name, suppress_exception=True, show_expired=True) if old_cas: for old_ca in old_cas: plugin_ca_id = old_ca.plugin_ca_id if plugin_ca_id not in new_ca_infos.keys(): # remove CAs that no longer exist self._delete_ca(old_ca) else: # update those that still exist self.ca_repo.update_entity( old_ca, new_ca_infos[plugin_ca_id]) old_ids = set([ca.plugin_ca_id for ca in old_cas]) else: old_ids = set() new_ids = set(new_ca_infos.keys()) # add new CAs add_ids = new_ids - old_ids for add_id in add_ids: try: self._add_ca(plugin_name, add_id, new_ca_infos[add_id]) except Exception as e: # The plugin gave an invalid CA, log and continue LOG.error("ERROR adding CA from plugin: %s", encodeutils.exception_to_unicode(e)) def _add_ca(self, plugin_name, plugin_ca_id, ca_info): parsed_ca = dict(ca_info) parsed_ca['plugin_name'] = plugin_name parsed_ca['plugin_ca_id'] = plugin_ca_id new_ca = models.CertificateAuthority(parsed_ca) self.ca_repo.create_from(new_ca) def _delete_ca(self, ca): self.ca_repo.delete_entity_by_id(ca.id, None) class _CertificateEventPluginManager(named.NamedExtensionManager, CertificateEventPluginBase): """Provides services for certificate event plugins. This plugin manager differs from others in that it implements the same contract as the plugins that it manages. This allows eventing operations to occur on all installed plugins (with this class acting as a composite plugin), rather than just eventing via an individual plugin. Each time this class is initialized it will load a new instance of each enabled plugin. This is undesirable, so rather than initializing a new instance of this class use the get_event_plugin_manager function at the module level. """ def __init__(self, conf=CONF, invoke_args=(), invoke_kwargs={}): super(_CertificateEventPluginManager, self).__init__( conf.certificate_event.namespace, conf.certificate_event.enabled_certificate_event_plugins, invoke_on_load=False, # Defer creating plugins to utility below. invoke_args=invoke_args, invoke_kwds=invoke_kwargs ) plugin_utils.instantiate_plugins( self, invoke_args, invoke_kwargs) def get_plugin_by_name(self, plugin_name): """Gets a supporting certificate event plugin. 
:returns: CertificateEventPluginBase plugin implementation """ for plugin in plugin_utils.get_active_plugins(self): if utils.generate_fullname_for(plugin) == plugin_name: return plugin raise CertificateEventPluginNotFound(plugin_name) def notify_certificate_is_ready( self, project_id, order_ref, container_ref): self._invoke_certificate_plugins( 'notify_certificate_is_ready', project_id, order_ref, container_ref) def notify_ca_is_unavailable( self, project_id, order_ref, error_msg, retry_in_msec): self._invoke_certificate_plugins( 'notify_ca_is_unavailable', project_id, order_ref, error_msg, retry_in_msec) def _invoke_certificate_plugins(self, method, *args, **kwargs): """Invoke same function on plugins as calling function.""" active_plugins = plugin_utils.get_active_plugins(self) if not active_plugins: raise CertificateEventPluginNotFound() for plugin in active_plugins: getattr(plugin, method)(*args, **kwargs) def get_event_plugin_manager(): global _EVENT_PLUGIN_MANAGER if _EVENT_PLUGIN_MANAGER: return _EVENT_PLUGIN_MANAGER _EVENT_PLUGIN_MANAGER = _CertificateEventPluginManager() return _EVENT_PLUGIN_MANAGER barbican-9.1.0.dev50/barbican/plugin/crypto/0000775000175000017500000000000013616500640020770 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/plugin/crypto/p11_crypto.py0000664000175000017500000003725713616500636023366 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
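# A minimal, illustrative [p11_crypto_plugin] configuration for this module.
# The option names are defined in p11_crypto_plugin_opts below; the library
# path and key labels here are hypothetical and depend on the HSM vendor and
# deployment:
#
#     [p11_crypto_plugin]
#     library_path = /usr/lib/libCryptoki2_64.so
#     login = <HSM partition passphrase>
#     mkek_label = barbican_mkek_0
#     mkek_length = 32
#     hmac_label = barbican_hmac_0
#     encryption_mechanism = CKM_AES_CBC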
import base64 import collections import threading import time from oslo_config import cfg from oslo_serialization import jsonutils as json from barbican.common import config from barbican.common import exception from barbican.common import utils from barbican import i18n as u from barbican.plugin.crypto import base as plugin from barbican.plugin.crypto import pkcs11 CONF = config.new_config() LOG = utils.getLogger(__name__) CachedKEK = collections.namedtuple("CachedKEK", ["kek", "expires"]) p11_crypto_plugin_group = cfg.OptGroup(name='p11_crypto_plugin', title="PKCS11 Crypto Plugin Options") p11_crypto_plugin_opts = [ cfg.StrOpt('library_path', help=u._('Path to vendor PKCS11 library')), cfg.StrOpt('login', help=u._('Password to login to PKCS11 session'), secret=True), cfg.StrOpt('mkek_label', help=u._('Master KEK label (as stored in the HSM)')), cfg.IntOpt('mkek_length', help=u._('Master KEK length in bytes.')), cfg.StrOpt('hmac_label', help=u._('Master HMAC Key label (as stored in the HSM)')), cfg.IntOpt('slot_id', help=u._('HSM Slot ID'), default=1), cfg.BoolOpt('rw_session', help=u._('Flag for Read/Write Sessions'), default=True), cfg.IntOpt('pkek_length', help=u._('Project KEK length in bytes.'), default=32), cfg.IntOpt('pkek_cache_ttl', help=u._('Project KEK Cache Time To Live, in seconds'), default=900), cfg.IntOpt('pkek_cache_limit', help=u._('Project KEK Cache Item Limit'), default=100), cfg.StrOpt('encryption_mechanism', help=u._('Secret encryption mechanism'), default='CKM_AES_CBC', deprecated_name='algorithm'), cfg.StrOpt('hmac_key_type', help=u._('HMAC Key Type'), default='CKK_AES'), cfg.StrOpt('hmac_keygen_mechanism', help=u._('HMAC Key Generation Algorithm'), default='CKM_AES_KEY_GEN'), cfg.StrOpt('hmac_keywrap_mechanism', help=u._('HMAC key wrap mechanism'), default='CKM_SHA256_HMAC'), cfg.StrOpt('seed_file', help=u._('File to pull entropy for seeding RNG'), default=''), cfg.IntOpt('seed_length', help=u._('Amount of data to read from file for seed'), default=32), cfg.StrOpt('plugin_name', help=u._('User friendly plugin name'), default='PKCS11 HSM'), cfg.BoolOpt('aes_gcm_generate_iv', help=u._('Generate IVs for CKM_AES_GCM mechanism.'), default=True, deprecated_name='generate_iv'), cfg.BoolOpt('always_set_cka_sensitive', help=u._('Always set CKA_SENSITIVE=CK_TRUE including ' 'CKA_EXTRACTABLE=CK_TRUE keys.'), default=True), ] CONF.register_group(p11_crypto_plugin_group) CONF.register_opts(p11_crypto_plugin_opts, group=p11_crypto_plugin_group) config.parse_args(CONF) def list_opts(): yield p11_crypto_plugin_group, p11_crypto_plugin_opts def json_dumps_compact(data): return json.dumps(data, separators=(',', ':')) class P11CryptoPlugin(plugin.CryptoPluginBase): """PKCS11 supporting implementation of the crypto plugin. 
""" def __init__(self, conf=CONF, ffi=None, pkcs11=None): self.conf = conf plugin_conf = conf.p11_crypto_plugin if plugin_conf.library_path is None: raise ValueError(u._("library_path is required")) # Use specified or create new pkcs11 object self.pkcs11 = pkcs11 or self._create_pkcs11(plugin_conf, ffi) # Save conf arguments self.encryption_mechanism = plugin_conf.encryption_mechanism self.mkek_key_type = 'CKK_AES' self.mkek_length = plugin_conf.mkek_length self.mkek_label = plugin_conf.mkek_label self.hmac_label = plugin_conf.hmac_label self.hmac_key_type = plugin_conf.hmac_key_type self.hmac_keygen_mechanism = plugin_conf.hmac_keygen_mechanism self.pkek_length = plugin_conf.pkek_length self.pkek_cache_ttl = plugin_conf.pkek_cache_ttl self.pkek_cache_limit = plugin_conf.pkek_cache_limit self._configure_object_cache() def get_plugin_name(self): return self.conf.p11_crypto_plugin.plugin_name def encrypt(self, encrypt_dto, kek_meta_dto, project_id): return self._call_pkcs11(self._encrypt, encrypt_dto, kek_meta_dto, project_id) def decrypt(self, decrypt_dto, kek_meta_dto, kek_meta_extended, project_id): return self._call_pkcs11(self._decrypt, decrypt_dto, kek_meta_dto, kek_meta_extended, project_id) def bind_kek_metadata(self, kek_meta_dto): return self._call_pkcs11(self._bind_kek_metadata, kek_meta_dto) def generate_symmetric(self, generate_dto, kek_meta_dto, project_id): return self._call_pkcs11(self._generate_symmetric, generate_dto, kek_meta_dto, project_id) def generate_asymmetric(self, generate_dto, kek_meta_dto, project_id): raise NotImplementedError(u._("Feature not implemented for PKCS11")) def supports(self, type_enum, algorithm=None, bit_length=None, mode=None): if type_enum == plugin.PluginSupportTypes.ENCRYPT_DECRYPT: return True elif type_enum == plugin.PluginSupportTypes.SYMMETRIC_KEY_GENERATION: return True elif type_enum == plugin.PluginSupportTypes.ASYMMETRIC_KEY_GENERATION: return False else: return False def _call_pkcs11(self, func, *args, **kwargs): # Wrap pkcs11 calls to enable a single retry when exceptions are raised # that can be fixed by reinitializing the pkcs11 library try: return func(*args, **kwargs) except (exception.PKCS11Exception) as pe: LOG.warning("Reinitializing PKCS#11 library: %s", pe) self._reinitialize_pkcs11() return func(*args, **kwargs) def _encrypt(self, encrypt_dto, kek_meta_dto, project_id): kek = self._load_kek_from_meta_dto(kek_meta_dto) try: session = self._get_session() ct_data = self.pkcs11.encrypt( kek, encrypt_dto.unencrypted, session ) finally: if 'session' in locals(): self._return_session(session) kek_meta_extended = json_dumps_compact({ 'iv': base64.b64encode(ct_data['iv']), 'mechanism': self.encryption_mechanism }) return plugin.ResponseDTO(ct_data['ct'], kek_meta_extended) def _decrypt(self, decrypt_dto, kek_meta_dto, kek_meta_extended, project_id): kek = self._load_kek_from_meta_dto(kek_meta_dto) meta_extended = json.loads(kek_meta_extended) iv = base64.b64decode(meta_extended['iv']) mech = meta_extended['mechanism'] try: session = self._get_session() pt_data = self.pkcs11.decrypt( mech, kek, iv, decrypt_dto.encrypted, session ) finally: if 'session' in locals(): self._return_session(session) return pt_data def _bind_kek_metadata(self, kek_meta_dto): if not kek_meta_dto.plugin_meta: # Generate wrapped kek and jsonify wkek = self._generate_wrapped_kek( self.pkek_length, kek_meta_dto.kek_label ) # Persisted by Barbican kek_meta_dto.plugin_meta = json_dumps_compact(wkek) kek_meta_dto.algorithm = 'AES' kek_meta_dto.bit_length = 
self.pkek_length * 8 kek_meta_dto.mode = 'CBC' return kek_meta_dto def _generate_symmetric(self, generate_dto, kek_meta_dto, project_id): kek = self._load_kek_from_meta_dto(kek_meta_dto) byte_length = int(generate_dto.bit_length) // 8 try: session = self._get_session() buf = self.pkcs11.generate_random(byte_length, session) ct_data = self.pkcs11.encrypt(kek, buf, session) finally: if 'session' in locals(): self._return_session(session) kek_meta_extended = json_dumps_compact( {'iv': base64.b64encode(ct_data['iv']), 'mechanism': self.encryption_mechanism} ) return plugin.ResponseDTO(ct_data['ct'], kek_meta_extended) def _configure_object_cache(self): # Master Key cache self.mk_cache = {} self.mk_cache_lock = threading.RLock() # Project KEK cache self.pkek_cache = collections.OrderedDict() self.pkek_cache_lock = threading.RLock() # Session for object caching self.caching_session = self._get_session() self.caching_session_lock = threading.RLock() # Cache master keys self._get_master_key(self.mkek_key_type, self.mkek_label) self._get_master_key(self.hmac_key_type, self.hmac_label) def _pkek_cache_add(self, kek, label): with self.pkek_cache_lock: if label in self.pkek_cache: raise ValueError('{0} is already in the cache'.format(label)) now = int(time.time()) ckek = CachedKEK(kek, now + self.pkek_cache_ttl) if len(self.pkek_cache) >= self.pkek_cache_limit: with self.caching_session_lock: session = self.caching_session self._pkek_cache_expire(now, session) # Test again if call above didn't remove any items if len(self.pkek_cache) >= self.pkek_cache_limit: (l, k) = self.pkek_cache.popitem(last=False) self.pkcs11.destroy_object(k.kek, session) self.pkek_cache[label] = ckek def _pkek_cache_get(self, label, default=None): kek = default with self.pkek_cache_lock: ckek = self.pkek_cache.get(label) if ckek is not None: if int(time.time()) < ckek.expires: kek = ckek.kek else: with self.caching_session_lock: self.pkcs11.destroy_object(ckek.kek, self.caching_session) del self.pkek_cache[label] return kek def _pkek_cache_expire(self, now, session): # Look for expired items, starting from oldest for (label, kek) in self.pkek_cache.items(): if now >= kek.expires: self.pkcs11.destroy_object(kek.kek, session) del self.pkek_cache[label] else: break def _create_pkcs11(self, plugin_conf, ffi=None): seed_random_buffer = None if plugin_conf.seed_file: with open(plugin_conf.seed_file, 'rb') as f: seed_random_buffer = f.read(plugin_conf.seed_length) return pkcs11.PKCS11( library_path=plugin_conf.library_path, login_passphrase=plugin_conf.login, rw_session=plugin_conf.rw_session, slot_id=plugin_conf.slot_id, encryption_mechanism=plugin_conf.encryption_mechanism, ffi=ffi, seed_random_buffer=seed_random_buffer, generate_iv=plugin_conf.aes_gcm_generate_iv, always_set_cka_sensitive=plugin_conf.always_set_cka_sensitive, hmac_keywrap_mechanism=plugin_conf.hmac_keywrap_mechanism ) def _reinitialize_pkcs11(self): self.pkcs11.finalize() self.pkcs11 = None with self.caching_session_lock: self.caching_session = None with self.pkek_cache_lock: self.pkek_cache.clear() with self.mk_cache_lock: self.mk_cache.clear() self.pkcs11 = self._create_pkcs11(self.conf.p11_crypto_plugin) self._configure_object_cache() def _get_session(self): return self.pkcs11.get_session() def _return_session(self, session): self.pkcs11.return_session(session) def _get_master_key(self, key_type, label): with self.mk_cache_lock: session = self.caching_session key = self.mk_cache.get(label, None) if key is None: with self.caching_session_lock: key = 
self.pkcs11.get_key_handle(key_type, label, session) if key is None: raise exception.P11CryptoKeyHandleException( u._("Could not find key labeled {0}").format(label) ) self.mk_cache[label] = key return key def _load_kek_from_meta_dto(self, kek_meta_dto): meta = json.loads(kek_meta_dto.plugin_meta) kek = self._load_kek( kek_meta_dto.kek_label, meta['iv'], meta['wrapped_key'], meta['hmac'], meta['mkek_label'], meta['hmac_label'] ) return kek def _load_kek(self, key_label, iv, wrapped_key, hmac, mkek_label, hmac_label): with self.pkek_cache_lock: kek = self._pkek_cache_get(key_label) if kek is None: # Decode data iv = base64.b64decode(iv) wrapped_key = base64.b64decode(wrapped_key) hmac = base64.b64decode(hmac) kek_data = iv + wrapped_key with self.caching_session_lock: session = self.caching_session # Get master keys mkek = self._get_master_key(self.mkek_key_type, mkek_label) mkhk = self._get_master_key(self.hmac_key_type, hmac_label) # Verify HMAC self.pkcs11.verify_hmac(mkhk, hmac, kek_data, session) # Unwrap KEK kek = self.pkcs11.unwrap_key(mkek, iv, wrapped_key, session) self._pkek_cache_add(kek, key_label) return kek def _generate_wrapped_kek(self, key_length, key_label): with self.caching_session_lock: session = self.caching_session # Get master keys mkek = self._get_master_key(self.mkek_key_type, self.mkek_label) mkhk = self._get_master_key(self.hmac_key_type, self.hmac_label) # Generate KEK kek = self.pkcs11.generate_key( 'CKK_AES', key_length, 'CKM_AES_KEY_GEN', session, encrypt=True ) # Wrap KEK wkek = self.pkcs11.wrap_key(mkek, kek, session) # HMAC Wrapped KEK wkek_data = wkek['iv'] + wkek['wrapped_key'] wkek_hmac = self.pkcs11.compute_hmac(mkhk, wkek_data, session) # Cache KEK self._pkek_cache_add(kek, key_label) return { 'iv': base64.b64encode(wkek['iv']), 'wrapped_key': base64.b64encode(wkek['wrapped_key']), 'hmac': base64.b64encode(wkek_hmac), 'mkek_label': self.mkek_label, 'hmac_label': self.hmac_label } barbican-9.1.0.dev50/barbican/plugin/crypto/simple_crypto.py0000664000175000017500000002274113616500636024246 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
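# A minimal sketch of the Fernet round-trip that SimpleCryptoPlugin below
# performs for secrets and per-project KEKs (purely illustrative; the key
# here is freshly generated rather than taken from configuration):
#
#     from cryptography import fernet
#     key = fernet.Fernet.generate_key()
#     f = fernet.Fernet(key)
#     token = f.encrypt(b'secret bytes')
#     assert f.decrypt(token) == b'secret bytes'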
import os from cryptography import fernet from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.asymmetric import dsa from cryptography.hazmat.primitives.asymmetric import rsa from cryptography.hazmat.primitives import serialization from oslo_config import cfg from oslo_utils import encodeutils import six from barbican.common import config from barbican.common import utils from barbican import i18n as u from barbican.plugin.crypto import base as c CONF = config.new_config() LOG = utils.getLogger(__name__) simple_crypto_plugin_group = cfg.OptGroup(name='simple_crypto_plugin', title="Simple Crypto Plugin Options") simple_crypto_plugin_opts = [ cfg.StrOpt('kek', default='dGhpcnR5X3R3b19ieXRlX2tleWJsYWhibGFoYmxhaGg=', help=u._('Key encryption key to be used by Simple Crypto ' 'Plugin'), secret=True), cfg.StrOpt('plugin_name', help=u._('User friendly plugin name'), default='Software Only Crypto'), ] CONF.register_group(simple_crypto_plugin_group) CONF.register_opts(simple_crypto_plugin_opts, group=simple_crypto_plugin_group) config.parse_args(CONF) def list_opts(): yield simple_crypto_plugin_group, simple_crypto_plugin_opts class SimpleCryptoPlugin(c.CryptoPluginBase): """Insecure implementation of the crypto plugin.""" def __init__(self, conf=CONF): self.master_kek = conf.simple_crypto_plugin.kek self.plugin_name = conf.simple_crypto_plugin.plugin_name LOG.info("{} initialized".format(self.plugin_name)) def get_plugin_name(self): return self.plugin_name def _get_kek(self, kek_meta_dto): if not kek_meta_dto.plugin_meta: raise ValueError(u._('KEK not yet created.')) # The KEK is stored encrypted, so it needs to be decrypted first. encryptor = fernet.Fernet(self.master_kek) # Note: if plugin_meta is a unicode type, encode it to bytes. if isinstance(kek_meta_dto.plugin_meta, six.text_type): kek_meta_dto.plugin_meta = kek_meta_dto.plugin_meta.encode('utf-8') return encryptor.decrypt(kek_meta_dto.plugin_meta) def encrypt(self, encrypt_dto, kek_meta_dto, project_id): kek = self._get_kek(kek_meta_dto) unencrypted = encrypt_dto.unencrypted if not isinstance(unencrypted, six.binary_type): raise ValueError( u._( 'Unencrypted data must be a byte type, but was ' '{unencrypted_type}' ).format( unencrypted_type=type(unencrypted) ) ) encryptor = fernet.Fernet(kek) cyphertext = encryptor.encrypt(unencrypted) return c.ResponseDTO(cyphertext, None) def decrypt(self, encrypted_dto, kek_meta_dto, kek_meta_extended, project_id): kek = self._get_kek(kek_meta_dto) encrypted = encrypted_dto.encrypted decryptor = fernet.Fernet(kek) return decryptor.decrypt(encrypted) def bind_kek_metadata(self, kek_meta_dto): kek_meta_dto.algorithm = 'aes' kek_meta_dto.bit_length = 128 kek_meta_dto.mode = 'cbc' if not kek_meta_dto.plugin_meta: # The KEK is stored encrypted in the plugin_meta field encryptor = fernet.Fernet(self.master_kek) key = fernet.Fernet.generate_key() kek_meta_dto.plugin_meta = encryptor.encrypt(key) return kek_meta_dto def generate_symmetric(self, generate_dto, kek_meta_dto, project_id): byte_length = int(generate_dto.bit_length) // 8 unencrypted = os.urandom(byte_length) return self.encrypt(c.EncryptDTO(unencrypted), kek_meta_dto, project_id) def generate_asymmetric(self, generate_dto, kek_meta_dto, project_id): """Generate asymmetric keys based on the rules below: - RSA, with passphrase (supported) - RSA, without passphrase (supported) - DSA, without passphrase (supported) - DSA, with passphrase (supported) """ if (generate_dto.algorithm is None or generate_dto.algorithm.lower() == 'rsa'):
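            # 65537 (0x10001) is the conventional RSA public exponent; the
            # key size comes straight from the order's requested bit_length.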
private_key = rsa.generate_private_key( public_exponent=65537, key_size=generate_dto.bit_length, backend=default_backend() ) elif generate_dto.algorithm.lower() == 'dsa': private_key = dsa.generate_private_key( key_size=generate_dto.bit_length, backend=default_backend() ) else: raise c.CryptoPrivateKeyFailureException() public_key = private_key.public_key() if generate_dto.algorithm.lower() == 'rsa': private_key = private_key.private_bytes( encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.PKCS8, encryption_algorithm=self._get_encryption_algorithm( generate_dto.passphrase) ) public_key = public_key.public_bytes( encoding=serialization.Encoding.PEM, format=serialization.PublicFormat.SubjectPublicKeyInfo ) if generate_dto.algorithm.lower() == 'dsa': private_key = private_key.private_bytes( encoding=serialization.Encoding.DER, format=serialization.PrivateFormat.PKCS8, encryption_algorithm=self._get_encryption_algorithm( generate_dto.passphrase) ) public_key = public_key.public_bytes( encoding=serialization.Encoding.DER, format=serialization.PublicFormat.SubjectPublicKeyInfo ) private_dto = self.encrypt(c.EncryptDTO(private_key), kek_meta_dto, project_id) public_dto = self.encrypt(c.EncryptDTO(public_key), kek_meta_dto, project_id) passphrase_dto = None if generate_dto.passphrase: if isinstance(generate_dto.passphrase, six.text_type): generate_dto.passphrase = generate_dto.passphrase.encode( 'utf-8') passphrase_dto = self.encrypt(c.EncryptDTO(generate_dto.passphrase), kek_meta_dto, project_id) return private_dto, public_dto, passphrase_dto def supports(self, type_enum, algorithm=None, bit_length=None, mode=None): if type_enum == c.PluginSupportTypes.ENCRYPT_DECRYPT: return True if type_enum == c.PluginSupportTypes.SYMMETRIC_KEY_GENERATION: return self._is_algorithm_supported(algorithm, bit_length, mode) elif type_enum == c.PluginSupportTypes.ASYMMETRIC_KEY_GENERATION: return self._is_algorithm_supported(algorithm, bit_length, mode) else: return False def _get_encryption_algorithm(self, passphrase): """Choose whether to use encryption or not, based on the passphrase. serialization.BestAvailableEncryption fails if the passphrase is not given or is less than one byte long, therefore we need to check that it is valid. """ if passphrase: # encryption requires the password in bytes format algorithm = serialization.BestAvailableEncryption( # default encoding is utf-8 encodeutils.safe_encode(passphrase) ) else: algorithm = serialization.NoEncryption() return algorithm def _is_algorithm_supported(self, algorithm=None, bit_length=None, mode=None): """Check if the algorithm and bit_length combination is supported.""" if algorithm is None or bit_length is None: return False length_factor = 1 # xts-mode cuts the effective key for the algorithm in half, # so the bit_length must be double the supported length. # In the future there should be validation of supported modes too.
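        # For example, 'aes' in XTS mode with bit_length=512 is treated as
        # an effective 256 bit key (512 / 2), which is in
        # SYMMETRIC_KEY_LENGTHS.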
if mode is not None and mode.lower() == "xts": length_factor = 2 if (algorithm.lower() in c.PluginSupportTypes.SYMMETRIC_ALGORITHMS and bit_length / length_factor in c.PluginSupportTypes.SYMMETRIC_KEY_LENGTHS): return True elif (algorithm.lower() in c.PluginSupportTypes.ASYMMETRIC_ALGORITHMS and bit_length in c.PluginSupportTypes.ASYMMETRIC_KEY_LENGTHS): return True else: return False barbican-9.1.0.dev50/barbican/plugin/crypto/manager.py0000664000175000017500000001400313616500636022757 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from oslo_config import cfg from stevedore import named import threading from barbican.common import config from barbican.common import utils from barbican import i18n as u from barbican.plugin.crypto import base from barbican.plugin.util import multiple_backends from barbican.plugin.util import utils as plugin_utils _PLUGIN_MANAGER = None _PLUGIN_MANAGER_LOCK = threading.RLock() CONF = config.new_config() DEFAULT_PLUGIN_NAMESPACE = 'barbican.crypto.plugin' DEFAULT_PLUGINS = ['simple_crypto'] crypto_opt_group = cfg.OptGroup(name='crypto', title='Crypto Plugin Options') crypto_opts = [ cfg.StrOpt('namespace', default=DEFAULT_PLUGIN_NAMESPACE, help=u._('Extension namespace to search for plugins.') ), cfg.MultiStrOpt('enabled_crypto_plugins', default=DEFAULT_PLUGINS, help=u._('List of crypto plugins to load.') ) ] CONF.register_group(crypto_opt_group) CONF.register_opts(crypto_opts, group=crypto_opt_group) config.parse_args(CONF) config.set_module_config("crypto", CONF) def list_opts(): yield crypto_opt_group, crypto_opts class _CryptoPluginManager(named.NamedExtensionManager): def __init__(self, conf=CONF, invoke_args=(), invoke_kwargs={}): """Crypto Plugin Manager Each time this class is initialized it will load a new instance of each enabled crypto plugin. This is undesirable, so rather than initializing a new instance of this class, use the get_manager() function at the module level. """ crypto_conf = config.get_module_config('crypto') plugin_names = self._get_internal_plugin_names(crypto_conf) super(_CryptoPluginManager, self).__init__( crypto_conf.crypto.namespace, plugin_names, invoke_on_load=False, # Defer creating plugins to utility below. invoke_args=invoke_args, invoke_kwds=invoke_kwargs, name_order=True # extensions sorted as per order of plugin names ) plugin_utils.instantiate_plugins( self, invoke_args, invoke_kwargs) def get_plugin_store_generate(self, type_needed, algorithm=None, bit_length=None, mode=None, project_id=None): """Gets a secret store or generate plugin that supports the provided type.
:param type_needed: PluginSupportTypes that contains details on the type of plugin required :returns: CryptoPluginBase plugin implementation """ active_plugins = multiple_backends.get_applicable_crypto_plugins( self, project_id=project_id, existing_plugin_name=None) if not active_plugins: raise base.CryptoPluginNotFound() for generating_plugin in active_plugins: if generating_plugin.supports( type_needed, algorithm, bit_length, mode): break else: operation = (u._("store or generate a secret of type {secret_type}" " with algorithm {algorithm}, bit length " "{bit_length}, and mode {mode}") .format(secret_type=type_needed, algorithm=algorithm, bit_length=bit_length, mode=mode)) raise base.CryptoPluginUnsupportedOperation(operation=operation) return generating_plugin def get_plugin_retrieve(self, plugin_name_for_store): """Gets the secret retrieve plugin matching the provided plugin name. :param plugin_name_for_store: Name of the plugin that was used to store the secret :returns: CryptoPluginBase plugin implementation """ active_plugins = plugin_utils.get_active_plugins(self) if not active_plugins: raise base.CryptoPluginNotFound() for decrypting_plugin in active_plugins: plugin_name = utils.generate_fullname_for(decrypting_plugin) if plugin_name == plugin_name_for_store: break else: operation = (u._("retrieve a secret from plugin: {plugin}") .format(plugin=plugin_name_for_store)) raise base.CryptoPluginUnsupportedOperation(operation=operation) return decrypting_plugin def _get_internal_plugin_names(self, crypto_conf): """Gets plugin names used for loading via stevedore. When multiple secret store support is enabled, crypto plugin names are read via the updated configuration structure. If not enabled, the MultiStr property in the 'crypto' config section is read instead. """ # to cache the default global secret store value on first use self.global_default_store_dict = None if utils.is_multiple_backends_enabled(): parsed_stores = multiple_backends.read_multiple_backends_config() plugin_names = [store.crypto_plugin for store in parsed_stores if store.crypto_plugin] else: plugin_names = crypto_conf.crypto.enabled_crypto_plugins return plugin_names def get_manager(): """Return a singleton crypto plugin manager.""" global _PLUGIN_MANAGER global _PLUGIN_MANAGER_LOCK if not _PLUGIN_MANAGER: with _PLUGIN_MANAGER_LOCK: if not _PLUGIN_MANAGER: _PLUGIN_MANAGER = _CryptoPluginManager() return _PLUGIN_MANAGER barbican-9.1.0.dev50/barbican/plugin/crypto/base.py0000664000175000017500000003547113616500636022263 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License.
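# Out-of-tree crypto plugins subclass CryptoPluginBase (defined below) and
# are loaded by stevedore from the 'barbican.crypto.plugin' entry point
# namespace used in manager.py. A hypothetical setup.cfg registration for
# a third-party plugin might look like:
#
#     [entry_points]
#     barbican.crypto.plugin =
#         my_hsm = mypackage.plugin:MyHSMCryptoPlugin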
import abc import six from barbican.common import exception from barbican import i18n as u class CryptoPluginNotFound(exception.BarbicanException): """Raised when no plugins are installed.""" message = u._("Crypto plugin not found.") class CryptoKEKBindingException(exception.BarbicanException): """Raised when the bind_kek_metadata method from a plugin returns None.""" def __init__(self, plugin_name=u._('Unknown')): super(CryptoKEKBindingException, self).__init__( u._('Failed to bind kek metadata for ' 'plugin: {name}').format(name=plugin_name) ) self.plugin_name = plugin_name class CryptoPrivateKeyFailureException(exception.BarbicanException): """Raised when could not generate private key.""" def __init__(self): super(CryptoPrivateKeyFailureException, self).__init__( u._('Could not generate private key') ) class CryptoPluginUnsupportedOperation(exception.BarbicanException): """Raised when no crypto plugins support the operation.""" def __init__(self, operation): message = ( u._('Could not find an enabled crypto plugin backend ' 'that supports the requested operation: {operation}') .format(operation=operation)) super(CryptoPluginUnsupportedOperation, self).__init__(message) # TODO(john-wood-w) Need to harmonize these lower-level constants with the # higher level constants in secret_store.py. class PluginSupportTypes(object): """Class to hold the type enumeration that plugins may support.""" ENCRYPT_DECRYPT = "ENCRYPT_DECRYPT" SYMMETRIC_KEY_GENERATION = "SYMMETRIC_KEY_GENERATION" # A list of symmetric algorithms that are used to determine type of key gen SYMMETRIC_ALGORITHMS = ['aes', 'des', '3des', 'hmacsha1', 'hmacsha256', 'hmacsha384', 'hmacsha512'] SYMMETRIC_KEY_LENGTHS = [64, 128, 192, 256] ASYMMETRIC_KEY_GENERATION = "ASYMMETRIC_KEY_GENERATION" ASYMMETRIC_ALGORITHMS = ['rsa', 'dsa'] ASYMMETRIC_KEY_LENGTHS = [1024, 2048, 4096] class KEKMetaDTO(object): """Key Encryption Key Meta DTO Key Encryption Keys (KEKs) in Barbican are intended to represent a distinct key that is used to perform encryption on secrets for a particular project. ``KEKMetaDTO`` objects are provided to cryptographic backends by Barbican to allow plugins to persist metadata related to the project's KEK. For example, a plugin that interfaces with a Hardware Security Module (HSM) may want to use a different encryption key for each project. Such a plugin could use the ``KEKMetaDTO`` object to save the key ID used for that project. Barbican will persist the KEK metadata and ensure that it is provided to the plugin every time a request from that same project is processed. .. attribute:: plugin_name String attribute used by Barbican to identify the plugin that is bound to the KEK metadata. Plugins should not change this attribute. .. attribute:: kek_label String attribute used to label the project's KEK by the plugin. The value of this attribute should be meaningful to the plugin. Barbican does not use this value. .. attribute:: algorithm String attribute used to identify the encryption algorithm used by the plugin. e.g. "AES", "3DES", etc. This value should be meaningful to the plugin. Barbican does not use this value. .. attribute:: mode String attribute used to identify the algorithm mode used by the plugin. e.g. "CBC", "GCM", etc. This value should be meaningful to the plugin. Barbican does not use this value. .. attribute:: bit_length Integer attribute used to identify the bit length of the KEK by the plugin. This value should be meaningful to the plugin. Barbican does not use this value. .. 
attribute:: plugin_meta String attribute used to persist any additional metadata that does not fit in any other attribute. The value of this attribute is defined by the plugin. It could be used to store external system references, such as Key IDs in an HSM, URIs to an external service, or any other data that the plugin deems necessary to persist. Because this is just a plain text field, a plugin may even choose to persist data such as key/value pairs in a JSON object. """ def __init__(self, kek_datum): """Plugins should not have to create their own instance of this class. kek_datum is typically a barbican.model.models.KEKDatum instance. """ self.kek_label = kek_datum.kek_label self.plugin_name = kek_datum.plugin_name self.algorithm = kek_datum.algorithm self.bit_length = kek_datum.bit_length self.mode = kek_datum.mode self.plugin_meta = kek_datum.plugin_meta class GenerateDTO(object): """Secret Generation DTO Data Transfer Object used to pass all the necessary data for the plugin to generate a secret on behalf of the user. .. attribute:: generation_type String attribute used to identify the type of secret that should be generated. This will be either ``"symmetric"`` or ``"asymmetric"``. .. attribute:: algorithm String attribute used to specify what type of algorithm the secret will be used for. e.g. ``"AES"`` for a ``"symmetric"`` type, or ``"RSA"`` for ``"asymmetric"``. .. attribute:: mode String attribute used to specify what algorithm mode the secret will be used for. e.g. ``"CBC"`` for the ``"AES"`` algorithm. .. attribute:: bit_length Integer attribute used to specify the bit length of the secret. For example, this attribute could specify the key length for an encryption key to be used in AES-CBC. """ def __init__(self, algorithm, bit_length, mode, passphrase=None): self.algorithm = algorithm self.bit_length = bit_length self.mode = mode self.passphrase = passphrase class ResponseDTO(object): """Data transfer object for secret generation response. Barbican guarantees that both the ``cypher_text`` and ``kek_metadata_extended`` will be persisted and then given back to the plugin when requesting a decryption operation. ``kek_metadata_extended`` takes the idea of Key Encryption Key (KEK) metadata further by giving plugins the option to store secret-level KEK metadata. One example of using secret-level KEK metadata would be plugins that want to use a unique KEK for every secret that is encrypted. Such a plugin could use ``kek_metadata_extended`` to store the Key ID for the KEK used to encrypt this particular secret. :param cypher_text: Byte data resulting from the encryption of the secret data. :param kek_meta_extended: Optional String object to be persisted alongside the cyphertext. """ def __init__(self, cypher_text, kek_meta_extended=None): self.cypher_text = cypher_text self.kek_meta_extended = kek_meta_extended class DecryptDTO(object): """Secret Decryption DTO Data Transfer Object used to pass all the necessary data for the plugin to perform decryption of a secret. Currently, this DTO only contains the data produced by the plugin during encryption, but in the future this DTO will contain more information, such as a transport key for secret wrapping back to the client. .. attribute:: encrypted The data that was produced by the plugin during encryption. For some plugins this will be the actual bytes that need to be decrypted to produce the secret. In other implementations, this may just be a reference to some external system that can produce the unencrypted secret.
""" def __init__(self, encrypted): self.encrypted = encrypted class EncryptDTO(object): """Secret Encryption DTO Data Transfer Object used to pass all the necessary data for the plugin to perform encryption of a secret. Currently, this DTO only contains the raw bytes to be encrypted by the plugin, but in the future this may contain more information. .. attribute:: unencrypted The secret data in Bytes to be encrypted by the plugin. """ def __init__(self, unencrypted): self.unencrypted = unencrypted @six.add_metaclass(abc.ABCMeta) class CryptoPluginBase(object): """Base class for all Crypto plugins. Barbican requests operations by invoking the methods on an instance of the implementing class. Barbican's plugin manager handles the life-cycle of the Data Transfer Objects (DTOs) that are passed into these methods, and persist the data that is assigned to these DTOs by the plugin. """ @abc.abstractmethod def get_plugin_name(self): """Gets user friendly plugin name. This plugin name is expected to be read from config file. There will be a default defined for plugin name which can be customized in specific deployment if needed. This name needs to be unique across a deployment. """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def encrypt(self, encrypt_dto, kek_meta_dto, project_id): """Encryption handler function This method will be called by Barbican when requesting an encryption operation on a secret on behalf of a project. :param encrypt_dto: :class:`EncryptDTO` instance containing the raw secret byte data to be encrypted. :type encrypt_dto: :class:`EncryptDTO` :param kek_meta_dto: :class:`KEKMetaDTO` instance containing information about the project's Key Encryption Key (KEK) to be used for encryption. Plugins may assume that binding via :meth:`bind_kek_metadata` has already taken place before this instance is passed in. :type kek_meta_dto: :class:`KEKMetaDTO` :param project_id: Project ID associated with the unencrypted data. :return: A response DTO containing the cyphertext and KEK information. :rtype: :class:`ResponseDTO` """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def decrypt(self, decrypt_dto, kek_meta_dto, kek_meta_extended, project_id): """Decrypt encrypted_datum in the context of the provided project. :param decrypt_dto: data transfer object containing the cyphertext to be decrypted. :param kek_meta_dto: Key encryption key metadata to use for decryption :param kek_meta_extended: Optional per-secret KEK metadata to use for decryption. :param project_id: Project ID associated with the encrypted datum. :returns: str -- unencrypted byte data """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def bind_kek_metadata(self, kek_meta_dto): """Key Encryption Key Metadata binding function Bind a key encryption key (KEK) metadata to the sub-system handling encryption/decryption, updating information about the key encryption key (KEK) metadata in the supplied 'kek_metadata' data-transfer-object instance, and then returning this instance. This method is invoked prior to the encrypt() method above. Implementors should fill out the supplied 'kek_meta_dto' instance (an instance of KEKMetadata above) as needed to completely describe the kek metadata and to complete the binding process. Barbican will persist the contents of this instance once this method returns. :param kek_meta_dto: Key encryption key metadata to bind, with the 'kek_label' attribute guaranteed to be unique, and the and 'plugin_name' attribute already configured. 
:returns: kek_meta_dto: Returns the specified DTO, after modifications. """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def generate_symmetric(self, generate_dto, kek_meta_dto, project_id): """Generate a new key. :param generate_dto: data transfer object for the record associated with this generation request. Some relevant parameters can be extracted from this object, including bit_length, algorithm and mode :param kek_meta_dto: Key encryption key metadata to use for decryption :param project_id: Project ID associated with the data. :returns: An object of type ResponseDTO containing encrypted data and kek_meta_extended, the former the resultant cypher text, the latter being optional per-secret metadata needed to decrypt (over and above the per-project metadata managed outside of the plugins) """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def generate_asymmetric(self, generate_dto, kek_meta_dto, project_id): """Create a new asymmetric key. :param generate_dto: data transfer object for the record associated with this generation request. Some relevant parameters can be extracted from this object, including bit_length, algorithm and passphrase :param kek_meta_dto: Key encryption key metadata to use for decryption :param project_id: Project ID associated with the data. :returns: A tuple containing objects for private_key, public_key and optionally one for passphrase. The objects will be of type ResponseDTO. Each object contains encrypted data and kek_meta_extended, the former the resultant cypher text, the latter being optional per-secret metadata needed to decrypt (over and above the per-project metadata managed outside of the plugins) """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def supports(self, type_enum, algorithm=None, bit_length=None, mode=None): """Used to determine if the plugin supports the requested operation. :param type_enum: Enumeration from the PluginSupportTypes class :param algorithm: String algorithm name if needed """ raise NotImplementedError # pragma: no cover barbican-9.1.0.dev50/barbican/plugin/crypto/__init__.py0000664000175000017500000000000013616500636023074 0ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/plugin/crypto/pkcs11.py0000664000175000017500000007020513616500636022455 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License.
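# A rough sketch of how the PKCS11 wrapper defined below is typically
# driven (the library path is hypothetical and the key label must already
# exist on the HSM; see P11CryptoPlugin in p11_crypto.py for the real call
# sites):
#
#     p11 = PKCS11(library_path='/usr/lib/libCryptoki2_64.so',
#                  login_passphrase='passphrase', rw_session=True,
#                  slot_id=1, encryption_mechanism='CKM_AES_CBC')
#     session = p11.get_session()
#     key = p11.get_key_handle('CKK_AES', 'barbican_mkek_0', session)
#     ct = p11.encrypt(key, b'plaintext', session)
#     pt = p11.decrypt('CKM_AES_CBC', key, ct['iv'], ct['ct'], session)
#     p11.return_session(session)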
import collections import textwrap import cffi from cryptography.hazmat.primitives import padding import six from barbican.common import exception from barbican.common import utils from barbican import i18n as u LOG = utils.getLogger(__name__) Attribute = collections.namedtuple("Attribute", ["type", "value"]) CKAttributes = collections.namedtuple("CKAttributes", ["template", "cffivals"]) CKMechanism = collections.namedtuple("CKMechanism", ["mech", "cffivals"]) CKR_OK = 0 CKF_RW_SESSION = (1 << 1) CKF_SERIAL_SESSION = (1 << 2) CKU_SO = 0 CKU_USER = 1 CKS_RO_PUBLIC_SESSION = 0 CKS_RO_USER_FUNCTIONS = 1 CKS_RW_PUBLIC_SESSION = 2 CKS_RW_USER_FUNCTIONS = 3 CKO_SECRET_KEY = 4 CKK_AES = 0x1f CKK_GENERIC_SECRET = 0x10 CKK_SHA256_HMAC = 0x0000002B _KEY_TYPES = { 'CKK_AES': CKK_AES, 'CKK_GENERIC_SECRET': CKK_GENERIC_SECRET, 'CKK_SHA256_HMAC': CKK_SHA256_HMAC } CKA_CLASS = 0 CKA_TOKEN = 1 CKA_PRIVATE = 2 CKA_LABEL = 3 CKA_APPLICATION = 0x10 CKA_VALUE = 0x11 CKA_OBJECT_ID = 0x12 CKA_CERTIFICATE_TYPE = 0x80 CKA_ISSUER = 0x81 CKA_SERIAL_NUMBER = 0x82 CKA_AC_ISSUER = 0x83 CKA_OWNER = 0x84 CKA_ATTR_TYPES = 0x85 CKA_TRUSTED = 0x86 CKA_CERTIFICATE_CATEGORY = 0x87 CKA_JAVA_MIDP_SECURITY_DOMAIN = 0x88 CKA_URL = 0x89 CKA_HASH_OF_SUBJECT_PUBLIC_KEY = 0x8a CKA_HASH_OF_ISSUER_PUBLIC_KEY = 0x8b CKA_CHECK_VALUE = 0x90 CKA_KEY_TYPE = 0x100 CKA_SUBJECT = 0x101 CKA_ID = 0x102 CKA_SENSITIVE = 0x103 CKA_ENCRYPT = 0x104 CKA_DECRYPT = 0x105 CKA_WRAP = 0x106 CKA_UNWRAP = 0x107 CKA_SIGN = 0x108 CKA_SIGN_RECOVER = 0x109 CKA_VERIFY = 0x10a CKA_VERIFY_RECOVER = 0x10b CKA_DERIVE = 0x10c CKA_START_DATE = 0x110 CKA_END_DATE = 0x111 CKA_MODULUS = 0x120 CKA_MODULUS_BITS = 0x121 CKA_PUBLIC_EXPONENT = 0x122 CKA_PRIVATE_EXPONENT = 0x123 CKA_PRIME_1 = 0x124 CKA_PRIME_2 = 0x125 CKA_EXPONENT_1 = 0x126 CKA_EXPONENT_2 = 0x127 CKA_COEFFICIENT = 0x128 CKA_PRIME = 0x130 CKA_SUBPRIME = 0x131 CKA_BASE = 0x132 CKA_PRIME_BITS = 0x133 CKA_SUB_PRIME_BITS = 0x134 CKA_VALUE_BITS = 0x160 CKA_VALUE_LEN = 0x161 CKA_EXTRACTABLE = 0x162 CKA_LOCAL = 0x163 CKA_NEVER_EXTRACTABLE = 0x164 CKA_ALWAYS_SENSITIVE = 0x165 CKA_KEY_GEN_MECHANISM = 0x166 CKA_MODIFIABLE = 0x170 CKA_ECDSA_PARAMS = 0x180 CKA_EC_PARAMS = 0x180 CKA_EC_POINT = 0x181 CKA_SECONDARY_AUTH = 0x200 CKA_AUTH_PIN_FLAGS = 0x201 CKA_ALWAYS_AUTHENTICATE = 0x202 CKA_WRAP_WITH_TRUSTED = 0x210 CKA_HW_FEATURE_TYPE = 0x300 CKA_RESET_ON_INIT = 0x301 CKA_HAS_RESET = 0x302 CKA_PIXEL_X = 0x400 CKA_PIXEL_Y = 0x401 CKA_RESOLUTION = 0x402 CKA_CHAR_ROWS = 0x403 CKA_CHAR_COLUMNS = 0x404 CKA_COLOR = 0x405 CKA_BITS_PER_PIXEL = 0x406 CKA_CHAR_SETS = 0x480 CKA_ENCODING_METHODS = 0x481 CKA_MIME_TYPES = 0x482 CKA_MECHANISM_TYPE = 0x500 CKA_REQUIRED_CMS_ATTRIBUTES = 0x501 CKA_DEFAULT_CMS_ATTRIBUTES = 0x502 CKA_SUPPORTED_CMS_ATTRIBUTES = 0x503 CKM_SHA256_HMAC = 0x251 CKM_AES_KEY_GEN = 0x1080 CKM_AES_CBC = 0x1082 CKM_AES_MAC = 0x1083 CKM_AES_CBC_PAD = 0x1085 CKM_AES_GCM = 0x1087 CKM_AES_KEY_WRAP = 0x1090 CKM_GENERIC_SECRET_KEY_GEN = 0x350 VENDOR_SAFENET_CKM_AES_GCM = 0x8000011c # nCipher Vendor-defined Mechanisms CKM_NC_SHA256_HMAC_KEY_GEN = 0xDE436997 _ENCRYPTION_MECHANISMS = { 'CKM_AES_CBC': CKM_AES_CBC, 'CKM_AES_GCM': CKM_AES_GCM, 'VENDOR_SAFENET_CKM_AES_GCM': VENDOR_SAFENET_CKM_AES_GCM, } _CBC_IV_SIZE = 16 # bytes _CBC_BLOCK_SIZE = 128 # bits _KEY_GEN_MECHANISMS = { 'CKM_AES_KEY_GEN': CKM_AES_KEY_GEN, 'CKM_NC_SHA256_HMAC_KEY_GEN': CKM_NC_SHA256_HMAC_KEY_GEN, 'CKM_GENERIC_SECRET_KEY_GEN': CKM_GENERIC_SECRET_KEY_GEN, } _KEY_WRAP_MECHANISMS = { 'CKM_SHA256_HMAC': CKM_SHA256_HMAC, 'CKM_AES_MAC': CKM_AES_MAC } CKM_NAMES = 
dict() CKM_NAMES.update(_ENCRYPTION_MECHANISMS) CKM_NAMES.update(_KEY_GEN_MECHANISMS) CKM_NAMES.update(_KEY_WRAP_MECHANISMS) ERROR_CODES = { 1: 'CKR_CANCEL', 2: 'CKR_HOST_MEMORY', 3: 'CKR_SLOT_ID_INVALID', 5: 'CKR_GENERAL_ERROR', 6: 'CKR_FUNCTION_FAILED', 7: 'CKR_ARGUMENTS_BAD', 8: 'CKR_NO_EVENT', 9: 'CKR_NEED_TO_CREATE_THREADS', 0xa: 'CKR_CANT_LOCK', 0x10: 'CKR_ATTRIBUTE_READ_ONLY', 0x11: 'CKR_ATTRIBUTE_SENSITIVE', 0x12: 'CKR_ATTRIBUTE_TYPE_INVALID', 0x13: 'CKR_ATTRIBUTE_VALUE_INVALID', 0x20: 'CKR_DATA_INVALID', 0x21: 'CKR_DATA_LEN_RANGE', 0x30: 'CKR_DEVICE_ERROR', 0x31: 'CKR_DEVICE_MEMORY', 0x32: 'CKR_DEVICE_REMOVED', 0x40: 'CKR_ENCRYPTED_DATA_INVALID', 0x41: 'CKR_ENCRYPTED_DATA_LEN_RANGE', 0x50: 'CKR_FUNCTION_CANCELED', 0x51: 'CKR_FUNCTION_NOT_PARALLEL', 0x54: 'CKR_FUNCTION_NOT_SUPPORTED', 0x60: 'CKR_KEY_HANDLE_INVALID', 0x62: 'CKR_KEY_SIZE_RANGE', 0x63: 'CKR_KEY_TYPE_INCONSISTENT', 0x64: 'CKR_KEY_NOT_NEEDED', 0x65: 'CKR_KEY_CHANGED', 0x66: 'CKR_KEY_NEEDED', 0x67: 'CKR_KEY_INDIGESTIBLE', 0x68: 'CKR_KEY_FUNCTION_NOT_PERMITTED', 0x69: 'CKR_KEY_NOT_WRAPPABLE', 0x6a: 'CKR_KEY_UNEXTRACTABLE', 0x70: 'CKR_MECHANISM_INVALID', 0x71: 'CKR_MECHANISM_PARAM_INVALID', 0x82: 'CKR_OBJECT_HANDLE_INVALID', 0x90: 'CKR_OPERATION_ACTIVE', 0x91: 'CKR_OPERATION_NOT_INITIALIZED', 0xa0: 'CKR_PIN_INCORRECT', 0xa1: 'CKR_PIN_INVALID', 0xa2: 'CKR_PIN_LEN_RANGE', 0xa3: 'CKR_PIN_EXPIRED', 0xa4: 'CKR_PIN_LOCKED', 0xb0: 'CKR_SESSION_CLOSED', 0xb1: 'CKR_SESSION_COUNT', 0xb3: 'CKR_SESSION_HANDLE_INVALID', 0xb4: 'CKR_SESSION_PARALLEL_NOT_SUPPORTED', 0xb5: 'CKR_SESSION_READ_ONLY', 0xb6: 'CKR_SESSION_EXISTS', 0xb7: 'CKR_SESSION_READ_ONLY_EXISTS', 0xb8: 'CKR_SESSION_READ_WRITE_SO_EXISTS', 0xc0: 'CKR_SIGNATURE_INVALID', 0xc1: 'CKR_SIGNATURE_LEN_RANGE', 0xd0: 'CKR_TEMPLATE_INCOMPLETE', 0xd1: 'CKR_TEMPLATE_INCONSISTENT', 0xe0: 'CKR_TOKEN_NOT_PRESENT', 0xe1: 'CKR_TOKEN_NOT_RECOGNIZED', 0xe2: 'CKR_TOKEN_WRITE_PROTECTED', 0xf0: 'CKR_UNWRAPPING_KEY_HANDLE_INVALID', 0xf1: 'CKR_UNWRAPPING_KEY_SIZE_RANGE', 0xf2: 'CKR_UNWRAPPING_KEY_TYPE_INCONSISTENT', 0x100: 'CKR_USER_ALREADY_LOGGED_IN', 0x101: 'CKR_USER_NOT_LOGGED_IN', 0x102: 'CKR_USER_PIN_NOT_INITIALIZED', 0x103: 'CKR_USER_TYPE_INVALID', 0x104: 'CKR_USER_ANOTHER_ALREADY_LOGGED_IN', 0x105: 'CKR_USER_TOO_MANY_TYPES', 0x110: 'CKR_WRAPPED_KEY_INVALID', 0x112: 'CKR_WRAPPED_KEY_LEN_RANGE', 0x113: 'CKR_WRAPPING_KEY_HANDLE_INVALID', 0x114: 'CKR_WRAPPING_KEY_SIZE_RANGE', 0x115: 'CKR_WRAPPING_KEY_TYPE_INCONSISTENT', 0x120: 'CKR_RANDOM_SEED_NOT_SUPPORTED', 0x121: 'CKR_RANDOM_NO_RNG', 0x130: 'CKR_DOMAIN_PARAMS_INVALID', 0x150: 'CKR_BUFFER_TOO_SMALL', 0x160: 'CKR_SAVED_STATE_INVALID', 0x170: 'CKR_INFORMATION_SENSITIVE', 0x180: 'CKR_STATE_UNSAVEABLE', 0x190: 'CKR_CRYPTOKI_NOT_INITIALIZED', 0x191: 'CKR_CRYPTOKI_ALREADY_INITIALIZED', 0x1a0: 'CKR_MUTEX_BAD', 0x1a1: 'CKR_MUTEX_NOT_LOCKED', 0x200: 'CKR_FUNCTION_REJECTED', 1 << 31: 'CKR_VENDOR_DEFINED' } def build_ffi(): ffi = cffi.FFI() ffi.cdef(textwrap.dedent(""" typedef unsigned char CK_BYTE; typedef unsigned long CK_ULONG; typedef unsigned long CK_RV; typedef unsigned long CK_SESSION_HANDLE; typedef unsigned long CK_OBJECT_HANDLE; typedef unsigned long CK_SLOT_ID; typedef unsigned long CK_FLAGS; typedef unsigned long CK_STATE; typedef unsigned long CK_USER_TYPE; typedef unsigned char * CK_UTF8CHAR_PTR; typedef ... 
*CK_NOTIFY; typedef unsigned long ck_attribute_type_t; struct ck_attribute { ck_attribute_type_t type; void *value; unsigned long value_len; }; typedef struct ck_attribute CK_ATTRIBUTE; typedef CK_ATTRIBUTE *CK_ATTRIBUTE_PTR; typedef unsigned long ck_mechanism_type_t; struct ck_mechanism { ck_mechanism_type_t mechanism; void *parameter; unsigned long parameter_len; }; typedef struct ck_mechanism CK_MECHANISM; typedef CK_MECHANISM *CK_MECHANISM_PTR; typedef CK_BYTE *CK_BYTE_PTR; typedef CK_ULONG *CK_ULONG_PTR; typedef struct ck_session_info { CK_SLOT_ID slot_id; CK_STATE state; CK_FLAGS flags; unsigned long device_error; } CK_SESSION_INFO; typedef CK_SESSION_INFO *CK_SESSION_INFO_PTR; typedef struct CK_AES_GCM_PARAMS { char * pIv; unsigned long ulIvLen; unsigned long ulIvBits; char * pAAD; unsigned long ulAADLen; unsigned long ulTagBits; } CK_AES_GCM_PARAMS; """)) # FUNCTIONS ffi.cdef(textwrap.dedent(""" CK_RV C_Initialize(void *); CK_RV C_Finalize(void *); CK_RV C_OpenSession(CK_SLOT_ID, CK_FLAGS, void *, CK_NOTIFY, CK_SESSION_HANDLE *); CK_RV C_CloseSession(CK_SESSION_HANDLE); CK_RV C_GetSessionInfo(CK_SESSION_HANDLE, CK_SESSION_INFO_PTR); CK_RV C_Login(CK_SESSION_HANDLE, CK_USER_TYPE, CK_UTF8CHAR_PTR, CK_ULONG); CK_RV C_GetAttributeValue(CK_SESSION_HANDLE, CK_OBJECT_HANDLE, CK_ATTRIBUTE *, CK_ULONG); CK_RV C_SetAttributeValue(CK_SESSION_HANDLE, CK_OBJECT_HANDLE, CK_ATTRIBUTE *, CK_ULONG); CK_RV C_DestroyObject(CK_SESSION_HANDLE, CK_OBJECT_HANDLE); CK_RV C_FindObjectsInit(CK_SESSION_HANDLE, CK_ATTRIBUTE *, CK_ULONG); CK_RV C_FindObjects(CK_SESSION_HANDLE, CK_OBJECT_HANDLE *, CK_ULONG, CK_ULONG *); CK_RV C_FindObjectsFinal(CK_SESSION_HANDLE); CK_RV C_GenerateKey(CK_SESSION_HANDLE, CK_MECHANISM *, CK_ATTRIBUTE *, CK_ULONG, CK_OBJECT_HANDLE *); CK_RV C_UnwrapKey(CK_SESSION_HANDLE, CK_MECHANISM *, CK_OBJECT_HANDLE, CK_BYTE *, CK_ULONG, CK_ATTRIBUTE *, CK_ULONG, CK_OBJECT_HANDLE *); CK_RV C_WrapKey(CK_SESSION_HANDLE, CK_MECHANISM_PTR, CK_OBJECT_HANDLE, CK_OBJECT_HANDLE, CK_BYTE_PTR, CK_ULONG_PTR); CK_RV C_EncryptInit(CK_SESSION_HANDLE, CK_MECHANISM_PTR, CK_OBJECT_HANDLE); CK_RV C_Encrypt(CK_SESSION_HANDLE, CK_BYTE_PTR, CK_ULONG, CK_BYTE_PTR, CK_ULONG_PTR); CK_RV C_DecryptInit(CK_SESSION_HANDLE, CK_MECHANISM_PTR, CK_OBJECT_HANDLE); CK_RV C_Decrypt(CK_SESSION_HANDLE, CK_BYTE_PTR, CK_ULONG, CK_BYTE_PTR, CK_ULONG_PTR); CK_RV C_SignInit(CK_SESSION_HANDLE, CK_MECHANISM_PTR, CK_OBJECT_HANDLE); CK_RV C_Sign(CK_SESSION_HANDLE, CK_BYTE_PTR, CK_ULONG, CK_BYTE_PTR, CK_ULONG_PTR); CK_RV C_VerifyInit(CK_SESSION_HANDLE, CK_MECHANISM_PTR, CK_OBJECT_HANDLE); CK_RV C_Verify(CK_SESSION_HANDLE, CK_BYTE_PTR, CK_ULONG, CK_BYTE_PTR, CK_ULONG); CK_RV C_GenerateRandom(CK_SESSION_HANDLE, CK_BYTE_PTR, CK_ULONG); CK_RV C_SeedRandom(CK_SESSION_HANDLE, CK_BYTE_PTR, CK_ULONG); """)) return ffi class PKCS11(object): def __init__(self, library_path, login_passphrase, rw_session, slot_id, encryption_mechanism=None, ffi=None, algorithm=None, seed_random_buffer=None, generate_iv=None, always_set_cka_sensitive=None, hmac_keywrap_mechanism='CKM_SHA256_HMAC'): if algorithm: LOG.warning("WARNING: Using deprecated 'algorithm' argument.") encryption_mechanism = encryption_mechanism or algorithm if encryption_mechanism not in _ENCRYPTION_MECHANISMS: raise ValueError("Invalid encryption_mechanism.") self.encrypt_mech = _ENCRYPTION_MECHANISMS[encryption_mechanism] self.encrypt = getattr( self, '_{}_encrypt'.format(encryption_mechanism) ) if hmac_keywrap_mechanism not in _KEY_WRAP_MECHANISMS: raise ValueError("Invalid HMAC keywrap 
mechanism") self.ffi = ffi or build_ffi() self.lib = self.ffi.dlopen(library_path) rv = self.lib.C_Initialize(self.ffi.NULL) self._check_error(rv) # Session options self.login_passphrase = _to_bytes(login_passphrase) self.rw_session = rw_session self.slot_id = slot_id # Algorithm options self.algorithm = CKM_NAMES[encryption_mechanism] self.blocksize = 16 self.noncesize = 12 self.gcmtagsize = 16 self.generate_iv = generate_iv self.always_set_cka_sensitive = always_set_cka_sensitive self.hmac_keywrap_mechanism = CKM_NAMES[hmac_keywrap_mechanism] # Validate configuration and RNG session = self.get_session() if seed_random_buffer is not None: self._seed_random(session, seed_random_buffer) self._rng_self_test(session) self.return_session(session) def get_session(self): session = self._open_session(self.slot_id) # Get session info to check user state session_info = self._get_session_info(session) if session_info.state in (CKS_RO_PUBLIC_SESSION, CKS_RW_PUBLIC_SESSION): # Login public sessions self._login(self.login_passphrase, session) return session def return_session(self, session): self._close_session(session) def generate_random(self, length, session): buf = self._generate_random(length, session) return self.ffi.buffer(buf)[:] def get_key_handle(self, key_type, label, session): attributes = self._build_attributes([ Attribute(CKA_CLASS, CKO_SECRET_KEY), Attribute(CKA_KEY_TYPE, _KEY_TYPES[key_type]), Attribute(CKA_LABEL, str(label)) ]) rv = self.lib.C_FindObjectsInit( session, attributes.template, len(attributes.template) ) self._check_error(rv) count = self.ffi.new("CK_ULONG *") obj_handle_ptr = self.ffi.new("CK_OBJECT_HANDLE[2]") rv = self.lib.C_FindObjects(session, obj_handle_ptr, 2, count) self._check_error(rv) key = None if count[0] == 1: key = obj_handle_ptr[0] rv = self.lib.C_FindObjectsFinal(session) self._check_error(rv) if count[0] > 1: raise exception.P11CryptoPluginKeyException() return key def _CKM_AES_CBC_encrypt(self, key, pt_data, session): iv = self._generate_random(_CBC_IV_SIZE, session) ck_mechanism = self._build_cbc_mechanism(iv) rv = self.lib.C_EncryptInit(session, ck_mechanism.mech, key) self._check_error(rv) padder = padding.PKCS7(_CBC_BLOCK_SIZE).padder() padded_pt_data = padder.update(pt_data) padded_pt_data += padder.finalize() pt_len = len(padded_pt_data) ct_len = self.ffi.new("CK_ULONG *", pt_len) ct = self.ffi.new("CK_BYTE[{}]".format(ct_len[0])) rv = self.lib.C_Encrypt(session, padded_pt_data, pt_len, ct, ct_len) self._check_error(rv) return { "iv": self.ffi.buffer(iv)[:], "ct": self.ffi.buffer(ct, ct_len[0])[:] } def _build_cbc_mechanism(self, iv): mech = self.ffi.new("CK_MECHANISM *") mech.mechanism = self.encrypt_mech mech.parameter = iv mech.parameter_len = _CBC_IV_SIZE return CKMechanism(mech, None) def _CKM_AES_CBC_decrypt(self, key, iv, ct_data, session): iv = self.ffi.new("CK_BYTE[{}]".format(len(iv)), iv) ck_mechanism = self._build_cbc_mechanism(iv) rv = self.lib.C_DecryptInit(session, ck_mechanism.mech, key) self._check_error(rv) ct_len = len(ct_data) pt_len = self.ffi.new("CK_ULONG *", ct_len) pt = self.ffi.new("CK_BYTE[{0}]".format(pt_len[0])) rv = self.lib.C_Decrypt(session, ct_data, ct_len, pt, pt_len) self._check_error(rv) pt = self.ffi.buffer(pt, pt_len[0])[:] unpadder = padding.PKCS7(_CBC_BLOCK_SIZE).unpadder() unpadded_pt = unpadder.update(pt) unpadded_pt += unpadder.finalize() return unpadded_pt def _VENDOR_SAFENET_CKM_AES_GCM_encrypt(self, key, pt_data, session): iv = None if self.generate_iv: iv = self._generate_random(self.noncesize, session) 
ck_mechanism = self._build_gcm_mechanism(iv) rv = self.lib.C_EncryptInit(session, ck_mechanism.mech, key) self._check_error(rv) pt_len = len(pt_data) if self.generate_iv: ct_len = self.ffi.new("CK_ULONG *", pt_len + self.gcmtagsize) else: ct_len = self.ffi.new("CK_ULONG *", pt_len + self.gcmtagsize * 2) ct = self.ffi.new("CK_BYTE[{0}]".format(ct_len[0])) rv = self.lib.C_Encrypt(session, pt_data, pt_len, ct, ct_len) self._check_error(rv) if self.generate_iv: return { "iv": self.ffi.buffer(iv)[:], "ct": self.ffi.buffer(ct, ct_len[0])[:] } else: # HSM-generated IVs are appended to the end of the ciphertext return { "iv": self.ffi.buffer(ct, ct_len[0])[-self.gcmtagsize:], "ct": self.ffi.buffer(ct, ct_len[0])[:-self.gcmtagsize] } def _build_gcm_mechanism(self, iv=None): mech = self.ffi.new("CK_MECHANISM *") mech.mechanism = self.algorithm gcm = self.ffi.new("CK_AES_GCM_PARAMS *") if iv: iv_len = len(iv) gcm.pIv = iv gcm.ulIvLen = iv_len gcm.ulIvBits = iv_len * 8 gcm.ulTagBits = self.gcmtagsize * 8 mech.parameter = gcm mech.parameter_len = 48 # sizeof(CK_AES_GCM_PARAMS) return CKMechanism(mech, gcm) def _VENDOR_SAFENET_CKM_AES_GCM_decrypt(self, key, iv, ct_data, session): iv = self.ffi.new("CK_BYTE[{0}]".format(len(iv)), iv) ck_mechanism = self._build_gcm_mechanism(iv) rv = self.lib.C_DecryptInit(session, ck_mechanism.mech, key) self._check_error(rv) ct_len = len(ct_data) pt_len = self.ffi.new("CK_ULONG *", ct_len) pt = self.ffi.new("CK_BYTE[{0}]".format(pt_len[0])) rv = self.lib.C_Decrypt(session, ct_data, ct_len, pt, pt_len) self._check_error(rv) pt = self.ffi.buffer(pt, pt_len[0])[:] # Secrets stored by the old code use 16 byte IVs, while the new code # uses 12 byte IVs to be more efficient with GCM. We can use this to # detect secrets stored by the old code and perform padding removal. # If we find a 16 byte IV, we check to make sure the decrypted plain # text is a multiple of the block size, and then that the end of the # plain text looks like padding, i.e., the last byte is a value # between 1 and blocksize, and that there are that many consecutive # bytes of that value at the end. If all of that is true, we remove # the found padding.
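# For example (illustrative values only): with the 16 byte blocksize used here, a decrypted buffer ending in b'\x04\x04\x04\x04' whose total length is a multiple of 16 would be treated as legacy output and have its four trailing 0x04 bytes stripped.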
last_byte = ord(pt[-1:]) if len(iv) == self.blocksize and \ (len(pt) % self.blocksize) == 0 and \ 1 <= last_byte <= self.blocksize and \ pt.endswith(pt[-1:] * last_byte): pt = pt[:-last_byte] return pt def _CKM_AES_GCM_encrypt(self, key, pt_data, session): return self._VENDOR_SAFENET_CKM_AES_GCM_encrypt(key, pt_data, session) def _CKM_AES_GCM_decrypt(self, key, iv, ct_data, session): return self._VENDOR_SAFENET_CKM_AES_GCM_decrypt( key, iv, ct_data, session ) def decrypt(self, mechanism, key, iv, ct_data, session): if mechanism not in _ENCRYPTION_MECHANISMS: raise ValueError(u._("Unsupported decryption mechanism")) return getattr(self, '_{}_decrypt'.format(mechanism))( key, iv, ct_data, session ) def generate_key(self, key_type, key_length, mechanism, session, key_label=None, master_key=False, encrypt=False, sign=False, wrap=False): if not any((encrypt, sign, wrap)): raise exception.P11CryptoPluginException() if master_key and not key_label: raise ValueError(u._("key_label must be set for master_keys")) token = master_key extractable = not master_key # in some HSMs extractable keys cannot be marked sensitive sensitive = self.always_set_cka_sensitive or not extractable ck_attributes = [ Attribute(CKA_CLASS, CKO_SECRET_KEY), Attribute(CKA_KEY_TYPE, _KEY_TYPES[key_type]), Attribute(CKA_VALUE_LEN, key_length), Attribute(CKA_TOKEN, token), Attribute(CKA_PRIVATE, True), Attribute(CKA_SENSITIVE, sensitive), Attribute(CKA_ENCRYPT, encrypt), Attribute(CKA_DECRYPT, encrypt), Attribute(CKA_SIGN, sign), Attribute(CKA_VERIFY, sign), Attribute(CKA_WRAP, wrap), Attribute(CKA_UNWRAP, wrap), Attribute(CKA_EXTRACTABLE, extractable) ] if master_key: ck_attributes.append(Attribute(CKA_LABEL, key_label)) ck_attributes = self._build_attributes(ck_attributes) mech = self.ffi.new("CK_MECHANISM *") mech.mechanism = _KEY_GEN_MECHANISMS[mechanism] obj_handle_ptr = self.ffi.new("CK_OBJECT_HANDLE *") rv = self.lib.C_GenerateKey( session, mech, ck_attributes.template, len(ck_attributes.template), obj_handle_ptr ) self._check_error(rv) return obj_handle_ptr[0] def wrap_key(self, wrapping_key, key_to_wrap, session): mech = self.ffi.new("CK_MECHANISM *") mech.mechanism = CKM_AES_CBC_PAD iv = self._generate_random(16, session) mech.parameter = iv mech.parameter_len = 16 # Ask for length of the wrapped key wrapped_key_len = self.ffi.new("CK_ULONG *") rv = self.lib.C_WrapKey( session, mech, wrapping_key, key_to_wrap, self.ffi.NULL, wrapped_key_len ) self._check_error(rv) # Wrap key wrapped_key = self.ffi.new("CK_BYTE[{0}]".format(wrapped_key_len[0])) rv = self.lib.C_WrapKey( session, mech, wrapping_key, key_to_wrap, wrapped_key, wrapped_key_len ) self._check_error(rv) return { 'iv': self.ffi.buffer(iv)[:], 'wrapped_key': self.ffi.buffer(wrapped_key, wrapped_key_len[0])[:] } def unwrap_key(self, wrapping_key, iv, wrapped_key, session): ck_iv = self.ffi.new("CK_BYTE[]", iv) ck_wrapped_key = self.ffi.new("CK_BYTE[]", wrapped_key) unwrapped_key = self.ffi.new("CK_OBJECT_HANDLE *") mech = self.ffi.new("CK_MECHANISM *") mech.mechanism = CKM_AES_CBC_PAD mech.parameter = ck_iv mech.parameter_len = len(iv) ck_attributes = self._build_attributes([ Attribute(CKA_CLASS, CKO_SECRET_KEY), Attribute(CKA_KEY_TYPE, CKK_AES), Attribute(CKA_TOKEN, False), Attribute(CKA_PRIVATE, True), Attribute(CKA_SENSITIVE, True), Attribute(CKA_ENCRYPT, True), Attribute(CKA_DECRYPT, True), Attribute(CKA_EXTRACTABLE, True) ]) rv = self.lib.C_UnwrapKey( session, mech, wrapping_key, ck_wrapped_key, len(wrapped_key), ck_attributes.template, 
len(ck_attributes.template), unwrapped_key ) self._check_error(rv) return unwrapped_key[0] def compute_hmac(self, hmac_key, data, session): mech = self.ffi.new("CK_MECHANISM *") mech.mechanism = self.hmac_keywrap_mechanism rv = self.lib.C_SignInit(session, mech, hmac_key) self._check_error(rv) ck_data = self.ffi.new("CK_BYTE[]", data) buf = self.ffi.new("CK_BYTE[32]") buf_len = self.ffi.new("CK_ULONG *", 32) rv = self.lib.C_Sign(session, ck_data, len(data), buf, buf_len) self._check_error(rv) return self.ffi.buffer(buf, buf_len[0])[:] def verify_hmac(self, hmac_key, sig, data, session): mech = self.ffi.new("CK_MECHANISM *") mech.mechanism = self.hmac_keywrap_mechanism rv = self.lib.C_VerifyInit(session, mech, hmac_key) self._check_error(rv) ck_data = self.ffi.new("CK_BYTE[]", data) ck_sig = self.ffi.new("CK_BYTE[]", sig) rv = self.lib.C_Verify(session, ck_data, len(data), ck_sig, len(sig)) self._check_error(rv) def destroy_object(self, obj_handle, session): rv = self.lib.C_DestroyObject(session, obj_handle) self._check_error(rv) def finalize(self): rv = self.lib.C_Finalize(self.ffi.NULL) self._check_error(rv) def _check_error(self, value): if value != CKR_OK: code = ERROR_CODES.get(value, 'CKR_????') hex_code = "{hex} {code}".format(hex=hex(value), code=code) if code == 'CKR_TOKEN_NOT_PRESENT': raise exception.P11CryptoTokenException(slot_id=self.slot_id) raise exception.P11CryptoPluginException(u._( "HSM returned response code: {code}").format(code=hex_code)) def _seed_random(self, session, seed_random_buffer): """Call the C_SeedRandom() function with the seed_random data""" buf = self.ffi.new("CK_BYTE[]", seed_random_buffer.encode()) rv = self.lib.C_SeedRandom(session, buf, len(seed_random_buffer)) self._check_error(rv) def _generate_random(self, length, session): buf = self.ffi.new("CK_BYTE[{0}]".format(length)) rv = self.lib.C_GenerateRandom(session, buf, length) self._check_error(rv) return buf def _build_attributes(self, attrs): attributes = self.ffi.new("CK_ATTRIBUTE[{0}]".format(len(attrs))) val_list = [] for index, attr in enumerate(attrs): attributes[index].type = attr.type if isinstance(attr.value, bool): val_list.append(self.ffi.new("unsigned char *", int(attr.value))) attributes[index].value_len = 1 # sizeof(char) is 1 elif isinstance(attr.value, int): # second because bools are also considered ints val_list.append(self.ffi.new("CK_ULONG *", attr.value)) attributes[index].value_len = 8 elif isinstance(attr.value, str): buf = attr.value.encode('utf-8') val_list.append(self.ffi.new("char []", buf)) attributes[index].value_len = len(buf) elif isinstance(attr.value, bytes): val_list.append(self.ffi.new("char []", attr.value)) attributes[index].value_len = len(attr.value) else: raise TypeError(u._("Unknown attribute type provided.")) attributes[index].value = val_list[-1] return CKAttributes(attributes, val_list) def _open_session(self, slot): session_ptr = self.ffi.new("CK_SESSION_HANDLE *") flags = CKF_SERIAL_SESSION if self.rw_session: flags |= CKF_RW_SESSION rv = self.lib.C_OpenSession(slot, flags, self.ffi.NULL, self.ffi.NULL, session_ptr) self._check_error(rv) return session_ptr[0] def _close_session(self, session): rv = self.lib.C_CloseSession(session) self._check_error(rv) def _get_session_info(self, session): session_info_ptr = self.ffi.new("CK_SESSION_INFO *") rv = self.lib.C_GetSessionInfo(session, session_info_ptr) self._check_error(rv) return session_info_ptr[0] def _login(self, password, session): rv = self.lib.C_Login(session, CKU_USER, password, len(password)) 
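# CKU_USER is the normal (non-security-officer) PKCS#11 user; any non-CKR_OK return code from C_Login is raised as an exception by the _check_error() call below.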
self._check_error(rv) def _rng_self_test(self, session): test_random = self.generate_random(100, session) if test_random == b'\x00' * 100: raise exception.P11CryptoPluginException( u._("Apparent RNG self-test failure.")) def _to_bytes(string): if isinstance(string, six.binary_type): return string else: return string.encode('UTF-8') barbican-9.1.0.dev50/barbican/plugin/symantec.py0000664000175000017500000002603613616500636021661 0ustar sahidsahid00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ Barbican certificate processing plugins and support. """ from oslo_config import cfg from requests import exceptions as request_exceptions from symantecssl.core import Symantec from symantecssl import exceptions as symantec_exceptions from barbican.common import config from barbican import i18n as u from barbican.plugin.interface import certificate_manager as cert CONF = config.new_config() symantec_plugin_group = cfg.OptGroup(name='symantec_plugin', title='Symantec Plugin Options') symantec_plugin_opts = [ cfg.StrOpt('username', help=u._('Symantec username for authentication')), cfg.StrOpt('password', help=u._('Symantec password for authentication'), secret=True), cfg.StrOpt('url', help=u._('Domain of Symantec API')) ] CONF.register_group(symantec_plugin_group) CONF.register_opts(symantec_plugin_opts, group=symantec_plugin_group) config.parse_args(CONF) class SymantecCertificatePlugin(cert.CertificatePluginBase): """Symantec certificate plugin.""" def __init__(self, conf=CONF): self.username = conf.symantec_plugin.username self.password = conf.symantec_plugin.password self.url = conf.symantec_plugin.url if self.username is None: raise ValueError(u._("username is required")) if self.password is None: raise ValueError(u._("password is required")) if self.url is None: raise ValueError(u._("url is required")) def get_default_ca_name(self): return "Symantec CA" def get_default_signing_cert(self): # TODO(chellygel) Add code to get the signing cert return None def get_default_intermediates(self): # TODO(chellygel) Add code to get the cert chain return None def issue_certificate_request(self, order_id, order_meta, plugin_meta, barbican_meta_dto): """Create the initial order with CA :param order_id: ID associated with the order :param order_meta: Dict of meta-data associated with the order. :param plugin_meta: Plugin meta-data previously set by calls to this plugin. Plugins may also update/add information here which Barbican will persist on their behalf. :param barbican_meta_dto: additional data needed to process order. 
:returns: ResultDTO """ successful, error_msg, can_retry = self._ca_create_order(order_meta, plugin_meta) status = cert.CertificateStatus.CA_UNAVAILABLE_FOR_REQUEST message = None if successful: status = cert.CertificateStatus.WAITING_FOR_CA elif can_retry: status = cert.CertificateStatus.CLIENT_DATA_ISSUE_SEEN message = error_msg return cert.ResultDTO(status=status, status_message=message) def modify_certificate_request(self, order_id, order_meta, plugin_meta, barbican_meta_dto): """Update the order meta-data :param order_id: ID associated with the order :param order_meta: Dict of meta-data associated with the order. :param plugin_meta: Plugin meta-data previously set by calls to this plugin. Plugins may also update/add information here which Barbican will persist on their behalf. :param barbican_meta_dto: additional data needed to process order. """ raise NotImplementedError # pragma: no cover def cancel_certificate_request(self, order_id, order_meta, plugin_meta, barbican_meta_dto): """Cancel the order :param order_id: ID associated with the order :param order_meta: Dict of meta-data associated with the order. :param plugin_meta: Plugin meta-data previously set by calls to this plugin. Plugins may also update/add information here which Barbican will persist on their behalf. :param barbican_meta_dto: additional data needed to process order. """ raise NotImplementedError # pragma: no cover def check_certificate_status(self, order_id, order_meta, plugin_meta, barbican_meta_dto): """Check status of the order :param order_id: ID associated with the order :param order_meta: Dict of meta-data associated with the order. :param plugin_meta: Plugin meta-data previously set by calls to this plugin. Plugins may also update/add information here which Barbican will persist on their behalf. :param barbican_meta_dto: additional data needed to process order. """ raise NotImplementedError # pragma: no cover def supports(self, certificate_spec): """Indicates if the plugin supports the certificate type. :param certificate_spec: Contains details on the certificate to generate the certificate order :returns: boolean indicating if the plugin supports the certificate type """ # TODO(chellygel): Research what certificate types are supported by # symantec. Returning True for testing purposes. return True def _ca_create_order(self, order_meta, plugin_meta): """Creates an order with the Symantec CA. The PartnerOrderId and GeoTrustOrderId are returned and stored in plugin_meta. PartnerCode and ProductCode are also stored in plugin_meta for future use. All required order parameters must be stored as a dict in order_meta. Required fields are: PartnerCode, ProductCode, PartnerOrderId, OrganizationName, AddressLine1, City, Region, PostalCode, Country, OrganizationPhone, ValidityPeriod, ServerCount, WebServerType, AdminContactFirstName, AdminContactLastName, AdminContactPhone, AdminContactEmail, AdminContactTitle, AdminContactAddressLine1, AdminContactCity, AdminContactRegion, AdminContactPostalCode, AdminContactCountry, BillingContact*, TechContact*, and CSR. *The Billing and Tech contact information follows the same convention as the AdminContact fields. Optional Parameters: TechSameAsAdmin, BillSameAsAdmin, more options can be found in Symantec's API docs. Contact Symantec for the API document. :returns: tuple with success, error message, and can retry """ api = Symantec(self.username, self.password, self.url) try: order_data = api.order(**order_meta) # GeotrustOrderId is used to handle emails from Symantec.
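# Illustratively (placeholder values, not real order data), a successful call leaves plugin_meta holding {"GeotrustOrderID": "...", "PartnerOrderID": "...", "PartnerCode": "...", "ProductCode": "..."}.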
# PartnerCode and ProductCode are being stored in plugin_meta for # convenience when calling _ca_get_order_status, _ca_modify_order, etc. plugin_meta["GeotrustOrderID"] = order_data["GeotrustOrderID"] plugin_meta["PartnerOrderID"] = order_data["PartnerOrderID"] plugin_meta["PartnerCode"] = order_meta["OrderDetails"]["PartnerCode"] plugin_meta["ProductCode"] = order_meta["OrderDetails"]["ProductCode"] return True, None, False except symantec_exceptions.SymantecError as e: return False, e, False except request_exceptions.RequestException as e: return False, e, True def _ca_get_order_status(self, plugin_meta): """Sends a request to the Symantec CA for details on an order. Parameters needed for GetOrderByPartnerOrderID: plugin_meta parameters: PartnerOrderId, PartnerCode If the order is complete, the Certificate is returned as a string. returns: tuple with success, error message, can retry, and the certificate (if available). """ api = Symantec(self.username, self.password, self.url) order_details = { "PartnerOrderID": plugin_meta["PartnerOrderID"], "PartnerCode": plugin_meta["PartnerCode"], "ReturnCertificateInfo": "TRUE", "ReturnFulfillment": "TRUE", "ReturnCaCerts": "TRUE", } try: order_data = api.get_order_by_partner_order_id(**order_details) if order_data["OrderInfo"]["OrderState"] == "COMPLETED": ca = order_data["Fulfillment"]["CACertificates"]["CACertificate"] return True, None, False, ca["CACert"] return True, None, False, None except symantec_exceptions.SymantecError as e: return False, e, False, None except request_exceptions.RequestException as e: return False, e, True, None def _ca_modify_order(self, order_meta, plugin_meta): """Sends a request to the Symantec CA to modify an order. Parameters needed for modifyOrder: PartnerOrderID - Needed to specify order PartnerCode - Needed to specify order ProductCode - Needed to specify order Also need a dict, order_meta with the parameters/values to modify. returns: tuple with success, error message, and can retry. """ api = Symantec(self.username, self.password, self.url) order_details = { "PartnerOrderID": plugin_meta["PartnerOrderID"], "PartnerCode": plugin_meta["PartnerCode"], "ProductCode": plugin_meta["ProductCode"], } order_details.update(order_meta) try: api.validate_order_parameters(**order_details) return True, None, False except symantec_exceptions.SymantecError as e: return False, e, False except request_exceptions.RequestException as e: return False, e, True def _ca_cancel_order(self, plugin_meta): """Sends a request to the Symantec CA to cancel an order. Parameters needed for modifyOrder: PartnerOrderID - Needed to specify order PartnerCode - Needed to specify order ProductCode - Needed to specify order returns: tuple with success, error message, and can retry. """ api = Symantec(self.username, self.password, self.url) order_details = { "PartnerOrderID": plugin_meta["PartnerOrderID"], "PartnerCode": plugin_meta["PartnerCode"], "ProductCode": plugin_meta["ProductCode"], "ModifyOrderOperation": "CANCEL", } try: api.modify_order(**order_details) return True, None, False except symantec_exceptions.SymantecError as e: return False, e, False except request_exceptions.RequestException as e: return False, e, True barbican-9.1.0.dev50/barbican/plugin/store_crypto.py0000664000175000017500000003106613616500636022571 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import base64 from barbican.common import config from barbican.common import utils from barbican.model import models from barbican.model import repositories from barbican.plugin.crypto import base from barbican.plugin.crypto import manager from barbican.plugin.interface import secret_store as sstore CONF = config.new_config() config.parse_args(CONF) class StoreCryptoContext(object): """Context for crypto-adapter secret store plugins. This context object allows access to core Barbican resources such as datastore models. """ def __init__( self, project_model, secret_model=None, private_secret_model=None, public_secret_model=None, passphrase_secret_model=None, content_type=None): self.secret_model = secret_model self.private_secret_model = private_secret_model self.public_secret_model = public_secret_model self.passphrase_secret_model = passphrase_secret_model self.project_model = project_model self.content_type = content_type class StoreCryptoAdapterPlugin(object): """Secret store plugin adapting to 'crypto' devices as backend. HSM-style 'crypto' devices perform encryption/decryption processing but do not actually store the encrypted information, unlike other 'secret store' plugins that do provide storage. Hence, this adapter bridges between these two plugin styles, providing Barbican persistence services as needed to store information. Note that this class does not inherit from SecretStoreBase, as it also requires access to lower-level datastore entities such as KEKDatum. This additional information is passed in via the 'context' parameter. """ def __init__(self): super(StoreCryptoAdapterPlugin, self).__init__() def store_secret(self, secret_dto, context): """Store a secret. :param secret_dto: SecretDTO for secret :param context: StoreCryptoContext for secret :returns: an optional dictionary containing metadata about the secret """ # Find HSM-style 'crypto' plugin. encrypting_plugin = manager.get_manager().get_plugin_store_generate( base.PluginSupportTypes.ENCRYPT_DECRYPT, project_id=context.project_model.id ) # Find or create a key encryption key metadata. kek_datum_model, kek_meta_dto = _find_or_create_kek_objects( encrypting_plugin, context.project_model) # Secrets are base64 encoded before being passed to the secret stores. secret_bytes = base64.b64decode(secret_dto.secret) encrypt_dto = base.EncryptDTO(secret_bytes) # Enhance the context with content_type, This is needed to build # datum_model to store if not context.content_type: context.content_type = secret_dto.content_type # Create an encrypted datum instance and add the encrypted cyphertext. response_dto = encrypting_plugin.encrypt( encrypt_dto, kek_meta_dto, context.project_model.external_id ) # Convert binary data into a text-based format. _store_secret_and_datum( context, context.secret_model, kek_datum_model, response_dto) return None def get_secret(self, secret_type, metadata, context): """Retrieve a secret. 
:param secret_type: secret type :param metadata: secret metadata :param context: StoreCryptoContext for secret :returns: SecretDTO that contains secret """ if (not context.secret_model or not context.secret_model.encrypted_data): raise sstore.SecretNotFoundException() # TODO(john-wood-w) Need to revisit 1 to many datum relationship. datum_model = context.secret_model.encrypted_data[0] # Find HSM-style 'crypto' plugin. decrypting_plugin = manager.get_manager().get_plugin_retrieve( datum_model.kek_meta_project.plugin_name) # wrap the KEKDatum instance in our DTO kek_meta_dto = base.KEKMetaDTO(datum_model.kek_meta_project) # Convert from text-based storage format to binary. encrypted = base64.b64decode(datum_model.cypher_text) decrypt_dto = base.DecryptDTO(encrypted) # Decrypt the secret. secret = decrypting_plugin.decrypt(decrypt_dto, kek_meta_dto, datum_model.kek_meta_extended, context.project_model.external_id) secret = base64.b64encode(secret) key_spec = sstore.KeySpec(alg=context.secret_model.algorithm, bit_length=context.secret_model.bit_length, mode=context.secret_model.mode) return sstore.SecretDTO(secret_type, secret, key_spec, datum_model.content_type) def delete_secret(self, secret_metadata): """Delete a secret.""" pass def generate_symmetric_key(self, key_spec, context): """Generate a symmetric key. :param key_spec: KeySpec that contains details on the type of key to generate :param context: StoreCryptoContext for secret :returns: a dictionary that contains metadata about the key """ # Find HSM-style 'crypto' plugin. plugin_type = _determine_generation_type(key_spec.alg) if base.PluginSupportTypes.SYMMETRIC_KEY_GENERATION != plugin_type: raise sstore.SecretAlgorithmNotSupportedException(key_spec.alg) generating_plugin = manager.get_manager().get_plugin_store_generate( plugin_type, key_spec.alg, key_spec.bit_length, key_spec.mode, project_id=context.project_model.id) # Find or create a key encryption key metadata. kek_datum_model, kek_meta_dto = _find_or_create_kek_objects( generating_plugin, context.project_model) # Create an encrypted datum instance and add the created cypher text. generate_dto = base.GenerateDTO(key_spec.alg, key_spec.bit_length, key_spec.mode, None) # Create the encrypted meta. response_dto = generating_plugin.generate_symmetric( generate_dto, kek_meta_dto, context.project_model.external_id) # Convert binary data into a text-based format. _store_secret_and_datum( context, context.secret_model, kek_datum_model, response_dto) return None def generate_asymmetric_key(self, key_spec, context): """Generates an asymmetric key. Returns a AsymmetricKeyMetadataDTO object containing metadata(s) for asymmetric key components. The metadata can be used to retrieve individual components of asymmetric key pair. """ plugin_type = _determine_generation_type(key_spec.alg) if base.PluginSupportTypes.ASYMMETRIC_KEY_GENERATION != plugin_type: raise sstore.SecretAlgorithmNotSupportedException(key_spec.alg) generating_plugin = manager.get_manager().get_plugin_store_generate( plugin_type, key_spec.alg, key_spec.bit_length, project_id=context.project_model.id) # Find or create a key encryption key metadata. kek_datum_model, kek_meta_dto = _find_or_create_kek_objects( generating_plugin, context.project_model) generate_dto = base.GenerateDTO(key_spec.alg, key_spec.bit_length, None, key_spec.passphrase) # Create the encrypted meta. 
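# generate_asymmetric() is expected to hand back three response DTOs: the wrapped private key, the public key, and the encrypted passphrase; the passphrase DTO may be None when no passphrase was requested.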
private_key_dto, public_key_dto, passwd_dto = ( generating_plugin.generate_asymmetric( generate_dto, kek_meta_dto, context.project_model.external_id ) ) _store_secret_and_datum( context, context.private_secret_model, kek_datum_model, private_key_dto) _store_secret_and_datum( context, context.public_secret_model, kek_datum_model, public_key_dto) if key_spec.passphrase and passwd_dto: _store_secret_and_datum( context, context.passphrase_secret_model, kek_datum_model, passwd_dto) return sstore.AsymmetricKeyMetadataDTO() def generate_supports(self, key_spec): """Key generation supported? Specifies whether the plugin supports key generation with the given key_spec. """ return (key_spec and (key_spec.alg.lower() in sstore.KeyAlgorithm.ASYMMETRIC_ALGORITHMS or key_spec.alg.lower() in sstore.KeyAlgorithm.SYMMETRIC_ALGORITHMS)) def store_secret_supports(self, key_spec): """Key storage supported? Specifies whether the plugin supports storage of the secret given the attributes included in the KeySpec """ return True def _determine_generation_type(algorithm): """Determines the type based on algorithm.""" if not algorithm: raise sstore.SecretAlgorithmNotSupportedException(algorithm) symmetric_algs = base.PluginSupportTypes.SYMMETRIC_ALGORITHMS asymmetric_algs = base.PluginSupportTypes.ASYMMETRIC_ALGORITHMS if algorithm.lower() in symmetric_algs: return base.PluginSupportTypes.SYMMETRIC_KEY_GENERATION elif algorithm.lower() in asymmetric_algs: return base.PluginSupportTypes.ASYMMETRIC_KEY_GENERATION else: raise sstore.SecretAlgorithmNotSupportedException(algorithm) def _find_or_create_kek_objects(plugin_inst, project_model): kek_repo = repositories.get_kek_datum_repository() # Find or create a key encryption key. full_plugin_name = utils.generate_fullname_for(plugin_inst) kek_datum_model = kek_repo.find_or_create_kek_datum(project_model, full_plugin_name) # Bind to the plugin's key management. # TODO(jwood): Does this need to be in a critical section? Should the # bind operation just be declared idempotent in the plugin contract? kek_meta_dto = base.KEKMetaDTO(kek_datum_model) if not kek_datum_model.bind_completed: kek_meta_dto = plugin_inst.bind_kek_metadata(kek_meta_dto) # By contract, enforce that plugins return a # (typically modified) DTO. if kek_meta_dto is None: raise base.CryptoKEKBindingException(full_plugin_name) _indicate_bind_completed(kek_meta_dto, kek_datum_model) kek_repo.save(kek_datum_model) return kek_datum_model, kek_meta_dto def _store_secret_and_datum( context, secret_model, kek_datum_model, generated_dto): # Create Secret entities in data store. if not secret_model.id: secret_model.project_id = context.project_model.id repositories.get_secret_repository().create_from(secret_model) # setup and store encrypted datum datum_model = models.EncryptedDatum(secret_model, kek_datum_model) datum_model.content_type = context.content_type datum_model.cypher_text = base64.b64encode(generated_dto.cypher_text) datum_model.kek_meta_extended = generated_dto.kek_meta_extended repositories.get_encrypted_datum_repository().create_from( datum_model) def _indicate_bind_completed(kek_meta_dto, kek_datum): """Updates the supplied kek_datum instance Updates the kek_datum per the contents of the supplied kek_meta_dto instance. This function is typically used once plugins have had a chance to bind kek_meta_dto to their crypto systems. 
:param kek_meta_dto: :param kek_datum: :return: None """ kek_datum.bind_completed = True kek_datum.algorithm = kek_meta_dto.algorithm kek_datum.bit_length = kek_meta_dto.bit_length kek_datum.mode = kek_meta_dto.mode kek_datum.plugin_meta = kek_meta_dto.plugin_meta barbican-9.1.0.dev50/barbican/plugin/dogtag.py0000664000175000017500000015040413616500636021300 0ustar sahidsahid00000000000000# Copyright (c) 2014 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import base64 import copy from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import serialization import datetime import os from oslo_utils import uuidutils import time import pki subcas_available = True try: import pki.authority as authority import pki.feature as feature except ImportError: subcas_available = False import pki.cert import pki.client import pki.crypto as cryptoutil import pki.key as key import pki.kra import pki.profile from requests import exceptions as request_exceptions import six from barbican.common import exception from barbican.common import utils from barbican import i18n as u # we want to keep the dogtag config options separated. That way we # do not need to import every dogtag requirement to generate the # sample config import barbican.plugin.dogtag_config_opts # noqa import barbican.plugin.interface.certificate_manager as cm import barbican.plugin.interface.secret_store as sstore # reuse the conf object to not call config.new_config() twice CONF = barbican.plugin.dogtag_config_opts.CONF LOG = utils.getLogger(__name__) CERT_HEADER = "-----BEGIN CERTIFICATE-----" CERT_FOOTER = "-----END CERTIFICATE-----" KRA_TRANSPORT_NICK = "KRA transport cert" def _create_nss_db_if_needed(nss_db_path, nss_password): """Creates NSS DB if it's not setup already :returns: True or False whether the database was created or not. """ if not os.path.exists(nss_db_path): cryptoutil.NSSCryptoProvider.setup_database( nss_db_path, nss_password, over_write=True) return True else: LOG.info("The nss_db_path provided already exists, so the " "database is assumed to be already set up.") return False def _setup_nss_db_services(conf): """Sets up NSS Crypto functions This sets up the NSSCryptoProvider and the database it needs for it to store certificates. If the path specified in the configuration is already existent, it will assume that the database is already setup. This will also import the transport cert needed by the KRA if the NSS DB was created. 
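For illustration (placeholder values, not shipped defaults), the relevant options live in the [dogtag_plugin] section of barbican.conf, e.g. nss_db_path = /etc/barbican/alias and nss_password = <password protecting the NSS DB>.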
""" nss_db_path, nss_password = (conf.dogtag_plugin.nss_db_path, conf.dogtag_plugin.nss_password) if nss_db_path is None: LOG.warning("nss_db_path was not provided so the crypto " "provider functions were not initialized.") return None if nss_password is None: raise ValueError(u._("nss_password is required")) if type(nss_password) is not six.binary_type: # Password needs to be a bytes object in Python 3 nss_password = nss_password.encode('UTF-8') nss_db_created = _create_nss_db_if_needed(nss_db_path, nss_password) crypto = cryptoutil.NSSCryptoProvider(nss_db_path, nss_password) if nss_db_created: _import_kra_transport_cert_to_nss_db(conf, crypto) return crypto def _import_kra_transport_cert_to_nss_db(conf, crypto): try: connection = create_connection(conf, 'kra') kraclient = pki.kra.KRAClient(connection, crypto) systemcert_client = kraclient.system_certs transport_cert = systemcert_client.get_transport_cert() crypto.import_cert(KRA_TRANSPORT_NICK, transport_cert, ",,") except Exception as e: LOG.debug("Error importing KRA transport cert.", exc_info=True) LOG.error("Error in importing transport cert." " KRA may not be enabled: %s", e) def create_connection(conf, subsystem_path): pem_path = conf.dogtag_plugin.pem_path if pem_path is None: raise ValueError(u._("pem_path is required")) # port is string type in PKIConnection connection = pki.client.PKIConnection( 'https', conf.dogtag_plugin.dogtag_host, str(conf.dogtag_plugin.dogtag_port), subsystem_path) connection.set_authentication_cert(pem_path) return connection crypto = _setup_nss_db_services(CONF) if crypto: crypto.initialize() class DogtagPluginAlgorithmException(exception.BarbicanException): message = u._("Invalid algorithm passed in") class DogtagPluginNotSupportedException(exception.NotSupported): message = u._("Operation not supported by Dogtag Plugin") def __init__(self, msg=None): if not msg: message = self.message else: message = msg super(DogtagPluginNotSupportedException, self).__init__(message) class DogtagPluginArchivalException(exception.BarbicanException): message = u._("Key archival failed. Error returned from KRA.") class DogtagPluginGenerationException(exception.BarbicanException): message = u._("Key generation failed. 
Error returned from KRA.") class DogtagKRAPlugin(sstore.SecretStoreBase): """Implementation of the secret store plugin with KRA as the backend.""" # metadata constants ALG = "alg" BIT_LENGTH = "bit_length" GENERATED = "generated" KEY_ID = "key_id" SECRET_MODE = "secret_mode" # nosec PASSPHRASE_KEY_ID = "passphrase_key_id" # nosec CONVERT_TO_PEM = "convert_to_pem" # string constants DSA_PRIVATE_KEY_HEADER = '-----BEGIN DSA PRIVATE KEY-----' DSA_PRIVATE_KEY_FOOTER = '-----END DSA PRIVATE KEY-----' DSA_PUBLIC_KEY_HEADER = '-----BEGIN DSA PUBLIC KEY-----' DSA_PUBLIC_KEY_FOOTER = '-----END DSA PUBLIC KEY-----' def __init__(self, conf=CONF): """Constructor - create the keyclient.""" LOG.debug("starting DogtagKRAPlugin init") connection = create_connection(conf, 'kra') # create kraclient kraclient = pki.kra.KRAClient(connection, crypto) self.keyclient = kraclient.keys self.keyclient.set_transport_cert(KRA_TRANSPORT_NICK) self.plugin_name = conf.dogtag_plugin.plugin_name self.retries = conf.dogtag_plugin.retries LOG.debug("completed DogtagKRAPlugin init") def get_plugin_name(self): return self.plugin_name def store_secret(self, secret_dto): """Store a secret in the KRA If secret_dto.transport_key is not None, then we expect secret_dto.secret to include a base64 encoded PKIArchiveOptions structure as defined in section 6.4 of RFC 2511. This package contains a transport key wrapped session key, the session key wrapped secret and parameters to specify the symmetric key wrapping. Otherwise, the data is unencrypted and we use a call to archive_key() to have the Dogtag KRA client generate the relevant session keys. The secret_dto contains additional information on the type of secret that is being stored. We will use that shortly. For, now, lets just assume that its all PASS_PHRASE_TYPE Returns a dict with the relevant metadata (which in this case is just the key_id """ data_type = key.KeyClient.PASS_PHRASE_TYPE key_id = None attempts = 0 offset_time = 1 while attempts <= self.retries and key_id is None: client_key_id = uuidutils.generate_uuid(dashed=False) if secret_dto.transport_key is not None: # TODO(alee-3) send the transport key with the archival request # once the Dogtag Client API changes. response = self.keyclient.archive_pki_options( client_key_id, data_type, secret_dto.secret, key_algorithm=None, key_size=None) else: response = self.keyclient.archive_key( client_key_id, data_type, secret_dto.secret, key_algorithm=None, key_size=None) key_id = response.get_key_id() if key_id is None: LOG.warning("key_id is None. attempts: {}".format(attempts)) attempts += 1 time.sleep(offset_time) offset_time += 1 if key_id is None: raise DogtagPluginArchivalException meta_dict = {DogtagKRAPlugin.KEY_ID: key_id} self._store_secret_attributes(meta_dict, secret_dto) return meta_dict def get_secret(self, secret_type, secret_metadata): """Retrieve a secret from the KRA The secret_metadata is simply the dict returned by a store_secret() or get_secret() call. We will extract the key_id from this dict. Note: There are two ways to retrieve secrets from the KRA. The first method calls retrieve_key without a wrapping key. This relies on the KRA client to generate a wrapping key (and wrap it with the KRA transport cert), and is completely transparent to the Barbican server. What is returned to the caller is the unencrypted secret. The second way is to provide a wrapping key that would be generated on the barbican client. That way only the client will be able to unwrap the secret. 
This wrapping key is provided in the secret_metadata by Barbican core. Format/Type of the secret returned in the SecretDTO object. ----------------------------------------------------------- The type of the secret returned is always dependent on the way it is stored using the store_secret method. For strings - like passphrase/PEM strings - the return will be a string. For binary data, the return will be the actual binary data. When retrieving an asymmetric key that was generated by the Dogtag plugin, the binary representation of the key in PEM format is returned. """ key_id = secret_metadata[DogtagKRAPlugin.KEY_ID] key_spec = sstore.KeySpec( alg=secret_metadata.get(DogtagKRAPlugin.ALG, None), bit_length=secret_metadata.get(DogtagKRAPlugin.BIT_LENGTH, None), mode=secret_metadata.get(DogtagKRAPlugin.SECRET_MODE, None), passphrase=None ) generated = secret_metadata.get(DogtagKRAPlugin.GENERATED, False) passphrase = self._get_passphrase_for_a_private_key( secret_type, secret_metadata, key_spec) recovered_key = None twsk = DogtagKRAPlugin._get_trans_wrapped_session_key(secret_type, secret_metadata) if DogtagKRAPlugin.CONVERT_TO_PEM in secret_metadata: # Case for returning the asymmetric keys generated in KRA. # Asymmetric keys generated in KRA are not generated in PEM format. # This marker DogtagKRAPlugin.CONVERT_TO_PEM is set in the # secret_metadata for asymmetric keys generated in KRA to # help convert the returned private/public keys to PEM format and # eventually return the binary data of the keys in PEM format. if secret_type == sstore.SecretType.PUBLIC: # Public key should be retrieved using the get_key_info method # as it is treated as an attribute of the asymmetric key pair # stored in the KRA database. key_info = self.keyclient.get_key_info(key_id) recovered_key = serialization.load_der_public_key( key_info.public_key, backend=default_backend() ).public_bytes( serialization.Encoding.PEM, serialization.PublicFormat.PKCS1) elif secret_type == sstore.SecretType.PRIVATE: key_data = self.keyclient.retrieve_key(key_id) private_key = serialization.load_der_private_key( key_data.data, password=None, backend=default_backend() ) if passphrase is not None: e_alg = serialization.BestAvailableEncryption(passphrase) else: e_alg = serialization.NoEncryption() recovered_key = private_key.private_bytes( encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.PKCS8, encryption_algorithm=e_alg ) else: # TODO(alee-3) send transport key as well when dogtag client API # changes in case the transport key has changed. key_data = self.keyclient.retrieve_key(key_id, twsk) if twsk: # The data returned is a byte array. recovered_key = key_data.encrypted_data else: recovered_key = key_data.data # TODO(alee) remove final field when content_type is removed # from secret_dto if generated: recovered_key = base64.b64encode(recovered_key) ret = sstore.SecretDTO( type=secret_type, secret=recovered_key, key_spec=key_spec, content_type=None, transport_key=None) return ret def delete_secret(self, secret_metadata): """Delete a secret from the KRA There is currently no way to delete a secret in Dogtag. We will be implementing such a method shortly. """ pass def generate_symmetric_key(self, key_spec): """Generate a symmetric key This calls generate_symmetric_key() on the KRA passing in the algorithm, bit_length and id (used as the client_key_id) from the secret. The remaining parameters are not used. Returns a metadata object that can be used for retrieving the secret.
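A minimal illustrative flow (the plugin variable is hypothetical): meta = plugin.generate_symmetric_key(sstore.KeySpec(alg='AES', bit_length=256)) stores a new AES key, and plugin.get_secret(sstore.SecretType.SYMMETRIC, meta) later recovers it.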
""" usages = [key.SymKeyGenerationRequest.DECRYPT_USAGE, key.SymKeyGenerationRequest.ENCRYPT_USAGE] algorithm = self._map_algorithm(key_spec.alg.lower()) if algorithm is None: raise DogtagPluginAlgorithmException passphrase = key_spec.passphrase if passphrase: raise DogtagPluginNotSupportedException( u._("Passphrase encryption is not supported for symmetric" " key generating algorithms.")) key_id = None attempts = 0 offset_time = 1 while attempts <= self.retries and key_id is None: client_key_id = uuidutils.generate_uuid() response = self.keyclient.generate_symmetric_key( client_key_id, algorithm, key_spec.bit_length, usages) key_id = response.get_key_id() if key_id is None: LOG.warning("generate_symkey: key_id is None. attempts: {}" .format(attempts)) attempts += 1 time.sleep(offset_time) offset_time += 1 if key_id is None: raise DogtagPluginGenerationException # Barbican expects stored keys to be base 64 encoded. We need to # add flag to the keyclient.generate_symmetric_key() call above # to ensure that the key that is stored is base64 encoded. # # As a workaround until that update is available, we will store a # parameter "generated" to indicate that the response must be base64 # encoded on retrieval. Note that this will not work for transport # key encoded data. return {DogtagKRAPlugin.ALG: key_spec.alg, DogtagKRAPlugin.BIT_LENGTH: key_spec.bit_length, DogtagKRAPlugin.KEY_ID: response.get_key_id(), DogtagKRAPlugin.GENERATED: True} def generate_asymmetric_key(self, key_spec): """Generate an asymmetric key. Note that barbican expects all secrets to be base64 encoded. """ usages = [key.AsymKeyGenerationRequest.DECRYPT_USAGE, key.AsymKeyGenerationRequest.ENCRYPT_USAGE] client_key_id = uuidutils.generate_uuid() algorithm = self._map_algorithm(key_spec.alg.lower()) passphrase = key_spec.passphrase if algorithm is None: raise DogtagPluginAlgorithmException passphrase_key_id = None passphrase_metadata = None if passphrase: if algorithm == key.KeyClient.DSA_ALGORITHM: raise DogtagPluginNotSupportedException( u._("Passphrase encryption is not " "supported for DSA algorithm") ) stored_passphrase_info = self.keyclient.archive_key( uuidutils.generate_uuid(), self.keyclient.PASS_PHRASE_TYPE, base64.b64encode(passphrase)) passphrase_key_id = stored_passphrase_info.get_key_id() passphrase_metadata = { DogtagKRAPlugin.KEY_ID: passphrase_key_id } # Barbican expects stored keys to be base 64 encoded. We need to # add flag to the keyclient.generate_asymmetric_key() call above # to ensure that the key that is stored is base64 encoded. # # As a workaround until that update is available, we will store a # parameter "generated" to indicate that the response must be base64 # encoded on retrieval. Note that this will not work for transport # key encoded data. 
response = self.keyclient.generate_asymmetric_key( client_key_id, algorithm, key_spec.bit_length, usages) public_key_metadata = { DogtagKRAPlugin.ALG: key_spec.alg, DogtagKRAPlugin.BIT_LENGTH: key_spec.bit_length, DogtagKRAPlugin.KEY_ID: response.get_key_id(), DogtagKRAPlugin.CONVERT_TO_PEM: "true", DogtagKRAPlugin.GENERATED: True } private_key_metadata = { DogtagKRAPlugin.ALG: key_spec.alg, DogtagKRAPlugin.BIT_LENGTH: key_spec.bit_length, DogtagKRAPlugin.KEY_ID: response.get_key_id(), DogtagKRAPlugin.CONVERT_TO_PEM: "true", DogtagKRAPlugin.GENERATED: True } if passphrase_key_id: private_key_metadata[DogtagKRAPlugin.PASSPHRASE_KEY_ID] = ( passphrase_key_id ) return sstore.AsymmetricKeyMetadataDTO(private_key_metadata, public_key_metadata, passphrase_metadata) def generate_supports(self, key_spec): """Key generation supported? Specifies whether the plugin supports key generation with the given key_spec. For now, we will just check the algorithm. When dogtag adds a call to check the bit length as well, we will use that call to take advantage of the bit_length information """ return self._map_algorithm(key_spec.alg) is not None def store_secret_supports(self, key_spec): """Key storage supported? Specifies whether the plugin supports storage of the secret given the attributes included in the KeySpec """ return True @staticmethod def _map_algorithm(algorithm): """Map Barbican algorithms to Dogtag plugin algorithms. Note that only algorithms supported by Dogtag will be mapped. """ if algorithm is None: return None if algorithm.lower() == sstore.KeyAlgorithm.AES.lower(): return key.KeyClient.AES_ALGORITHM elif algorithm.lower() == sstore.KeyAlgorithm.DES.lower(): return key.KeyClient.DES_ALGORITHM elif algorithm.lower() == sstore.KeyAlgorithm.DESEDE.lower(): return key.KeyClient.DES3_ALGORITHM elif algorithm.lower() == sstore.KeyAlgorithm.DSA.lower(): return key.KeyClient.DSA_ALGORITHM elif algorithm.lower() == sstore.KeyAlgorithm.RSA.lower(): return key.KeyClient.RSA_ALGORITHM elif algorithm.lower() == sstore.KeyAlgorithm.DIFFIE_HELLMAN.lower(): # may be supported, needs to be tested return None elif algorithm.lower() == sstore.KeyAlgorithm.EC.lower(): # asymmetric keys not yet supported return None else: return None @staticmethod def _store_secret_attributes(meta_dict, secret_dto): # store the following attributes for retrieval key_spec = secret_dto.key_spec if key_spec is not None: if key_spec.alg is not None: meta_dict[DogtagKRAPlugin.ALG] = key_spec.alg if key_spec.bit_length is not None: meta_dict[DogtagKRAPlugin.BIT_LENGTH] = key_spec.bit_length if key_spec.mode is not None: meta_dict[DogtagKRAPlugin.SECRET_MODE] = key_spec.mode def _get_passphrase_for_a_private_key(self, secret_type, secret_metadata, key_spec): """Retrieve the passphrase for the private key stored in the KRA.""" if secret_type is None: return None if key_spec.alg is None: return None passphrase = None if DogtagKRAPlugin.PASSPHRASE_KEY_ID in secret_metadata: if key_spec.alg.upper() == key.KeyClient.RSA_ALGORITHM: passphrase = self.keyclient.retrieve_key( secret_metadata.get(DogtagKRAPlugin.PASSPHRASE_KEY_ID) ).data else: if key_spec.alg.upper() == key.KeyClient.DSA_ALGORITHM: raise sstore.SecretGeneralException( u._("DSA keys should not have a passphrase in the" " database, for being used during retrieval.") ) raise sstore.SecretGeneralException( u._("Secrets of type {secret_type} should not have a " "passphrase in the database, for being used during " "retrieval.").format(secret_type=secret_type) ) # note that Barbican 
expects the passphrase to be base64 encoded when # stored, so we need to decode it. if passphrase: passphrase = base64.b64decode(passphrase) return passphrase @staticmethod def _get_trans_wrapped_session_key(secret_type, secret_metadata): twsk = secret_metadata.get('trans_wrapped_session_key', None) if secret_type in [sstore.SecretType.PUBLIC, sstore.SecretType.PRIVATE]: if twsk: raise DogtagPluginNotSupportedException( u._("Encryption using session key is not supported when " "retrieving a {secret_type} " "key.").format(secret_type=secret_type) ) return twsk def _catch_request_exception(ca_related_function): def _catch_ca_unavailable(self, *args, **kwargs): try: return ca_related_function(self, *args, **kwargs) except request_exceptions.RequestException: return cm.ResultDTO( cm.CertificateStatus.CA_UNAVAILABLE_FOR_REQUEST) return _catch_ca_unavailable def _catch_enrollment_exceptions(ca_related_function): def _catch_enrollment_exception(self, *args, **kwargs): try: return ca_related_function(self, *args, **kwargs) except pki.BadRequestException as e: return cm.ResultDTO( cm.CertificateStatus.CLIENT_DATA_ISSUE_SEEN, status_message=e.message) except pki.PKIException as e: raise cm.CertificateGeneralException( u._("Exception thrown by enroll_cert: {message}").format( message=e.message)) return _catch_enrollment_exception def _catch_subca_creation_exceptions(ca_related_function): def _catch_subca_exception(self, *args, **kwargs): try: return ca_related_function(self, *args, **kwargs) except pki.BadRequestException as e: raise exception.BadSubCACreationRequest(reason=e.message) except pki.PKIException as e: raise exception.SubCACreationErrors(reason=e.message) except request_exceptions.RequestException: raise exception.SubCACreationErrors( reason="Unable to connect to CA") return _catch_subca_exception def _catch_subca_deletion_exceptions(ca_related_function): def _catch_subca_exception(self, *args, **kwargs): try: return ca_related_function(self, *args, **kwargs) except pki.ResourceNotFoundException as e: LOG.warning("Sub-CA already deleted") pass except pki.PKIException as e: raise exception.SubCADeletionErrors(reason=e.message) except request_exceptions.RequestException: raise exception.SubCACreationErrors( reason="Unable to connect to CA") return _catch_subca_exception class DogtagCAPlugin(cm.CertificatePluginBase): """Implementation of the cert plugin with Dogtag CA as the backend.""" # order_metadata fields PROFILE_ID = "profile_id" # plugin_metadata fields REQUEST_ID = "request_id" def __init__(self, conf=CONF): """Constructor - create the cert clients.""" connection = create_connection(conf, 'ca') self.certclient = pki.cert.CertClient(connection) self.simple_cmc_profile = conf.dogtag_plugin.simple_cmc_profile self.auto_approved_profiles = conf.dogtag_plugin.auto_approved_profiles self.working_dir = conf.dogtag_plugin.plugin_working_dir if not os.path.isdir(self.working_dir): os.mkdir(self.working_dir) self._expiration = None self._expiration_delta = conf.dogtag_plugin.ca_expiration_time self._expiration_data_path = os.path.join(self.working_dir, "expiration_data.txt") self._host_aid_path = os.path.join(self.working_dir, "host_aid.txt") self._host_aid = None if not os.path.isfile(self._expiration_data_path): self.expiration = datetime.datetime.utcnow() global subcas_available subcas_available = self._are_subcas_enabled_on_backend(connection) if subcas_available: self.authority_client = authority.AuthorityClient(connection) if not os.path.isfile(self._host_aid_path): self.host_aid = 
self.get_host_aid() @property def expiration(self): if self._expiration is None: try: with open(self._expiration_data_path) as expiration_fh: self._expiration = datetime.datetime.strptime( expiration_fh.read(), "%Y-%m-%d %H:%M:%S.%f" ) except (ValueError, TypeError): LOG.warning("Invalid data read from expiration file") self.expiration = datetime.datetime.utcnow() return self._expiration @expiration.setter def expiration(self, val): with open(self._expiration_data_path, 'w') as expiration_fh: expiration_fh.write(val.strftime("%Y-%m-%d %H:%M:%S.%f")) self._expiration = val @property def host_aid(self): if self._host_aid is None: with open(self._host_aid_path) as host_aid_fh: self._host_aid = host_aid_fh.read() return self._host_aid @host_aid.setter def host_aid(self, val): if val is not None: with open(self._host_aid_path, 'w') as host_aid_fh: host_aid_fh.write(val) self._host_aid = val def _are_subcas_enabled_on_backend(self, connection): """Check if subca feature is available SubCA creation must be supported in both the Dogtag client as well as on the back-end server. Moreover, it must be enabled on the backend server. The caller stores the result in the subcas_available global variable. :return: True/False """ global subcas_available if subcas_available: # subcas are supported in the Dogtag client try: feature_client = feature.FeatureClient(connection) authority_feature = feature_client.get_feature("authority") if authority_feature.enabled: LOG.info("Sub-CAs are enabled by Dogtag server") return True else: LOG.info("Sub-CAs are not enabled by Dogtag server") except (request_exceptions.HTTPError, pki.ResourceNotFoundException): LOG.info("Sub-CAs are not supported by Dogtag server") else: LOG.info("Sub-CAs are not supported by Dogtag client") return False def _get_request_id(self, order_id, plugin_meta, operation): request_id = plugin_meta.get(self.REQUEST_ID, None) if not request_id: raise cm.CertificateGeneralException( u._( "{request} not found for {operation} for " "order_id {order_id}" ).format( request=self.REQUEST_ID, operation=operation, order_id=order_id ) ) return request_id @_catch_request_exception def _get_request(self, request_id): try: return self.certclient.get_request(request_id) except pki.RequestNotFoundException: return None @_catch_request_exception def _get_cert(self, cert_id): try: return self.certclient.get_cert(cert_id) except pki.CertNotFoundException: return None def get_default_ca_name(self): return "Dogtag CA" def get_default_signing_cert(self): # TODO(alee) Add code to get the signing cert return None def get_default_intermediates(self): # TODO(alee) Add code to get the cert chain return None def check_certificate_status(self, order_id, order_meta, plugin_meta, barbican_meta_dto): """Check the status of a certificate request. :param order_id: ID of the order associated with this request :param order_meta: order_metadata associated with this order :param plugin_meta: data populated by previous calls for this order, in particular the request_id :param barbican_meta_dto: additional data needed to process order.
:return: cm.ResultDTO """ request_id = self._get_request_id(order_id, plugin_meta, "checking") request = self._get_request(request_id) if not request: raise cm.CertificateGeneralException( u._( "No request found for request_id {request_id} for " "order {order_id}" ).format( request_id=request_id, order_id=order_id ) ) request_status = request.request_status if request_status == pki.cert.CertRequestStatus.REJECTED: return cm.ResultDTO( cm.CertificateStatus.CLIENT_DATA_ISSUE_SEEN, status_message=request.error_message) elif request_status == pki.cert.CertRequestStatus.CANCELED: return cm.ResultDTO( cm.CertificateStatus.REQUEST_CANCELED) elif request_status == pki.cert.CertRequestStatus.PENDING: return cm.ResultDTO( cm.CertificateStatus.WAITING_FOR_CA) elif request_status == pki.cert.CertRequestStatus.COMPLETE: # get the cert cert_id = request.cert_id if not cert_id: raise cm.CertificateGeneralException( u._( "Request {request_id} reports status_complete, but no " "cert_id has been returned" ).format( request_id=request_id ) ) cert = self._get_cert(cert_id) if not cert: raise cm.CertificateGeneralException( u._("Certificate not found for cert_id: {cert_id}").format( cert_id=cert_id ) ) return cm.ResultDTO( cm.CertificateStatus.CERTIFICATE_GENERATED, certificate=cert.encoded, intermediates=cert.pkcs7_cert_chain) else: raise cm.CertificateGeneralException( u._("Invalid request_status returned by CA")) @_catch_request_exception def issue_certificate_request(self, order_id, order_meta, plugin_meta, barbican_meta_dto): """Issue a certificate request to the Dogtag CA Call the relevant certificate issuance function depending on the Barbican defined request type in the order_meta. :param order_id: ID of the order associated with this request :param order_meta: dict containing all the inputs for this request. This includes the request_type. :param plugin_meta: Used to store data for status check :param barbican_meta_dto: additional data needed to process order. :return: cm.ResultDTO """ request_type = order_meta.get( cm.REQUEST_TYPE, cm.CertificateRequestType.CUSTOM_REQUEST) jump_table = { cm.CertificateRequestType.SIMPLE_CMC_REQUEST: self._issue_simple_cmc_request, cm.CertificateRequestType.FULL_CMC_REQUEST: self._issue_full_cmc_request, cm.CertificateRequestType.STORED_KEY_REQUEST: self._issue_stored_key_request, cm.CertificateRequestType.CUSTOM_REQUEST: self._issue_custom_certificate_request } if request_type not in jump_table: raise DogtagPluginNotSupportedException(u._( "Dogtag plugin does not support {0} request type").format( request_type)) return jump_table[request_type](order_id, order_meta, plugin_meta, barbican_meta_dto) @_catch_enrollment_exceptions def _issue_simple_cmc_request(self, order_id, order_meta, plugin_meta, barbican_meta_dto): """Issue a simple CMC request to the Dogtag CA.
        :param order_id: ID of the order associated with this request
        :param order_meta: dict containing all the inputs for this request
        :param plugin_meta: Used to store data for status check
        :param barbican_meta_dto: additional data needed to process order
        :return: cm.ResultDTO
        """
        if barbican_meta_dto.generated_csr is not None:
            csr = barbican_meta_dto.generated_csr
        else:
            # we expect the CSR to be base64 encoded PEM
            # Dogtag CA needs it to be unencoded
            csr = base64.b64decode(order_meta.get('request_data'))

        profile_id = order_meta.get('profile', self.simple_cmc_profile)
        inputs = {
            'cert_request_type': 'pkcs10',
            'cert_request': csr
        }

        return self._issue_certificate_request(
            profile_id, inputs, plugin_meta, barbican_meta_dto)

    @_catch_enrollment_exceptions
    def _issue_full_cmc_request(self, order_id, order_meta, plugin_meta,
                                barbican_meta_dto):
        """Issue a full CMC request to the Dogtag CA.

        :param order_id: ID of the order associated with this request
        :param order_meta: dict containing all the inputs for this request
        :param plugin_meta: Used to store data for status check
        :param barbican_meta_dto: additional data needed to process order
        :return: cm.ResultDTO
        """
        raise DogtagPluginNotSupportedException(u._(
            "Dogtag plugin does not support {0} request type").format(
                cm.CertificateRequestType.FULL_CMC_REQUEST))

    @_catch_enrollment_exceptions
    def _issue_stored_key_request(self, order_id, order_meta, plugin_meta,
                                  barbican_meta_dto):
        """Issue a stored key request to the Dogtag CA.

        This is handled in the same way as a simple CMC request, using the
        CSR that Barbican generated and stored for this order.

        :param order_id: ID of the order associated with this request
        :param order_meta: dict containing all the inputs for this request
        :param plugin_meta: Used to store data for status check
        :param barbican_meta_dto: additional data needed to process order
        :return: cm.ResultDTO
        """
        return self._issue_simple_cmc_request(
            order_id, order_meta, plugin_meta, barbican_meta_dto)

    @_catch_enrollment_exceptions
    def _issue_custom_certificate_request(self, order_id, order_meta,
                                          plugin_meta, barbican_meta_dto):
        """Issue a custom certificate request to Dogtag CA

        :param order_id: ID of the order associated with this request
        :param order_meta: dict containing all the inputs required for a
            particular profile.  One of these must be the profile_id.
            The exact fields (both optional and mandatory) depend on the
            profile, but they will be exposed to the user via a method that
            describes the expected syntax.  Depending on the profile, only
            the relevant fields will be populated in the request.  All
            others will be ignored.
        :param plugin_meta: Used to store data for status check.
        :param barbican_meta_dto: Extra data to aid in processing.
        :return: cm.ResultDTO
        """
        profile_id = order_meta.get(self.PROFILE_ID, None)
        if not profile_id:
            return cm.ResultDTO(
                cm.CertificateStatus.CLIENT_DATA_ISSUE_SEEN,
                status_message=u._("No profile_id specified"))

        # We expect the cert_request to be base64-encoded PEM data, but the
        # Dogtag CA expects raw PEM, so we decode it here.
        updated_meta = copy.deepcopy(order_meta)
        if 'cert_request' in updated_meta:
            updated_meta['cert_request'] = base64.b64decode(
                updated_meta['cert_request'])

        return self._issue_certificate_request(
            profile_id, updated_meta, plugin_meta, barbican_meta_dto)

    def _issue_certificate_request(self, profile_id, inputs, plugin_meta,
                                   barbican_meta_dto):
        """Actually send the cert request to the Dogtag CA

        If the profile_id is one of the auto-approved profiles, then use
        the convenience enroll_cert() method to create and approve the
        request using the Barbican agent cert credentials.  If not, then
        submit the request and wait for approval by a CA agent on the
        Dogtag CA.

        :param profile_id: enrollment profile
        :param inputs: dict of request inputs
        :param plugin_meta: Used to store data for status check.
        :param barbican_meta_dto: Extra data to aid in processing.
        :return: cm.ResultDTO
        """
        ca_id = barbican_meta_dto.plugin_ca_id or self.get_default_ca_name()

        if profile_id in self.auto_approved_profiles:
            if ca_id == self.get_default_ca_name():
                results = self.certclient.enroll_cert(profile_id, inputs)
            else:
                results = self.certclient.enroll_cert(
                    profile_id, inputs, ca_id)
            return self._process_auto_enrollment_results(
                results, plugin_meta, barbican_meta_dto)
        else:
            request = self.certclient.create_enrollment_request(
                profile_id, inputs)
            if ca_id == self.get_default_ca_name():
                results = self.certclient.submit_enrollment_request(request)
            else:
                results = self.certclient.submit_enrollment_request(
                    request, ca_id)
            return self._process_pending_enrollment_results(
                results, plugin_meta, barbican_meta_dto)

    def _process_auto_enrollment_results(self, enrollment_results,
                                         plugin_meta, barbican_meta_dto):
        """Process results received from Dogtag CA for auto-enrollment

        This processes data from enroll_cert, which submits, approves and
        gets the cert issued, returning the results as a list of
        CertEnrollmentResult objects.

        :param enrollment_results: list of CertEnrollmentResult objects
        :param plugin_meta: metadata dict for storing plugin specific data
        :param barbican_meta_dto: object containing extra data to help
            process the request
        :return: cm.ResultDTO
        """

        # Although it is possible to create multiple certs in an invocation
        # of enroll_cert, Barbican cannot handle this case.  Assume only one
        # cert and request are generated for now.
        enrollment_result = enrollment_results[0]
        request = enrollment_result.request
        if not request:
            raise cm.CertificateGeneralException(
                u._("No request returned in enrollment_results"))

        # store the request_id in the plugin metadata
        plugin_meta[self.REQUEST_ID] = request.request_id

        cert = enrollment_result.cert

        return self._create_dto(request.request_status,
                                request.request_id,
                                request.error_message,
                                cert)

    def _process_pending_enrollment_results(self, results, plugin_meta,
                                            barbican_meta_dto):
        """Process results received from Dogtag CA for pending enrollment

        This method processes data returned by submit_enrollment_request(),
        which creates requests that still need to be approved by an agent.

        :param results: CertRequestInfoCollection object
        :param plugin_meta: metadata dict for storing plugin specific data
        :param barbican_meta_dto: object containing extra data to help
            process the request
        :return: cm.ResultDTO
        """

        # Although it is possible to create multiple requests in an
        # invocation of submit_enrollment_request, Barbican cannot handle
        # this case.  Assume only one cert request is generated for now.
        cert_request_info = results.cert_request_info_list[0]
        status = cert_request_info.request_status
        request_id = getattr(cert_request_info, 'request_id', None)
        error_message = getattr(cert_request_info, 'error_message', None)

        # store the request_id in the plugin metadata
        if request_id:
            plugin_meta[self.REQUEST_ID] = request_id

        return self._create_dto(status, request_id, error_message, None)

    def _create_dto(self, request_status, request_id, error_message, cert):
        dto = None
        if request_status == pki.cert.CertRequestStatus.COMPLETE:
            if cert is not None:
                # Barbican is expecting base64 encoded PEM, so we base64
                # encode below.
                #
                # Currently there is an inconsistency in what Dogtag returns
                # for certificates and intermediates.  For certs, we return
                # PEM, whereas for intermediates, we return headerless PEM.
                # This is being addressed in Dogtag ticket:
                # https://fedorahosted.org/pki/ticket/1374
                #
                # Until this is addressed, simply add the missing headers
                cert_chain = (CERT_HEADER + "\r\n" +
                              cert.pkcs7_cert_chain +
                              CERT_FOOTER)
                dto = cm.ResultDTO(
                    cm.CertificateStatus.CERTIFICATE_GENERATED,
                    certificate=base64.b64encode(cert.encoded),
                    intermediates=base64.b64encode(cert_chain))
            else:
                raise cm.CertificateGeneralException(
                    u._("request_id {req_id} returns COMPLETE but no cert "
                        "returned").format(req_id=request_id))

        elif request_status == pki.cert.CertRequestStatus.REJECTED:
            dto = cm.ResultDTO(cm.CertificateStatus.CLIENT_DATA_ISSUE_SEEN,
                               status_message=error_message)
        elif request_status == pki.cert.CertRequestStatus.CANCELED:
            dto = cm.ResultDTO(cm.CertificateStatus.REQUEST_CANCELED)
        elif request_status == pki.cert.CertRequestStatus.PENDING:
            dto = cm.ResultDTO(cm.CertificateStatus.WAITING_FOR_CA)
        else:
            raise cm.CertificateGeneralException(
                u._("Invalid request_status {status} for "
                    "request_id {request_id}").format(
                        status=request_status,
                        request_id=request_id)
            )

        return dto

    def modify_certificate_request(self, order_id, order_meta, plugin_meta,
                                   barbican_meta_dto):
        """Modify a certificate request.

        Once a certificate request is generated, it cannot be modified.
        The only alternative is to cancel the request (if it has not
        already completed) and attempt a fresh enrollment.  That is what
        will be attempted here.

        :param order_id: ID for this order
        :param order_meta: order metadata.  It is assumed that the newly
            modified request data will be present here.
        :param plugin_meta: data stored on behalf of the plugin for further
            operations
        :param barbican_meta_dto: additional data needed to process order.
        :return: cm.ResultDTO
        """
        result_dto = self.cancel_certificate_request(
            order_id, order_meta, plugin_meta, barbican_meta_dto)

        if result_dto.status == cm.CertificateStatus.REQUEST_CANCELED:
            return self.issue_certificate_request(
                order_id, order_meta, plugin_meta, barbican_meta_dto)
        elif result_dto.status == cm.CertificateStatus.INVALID_OPERATION:
            return cm.ResultDTO(
                cm.CertificateStatus.INVALID_OPERATION,
                status_message=u._(
                    "Modify request: unable to cancel: "
                    "{message}").format(message=result_dto.status_message)
            )
        else:
            # other status (ca_unavailable, client_data_issue)
            # return result from cancel operation
            return result_dto

    @_catch_request_exception
    def cancel_certificate_request(self, order_id, order_meta, plugin_meta,
                                   barbican_meta_dto):
        """Cancel a certificate request.

        :param order_id: ID for the order associated with this request
        :param order_meta: order metadata for this request
        :param plugin_meta: data stored by plugin for further processing.
            In particular, the request_id
        :param barbican_meta_dto: additional data needed to process order.
:return: cm.ResultDTO: """ request_id = self._get_request_id(order_id, plugin_meta, "cancelling") try: review_response = self.certclient.review_request(request_id) self.certclient.cancel_request(request_id, review_response) return cm.ResultDTO(cm.CertificateStatus.REQUEST_CANCELED) except pki.RequestNotFoundException: return cm.ResultDTO( cm.CertificateStatus.CLIENT_DATA_ISSUE_SEEN, status_message=u._("no request found for this order")) except pki.ConflictingOperationException as e: return cm.ResultDTO( cm.CertificateStatus.INVALID_OPERATION, status_message=e.message) def supports(self, certificate_spec): if cm.CA_TYPE in certificate_spec: return certificate_spec[cm.CA_TYPE] == cm.CA_PLUGIN_TYPE_DOGTAG if cm.CA_PLUGIN_TYPE_SYMANTEC in certificate_spec: # TODO(alee-3) Handle case where SKI is provided pass return True def supported_request_types(self): """Returns the request_types supported by this plugin. :returns: a list of the Barbican-core defined request_types supported by this plugin. """ return [cm.CertificateRequestType.SIMPLE_CMC_REQUEST, cm.CertificateRequestType.STORED_KEY_REQUEST, cm.CertificateRequestType.CUSTOM_REQUEST] def supports_create_ca(self): """Returns if this plugin and the backend CA supports subCAs :return: True/False """ return subcas_available @_catch_subca_creation_exceptions def create_ca(self, ca_create_dto): """Creates a subordinate CA upon request :param ca_create_dto: Data transfer object :class:`CACreateDTO` containing data required to generate a subordinate CA. This data includes the subject DN of the new CA signing certificate, a name for the new CA and a reference to the CA that will issue the new subordinate CA's signing certificate, :return: ca_info: Dictionary containing the data needed to create a models.CertificateAuthority object """ if not subcas_available: raise exception.SubCAsNotSupported( "Subordinate CAs are not supported by this Dogtag CA") parent_ca_id = self._get_correct_ca_id(ca_create_dto.parent_ca_id) ca_data = authority.AuthorityData( dn=ca_create_dto.subject_dn, parent_aid=parent_ca_id, description=ca_create_dto.name) new_ca_data = self.authority_client.create_ca(ca_data) cert = self.authority_client.get_cert(new_ca_data.aid, "PEM") chain = self.authority_client.get_chain(new_ca_data.aid, "PEM") return { cm.INFO_NAME: new_ca_data.description, cm.INFO_CA_SIGNING_CERT: cert, cm.INFO_EXPIRATION: self.expiration.isoformat(), cm.INFO_INTERMEDIATES: chain, cm.PLUGIN_CA_ID: new_ca_data.aid } def _get_correct_ca_id(self, plugin_ca_id): """Returns the correct authority id When the Dogtag plugin updates its CA list, any subcas will have a plugin_ca_id that matches the authority_id (aid) as returned from the backend CA. For migration purposes, though, ie. migrating from a non-subca environment to a subca one, we want the host CA to keep the same plugin_ca_id (which is the default_ca_name) so that no disruption occurs. Therefore, we need to store the host CA's authority ID (in get_ca_info) and return it here instead. 
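        For example (illustrative values only): if the host CA was
        registered under the default name "Dogtag CA" and the backend
        reports its authority id as "abc-123", a lookup with plugin_ca_id
        "Dogtag CA" returns "abc-123", while any other plugin_ca_id (a
        subCA's aid) is returned unchanged.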
""" if plugin_ca_id == self.get_default_ca_name(): return self.host_aid else: return plugin_ca_id @_catch_subca_deletion_exceptions def delete_ca(self, ca_id): """Deletes a subordinate CA :param ca_id: id for the CA as specified by the plugin :return: None """ if not subcas_available: raise exception.SubCAsNotSupported( "Subordinate CAs are not supported by this Dogtag CA") # ca must be disabled first self.authority_client.disable_ca(ca_id) self.authority_client.delete_ca(ca_id) def get_ca_info(self): if not subcas_available: return super(DogtagCAPlugin, self).get_ca_info() self.expiration = (datetime.datetime.utcnow() + datetime.timedelta( days=int(self._expiration_delta))) ret = {} cas = self.authority_client.list_cas() for ca_data in cas.ca_list: if not ca_data.enabled: continue cert = self.authority_client.get_cert(ca_data.aid, "PEM") chain = self.authority_client.get_chain(ca_data.aid, "PEM") ca_info = { cm.INFO_NAME: ca_data.description, cm.INFO_CA_SIGNING_CERT: cert, cm.INFO_INTERMEDIATES: chain, cm.INFO_EXPIRATION: self.expiration.isoformat() } # handle the migration case. The top level CA should continue # to work as before if ca_data.is_host_authority: ret[self.get_default_ca_name()] = ca_info self.host_aid = ca_data.aid else: ret[ca_data.aid] = ca_info return ret def get_host_aid(self): cas = self.authority_client.list_cas() for ca_data in cas.ca_list: if ca_data.is_host_authority: return ca_data.aid return None barbican-9.1.0.dev50/barbican/plugin/__init__.py0000664000175000017500000000000013616500636021554 0ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/plugin/simple_certificate_manager.py0000664000175000017500000001555613616500636025370 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ Default implementation of Barbican certificate processing plugins and support. """ from barbican.common import utils from barbican.plugin.interface import certificate_manager as cert LOG = utils.getLogger(__name__) MSEC_UNTIL_CHECK_STATUS = 5000 class SimpleCertificatePlugin(cert.CertificatePluginBase): """Simple/default certificate plugin.""" def get_default_ca_name(self): return "Simple CA" def get_default_signing_cert(self): return "XXXXXXXXXXXXXXXXX" def get_default_intermediates(self): return "YYYYYYYYYYYYYYYY" def issue_certificate_request(self, order_id, order_meta, plugin_meta, barbican_meta_dto): """Create the initial order with CA :param order_id: ID associated with the order :param order_meta: Dict of meta-data associated with the order. :param plugin_meta: Plugin meta-data previously set by calls to this plugin. Plugins may also update/add information here which Barbican will persist on their behalf. :param barbican_meta_dto: additional data needed to process order. 
        :returns: A :class:`ResultDTO` instance containing the result
                  populated by the plugin implementation
        :rtype: :class:`ResultDTO`
        """
        LOG.info('Invoking issue_certificate_request()')
        return cert.ResultDTO(
            cert.CertificateStatus.WAITING_FOR_CA,
            retry_msec=MSEC_UNTIL_CHECK_STATUS)

    def modify_certificate_request(self, order_id, order_meta, plugin_meta,
                                   barbican_meta_dto):
        """Update the order meta-data

        :param order_id: ID associated with the order
        :param order_meta: Dict of meta-data associated with the order.
        :param plugin_meta: Plugin meta-data previously set by calls to
                            this plugin. Plugins may also update/add
                            information here which Barbican will persist
                            on their behalf.
        :param barbican_meta_dto: additional data needed to process order.
        :returns: A :class:`ResultDTO` instance containing the result
                  populated by the plugin implementation
        :rtype: :class:`ResultDTO`
        """
        LOG.info('Invoking modify_certificate_request()')
        return cert.ResultDTO(cert.CertificateStatus.WAITING_FOR_CA)

    def cancel_certificate_request(self, order_id, order_meta, plugin_meta,
                                   barbican_meta_dto):
        """Cancel the order

        :param order_id: ID associated with the order
        :param order_meta: Dict of meta-data associated with the order.
        :param plugin_meta: Plugin meta-data previously set by calls to
                            this plugin. Plugins may also update/add
                            information here which Barbican will persist
                            on their behalf.
        :param barbican_meta_dto: additional data needed to process order.
        :returns: A :class:`ResultDTO` instance containing the result
                  populated by the plugin implementation
        :rtype: :class:`ResultDTO`
        """
        LOG.info('Invoking cancel_certificate_request()')
        return cert.ResultDTO(cert.CertificateStatus.REQUEST_CANCELED)

    def check_certificate_status(self, order_id, order_meta, plugin_meta,
                                 barbican_meta_dto):
        """Check status of the order

        :param order_id: ID associated with the order
        :param order_meta: Dict of meta-data associated with the order.
        :param plugin_meta: Plugin meta-data previously set by calls to
                            this plugin. Plugins may also update/add
                            information here which Barbican will persist
                            on their behalf.
        :param barbican_meta_dto: additional data needed to process order.
        :returns: A :class:`ResultDTO` instance containing the result
                  populated by the plugin implementation
        :rtype: :class:`ResultDTO`
        """
        LOG.info('Invoking check_certificate_status()')
        return cert.ResultDTO(cert.CertificateStatus.CERTIFICATE_GENERATED)

    def supports(self, certificate_spec):
        """Indicates whether the plugin supports the certificate type.

        :param certificate_spec: Contains details on the certificate to
                                 generate the certificate order
        :returns: boolean indicating if the plugin supports the certificate
                  type
        """
        return True

    def supported_request_types(self):
        """Returns the request types supported by this plugin.

        :returns: list containing the Barbican-core defined request types
                  supported by this plugin.
        """
        return [cert.CertificateRequestType.CUSTOM_REQUEST,
                cert.CertificateRequestType.SIMPLE_CMC_REQUEST,
                cert.CertificateRequestType.FULL_CMC_REQUEST,
                cert.CertificateRequestType.STORED_KEY_REQUEST]


class SimpleCertificateEventPlugin(cert.CertificateEventPluginBase):
    """Simple/default certificate event plugin."""

    def notify_certificate_is_ready(
            self, project_id, order_ref, container_ref):
        """Notify that a certificate has been generated and is ready to use.
:param project_id: Project ID associated with this certificate :param order_ref: HATEOAS reference URI to the submitted Barbican Order :param container_ref: HATEOAS reference URI to the Container storing the certificate :returns: None """ LOG.info('Invoking notify_certificate_is_ready()') def notify_ca_is_unavailable( self, project_id, order_ref, error_msg, retry_in_msec): """Notify that the certificate authority (CA) isn't available. :param project_id: Project ID associated with this order :param order_ref: HATEOAS reference URI to the submitted Barbican Order :param error_msg: Error message if it is available :param retry_in_msec: Delay before attempting to talk to the CA again. If this is 0, then no attempt will be made. :returns: None """ LOG.info('Invoking notify_ca_is_unavailable()') barbican-9.1.0.dev50/barbican/plugin/resources.py0000664000175000017500000004067613616500636022056 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from barbican.common import exception from barbican.common import utils from barbican.model import models from barbican.model import repositories as repos from barbican.plugin.interface import secret_store from barbican.plugin import store_crypto from barbican.plugin.util import translations as tr def _get_transport_key_model(key_spec, transport_key_needed, project_id): key_model = None if transport_key_needed: # get_plugin_store() will throw an exception if no suitable # plugin with transport key is found plugin_manager = secret_store.get_manager() store_plugin = plugin_manager.get_plugin_store( key_spec=key_spec, transport_key_needed=True, project_id=project_id) plugin_name = utils.generate_fullname_for(store_plugin) key_repo = repos.get_transport_key_repository() key_model = key_repo.get_latest_transport_key(plugin_name) if not key_model or not store_plugin.is_transport_key_current( key_model.transport_key): # transport key does not exist or is not current. 
# need to get a new transport key transport_key = store_plugin.get_transport_key() new_key_model = models.TransportKey(plugin_name, transport_key) key_model = key_repo.create_from(new_key_model) return key_model def _get_plugin_name_and_transport_key(transport_key_id): plugin_name = None transport_key = None if transport_key_id is not None: transport_key_repo = repos.get_transport_key_repository() try: transport_key_model = transport_key_repo.get( entity_id=transport_key_id) except exception.NotFound: raise exception.ProvidedTransportKeyNotFound(str(transport_key_id)) plugin_name = transport_key_model.plugin_name if plugin_name is None: raise ValueError("Invalid plugin name for transport key") transport_key = transport_key_model.transport_key return plugin_name, transport_key def store_secret(unencrypted_raw, content_type_raw, content_encoding, secret_model, project_model, transport_key_needed=False, transport_key_id=None): """Store a provided secret into secure backend.""" if _secret_already_has_stored_data(secret_model): raise ValueError('Secret already has encrypted data stored for it.') # Create a KeySpec to find a plugin that will support storing the secret key_spec = secret_store.KeySpec(alg=secret_model.algorithm, bit_length=secret_model.bit_length, mode=secret_model.mode) # If there is no secret data to store, then just create Secret entity and # leave. A subsequent call to this method should provide both the Secret # entity created here *and* the secret data to store into it. if not unencrypted_raw: key_model = _get_transport_key_model(key_spec, transport_key_needed, project_id=project_model.id) _save_secret_in_repo(secret_model, project_model) return secret_model, key_model plugin_name, transport_key = _get_plugin_name_and_transport_key( transport_key_id) unencrypted, content_type = tr.normalize_before_encryption( unencrypted_raw, content_type_raw, content_encoding, secret_model.secret_type, enforce_text_only=True) plugin_manager = secret_store.get_manager() store_plugin = plugin_manager.get_plugin_store(key_spec=key_spec, plugin_name=plugin_name, project_id=project_model.id) secret_dto = secret_store.SecretDTO(type=secret_model.secret_type, secret=unencrypted, key_spec=key_spec, content_type=content_type, transport_key=transport_key) secret_metadata = _store_secret_using_plugin(store_plugin, secret_dto, secret_model, project_model) _save_secret_in_repo(secret_model, project_model) _save_secret_metadata_in_repo(secret_model, secret_metadata, store_plugin, content_type) return secret_model, None def get_secret(requesting_content_type, secret_model, project_model, twsk=None, transport_key=None): secret_metadata = _get_secret_meta(secret_model) # NOTE: */* is the pecan default meaning no content type sent in. In this # case we should use the mime type stored in the metadata. if requesting_content_type == '*/*': requesting_content_type = secret_metadata['content_type'] tr.analyze_before_decryption(requesting_content_type) if twsk is not None: secret_metadata['trans_wrapped_session_key'] = twsk secret_metadata['transport_key'] = transport_key # Locate a suitable plugin to store the secret. plugin_manager = secret_store.get_manager() retrieve_plugin = plugin_manager.get_plugin_retrieve_delete( secret_metadata.get('plugin_name')) # Retrieve the secret. secret_dto = _get_secret( retrieve_plugin, secret_metadata, secret_model, project_model) if twsk is not None: del secret_metadata['transport_key'] del secret_metadata['trans_wrapped_session_key'] # Denormalize the secret. 
return tr.denormalize_after_decryption(secret_dto.secret, requesting_content_type) def get_transport_key_id_for_retrieval(secret_model): """Return a transport key ID for retrieval if the plugin supports it.""" secret_metadata = _get_secret_meta(secret_model) plugin_manager = secret_store.get_manager() retrieve_plugin = plugin_manager.get_plugin_retrieve_delete( secret_metadata.get('plugin_name')) transport_key_id = retrieve_plugin.get_transport_key() return transport_key_id def generate_secret(spec, content_type, project_model): """Generate a secret and store into a secure backend.""" # Locate a suitable plugin to store the secret. key_spec = secret_store.KeySpec(alg=spec.get('algorithm'), bit_length=spec.get('bit_length'), mode=spec.get('mode')) plugin_manager = secret_store.get_manager() generate_plugin = plugin_manager.get_plugin_generate( key_spec, project_id=project_model.id) # Create secret model to eventually save metadata to. secret_model = models.Secret(spec) secret_model['secret_type'] = secret_store.SecretType.SYMMETRIC # Generate the secret. secret_metadata = _generate_symmetric_key( generate_plugin, key_spec, secret_model, project_model, content_type) # Save secret and metadata. _save_secret_in_repo(secret_model, project_model) _save_secret_metadata_in_repo(secret_model, secret_metadata, generate_plugin, content_type) return secret_model def generate_asymmetric_secret(spec, content_type, project_model): """Generate an asymmetric secret and store into a secure backend.""" # Locate a suitable plugin to store the secret. key_spec = secret_store.KeySpec(alg=spec.get('algorithm'), bit_length=spec.get('bit_length'), passphrase=spec.get('passphrase')) plugin_manager = secret_store.get_manager() generate_plugin = plugin_manager.get_plugin_generate( key_spec, project_id=project_model.id) # Create secret models to eventually save metadata to. private_secret_model = models.Secret(spec) private_secret_model['secret_type'] = secret_store.SecretType.PRIVATE public_secret_model = models.Secret(spec) public_secret_model['secret_type'] = secret_store.SecretType.PUBLIC passphrase_secret_model = (models.Secret(spec) if spec.get('passphrase') else None) if passphrase_secret_model: passphrase_type = secret_store.SecretType.PASSPHRASE passphrase_secret_model['secret_type'] = passphrase_type asymmetric_meta_dto = _generate_asymmetric_key( generate_plugin, key_spec, private_secret_model, public_secret_model, passphrase_secret_model, project_model, content_type ) _save_secret_in_repo(private_secret_model, project_model) _save_secret_metadata_in_repo(private_secret_model, asymmetric_meta_dto.private_key_meta, generate_plugin, content_type) _save_secret_in_repo(public_secret_model, project_model) _save_secret_metadata_in_repo(public_secret_model, asymmetric_meta_dto.public_key_meta, generate_plugin, content_type) if passphrase_secret_model: _save_secret_in_repo(passphrase_secret_model, project_model) _save_secret_metadata_in_repo(passphrase_secret_model, asymmetric_meta_dto.passphrase_meta, generate_plugin, content_type) container_model = _create_container_for_asymmetric_secret(spec, project_model) _save_asymmetric_secret_in_repo( container_model, private_secret_model, public_secret_model, passphrase_secret_model) return container_model def delete_secret(secret_model, project_id): """Remove a secret from secure backend.""" secret_metadata = _get_secret_meta(secret_model) # We should only try to delete a secret using the plugin interface if # there's the metadata available. This addresses bug/1377330. 
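    # (A secret created via the two-step upload flow that never received
    # data has no metadata row, so in that case there is nothing to remove
    # from the backend.)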
if secret_metadata: # Locate a suitable plugin to delete the secret from. plugin_manager = secret_store.get_manager() delete_plugin = plugin_manager.get_plugin_retrieve_delete( secret_metadata.get('plugin_name')) # Delete the secret from plugin storage. delete_plugin.delete_secret(secret_metadata) # Delete the secret from data model. secret_repo = repos.get_secret_repository() secret_repo.delete_entity_by_id(entity_id=secret_model.id, external_project_id=project_id) def _store_secret_using_plugin(store_plugin, secret_dto, secret_model, project_model): if isinstance(store_plugin, store_crypto.StoreCryptoAdapterPlugin): context = store_crypto.StoreCryptoContext( project_model, secret_model=secret_model) secret_metadata = store_plugin.store_secret(secret_dto, context) else: secret_metadata = store_plugin.store_secret(secret_dto) return secret_metadata def _generate_symmetric_key( generate_plugin, key_spec, secret_model, project_model, content_type): if isinstance(generate_plugin, store_crypto.StoreCryptoAdapterPlugin): context = store_crypto.StoreCryptoContext( project_model, secret_model=secret_model, content_type=content_type) secret_metadata = generate_plugin.generate_symmetric_key( key_spec, context) else: secret_metadata = generate_plugin.generate_symmetric_key(key_spec) return secret_metadata def _generate_asymmetric_key(generate_plugin, key_spec, private_secret_model, public_secret_model, passphrase_secret_model, project_model, content_type): if isinstance(generate_plugin, store_crypto.StoreCryptoAdapterPlugin): context = store_crypto.StoreCryptoContext( project_model, private_secret_model=private_secret_model, public_secret_model=public_secret_model, passphrase_secret_model=passphrase_secret_model, content_type=content_type) asymmetric_meta_dto = generate_plugin.generate_asymmetric_key( key_spec, context) else: asymmetric_meta_dto = generate_plugin.generate_asymmetric_key(key_spec) return asymmetric_meta_dto def _get_secret(retrieve_plugin, secret_metadata, secret_model, project_model): if isinstance(retrieve_plugin, store_crypto.StoreCryptoAdapterPlugin): context = store_crypto.StoreCryptoContext( project_model, secret_model=secret_model) secret_dto = retrieve_plugin.get_secret(secret_model.secret_type, secret_metadata, context) else: secret_dto = retrieve_plugin.get_secret(secret_model.secret_type, secret_metadata) return secret_dto def _get_secret_meta(secret_model): if secret_model: secret_meta_repo = repos.get_secret_meta_repository() return secret_meta_repo.get_metadata_for_secret(secret_model.id) else: return {} def _save_secret_metadata_in_repo(secret_model, secret_metadata, store_plugin, content_type): """Add secret metadata to a secret.""" if not secret_metadata: secret_metadata = {} secret_metadata['plugin_name'] = utils.generate_fullname_for(store_plugin) secret_metadata['content_type'] = content_type secret_meta_repo = repos.get_secret_meta_repository() secret_meta_repo.save(secret_metadata, secret_model) def _save_secret_in_repo(secret_model, project_model): """Save a Secret entity.""" secret_repo = repos.get_secret_repository() # Create Secret entities in data store. 
if not secret_model.id: secret_model.project_id = project_model.id secret_repo.create_from(secret_model) else: secret_repo.save(secret_model) def _secret_already_has_stored_data(secret_model): if not secret_model: return False return secret_model.encrypted_data or secret_model.secret_store_metadata def _create_container_for_asymmetric_secret(spec, project_model): container_model = models.Container() container_model.name = spec.get('name') container_model.type = spec.get('algorithm', '').lower() container_model.status = models.States.ACTIVE container_model.project_id = project_model.id container_model.creator_id = spec.get('creator_id') return container_model def _save_asymmetric_secret_in_repo(container_model, private_secret_model, public_secret_model, passphrase_secret_model): container_repo = repos.get_container_repository() container_repo.create_from(container_model) # create container_secret for private_key _create_container_secret_association('private_key', private_secret_model, container_model) # create container_secret for public_key _create_container_secret_association('public_key', public_secret_model, container_model) if passphrase_secret_model: # create container_secret for passphrase _create_container_secret_association('private_key_passphrase', passphrase_secret_model, container_model) def _create_container_secret_association(assoc_name, secret_model, container_model): container_secret = models.ContainerSecret() container_secret.name = assoc_name container_secret.container_id = container_model.id container_secret.secret_id = secret_model.id container_secret_repo = repos.get_container_secret_repository() container_secret_repo.create_from(container_secret) barbican-9.1.0.dev50/barbican/plugin/snakeoil_ca.py0000664000175000017500000004141413616500636022303 0ustar sahidsahid00000000000000# Copyright 2014 Hewlett-Packard Development Company, L.P. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
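"""Snakeoil CA certificate plugin.

A self-contained test CA built on pyOpenSSL.  Subject DNs are parsed by
``set_subject_X509Name`` below from comma-separated ``name=value`` pairs,
for example (illustrative value only)::

    cn=Snakeoil Certificate,o=example.com,ou=Testing

Only the CN, O, OU, ST and L components are currently recognized.
"""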
import base64 import datetime import os import re import subprocess # nosec from tempfile import mkstemp import uuid from OpenSSL import crypto from oslo_config import cfg from oslo_utils import fnmatch from oslo_utils import uuidutils from barbican.common import config from barbican.common import utils from barbican import i18n as u import barbican.plugin.interface.certificate_manager as cert_manager CONF = config.new_config() LOG = utils.getLogger(__name__) snakeoil_ca_plugin_group = cfg.OptGroup(name='snakeoil_ca_plugin', title="Snakeoil CA Plugin Options") snakeoil_ca_plugin_opts = [ cfg.StrOpt('ca_cert_path', help=u._('Path to CA certificate file')), cfg.StrOpt('ca_cert_key_path', help=u._('Path to CA certificate key file')), cfg.StrOpt('ca_cert_chain_path', help=u._('Path to CA certificate chain file')), cfg.StrOpt('ca_cert_pkcs7_path', help=u._('Path to CA chain pkcs7 file')), cfg.StrOpt('subca_cert_key_directory', default='/etc/barbican/snakeoil-cas', help=u._('Directory in which to store certs/keys for subcas')), ] CONF.register_group(snakeoil_ca_plugin_group) CONF.register_opts(snakeoil_ca_plugin_opts, group=snakeoil_ca_plugin_group) config.parse_args(CONF) def list_opts(): yield snakeoil_ca_plugin_group, snakeoil_ca_plugin_opts def set_subject_X509Name(target, dn): """Set target X509Name object with parsed dn. This is very basic and should certainly be replaced by something using cryptography for instance, but will do for a basic test CA """ # TODO(alee) Figure out why C (country) is not working fields = dn.split(',') for field in fields: m = re.search(r"(\w+)\s*=\s*(.+)", field.strip()) name = m.group(1) value = m.group(2) if name.lower() == 'ou': target.OU = value elif name.lower() == 'st': target.ST = value elif name.lower() == 'cn': target.CN = value elif name.lower() == 'l': target.L = value elif name.lower() == 'o': target.O = value return target class SnakeoilCA(object): def __init__(self, cert_path=None, key_path=None, chain_path=None, pkcs7_path=None, name=None, serial=1, key_size=2048, expiry_days=10 * 365, x509_version=2, subject_dn=None, signing_dn=None, signing_key=None, parent_chain_path=None): self.cert_path = cert_path self.key_path = key_path self.chain_path = chain_path self.pkcs7_path = pkcs7_path self.name = name self.serial = serial self.key_size = key_size self.expiry_days = expiry_days self.x509_version = x509_version self.subject_dn = subject_dn if signing_dn is not None: self.signing_dn = signing_dn else: self.signing_dn = subject_dn # self-signed self.signing_key = signing_key self.parent_chain_path = parent_chain_path self._cert_val = None self._key_val = None self._chain_val = None self._pkcs7_val = None @property def cert(self): self.ensure_exists() if self.cert_path: with open(self.cert_path, 'rb') as cert_fh: return crypto.load_certificate(crypto.FILETYPE_PEM, cert_fh.read()) else: return crypto.load_certificate(crypto.FILETYPE_PEM, self._cert_val) @cert.setter def cert(self, val): if self.cert_path: with open(self.cert_path, 'wb') as cert_fh: cert_fh.write(crypto.dump_certificate(crypto.FILETYPE_PEM, val)) else: self._cert_val = crypto.dump_certificate(crypto.FILETYPE_PEM, val) @property def key(self): self.ensure_exists() if self.key_path: with open(self.key_path, 'rb') as key_fh: return crypto.load_privatekey(crypto.FILETYPE_PEM, key_fh.read()) else: return crypto.load_privatekey(crypto.FILETYPE_PEM, self._key_val) @key.setter def key(self, val): if self.key_path: with open(self.key_path, 'wb') as key_fh: 
                key_fh.write(crypto.dump_privatekey(crypto.FILETYPE_PEM,
                                                    val))
        else:
            self._key_val = crypto.dump_privatekey(crypto.FILETYPE_PEM, val)

    @property
    def chain(self):
        self.ensure_exists()
        if self.chain_path:
            with open(self.chain_path, 'rb') as chain_fh:
                return chain_fh.read()
        else:
            return self._chain_val

    @chain.setter
    def chain(self, val):
        if self.chain_path:
            with open(self.chain_path, 'wb') as chain_fh:
                chain_fh.write(val)
        else:
            self._chain_val = val

    @property
    def pkcs7(self):
        self.ensure_exists()
        if self.pkcs7_path:
            with open(self.pkcs7_path, 'rb') as pkcs7_fh:
                return pkcs7_fh.read()
        else:
            return self._pkcs7_val

    @pkcs7.setter
    def pkcs7(self, val):
        if self.pkcs7_path:
            with open(self.pkcs7_path, 'wb') as pkcs7_fh:
                pkcs7_fh.write(val)
        else:
            self._pkcs7_val = val

    @property
    def exists(self):
        if self.cert_path is not None:
            cert_exists = os.path.isfile(self.cert_path)
        else:
            cert_exists = self._cert_val is not None

        if self.key_path is not None:
            key_exists = os.path.isfile(self.key_path)
        else:
            key_exists = self._key_val is not None

        if self.chain_path is not None:
            chain_exists = os.path.isfile(self.chain_path)
        else:
            chain_exists = self._chain_val is not None

        if self.pkcs7_path is not None:
            pkcs7_exists = os.path.isfile(self.pkcs7_path)
        else:
            pkcs7_exists = self._pkcs7_val is not None

        return (cert_exists and key_exists and
                pkcs7_exists and chain_exists)

    def ensure_exists(self):
        if not self.exists:
            LOG.debug('Keypair not found, creating new cert/key')
            self.cert, self.key, self.chain, self.pkcs7 = (
                self.create_keypair())

    def create_keypair(self):
        LOG.debug('Generating Snakeoil CA')
        key = crypto.PKey()
        key.generate_key(crypto.TYPE_RSA, self.key_size)

        cert = crypto.X509()
        cert.set_version(self.x509_version)
        cert.set_serial_number(self.serial)
        subject = cert.get_subject()
        set_subject_X509Name(subject, self.subject_dn)
        cert.set_subject(subject)
        cert.gmtime_adj_notBefore(0)
        # gmtime_adj_notAfter() expects an offset in seconds, not days
        cert.gmtime_adj_notAfter(self.expiry_days * 24 * 60 * 60)

        cert.set_issuer(set_subject_X509Name(
            cert.get_issuer(), self.signing_dn))
        cert.set_pubkey(key)
        cert.add_extensions([
            crypto.X509Extension(b"basicConstraints", True,
                                 b"CA:TRUE, pathlen:5"),
        ])
        if not self.signing_key:
            self.signing_key = key  # self-signed

        cert.sign(self.signing_key, 'sha256')

        LOG.debug('Snakeoil CA cert/key generated')

        chain = b''
        if self.parent_chain_path:
            with open(self.parent_chain_path, 'rb') as fh:
                chain = fh.read()

        chain += crypto.dump_certificate(crypto.FILETYPE_PEM, cert)

        pkcs7 = self._generate_pkcs7(chain)
        return cert, key, chain, pkcs7

    def _generate_pkcs7(self, chain):
        fin, temp_in = mkstemp()
        os.write(fin, chain)
        os.close(fin)

        fout, temp_out = mkstemp()
        os.close(fout)

        subprocess.call(['/usr/bin/openssl', 'crl2pkcs7', '-nocrl',  # nosec
                         '-out', temp_out, '-certfile', temp_in],
                        shell=False)

        with open(temp_out, 'rb') as pkcs7_fh:
            pkcs7 = pkcs7_fh.read()

        os.remove(temp_in)
        os.remove(temp_out)

        return pkcs7


class CertManager(object):

    def __init__(self, ca):
        self.ca = ca

    def get_new_serial(self):
        return uuid.uuid4().int

    def make_certificate(self, csr, expires=2 * 365):
        cert = crypto.X509()
        cert.set_serial_number(self.get_new_serial())
        cert.gmtime_adj_notBefore(0)
        # gmtime_adj_notAfter() expects seconds; 'expires' is given in days
        cert.gmtime_adj_notAfter(expires * 24 * 60 * 60)
        cert.set_issuer(self.ca.cert.get_subject())
        cert.set_subject(csr.get_subject())
        cert.set_pubkey(csr.get_pubkey())
        cert.sign(self.ca.key, 'sha256')

        return cert


class SnakeoilCACertificatePlugin(cert_manager.CertificatePluginBase):
    """Snakeoil CA certificate plugin.

    This plugin is used to easily generate certificates; it is not
    intended for use in a production environment.
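
    A rough usage sketch (illustrative only; in practice the plugin is
    driven through Barbican's certificate plugin interface, and the
    order/meta arguments shown here are placeholders)::

        plugin = SnakeoilCACertificatePlugin()
        ca_info = plugin.get_ca_info()  # {ca_id: {...}} for each known CA
        result = plugin.issue_certificate_request(
            order_id, {'request_data': b64_encoded_csr},
            plugin_meta, barbican_meta_dto)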
""" def __init__(self, conf=CONF): self.cas = {} self.ca = SnakeoilCA( cert_path=conf.snakeoil_ca_plugin.ca_cert_path, key_path=conf.snakeoil_ca_plugin.ca_cert_key_path, chain_path=conf.snakeoil_ca_plugin.ca_cert_chain_path, pkcs7_path=conf.snakeoil_ca_plugin.ca_cert_pkcs7_path, name=self.get_default_ca_name(), subject_dn="cn=Snakeoil Certificate,o=example.com" ) self.cas[self.get_default_ca_name()] = self.ca self.subca_directory = conf.snakeoil_ca_plugin.subca_cert_key_directory if self.subca_directory: if not os.path.exists(self.subca_directory): os.makedirs(self.subca_directory) # pragma: no cover else: self._reload_previously_created_subcas() self.cert_manager = CertManager(self.ca) def _reload_previously_created_subcas(self): for file in os.listdir(self.subca_directory): if fnmatch.fnmatch(file, '*.key'): ca_id, _ext = os.path.splitext(file) self.cas[ca_id] = SnakeoilCA( cert_path=os.path.join(self.subca_directory, ca_id + ".cert"), key_path=os.path.join(self.subca_directory, file), chain_path=os.path.join(self.subca_directory, ca_id + ".chain"), pkcs7_path=os.path.join(self.subca_directory, ca_id + ".p7b") ) def get_default_ca_name(self): return "Snakeoil CA" def get_default_signing_cert(self): return crypto.dump_certificate(crypto.FILETYPE_PEM, self.ca.cert) def get_default_intermediates(self): return None def supported_request_types(self): return [cert_manager.CertificateRequestType.CUSTOM_REQUEST, cert_manager.CertificateRequestType.STORED_KEY_REQUEST] def issue_certificate_request(self, order_id, order_meta, plugin_meta, barbican_meta_dto): if barbican_meta_dto.generated_csr is not None: encoded_csr = barbican_meta_dto.generated_csr else: try: encoded_csr = base64.b64decode(order_meta['request_data']) except KeyError: return cert_manager.ResultDTO( cert_manager.CertificateStatus.CLIENT_DATA_ISSUE_SEEN, status_message=u._("No request_data specified")) csr = crypto.load_certificate_request(crypto.FILETYPE_PEM, encoded_csr) ca_id = barbican_meta_dto.plugin_ca_id if ca_id: ca = self.cas.get(ca_id) if ca is None: raise cert_manager.CertificateGeneralException( "Invalid ca_id passed into snake oil plugin:" + ca_id) else: ca = self.ca cert_mgr = CertManager(ca) cert = cert_mgr.make_certificate(csr) cert_enc = crypto.dump_certificate(crypto.FILETYPE_PEM, cert) return cert_manager.ResultDTO( cert_manager.CertificateStatus.CERTIFICATE_GENERATED, certificate=base64.b64encode(cert_enc), intermediates=base64.b64encode(ca.pkcs7)) def modify_certificate_request(self, order_id, order_meta, plugin_meta, barbican_meta_dto): raise NotImplementedError def cancel_certificate_request(self, order_id, order_meta, plugin_meta, barbican_meta_dto): raise NotImplementedError def check_certificate_status(self, order_id, order_meta, plugin_meta, barbican_meta_dto): raise NotImplementedError def supports(self, certificate_spec): request_type = certificate_spec.get( cert_manager.REQUEST_TYPE, cert_manager.CertificateRequestType.CUSTOM_REQUEST) return request_type in self.supported_request_types() def supports_create_ca(self): return True def create_ca(self, ca_create_dto): # get the parent CA from the ca list, return error if not on list parent_ca_id = ca_create_dto.parent_ca_id if not parent_ca_id: raise cert_manager.CertificateGeneralException( "No parent id passed to snake oil plugin on create_ca") parent_ca = self.cas.get(parent_ca_id) if not parent_ca: raise cert_manager.CertificateGeneralException( "Invalid parent id passed to snake oil plugin:" + parent_ca_id) # create a new ca, passing in key and issuer 
from the parent new_ca_id = uuidutils.generate_uuid() new_cert_path = os.path.join(self.subca_directory, new_ca_id + ".cert") new_key_path = os.path.join(self.subca_directory, new_ca_id + ".key") new_chain_path = os.path.join(self.subca_directory, new_ca_id + ".chain") new_pkcs7_path = os.path.join(self.subca_directory, new_ca_id + ".p7b") parent_chain_path = parent_ca.chain_path new_ca = SnakeoilCA(cert_path=new_cert_path, key_path=new_key_path, chain_path=new_chain_path, pkcs7_path=new_pkcs7_path, name=ca_create_dto.name, subject_dn=ca_create_dto.subject_dn, signing_dn=parent_ca.subject_dn, signing_key=parent_ca.key, parent_chain_path=parent_chain_path) self.cas[new_ca_id] = new_ca expiration = (datetime.datetime.utcnow() + datetime.timedelta( days=cert_manager.CA_INFO_DEFAULT_EXPIRATION_DAYS)) return { cert_manager.INFO_NAME: new_ca.name, cert_manager.INFO_CA_SIGNING_CERT: crypto.dump_certificate( crypto.FILETYPE_PEM, new_ca.cert), cert_manager.INFO_EXPIRATION: expiration.isoformat(), cert_manager.INFO_INTERMEDIATES: new_ca.pkcs7, cert_manager.PLUGIN_CA_ID: new_ca_id } def get_ca_info(self): expiration = (datetime.datetime.utcnow() + datetime.timedelta( days=cert_manager.CA_INFO_DEFAULT_EXPIRATION_DAYS)) ret = {} for ca_id, ca in self.cas.items(): ca_info = { cert_manager.INFO_NAME: ca.name, cert_manager.INFO_CA_SIGNING_CERT: crypto.dump_certificate( crypto.FILETYPE_PEM, ca.cert), cert_manager.INFO_INTERMEDIATES: ca.pkcs7, cert_manager.INFO_EXPIRATION: expiration.isoformat() } ret[ca_id] = ca_info return ret def delete_ca(self, ca_id): self.cas.pop(ca_id) ca_files = [os.path.join(self.subca_directory, ca_id + ".cert"), os.path.join(self.subca_directory, ca_id + ".key"), os.path.join(self.subca_directory, ca_id + ".chain"), os.path.join(self.subca_directory, ca_id + ".p7b")] for ca_file in ca_files: if os.path.exists(ca_file): os.remove(ca_file) barbican-9.1.0.dev50/barbican/context.py0000664000175000017500000000274113616500636020221 0ustar sahidsahid00000000000000# Copyright 2011-2012 OpenStack LLC. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import oslo_context from barbican.common import policy class RequestContext(oslo_context.context.RequestContext): """User security context object Stores information about the security context under which the user accesses the system, as well as additional request information. 
""" def __init__(self, policy_enforcer=None, **kwargs): # prefer usage of 'project' instead of 'tenant' if policy_enforcer: self.policy_enforcer = policy_enforcer else: policy.init() self.policy_enforcer = policy.get_enforcer() super(RequestContext, self).__init__(**kwargs) def to_dict(self): out_dict = super(RequestContext, self).to_dict() out_dict['roles'] = self.roles return out_dict @classmethod def from_dict(cls, values): return cls(**values) barbican-9.1.0.dev50/barbican/tests/0000775000175000017500000000000013616500640017314 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/tests/fixture.py0000664000175000017500000000502613616500636021364 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import fixtures from oslo_db.sqlalchemy import session from oslo_utils import timeutils import sqlalchemy as sa from barbican.model import models class SessionQueryFixture(fixtures.Fixture): """Fixture for testing queries on a session This fixture creates a SQLAlchemy sessionmaker for an in-memory sqlite database with sample data. """ def _setUp(self): self._engine = session.create_engine('sqlite:///:memory:') self.Session = sa.orm.sessionmaker(bind=self._engine) self.external_id = 'EXTERNAL_ID' models.BASE.metadata.create_all(self._engine) self._load_sample_data() def _load_sample_data(self): sess = self.Session() proj = models.Project() proj.external_id = self.external_id sess.add(proj) sess.commit() # commit to add proj.id self._add_secret(sess, proj, 'A', '2016-01-01T00:00:00', '2016-01-01T00:00:00') self._add_secret(sess, proj, 'B', '2016-02-01T00:00:00', '2016-02-01T00:00:00') self._add_secret(sess, proj, 'C', '2016-03-01T00:00:00', '2016-03-01T00:00:00') self._add_secret(sess, proj, 'D', '2016-04-01T00:00:00', '2016-04-01T00:00:00') self._add_secret(sess, proj, 'E', '2016-05-01T00:00:00', '2016-05-01T00:00:00') self._add_secret(sess, proj, 'F', '2016-06-01T00:00:00', '2016-06-01T00:00:00') sess.commit() # commit all secrets def _add_secret(self, session, project, name, created_at, updated_at): s = models.Secret() s.name = name s.created_at = timeutils.parse_isotime(created_at) s.updated_at = timeutils.parse_isotime(updated_at) s.project_id = project.id session.add(s) barbican-9.1.0.dev50/barbican/tests/test_middleware_auth.py0000664000175000017500000000333013616500636024067 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from six.moves import http_client host = "localhost" port = 9311 method = "GET" timeout = 1000 body = None path = "/" headers = "" expected_response = {"v1": "current", "build": "0.1.34dev"} # Typically an authenticated user session will make a request for a key to # barbican # The restful request in all likelihood contain an auth token # this test mimics such a request provided a token # if pki tokens are used, the token is rather large # uuid tokens are smaller and easier to test with # assume there is a "demo" user with only member role # curl -XPOST -d '{"auth":{"passwordCredentials":{"username": "demo", # "password": "secret"}, "tenantName": "demo"}}' # -H "Content-type: application/json" http://localhost:5000/v3/tokens # # pull out the token_id from above and use in ping_barbican # # TODO(malini) flesh this out def get_demo_token(password): pass def ping_barbican(token_id): headers = {'X_AUTH_TOKEN': token_id, 'X_IDENTITY_STATUS': 'Confirmed'} connection = http_client.HTTPConnection(host, port, timeout=timeout) connection.request(method, path, None, headers) response = connection.getresponse().read() connection.close() return response barbican-9.1.0.dev50/barbican/tests/objects/0000775000175000017500000000000013616500640020745 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/tests/objects/test_ovo_base.py0000664000175000017500000000215213616500636024160 0ustar sahidsahid00000000000000# Copyright 2018 Fujitsu. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from barbican.model import repositories as repos from barbican import objects from barbican.tests import database_utils class OVOTestCase(database_utils.RepositoryTestCase): """Base test case class for in-memory database unit tests.""" def setUp(self): super(OVOTestCase, self).setUp() self.session = repos.get_session() class TestBarbicanObject(OVOTestCase): def test_ovo_get_session(self): session = objects.BarbicanObject.get_session() self.assertEqual(self.session, session) barbican-9.1.0.dev50/barbican/tests/objects/test_ovo_project_secret_store.py0000664000175000017500000003174513616500636027507 0ustar sahidsahid00000000000000# Copyright 2018 Fujitsu. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import six from oslo_utils import uuidutils from barbican.common import exception from barbican import objects from barbican.tests.objects import test_ovo_base class TestProjectSecretStore(test_ovo_base.OVOTestCase): def setUp(self): super(TestProjectSecretStore, self).setUp() self.init() def init(self): self.def_name = "PKCS11 HSM" self.def_store_plugin = "store_crypto" self.def_crypto_plugin = "p11_crypto" self.default_secret_stores = self._create_secret_store_obj( self.def_name, self.def_store_plugin, self.def_crypto_plugin, True) def _create_secret_store_obj(self, name, store_plugin, crypto_plugin=None, global_default=None): secret_stores_obj = objects.SecretStores(name=name, store_plugin=store_plugin, crypto_plugin=crypto_plugin, global_default=global_default) secret_stores_obj.create(session=self.session) return secret_stores_obj def _create_project(self): external_id = 'keystone_project_id' + uuidutils.generate_uuid( dashed=True) project = objects.Project(external_id=external_id) project.create(session=self.session) return project def _create_project_secret_store(self, project_id, secret_store_id): project_secret_store = objects.ProjectSecretStore( project_id=project_id, secret_store_id=secret_store_id) project_secret_store.create(session=self.session) return project_secret_store def test_ovo_create_by_entity_id(self): """Tests for 'create' call by project secret store id""" project = self._create_project() project_secret_store = self._create_project_secret_store( project.id, self.default_secret_stores.id) self.assertIsNotNone(project_secret_store) self.assertEqual(project.id, project_secret_store.project_id) self.assertEqual(self.default_secret_stores.id, project_secret_store.secret_store_id) self.assertEqual(objects.States.ACTIVE, project_secret_store.status) # assert values via relationship self.assertEqual(self.default_secret_stores.store_plugin, project_secret_store.secret_store.store_plugin) self.assertEqual(project.external_id, project_secret_store.project.external_id) def test_ovo_should_raise_notfound_exception_get_by_entity_id(self): self.assertRaises(exception.NotFound, objects.ProjectSecretStore.get, "invalid_id", suppress_exception=False) def test_ovo_delete_entity_by_id(self): project = self._create_project() project_secret_store = self._create_project_secret_store( project.id, self.default_secret_stores.id) project_secret_store = objects.ProjectSecretStore.get( project_secret_store.id, session=self.session) self.assertIsNotNone(project_secret_store) objects.ProjectSecretStore.delete_entity_by_id( project_secret_store.id, None, session=self.session) project_secret_store = objects.ProjectSecretStore.get( project_secret_store.id, suppress_exception=True, session=self.session) self.assertIsNone(project_secret_store) def test_ovo_should_raise_constraint_for_same_project_id(self): """Check preferred secret store is set only once for project""" project1 = self._create_project() name = "first_name" store_plugin = 'first_store' crypto_plugin = 'first_crypto' secret_stores1 = self._create_secret_store_obj(name, store_plugin, crypto_plugin, False) # set preferred secret store for project1 self._create_project_secret_store(project1.id, secret_stores1.id) name = "second_name" store_plugin = 'second_store' crypto_plugin = 'second_crypto' secret_stores2 = self._create_secret_store_obj(name, store_plugin, crypto_plugin, False) self.assertRaises(exception.ConstraintCheck, self._create_project_secret_store, project1.id, secret_stores2.id) def test_ovo_do_entity_name(self): """Code 
coverage for entity_name, which is used in case of exception. Raises a duplicate error when trying to set another entry for an existing project. """ project1 = self._create_project() name = "first name" store_plugin = 'first_store' crypto_plugin = 'first_crypto' secret_stores1 = self._create_secret_store_obj(name, store_plugin, crypto_plugin, False) # set preferred secret store for project1 self._create_project_secret_store(project1.id, secret_stores1.id) try: name = "second_name" store_plugin = 'second_store' crypto_plugin = 'second_crypto' secret_stores2 = self._create_secret_store_obj(name, store_plugin, crypto_plugin, False) self._create_project_secret_store(project1.id, secret_stores2.id) self.fail() except exception.ConstraintCheck as ex: self.assertIn("SQL constraint check failed", six.text_type(ex)) def test_ovo_get_secret_store_for_project(self): project1 = self._create_project() name = "first_name" store_plugin = 'first_store' crypto_plugin = 'first_crypto' secret_stores1 = self._create_secret_store_obj(name, store_plugin, crypto_plugin, False) # set preferred secret store for project1 project_secret_store = self._create_project_secret_store( project1.id, secret_stores1.id) # get preferred secret store by barbican project id read_project_secret_stores = objects.ProjectSecretStore.\ get_secret_store_for_project(project1.id, None, session=self.session) self.assertEqual(project_secret_store.project_id, read_project_secret_stores.project_id) self.assertEqual(project_secret_store.secret_store_id, read_project_secret_stores.secret_store_id) # get preferred secret store by keystone project id read_project_secret_stores = objects.ProjectSecretStore. \ get_secret_store_for_project(None, project1.external_id, session=self.session) self.assertEqual(project_secret_store.project_id, read_project_secret_stores.project_id) self.assertEqual(project1.external_id, read_project_secret_stores.project.external_id) self.assertEqual(project_secret_store.secret_store_id, read_project_secret_stores.secret_store_id) def test_ovo_raise_notfound_exception_get_secret_store_for_project(self): self.assertRaises( exception.NotFound, objects.ProjectSecretStore.get_secret_store_for_project, "invalid_id", None, suppress_exception=False) def test_ovo_with_exception_suppressed_get_secret_store_for_project(self): returned_value = objects.ProjectSecretStore. \ get_secret_store_for_project("invalid_id", None, suppress_exception=True, session=self.session) self.assertIsNone(returned_value) def test_ovo_get_project_entities(self): entities = objects.ProjectSecretStore.get_project_entities( uuidutils.generate_uuid(dashed=False), session=self.session) self.assertEqual([], entities) def test_ovo_create_or_update_for_project(self): project1 = self._create_project() name = "first_name" store_plugin = 'first_store' crypto_plugin = 'first_crypto' secret_stores1 = self._create_secret_store_obj(name, store_plugin, crypto_plugin, False) # assert that no preferred secret store is set for the project. 
entity = objects.ProjectSecretStore.get_secret_store_for_project( project1.id, None, suppress_exception=True, session=self.session) self.assertIsNone(entity) # create/set preferred secret store now created_entity = \ objects.ProjectSecretStore.create_or_update_for_project( project1.id, secret_stores1.id, session=self.session) entity = objects.ProjectSecretStore.get_secret_store_for_project( project1.id, None, suppress_exception=False, session=self.session) self.assertIsNotNone(entity) # new preferred secret store self.assertEqual(project1.id, entity.project_id) self.assertEqual(secret_stores1.id, entity.secret_store_id) self.assertEqual(store_plugin, entity.secret_store.store_plugin) self.assertEqual(crypto_plugin, entity.secret_store.crypto_plugin) self.assertEqual(name, entity.secret_store.name) name = 'second_name' store_plugin = 'second_store' crypto_plugin = 'second_crypto' secret_stores2 = self._create_secret_store_obj(name, store_plugin, crypto_plugin, False) updated_entity = \ objects.ProjectSecretStore.create_or_update_for_project( project1.id, secret_stores2.id, session=self.session) self.assertEqual(created_entity.id, updated_entity.id) self.assertEqual(secret_stores2.id, updated_entity.secret_store_id) def test_ovo_get_count_by_secret_store(self): project1 = self._create_project() name = "first_name" store_plugin = 'first_store' crypto_plugin = 'first_crypto' secret_stores1 = self._create_secret_store_obj(name, store_plugin, crypto_plugin, False) count = objects.ProjectSecretStore.get_count_by_secret_store( secret_stores1.id, session=self.session) self.assertEqual(0, count) # create/set preferred secret store now objects.ProjectSecretStore.create_or_update_for_project( project1.id, secret_stores1.id, session=self.session) count = objects.ProjectSecretStore.get_count_by_secret_store( secret_stores1.id, session=self.session) self.assertEqual(1, count) project2 = self._create_project() objects.ProjectSecretStore.create_or_update_for_project( project2.id, secret_stores1.id, session=self.session) count = objects.ProjectSecretStore.get_count_by_secret_store( secret_stores1.id, session=self.session) self.assertEqual(2, count) def test_ovo_should_throw_exception_missing_project_id(self): project_secret_store_1 = objects.ProjectSecretStore( project_id=None, secret_store_id='ss_123456') project_secret_store_2 = objects.ProjectSecretStore( project_id='', secret_store_id='ss_123456') self.assertRaises(exception.MissingArgumentError, project_secret_store_1.create, session=self.session) self.assertRaises(exception.MissingArgumentError, project_secret_store_2.create, session=self.session) def test_ovo_should_throw_exception_missing_secret_store_id(self): project_secret_store_1 = objects.ProjectSecretStore( project_id='proj_123456', secret_store_id=None) project_secret_store_2 = objects.ProjectSecretStore( project_id='proj_123456', secret_store_id='') self.assertRaises(exception.MissingArgumentError, project_secret_store_1.create, session=self.session) self.assertRaises(exception.MissingArgumentError, project_secret_store_2.create, session=self.session) barbican-9.1.0.dev50/barbican/tests/objects/test_ovo_project_quotas.py0000664000175000017500000002425013616500636026313 0ustar sahidsahid00000000000000# Copyright 2018 Fujitsu. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from barbican.common import exception from barbican import objects from barbican.tests.objects import test_ovo_base class TestProjectQuotas(test_ovo_base.OVOTestCase): def setUp(self): super(TestProjectQuotas, self).setUp() self.init() def init(self): self.parsed_project_quotas_1 = { 'secrets': 101, 'orders': 102, 'containers': 103, 'consumers': 105, 'cas': 106} self.parsed_project_quotas_2 = { 'secrets': 201, 'orders': 202, 'containers': 203, 'consumers': 205, 'cas': 206} self.parsed_project_quotas_3 = { 'secrets': 301, 'containers': 303, 'consumers': 305} project1 = objects.Project(external_id='11111') project1.create(session=self.session) self.project_id1 = project1.id self.external_id1 = project1.external_id project2 = objects.Project(external_id='2222') project2.create(session=self.session) self.project_id2 = project2.id self.external_id2 = project2.external_id project3 = objects.Project(external_id='3333') project3.create(session=self.session) self.project_id3 = project3.id self.external_id3 = project3.external_id def test_ovo_get_list_of_one_project_quotas(self): objects.ProjectQuotas.create_or_update_by_project_id( project_id=self.project_id1, parsed_project_quotas=self.parsed_project_quotas_1, session=self.session ) retrieved_project_quotas, offset, limit, total = \ objects.ProjectQuotas.get_by_create_date(session=self.session) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(1, total) self.assertEqual([self.project_id1], [s.project_id for s in retrieved_project_quotas]) self.assertEqual([self.external_id1], [s.project.external_id for s in retrieved_project_quotas]) self.assertEqual([101], [s.secrets for s in retrieved_project_quotas]) self.assertEqual([102], [s.orders for s in retrieved_project_quotas]) self.assertEqual([103], [s.containers for s in retrieved_project_quotas]) self.assertEqual([105], [s.consumers for s in retrieved_project_quotas]) self.assertEqual([106], [s.cas for s in retrieved_project_quotas]) def test_ovo_get_list_of_two_project_quotas(self): objects.ProjectQuotas.create_or_update_by_project_id( project_id=self.project_id1, parsed_project_quotas=self.parsed_project_quotas_1, session=self.session ) objects.ProjectQuotas.create_or_update_by_project_id( project_id=self.project_id2, parsed_project_quotas=self.parsed_project_quotas_2, session=self.session ) retrieved_project_quotas, offset, limit, total = \ objects.ProjectQuotas.get_by_create_date(session=self.session) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(2, total) self.assertItemsEqual([self.project_id1, self.project_id2], [s.project_id for s in retrieved_project_quotas]) self.assertItemsEqual([self.external_id1, self.external_id2], [s.project.external_id for s in retrieved_project_quotas]) self.assertItemsEqual([101, 201], [s.secrets for s in retrieved_project_quotas]) self.assertItemsEqual([102, 202], [s.orders for s in retrieved_project_quotas]) self.assertItemsEqual([103, 203], [s.containers for s in retrieved_project_quotas]) self.assertItemsEqual([105, 205], [s.consumers for s in retrieved_project_quotas]) self.assertItemsEqual([106, 206], [s.cas 
for s in retrieved_project_quotas]) def test_ovo_should_raise_get_list_of_zero_project_quotas(self): self.assertRaises( exception.NotFound, objects.ProjectQuotas.get_by_create_date, session=self.session, suppress_exception=False) def test_ovo_get_specific_project_quotas(self): objects.ProjectQuotas.create_or_update_by_project_id( self.project_id1, self.parsed_project_quotas_1, session=self.session) retrieved_project_quotas = \ objects.ProjectQuotas.get_by_external_project_id( self.external_id1, session=self.session) self.assertEqual(self.project_id1, retrieved_project_quotas.project_id) self.assertEqual(self.external_id1, retrieved_project_quotas.project.external_id) self.assertEqual(101, retrieved_project_quotas.secrets) self.assertEqual(102, retrieved_project_quotas.orders) self.assertEqual(103, retrieved_project_quotas.containers) self.assertEqual(105, retrieved_project_quotas.consumers) self.assertEqual(106, retrieved_project_quotas.cas) def test_ovo_project_quotas_with_some_defaults(self): objects.ProjectQuotas.create_or_update_by_project_id( self.project_id3, self.parsed_project_quotas_3, session=self.session) retrieved_project_quotas = \ objects.ProjectQuotas.get_by_external_project_id( self.external_id3, session=self.session) self.assertEqual(self.project_id3, retrieved_project_quotas.project_id) self.assertEqual(self.external_id3, retrieved_project_quotas.project.external_id) self.assertEqual(301, retrieved_project_quotas.secrets) self.assertIsNone(retrieved_project_quotas.orders) self.assertEqual(303, retrieved_project_quotas.containers) self.assertEqual(305, retrieved_project_quotas.consumers) self.assertIsNone(retrieved_project_quotas.cas) def test_ovo_update_specific_project_quotas(self): objects.ProjectQuotas.create_or_update_by_project_id( self.project_id1, self.parsed_project_quotas_1, session=self.session) self.session.commit() objects.ProjectQuotas.create_or_update_by_project_id( self.project_id1, self.parsed_project_quotas_2, session=self.session) self.session.commit() retrieved_project_quotas = \ objects.ProjectQuotas.get_by_external_project_id( self.external_id1, session=self.session) self.assertEqual(self.project_id1, retrieved_project_quotas.project_id) self.assertEqual(self.external_id1, retrieved_project_quotas.project.external_id) self.assertEqual(201, retrieved_project_quotas.secrets) self.assertEqual(202, retrieved_project_quotas.orders) self.assertEqual(203, retrieved_project_quotas.containers) self.assertEqual(205, retrieved_project_quotas.consumers) self.assertEqual(206, retrieved_project_quotas.cas) def test_ovo_should_raise_get_missing_specific_project_quotas(self): self.assertRaises( exception.NotFound, objects.ProjectQuotas.get_by_external_project_id, 'trollo', suppress_exception=False, session=self.session) def test_ovo_should_suppress_get_missing_specific_project_quotas(self): retrieved_project_quotas = \ objects.ProjectQuotas.get_by_external_project_id( 'trollo', suppress_exception=True, session=self.session) self.assertIsNone(retrieved_project_quotas) def test_ovo_get_by_create_date_nothing(self): retrieved_project_quotas, offset, limit, total = \ objects.ProjectQuotas.get_by_create_date( session=self.session, suppress_exception=True) self.assertEqual([], retrieved_project_quotas) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(0, total) def test_ovo_should_delete(self): objects.ProjectQuotas.create_or_update_by_project_id( self.project_id1, self.parsed_project_quotas_1, session=self.session) self.session.commit() 
objects.ProjectQuotas.delete_by_external_project_id( self.external_id1, session=self.session) def test_ovo_should_raise_delete_not_found(self): self.assertRaises( exception.NotFound, objects.ProjectQuotas.delete_by_external_project_id, 'trollo', session=self.session) def test_ovo_should_suppress_delete_not_found(self): objects.ProjectQuotas.delete_by_external_project_id( 'trollo', suppress_exception=True, session=self.session) def test_ovo_should_raise_not_found_get_by_entity_id(self): self.assertRaises( exception.NotFound, objects.ProjectQuotas.get, 'trollo', session=self.session) def test_ovo_should_throw_exception_missing_project_id(self): project_quotas = objects.ProjectQuotas() self.assertRaises(exception.MissingArgumentError, project_quotas.create, session=self.session) barbican-9.1.0.dev50/barbican/tests/objects/test_ovo_project.py0000664000175000017500000000555513616500636024726 0ustar sahidsahid00000000000000# Copyright 2018 Fujitsu. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from barbican.common import exception from barbican import objects from barbican.tests.objects import test_ovo_base class TestProject(test_ovo_base.OVOTestCase): def setUp(self): super(TestProject, self).setUp() self.session = objects.Project.get_session() def test_ovo_should_create_retrieve_deleted_project(self): # Create project create_project_ovo = objects.Project(external_id='fake_external_id', status='ACTIVE') create_project_ovo.create(session=self.session) project_id = create_project_ovo.id self.assertFalse(create_project_ovo.deleted) self.assertEqual('ACTIVE', create_project_ovo.status) self.assertIsNone(create_project_ovo.deleted_at) self.assertIsNotNone(create_project_ovo.id) # Get project get1_project_ovo = objects.Project.get(entity_id=project_id) self.assertEqual('ACTIVE', get1_project_ovo.status) # Update project update_project_ovo = objects.Project(id=project_id, status='ERROR') update_project_ovo.save(session=self.session) # Get project get2_project_ovo = objects.Project.get(entity_id=project_id) self.assertEqual('ERROR', get2_project_ovo.status) # Delete project objects.Project.delete_entity_by_id(entity_id=project_id, external_project_id=None, session=self.session) self.assertRaises(exception.NotFound, objects.Project.get, entity_id=project_id, session=self.session) def test_ovo_should_raise_no_result_found(self): self.assertRaises(exception.NotFound, objects.Project.get, entity_id="key project id") def test_ovo_find_by_external_project_id(self): external_id = 'fake2_external_id' project_ovo = objects.Project(external_id=external_id, status='ACTIVE') project_ovo.create(session=self.session) project = objects.Project.find_by_external_project_id( external_project_id=external_id, session=self.session) self.assertEqual(external_id, project.external_id) barbican-9.1.0.dev50/barbican/tests/objects/__init__.py0000664000175000017500000000000013616500636023051 0ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/tests/model/0000775000175000017500000000000013616500640020414 5ustar 
sahidsahid00000000000000barbican-9.1.0.dev50/barbican/tests/model/test_models.py0000664000175000017500000010741013616500636023320 0ustar sahidsahid00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import datetime import unittest from barbican.common import exception from barbican.model import models from barbican.plugin.interface import secret_store from barbican.tests import utils class WhenCreatingNewSecret(utils.BaseTestCase): def setUp(self): super(WhenCreatingNewSecret, self).setUp() self.parsed_secret = {'name': 'name', 'secret_type': secret_store.SecretType.OPAQUE, 'algorithm': 'algorithm', 'bit_length': 512, 'mode': 'mode', 'plain_text': 'not-encrypted', 'creator_id': 'creator12345'} self.parsed_order = {'secret': self.parsed_secret} def test_new_secret_is_created_from_dict(self): date_time = datetime.datetime.utcnow().isoformat() self.parsed_secret['expiration'] = date_time secret = models.Secret(self.parsed_secret) self.assertEqual(self.parsed_secret['name'], secret.name) self.assertEqual(self.parsed_secret['secret_type'], secret.secret_type) self.assertEqual(self.parsed_secret['algorithm'], secret.algorithm) self.assertEqual(self.parsed_secret['bit_length'], secret.bit_length) self.assertEqual(self.parsed_secret['mode'], secret.mode) self.assertIsInstance(secret.expiration, datetime.datetime) self.assertEqual(self.parsed_secret['creator_id'], secret.creator_id) self.assertEqual(secret.created_at, secret.updated_at) fields = secret.to_dict_fields() self.assertEqual(self.parsed_secret['secret_type'], fields['secret_type']) self.assertEqual(self.parsed_secret['algorithm'], fields['algorithm']) self.assertEqual(self.parsed_secret['creator_id'], fields['creator_id']) def test_new_secret_is_created_with_default_secret_type(self): secret_spec = dict(self.parsed_secret) date_time = datetime.datetime.utcnow().isoformat() secret_spec['expiration'] = date_time del secret_spec['secret_type'] secret = models.Secret(secret_spec) self.assertEqual(self.parsed_secret['secret_type'], secret.secret_type) class WhenCreatingNewSecretMetadata(utils.BaseTestCase): def setUp(self): super(WhenCreatingNewSecretMetadata, self).setUp() self.key = 'dog' self.value = 'poodle' self.metadata = { 'key': self.key, 'value': self.value } def test_new_secret_metadata_is_created_from_dict(self): secret_meta = models.SecretUserMetadatum(self.key, self.value) self.assertEqual(self.key, secret_meta.key) self.assertEqual(self.value, secret_meta.value) fields = secret_meta.to_dict_fields() self.assertEqual(self.metadata['key'], fields['key']) self.assertEqual(self.metadata['value'], fields['value']) def test_should_raise_exception_metadata_with_no_key(self): self.assertRaises(exception.MissingArgumentError, models.SecretUserMetadatum, None, self.value) def test_should_raise_exception_metadata_with_no_value(self): self.assertRaises(exception.MissingArgumentError, models.SecretUserMetadatum, self.key, None) class WhenCreatingNewOrder(utils.BaseTestCase): def setUp(self): 
super(WhenCreatingNewOrder, self).setUp() self.parsed_order = { 'type': 'certificate', 'meta': { 'email': 'email@email.com' }, 'sub_status': 'Pending', 'sub_status_message': 'Waiting for instructions...', 'creator_id': 'creator12345' } def test_new_order_is_created(self): order = models.Order(self.parsed_order) self.assertEqual(self.parsed_order['type'], order.type) self.assertEqual(self.parsed_order['meta'], order.meta) self.assertEqual(self.parsed_order['sub_status'], order.sub_status) self.assertEqual(self.parsed_order['creator_id'], order.creator_id) self.assertEqual( self.parsed_order['sub_status_message'], order.sub_status_message ) fields = order.to_dict_fields() self.assertEqual(self.parsed_order['sub_status'], fields['sub_status']) self.assertEqual(self.parsed_order['type'], fields['type']) self.assertEqual(self.parsed_order['creator_id'], fields['creator_id']) class WhenCreatingNewContainer(utils.BaseTestCase): def setUp(self): super(WhenCreatingNewContainer, self).setUp() self.parsed_container = {'name': 'name', 'type': 'generic', 'secret_refs': [ {'name': 'test secret 1', 'secret_ref': '123'}, {'name': 'test secret 2', 'secret_ref': '123'}, {'name': 'test secret 3', 'secret_ref': '123'} ], 'creator_id': 'creator123456'} def test_new_container_is_created_from_dict(self): container = models.Container(self.parsed_container) self.assertEqual(self.parsed_container['name'], container.name) self.assertEqual(self.parsed_container['type'], container.type) self.assertEqual(self.parsed_container['creator_id'], container.creator_id) self.assertEqual(len(self.parsed_container['secret_refs']), len(container.container_secrets)) self.assertEqual(self.parsed_container['secret_refs'][0]['name'], container.container_secrets[0].name) self.assertEqual(self.parsed_container['secret_refs'][0]['secret_ref'], container.container_secrets[0].secret_id) self.assertEqual(self.parsed_container['secret_refs'][1]['name'], container.container_secrets[1].name) self.assertEqual(self.parsed_container['secret_refs'][1]['secret_ref'], container.container_secrets[1].secret_id) self.assertEqual(self.parsed_container['secret_refs'][2]['name'], container.container_secrets[2].name) self.assertEqual(self.parsed_container['secret_refs'][2]['secret_ref'], container.container_secrets[2].secret_id) fields = container.to_dict_fields() self.assertEqual(self.parsed_container['name'], fields['name']) self.assertEqual(self.parsed_container['type'], fields['type']) self.assertEqual(self.parsed_container['creator_id'], fields['creator_id']) def test_new_certificate_container_is_created_from_dict(self): self.parsed_container['type'] = 'certificate' container = models.Container(self.parsed_container) self.assertEqual(self.parsed_container['name'], container.name) self.assertEqual(self.parsed_container['type'], container.type) self.assertEqual(self.parsed_container['creator_id'], container.creator_id) self.assertEqual(len(self.parsed_container['secret_refs']), len(container.container_secrets)) self.assertEqual(self.parsed_container['secret_refs'][0]['name'], container.container_secrets[0].name) self.assertEqual(self.parsed_container['secret_refs'][0]['secret_ref'], container.container_secrets[0].secret_id) self.assertEqual(self.parsed_container['secret_refs'][1]['name'], container.container_secrets[1].name,) self.assertEqual(self.parsed_container['secret_refs'][1]['secret_ref'], container.container_secrets[1].secret_id) self.assertEqual(self.parsed_container['secret_refs'][2]['name'], container.container_secrets[2].name) 
self.assertEqual(self.parsed_container['secret_refs'][2]['secret_ref'], container.container_secrets[2].secret_id) def test_parse_secret_ref_uri(self): self.parsed_container['secret_refs'][0]['secret_ref'] = ( 'http://localhost:9110/123/secrets/123456') container = models.Container(self.parsed_container) self.assertEqual('123456', container.container_secrets[0].secret_id) self.parsed_container['secret_refs'][0]['secret_ref'] = ( 'http://localhost:9110/123/secrets/123456/') container = models.Container(self.parsed_container) self.assertEqual('123456', container.container_secrets[0].secret_id) class WhenCreatingNewConsumer(utils.BaseTestCase): def setUp(self): super(WhenCreatingNewConsumer, self).setUp() self.parsed_consumer = {'name': 'name', 'URL': 'URL'} self.project_id = '12345project' self.container_id = '12345container' def test_new_consumer_is_created_from_dict(self): consumer = models.ContainerConsumerMetadatum(self.container_id, self.project_id, self.parsed_consumer) self.assertEqual(self.parsed_consumer['name'], consumer.name) self.assertEqual(self.parsed_consumer['URL'], consumer.URL) self.assertEqual(models.States.ACTIVE, consumer.status) def test_new_consumer_has_correct_hash(self): consumer_one = models.ContainerConsumerMetadatum(self.container_id, self.project_id, self.parsed_consumer) consumer_two = models.ContainerConsumerMetadatum(self.container_id, self.project_id, self.parsed_consumer) different_container = '67890container' consumer_three = models.ContainerConsumerMetadatum( different_container, self.project_id, self.parsed_consumer) self.assertEqual(consumer_one.data_hash, consumer_two.data_hash) self.assertNotEqual(consumer_one.data_hash, consumer_three.data_hash) class WhenProcessingJsonBlob(utils.BaseTestCase): def setUp(self): super(WhenProcessingJsonBlob, self).setUp() self.json_blob = models.JsonBlob() def test_process_bind_param_w_dict(self): res = self.json_blob.process_bind_param({'test': True}, None) self.assertEqual('{"test": true}', res) def test_process_result_value_w_json_str(self): res = self.json_blob.process_result_value('{"test": true}', None) self.assertTrue(res.get('test')) class WhenCreatingOrderRetryTask(utils.BaseTestCase): def test_create_new_order_task(self): order = models.Order({ 'type': 'certificate', 'meta': { 'email': 'email@email.com' }, 'sub_status': 'Pending', 'sub_status_message': 'Waiting for instructions...' 
}) at = datetime.datetime.utcnow() order_retry_task = models.OrderRetryTask() order_retry_task.order_id = order.id order_retry_task.retry_task = "foobar" order_retry_task.retry_at = at order_retry_task.retry_args = ["one", "two"] order_retry_task.retry_kwargs = {"three": "four"} self.assertEqual(order.id, order_retry_task.order_id) self.assertEqual("foobar", order_retry_task.retry_task) self.assertEqual(at, order_retry_task.retry_at) self.assertEqual( ["one", "two"], order_retry_task.retry_args, ) self.assertEqual( {"three": "four"}, order_retry_task.retry_kwargs, ) class WhenCreatingNewCertificateAuthority(utils.BaseTestCase): def setUp(self): super(WhenCreatingNewCertificateAuthority, self).setUp() expiration = (datetime.datetime.utcnow() + datetime.timedelta(minutes=10)) self.parsed_ca = {'plugin_name': 'dogtag_plugin', 'plugin_ca_id': 'ca_master', 'expiration': expiration.isoformat(), 'name': 'Dogtag CA', 'description': 'Master CA for Dogtag plugin', 'ca_signing_certificate': 'XXXXX', 'intermediates': 'YYYYY', 'creator_id': 'user12345', 'parent_ca_id': '12330-223-22', 'project_id': '12345'} def test_new_ca_is_created_from_dict(self): ca = models.CertificateAuthority(self.parsed_ca) self.assertEqual(self.parsed_ca['plugin_name'], ca.plugin_name) self.assertEqual(self.parsed_ca['plugin_ca_id'], ca.plugin_ca_id) self.assertEqual(self.parsed_ca['name'], ca.ca_meta['name'].value) self.assertEqual(self.parsed_ca['description'], ca.ca_meta['description'].value) self.assertEqual(self.parsed_ca['ca_signing_certificate'], ca.ca_meta['ca_signing_certificate'].value) self.assertEqual(self.parsed_ca['intermediates'], ca.ca_meta['intermediates'].value) self.assertIsInstance(ca.expiration, datetime.datetime) self.assertEqual(ca.created_at, ca.updated_at) self.assertEqual(self.parsed_ca['creator_id'], ca.creator_id) self.assertEqual(self.parsed_ca['project_id'], ca.project_id) class WhenCreatingNewProjectCertificateAuthority(utils.BaseTestCase): def setUp(self): super(WhenCreatingNewProjectCertificateAuthority, self).setUp() expiration = (datetime.datetime.utcnow() + datetime.timedelta(minutes=10)) self.parsed_ca = {'plugin_name': 'dogtag_plugin', 'plugin_ca_id': 'ca_master', 'expiration': expiration.isoformat(), 'name': 'Dogtag CA', 'description': 'Master CA for Dogtag plugin', 'ca_signing_certificate': 'XXXXX', 'intermediates': 'YYYYY'} def test_create_new_project_ca(self): ca = models.CertificateAuthority(self.parsed_ca) ca.id = '67890' project = models.Project() project.id = '12345' project_ca = models.ProjectCertificateAuthority(project.id, ca.id) self.assertEqual(ca.id, project_ca.ca_id) self.assertEqual(project.id, project_ca.project_id) class WhenCreatingNewPreferredCertificateAuthority(utils.BaseTestCase): def setUp(self): super(WhenCreatingNewPreferredCertificateAuthority, self).setUp() expiration = (datetime.datetime.utcnow() + datetime.timedelta(minutes=10)) self.parsed_ca = {'plugin_name': 'dogtag_plugin', 'plugin_ca_id': 'ca_master', 'expiration': expiration.isoformat(), 'name': 'Dogtag CA', 'description': 'Master CA for Dogtag plugin', 'ca_signing_certificate': 'XXXXX', 'intermediates': 'YYYYY'} def test_create_new_preferred_ca(self): ca = models.CertificateAuthority(self.parsed_ca) ca.id = '67890' project = models.Project() project.id = '12345' preferred_ca = models.PreferredCertificateAuthority(project.id, ca.id) self.assertEqual(ca.id, preferred_ca.ca_id) self.assertEqual(project.id, preferred_ca.project_id) class WhenCreatingNewSecretACL(utils.BaseTestCase): def setUp(self): 
super(WhenCreatingNewSecretACL, self).setUp() self.secret_id = 'secret123456' self.user_ids = ['user12345', 'user67890'] self.operation = 'read' self.project_access = True def test_new_secretacl_for_given_all_input(self): acl = models.SecretACL(self.secret_id, self.operation, self.project_access, self.user_ids) self.assertEqual(self.secret_id, acl.secret_id) self.assertEqual(self.operation, acl.operation) self.assertEqual(self.project_access, acl.project_access) self.assertTrue(all(acl_user.user_id in self.user_ids for acl_user in acl.acl_users)) def test_new_secretacl_check_to_dict_fields(self): acl = models.SecretACL(self.secret_id, self.operation, self.project_access, self.user_ids) self.assertEqual(self.secret_id, acl.to_dict_fields()['secret_id']) self.assertEqual(self.operation, acl.to_dict_fields()['operation']) self.assertEqual(self.project_access, acl.to_dict_fields()['project_access']) self.assertTrue(all(user_id in self.user_ids for user_id in acl.to_dict_fields()['users'])) self.assertIsNone(acl.to_dict_fields()['acl_id']) def test_new_secretacl_for_bare_minimum_input(self): acl = models.SecretACL(self.secret_id, self.operation, None, None) self.assertEqual(self.secret_id, acl.secret_id) self.assertEqual(0, len(acl.acl_users)) self.assertEqual(self.operation, acl.operation) self.assertIsNone(acl.project_access) def test_new_secretacl_with_duplicate_userids_input(self): user_ids = list(self.user_ids) user_ids *= 2 # duplicate ids acl = models.SecretACL(self.secret_id, self.operation, None, user_ids=user_ids) self.assertEqual(self.secret_id, acl.secret_id) self.assertEqual(self.operation, acl.operation) self.assertIsNone(acl.project_access) self.assertEqual(2, len(acl.acl_users)) def test_should_throw_exception_missing_secret_id(self): self.assertRaises(exception.MissingArgumentError, models.SecretACL, None, 'read', ['user246'], None) def test_should_throw_exception_missing_operation(self): self.assertRaises(exception.MissingArgumentError, models.SecretACL, self.secret_id, None, None, ['user246']) def test_new_secretacl_expect_user_ids_as_list(self): acl = models.SecretACL(self.secret_id, self.operation, None, {'aUser': '12345'}) self.assertEqual(0, len(acl.acl_users)) class WhenCreatingNewContainerACL(utils.BaseTestCase): def setUp(self): super(WhenCreatingNewContainerACL, self).setUp() self.container_id = 'container123456' self.user_ids = ['user12345', 'user67890'] self.operation = 'read' self.project_access = True def test_new_containeracl_for_given_all_input(self): acl = models.ContainerACL(self.container_id, self.operation, self.project_access, self.user_ids) self.assertEqual(self.container_id, acl.container_id) self.assertEqual(self.operation, acl.operation) self.assertEqual(self.project_access, acl.project_access) self.assertTrue(all(acl_user.user_id in self.user_ids for acl_user in acl.acl_users)) def test_new_containeracl_check_to_dict_fields(self): acl = models.ContainerACL(self.container_id, self.operation, self.project_access, self.user_ids) self.assertEqual(self.container_id, acl.to_dict_fields()['container_id']) self.assertEqual(self.operation, acl.to_dict_fields()['operation']) self.assertEqual(self.project_access, acl.to_dict_fields()['project_access']) self.assertTrue(all(user_id in self.user_ids for user_id in acl.to_dict_fields()['users'])) self.assertIsNone(acl.to_dict_fields()['acl_id']) def test_new_containeracl_for_bare_minimum_input(self): acl = models.ContainerACL(self.container_id, self.operation, None, None) self.assertEqual(self.container_id, 
acl.container_id) self.assertEqual(0, len(acl.acl_users)) self.assertEqual(self.operation, acl.operation) self.assertIsNone(acl.project_access) def test_new_containeracl_with_duplicate_userids_input(self): user_ids = list(self.user_ids) user_ids *= 2 # duplicate ids acl = models.ContainerACL(self.container_id, self.operation, True, user_ids=user_ids) self.assertEqual(self.container_id, acl.container_id) self.assertEqual(self.operation, acl.operation) self.assertTrue(acl.project_access) self.assertEqual(2, len(acl.acl_users)) def test_should_throw_exception_missing_container_id(self): self.assertRaises(exception.MissingArgumentError, models.ContainerACL, None, 'read', None, ['user246']) def test_should_throw_exception_missing_operation(self): self.assertRaises(exception.MissingArgumentError, models.ContainerACL, self.container_id, None, None, ['user246']) def test_new_containeracl_expect_user_ids_as_list(self): acl = models.ContainerACL(self.container_id, self.operation, None, {'aUser': '12345'}) self.assertEqual(0, len(acl.acl_users)) class WhenCreatingNewSecretACLUser(utils.BaseTestCase): def setUp(self): super(WhenCreatingNewSecretACLUser, self).setUp() self.secret_acl_id = 'secret_acl_123456' self.user_ids = ['user12345', 'user67890'] def test_new_secretacl_user_for_given_all_input(self): acl_user = models.SecretACLUser(self.secret_acl_id, self.user_ids[0]) self.assertEqual(self.secret_acl_id, acl_user.acl_id) self.assertEqual(self.user_ids[0], acl_user.user_id) self.assertEqual(models.States.ACTIVE, acl_user.status) def test_new_secretacl_user_check_to_dict_fields(self): acl_user = models.SecretACLUser(self.secret_acl_id, self.user_ids[1]) self.assertEqual(self.secret_acl_id, acl_user.to_dict_fields()['acl_id']) self.assertEqual(self.user_ids[1], acl_user.to_dict_fields()['user_id']) self.assertEqual(models.States.ACTIVE, acl_user.to_dict_fields()['status']) def test_should_throw_exception_missing_user_id(self): self.assertRaises(exception.MissingArgumentError, models.SecretACLUser, self.secret_acl_id, None) class WhenCreatingNewContainerACLUser(utils.BaseTestCase): def setUp(self): super(WhenCreatingNewContainerACLUser, self).setUp() self.container_acl_id = 'container_acl_123456' self.user_ids = ['user12345', 'user67890'] def test_new_containeracl_user_for_given_all_input(self): acl_user = models.ContainerACLUser(self.container_acl_id, self.user_ids[0]) self.assertEqual(self.container_acl_id, acl_user.acl_id) self.assertEqual(self.user_ids[0], acl_user.user_id) self.assertEqual(models.States.ACTIVE, acl_user.status) def test_new_containeracl_user_check_to_dict_fields(self): acl_user = models.ContainerACLUser(self.container_acl_id, self.user_ids[1]) self.assertEqual(self.container_acl_id, acl_user.to_dict_fields()['acl_id']) self.assertEqual(self.user_ids[1], acl_user.to_dict_fields()['user_id']) self.assertEqual(models.States.ACTIVE, acl_user.to_dict_fields()['status']) def test_should_throw_exception_missing_user_id(self): self.assertRaises(exception.MissingArgumentError, models.ContainerACLUser, self.container_acl_id, None) class WhenCreatingNewProjectQuotas(utils.BaseTestCase): def test_create_new_project_quotas(self): project = models.Project() project.id = '12345' project.external_id = '67890' parsed_project_quotas = { 'secrets': 101, 'orders': 102, 'containers': 103, 'consumers': 105, 'cas': 106} project_quotas = models.ProjectQuotas(project.id, parsed_project_quotas) self.assertEqual('12345', project_quotas.project_id) self.assertEqual(101, project_quotas.secrets) 
self.assertEqual(102, project_quotas.orders) self.assertEqual(103, project_quotas.containers) self.assertEqual(105, project_quotas.consumers) self.assertEqual(106, project_quotas.cas) def test_create_new_project_quotas_with_all_default_quotas(self): project = models.Project() project.id = '12345' project.external_id = '67890' project_quotas = models.ProjectQuotas(project.id, None) self.assertEqual('12345', project_quotas.project_id) self.assertIsNone(project_quotas.secrets) self.assertIsNone(project_quotas.orders) self.assertIsNone(project_quotas.containers) self.assertIsNone(project_quotas.consumers) self.assertIsNone(project_quotas.cas) def test_create_new_project_quotas_with_some_default_quotas(self): project = models.Project() project.id = '12345' project.external_id = '67890' parsed_project_quotas = { 'secrets': 101, 'containers': 103, 'consumers': 105} project_quotas = models.ProjectQuotas(project.id, parsed_project_quotas) self.assertEqual('12345', project_quotas.project_id) self.assertEqual(101, project_quotas.secrets) self.assertIsNone(project_quotas.orders) self.assertEqual(103, project_quotas.containers) self.assertEqual(105, project_quotas.consumers) self.assertIsNone(project_quotas.cas) def test_should_throw_exception_missing_project_id(self): self.assertRaises(exception.MissingArgumentError, models.ProjectQuotas, None, None) def test_project_quotas_check_to_dict_fields(self): project = models.Project() project.id = '12345' project.external_id = '67890' parsed_project_quotas = { 'secrets': 101, 'orders': 102, 'containers': 103, 'consumers': 105, 'cas': 106} project_quotas = models.ProjectQuotas(project.id, parsed_project_quotas) self.assertEqual(project.id, project_quotas.to_dict_fields()['project_id']) self.assertEqual(101, project_quotas.to_dict_fields()['secrets']) self.assertEqual(102, project_quotas.to_dict_fields()['orders']) self.assertEqual(103, project_quotas.to_dict_fields()['containers']) self.assertEqual(105, project_quotas.to_dict_fields()['consumers']) self.assertEqual(106, project_quotas.to_dict_fields()['cas']) class WhenCreatingNewSecretStores(utils.BaseTestCase): def setUp(self): super(WhenCreatingNewSecretStores, self).setUp() def test_new_secret_stores_for_all_input(self): name = "db backend" store_plugin = 'store_crypto' crypto_plugin = 'simple_crypto' ss = models.SecretStores(name, store_plugin, crypto_plugin, global_default=True) self.assertEqual(store_plugin, ss.store_plugin) self.assertEqual(crypto_plugin, ss.crypto_plugin) self.assertEqual(name, ss.name) self.assertTrue(ss.global_default) self.assertEqual(models.States.ACTIVE, ss.status) def test_new_secret_stores_required_input_only(self): store_plugin = 'store_crypto' name = "db backend" ss = models.SecretStores(name, store_plugin) self.assertEqual(store_plugin, ss.store_plugin) self.assertEqual(name, ss.name) self.assertIsNone(ss.crypto_plugin) self.assertIsNone(ss.global_default) # False default is not used self.assertEqual(models.States.ACTIVE, ss.status) def test_should_throw_exception_missing_store_plugin(self): name = "db backend" self.assertRaises(exception.MissingArgumentError, models.SecretStores, name, None) self.assertRaises(exception.MissingArgumentError, models.SecretStores, name, "") def test_should_throw_exception_missing_name(self): store_plugin = 'store_crypto' self.assertRaises(exception.MissingArgumentError, models.SecretStores, None, store_plugin) self.assertRaises(exception.MissingArgumentError, models.SecretStores, "", store_plugin) def 
test_secret_stores_check_to_dict_fields(self): name = "pkcs11 backend" store_plugin = 'store_crypto' crypto_plugin = 'p11_crypto' ss = models.SecretStores(name, store_plugin, crypto_plugin, global_default=True) self.assertEqual(store_plugin, ss.to_dict_fields()['store_plugin']) self.assertEqual(crypto_plugin, ss.to_dict_fields()['crypto_plugin']) self.assertTrue(ss.to_dict_fields()['global_default']) self.assertEqual(models.States.ACTIVE, ss.to_dict_fields()['status']) self.assertEqual(name, ss.to_dict_fields()['name']) # check with required input only ss = models.SecretStores(name, store_plugin) self.assertEqual(store_plugin, ss.to_dict_fields()['store_plugin']) self.assertIsNone(ss.to_dict_fields()['crypto_plugin']) self.assertIsNone(ss.to_dict_fields()['global_default']) self.assertEqual(models.States.ACTIVE, ss.to_dict_fields()['status']) self.assertEqual(name, ss.to_dict_fields()['name']) class WhenCreatingNewProjectSecretStore(utils.BaseTestCase): def setUp(self): super(WhenCreatingNewProjectSecretStore, self).setUp() def test_new_project_secret_store(self): project_id = 'proj_123456' name = "db backend" store_plugin = 'store_crypto' crypto_plugin = 'simple_crypto' ss = models.SecretStores(name, store_plugin, crypto_plugin, global_default=True) ss.id = "ss_123456" project_ss = models.ProjectSecretStore(project_id, ss.id) self.assertEqual(project_id, project_ss.project_id) self.assertEqual(ss.id, project_ss.secret_store_id) self.assertEqual(models.States.ACTIVE, project_ss.status) def test_should_throw_exception_missing_project_id(self): self.assertRaises(exception.MissingArgumentError, models.ProjectSecretStore, None, "ss_123456") self.assertRaises(exception.MissingArgumentError, models.ProjectSecretStore, "", "ss_123456") def test_should_throw_exception_missing_secret_store_id(self): self.assertRaises(exception.MissingArgumentError, models.ProjectSecretStore, "proj_123456", None) self.assertRaises(exception.MissingArgumentError, models.ProjectSecretStore, "proj_123456", "") def test_project_secret_store_check_to_dict_fields(self): project_id = 'proj_123456' secret_store_id = 'ss_7689012' project_ss = models.ProjectSecretStore(project_id, secret_store_id) self.assertEqual(project_id, project_ss.to_dict_fields()['project_id']) self.assertEqual(secret_store_id, project_ss.to_dict_fields()['secret_store_id']) self.assertEqual(models.States.ACTIVE, project_ss.to_dict_fields()['status']) class WhenCreatingNewSecretConsumer(utils.BaseTestCase): def setUp(self): super(WhenCreatingNewSecretConsumer, self).setUp() self.secret_id = "12345secret" self.project_id = "12345project" self.service = "12345service" self.resource_type = "12345resource_type" self.resource_id = "12345resource_id" def test_new_secret_consumer(self): consumer = models.SecretConsumerMetadatum( self.secret_id, self.project_id, self.service, self.resource_type, self.resource_id ) self.assertEqual(self.secret_id, consumer.secret_id) self.assertEqual(self.project_id, consumer.project_id) self.assertEqual(self.service, consumer.service) self.assertEqual(self.resource_type, consumer.resource_type) self.assertEqual(self.resource_id, consumer.resource_id) self.assertEqual(models.States.ACTIVE, consumer.status) def test_to_dict_fields(self): consumer = models.SecretConsumerMetadatum( self.secret_id, self.project_id, self.service, self.resource_type, self.resource_id ) fields = consumer.to_dict_fields() self.assertEqual(self.service, fields["service"]) self.assertEqual(self.resource_type, fields["resource_type"]) 
self.assertEqual(self.resource_id, fields["resource_id"]) def test_should_raise_exception_when_missing_arguments(self): self.assertRaises( exception.MissingArgumentError, models.SecretConsumerMetadatum, None, self.project_id, self.service, self.resource_type, self.resource_id, ) self.assertRaises( exception.MissingArgumentError, models.SecretConsumerMetadatum, self.secret_id, None, self.service, self.resource_type, self.resource_id, ) self.assertRaises( exception.MissingArgumentError, models.SecretConsumerMetadatum, self.secret_id, self.project_id, None, self.resource_type, self.resource_id, ) self.assertRaises( exception.MissingArgumentError, models.SecretConsumerMetadatum, self.secret_id, self.project_id, self.service, None, self.resource_id, ) self.assertRaises( exception.MissingArgumentError, models.SecretConsumerMetadatum, self.secret_id, self.project_id, self.service, self.resource_type, None, ) if __name__ == '__main__': unittest.main() barbican-9.1.0.dev50/barbican/tests/model/repositories/0000775000175000017500000000000013616500640023143 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/tests/model/repositories/test_repositories_secret_metadata.py0000664000175000017500000001054113616500636032516 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
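# NOTE: a minimal, illustrative sketch (not part of the original suite) of
# the repository API exercised by the tests below; each call appears in this
# module, and 'secret' stands in for the model built by _create_base_secret():
#
#     repo = repositories.SecretUserMetadatumRepo()
#     repo.create_replace_user_metadata(secret.id, {'dog': 'poodle'})
#     repo.create_replace_user_metadatum(secret.id, 'cat', 'siamese')
#     repo.get_metadata_for_secret(secret.id)
#     # -> {'dog': 'poodle', 'cat': 'siamese'}
#     repo.delete_metadatum(secret.id, 'cat')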
from barbican.model import models from barbican.model import repositories from barbican.tests import database_utils from barbican.tests import utils @utils.parameterized_test_case class WhenTestingSecretMetadataRepository(database_utils.RepositoryTestCase): def setUp(self): super(WhenTestingSecretMetadataRepository, self).setUp() self.repo = repositories.SecretUserMetadatumRepo() self.test_metadata = { "dog": "poodle", "cat": "siamese" } def _create_base_secret(self, project_id=None): # Set up the secret and needed base relationship secret_repo = repositories.get_secret_repository() session = secret_repo.get_session() if project_id is None: # don't re-create the project if it was created earlier project = models.Project() project.external_id = "keystone_project_id" project.save(session=session) project_id = project.id secret_model = models.Secret() secret_model.project_id = project_id secret = secret_repo.create_from(secret_model, session=session) secret.save(session=session) session.commit() return secret def test_create_and_get_metadata_for_secret(self): secret = self._create_base_secret() self.repo.create_replace_user_metadata(secret.id, self.test_metadata) metadata = self.repo.get_metadata_for_secret(secret.id) self.assertEqual(self.test_metadata, metadata) def test_get_metadata_invalid_secret(self): metadata = self.repo.get_metadata_for_secret("invalid_id") self.assertEqual({}, metadata) def test_create_user_metadatum(self): secret = self._create_base_secret() self.repo.create_replace_user_metadata(secret.id, self.test_metadata) # adds a new key self.repo.create_replace_user_metadatum(secret.id, 'lizard', 'green anole') self.test_metadata['lizard'] = 'green anole' metadata = self.repo.get_metadata_for_secret(secret.id) self.assertEqual(self.test_metadata, metadata) def test_replace_user_metadatum(self): secret = self._create_base_secret() self.repo.create_replace_user_metadata(secret.id, self.test_metadata) # updates existing key self.repo.create_replace_user_metadatum(secret.id, 'dog', 'rat terrier') self.test_metadata['dog'] = 'rat terrier' metadata = self.repo.get_metadata_for_secret(secret.id) self.assertEqual(self.test_metadata, metadata) def test_delete_user_metadatum(self): secret = self._create_base_secret() self.repo.create_replace_user_metadata(secret.id, self.test_metadata) # deletes existing key self.repo.delete_metadatum(secret.id, 'cat') del self.test_metadata['cat'] metadata = self.repo.get_metadata_for_secret(secret.id) self.assertEqual(self.test_metadata, metadata) def test_delete_secret_deletes_secret_metadata(self): secret = self._create_base_secret() self.repo.create_replace_user_metadata(secret.id, self.test_metadata) metadata = self.repo.get_metadata_for_secret(secret.id) self.assertEqual(self.test_metadata, metadata) # deletes existing secret secret.delete() metadata = self.repo.get_metadata_for_secret(secret.id) self.assertEqual({}, metadata) barbican-9.1.0.dev50/barbican/tests/model/repositories/test_repositories_consumers.py0000664000175000017500000001527513616500636031410 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and # limitations under the License. import six from barbican.common import exception from barbican.model import models from barbican.model import repositories from barbican.tests import database_utils as utils class WhenTestingContainerConsumerRepository(utils.RepositoryTestCase): def setUp(self): super(WhenTestingContainerConsumerRepository, self).setUp() self.repo = repositories.ContainerConsumerRepo() self.repo_container = repositories.ContainerRepo() def test_should_update_with_duplicate_consumer(self): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) container = models.Container() container.project_id = project.id container.save(session=session) # Create a consumer. consumer = models.ContainerConsumerMetadatum( container.id, project.id, {'name': 'name', 'URL': 'www.foo.com'}) consumer.save(session=session) # Commit things so far, because the 'create_or_update_from' call below # handles consumer metadata whose composite key already exists by # rolling back this session's transaction, which would otherwise # remove the items added above and result in a not-found error below. session.commit() # Try to create a consumer on the container...should re-use the # one added above. consumer2 = models.ContainerConsumerMetadatum( container.id, project.id, {'name': 'name', 'URL': 'www.foo.com'}) self.repo.create_or_update_from(consumer2, container, session=session) container2 = self.repo_container.get( container.id, project.external_id, session=session) self.assertEqual(1, len(container2.consumers)) def test_should_raise_constraint_create_same_composite_key_no_id(self): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) container = models.Container() container.project_id = project.id container.save(session=session) # Create a consumer. consumer = models.ContainerConsumerMetadatum( container.id, project.id, {'name': 'name', 'URL': 'www.foo.com'}) consumer.save(session=session) # Commit things so far, because the 'create_from' call below # handles consumer metadata whose composite key already exists by # rolling back this session's transaction, which would otherwise # remove the items added above and result in a not-found error below. session.commit() # Create a new entity with the same composite key as the first one. 
consumer2 = models.ContainerConsumerMetadatum( container.id, project.id, {'name': 'name', 'URL': 'www.foo.com'}) exception_result = self.assertRaises( exception.ConstraintCheck, self.repo.create_from, consumer2, session=session) self.assertIn( "SQL constraint check failed", six.text_type(exception_result)) def test_should_raise_no_result_found_get_container_id(self): session = self.repo.get_session() self.assertRaises( exception.NotFound, self.repo.get_by_container_id, "my container id", session=session, suppress_exception=False) def test_should_raise_no_result_found_get_by_values_no_deleted(self): session = self.repo.get_session() self.assertRaises( exception.NotFound, self.repo.get_by_values, "my container id", "name", "url", session=session, suppress_exception=False, show_deleted=False) def test_should_raise_no_result_found_get_by_values_show_deleted(self): session = self.repo.get_session() self.assertRaises( exception.NotFound, self.repo.get_by_values, "my container id", "name", "url", session=session, suppress_exception=False, show_deleted=True) def test_should_get_count_zero(self): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) session.commit() count = self.repo.get_count(project.id, session=session) self.assertEqual(0, count) def test_should_get_count_one(self): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) container = models.Container() container.project_id = project.id container.save(session=session) consumer = models.ContainerConsumerMetadatum( container.id, project.id, {'name': 'name', 'URL': 'www.foo.com'}) consumer.save(session=session) session.commit() count = self.repo.get_count(project.id, session=session) self.assertEqual(1, count) def test_should_get_count_one_after_delete(self): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) container = models.Container() container.project_id = project.id container.save(session=session) consumer = models.ContainerConsumerMetadatum( container.id, project.id, {'name': 'name1', 'URL': 'www.foo.com'}) consumer.save(session=session) consumer = models.ContainerConsumerMetadatum( container.id, project.id, {'name': 'name2', 'URL': 'www.foo.com'}) consumer.save(session=session) session.commit() count = self.repo.get_count(project.id, session=session) self.assertEqual(2, count) self.repo.delete_entity_by_id(consumer.id, "my keystone id", session=session) session.commit() count = self.repo.get_count(project.id, session=session) self.assertEqual(1, count) barbican-9.1.0.dev50/barbican/tests/model/repositories/test_repositories_quotas.py0000664000175000017500000002522313616500636030710 0ustar sahidsahid00000000000000# Copyright (c) 2015 Cisco Systems # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
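# NOTE: a minimal, illustrative sketch (not part of the original suite) of
# the repository exercised by the tests below; the calls mirror this module,
# and get_by_create_date() returns an (entities, offset, limit, total) tuple
# used for paging:
#
#     repo = repositories.ProjectQuotasRepo()
#     repo.create_or_update_by_project_id(project.id,
#                                         {'secrets': 101, 'orders': 102},
#                                         session=session)
#     quotas = repo.get_by_external_project_id(project.external_id,
#                                              session=session)
#     entities, offset, limit, total = repo.get_by_create_date(
#         session=session, suppress_exception=True)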
import unittest from barbican.common import exception from barbican.model import models from barbican.model import repositories from barbican.tests import database_utils class WhenTestingProjectQuotasRepo(database_utils.RepositoryTestCase): def setUp(self): super(WhenTestingProjectQuotasRepo, self).setUp() self.project_quotas_repo = repositories.ProjectQuotasRepo() self.session = self.project_quotas_repo.get_session() self.project_1 = models.Project() self.project_1.id = '11111' self.project_1.external_id = '44444' self.project_1.save(session=self.session) self.project_2 = models.Project() self.project_2.id = '22222' self.project_2.external_id = '55555' self.project_2.save(session=self.session) self.project_3 = models.Project() self.project_3.id = '33333' self.project_3.external_id = '66666' self.project_3.save(session=self.session) self.parsed_project_quotas_1 = { 'secrets': 101, 'orders': 102, 'containers': 103, 'consumers': 105, 'cas': 106} self.parsed_project_quotas_2 = { 'secrets': 201, 'orders': 202, 'containers': 203, 'consumers': 205, 'cas': 206} self.parsed_project_quotas_3 = { 'secrets': 301, 'containers': 303, 'consumers': 305} def test_get_list_of_one_project_quotas(self): self.project_quotas_repo.create_or_update_by_project_id( self.project_1.id, self.parsed_project_quotas_1, session=self.session) self.session.commit() retrieved_project_quotas, offset, limit, total =\ self.project_quotas_repo.get_by_create_date(session=self.session) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(1, total) self.assertEqual([self.project_1.id], [s.project_id for s in retrieved_project_quotas]) self.assertEqual([self.project_1.external_id], [s.project.external_id for s in retrieved_project_quotas]) self.assertEqual([101], [s.secrets for s in retrieved_project_quotas]) self.assertEqual([102], [s.orders for s in retrieved_project_quotas]) self.assertEqual([103], [s.containers for s in retrieved_project_quotas]) self.assertEqual([105], [s.consumers for s in retrieved_project_quotas]) self.assertEqual([106], [s.cas for s in retrieved_project_quotas]) def test_get_list_of_two_project_quotas(self): self.project_quotas_repo.create_or_update_by_project_id( self.project_1.id, self.parsed_project_quotas_1, session=self.session) self.project_quotas_repo.create_or_update_by_project_id( self.project_2.id, self.parsed_project_quotas_2, session=self.session) self.session.commit() retrieved_project_quotas, offset, limit, total =\ self.project_quotas_repo.get_by_create_date(session=self.session) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(2, total) self.assertItemsEqual([self.project_1.id, self.project_2.id], [s.project_id for s in retrieved_project_quotas]) self.assertItemsEqual([self.project_1.external_id, self.project_2.external_id], [s.project.external_id for s in retrieved_project_quotas]) self.assertItemsEqual([101, 201], [s.secrets for s in retrieved_project_quotas]) self.assertItemsEqual([102, 202], [s.orders for s in retrieved_project_quotas]) self.assertItemsEqual([103, 203], [s.containers for s in retrieved_project_quotas]) self.assertItemsEqual([105, 205], [s.consumers for s in retrieved_project_quotas]) self.assertItemsEqual([106, 206], [s.cas for s in retrieved_project_quotas]) def test_should_raise_get_list_of_zero_project_quotas(self): self.assertRaises( exception.NotFound, self.project_quotas_repo.get_by_create_date, session=self.session, suppress_exception=False) def test_should_suppress_get_list_of_zero_project_quotas(self): 
retrieved_project_quotas, offset, limit, total =\ self.project_quotas_repo.get_by_create_date( session=self.session, suppress_exception=True) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(0, total) def test_get_specific_project_quotas(self): self.project_quotas_repo.create_or_update_by_project_id( self.project_1.id, self.parsed_project_quotas_1, session=self.session) self.session.commit() retrieved_project_quotas =\ self.project_quotas_repo.get_by_external_project_id( self.project_1.external_id, session=self.session) self.assertEqual(self.project_1.id, retrieved_project_quotas.project_id) self.assertEqual(self.project_1.external_id, retrieved_project_quotas.project.external_id) self.assertEqual(101, retrieved_project_quotas.secrets) self.assertEqual(102, retrieved_project_quotas.orders) self.assertEqual(103, retrieved_project_quotas.containers) self.assertEqual(105, retrieved_project_quotas.consumers) self.assertEqual(106, retrieved_project_quotas.cas) def test_project_quotas_with_some_defaults(self): self.project_quotas_repo.create_or_update_by_project_id( self.project_3.id, self.parsed_project_quotas_3, session=self.session) self.session.commit() retrieved_project_quotas =\ self.project_quotas_repo.get_by_external_project_id( self.project_3.external_id, session=self.session) self.assertEqual(self.project_3.id, retrieved_project_quotas.project_id) self.assertEqual(self.project_3.external_id, retrieved_project_quotas.project.external_id) self.assertEqual(301, retrieved_project_quotas.secrets) self.assertIsNone(retrieved_project_quotas.orders) self.assertEqual(303, retrieved_project_quotas.containers) self.assertEqual(305, retrieved_project_quotas.consumers) self.assertIsNone(retrieved_project_quotas.cas) def test_update_specific_project_quotas(self): self.project_quotas_repo.create_or_update_by_project_id( self.project_1.id, self.parsed_project_quotas_1, session=self.session) self.session.commit() self.project_quotas_repo.create_or_update_by_project_id( self.project_1.id, self.parsed_project_quotas_2, session=self.session) self.session.commit() retrieved_project_quotas =\ self.project_quotas_repo.get_by_external_project_id( self.project_1.external_id, session=self.session) self.assertEqual(self.project_1.id, retrieved_project_quotas.project_id) self.assertEqual(self.project_1.external_id, retrieved_project_quotas.project.external_id) self.assertEqual(201, retrieved_project_quotas.secrets) self.assertEqual(202, retrieved_project_quotas.orders) self.assertEqual(203, retrieved_project_quotas.containers) self.assertEqual(205, retrieved_project_quotas.consumers) self.assertEqual(206, retrieved_project_quotas.cas) def test_should_raise_get_missing_specific_project_quotas(self): self.assertRaises( exception.NotFound, self.project_quotas_repo.get_by_external_project_id, 'dummy', suppress_exception=False, session=self.session) def test_should_suppress_get_missing_specific_project_quotas(self): retrieved_project_quotas =\ self.project_quotas_repo.get_by_external_project_id( 'dummy', suppress_exception=True, session=self.session) self.assertIsNone(retrieved_project_quotas) def test_get_by_create_date_nothing(self): retrieved_project_quotas, offset, limit, total =\ self.project_quotas_repo.get_by_create_date( session=self.session, suppress_exception=True) self.assertEqual([], retrieved_project_quotas) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(0, total) def test_should_delete(self): self.project_quotas_repo.create_or_update_by_project_id( 
self.project_1.id, self.parsed_project_quotas_1, session=self.session) self.session.commit() self.project_quotas_repo.delete_by_external_project_id( self.project_1.external_id, session=self.session) def test_should_raise_delete_not_found(self): self.assertRaises( exception.NotFound, self.project_quotas_repo.delete_by_external_project_id, 'dummy', session=self.session) def test_should_suppress_delete_not_found(self): self.project_quotas_repo.delete_by_external_project_id( 'dummy', suppress_exception=True, session=self.session) def test_do_entity_name(self): self.assertEqual("ProjectQuotas", self.project_quotas_repo._do_entity_name()) def test_should_raise_not_found_get_by_entity_id(self): self.assertRaises( exception.NotFound, self.project_quotas_repo.get, 'dummy', session=self.session) if __name__ == '__main__': unittest.main() barbican-9.1.0.dev50/barbican/tests/model/repositories/test_repositories_projects.py0000664000175000017500000000336513616500636031230 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from barbican.common import exception from barbican.model import models from barbican.model import repositories from barbican.tests import database_utils class WhenTestingProjectRepository(database_utils.RepositoryTestCase): def setUp(self): super(WhenTestingProjectRepository, self).setUp() self.repo = repositories.ProjectRepo() def test_should_create_retrieve_deleted_project(self): session = self.repo.get_session() project = models.Project() project.keystone_id = 'my keystone id' project.status = models.States.ACTIVE self.repo.create_from(project, session=session) self.assertIsNotNone(project.id) self.assertFalse(project.deleted) project_get = self.repo.get(project.id) self.assertEqual(project.id, project_get.id) self.repo.delete_entity_by_id(project.id, 'my keystone id') self.assertTrue(project.deleted) def test_should_raise_no_result_found(self): session = self.repo.get_session() self.assertRaises( exception.NotFound, self.repo.find_by_external_project_id, "my keystone id", session=session, suppress_exception=False) barbican-9.1.0.dev50/barbican/tests/model/repositories/test_repositories.py0000664000175000017500000003055413616500636027317 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
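# Illustrative sketch (never invoked by the suite) of the paging-parameter
# cleaning verified by the tests below: offsets that are missing or
# uncastable fall back to 0, and limits are clamped into
# [1, CONF.max_limit_paging], using the configured default when the limit
# is missing or uncastable.
def _example_clean_paging():
    from barbican.model import repositories

    # 'boom' cannot be cast to an int, so the offset falls back to 0;
    # a non-positive limit is clamped up to 1.
    offset, limit = repositories.clean_paging_values(
        offset_arg='boom', limit_arg=-1)
    return offset, limit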
import mock
import six
import sqlalchemy

from alembic import script as alembic_script

from barbican.common import config
from barbican.common import exception
from barbican.model.migration import commands as migration
from barbican.model import models
from barbican.model import repositories
from barbican.tests import database_utils
from barbican.tests import utils

from oslo_config import cfg


class WhenCleaningRepositoryPagingParameters(utils.BaseTestCase):

    def setUp(self):
        super(WhenCleaningRepositoryPagingParameters, self).setUp()
        self.CONF = config.CONF
        self.default_limit = self.CONF.default_limit_paging

    def test_parameters_not_assigned(self):
        """The cleaner should use defaults when params are not specified."""
        clean_offset, clean_limit = repositories.clean_paging_values()

        self.assertEqual(0, clean_offset)
        self.assertEqual(self.default_limit, clean_limit)

    def test_limit_as_none(self):
        """When Limit is set to None it should use the default limit."""
        offset = 0
        clean_offset, clean_limit = repositories.clean_paging_values(
            offset_arg=offset,
            limit_arg=None)

        self.assertEqual(offset, clean_offset)
        self.assertEqual(self.default_limit, clean_limit)

    def test_offset_as_none(self):
        """When Offset is set to None it should use an offset of 0."""
        clean_offset, clean_limit = repositories.clean_paging_values(
            offset_arg=None,
            limit_arg=self.default_limit)

        self.assertEqual(0, clean_offset)
        self.assertEqual(self.default_limit, clean_limit)

    def test_limit_as_uncastable_str(self):
        """When Limit cannot be cast to an int, expect the default."""
        clean_offset, clean_limit = repositories.clean_paging_values(
            offset_arg=0,
            limit_arg='boom')
        self.assertEqual(0, clean_offset)
        self.assertEqual(self.default_limit, clean_limit)

    def test_offset_as_uncastable_str(self):
        """When Offset cannot be cast to an int, it should be zero."""
        clean_offset, clean_limit = repositories.clean_paging_values(
            offset_arg='boom',
            limit_arg=self.default_limit)
        self.assertEqual(0, clean_offset)
        self.assertEqual(self.default_limit, clean_limit)

    def test_limit_is_less_than_one(self):
        """A limit below one should be clamped up to 1."""
        limit = -1
        clean_offset, clean_limit = repositories.clean_paging_values(
            offset_arg=1,
            limit_arg=limit)
        self.assertEqual(1, clean_offset)
        self.assertEqual(1, clean_limit)

    def test_limit_is_too_big(self):
        """Limit should max out at the configured value."""
        limit = self.CONF.max_limit_paging + 10
        clean_offset, clean_limit = repositories.clean_paging_values(
            offset_arg=1,
            limit_arg=limit)
        self.assertEqual(self.CONF.max_limit_paging, clean_limit)

    def test_offset_is_too_big(self):
        """When Offset exceeds sys.maxsize, it should be zero."""
        clean_offset, clean_limit = repositories.clean_paging_values(
            offset_arg=265613988875874769338781322035779626829233452653394495,
            limit_arg=self.default_limit)
        self.assertEqual(0, clean_offset)
        self.assertEqual(self.default_limit, clean_limit)


class WhenInvokingExceptionMethods(utils.BaseTestCase):

    def setUp(self):
        super(WhenInvokingExceptionMethods, self).setUp()
        self.CONF = config.CONF
        self.entity_id = '123456'
        self.entity_name = 'test_entity'

    def test_should_raise_for_entity_not_found(self):
        exception_result = self.assertRaises(
            exception.NotFound,
            repositories._raise_entity_not_found,
            self.entity_name,
            self.entity_id)

        self.assertEqual(
            "No test_entity found with ID 123456",
            six.text_type(exception_result))

    def test_should_raise_for_entity_id_not_found(self):
        exception_result = self.assertRaises(
            exception.NotFound,
            repositories._raise_entity_id_not_found,
            self.entity_id)

        self.assertEqual(
            "Entity ID
123456 not found", six.text_type(exception_result)) def test_should_raise_for_no_entities_found(self): exception_result = self.assertRaises( exception.NotFound, repositories._raise_no_entities_found, self.entity_name) self.assertEqual( "No entities of type test_entity found", six.text_type(exception_result)) class WhenTestingBaseRepository(database_utils.RepositoryTestCase): def setUp(self): super(WhenTestingBaseRepository, self).setUp() self.repo = repositories.BaseRepo() def test_should_raise_invalid_create_from_no_entity(self): exception_result = self.assertRaises( exception.Invalid, self.repo.create_from, None) self.assertEqual( "Must supply non-None Entity.", six.text_type(exception_result)) def test_should_raise_invalid_create_from_entity_with_id(self): entity = models.ModelBase() entity.id = '1234' exception_result = self.assertRaises( exception.Invalid, self.repo.create_from, entity) self.assertEqual( "Must supply Entity with id=None (i.e. new entity).", six.text_type(exception_result)) def test_should_raise_invalid_do_validate_no_status(self): exception_result = self.assertRaises( exception.Invalid, self.repo._do_validate, {}) self.assertEqual( "Entity status is required.", six.text_type(exception_result)) def test_should_raise_invalid_do_validate_bad_status(self): exception_result = self.assertRaises( exception.Invalid, self.repo._do_validate, dict(status='BOGUS_STATUS')) self.assertEqual( "Invalid status 'BOGUS_STATUS' for Entity.", six.text_type(exception_result)) class WhenTestingWrapDbError(utils.BaseTestCase): def setUp(self): super(WhenTestingWrapDbError, self).setUp() repositories.CONF.set_override("sql_max_retries", 0) repositories.CONF.set_override("sql_retry_interval", 0) @mock.patch('barbican.model.repositories.is_db_connection_error') def test_should_raise_operational_error_is_connection_error( self, mock_is_db_error): mock_is_db_error.return_value = True @repositories.wrap_db_error def test_function(): raise sqlalchemy.exc.OperationalError( 'statement', 'params', 'orig') self.assertRaises( sqlalchemy.exc.OperationalError, test_function) class WhenTestingGetEnginePrivate(utils.BaseTestCase): def setUp(self): super(WhenTestingGetEnginePrivate, self).setUp() repositories.CONF.set_override("sql_connection", "connection") @mock.patch('barbican.model.repositories._create_engine') def test_should_raise_value_exception_engine_create_failure( self, mock_create_engine): engine = mock.MagicMock() engine.connect.side_effect = ValueError('Abort!') mock_create_engine.return_value = engine exception_result = self.assertRaises( exception.BarbicanException, repositories._get_engine, None) self.assertEqual( 'Error configuring registry database with supplied ' 'sql_connection. Got error: Abort!', six.text_type(exception_result)) @mock.patch('barbican.model.repositories._create_engine') def test_should_complete_with_no_alembic_create_default_configs( self, mock_create_engine): repositories.CONF.set_override("db_auto_create", False) engine = mock.MagicMock() mock_create_engine.return_value = engine # Invoke method under test. 
repositories._get_engine(None) engine.connect.assert_called_once_with() mock_create_engine.assert_called_once_with( 'connection', idle_timeout=3600, max_pool_size=repositories.CONF.sql_pool_size, max_overflow=repositories.CONF.sql_pool_max_overflow ) @mock.patch('barbican.model.repositories._create_engine') def test_should_complete_with_no_alembic_create_pool_configs( self, mock_create_engine): repositories.CONF.set_override("db_auto_create", False) repositories.CONF.set_override( "sql_pool_class", "QueuePool") repositories.CONF.set_override("sql_pool_size", 22) repositories.CONF.set_override("sql_pool_max_overflow", 11) engine = mock.MagicMock() mock_create_engine.return_value = engine # Invoke method under test. repositories._get_engine(None) engine.connect.assert_called_once_with() mock_create_engine.assert_called_once_with( 'connection', idle_timeout=3600, max_pool_size=22, max_overflow=11 ) class WhenTestingAutoGenerateTables(utils.BaseTestCase): @mock.patch('barbican.model.migration.commands.upgrade') def test_should_complete_with_alembic_database_update( self, mock_commands_upgrade): tables = dict( alembic_version='version') # Mimic tables already created. engine = 'engine' # Invoke method under test. repositories._auto_generate_tables(engine, tables) mock_commands_upgrade.assert_called_once_with() class WhenTestingIsDbConnectionError(utils.BaseTestCase): def test_should_return_false_no_error_code_in_args(self): args = mock.MagicMock() args.find.return_value = -1 result = repositories.is_db_connection_error(args) self.assertFalse(result) def test_should_return_true_error_code_found_in_args(self): args = mock.MagicMock() args.find.return_value = 1 result = repositories.is_db_connection_error(args) self.assertTrue(result) class WhenTestingMigrations(utils.BaseTestCase): def setUp(self): super(WhenTestingMigrations, self).setUp() repositories.CONF.set_override("sql_connection", "connection") self.alembic_config = migration.init_config() self.alembic_config.barbican_config = cfg.CONF def test_no_downgrade(self): script_dir = alembic_script.ScriptDirectory.from_config( self.alembic_config) versions = [v for v in script_dir.walk_revisions(base='base', head='heads')] failed_revisions = [] for version in versions: if hasattr(version.module, 'downgrade'): failed_revisions.append(version.revision) if failed_revisions: self.fail('Migrations %s have downgrade' % failed_revisions) class DummyRepo(repositories.BaseRepo): """Repository for the increasing code coverage of unit tests.""" def get_session(self, session=None): return None def _do_entity_name(self): return "Dummy" def _do_build_get_query(self, entity_id, external_project_id, session): return None def _do_validate(self, values): pass def _build_get_project_entities_query(self, project_id, session): return None class WhenIncreasingRepositoryTestCoverage(utils.BaseTestCase): def test_get_count_should_return_zero(self): dummy_repo = DummyRepo() count = dummy_repo.get_count('dummy_project_id') self.assertEqual(0, count) def test_get_project_entities_should_return_empty(self): dummy_repo = DummyRepo() count = dummy_repo.get_project_entities('dummy_project_id') self.assertEqual([], count) barbican-9.1.0.dev50/barbican/tests/model/repositories/test_repositories_secret_consumers.py0000664000175000017500000001151413616500636032755 0ustar sahidsahid00000000000000# Copyright (c) 2019 Red Hat, inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import six from barbican.common import exception from barbican.model import models from barbican.model import repositories from barbican.tests import database_utils as utils class WhenTestingSecretConsumerRepository(utils.RepositoryTestCase): def setUp(self): super(WhenTestingSecretConsumerRepository, self).setUp() self.secret_repo = repositories.get_secret_repository() self.consumer_repo = repositories.get_secret_consumer_repository() self.session = self.consumer_repo.get_session() self.project = utils.create_project(session=self.session) self.secret = self._create_secret() self.session.commit() def _create_secret(self): return utils.create_secret(self.project, session=self.session) def _create_consumer(self, secret=None, resource_id=0): if secret is None: secret = self.secret return utils.create_secret_consumer( secret, resource_id="resource_id_{}".format(resource_id), session=self.session, ) def _count_consumers(self): return self.consumer_repo.get_count( self.project.id, session=self.session ) def test_should_raise_no_result_found_get_by_secret_id(self): self.assertRaises( exception.NotFound, self.consumer_repo.get_by_secret_id, self.secret.id, session=self.session, ) def test_get_by_secret_id(self): for resource_id in [1, 2, 3]: self._create_consumer(resource_id=resource_id) self.assertEqual( 3, self.consumer_repo.get_by_secret_id(self.secret.id)[3] ) def test_should_raise_no_result_found_get_by_resource_id(self): self.assertRaises( exception.NotFound, self.consumer_repo.get_by_resource_id, "my resource id", session=self.session, ) def test_get_by_resource_id(self): secret1 = self._create_secret() secret2 = self._create_secret() secret3 = self._create_secret() for secret in [secret1, secret2, secret3]: self._create_consumer(secret=secret) self.assertEqual( 3, self.consumer_repo.get_by_resource_id("resource_id_0")[3] ) def test_should_update_with_duplicate_consumer(self): consumer1 = self._create_consumer() self.assertEqual(1, len(self.secret.consumers)) # Commit things so far, because the 'create_or_update_from' call below # will handle consumer metadata with same composite key items already # existing, and then rollback this session's transaction, which would # remove the items added above and result in a not-found error below. 
self.session.commit() consumer2 = models.SecretConsumerMetadatum( secret_id=consumer1.secret_id, project_id=consumer1.project_id, service=consumer1.service, resource_type=consumer1.resource_type, resource_id=consumer1.resource_id, ) self.consumer_repo.create_or_update_from( consumer2, self.secret, self.session ) secret = self.secret_repo.get_secret_by_id( self.secret.id, session=self.session ) self.assertEqual(1, len(secret.consumers)) def test_should_raise_constraint_create_same_composite_key_no_id(self): self._create_consumer() exception_result = self.assertRaises( exception.ConstraintCheck, self._create_consumer ) self.assertIn( "SQL constraint check failed", six.text_type(exception_result) ) def test_should_get_count_zero(self): self.assertEqual(0, self._count_consumers()) def test_should_get_count_one(self): self._create_consumer() self.assertEqual(1, self._count_consumers()) def test_should_get_count_one_after_delete(self): consumer1 = self._create_consumer(resource_id=1) self._create_consumer(resource_id=2) self.assertEqual(2, self._count_consumers()) self.consumer_repo.delete_entity_by_id( consumer1.id, consumer1.project_id, session=self.session ) self.assertEqual(1, self._count_consumers()) barbican-9.1.0.dev50/barbican/tests/model/repositories/test_repositories_orders.py0000664000175000017500000000752513616500636030677 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
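# Illustrative sketch (defined only, never called): the create/count/delete
# cycle asserted by the tests below.  It assumes a Project row and an open
# session like those each test builds for itself.
def _example_order_lifecycle(project, session):
    from barbican.model import models
    from barbican.model import repositories

    repo = repositories.OrderRepo()

    order = models.Order()
    order.project_id = project.id
    repo.create_from(order, session=session)
    session.commit()

    # get_count reflects the new order; deleting by id (scoped to the
    # project's external id) decrements it again.
    count = repo.get_count(project.id, session=session)
    repo.delete_entity_by_id(order.id, project.external_id, session=session)
    session.commit()
    return count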
from barbican.common import config from barbican.common import exception from barbican.model import models from barbican.model import repositories from barbican.tests import database_utils class WhenTestingOrderRepository(database_utils.RepositoryTestCase): def setUp(self): super(WhenTestingOrderRepository, self).setUp() self.repo = repositories.OrderRepo() def test_should_raise_no_result_found_no_exception(self): session = self.repo.get_session() entities, offset, limit, total = self.repo.get_by_create_date( "my keystone id", session=session, suppress_exception=True) self.assertEqual([], entities) self.assertEqual(0, offset) self.assertEqual(config.CONF.default_limit_paging, limit) self.assertEqual(0, total) def test_should_raise_no_result_found_with_exceptions(self): session = self.repo.get_session() self.assertRaises( exception.NotFound, self.repo.get_by_create_date, "my keystone id", session=session, suppress_exception=False) def test_get_order(self): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) order = models.Order() order.project_id = project.id self.repo.create_from(order, session=session) session.commit() order_from_get = self.repo.get( order.id, external_project_id="my keystone id", session=session, ) self.assertEqual(order.id, order_from_get.id) def test_should_get_count_zero(self): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) session.commit() count = self.repo.get_count(project.id, session=session) self.assertEqual(0, count) def test_should_get_count_one(self): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) order_model = models.Order() order_model.project_id = project.id self.repo.create_from(order_model, session=session) session.commit() count = self.repo.get_count(project.id, session=session) self.assertEqual(1, count) def test_should_get_count_one_after_delete(self): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) order_model = models.Order() order_model.project_id = project.id self.repo.create_from(order_model, session=session) order_model = models.Order() order_model.project_id = project.id self.repo.create_from(order_model, session=session) session.commit() count = self.repo.get_count(project.id, session=session) self.assertEqual(2, count) self.repo.delete_entity_by_id(order_model.id, "my keystone id", session=session) session.commit() count = self.repo.get_count(project.id, session=session) self.assertEqual(1, count) barbican-9.1.0.dev50/barbican/tests/model/repositories/test_repositories_secret_stores.py0000664000175000017500000004431513616500636032263 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
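# Illustrative sketch (never invoked): registering a secret store row, the
# same pattern the _create_secret_store helpers below use.  The plugin
# names here are example values, not required configuration.
def _example_register_secret_store():
    from barbican.model import models
    from barbican.model import repositories

    repo = repositories.get_secret_stores_repository()
    session = repo.get_session()

    model = models.SecretStores(name='example backend',
                                store_plugin='store_crypto',
                                crypto_plugin='simple_crypto',
                                global_default=False)
    store = repo.create_from(model, session=session)
    store.save(session=session)
    session.commit()
    return store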
from oslo_utils import uuidutils from barbican.common import exception from barbican.model import models from barbican.model import repositories from barbican.tests import database_utils import six class WhenTestingSecretStoresRepo(database_utils.RepositoryTestCase): def setUp(self): super(WhenTestingSecretStoresRepo, self).setUp() self.s_stores_repo = repositories.get_secret_stores_repository() self.def_name = "PKCS11 HSM" self.def_store_plugin = "store_crypto" self.def_crypto_plugin = "p11_crypto" self.default_secret_store = self._create_secret_store( self.def_name, self.def_store_plugin, self.def_crypto_plugin, True) def _create_secret_store(self, name, store_plugin, crypto_plugin=None, global_default=None): session = self.s_stores_repo.get_session() s_stores_model = models.SecretStores(name=name, store_plugin=store_plugin, crypto_plugin=crypto_plugin, global_default=global_default) s_stores = self.s_stores_repo.create_from(s_stores_model, session=session) s_stores.save(session=session) session.commit() return s_stores def test_get_by_entity_id(self): session = self.s_stores_repo.get_session() s_stores = self.s_stores_repo.get(self.default_secret_store.id, session=session) self.assertIsNotNone(s_stores) self.assertEqual(self.def_store_plugin, s_stores.store_plugin) self.assertEqual(self.def_crypto_plugin, s_stores.crypto_plugin) self.assertTrue(s_stores.global_default) self.assertEqual(models.States.ACTIVE, s_stores.status) def test_should_raise_notfound_exception_get_by_entity_id(self): self.assertRaises(exception.NotFound, self.s_stores_repo.get, "invalid_id", suppress_exception=False) def test_delete_entity_by_id(self): session = self.s_stores_repo.get_session() s_stores = self.s_stores_repo.get(self.default_secret_store.id, session=session) self.assertIsNotNone(s_stores) self.s_stores_repo.delete_entity_by_id(self.default_secret_store.id, None, session=session) s_stores = self.s_stores_repo.get(self.default_secret_store.id, suppress_exception=True, session=session) self.assertIsNone(s_stores) def test_get_all(self): session = self.s_stores_repo.get_session() all_stores = self.s_stores_repo.get_all(session=session) self.assertIsNotNone(all_stores) self.assertEqual(1, len(all_stores)) self._create_secret_store("db backend", "store_crypto", "simple_crypto", False) all_stores = self.s_stores_repo.get_all(session=session) self.assertEqual(2, len(all_stores)) self.assertEqual("simple_crypto", all_stores[1].crypto_plugin) self.assertEqual("store_crypto", all_stores[1].store_plugin) self.assertEqual("db backend", all_stores[1].name) self.assertEqual(False, all_stores[1].global_default) def test_no_data_case_for_get_all(self): self.s_stores_repo.delete_entity_by_id(self.default_secret_store.id, None) session = self.s_stores_repo.get_session() all_stores = self.s_stores_repo.get_all(session=session) self.assertEqual([], all_stores) def test_get_all_check_sorting_order(self): """Check that all stores are sorted in ascending creation time """ session = self.s_stores_repo.get_session() self._create_secret_store("second_name", "second_store", "second_crypto", False) m_stores = self._create_secret_store("middle_name", "middle_store", "middle_crypto", False) self._create_secret_store("last_name", "last_store", "last_crypto", False) all_stores = self.s_stores_repo.get_all(session=session) self.assertIsNotNone(all_stores) self.assertEqual(4, len(all_stores)) # returned list is sorted by created_at field so check for last entry self.assertEqual("last_crypto", all_stores[3].crypto_plugin) 
self.assertEqual("last_store", all_stores[3].store_plugin) self.assertEqual("last_name", all_stores[3].name) self.assertEqual(False, all_stores[3].global_default) # Now delete in between entry and create as new entry self.s_stores_repo.delete_entity_by_id(m_stores.id, None, session=session) all_stores = self.s_stores_repo.get_all(session=session) self._create_secret_store("middle_name", "middle_store", "middle_crypto", False) all_stores = self.s_stores_repo.get_all(session=session) # now newly created entry should be last one. self.assertEqual("middle_crypto", all_stores[3].crypto_plugin) self.assertEqual("middle_store", all_stores[3].store_plugin) self.assertEqual("middle_name", all_stores[3].name) self.assertEqual(False, all_stores[3].global_default) def test_should_raise_constraint_for_same_plugin_names(self): """Check for store and crypto plugin name combination uniqueness""" name = 'second_name' store_plugin = 'second_store' crypto_plugin = 'second_crypto' self._create_secret_store(name, store_plugin, crypto_plugin, False) self.assertRaises(exception.ConstraintCheck, self._create_secret_store, "thrid_name", store_plugin, crypto_plugin, False) def test_should_raise_constraint_for_same_names(self): """Check for secret store 'name' uniqueness""" name = 'Db backend' store_plugin = 'second_store' crypto_plugin = 'second_crypto' self._create_secret_store(name, store_plugin, crypto_plugin, False) self.assertRaises(exception.ConstraintCheck, self._create_secret_store, name, "another_store", "another_crypto", False) def test_do_entity_name(self): """Code coverage for entity_name which is used in case of exception. Raising duplicate error for store and crypto plugin combination """ name = "DB backend" store_plugin = 'second_store' crypto_plugin = 'second_crypto' self._create_secret_store(name, store_plugin, crypto_plugin, False) try: self._create_secret_store(name, store_plugin, crypto_plugin, False) self.assertFail() except exception.ConstraintCheck as ex: self.assertIn("SQL constraint check failed", six.text_type(ex)) class WhenTestingProjectSecretStoreRepo(database_utils.RepositoryTestCase): def setUp(self): super(WhenTestingProjectSecretStoreRepo, self).setUp() self.proj_store_repo = repositories.\ get_project_secret_store_repository() self.def_name = "PKCS11 HSM" self.def_store_plugin = "store_crypto" self.def_crypto_plugin = "p11_crypto" self.default_secret_store = self._create_secret_store( self.def_name, self.def_store_plugin, self.def_crypto_plugin, True) def _create_secret_store(self, name, store_plugin, crypto_plugin=None, global_default=None): s_stores_repo = repositories.get_secret_stores_repository() session = s_stores_repo.get_session() s_stores_model = models.SecretStores(name=name, store_plugin=store_plugin, crypto_plugin=crypto_plugin, global_default=global_default) s_stores = s_stores_repo.create_from(s_stores_model, session=session) s_stores.save(session=session) session.commit() return s_stores def _create_project(self): session = self.proj_store_repo.get_session() project = models.Project() project.external_id = ("keystone_project_id" + uuidutils.generate_uuid(dashed=False)) project.save(session=session) return project def _create_project_store(self, project_id, secret_store_id): session = self.proj_store_repo.get_session() proj_model = models.ProjectSecretStore(project_id, secret_store_id) proj_s_store = self.proj_store_repo.create_from(proj_model, session) proj_s_store.save(session=session) return proj_s_store def test_get_by_entity_id(self): """Tests for 'get' call by 
project secret store id""" project = self._create_project() proj_s_store = self._create_project_store(project.id, self.default_secret_store.id) session = self.proj_store_repo.get_session() s_stores = self.proj_store_repo.get(proj_s_store.id, session=session) self.assertIsNotNone(proj_s_store) self.assertEqual(project.id, proj_s_store.project_id) self.assertEqual(self.default_secret_store.id, proj_s_store.secret_store_id) self.assertEqual(models.States.ACTIVE, s_stores.status) # assert values via relationship self.assertEqual(self.default_secret_store.store_plugin, proj_s_store.secret_store.store_plugin) self.assertEqual(project.external_id, proj_s_store.project.external_id) def test_should_raise_notfound_exception_get_by_entity_id(self): self.assertRaises(exception.NotFound, self.proj_store_repo.get, "invalid_id", suppress_exception=False) def test_delete_entity_by_id(self): project = self._create_project() proj_s_store = self._create_project_store(project.id, self.default_secret_store.id) session = self.proj_store_repo.get_session() proj_s_store = self.proj_store_repo.get(proj_s_store.id, session=session) self.assertIsNotNone(proj_s_store) self.proj_store_repo.delete_entity_by_id(proj_s_store.id, None, session=session) proj_s_store = self.proj_store_repo.get(proj_s_store.id, suppress_exception=True, session=session) self.assertIsNone(proj_s_store) def test_should_raise_constraint_for_same_project_id(self): """Check preferred secret store is set only once for project""" project1 = self._create_project() name = "first_name" store_plugin = 'first_store' crypto_plugin = 'first_crypto' s_store1 = self._create_secret_store(name, store_plugin, crypto_plugin, False) # set preferred secret store for project1 self._create_project_store(project1.id, s_store1.id) name = "second_name" store_plugin = 'second_store' crypto_plugin = 'second_crypto' s_store2 = self._create_secret_store(name, store_plugin, crypto_plugin, False) self.assertRaises(exception.ConstraintCheck, self._create_project_store, project1.id, s_store2.id) def test_do_entity_name(self): """Code coverage for entity_name which is used in case of exception. 
        Raises a duplicate error when trying to set another entry for an
        existing project.
        """
        project1 = self._create_project()
        name = "first name"
        store_plugin = 'first_store'
        crypto_plugin = 'first_crypto'
        s_store1 = self._create_secret_store(name, store_plugin,
                                             crypto_plugin, False)
        # set preferred secret store for project1
        self._create_project_store(project1.id, s_store1.id)

        try:
            name = "second_name"
            store_plugin = 'second_store'
            crypto_plugin = 'second_crypto'
            s_store2 = self._create_secret_store(name, store_plugin,
                                                 crypto_plugin, False)
            self._create_project_store(project1.id, s_store2.id)
            self.fail("Expected ConstraintCheck was not raised")
        except exception.ConstraintCheck as ex:
            self.assertIn("SQL constraint check failed", six.text_type(ex))

    def test_get_secret_store_for_project(self):
        project1 = self._create_project()
        name = "first_name"
        store_plugin = 'first_store'
        crypto_plugin = 'first_crypto'
        s_store1 = self._create_secret_store(name, store_plugin,
                                             crypto_plugin, False)
        # set preferred secret store for project1
        proj_s_store = self._create_project_store(project1.id, s_store1.id)

        # get preferred secret store by barbican project id
        read_project_s_store = self.proj_store_repo.\
            get_secret_store_for_project(project1.id, None)
        self.assertEqual(proj_s_store.project_id,
                         read_project_s_store.project_id)
        self.assertEqual(proj_s_store.secret_store_id,
                         read_project_s_store.secret_store_id)

        # get preferred secret store by keystone project id
        read_project_s_store = self.proj_store_repo.\
            get_secret_store_for_project(None, project1.external_id)
        self.assertEqual(proj_s_store.project_id,
                         read_project_s_store.project_id)
        self.assertEqual(project1.external_id,
                         read_project_s_store.project.external_id)
        self.assertEqual(proj_s_store.secret_store_id,
                         read_project_s_store.secret_store_id)

    def test_raise_notfound_exception_get_secret_store_for_project(self):
        self.assertRaises(exception.NotFound,
                          self.proj_store_repo.get_secret_store_for_project,
                          "invalid_id", None,
                          suppress_exception=False)

    def test_with_exception_suppressed_get_secret_store_for_project(self):
        returned_value = self.proj_store_repo.\
            get_secret_store_for_project("invalid_id", None,
                                         suppress_exception=True)
        self.assertIsNone(returned_value)

    def test_get_project_entities(self):
        entities = self.proj_store_repo.get_project_entities(
            uuidutils.generate_uuid(dashed=False))
        self.assertEqual([], entities)

    def test_create_or_update_for_project(self):
        project1 = self._create_project()
        name = "first_name"
        store_plugin = 'first_store'
        crypto_plugin = 'first_crypto'
        s_store1 = self._create_secret_store(name, store_plugin,
                                             crypto_plugin, False)
        # assert that no preferred secret store is set for the project
entity = self.proj_store_repo.get_secret_store_for_project( project1.id, None, suppress_exception=True) self.assertIsNone(entity) # create/set preferred secret store now created_entity = self.proj_store_repo.create_or_update_for_project( project1.id, s_store1.id) entity = self.proj_store_repo.get_secret_store_for_project( project1.id, None, suppress_exception=False) self.assertIsNotNone(entity) # new preferred secret store self.assertEqual(project1.id, entity.project_id) self.assertEqual(s_store1.id, entity.secret_store_id) self.assertEqual(store_plugin, entity.secret_store.store_plugin) self.assertEqual(crypto_plugin, entity.secret_store.crypto_plugin) self.assertEqual(name, entity.secret_store.name) name = 'second_name' store_plugin = 'second_store' crypto_plugin = 'second_crypto' s_store2 = self._create_secret_store(name, store_plugin, crypto_plugin, False) updated_entity = self.proj_store_repo.create_or_update_for_project( project1.id, s_store2.id) self.assertEqual(created_entity.id, updated_entity.id) self.assertEqual(s_store2.id, updated_entity.secret_store_id) def test_get_count_by_secret_store(self): project1 = self._create_project() name = "first_name" store_plugin = 'first_store' crypto_plugin = 'first_crypto' s_store1 = self._create_secret_store(name, store_plugin, crypto_plugin, False) count = self.proj_store_repo.get_count_by_secret_store(s_store1.id) self.assertEqual(0, count) # create/set preferred secret store now self.proj_store_repo.create_or_update_for_project(project1.id, s_store1.id) count = self.proj_store_repo.get_count_by_secret_store(s_store1.id) self.assertEqual(1, count) project2 = self._create_project() self.proj_store_repo.create_or_update_for_project(project2.id, s_store1.id) count = self.proj_store_repo.get_count_by_secret_store(s_store1.id) self.assertEqual(2, count) barbican-9.1.0.dev50/barbican/tests/model/repositories/test_repositories_order_retry_tasks.py0000664000175000017500000001157413616500636033145 0ustar sahidsahid00000000000000# Copyright (c) 2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
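# Illustrative sketch (never invoked): queueing an order retry task and
# fetching tasks whose retry_at time has arrived, mirroring the
# _create_retry_task/get_by_create_date pattern in the tests below.  The
# order/session arguments are assumed to come from fixtures like the ones
# database_utils provides.
def _example_queue_and_poll_retry(order, session):
    import datetime

    from barbican.model import models
    from barbican.model import repositories

    repo = repositories.OrderRetryTaskRepo()

    task = models.OrderRetryTask()
    task.order_id = order.id
    task.retry_task = u'retry-task'
    task.retry_at = datetime.datetime.utcnow()
    repo.create_from(task, session=session)
    session.commit()

    # Only tasks with retry_at at or before the given time are returned.
    entities, offset, limit, total = repo.get_by_create_date(
        only_at_or_before_this_date=datetime.datetime.utcnow(),
        session=session, suppress_exception=True)
    return entities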
import datetime import time from barbican.common import config from barbican.common import exception from barbican.model import models from barbican.model import repositories from barbican.tests import database_utils class WhenTestingOrderRetryTaskRepository(database_utils.RepositoryTestCase): def setUp(self): super(WhenTestingOrderRetryTaskRepository, self).setUp() self.date_time_now = datetime.datetime.utcnow() self.test_args = ['test', 'args'] self.test_kwargs = {'test': 1, 'kwargs': 2} self.repo = repositories.OrderRetryTaskRepo() self.order_repo = repositories.OrderRepo() def test_get_order_retry_task(self): session = self.repo.get_session() order_retry_task = self._create_retry_task(session) order_retry_task_from_get = self.repo.get( order_retry_task.id, session=session, ) self.assertEqual(order_retry_task.id, order_retry_task_from_get.id) self.assertEqual( self.date_time_now, order_retry_task_from_get.retry_at) self.assertEqual(u'retry-task', order_retry_task_from_get.retry_task) self.assertEqual(self.test_args, order_retry_task_from_get.retry_args) self.assertEqual(self.test_kwargs, order_retry_task_from_get.retry_kwargs) def test_get_order_retry_task_filtered_by_retry_time(self): session = self.repo.get_session() future_seconds = 3 date_time_future = ( self.date_time_now + datetime.timedelta(seconds=future_seconds) ) order_retry_task = self._create_retry_task( session, retry_at=date_time_future) # A retrieve by the current time should return no entries, as the only # retry record is set into the future. entities, offset, limit, total = self.repo.get_by_create_date( only_at_or_before_this_date=self.date_time_now, session=session, suppress_exception=True ) self.assertEqual(0, total) self.assertEqual([], entities) # Wait until the future time is the current time. time.sleep(2 * future_seconds) # Now, a retrieve by the current time should return our entry. entities, offset, limit, total = self.repo.get_by_create_date( only_at_or_before_this_date=datetime.datetime.utcnow(), session=session, suppress_exception=True ) self.assertEqual(1, total) # Verify that retry task record is what we put in originally. 
order_retry_task_from_get = entities[0] self.assertEqual(order_retry_task.id, order_retry_task_from_get.id) self.assertEqual(date_time_future, order_retry_task_from_get.retry_at) self.assertEqual(u'retry-task', order_retry_task_from_get.retry_task) self.assertEqual(self.test_args, order_retry_task_from_get.retry_args) self.assertEqual(self.test_kwargs, order_retry_task_from_get.retry_kwargs) def test_should_raise_no_result_found_no_exception(self): session = self.repo.get_session() entities, offset, limit, total = self.repo.get_by_create_date( session=session, suppress_exception=True) self.assertEqual([], entities) self.assertEqual(0, offset) self.assertEqual(config.CONF.default_limit_paging, limit) self.assertEqual(0, total) def test_should_raise_no_result_found_with_exceptions(self): session = self.repo.get_session() self.assertRaises( exception.NotFound, self.repo.get_by_create_date, session=session, suppress_exception=False) def _create_retry_task(self, session, retry_at=None): project = database_utils.create_project(session=session) order = database_utils.create_order(project, session=session) order_retry_task = models.OrderRetryTask() order_retry_task.order_id = order.id order_retry_task.retry_task = u'retry-task' order_retry_task.retry_at = retry_at or self.date_time_now order_retry_task.retry_args = self.test_args order_retry_task.retry_kwargs = self.test_kwargs self.repo.create_from(order_retry_task, session=session) session.commit() return order_retry_task barbican-9.1.0.dev50/barbican/tests/model/repositories/test_repositories_transport_keys.py0000664000175000017500000000267413616500636032470 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from barbican.common import exception from barbican.model import repositories from barbican.tests import database_utils class WhenTestingTransportKeyRepository(database_utils.RepositoryTestCase): def setUp(self): super(WhenTestingTransportKeyRepository, self).setUp() self.repo = repositories.TransportKeyRepo() def test_should_raise_no_result_found_with_plugin_name(self): session = self.repo.get_session() self.assertRaises( exception.NotFound, self.repo.get_by_create_date, plugin_name="plugin", session=session, suppress_exception=False) def test_should_raise_no_result_found_no_plugin_name(self): session = self.repo.get_session() self.assertRaises( exception.NotFound, self.repo.get_by_create_date, session=session, suppress_exception=False) barbican-9.1.0.dev50/barbican/tests/model/repositories/__init__.py0000664000175000017500000000000013616500636025247 0ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/tests/model/repositories/test_repositories_acls.py0000664000175000017500000005473613616500636030331 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from barbican.common import exception from barbican.model import models from barbican.model import repositories from barbican.tests import database_utils class TestACLMixin(object): def _assert_acl_users(self, user_ids, acls, acl_id, check_size=True): """Checks that all input users are present in matching acl users data. It also checks if number of acl users are same as input users when check_size flag is True. """ acls_map = self._map_id_to_acl(acls) acl_users = acls_map[acl_id].to_dict_fields()['users'] if check_size: self.assertEqual(len(user_ids), len(acl_users)) self.assertTrue(all(user_id in user_ids for user_id in acl_users)) def _map_id_to_acl(self, acls): """Provides dictionary of id and acl from acls list.""" m = {} for acl in acls: m[acl.id] = acl return m class WhenTestingSecretACLRepository(database_utils.RepositoryTestCase, TestACLMixin): def setUp(self): super(WhenTestingSecretACLRepository, self).setUp() self.acl_repo = repositories.get_secret_acl_repository() def _create_base_secret(self, project_id=None): # Setup the secret and needed base relationship secret_repo = repositories.get_secret_repository() session = secret_repo.get_session() if project_id is None: # don't re-create project if it created earlier project = models.Project() project.external_id = "keystone_project_id" project.save(session=session) project_id = project.id secret_model = models.Secret() secret_model.project_id = project_id secret = secret_repo.create_from(secret_model, session=session) secret.save(session=session) session.commit() return secret def test_get_by_secret_id(self): session = self.acl_repo.get_session() secret = self._create_base_secret() acls = self.acl_repo.get_by_secret_id(secret.id, session) self.assertEqual(0, len(acls)) acl1 = self.acl_repo.create_from(models.SecretACL(secret.id, 'read', True, ['u1', 'u2']), session) acls = self.acl_repo.get_by_secret_id(secret.id, session) self.assertEqual(1, len(acls)) self.assertEqual(acl1.id, acls[0].id) self.assertEqual('read', acls[0].operation) self._assert_acl_users(['u2', 'u1'], acls, acl1.id) def test_get_by_entity_id(self): session = self.acl_repo.get_session() secret = self._create_base_secret() acl1 = self.acl_repo.create_from(models.SecretACL( secret.id, 'read', True, ['u1', 'u2']), session) acl = self.acl_repo.get(acl1.id, session) self.assertIsNotNone(acl) self.assertEqual(acl1.id, acl.id) self.assertEqual('read', acl.operation) self._assert_acl_users(['u1', 'u2'], [acl], acl1.id) self.acl_repo.delete_entity_by_id(acl1.id, session) acl = self.acl_repo.get(acl1.id, session, suppress_exception=True) self.assertIsNone(acl) def test_should_raise_notfound_exception_get_by_entity_id(self): self.assertRaises(exception.NotFound, self.acl_repo.get, "invalid_id", suppress_exception=False) def test_create_or_replace_from_for_new_acls(self): """Check create_or_replace_from and get count call. It creates new acls with users and make sure that same users are returned when acls are queries by secret id. It uses get count to assert expected number of acls for that secret. 
""" session = self.acl_repo.get_session() secret = self._create_base_secret() acl1 = self.acl_repo.create_from(models.SecretACL( secret.id, 'read'), session) self.acl_repo.create_or_replace_from( secret, acl1, user_ids=['u1', 'u2'], session=session) acl2 = self.acl_repo.create_from(models.SecretACL( secret.id, 'write', False), session) self.acl_repo.create_or_replace_from( secret, acl2, user_ids=['u1', 'u2', 'u3'], session=session) acl3 = self.acl_repo.create_from(models.SecretACL( secret.id, 'delete'), session) self.acl_repo.create_or_replace_from( secret, acl3, user_ids=[], session=session) acls = self.acl_repo.get_by_secret_id(secret.id, session) self.assertEqual(3, len(acls)) id_map = self._map_id_to_acl(acls) self.assertTrue(id_map[acl1.id].project_access) self.assertFalse(id_map[acl2.id].project_access) self.assertEqual('read', id_map[acl1.id].operation) self.assertEqual('write', id_map[acl2.id].operation) self.assertEqual('delete', id_map[acl3.id].operation) # order of input users should not matter self._assert_acl_users(['u1', 'u2'], acls, acl1.id) self._assert_acl_users(['u2', 'u1'], acls, acl1.id) self._assert_acl_users(['u2', 'u1', 'u3'], acls, acl2.id) count = self.acl_repo.get_count(secret.id, session) self.assertEqual(3, count) self.assertEqual(count, len(acls)) def test_create_or_replace_from_with_none_or_blank_users(self): session = self.acl_repo.get_session() secret = self._create_base_secret() acl1 = self.acl_repo.create_from(models.SecretACL( secret.id, 'read'), session) self.acl_repo.create_or_replace_from( secret, acl1, user_ids=None, session=session) acl2 = self.acl_repo.create_from(models.SecretACL( secret.id, 'list'), session) self.acl_repo.create_or_replace_from( secret, acl1, user_ids=[], session=session) acls = self.acl_repo.get_by_secret_id(secret.id, session) id_map = self._map_id_to_acl(acls) self.assertIsNone(id_map[acl1.id].to_dict_fields().get('users')) self.assertIsNone(id_map[acl2.id].to_dict_fields().get('users')) def test_create_or_replace_from_for_existing_acls(self): """Check create_or_replace_from and get count call. It modifies existing acls with users and make sure that updated users and project_access flag changes are returned when acls are queries by secret id. It uses get count to assert expected number of acls for that secret. 
""" session = self.acl_repo.get_session() secret = self._create_base_secret() acl1 = self.acl_repo.create_from(models.SecretACL( secret.id, 'read'), session) self.acl_repo.create_or_replace_from( secret, acl1, user_ids=['u1', 'u2'], session=session) acl2 = self.acl_repo.create_from(models.SecretACL( secret.id, 'write'), session) self.acl_repo.create_or_replace_from( secret, acl2, user_ids=['u1', 'u2', 'u3'], session=session) acl3 = self.acl_repo.create_from(models.SecretACL( secret.id, 'list'), session) self.acl_repo.create_or_replace_from( secret, acl3, user_ids=[], session=session) acls = self.acl_repo.get_by_secret_id(secret.id, session) self.assertEqual(3, len(acls)) id_map = self._map_id_to_acl(acls) # replace users in existing acls id_map[acl1.id].project_access = False self.acl_repo.create_or_replace_from( secret, id_map[acl1.id], user_ids=['u5'], session=session) self.acl_repo.create_or_replace_from( secret, id_map[acl2.id], user_ids=['u1', 'u2', 'u3', 'u4'], session=session) self.acl_repo.create_or_replace_from( secret, id_map[acl3.id], user_ids=['u1', 'u2', 'u4'], session=session) session.commit() # commit the changes made so far acls = self.acl_repo.get_by_secret_id(secret.id, session) id_map = self._map_id_to_acl(acls) self.assertEqual(3, len(acls)) self.assertFalse(id_map[acl1.id].project_access) self.assertTrue(id_map[acl2.id].project_access) self.assertTrue(id_map[acl3.id].project_access) self._assert_acl_users(['u5'], acls, acl1.id) self._assert_acl_users(['u1', 'u2', 'u3', 'u4'], acls, acl2.id) self._assert_acl_users(['u1', 'u2', 'u4'], acls, acl3.id) def test_get_count(self): session = self.acl_repo.get_session() secret1 = self._create_base_secret() acl1 = self.acl_repo.create_from(models.SecretACL(secret1.id, 'read', None, ['u1', 'u2']), session) self.acl_repo.create_or_replace_from(secret1, acl1) secret2 = self._create_base_secret(secret1.project.id) acl21 = self.acl_repo.create_from(models.SecretACL(secret2.id, 'read', None, ['u3', 'u4']), session) self.acl_repo.create_or_replace_from(secret2, acl21) acl22 = self.acl_repo.create_from(models.SecretACL(secret2.id, 'write', None, ['u5', 'u6']), session) self.acl_repo.create_or_replace_from(secret2, acl22) self.assertEqual(1, self.acl_repo.get_count(secret1.id)) self.assertEqual(2, self.acl_repo.get_count(secret2.id)) def test_delete_single_acl_and_count(self): session = self.acl_repo.get_session() secret = self._create_base_secret() acl1 = self.acl_repo.create_from(models.SecretACL(secret.id, 'read', None, ['u1', 'u2']), session) self.acl_repo.create_or_replace_from(secret, acl1) acl2 = self.acl_repo.create_from( models.SecretACL(secret.id, 'write'), session) self.acl_repo.create_or_replace_from( secret, acl2, user_ids=['u1', 'u2', 'u3']) acl3 = self.acl_repo.create_from(models.SecretACL( secret.id, 'list'), session) self.acl_repo.create_or_replace_from(secret, acl3, user_ids=['u1', 'u3']) count = self.acl_repo.get_count(secret.id) self.assertEqual(3, count) self.acl_repo.delete_entity_by_id(acl2.id, None) session.commit() self.assertEqual(2, len(secret.secret_acls)) deleted_acl = self.acl_repo.get(acl2.id, suppress_exception=True) self.assertIsNone(deleted_acl) acls = self.acl_repo.get_by_secret_id(secret.id) self.assertEqual(2, len(acls)) count = self.acl_repo.get_count(secret.id) self.assertEqual(2, count) def test_delete_acls_for_secret(self): session = self.acl_repo.get_session() secret = self._create_base_secret() acl1 = self.acl_repo.create_from(models.SecretACL( secret.id, 'read'), session) 
        self.acl_repo.create_or_replace_from(
            secret, acl1, user_ids=['u1', 'u2'], session=session)

        acl2 = self.acl_repo.create_from(models.SecretACL(
            secret.id, 'write'), session)
        self.acl_repo.create_or_replace_from(
            secret, acl2, user_ids=['u1', 'u2', 'u3'], session=session)

        self.acl_repo.delete_acls_for_secret(secret)

        acls = self.acl_repo.get_by_secret_id(secret.id)
        self.assertEqual(0, len(acls))


class WhenTestingContainerACLRepository(database_utils.RepositoryTestCase,
                                        TestACLMixin):

    def setUp(self):
        super(WhenTestingContainerACLRepository, self).setUp()
        self.acl_repo = repositories.get_container_acl_repository()

    def _create_base_container(self, project_id=None):
        # Set up the container and the needed base relationship.
        container_repo = repositories.get_container_repository()
        session = container_repo.get_session()

        if project_id is None:
            project = models.Project()
            project.external_id = "keystone_project_id"
            project.save(session=session)
            project_id = project.id

        container = models.Container()
        container.project_id = project_id
        container.save(session=session)
        session.commit()
        return container

    def test_get_by_container_id(self):
        session = self.acl_repo.get_session()
        container = self._create_base_container()

        acls = self.acl_repo.get_by_container_id(container.id, session)
        self.assertEqual(0, len(acls))

        acl1 = self.acl_repo.create_from(models.ContainerACL(container.id,
                                                             'read', True,
                                                             ['u1', 'u2']),
                                         session)
        acls = self.acl_repo.get_by_container_id(container.id, session)

        self.assertEqual(1, len(acls))
        self.assertEqual(acl1.id, acls[0].id)
        self.assertEqual('read', acls[0].operation)
        self._assert_acl_users(['u1', 'u2'], acls, acl1.id)

    def test_get_by_entity_id(self):
        session = self.acl_repo.get_session()
        container = self._create_base_container()

        acl1 = self.acl_repo.create_from(models.ContainerACL(
            container.id, 'read', True, ['u1', 'u2']), session)
        acl = self.acl_repo.get(acl1.id, session)

        self.assertIsNotNone(acl)
        self.assertEqual(acl1.id, acl.id)
        self.assertEqual('read', acl.operation)
        self._assert_acl_users(['u1', 'u2'], [acl], acl1.id)

        self.acl_repo.delete_entity_by_id(acl1.id, session)
        acl = self.acl_repo.get(acl1.id, session, suppress_exception=True)
        self.assertIsNone(acl)

    def test_should_raise_notfound_exception_get_by_entity_id(self):
        self.assertRaises(exception.NotFound, self.acl_repo.get,
                          "invalid_id", suppress_exception=False)

    def test_create_or_replace_from_for_new_acls(self):
        """Check create_or_replace_from and get count call.

        It creates new ACLs with users and makes sure that the same users
        are returned when ACLs are queried by container id. It uses get
        count to assert the expected number of ACLs for that container.
        """
        session = self.acl_repo.get_session()
        container = self._create_base_container()

        acl1 = self.acl_repo.create_from(models.ContainerACL(
            container.id, 'read'), session)
        self.acl_repo.create_or_replace_from(
            container, acl1, user_ids=['u1', 'u2'], session=session)

        acl2 = self.acl_repo.create_from(models.ContainerACL(
            container.id, 'write', False), session)
        self.acl_repo.create_or_replace_from(
            container, acl2, user_ids=['u1', 'u2', 'u3'], session=session)

        acl3 = self.acl_repo.create_from(models.ContainerACL(
            container.id, 'list'), session)
        self.acl_repo.create_or_replace_from(
            container, acl3, user_ids=[], session=session)

        acls = self.acl_repo.get_by_container_id(container.id, session)
        self.assertEqual(3, len(acls))

        id_map = self._map_id_to_acl(acls)
        self.assertTrue(id_map[acl1.id].project_access)
        self.assertFalse(id_map[acl2.id].project_access)
        self.assertEqual('read', id_map[acl1.id].operation)
        self.assertEqual('write', id_map[acl2.id].operation)
        self.assertEqual('list', id_map[acl3.id].operation)

        # order of input users should not matter
        self._assert_acl_users(['u1', 'u2'], acls, acl1.id)
        self._assert_acl_users(['u2', 'u1'], acls, acl1.id)
        self._assert_acl_users(['u2', 'u1', 'u3'], acls, acl2.id)

        count = self.acl_repo.get_count(container.id, session)
        self.assertEqual(3, count)
        self.assertEqual(count, len(acls))

    def test_create_or_replace_from_with_none_or_blank_users(self):
        session = self.acl_repo.get_session()
        container = self._create_base_container()

        acl1 = self.acl_repo.create_from(models.ContainerACL(
            container.id, 'read'), session)
        self.acl_repo.create_or_replace_from(
            container, acl1, user_ids=None, session=session)

        acl2 = self.acl_repo.create_from(models.ContainerACL(
            container.id, 'write'), session)
        self.acl_repo.create_or_replace_from(
            container, acl2, user_ids=[], session=session)

        acls = self.acl_repo.get_by_container_id(container.id, session)
        id_map = self._map_id_to_acl(acls)
        self.assertIsNone(id_map[acl1.id].to_dict_fields().get('users'))
        self.assertIsNone(id_map[acl2.id].to_dict_fields().get('users'))

    def test_create_or_replace_from_for_existing_acls(self):
        """Check create_or_replace_from and get count call.

        It modifies existing ACLs with users and makes sure that the updated
        users and project_access flag changes are returned when ACLs are
        queried by container id. It uses get count to assert the expected
        number of ACLs for that container.
""" session = self.acl_repo.get_session() container = self._create_base_container() acl1 = self.acl_repo.create_from(models.ContainerACL( container.id, 'read'), session) self.acl_repo.create_or_replace_from( container, acl1, user_ids=['u1', 'u2'], session=session) acl2 = self.acl_repo.create_from(models.ContainerACL( container.id, 'write'), session) self.acl_repo.create_or_replace_from( container, acl2, user_ids=['u1', 'u2', 'u3'], session=session) acl3 = self.acl_repo.create_from(models.ContainerACL( container.id, 'list'), session) self.acl_repo.create_or_replace_from( container, acl3, user_ids=[], session=session) acls = self.acl_repo.get_by_container_id(container.id, session) self.assertEqual(3, len(acls)) id_map = self._map_id_to_acl(acls) # replace users in existing acls id_map[acl1.id].project_access = False self.acl_repo.create_or_replace_from( container, id_map[acl1.id], user_ids=['u5'], session=session) self.acl_repo.create_or_replace_from( container, id_map[acl2.id], user_ids=['u1', 'u2', 'u3', 'u4'], session=session) self.acl_repo.create_or_replace_from( container, id_map[acl3.id], user_ids=['u1', 'u2', 'u4'], session=session) session.commit() acls = self.acl_repo.get_by_container_id(container.id, session) id_map = self._map_id_to_acl(acls) self.assertEqual(3, len(acls)) self.assertFalse(id_map[acl1.id].project_access) self.assertTrue(id_map[acl2.id].project_access) self.assertTrue(id_map[acl3.id].project_access) self._assert_acl_users(['u5'], acls, acl1.id) self._assert_acl_users(['u1', 'u2', 'u3', 'u4'], acls, acl2.id) self._assert_acl_users(['u1', 'u2', 'u4'], acls, acl3.id) def test_get_count(self): session = self.acl_repo.get_session() container1 = self._create_base_container() acl1 = self.acl_repo.create_from(models.ContainerACL( container1.id, 'read', None, ['u1', 'u2']), session) self.acl_repo.create_or_replace_from(container1, acl1) container2 = self._create_base_container(container1.project_id) acl21 = self.acl_repo.create_from(models.ContainerACL( container2.id, 'read', None, ['u3', 'u4']), session) self.acl_repo.create_or_replace_from(container2, acl21) acl22 = self.acl_repo.create_from(models.ContainerACL( container2.id, 'write', None, ['u5', 'u6']), session) self.acl_repo.create_or_replace_from(container2, acl22) self.assertEqual(1, self.acl_repo.get_count(container1.id)) self.assertEqual(2, self.acl_repo.get_count(container2.id)) def test_delete_single_acl_and_count(self): session = self.acl_repo.get_session() container = self._create_base_container() acl1 = self.acl_repo.create_from(models.ContainerACL( container.id, 'read'), session) self.acl_repo.create_or_replace_from(container, acl1, user_ids=['u1', 'u2']) acl2 = self.acl_repo.create_from(models.ContainerACL( container.id, 'write'), session) self.acl_repo.create_or_replace_from(container, acl2, user_ids=['u1', 'u2', 'u3']) acl3 = self.acl_repo.create_from(models.ContainerACL( container.id, 'list'), session) self.acl_repo.create_or_replace_from(container, acl3, user_ids=['u1', 'u3']) count = self.acl_repo.get_count(container.id) self.assertEqual(3, count) self.acl_repo.delete_entity_by_id(acl2.id, None) session.commit() # commit the changes made so far self.assertEqual(2, len(container.container_acls)) deleted_acl = self.acl_repo.get(acl2.id, suppress_exception=True) self.assertIsNone(deleted_acl) acls = self.acl_repo.get_by_container_id(container.id) self.assertEqual(2, len(acls)) count = self.acl_repo.get_count(container.id) self.assertEqual(2, count) def test_delete_acls_for_secret(self): session = 
self.acl_repo.get_session() container = self._create_base_container() acl1 = self.acl_repo.create_from(models.ContainerACL( container.id, 'read'), session) self.acl_repo.create_or_replace_from( container, acl1, user_ids=['u1', 'u2'], session=session) acl2 = self.acl_repo.create_from(models.ContainerACL( container.id, 'write'), session) self.acl_repo.create_or_replace_from( container, acl2, user_ids=['u1', 'u2', 'u3'], session=session) self.acl_repo.delete_acls_for_container(container) acls = self.acl_repo.get_by_container_id(container.id) self.assertEqual(0, len(acls)) barbican-9.1.0.dev50/barbican/tests/model/repositories/test_repositories_containers.py0000664000175000017500000000726113616500636031543 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from barbican.common import exception from barbican.model import models from barbican.model import repositories from barbican.tests import database_utils class WhenTestingContainerRepository(database_utils.RepositoryTestCase): def setUp(self): super(WhenTestingContainerRepository, self).setUp() self.repo = repositories.ContainerRepo() def test_should_raise_no_result_found(self): session = self.repo.get_session() self.assertRaises( exception.NotFound, self.repo.get_by_create_date, "my keystone id", session=session, suppress_exception=False) def test_get_container_by_id(self): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) container = models.Container() container.project_id = project.id container.save(session=session) session.commit() db_container = self.repo.get_container_by_id(container.id) self.assertIsNotNone(db_container) def test_should_raise_notfound_exception(self): self.assertRaises(exception.NotFound, self.repo.get_container_by_id, "invalid_id", suppress_exception=False) def test_should_suppress_notfound_exception(self): self.assertIsNone(self.repo.get_container_by_id( "invalid_id", suppress_exception=True)) def test_should_get_count_zero(self): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) session.commit() count = self.repo.get_count(project.id, session=session) self.assertEqual(0, count) def test_should_get_count_one(self): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) container_model = models.Container() container_model.project_id = project.id self.repo.create_from(container_model, session=session) session.commit() count = self.repo.get_count(project.id, session=session) self.assertEqual(1, count) def test_should_get_count_one_after_delete(self): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) container_model = models.Container() container_model.project_id = project.id self.repo.create_from(container_model, session=session) container_model = models.Container() container_model.project_id = 
project.id self.repo.create_from(container_model, session=session) session.commit() count = self.repo.get_count(project.id, session=session) self.assertEqual(2, count) self.repo.delete_entity_by_id(container_model.id, "my keystone id", session=session) session.commit() count = self.repo.get_count(project.id, session=session) self.assertEqual(1, count) barbican-9.1.0.dev50/barbican/tests/model/repositories/test_repositories_certificate_authorities.py0000664000175000017500000004372613616500636034306 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import datetime from barbican.common import exception from barbican.common import resources as res from barbican.model import models from barbican.model import repositories from barbican.tests import database_utils class WhenTestingCertificateAuthorityRepo(database_utils.RepositoryTestCase): def setUp(self): super(WhenTestingCertificateAuthorityRepo, self).setUp() self.ca_repo = repositories.CertificateAuthorityRepo() expiration = (datetime.datetime.utcnow() + datetime.timedelta(minutes=10)) self.parsed_ca = {'plugin_name': 'dogtag_plugin', 'plugin_ca_id': 'ca_master', 'name': 'Dogtag CA', 'expiration': expiration.isoformat(), 'description': 'Master CA for Dogtag plugin', 'ca_signing_certificate': 'XXXXX', 'intermediates': 'YYYYY'} self.parsed_ca2 = {'plugin_name': 'symantec_plugin', 'plugin_ca_id': 'ca_master_2', 'name': 'Symantec CA2', 'expiration': expiration.isoformat(), 'description': 'Master CA for Dogtag plugin2', 'ca_signing_certificate': 'XXXXX', 'intermediates': 'YYYYY'} def _add_ca(self, parsed_ca, session): ca = self.ca_repo.create_from(models.CertificateAuthority(parsed_ca), session=session) return ca def test_get_by_create_date(self): session = self.ca_repo.get_session() ca = self._add_ca(self.parsed_ca, session) session.commit() retrieved_cas, offset, limit, total = self.ca_repo.get_by_create_date( session=session ) self.assertEqual([ca.id], [s.id for s in retrieved_cas]) self.assertEqual([ca.plugin_name], [s.plugin_name for s in retrieved_cas]) self.assertEqual( [self.parsed_ca['ca_signing_certificate']], [s.ca_meta['ca_signing_certificate'].value for s in retrieved_cas]) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(1, total) def test_get_by_create_date_with_plugin_name_filter(self): session = self.ca_repo.get_session() ca1 = self._add_ca(self.parsed_ca, session) self._add_ca(self.parsed_ca2, session) retrieved_cas, offset, limit, total = self.ca_repo.get_by_create_date( session=session, plugin_name=self.parsed_ca['plugin_name'] ) self.assertEqual([ca1.id], [s.id for s in retrieved_cas]) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(1, total) def test_get_by_create_date_with_plugin_ca_id_filter(self): session = self.ca_repo.get_session() ca1 = self._add_ca(self.parsed_ca, session) self._add_ca(self.parsed_ca2, session) retrieved_cas, offset, limit, total = self.ca_repo.get_by_create_date( session=session, plugin_ca_id=self.parsed_ca['plugin_ca_id'] ) 
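        # Only the first CA matches the plugin_ca_id filter, so the asserts
        # below expect a single result plus the default paging values
        # (offset 0, limit 10).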
self.assertEqual([ca1.id], [s.id for s in retrieved_cas]) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(1, total) def test_get_by_create_date_nothing(self): session = self.ca_repo.get_session() retrieved_cas, offset, limit, total = self.ca_repo.get_by_create_date( session=session, suppress_exception=True ) self.assertEqual([], retrieved_cas) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(0, total) def test_do_entity_name(self): self.assertEqual("CertificateAuthority", self.ca_repo._do_entity_name()) def test_should_raise_no_result_found(self): session = self.ca_repo.get_session() self.assertRaises( exception.NotFound, self.ca_repo.get_by_create_date, session=session, suppress_exception=False) def test_get_count_should_return_zero(self): session = self.ca_repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) session.commit() count = self.ca_repo.get_count(project.id, session=session) self.assertEqual(0, count) def test_get_count_should_return_one(self): session = self.ca_repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) ca_model = models.CertificateAuthority(self.parsed_ca) ca_model.project_id = project.id self.ca_repo.create_from(ca_model, session=session) session.commit() count = self.ca_repo.get_count(project.id, session=session) self.assertEqual(1, count) def test_get_count_should_return_one_after_delete(self): session = self.ca_repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) ca_model = models.CertificateAuthority(self.parsed_ca) ca_model.project_id = project.id self.ca_repo.create_from(ca_model, session=session) ca_model = models.CertificateAuthority(self.parsed_ca) ca_model.project_id = project.id self.ca_repo.create_from(ca_model, session=session) session.commit() count = self.ca_repo.get_count(project.id, session=session) self.assertEqual(2, count) self.ca_repo.delete_entity_by_id(ca_model.id, "my keystone id", session=session) session.commit() count = self.ca_repo.get_count(project.id, session=session) self.assertEqual(1, count) class WhenTestingProjectCARepo(database_utils.RepositoryTestCase): def setUp(self): super(WhenTestingProjectCARepo, self).setUp() self.ca_repo = repositories.CertificateAuthorityRepo() self.project_ca_repo = repositories.ProjectCertificateAuthorityRepo() expiration = (datetime.datetime.utcnow() + datetime.timedelta(minutes=10)) self.parsed_ca = {'plugin_name': 'dogtag_plugin', 'plugin_ca_id': 'ca_master', 'expiration': expiration.isoformat(), 'name': 'Dogtag CA', 'description': 'Master CA for Dogtag plugin', 'ca_signing_certificate': 'XXXXX', 'intermediates': 'YYYYY'} self.parsed_ca2 = {'plugin_name': 'symantec_plugin', 'plugin_ca_id': 'ca_master_2', 'expiration': expiration.isoformat(), 'name': 'Symantec CA2', 'description': 'Master CA for Dogtag plugin2', 'ca_signing_certificate': 'XXXXX', 'intermediates': 'YYYYY'} def _add_ca(self, parsed_ca, session): ca = self.ca_repo.create_from(models.CertificateAuthority(parsed_ca), session=session) return ca def _add_project(self, project_id, session): project = models.Project() project.external_id = project_id project.save(session=session) return project def _add_project_ca(self, project_id, ca_id, session): project_ca = self.project_ca_repo.create_from( models.ProjectCertificateAuthority(project_id, ca_id), session) return project_ca def test_get_by_create_date(self): 
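        # Link a project to a CA, then expect the association to be
        # returned by get_by_create_date() with the default paging values.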
session = self.ca_repo.get_session() ca = self._add_ca(self.parsed_ca, session) project = self._add_project("project_1", session) self._add_project_ca(project.id, ca.id, session) session.commit() retrieved_pcas, offset, limit, total = ( self.project_ca_repo.get_by_create_date(session=session)) self.assertEqual([ca.id], [s.ca_id for s in retrieved_pcas]) self.assertEqual([project.id], [s.project_id for s in retrieved_pcas]) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(1, total) def test_get_project_entities(self): session = self.ca_repo.get_session() ca = self._add_ca(self.parsed_ca, session) project = self._add_project("project_1", session) self._add_project_ca(project.id, ca.id, session) session.commit() retrieved_pcas = self.project_ca_repo.get_project_entities( project.id, session) self.assertEqual([ca.id], [s.ca_id for s in retrieved_pcas]) def test_get_by_create_date_with_ca_id_filter(self): session = self.ca_repo.get_session() ca = self._add_ca(self.parsed_ca, session) project = self._add_project("project_1", session) project_ca = self._add_project_ca(project.id, ca.id, session) ca2 = self._add_ca(self.parsed_ca2, session) project2 = self._add_project("project_2", session) self._add_project_ca(project2.id, ca2.id, session) session.commit() retrieved_pcas, offset, limit, total = ( self.project_ca_repo.get_by_create_date( session=session, ca_id=ca.id)) self.assertEqual([project_ca.id], [s.id for s in retrieved_pcas]) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(1, total) def test_get_by_create_date_nothing(self): session = self.project_ca_repo.get_session() retrieved_pcas, offset, limit, total = ( self.project_ca_repo.get_by_create_date( session=session, suppress_exception=True)) self.assertEqual([], retrieved_pcas) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(0, total) def test_do_entity_name(self): self.assertEqual("ProjectCertificateAuthority", self.project_ca_repo._do_entity_name()) def test_should_raise_no_result_found(self): session = self.project_ca_repo.get_session() self.assertRaises( exception.NotFound, self.project_ca_repo.get_by_create_date, session=session, suppress_exception=False) class WhenTestingPreferredCARepo(database_utils.RepositoryTestCase): def setUp(self): super(WhenTestingPreferredCARepo, self).setUp() self.ca_repo = repositories.CertificateAuthorityRepo() self.preferred_ca_repo = ( repositories.PreferredCertificateAuthorityRepo()) expiration = (datetime.datetime.utcnow() + datetime.timedelta(minutes=10)) expiration_later = (datetime.datetime.utcnow() + datetime.timedelta(days=10)) self.parsed_ca = {'plugin_name': 'dogtag_plugin', 'plugin_ca_id': 'ca_master', 'expiration': expiration.isoformat(), 'name': 'Dogtag CA', 'description': 'Master CA for Dogtag plugin', 'ca_signing_certificate': 'XXXXX', 'intermediates': 'YYYYY'} self.parsed_ca2 = {'plugin_name': 'symantec_plugin', 'plugin_ca_id': 'ca_master_2', 'expiration': expiration.isoformat(), 'name': 'Symantec CA2', 'description': 'Master CA for Dogtag plugin2', 'ca_signing_certificate': 'XXXXX', 'intermediates': 'YYYYY'} self.parsed_modified_ca = { 'plugin_name': 'dogtag_plugin', 'plugin_ca_id': 'ca_master', 'expiration': expiration_later.isoformat(), 'name': 'Dogtag CA', 'description': 'Updated Master CA for Dogtag plugin', 'ca_signing_certificate': 'XXXXX-updated-XXXXX', 'intermediates': 'YYYYY'} self.global_project = res.get_or_create_global_preferred_project() def _add_ca(self, parsed_ca, session): ca = 
self.ca_repo.create_from(models.CertificateAuthority(parsed_ca), session=session) return ca def _add_project(self, project_id, session): project = models.Project() project.external_id = project_id project.save(session=session) return project def _add_preferred_ca(self, project_id, ca_id, session): preferred_ca = self.preferred_ca_repo.create_from( models.PreferredCertificateAuthority(project_id, ca_id), session) return preferred_ca def _add_global_preferred_ca(self, ca_id, session): preferred_ca = self.preferred_ca_repo.create_from( models.PreferredCertificateAuthority( self.global_project.id, ca_id), session) return preferred_ca def test_get_by_create_date(self): session = self.ca_repo.get_session() ca = self._add_ca(self.parsed_ca, session) self._add_ca(self.parsed_ca2, session) project = self._add_project("project_1", session) self._add_preferred_ca(project.id, ca.id, session) session.commit() pca, offset, limit, total = self.preferred_ca_repo.get_by_create_date( session=session ) self.assertEqual([ca.id], [s.ca_id for s in pca]) self.assertEqual([project.id], [s.project_id for s in pca]) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(1, total) def test_get_by_create_date_with_params(self): session = self.ca_repo.get_session() ca = self._add_ca(self.parsed_ca, session) self._add_ca(self.parsed_ca2, session) project = self._add_project("project_1", session) self._add_preferred_ca(project.id, ca.id, session) session.commit() pca, offset, limit, total = self.preferred_ca_repo.get_by_create_date( session=session, project_id=project.id, ca_id=ca.id ) self.assertEqual([ca.id], [s.ca_id for s in pca]) self.assertEqual([project.id], [s.project_id for s in pca]) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(1, total) def test_get_project_entities(self): session = self.ca_repo.get_session() ca = self._add_ca(self.parsed_ca, session) self._add_ca(self.parsed_ca2, session) project = self._add_project("project_1", session) self._add_preferred_ca(project.id, ca.id, session) session.commit() pca = self.preferred_ca_repo.get_project_entities(project.id, session) self.assertEqual([ca.id], [s.ca_id for s in pca]) def test_get_nothing(self): session = self.preferred_ca_repo.get_session() retrieved_pcas, offset, limit, total = ( self.preferred_ca_repo.get_by_create_date( session=session, suppress_exception=True)) self.assertEqual([], retrieved_pcas) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(0, total) def test_do_entity_name(self): self.assertEqual("PreferredCertificateAuthority", self.preferred_ca_repo._do_entity_name()) def test_should_raise_no_result_found(self): session = self.preferred_ca_repo.get_session() self.assertRaises( exception.NotFound, self.preferred_ca_repo.get_by_create_date, session=session, suppress_exception=False) def test_should_raise_constraint_check(self): session = self.ca_repo.get_session() ca = self._add_ca(self.parsed_ca, session) ca2 = self._add_ca(self.parsed_ca2, session) project = self._add_project("project_1", session) self._add_preferred_ca(project.id, ca.id, session) self.assertRaises( exception.ConstraintCheck, self._add_preferred_ca, project.id, ca2.id, session) def test_set_global_preferred_ca(self): session = self.ca_repo.get_session() ca = self._add_ca(self.parsed_ca, session) self._add_global_preferred_ca(ca.id, session) session.commit() pca = self.preferred_ca_repo.get_project_entities( self.global_project.id, session) self.assertEqual([ca.id], [s.ca_id for s in pca]) def 
test_should_create(self): session = self.ca_repo.get_session() ca = self._add_ca(self.parsed_ca, session) project = self._add_project("project_1", session) self.preferred_ca_repo.create_or_update_by_project_id( project.id, ca.id) session.commit() def test_should_update(self): session = self.ca_repo.get_session() ca1 = self._add_ca(self.parsed_ca, session) ca2 = self._add_ca(self.parsed_ca2, session) project = self._add_project("project_1", session) self.preferred_ca_repo.create_or_update_by_project_id( project.id, ca1.id) session.commit() self.preferred_ca_repo.create_or_update_by_project_id( project.id, ca2.id) session.commit() barbican-9.1.0.dev50/barbican/tests/model/repositories/test_repositories_secrets.py0000664000175000017500000002627513616500636031054 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import datetime import fixtures import testtools from barbican.common import exception from barbican.model import models from barbican.model import repositories from barbican.plugin.interface import secret_store as ss from barbican.tests import database_utils from barbican.tests import fixture from barbican.tests import utils @utils.parameterized_test_case class WhenTestingSecretRepository(database_utils.RepositoryTestCase): dataset_for_filter_tests = { 'query_by_name': { 'secret_1_dict': dict(name="name1"), 'secret_2_dict': dict(name="name2"), 'query_dict': dict(name="name1") }, 'query_by_algorithm': { 'secret_1_dict': dict(algorithm="algorithm1"), 'secret_2_dict': dict(algorithm="algorithm2"), 'query_dict': dict(alg="algorithm1") }, 'query_by_mode': { 'secret_1_dict': dict(mode="mode1"), 'secret_2_dict': dict(mode="mode2"), 'query_dict': dict(mode="mode1") }, 'query_by_bit_length': { 'secret_1_dict': dict(bit_length=1024), 'secret_2_dict': dict(bit_length=2048), 'query_dict': dict(bits=1024) }, 'query_by_secret_type': { 'secret_1_dict': dict(secret_type=ss.SecretType.SYMMETRIC), 'secret_2_dict': dict(secret_type=ss.SecretType.OPAQUE), 'query_dict': dict(secret_type=ss.SecretType.SYMMETRIC) }, } def setUp(self): super(WhenTestingSecretRepository, self).setUp() self.repo = repositories.SecretRepo() def test_get_secret_list(self): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) secret_model = models.Secret() secret_model.project_id = project.id secret = self.repo.create_from(secret_model, session=session) session.commit() secrets, offset, limit, total = self.repo.get_secret_list( "my keystone id", session=session, ) self.assertEqual([secret.id], [s.id for s in secrets]) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(1, total) def test_get_secret_by_id(self): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) secret_model = models.Secret() secret_model.project_id = project.id secret = self.repo.create_from(secret_model, session=session) session.commit() db_secret = 
self.repo.get_secret_by_id(secret.id) self.assertIsNotNone(db_secret) def test_should_raise_notfound_exception(self): self.assertRaises(exception.NotFound, self.repo.get_secret_by_id, "invalid_id", suppress_exception=False) def test_should_suppress_notfound_exception(self): self.assertIsNone(self.repo.get_secret_by_id("invalid_id", suppress_exception=True)) @utils.parameterized_dataset(dataset_for_filter_tests) def test_get_secret_list_with_filter(self, secret_1_dict, secret_2_dict, query_dict): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) secret_1_dict['project_id'] = project.id secret1 = self.repo.create_from( models.Secret(secret_1_dict), session=session, ) secret_2_dict['project_id'] = project.id secret2 = self.repo.create_from( models.Secret(secret_2_dict), session=session, ) session.commit() secrets, offset, limit, total = self.repo.get_secret_list( "my keystone id", session=session, **query_dict ) resulting_secret_ids = [s.id for s in secrets] self.assertIn(secret1.id, resulting_secret_ids) self.assertNotIn(secret2.id, resulting_secret_ids) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(1, total) def test_get_by_create_date_nothing(self): session = self.repo.get_session() secrets, offset, limit, total = self.repo.get_secret_list( "my keystone id", bits=1024, session=session, suppress_exception=True ) self.assertEqual([], secrets) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(0, total) def test_do_entity_name(self): self.assertEqual("Secret", self.repo._do_entity_name()) def test_should_raise_no_result_found(self): session = self.repo.get_session() self.assertRaises( exception.NotFound, self.repo.get_secret_list, "my keystone id", session=session, suppress_exception=False) def test_should_get_count_zero(self): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) session.commit() count = self.repo.get_count(project.id, session=session) self.assertEqual(0, count) def test_should_get_count_one(self): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) secret_model = models.Secret() secret_model.project_id = project.id self.repo.create_from(secret_model, session=session) session.commit() count = self.repo.get_count(project.id, session=session) self.assertEqual(1, count) def test_should_get_count_one_after_delete(self): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) secret_model = models.Secret() secret_model.project_id = project.id self.repo.create_from(secret_model, session=session) secret_model = models.Secret() secret_model.project_id = project.id self.repo.create_from(secret_model, session=session) session.commit() count = self.repo.get_count(project.id, session=session) self.assertEqual(2, count) self.repo.delete_entity_by_id(secret_model.id, "my keystone id", session=session) session.commit() count = self.repo.get_count(project.id, session=session) self.assertEqual(1, count) def test_should_get_count_one_after_expiration(self): current_time = datetime.datetime.utcnow() tomorrow = current_time + datetime.timedelta(days=1) yesterday = current_time - datetime.timedelta(days=1) session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) 
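        # One secret is created expiring tomorrow and one already expired
        # yesterday; get_count() should include only the unexpired secret.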
secret_model = models.Secret() secret_model.project_id = project.id secret_model.expiration = tomorrow self.repo.create_from(secret_model, session=session) secret_model = models.Secret() secret_model.project_id = project.id secret_model.expiration = yesterday self.repo.create_from(secret_model, session=session) session.commit() count = self.repo.get_count(project.id, session=session) self.assertEqual(1, count) class WhenTestingQueryFilters(testtools.TestCase, fixtures.TestWithFixtures): def setUp(self): super(WhenTestingQueryFilters, self).setUp() self._session_fixture = self.useFixture(fixture.SessionQueryFixture()) self.session = self._session_fixture.Session() self.query = self.session.query(models.Secret) self.repo = repositories.SecretRepo() def test_data_includes_six_secrets(self): self.assertEqual(6, len(self.query.all())) def test_sort_by_name_defaults_ascending(self): query = self.repo._build_sort_filter_query(self.query, 'name') secrets = query.all() self.assertEqual('A', secrets[0].name) def test_sort_by_name_desc(self): query = self.repo._build_sort_filter_query(self.query, 'name:desc') secrets = query.all() self.assertEqual('F', secrets[0].name) def test_sort_by_created_asc(self): query = self.repo._build_sort_filter_query(self.query, 'created:asc') secrets = query.all() self.assertEqual('A', secrets[0].name) def test_sort_by_updated_desc(self): query = self.repo._build_sort_filter_query(self.query, 'updated:desc') secrets = query.all() self.assertEqual('F', secrets[0].name) def test_filter_by_created_on_new_years(self): query = self.repo._build_date_filter_query( self.query, 'created_at', '2016-01-01T00:00:00' ) secrets = query.all() self.assertEqual(1, len(secrets)) self.assertEqual('A', secrets[0].name) def test_filter_by_created_after_march(self): query = self.repo._build_date_filter_query( self.query, 'created_at', 'gt:2016-03-01T00:00:00' ) secrets = query.all() self.assertEqual(3, len(secrets)) def test_filter_by_created_on_or_after_march(self): query = self.repo._build_date_filter_query( self.query, 'created_at', 'gte:2016-03-01T00:00:00' ) secrets = query.all() self.assertEqual(4, len(secrets)) def test_filter_by_created_before_march(self): query = self.repo._build_date_filter_query( self.query, 'created_at', 'lt:2016-03-01T00:00:00' ) secrets = query.all() self.assertEqual(2, len(secrets)) def test_filter_by_created_on_or_before_march(self): query = self.repo._build_date_filter_query( self.query, 'created_at', 'lte:2016-03-01T00:00:00' ) secrets = query.all() self.assertEqual(3, len(secrets)) def test_filter_by_created_between_march_and_may_inclusive(self): query = self.repo._build_date_filter_query( self.query, 'created_at', 'gte:2016-03-01T00:00:00,lte:2016-05-01T00:00:00' ) secrets = query.all() secret_names = [s.name for s in secrets] self.assertEqual(3, len(secrets)) self.assertIn('C', secret_names) self.assertIn('D', secret_names) self.assertIn('E', secret_names) barbican-9.1.0.dev50/barbican/tests/model/__init__.py0000664000175000017500000000000013616500636022520 0ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/tests/database_utils.py0000664000175000017500000002466513616500636022674 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ Supports database/repositories oriented unit testing. Warning: Do not merge this content with the utils.py module, as doing so will break the DevStack functional test discovery process. """ import datetime import oslotest.base as oslotest from sqlalchemy.engine import Engine from sqlalchemy import event from barbican.model import models from barbican.model import repositories @event.listens_for(Engine, "connect") def set_foreign_key_constraint(dbapi_connection, connection_record): # Ensure that foreign key constraints are enforced during tests dbapi_connection.execute("PRAGMA foreign_keys=ON") def setup_in_memory_db(): # Ensure we are using in-memory SQLite database, and creating tables. repositories.CONF.set_override("sql_connection", "sqlite:///:memory:") repositories.CONF.set_override("db_auto_create", True) repositories.CONF.set_override("debug", True) # Ensure the connection is completely closed, so any previous in-memory # database can be removed prior to starting the next test run. repositories.hard_reset() # Start the in-memory database, creating required tables. repositories.start() def in_memory_cleanup(): repositories.clear() def get_session(): return repositories.get_session() def create_project(external_id="my keystone id", session=None): project = models.Project() project.external_id = external_id project_repo = repositories.get_project_repository() project_repo.create_from(project, session=session) return project def create_order(project=None, session=None, secret=None, container=None): if not project: project = create_project(session=session) order = models.Order() order.project_id = project.id if secret: order.secret_id = secret.id if container: order.container_id = container.id order_repo = repositories.get_order_repository() order_repo.create_from(order, session=session) return order def create_secret(project=None, session=None): secret = models.Secret() secret.project_id = project.id secret_repo = repositories.get_secret_repository() secret_repo.create_from(secret, session=session) return secret def create_transport_key(plugin_name="plugin", transport_key="tkey", session=None): transport_key = models.TransportKey(plugin_name, transport_key) transport_key_repo = repositories.get_transport_key_repository() transport_key_repo.create_from(transport_key, session=session) return transport_key def create_secret_metadatum(secret=None, key="key", value="value", session=None): secret_meta = models.SecretStoreMetadatum(key, value) secret_meta.secret_id = secret.id secret_meta_repo = repositories.get_secret_meta_repository() secret_meta_repo.create_from(secret_meta, session=session) return secret_meta def create_secret_user_metadatum(secret=None, key="user_key", value="user_value", session=None): secret_user_metadatum = models.SecretUserMetadatum(key, value) secret_user_metadatum.secret_id = secret.id secret_user_metadatum_repo = repositories.get_secret_user_meta_repository() secret_user_metadatum_repo.create_from(secret_user_metadatum, session=session) return secret_user_metadatum def create_container(project=None, session=None): container = models.Container() 
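    # Tie the new container to the caller's project before persisting it.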
container.project_id = project.id container_repo = repositories.get_container_repository() container_repo.create_from(container, session=session) return container def create_container_secret(container=None, secret=None, session=None): container_secret = models.ContainerSecret() container_secret.container_id = container.id container_secret.secret_id = secret.id container_secret_repo = repositories.get_container_secret_repository() container_secret_repo.create_from(container_secret, session=session) return container_secret def create_kek_datum(project=None, plugin_name="plugin", session=None): kek_datum = models.KEKDatum() kek_datum.plugin_name = plugin_name kek_datum.project_id = project.id kek_datum_repo = repositories.get_kek_datum_repository() kek_datum_repo.create_from(kek_datum, session=session) return kek_datum def create_encrypted_datum(secret=None, kek_datum=None, session=None): enc_datum = models.EncryptedDatum() enc_datum.secret_id = secret.id enc_datum.kek_id = kek_datum.id enc_datum_repo = repositories.get_encrypted_datum_repository() enc_datum_repo.create_from(enc_datum, session=session) return enc_datum def create_order_meta_datum(order=None, key="key", value="value", session=None): order_meta_datum = models.OrderBarbicanMetadatum(key, value) order_meta_datum.order_id = order.id order_meta_datum_repo = repositories.get_order_barbican_meta_repository() order_meta_datum_repo.create_from(order_meta_datum, session=session) return order_meta_datum def create_order_retry(order=None, retry_task="", retry_args=[], retry_kwargs={}, retry_at=None, session=None): order_retry = models.OrderRetryTask() order_retry.retry_task = retry_task order_retry.retry_args = retry_args order_retry.retry_kwargs = retry_kwargs if not retry_at: order_retry.retry_at = datetime.datetime.utcnow() order_retry.order_id = order.id order_retry_task_repo = repositories.get_order_retry_tasks_repository() order_retry_task_repo.create_from(order_retry, session) return order_retry def create_order_plugin_metadatum(order=None, key="key", value="value", session=None): order_plugin_metadatum = models.OrderPluginMetadatum(key, value) order_plugin_metadatum.order_id = order.id order_plugin_repo = repositories.get_order_plugin_meta_repository() order_plugin_repo.create_from(order_plugin_metadatum, session=session) return order_plugin_metadatum def create_container_consumer_meta(container=None, parsed_request=None, session=None): if not parsed_request: parsed_request = {"name": "name", "URL": "URL"} container_consumer_meta = models.ContainerConsumerMetadatum( container_id=container.id, project_id=container.project_id, parsed_request=parsed_request, ) cont_cons_meta_repo = repositories.get_container_consumer_repository() cont_cons_meta_repo.create_from(container_consumer_meta, session=session) return container_consumer_meta def create_secret_consumer(secret=None, service="service", resource_id="resource_id", resource_type="resource_type", session=None): consumer = models.SecretConsumerMetadatum( secret_id=secret.id, project_id=secret.project_id, service=service, resource_type=resource_type, resource_id=resource_id, ) repo = repositories.get_secret_consumer_repository() repo.create_from(consumer, session=session) return consumer def create_certificate_authority(project=None, parsed_ca_in=None, session=None): if not parsed_ca_in: parsed_ca_in = {'plugin_name': 'plugin_name', 'plugin_ca_id': 'plugin_ca_id', 'expiration:': 'expiration', 'creator_id': 'creator_id', 'project_id': project.id} certificate_authority = 
models.CertificateAuthority( parsed_ca_in=parsed_ca_in) cert_auth_repo = repositories.get_ca_repository() cert_auth_repo.create_from(certificate_authority, session=session) return certificate_authority def create_preferred_cert_authority(cert_authority, session=None): preferred_cert_authority = models.PreferredCertificateAuthority( ca_id=cert_authority.id, project_id=cert_authority.project_id) preferred_ca_repo = repositories.get_preferred_ca_repository() preferred_ca_repo.create_from(preferred_cert_authority, session=session) return preferred_cert_authority def create_project_cert_authority(certificate_authority=None, session=None): project_cert_authority = models.ProjectCertificateAuthority( ca_id=certificate_authority.id, project_id=certificate_authority.project_id) project_cert_repo = repositories.get_project_ca_repository() project_cert_repo.create_from(project_cert_authority, session=session) return project_cert_authority def create_project_quotas(project=None, parsed_project_quotas=None, session=None): project_quota = models.ProjectQuotas( project_id=project.id, parsed_project_quotas=parsed_project_quotas) project_quota_repo = repositories.get_project_quotas_repository() project_quota_repo.create_from(project_quota, session=session) return project_quota def create_acl_secret(secret=None, user_ids=[], session=None): acl_secret = models.SecretACL(secret.id, "read") acl_secret.secret_id = secret.id acl_secret_repo = repositories.get_secret_acl_repository() acl_secret_repo.create_from(acl_secret, session=session) return acl_secret class RepositoryTestCase(oslotest.BaseTestCase): """Base test case class for in-memory database unit tests. Database/Repository oriented unit tests should *not* modify the global state in the barbican/model/repositories.py module, as this can lead to hard to debug errors. Instead only utilize methods in this fixture. Also, database-oriented unit tests extending this class MUST NOT invoke the repositories.start()/clear()/hard_reset() methods, otherwise *VERY* hard to debug 'Broken Pipe' errors could result! """ def setUp(self): super(RepositoryTestCase, self).setUp() setup_in_memory_db() # Clean up once tests are completed. self.addCleanup(in_memory_cleanup) barbican-9.1.0.dev50/barbican/tests/queue/0000775000175000017500000000000013616500640020440 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/tests/queue/test_server.py0000664000175000017500000003663213616500636023366 0ustar sahidsahid00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import datetime import mock import six from barbican.model import models from barbican.model import repositories from barbican.queue import server from barbican.tasks import common from barbican.tests import database_utils from barbican.tests import utils class WhenUsingTransactionalDecorator(utils.BaseTestCase): """Test using the 'transactional' decorator in server.py.
Note that only the 'I am a server' logic is tested here, as the alternate mode is only used for direct invocation of Task methods in the standalone server mode, which is also thoroughly tested in WhenUsingBeginTypeOrderTask below. """ def setUp(self): super(WhenUsingTransactionalDecorator, self).setUp() # Ensure we always think we are in 'I am a server' mode. is_server_side_config = { 'return_value': True } self.is_server_side_patcher = mock.patch( 'barbican.queue.is_server_side', **is_server_side_config ) self.is_server_side_patcher.start() self.commit_patcher = mock.patch( 'barbican.model.repositories.commit' ) self.commit_mock = self.commit_patcher.start() self.rollback_patcher = mock.patch( 'barbican.model.repositories.rollback' ) self.rollback_mock = self.rollback_patcher.start() self.clear_patcher = mock.patch( 'barbican.model.repositories.clear' ) self.clear_mock = self.clear_patcher.start() self.args = ('foo', 'bar') self.kwargs = {'k_foo': 1, 'k_bar': 2} # Class/decorator under test. class TestClass(object): my_args = None my_kwargs = None is_exception_needed = False @server.transactional def test_method(self, *args, **kwargs): if self.is_exception_needed: raise ValueError() self.my_args = args self.my_kwargs = kwargs self.test_object = TestClass() def tearDown(self): super(WhenUsingTransactionalDecorator, self).tearDown() self.is_server_side_patcher.stop() self.commit_patcher.stop() self.rollback_patcher.stop() self.clear_patcher.stop() def test_should_commit(self): self.test_object.test_method(*self.args, **self.kwargs) self.assertEqual(self.args, self.test_object.my_args) self.assertEqual(self.kwargs, self.test_object.my_kwargs) self.assertEqual(1, self.commit_mock.call_count) self.assertEqual(0, self.rollback_mock.call_count) self.assertEqual(1, self.clear_mock.call_count) def test_should_rollback(self): self.test_object.is_exception_needed = True self.test_object.test_method(*self.args, **self.kwargs) self.assertEqual(0, self.commit_mock.call_count) self.assertEqual(1, self.rollback_mock.call_count) self.assertEqual(1, self.clear_mock.call_count) class WhenUsingRetryableOrderDecorator(utils.BaseTestCase): """Test using the 'retryable_order' decorator in server.py.""" def setUp(self): super(WhenUsingRetryableOrderDecorator, self).setUp() self.schedule_retry_tasks_patcher = mock.patch( 'barbican.queue.server.schedule_order_retry_tasks' ) self.schedule_retry_tasks_mock = ( self.schedule_retry_tasks_patcher.start() ) self.order_id = 'order-id' self.args = ('foo', 'bar') self.kwargs = {'k_foo': 1, 'k_bar': 2} # Class/decorator under test.
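        # The stub below records the order_id and arguments that flow
        # through the retryable_order decorator, can be told to raise, and
        # returns a processing-status DTO that
        # schedule_order_retry_tasks() should receive.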
class TestClass(object): self.order_id = None my_args = None my_kwargs = None is_exception_needed = False result = common.FollowOnProcessingStatusDTO() @server.retryable_order def test_method(self, order_id, *args, **kwargs): if self.is_exception_needed: raise ValueError() self.order_id = order_id self.my_args = args self.my_kwargs = kwargs return self.result self.test_object = TestClass() self.test_method = TestClass.test_method def tearDown(self): super(WhenUsingRetryableOrderDecorator, self).tearDown() self.schedule_retry_tasks_patcher.stop() def test_should_successfully_schedule_a_task_for_retry(self): self.test_object.test_method(self.order_id, *self.args, **self.kwargs) self.assertEqual(self.order_id, self.test_object.order_id) self.assertEqual(self.args, self.test_object.my_args) self.assertEqual(self.kwargs, self.test_object.my_kwargs) self.assertEqual(1, self.schedule_retry_tasks_mock.call_count) self.schedule_retry_tasks_mock.assert_called_with( mock.ANY, self.test_object.result, self.order_id, *self.args, **self.kwargs) def test_retry_should_not_be_scheduled_if_exception_is_raised(self): self.test_object.is_exception_needed = True self.assertRaises( ValueError, self.test_object.test_method, self.order_id, self.args, self.kwargs, ) self.assertEqual(0, self.schedule_retry_tasks_mock.call_count) class WhenCallingScheduleOrderRetryTasks(database_utils.RepositoryTestCase): """Test calling schedule_order_retry_tasks() in server.py.""" def setUp(self): super(WhenCallingScheduleOrderRetryTasks, self).setUp() self.project = database_utils.create_project() self.order = database_utils.create_order(self.project) database_utils.get_session().commit() self.repo = repositories.OrderRetryTaskRepo() self.result = common.FollowOnProcessingStatusDTO() self.args = ['args-foo', 'args-bar'] self.kwargs = {'order_id': self.order.id, 'foo': 1, 'bar': 2} self.date_to_retry_at = ( datetime.datetime.utcnow() + datetime.timedelta( milliseconds=self.result.retry_msec) ) def test_should_not_schedule_task_due_to_no_result(self): retry_rpc_method = server.schedule_order_retry_tasks(None, None, None) self.assertIsNone(retry_rpc_method) def test_should_not_schedule_task_due_to_no_action_required_result(self): self.result.retry_task = common.RetryTasks.NO_ACTION_REQUIRED retry_rpc_method = server.schedule_order_retry_tasks( None, self.result, None) self.assertIsNone(retry_rpc_method) def test_should_schedule_invoking_task_for_retry(self): self.result.retry_task = common.RetryTasks.INVOKE_SAME_TASK # Schedule this test method as the passed-in 'retry' function. retry_rpc_method = server.schedule_order_retry_tasks( self.test_should_schedule_invoking_task_for_retry, self.result, None, # Not used. *self.args, **self.kwargs) database_utils.get_session().commit() # Flush to the database. self.assertEqual( 'test_should_schedule_invoking_task_for_retry', retry_rpc_method) def test_should_schedule_certificate_status_task_for_retry(self): self.result.retry_task = ( common.RetryTasks.INVOKE_CERT_STATUS_CHECK_TASK ) # Schedule this test method as the passed-in 'retry' function. retry_rpc_method = server.schedule_order_retry_tasks( None, # Should be ignored for non-self retries. self.result, None, # Not used. *self.args, **self.kwargs) database_utils.get_session().commit() # Flush to the database. 
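        # A non-self retry should resolve to the 'check_certificate_status'
        # RPC method, with a matching retry-task row persisted for the
        # order.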
self.assertEqual( 'check_certificate_status', retry_rpc_method) self._verify_retry_task_entity( 'check_certificate_status') def _verify_retry_task_entity(self, retry_task): # Retrieve the task retry entity created above and verify it. entities, offset, limit, total = self.repo.get_by_create_date() self.assertEqual(1, total) retry_model = entities[0] self.assertEqual(retry_task, retry_model.retry_task) self.assertEqual(self.args, retry_model.retry_args) self.assertEqual(self.kwargs, retry_model.retry_kwargs) self.assertEqual(0, retry_model.retry_count) # Compare retry_at times. # Note that the expected retry_at time is computed at setUp() time, but # the retry_at time on the task retry entity/model is computed and set # a few milliseconds after this setUp() time, hence they will vary by a # small amount of time. delta = retry_model.retry_at - self.date_to_retry_at delta_seconds = delta.seconds self.assertLessEqual(delta_seconds, 2) class WhenCallingTasksMethod(utils.BaseTestCase): """Test calling methods on the Tasks class.""" def setUp(self): super(WhenCallingTasksMethod, self).setUp() # Mock the 'am I a server process?' flag used by the decorator around # all task methods. Since this test class focuses on testing task # method behaviors, this flag is set to false to allow for direct # testing of these tasks without database transactional interference. is_server_side_config = { 'return_value': False } self.is_server_side_patcher = mock.patch( 'barbican.queue.is_server_side', **is_server_side_config ) self.is_server_side_patcher.start() self.tasks = server.Tasks() def tearDown(self): super(WhenCallingTasksMethod, self).tearDown() self.is_server_side_patcher.stop() @mock.patch('barbican.queue.server.schedule_order_retry_tasks') @mock.patch('barbican.tasks.resources.BeginTypeOrder') def test_should_process_begin_order(self, mock_begin_order, mock_schedule): method = mock_begin_order.return_value.process_and_suppress_exceptions method.return_value = 'result' self.tasks.process_type_order( None, self.order_id, self.external_project_id, self.request_id) mock_process = mock_begin_order.return_value mock_process.process_and_suppress_exceptions.assert_called_with( self.order_id, self.external_project_id) mock_schedule.assert_called_with( mock.ANY, 'result', None, 'order1234', 'keystone1234', 'request1234') @mock.patch('barbican.queue.server.schedule_order_retry_tasks') @mock.patch('barbican.tasks.resources.CheckCertificateStatusOrder') def test_should_check_certificate_order( self, mock_check_cert, mock_schedule): method = mock_check_cert.return_value.process_and_suppress_exceptions method.return_value = 'result' self.tasks.check_certificate_status( None, self.order_id, self.external_project_id, self.request_id) mock_process = mock_check_cert.return_value mock_process.process_and_suppress_exceptions.assert_called_with( self.order_id, self.external_project_id ) mock_schedule.assert_called_with( mock.ANY, 'result', None, 'order1234', 'keystone1234', 'request1234') @mock.patch('barbican.tasks.resources.BeginTypeOrder') def test_process_order_catch_exception(self, mock_begin_order): """Test that BeginTypeOrder's process() handles all exceptions.""" mock_begin_order.return_value._process.side_effect = Exception() self.tasks.process_type_order(None, self.order_id, self.external_project_id, self.request_id) class WhenUsingTaskServer(database_utils.RepositoryTestCase): """Test using the asynchronous task client. 
This test suite performs a full-stack test of worker-side task processing (except for queue interactions, which are mocked). This includes testing database commit and session close behaviors. """ def setUp(self): super(WhenUsingTaskServer, self).setUp() # Queue target mocking setup. self.target = 'a target value here' queue_get_target_config = { 'return_value': self.target } self.queue_get_target_patcher = mock.patch( 'barbican.queue.get_target', **queue_get_target_config ) self.queue_get_target_mock = self.queue_get_target_patcher.start() # Queue server mocking setup. self.server_mock = mock.MagicMock() self.server_mock.start.return_value = None self.server_mock.stop.return_value = None queue_get_server_config = { 'return_value': self.server_mock } self.queue_get_server_patcher = mock.patch( 'barbican.queue.get_server', **queue_get_server_config ) self.queue_get_server_mock = self.queue_get_server_patcher.start() self.server = server.TaskServer() # Add an order to the in-memory database. self.external_id = 'keystone-id' project = database_utils.create_project( external_id=self.external_id) self.order = database_utils.create_order( project=project) self.request_id = 'request1234' def tearDown(self): super(WhenUsingTaskServer, self).tearDown() self.queue_get_target_patcher.stop() self.queue_get_server_patcher.stop() def test_should_start(self): self.server.start() self.queue_get_target_mock.assert_called_with() self.queue_get_server_mock.assert_called_with( target=self.target, endpoints=[self.server]) self.server_mock.start.assert_called_with() def test_should_stop(self): self.server.stop() self.queue_get_target_mock.assert_called_with() self.queue_get_server_mock.assert_called_with( target=self.target, endpoints=[self.server]) self.server_mock.stop.assert_called_with() def test_process_bogus_begin_type_order_should_not_rollback(self): order_id = self.order.id self.order.type = 'bogus-type' # Force error out of business logic. # Invoke process, including the transactional decorator that terminates # the session when it is done. Hence we must re-retrieve the order for # verification afterwards. self.server.process_type_order( None, self.order.id, self.external_id, self.request_id) order_repo = repositories.get_order_repository() order_result = order_repo.get(order_id, self.external_id) self.assertEqual(models.States.ERROR, order_result.status) self.assertEqual( six.u( 'Process TypeOrder failure seen - ' 'please contact site administrator.'), order_result.error_reason) self.assertEqual( six.u('500'), order_result.error_status_code) barbican-9.1.0.dev50/barbican/tests/queue/test_retry_scheduler.py0000664000175000017500000001706613616500636025273 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import datetime import time import eventlet import mock import oslotest.base as oslotest from barbican.model import models from barbican.model import repositories from barbican.queue import retry_scheduler from barbican.tests import database_utils # Oslo messaging RPC server uses eventlet. 
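# monkey_patch() makes blocking stdlib calls (such as the time.sleep()
# waits in these tests) cooperate with the eventlet hub that drives the
# periodic retry server.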
eventlet.monkey_patch() INITIAL_DELAY_SECONDS = 5.0 NEXT_RETRY_SECONDS = 5.0 def is_interval_in_expected_range(interval): return NEXT_RETRY_SECONDS * .8 <= interval < NEXT_RETRY_SECONDS * 1.2 class WhenRunningPeriodicServerRetryLogic(database_utils.RepositoryTestCase): """Tests the retry logic invoked by the periodic task retry server. These tests are only concerned with the logic of the invoked periodic task method. Testing of whether or not the periodic tasks are actually invoked per configured schedule configuration is deferred to the tests in :class:`WhenRunningPeriodicServer`. """ def setUp(self): super(WhenRunningPeriodicServerRetryLogic, self).setUp() retry_scheduler.CONF.set_override( "initial_delay_seconds", 2 * INITIAL_DELAY_SECONDS, group='retry_scheduler') retry_scheduler.CONF.set_override( "periodic_interval_max_seconds", NEXT_RETRY_SECONDS, group='retry_scheduler') self.queue_client = mock.MagicMock() self.periodic_server = retry_scheduler.PeriodicServer( queue_resource=self.queue_client) def tearDown(self): super(WhenRunningPeriodicServerRetryLogic, self).tearDown() self.periodic_server.stop() def test_should_perform_retry_processing_no_tasks(self): interval = self.periodic_server._check_retry_tasks() self.assertTrue(is_interval_in_expected_range(interval)) def test_should_perform_retry_processing_one_task(self): # Add one retry task. args, kwargs, retry_repo = self._create_retry_task() # Retrieve this entity. entities, _, _, total = retry_repo.get_by_create_date() self.assertEqual(1, total) time.sleep(1) interval = self.periodic_server._check_retry_tasks() # Attempt to retrieve this entity, should have been deleted above. entities, _, _, total = retry_repo.get_by_create_date( suppress_exception=True) self.assertEqual(0, total) self.assertTrue(is_interval_in_expected_range(interval)) self.queue_client.test_task.assert_called_once_with( *args, **kwargs ) @mock.patch('barbican.model.repositories.commit') def test_should_fail_and_force_a_rollback(self, mock_commit): mock_commit.side_effect = Exception() # Add one retry task. args, kwargs, retry_repo = self._create_retry_task() # Retrieve this entity. entities, _, _, total = retry_repo.get_by_create_date() self.assertEqual(1, total) time.sleep(1) self.periodic_server._check_retry_tasks() # Attempt to retrieve this entity, should not have been deleted above. entities, _, _, total = retry_repo.get_by_create_date( suppress_exception=True) self.assertEqual(1, total) @mock.patch('barbican.model.repositories.get_order_retry_tasks_repository') def test_should_fail_process_retry(self, mock_get_repo): mock_get_repo.return_value.get_by_create_date.side_effect = \ Exception() periodic_server_with_mock_repo = retry_scheduler.PeriodicServer( queue_resource=self.queue_client) interval = periodic_server_with_mock_repo._check_retry_tasks() self.assertTrue(is_interval_in_expected_range(interval)) def _create_retry_task(self): # Add one retry task: task = 'test_task' args = ('foo', 'bar') kwargs = {'k_foo': 1, 'k_bar': 2} order = database_utils.create_order() retry = models.OrderRetryTask() retry.order_id = order.id retry.retry_at = datetime.datetime.utcnow() retry.retry_task = task retry.retry_args = args retry.retry_kwargs = kwargs retry_repo = repositories.get_order_retry_tasks_repository() retry_repo.create_from(retry) database_utils.get_session().commit() return args, kwargs, retry_repo class WhenRunningPeriodicServer(oslotest.BaseTestCase): """Tests the timing-related functionality of the periodic task retry server. 
These tests are only concerned with whether or not periodic tasks are actually invoked per configured schedule configuration. The logic of the invoked periodic task method itself is deferred to the tests in :class:`WhenRunningPeriodicServerRetryLogic`. """ def setUp(self): super(WhenRunningPeriodicServer, self).setUp() retry_scheduler.CONF.set_override( "initial_delay_seconds", INITIAL_DELAY_SECONDS, group='retry_scheduler') self.database_patcher = _DatabasePatcherHelper() self.database_patcher.start() self.periodic_server = _PeriodicServerStub(queue_resource=None) self.periodic_server.start() def tearDown(self): super(WhenRunningPeriodicServer, self).tearDown() self.periodic_server.stop() self.database_patcher.stop() def test_should_have_invoked_periodic_task_after_initial_delay(self): # Wait a bit longer than the initial delay. time.sleep(3 * INITIAL_DELAY_SECONDS / 2) self.assertEqual(1, self.periodic_server.invoke_count) def test_should_have_invoked_periodic_task_twice(self): # Wait a bit longer than the initial delay plus retry interval. time.sleep(INITIAL_DELAY_SECONDS + 2 * NEXT_RETRY_SECONDS) self.assertEqual(2, self.periodic_server.invoke_count) def test_should_have_not_invoked_periodic_task_yet(self): # Wait a short time, before the initial delay expires. time.sleep(1) self.assertEqual(0, self.periodic_server.invoke_count) class _PeriodicServerStub(retry_scheduler.PeriodicServer): """Periodic server testing stub class. This class overrides the periodic retry task so that we can track how many times it has been invoked by the Oslo periodic task process. """ def __init__(self, queue_resource=None): super(_PeriodicServerStub, self).__init__() self.invoke_count = 0 def _check_retry_tasks(self): """Override the periodic method, indicating we have called it.""" self.invoke_count += 1 return NEXT_RETRY_SECONDS class _DatabasePatcherHelper(object): """This test suite does not test database interactions, so just stub it.""" def __init__(self): super(_DatabasePatcherHelper, self).__init__() database_config = { 'return_value': None } self.database_patcher = mock.patch( 'barbican.model.repositories.setup_database_engine_and_factory', **database_config ) def start(self): self.database_patcher.start() def stop(self): self.database_patcher.stop() barbican-9.1.0.dev50/barbican/tests/queue/test_client.py0000664000175000017500000000450513616500636023340 0ustar sahidsahid00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
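# The tests in this module share one pattern: patch the queue's client
# factory ('barbican.queue.get_client') with a MagicMock, invoke a
# TaskClient method, then assert that the underlying RPC proxy received
# the matching cast() call. A minimal sketch of the pattern (hypothetical
# test, not part of this module):
#
#     with mock.patch('barbican.queue.get_client') as get_client:
#         task_client = client.TaskClient()
#         task_client.process_type_order(order_id='o1', project_id='p1',
#                                        request_id='r1')
#         get_client.return_value.cast.assert_called_with(
#             {}, 'process_type_order', order_id='o1', project_id='p1',
#             request_id='r1')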
import mock from barbican import queue from barbican.queue import client from barbican.tests import utils class WhenUsingAsyncTaskClient(utils.BaseTestCase): """Test using the asynchronous task client.""" def setUp(self): super(WhenUsingAsyncTaskClient, self).setUp() # Mock out the queue get_client() call: self.mock_client = mock.MagicMock() self.mock_client.cast.return_value = None get_client_config = { 'return_value': self.mock_client } self.get_client_patcher = mock.patch( 'barbican.queue.get_client', **get_client_config ) self.get_client_patcher.start() self.client = client.TaskClient() def tearDown(self): super(WhenUsingAsyncTaskClient, self).tearDown() self.get_client_patcher.stop() def test_should_process_type_order(self): self.client.process_type_order(order_id=self.order_id, project_id=self.external_project_id, request_id=self.request_id) self.mock_client.cast.assert_called_with( {}, 'process_type_order', order_id=self.order_id, project_id=self.external_project_id, request_id=self.request_id) class WhenCreatingDirectTaskClient(utils.BaseTestCase): """Test using the synchronous task client (i.e. standalone mode).""" def setUp(self): super(WhenCreatingDirectTaskClient, self).setUp() queue.get_client = mock.MagicMock(return_value=None) self.client = client.TaskClient() def test_should_use_direct_task_client(self): self.assertIsInstance(self.client._client, client._DirectTaskInvokerClient) barbican-9.1.0.dev50/barbican/tests/queue/__init__.py0000664000175000017500000000000013616500636022544 0ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/tests/queue/test_keystone_listener.py0000664000175000017500000003512213616500636025627 0ustar sahidsahid00000000000000# Copyright (c) 2014 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
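# A quick reference for the dispatch behavior exercised below, inferred
# from this module's own assertions: event_type strings are expected to
# look like '<service>.<resource>.<operation>' (case-insensitive), e.g.
#
#     'identity.project.deleted' -> KeystoneEventConsumer.process(
#         project_id=<payload resource_info>,
#         resource_type='project',
#         operation_type='deleted')
#
# Create/update events, malformed event types, and missing or blank
# payloads are ignored: info() returns None instead of HANDLED.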
from oslo_utils import uuidutils import mock import oslo_messaging from oslo_service import service from barbican.common import config from barbican import queue from barbican.queue import keystone_listener from barbican.tasks import keystone_consumer as consumer from barbican.tests import utils class UtilMixin(object): def __init__(self, *args, **kwargs): super(UtilMixin, self).__init__(*args, **kwargs) self.conf = config.CONF # dict which has item as {property: (value, group_name)} self.overrides = {} def revert_overrides(self): '''Reverts configuration override values after test end.''' for k, v in self.overrides.items(): value, group = v self.conf.set_override(k, value, group) def setUp(self): super(UtilMixin, self).setUp() self.addCleanup(self.revert_overrides) def opt_in_group(self, group, **kw): for k, v in kw.items(): # add to local overrides if its not already set # we want to keep the original value from first override dict_value = self.overrides.get(k) if not dict_value: if group: orig_value = getattr(getattr(self.conf, group), k) else: orig_value = getattr(self.conf, k) self.overrides[k] = orig_value, group self.conf.set_override(k, v, group) class WhenUsingNotificationTask(UtilMixin, utils.BaseTestCase): """Test for 'Notification' task functionality.""" def setUp(self): super(WhenUsingNotificationTask, self).setUp() self.task = keystone_listener.NotificationTask(self.conf) self.payload = {'resource_info': uuidutils.generate_uuid( dashed=False)} self.type_index = 2 self.payload_index = 3 self.task_args = ['my_context', 'publisher_id', 'event_type', self.payload, {'metadata': 'value'}] @mock.patch.object(keystone_listener.NotificationTask, 'process_event') def test_info_level_notification(self, mock_process): self.task.info(*self.task_args) mock_process.assert_called_once_with(*self.task_args) @mock.patch.object(consumer.KeystoneEventConsumer, 'process', return_value=None) def test_create_project_event_notification(self, mock_process): self.task_args[self.type_index] = 'identity.project.created' result = self.task.info(*self.task_args) self.assertFalse(mock_process.called, 'Should not call event consumer ' 'for project create event') self.assertIsNone(result) @mock.patch.object(consumer.KeystoneEventConsumer, 'process', return_value=None) def test_update_project_event_notification(self, mock_process): self.task_args[self.type_index] = 'identity.project.updated' result = self.task.info(*self.task_args) self.assertFalse(mock_process.called, 'Should not call event consumer ' 'for project update event') self.assertIsNone(result) @mock.patch.object(consumer.KeystoneEventConsumer, 'process', return_value=None) def test_delete_project_event_notification_with_required_data( self, mock_process): project_id = uuidutils.generate_uuid(dashed=False) self.task_args[self.type_index] = 'identity.project.deleted' self.task_args[self.payload_index] = {'resource_info': project_id} result = self.task.info(*self.task_args) mock_process.assert_called_once_with(project_id=project_id, operation_type='deleted', resource_type='project') self.assertEqual(oslo_messaging.NotificationResult.HANDLED, result) @mock.patch.object(consumer.KeystoneEventConsumer, 'process', return_value=None) def test_delete_project_event_with_different_service_name_in_event_type( self, mock_process): project_id = uuidutils.generate_uuid(dashed=False) self.task_args[self.type_index] = 'aaa.project.deleted' self.task_args[self.payload_index] = {'resource_info': project_id} result = self.task.info(*self.task_args) 
        mock_process.assert_called_once_with(project_id=project_id,
                                             operation_type='deleted',
                                             resource_type='project')
        self.assertEqual(oslo_messaging.NotificationResult.HANDLED, result)

    @mock.patch.object(consumer.KeystoneEventConsumer, 'process',
                       return_value=None)
    def test_delete_project_event_with_event_type_in_different_case(
            self, mock_process):
        project_id = uuidutils.generate_uuid(dashed=False)
        self.task_args[self.type_index] = 'Identity.PROJECT.DeleteD'
        self.task_args[self.payload_index] = {'resource_info': project_id}
        result = self.task.info(*self.task_args)

        mock_process.assert_called_once_with(project_id=project_id,
                                             operation_type='deleted',
                                             resource_type='project')
        self.assertEqual(oslo_messaging.NotificationResult.HANDLED, result)

    @mock.patch.object(consumer.KeystoneEventConsumer, 'process',
                       return_value=None)
    def test_delete_project_event_with_incomplete_event_type_format(
            self, mock_process):
        project_id = uuidutils.generate_uuid(dashed=False)
        self.task_args[self.type_index] = 'project.deleted'
        self.task_args[self.payload_index] = {'resource_info': project_id}
        result = self.task.info(*self.task_args)

        self.assertFalse(mock_process.called, 'Should not call event '
                         'consumer for project delete event as service name '
                         'is missing in event_type data. Expected format is '
                         '<service_name>.<resource_type>.<operation_type>')
        self.assertIsNone(result)

    @mock.patch.object(consumer.KeystoneEventConsumer, 'process',
                       return_value=None)
    def test_delete_project_event_notification_with_missing_resource_info(
            self, mock_process):
        self.task_args[self.type_index] = 'identity.project.deleted'
        self.task_args[self.payload_index] = {'resource_info': None}
        result = self.task.info(*self.task_args)

        self.assertFalse(mock_process.called, 'Should not call event '
                         'consumer for project delete event when project_id '
                         'is missing in payload')
        self.assertIsNone(result)

    @mock.patch.object(consumer.KeystoneEventConsumer, 'process',
                       return_value=None)
    def test_delete_project_event_notification_with_missing_payload(
            self, mock_process):
        self.task_args[self.type_index] = 'identity.project.deleted'
        self.task_args[self.payload_index] = None
        result = self.task.info(*self.task_args)

        self.assertFalse(mock_process.called, 'Should not call event '
                         'consumer for project delete event when payload is '
                         'missing')
        self.assertIsNone(result)

    @mock.patch.object(consumer.KeystoneEventConsumer, 'process',
                       return_value=None)
    def test_delete_project_event_notification_with_blank_payload(
            self, mock_process):
        self.task_args[self.type_index] = 'identity.project.deleted'
        self.task_args[self.payload_index] = ''
        result = self.task.info(*self.task_args)

        self.assertFalse(mock_process.called, 'Should not call event '
                         'consumer for project delete event when payload is '
                         'missing')
        self.assertIsNone(result)

    @mock.patch.object(consumer.KeystoneEventConsumer, 'process',
                       return_value=None)
    def test_event_notification_with_missing_event_type(self, mock_process):
        project_id = uuidutils.generate_uuid(dashed=False)
        self.task_args[self.type_index] = None
        self.task_args[self.payload_index] = {'resource_info': project_id}
        result = self.task.info(*self.task_args)

        self.assertFalse(mock_process.called, 'Should not call event '
                         'consumer for keystone event when event_type is '
                         'missing in notification')
        self.assertIsNone(result)

    @mock.patch.object(consumer.KeystoneEventConsumer, 'process',
                       return_value=None)
    def test_event_notification_with_blank_event_type(self, mock_process):
        project_id = uuidutils.generate_uuid(dashed=False)
        self.task_args[self.type_index] = ''
        self.task_args[self.payload_index] = {'resource_info': project_id}
        result = self.task.info(*self.task_args)

        self.assertFalse(mock_process.called, 'Should not call event '
                         'consumer for keystone event when event_type is '
                         'blank in notification')
        self.assertIsNone(result)

    @mock.patch.object(consumer.KeystoneEventConsumer, 'process')
    def test_event_notification_with_processing_error_requeue_disabled(
            self, mock_process):
        self.opt_in_group(queue.KS_NOTIFICATIONS_GRP_NAME,
                          allow_requeue=False)
        local_task = keystone_listener.NotificationTask(self.conf)
        mock_process.side_effect = Exception('Dummy Error')

        project_id = uuidutils.generate_uuid(dashed=False)
        self.task_args[self.type_index] = 'identity.project.deleted'
        self.task_args[self.payload_index] = {'resource_info': project_id}
        result = local_task.info(*self.task_args)

        self.assertTrue(mock_process.called, 'Should call event consumer '
                        'for project delete event')
        self.assertEqual(oslo_messaging.NotificationResult.HANDLED, result)

    @mock.patch.object(consumer.KeystoneEventConsumer, 'process')
    def test_event_notification_with_processing_error_requeue_enabled(
            self, mock_process):
        self.opt_in_group(queue.KS_NOTIFICATIONS_GRP_NAME,
                          allow_requeue=True)
        local_task = keystone_listener.NotificationTask(self.conf)
        mock_process.side_effect = Exception('Dummy Error')

        project_id = uuidutils.generate_uuid(dashed=False)
        self.task_args[self.type_index] = 'identity.project.deleted'
        self.task_args[self.payload_index] = {'resource_info': project_id}
        result = local_task.info(*self.task_args)

        self.assertTrue(mock_process.called, 'Should call event consumer '
                        'for project delete event')
        self.assertEqual(oslo_messaging.NotificationResult.REQUEUE, result)


class WhenUsingMessageServer(UtilMixin, utils.BaseTestCase):
    """Test using the Keystone notification message server."""

    def setUp(self):
        super(WhenUsingMessageServer, self).setUp()
        queue.init(self.conf)
        patcher = mock.patch('oslo_messaging.notify.listener.'
'NotificationServer') mock_server_class = patcher.start() self.addCleanup(patcher.stop) self.msg_server_mock = mock_server_class() self.msg_server_mock.start.return_value = None self.msg_server_mock.stop.return_value = None self.msg_server_mock.wait.return_value = None @mock.patch.object(queue, 'get_notification_server') @mock.patch.object(queue, 'get_notification_target') def test_target_and_notification_server_invocations(self, mock_target, mock_server): target = 'a target value here' mock_target.return_value = target msg_server = keystone_listener.MessageServer(self.conf) mock_target.assert_called_once_with() mock_server.assert_called_once_with( targets=[target], endpoints=[msg_server]) def test_keystone_notification_config_used(self): topic = 'my test topic' exchange = 'my test exchange' version = ' my test version' self.opt_in_group(queue.KS_NOTIFICATIONS_GRP_NAME, topic=topic) self.opt_in_group(queue.KS_NOTIFICATIONS_GRP_NAME, control_exchange=exchange) self.opt_in_group(queue.KS_NOTIFICATIONS_GRP_NAME, version=version) self.opt_in_group(queue.KS_NOTIFICATIONS_GRP_NAME, version=version) target = queue.get_notification_target() self.assertEqual(topic, target.topic) self.assertEqual(exchange, target.exchange) self.assertEqual(version, target.version) @mock.patch.object(service.Service, '__init__') def test_keystone_notification_pool_size_used(self, mock_service_init): thread_pool_size = 5 self.opt_in_group(queue.KS_NOTIFICATIONS_GRP_NAME, thread_pool_size=thread_pool_size) msg_server = keystone_listener.MessageServer(self.conf) mock_service_init.assert_called_once_with(msg_server, threads=thread_pool_size) @mock.patch.object(service.Service, 'start') def test_should_start(self, mock_service): msg_server = keystone_listener.MessageServer(self.conf) msg_server.start() self.msg_server_mock.start.assert_called_with() @mock.patch.object(service.Service, 'stop', autospec=True) def test_should_stop(self, mock_service_stop): msg_server = keystone_listener.MessageServer(self.conf) msg_server.stop() self.msg_server_mock.stop.assert_called_with() @mock.patch.object(service.Service, 'wait') def test_should_wait(self, mock_service_wait): msg_server = keystone_listener.MessageServer(self.conf) msg_server.wait() self.assertFalse(self.msg_server_mock.stop.called, 'No need to call' 'message server wait() as Service itself creates the ' ' wait event') self.assertTrue(mock_service_wait.called, 'Expected to only call ' 'service.Service.wait() method') barbican-9.1.0.dev50/barbican/tests/utils.py0000664000175000017500000006336613616500636021051 0ustar sahidsahid00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
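# Hypothetical usage sketch for the parameterization helpers defined later
# in this module (parameterized_test_case / parameterized_dataset). Each
# key of the dataset becomes a generated test method, with its value
# unpacked as positional args (list) or keyword args (dict):
#
#     @parameterized_test_case
#     class WhenValidatingNames(oslotest.BaseTestCase):
#
#         @parameterized_dataset({'empty': [''], 'spaces': ['   ']})
#         def test_should_reject_name(self, name):
#             self.assertFalse(name.strip())
#
# After decoration the class grows test_should_reject_name_empty and
# test_should_reject_name_spaces, and the template method is removed.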
from contextlib import contextmanager import datetime import functools import os from os import path import time import types import mock from oslo_config import cfg from oslo_utils import uuidutils import oslotest.base as oslotest from oslotest import createfile import six from six.moves.urllib import parse import webtest from OpenSSL import crypto from barbican.api import app from barbican.common import config import barbican.context from barbican.model import repositories from barbican.plugin.crypto import manager as cm from barbican.plugin.crypto import p11_crypto from barbican.plugin.interface import secret_store from barbican.plugin import kmip_secret_store as kss from barbican.tests import database_utils def mock_pecan_request(test_instance, host=None): patcher_obj = mock.patch('pecan.request') mock_req = patcher_obj.start() test_instance.addCleanup(patcher_obj.stop) mock_req.url = host mock_req.environ = os.environ.copy() mock_req.application_url = host @contextmanager def pecan_context(test_instance, host=None): mock_pecan_request(test_instance, host=host) yield class BarbicanAPIBaseTestCase(oslotest.BaseTestCase): """Base TestCase for all tests needing to interact with a Barbican app.""" root_controller = None def _build_context(self, project_id, roles=None, user=None, is_admin=True, policy_enforcer=None): context = barbican.context.RequestContext( roles=roles, user=user, project_id=project_id, is_admin=is_admin ) context.policy_enforcer = policy_enforcer return context def setUp(self): super(BarbicanAPIBaseTestCase, self).setUp() # Make sure we have a test db and session to work with database_utils.setup_in_memory_db() # Generic project id to perform actions under self.project_id = generate_test_valid_uuid() # Build the test app wsgi_app = app.build_wsgi_app( controller=self.root_controller, transactional=True ) self.app = webtest.TestApp(wsgi_app) self.app.extra_environ = { 'barbican.context': self._build_context(self.project_id) } def tearDown(self): database_utils.in_memory_cleanup() super(BarbicanAPIBaseTestCase, self).tearDown() class BaseTestCase(oslotest.BaseTestCase): """DEPRECATED - Will remove in future refactoring.""" def setUp(self): super(BaseTestCase, self).setUp() self.order_id = 'order1234' self.external_project_id = 'keystone1234' self.request_id = 'request1234' def tearDown(self): super(BaseTestCase, self).tearDown() ss_conf = config.get_module_config('secretstore') ss_conf.clear_override("enable_multiple_secret_stores", group='secretstore') class MockModelRepositoryMixin(object): """Class for setting up the repo factory mocks This class has the purpose of setting up the mocks for the model repository factory functions. This is because they are intended to be singletons, and thus called inside the code-base, and not really passed around as arguments. Thus, this kind of approach is needed. The functions assume that the class that inherits from this is a test case fixture class. This is because as a side-effect patcher objects will be added to the class, and also the cleanup of these patcher objects will be added to the tear-down of the respective classes. """ def setup_container_consumer_repository_mock( self, mock_container_consumer_repo=mock.MagicMock()): """Mocks the container consumer repository factory function :param mock_container_consumer_repo: The pre-configured mock container consumer repo to be returned. 
""" self.mock_container_consumer_repo_patcher = None self._setup_repository_mock( repo_factory='get_container_consumer_repository', mock_repo_obj=mock_container_consumer_repo, patcher_obj=self.mock_container_consumer_repo_patcher) def setup_secret_consumer_repository_mock( self, mock_secret_consumer_repo=mock.MagicMock()): """Mocks the secret consumer repository factory function :param mock_secret_consumer_repo: The pre-configured mock secret consumer repo to be returned. """ self.mock_secret_consumer_repo_patcher = None self._setup_repository_mock( repo_factory='get_secret_consumer_repository', mock_repo_obj=mock_secret_consumer_repo, patcher_obj=self.mock_secret_consumer_repo_patcher) def setup_container_repository_mock(self, mock_container_repo=mock.MagicMock()): """Mocks the container repository factory function :param mock_container_repo: The pre-configured mock container repo to be returned. """ self.mock_container_repo_patcher = None self._setup_repository_mock( repo_factory='get_container_repository', mock_repo_obj=mock_container_repo, patcher_obj=self.mock_container_repo_patcher) def setup_container_secret_repository_mock( self, mock_container_secret_repo=mock.MagicMock()): """Mocks the container-secret repository factory function :param mock_container_secret_repo: The pre-configured mock container-secret repo to be returned. """ self.mock_container_secret_repo_patcher = None self._setup_repository_mock( repo_factory='get_container_secret_repository', mock_repo_obj=mock_container_secret_repo, patcher_obj=self.mock_container_secret_repo_patcher) def setup_encrypted_datum_repository_mock( self, mock_encrypted_datum_repo=mock.MagicMock()): """Mocks the encrypted datum repository factory function :param mock_encrypted_datum_repo: The pre-configured mock encrypted datum repo to be returned. """ self.mock_encrypted_datum_repo_patcher = None self._setup_repository_mock( repo_factory='get_encrypted_datum_repository', mock_repo_obj=mock_encrypted_datum_repo, patcher_obj=self.mock_encrypted_datum_repo_patcher) def setup_kek_datum_repository_mock(self, mock_kek_datum_repo=mock.MagicMock()): """Mocks the kek datum repository factory function :param mock_kek_datum_repo: The pre-configured mock kek-datum repo to be returned. """ self.mock_kek_datum_repo_patcher = None self._setup_repository_mock( repo_factory='get_kek_datum_repository', mock_repo_obj=mock_kek_datum_repo, patcher_obj=self.mock_kek_datum_repo_patcher) def setup_order_barbican_meta_repository_mock( self, mock_order_barbican_meta_repo=mock.MagicMock()): """Mocks the order-barbican-meta repository factory function :param mock_order_barbican_meta_repo: The pre-configured mock order barbican-meta repo to be returned. """ self.mock_order_barbican_meta_repo_patcher = None self._setup_repository_mock( repo_factory='get_order_barbican_meta_repository', mock_repo_obj=mock_order_barbican_meta_repo, patcher_obj=self.mock_order_barbican_meta_repo_patcher) def setup_order_plugin_meta_repository_mock( self, mock_order_plugin_meta_repo=mock.MagicMock()): """Mocks the order-plugin-meta repository factory function :param mock_order_plugin_meta_repo: The pre-configured mock order plugin-meta repo to be returned. 
""" self.mock_order_plugin_meta_repo_patcher = None self._setup_repository_mock( repo_factory='get_order_plugin_meta_repository', mock_repo_obj=mock_order_plugin_meta_repo, patcher_obj=self.mock_order_plugin_meta_repo_patcher) def setup_order_repository_mock(self, mock_order_repo=mock.MagicMock()): """Mocks the order repository factory function :param mock_order_repo: The pre-configured mock order repo to be returned. """ self.mock_order_repo_patcher = None self._setup_repository_mock(repo_factory='get_order_repository', mock_repo_obj=mock_order_repo, patcher_obj=self.mock_order_repo_patcher) def setup_project_repository_mock(self, mock_project_repo=mock.MagicMock()): """Mocks the project repository factory function :param mock_project_repo: The pre-configured mock project repo to be returned. """ self.mock_project_repo_patcher = None self._setup_repository_mock(repo_factory='get_project_repository', mock_repo_obj=mock_project_repo, patcher_obj=self.mock_project_repo_patcher) def setup_secret_meta_repository_mock( self, mock_secret_meta_repo=mock.MagicMock()): """Mocks the secret-meta repository factory function :param mock_secret_meta_repo: The pre-configured mock secret-meta repo to be returned. """ self.mock_secret_meta_repo_patcher = None self._setup_repository_mock( repo_factory='get_secret_meta_repository', mock_repo_obj=mock_secret_meta_repo, patcher_obj=self.mock_secret_meta_repo_patcher) def setup_secret_repository_mock(self, mock_secret_repo=mock.MagicMock()): """Mocks the secret repository factory function :param mock_secret_repo: The pre-configured mock secret repo to be returned. """ self.mock_secret_repo_patcher = None self._setup_repository_mock(repo_factory='get_secret_repository', mock_repo_obj=mock_secret_repo, patcher_obj=self.mock_secret_repo_patcher) def setup_transport_key_repository_mock( self, mock_transport_key_repo=mock.MagicMock()): """Mocks the transport-key repository factory function :param mock_transport_key_repo: The pre-configured mock transport_key repo to be returned. """ self.mock_transport_key_repo_patcher = None self._setup_repository_mock( repo_factory='get_transport_key_repository', mock_repo_obj=mock_transport_key_repo, patcher_obj=self.mock_transport_key_repo_patcher) def setup_ca_repository_mock(self, mock_ca_repo=mock.MagicMock()): """Mocks the project repository factory function :param mock_ca_repo: The pre-configured mock ca repo to be returned. """ self.mock_ca_repo_patcher = None self._setup_repository_mock(repo_factory='get_ca_repository', mock_repo_obj=mock_ca_repo, patcher_obj=self.mock_ca_repo_patcher) def setup_preferred_ca_repository_mock( self, mock_preferred_ca_repo=mock.MagicMock()): """Mocks the project repository factory function :param mock_preferred_ca_repo: The pre-configured mock project ca repo to be returned. """ self.mock_preferred_ca_repo_patcher = None self._setup_repository_mock( repo_factory='get_preferred_ca_repository', mock_repo_obj=mock_preferred_ca_repo, patcher_obj=self.mock_preferred_ca_repo_patcher) def setup_secret_stores_repository_mock( self, mock_secret_stores_repo=mock.MagicMock()): """Mocks the project repository factory function :param mock_secret_stores_repo: The pre-configured mock secret stores repo to be returned. 
""" self.mock_secret_stores_repo_patcher = None self._setup_repository_mock( repo_factory='get_secret_stores_repository', mock_repo_obj=mock_secret_stores_repo, patcher_obj=self.mock_secret_stores_repo_patcher) def setup_project_secret_store_repository_mock( self, mock_project_secret_store_repo=mock.MagicMock()): """Mocks the project repository factory function :param mock_project_secret_store_repo: The pre-configured mock project secret store repo to be returned. """ self.mock_proj_secret_store_repo_patcher = None self._setup_repository_mock( repo_factory='get_project_secret_store_repository', mock_repo_obj=mock_project_secret_store_repo, patcher_obj=self.mock_proj_secret_store_repo_patcher) def setup_project_ca_repository_mock( self, mock_project_ca_repo=mock.MagicMock()): """Mocks the project repository factory function :param mock_project_ca_repo: The pre-configured mock project ca repo to be returned. """ self.mock_project_ca_repo_patcher = None self._setup_repository_mock( repo_factory='get_project_ca_repository', mock_repo_obj=mock_project_ca_repo, patcher_obj=self.mock_project_ca_repo_patcher) def _setup_repository_mock(self, repo_factory, mock_repo_obj, patcher_obj): patcher_obj = mock.patch( 'barbican.model.repositories.' + repo_factory, return_value=mock_repo_obj ) patcher_obj.start() self.addCleanup(patcher_obj.stop) def construct_new_test_function(original_func, name, build_params): """Builds a new test function based on parameterized data. :param original_func: The original test function that is used as a template :param name: The fullname of the new test function :param build_params: A dictionary or list containing args or kwargs for the new test :return: A new function object """ new_func = types.FunctionType( six.get_function_code(original_func), six.get_function_globals(original_func), name=name, argdefs=six.get_function_defaults(original_func), closure=six.get_function_closure(original_func) ) for key, val in original_func.__dict__.items(): if key != 'build_data': new_func.__dict__[key] = val # Support either an arg list or kwarg dict for our data build_args = build_params if isinstance(build_params, list) else [] build_kwargs = build_params if isinstance(build_params, dict) else {} # Build a test wrapper to execute with our kwargs def test_wrapper(func, test_args, test_kwargs): @functools.wraps(func) def wrapper(self): return func(self, *test_args, **test_kwargs) return wrapper return test_wrapper(new_func, build_args, build_kwargs) def process_parameterized_function(name, func_obj, build_data): """Build lists of functions to add and remove to a test case.""" to_remove = [] to_add = [] for subtest_name, params in build_data.items(): # Build new test function func_name = '{0}_{1}'.format(name, subtest_name) new_func = construct_new_test_function(func_obj, func_name, params) # Mark the new function as needed to be added to the class to_add.append((func_name, new_func)) # Mark key for removal to_remove.append(name) return to_remove, to_add def parameterized_test_case(cls): """Class decorator to process parameterized tests This allows for parameterization to be used for potentially any unittest compatible runner; including testr and py.test. 
""" tests_to_remove = [] tests_to_add = [] for key, val in vars(cls).items(): # Only process tests with build data on them if key.startswith('test_') and val.__dict__.get('build_data'): to_remove, to_add = process_parameterized_function( name=key, func_obj=val, build_data=val.__dict__.get('build_data') ) tests_to_remove.extend(to_remove) tests_to_add.extend(to_add) # Add all new test functions [setattr(cls, name, func) for name, func in tests_to_add] # Remove all old test function templates (if they still exist) [delattr(cls, key) for key in tests_to_remove if hasattr(cls, key)] return cls def parameterized_dataset(build_data): """Simple decorator to mark a test method for processing.""" def decorator(func): func.__dict__['build_data'] = build_data return func return decorator def setup_oslo_config_conf(testcase, content, conf_instance=None): conf_file_fixture = testcase.useFixture( createfile.CreateFileWithContent('barbican', content)) if conf_instance is None: conf_instance = cfg.CONF conf_instance([], project="barbican", default_config_files=[conf_file_fixture.path]) testcase.addCleanup(conf_instance.reset) def setup_multiple_secret_store_plugins_conf(testcase, store_plugin_names, crypto_plugin_names, global_default_index, conf_instance=None, multiple_support_enabled=None): """Sets multiple secret store support conf as oslo conf file. Generating file based conf based on input store and crypto plugin names provided as list. Index specified in argument is used to mark that specific secret store as global_default = True. Input lists are 'store_plugins': ['store_crypto', 'kmip_plugin', 'store_crypto'], 'crypto_plugins': ['simple_crypto', '', 'p11_crypto'], Sample output conf file generated is [secretstore] enable_multiple_secret_stores = True stores_lookup_suffix = plugin_0, plugin_1, plugin_2 [secretstore:plugin_0] secret_store_plugin = store_crypto crypto_plugin = simple_crypto global_default = True [secretstore:plugin_1] secret_store_plugin = kmip_plugin [secretstore:plugin_2] secret_store_plugin = store_crypto crypto_plugin = p11_crypto """ def _get_conf_line(name, value, section=None): out_line = "\n[{0}]\n".format(section) if section else "" out_line += "{0} = {1}\n".format(name, value) if name else "" return out_line if multiple_support_enabled is None: multiple_support_enabled = True conf_content = "" if store_plugin_names is not None: if len(store_plugin_names) < len(crypto_plugin_names): max_count = len(crypto_plugin_names) else: max_count = len(store_plugin_names) lookup_names = ['plugin_{0}'.format(indx) for indx in range(max_count)] section_names = ['secretstore:{0}'.format(lname) for lname in lookup_names] lookup_str = ", ".join(lookup_names) conf_content = _get_conf_line('enable_multiple_secret_stores', multiple_support_enabled, section='secretstore') conf_content += _get_conf_line('stores_lookup_suffix', lookup_str, section=None) for indx, section_name in enumerate(section_names): if indx < len(store_plugin_names): store_plugin = store_plugin_names[indx] conf_content += _get_conf_line('secret_store_plugin', store_plugin, section=section_name) else: conf_content += _get_conf_line(None, None, section=section_name) if indx < len(crypto_plugin_names): crypto_plugin = crypto_plugin_names[indx] conf_content += _get_conf_line('crypto_plugin', crypto_plugin, section=None) if indx == global_default_index: conf_content += _get_conf_line('global_default', 'True', section=None) setup_oslo_config_conf(testcase, conf_content, conf_instance) class 
MultipleBackendsTestCase(database_utils.RepositoryTestCase):

    def _mock_plugin_settings(self):
        kmip_conf = kss.CONF
        kmip_conf.kmip_plugin.username = "sample_username"
        kmip_conf.kmip_plugin.password = "sample_password"
        kmip_conf.kmip_plugin.keyfile = None
        kmip_conf.kmip_plugin.pkcs1_only = False

        pkcs11_conf = p11_crypto.CONF
        pkcs11_conf.p11_crypto_plugin.library_path = "/tmp"  # any dummy path

    def init_via_conf_file(self, store_plugin_names, crypto_plugin_names,
                           enabled=True, global_default_index=0):
        secretstore_conf = config.get_module_config('secretstore')
        setup_multiple_secret_store_plugins_conf(
            self, store_plugin_names=store_plugin_names,
            crypto_plugin_names=crypto_plugin_names,
            global_default_index=global_default_index,
            conf_instance=secretstore_conf,
            multiple_support_enabled=enabled)

        # clear globals if already set in previous tests
        secret_store._SECRET_STORE = None  # clear secret store manager
        cm._PLUGIN_MANAGER = None  # clear crypto manager
        self._mock_plugin_settings()

    def _get_secret_store_entry(self, store_plugin, crypto_plugin):
        all_ss = repositories.get_secret_stores_repository().get_all()
        for ss in all_ss:
            if (ss.store_plugin == store_plugin and
                    ss.crypto_plugin == crypto_plugin):
                return ss
        return None


def create_timestamp_w_tz_and_offset(timezone=None, days=0, hours=0,
                                     minutes=0, seconds=0):
    """Creates a timestamp with a timezone and an offset

    :param timezone: Timezone used in creation of timestamp
    :param days: The offset in days
    :param hours: The offset in hours
    :param minutes: The offset in minutes
    :param seconds: The offset in seconds
    :return: a timestamp
    """
    if timezone is None:
        timezone = time.strftime("%z")
    timestamp = '{time}{timezone}'.format(
        time=(datetime.datetime.today() + datetime.timedelta(
            days=days, hours=hours, minutes=minutes, seconds=seconds)),
        timezone=timezone)
    return timestamp


def get_limit_and_offset_from_ref(ref):
    matches = dict(parse.parse_qsl(parse.urlparse(ref).query))
    ref_limit = matches['limit']
    ref_offset = matches['offset']
    return ref_limit, ref_offset


def get_tomorrow_timestamp():
    tomorrow = (datetime.datetime.today() + datetime.timedelta(days=1))
    return tomorrow.isoformat()


def get_id_from_ref(ref):
    """Returns id from reference."""
    ref_id = None
    if ref is not None and len(ref) > 0:
        ref_id = path.split(ref)[1]
    return ref_id


def generate_test_uuid(tail_value=0):
    """Returns a blank uuid with the given value added to the end segment."""
    return '00000000-0000-0000-0000-{value:0>{pad}}'.format(value=tail_value,
                                                            pad=12)


def generate_test_valid_uuid():
    """Returns a valid uuid value, similar to uuid generated in barbican"""
    return uuidutils.generate_uuid()


def get_symmetric_key():
    s = b"MIICdgIBADANBgkqhkiG9w=="
    return s


def get_triple_des_key():
    s = b"AQIDBAUGBwgBAgMEBQYHCAECAwQFBgcI"
    return s


def is_cert_valid(expected, observed):
    c1 = crypto.load_certificate(crypto.FILETYPE_PEM, expected)
    c2 = crypto.load_certificate(crypto.FILETYPE_PEM, observed)
    return (crypto.dump_certificate(crypto.FILETYPE_PEM, c1) ==
            crypto.dump_certificate(crypto.FILETYPE_PEM, c2))


def is_private_key_valid(expected, observed):
    k1 = crypto.load_privatekey(crypto.FILETYPE_PEM, expected)
    k2 = crypto.load_privatekey(crypto.FILETYPE_PEM, observed)
    return (crypto.dump_privatekey(crypto.FILETYPE_PEM, k1) ==
            crypto.dump_privatekey(crypto.FILETYPE_PEM, k2))


def is_public_key_valid(expected, observed):
    # TODO(alee) fill in the relevant test here
    return True


def is_kmip_enabled():
    return os.environ.get('KMIP_PLUGIN_ENABLED') is not None


def is_vault_enabled():
    return os.environ.get('VAULT_PLUGIN_ENABLED') is not None


def
is_pkcs11_enabled(): return os.environ.get('PKCS11_PLUGIN_ENABLED') is not None class DummyClassForTesting(object): pass barbican-9.1.0.dev50/barbican/tests/api/0000775000175000017500000000000013616500640020065 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/tests/api/test_resources_policy.py0000664000175000017500000016565113616500636025112 0ustar sahidsahid00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ This test module focuses on RBAC interactions with the API resource classes. For typical-flow business logic tests of these classes, see the 'resources_test.py' module. """ import os import mock from webob import exc from barbican.api.controllers import consumers from barbican.api.controllers import containers from barbican.api.controllers import orders from barbican.api.controllers import secrets from barbican.api.controllers import secretstores from barbican.api.controllers import versions from barbican.common import accept as common_accept from barbican.common import config from barbican.common import policy from barbican import context from barbican.model import models from barbican.tests import utils # Point to the policy.json file located in source control. TEST_VAR_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../etc', 'barbican')) CONF = config.new_config() policy.init() ENFORCER = policy.ENFORCER class TestableResource(object): def __init__(self, *args, **kwargs): self.controller = self.controller_cls(*args, **kwargs) def on_get(self, req, resp, *args, **kwargs): with mock.patch('pecan.request', req): with mock.patch('pecan.response', resp): return self.controller.on_get(*args, **kwargs) def on_post(self, req, resp, *args, **kwargs): with mock.patch('pecan.request', req): with mock.patch('pecan.response', resp): return self.controller.on_post(*args, **kwargs) def on_put(self, req, resp, *args, **kwargs): with mock.patch('pecan.request', req): with mock.patch('pecan.response', resp): return self.controller.on_put(*args, **kwargs) def on_delete(self, req, resp, *args, **kwargs): with mock.patch('pecan.request', req): with mock.patch('pecan.response', resp): return self.controller.on_delete(*args, **kwargs) class VersionsResource(TestableResource): controller_cls = versions.VersionsController class SecretsResource(TestableResource): controller_cls = secrets.SecretsController class SecretResource(TestableResource): controller_cls = secrets.SecretController class OrdersResource(TestableResource): controller_cls = orders.OrdersController class OrderResource(TestableResource): controller_cls = orders.OrderController class ContainerResource(TestableResource): controller_cls = containers.ContainerController class ConsumersResource(TestableResource): controller_cls = consumers.ContainerConsumersController class ConsumerResource(TestableResource): controller_cls = consumers.ContainerConsumerController class SecretStoresResource(TestableResource): controller_cls = secretstores.SecretStoresController class 
SecretStoreResource(TestableResource): controller_cls = secretstores.SecretStoreController class PreferredSecretStoreResource(TestableResource): controller_cls = secretstores.PreferredSecretStoreController class SecretConsumersResource(TestableResource): controller_cls = consumers.SecretConsumersController class SecretConsumerResource(TestableResource): controller_cls = consumers.SecretConsumerController class BaseTestCase(utils.BaseTestCase, utils.MockModelRepositoryMixin): def setUp(self): super(BaseTestCase, self).setUp() CONF(args=['--config-dir', TEST_VAR_DIR]) self.policy_enforcer = ENFORCER self.policy_enforcer.load_rules(True) self.resp = mock.MagicMock() def _generate_req(self, roles=None, accept=None, content_type=None, user_id=None, project_id=None): """Generate a fake HTTP request with security context added to it.""" req = mock.MagicMock() req.get_param.return_value = None kwargs = { 'user_id': user_id, 'project_id': project_id, 'roles': roles or [], 'policy_enforcer': self.policy_enforcer, } req.environ = {} req.environ['barbican.context'] = context.RequestContext(**kwargs) req.content_type = content_type req.accept = common_accept.create_accept_header(accept) return req def _generate_stream_for_exit(self): """Mock HTTP stream generator, to force RBAC-pass exit. Generate a fake HTTP request stream that forces an IOError to occur, which short circuits API resource processing when RBAC checks under test here pass. """ stream = mock.MagicMock() read = mock.MagicMock(return_value=None, side_effect=IOError()) stream.read = read return stream def _assert_post_rbac_exception(self, exception, role): """Assert that we received the expected RBAC-passed exception.""" self.assertEqual(500, exception.status_int) def _generate_get_error(self): """Falcon exception generator to throw from early-exit mocks. Creates an exception that should be raised by GET tests that pass RBAC. This allows such flows to short-circuit normal post-RBAC processing that is not tested in this module. :return: Python exception that should be raised by repo get methods. """ # The 'Read Error' clause needs to match that asserted in # _assert_post_rbac_exception() above. return exc.HTTPServerError(detail='Read Error') def _assert_pass_rbac(self, roles, method_under_test, accept=None, content_type=None, user_id=None, project_id=None): """Assert that RBAC authorization rules passed for the specified roles. :param roles: List of roles to check, one at a time :param method_under_test: The test method to invoke for each role. :param accept Optional Accept header to set on the HTTP request :return: None """ for role in roles: self.req = self._generate_req(roles=[role] if role else [], accept=accept, content_type=content_type, user_id=user_id, project_id=project_id) # Force an exception early past the RBAC passing. type(self.req).body = mock.PropertyMock(side_effect=IOError) self.req.body_file = self._generate_stream_for_exit() exception = self.assertRaises(exc.HTTPServerError, method_under_test) self._assert_post_rbac_exception(exception, role) def _assert_fail_rbac(self, roles, method_under_test, accept=None, content_type=None, user_id=None, project_id=None): """Assert that RBAC rules failed for one of the specified roles. :param roles: List of roles to check, one at a time :param method_under_test: The test method to invoke for each role. 
:param accept Optional Accept header to set on the HTTP request :return: None """ for role in roles: self.req = self._generate_req(roles=[role] if role else [], accept=accept, content_type=content_type, user_id=user_id, project_id=project_id) exception = self.assertRaises(exc.HTTPForbidden, method_under_test) self.assertEqual(403, exception.status_int) class WhenTestingVersionsResource(BaseTestCase): """RBAC tests for the barbican.api.resources.VersionsResource class.""" def setUp(self): super(WhenTestingVersionsResource, self).setUp() self.resource = VersionsResource() def test_rules_should_be_loaded(self): self.assertIsNotNone(self.policy_enforcer.rules) def test_should_pass_get_versions(self): # Can't use base method that short circuits post-RBAC processing here, # as version GET is trivial for role in ['admin', 'observer', 'creator', 'audit']: self.req = self._generate_req(roles=[role] if role else []) self._invoke_on_get() def test_should_pass_get_versions_with_bad_roles(self): self.req = self._generate_req(roles=[None, 'bunkrolehere']) self._invoke_on_get() def test_should_pass_get_versions_with_no_roles(self): self.req = self._generate_req() self._invoke_on_get() def test_should_pass_get_versions_multiple_roles(self): self.req = self._generate_req(roles=['admin', 'observer', 'creator', 'audit']) self._invoke_on_get() def _invoke_on_get(self): self.resource.on_get(self.req, self.resp) class WhenTestingSecretsResource(BaseTestCase): """RBAC tests for the barbican.api.resources.SecretsResource class.""" def setUp(self): super(WhenTestingSecretsResource, self).setUp() self.external_project_id = '12345' # Force an error on GET calls that pass RBAC, as we are not testing # such flows in this test module. self.secret_repo = mock.MagicMock() get_by_create_date = mock.MagicMock(return_value=None, side_effect=self ._generate_get_error()) self.secret_repo.get_by_create_date = get_by_create_date self.setup_secret_repository_mock(self.secret_repo) self.setup_encrypted_datum_repository_mock() self.setup_kek_datum_repository_mock() self.setup_project_repository_mock() self.setup_secret_meta_repository_mock() self.setup_transport_key_repository_mock() self.resource = SecretsResource() def test_rules_should_be_loaded(self): self.assertIsNotNone(self.policy_enforcer.rules) def test_should_pass_create_secret(self): self._assert_pass_rbac(['admin', 'creator'], self._invoke_on_post, content_type='application/json') def test_should_raise_create_secret(self): self._assert_fail_rbac([None, 'audit', 'observer', 'bogus'], self._invoke_on_post, content_type='application/json') def test_should_pass_get_secrets(self): self._assert_pass_rbac(['admin', 'observer', 'creator'], self._invoke_on_get, content_type='application/json') def test_should_raise_get_secrets(self): self._assert_fail_rbac([None, 'audit', 'bogus'], self._invoke_on_get, content_type='application/json') def _invoke_on_post(self): self.resource.on_post(self.req, self.resp) def _invoke_on_get(self): self.resource.on_get(self.req, self.resp) class WhenTestingSecretResource(BaseTestCase): """RBAC tests for SecretController class.""" def setUp(self): super(WhenTestingSecretResource, self).setUp() self.external_project_id = '12345project' self.secret_id = '12345secret' self.user_id = '123456user' self.creator_user_id = '123456CreatorUser' # Force an error on GET and DELETE calls that pass RBAC, # as we are not testing such flows in this test module. 
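# The technique: the repo's get() and delete_entity_by_id() raise the
# HTTPServerError built by _generate_get_error(), so any request that
# clears the RBAC check fails fast with a 500, which
# _assert_post_rbac_exception() then treats as proof that authorization
# itself passed.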
self.secret_repo = mock.MagicMock() fail_method = mock.MagicMock(return_value=None, side_effect=self._generate_get_error()) self.secret_repo.get = fail_method self.secret_repo.delete_entity_by_id = fail_method self.setup_secret_repository_mock(self.secret_repo) self.setup_encrypted_datum_repository_mock() self.setup_kek_datum_repository_mock() self.setup_project_repository_mock() self.setup_secret_meta_repository_mock() self.setup_transport_key_repository_mock() acl_read = models.SecretACL(secret_id=self.secret_id, operation='read', project_access=True, user_ids=[self.user_id, 'anyRandomId']) self.acl_list = [acl_read] secret = mock.MagicMock() secret.secret_acls.__iter__.return_value = self.acl_list secret.project.external_id = self.external_project_id secret.creator_id = self.creator_user_id self.resource = SecretResource(secret) # self.resource.controller.get_acl_tuple = mock.MagicMock( # return_value=(None, None)) def test_rules_should_be_loaded(self): self.assertIsNotNone(self.policy_enforcer.rules) def test_should_pass_decrypt_secret(self): self._assert_pass_rbac(['admin', 'observer', 'creator'], self._invoke_on_get, accept='notjsonaccepttype', content_type='application/json', user_id=self.user_id, project_id=self.external_project_id) def test_should_raise_decrypt_secret(self): self._assert_fail_rbac([None, 'audit', 'bogus'], self._invoke_on_get, accept='notjsonaccepttype') def test_should_pass_decrypt_secret_for_same_project_with_no_acl(self): """Token and secret project needs to be same in no ACL defined case.""" self.acl_list.pop() # remove read acl from default setup self._assert_pass_rbac(['admin', 'observer', 'creator'], self._invoke_on_get, accept='notjsonaccepttype', content_type='application/json', user_id=self.user_id, project_id=self.external_project_id) def test_should_raise_decrypt_secret_with_project_access_disabled(self): """Should raise authz error as secret is marked private. As secret is private so project users should not be able to access the secret. Admin project user can still access it. """ self.acl_list.pop() # remove read acl from default setup acl_read = models.SecretACL(secret_id=self.secret_id, operation='read', project_access=False, user_ids=['anyRandomUserX', 'aclUser1']) self.acl_list.append(acl_read) self._assert_fail_rbac(['observer', 'creator', 'audit'], self._invoke_on_get, accept='notjsonaccepttype', content_type='application/json', user_id=self.user_id, project_id=self.external_project_id) def test_pass_decrypt_secret_for_admin_user_project_access_disabled(self): """Should pass authz for admin role user as secret is marked private. Even when secret is private, admin user should still have access to the secret. """ self.acl_list.pop() # remove read acl from default setup acl_read = models.SecretACL(secret_id=self.secret_id, operation='read', project_access=False, user_ids=['anyRandomUserX', 'aclUser1']) self.acl_list.append(acl_read) self._assert_pass_rbac(['admin'], self._invoke_on_get, accept='notjsonaccepttype', content_type='application/json', user_id=self.user_id, project_id=self.external_project_id) def test_should_raise_decrypt_secret_for_with_project_access_nolist(self): """Should raise authz error as secret is marked private. As secret is private so project users should not be able to access the secret. This test passes user_ids as empty list, which is a valid and common case. Admin project user can still access it. 
""" self.acl_list.pop() # remove read acl from default setup acl_read = models.SecretACL(secret_id=self.secret_id, operation='read', project_access=False, user_ids=[]) self.acl_list.append(acl_read) self._assert_fail_rbac(['observer', 'creator', 'audit'], self._invoke_on_get, accept='notjsonaccepttype', content_type='application/json', user_id=self.user_id, project_id=self.external_project_id) def test_should_pass_decrypt_secret_private_enabled_with_read_acl(self): """Should pass authz as user has read acl for private secret. Even though secret is private, user with read acl should be able to access the secret. """ self.acl_list.pop() # remove read acl from default setup acl_read = models.SecretACL(secret_id=self.secret_id, operation='read', project_access=False, user_ids=['anyRandomUserX', 'aclUser1']) self.acl_list.append(acl_read) self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit', 'bogusRole'], self._invoke_on_get, accept='notjsonaccepttype', content_type='application/json', user_id='aclUser1', project_id=self.external_project_id) def test_should_pass_decrypt_secret_different_user_valid_read_acl(self): self.acl_list.pop() # remove read acl from default setup acl_read = models.SecretACL(secret_id=self.secret_id, operation='read', project_access=True, user_ids=['anyRandomUserX', 'aclUser1']) self.acl_list.append(acl_read) # token project_id is different from secret's project id but another # user (from different project) has read acl for secret so should pass self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit', 'bogusRole'], self._invoke_on_get, accept='notjsonaccepttype', content_type='application/json', user_id='aclUser1', project_id='different_project_id') def test_should_raise_decrypt_secret_for_different_user_no_read_acl(self): self.acl_list.pop() # remove read acl from default setup acl_read = models.SecretACL(secret_id=self.secret_id, operation='write', project_access=True, user_ids=['anyRandomUserX', 'aclUser1']) self.acl_list.append(acl_read) # token project_id is different from secret's project id but another # user (from different project) has read acl for secret so should pass self._assert_fail_rbac(['admin', 'observer', 'creator', 'audit'], self._invoke_on_get, accept='notjsonaccepttype', content_type='application/json', user_id='aclUser1', project_id='different_project_id') def test_fail_decrypt_secret_for_creator_user_with_different_project(self): """Check for creator user rule for secret decrypt/get call. If token's user is creator of secret but its scoped to different project, then he/she is not allowed access to secret when project is marked private. 
""" self.acl_list.pop() # remove read acl from default setup acl_read = models.SecretACL(secret_id=self.secret_id, operation='write', project_access=True, user_ids=['anyRandomUserX', 'aclUser1']) self.acl_list.append(acl_read) self.resource.controller.secret.creator_id = 'creatorUserX' # token user is creator but scoped to project different from secret # project so don't allow decrypt secret call to creator of that secret self._assert_fail_rbac(['admin', 'observer', 'creator', 'audit', 'bogusRole'], self._invoke_on_get, accept='notjsonaccepttype', content_type='application/json', user_id='creatorUserX', project_id='different_project_id') def test_should_pass_get_secret(self): self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit'], self._invoke_on_get, user_id=self.user_id, project_id=self.external_project_id) def test_should_pass_get_secret_with_no_context(self): """In unauthenticated flow, get secret should work.""" self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit'], self._invoke_on_get_without_context, user_id=self.user_id, project_id=self.external_project_id) def test_should_raise_get_secret_for_different_project_no_acl(self): """Should raise error when secret and token's project is different.""" self.acl_list.pop() # remove read acl from default setup # token project_id is different from secret's project id so should fail self._assert_fail_rbac(['admin', 'observer', 'creator', 'audit'], self._invoke_on_get, user_id=self.user_id, project_id='different_id') def test_should_pass_get_secret_for_same_project_but_different_user(self): # user id should not matter as long token and secret's project match self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit'], self._invoke_on_get, user_id='different_user_id', project_id=self.external_project_id) def test_should_pass_get_secret_for_same_project_with_no_acl(self): self.acl_list.pop() # remove read acl from default setup self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit'], self._invoke_on_get, user_id=self.user_id, project_id=self.external_project_id) def test_should_raise_get_secret_for_with_project_access_disabled(self): """Should raise authz error as secret is marked private. As secret is private so project users should not be able to access the secret. """ self.acl_list.pop() # remove read acl from default setup acl_read = models.SecretACL(secret_id=self.secret_id, operation='read', project_access=False, user_ids=['anyRandomUserX', 'aclUser1']) self.acl_list.append(acl_read) self._assert_fail_rbac(['observer', 'creator', 'audit'], self._invoke_on_get, user_id=self.user_id, project_id=self.external_project_id) def test_pass_get_secret_for_admin_user_with_project_access_disabled(self): """Should pass authz for admin user as secret is marked private. Even when secret is private, admin user should have access the secret. """ self.acl_list.pop() # remove read acl from default setup acl_read = models.SecretACL(secret_id=self.secret_id, operation='read', project_access=False, user_ids=['anyRandomUserX', 'aclUser1']) self.acl_list.append(acl_read) self._assert_pass_rbac(['admin'], self._invoke_on_get, user_id=self.user_id, project_id=self.external_project_id) def test_should_pass_get_secret_for_private_enabled_with_read_acl(self): """Should pass authz as user has read acl for private secret. Even though secret is private, user with read acl should be able to access the secret. 
""" self.acl_list.pop() # remove read acl from default setup acl_read = models.SecretACL(secret_id=self.secret_id, operation='read', project_access=False, user_ids=['anyRandomUserX', 'aclUser1']) self.acl_list.append(acl_read) self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit', 'bogusRole'], self._invoke_on_get, user_id='aclUser1', project_id=self.external_project_id) def test_should_pass_get_secret_different_user_with_valid_read_acl(self): """Should allow when read ACL is defined for a user. Secret's own project and token's project is different but read is allowed because of valid read ACL. """ self.acl_list.pop() # remove read acl from default setup acl_read = models.SecretACL(secret_id=self.secret_id, operation='read', project_access=True, user_ids=['anyRandomUserX', 'aclUser1']) self.acl_list.append(acl_read) # token project_id is different from secret's project id but another # user (from different project) has read acl for secret so should pass self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit', 'bogusRole'], self._invoke_on_get, user_id='aclUser1', project_id='different_project_id') def test_should_raise_get_secret_for_different_user_with_no_read_acl(self): """Get secret fails when no read acl is defined. With different secret and token's project, read is not allowed without a read ACL. """ self.acl_list.pop() # remove read acl from default setup acl_read = models.SecretACL(secret_id=self.secret_id, operation='write', project_access=True, user_ids=['anyRandomUserX', 'aclUser1']) self.acl_list.append(acl_read) # token project_id is different from secret's project id but another # user (from different project) has read acl for secret so should pass self._assert_fail_rbac(['admin', 'observer', 'creator', 'audit'], self._invoke_on_get, user_id='aclUser1', project_id='different_project_id') def test_fail_get_secret_for_creator_user_with_different_project(self): """Check for creator user rule for secret get call. If token's user is creator of secret but its scoped to different project, then he/she is not allowed access to secret when project is marked private. """ self.acl_list.pop() # remove read acl from default setup self.resource.controller.secret.creator_id = 'creatorUserX' self._assert_fail_rbac(['admin', 'observer', 'creator', 'audit', 'bogusRole'], self._invoke_on_get, user_id='creatorUserX', project_id='different_project_id') def test_should_raise_get_secret(self): self._assert_fail_rbac([None, 'bogus'], self._invoke_on_get) def test_should_pass_put_secret(self): self._assert_pass_rbac(['admin', 'creator'], self._invoke_on_put, content_type="application/octet-stream", user_id=self.user_id, project_id=self.external_project_id) def test_should_raise_put_secret(self): self._assert_fail_rbac([None, 'audit', 'observer', 'bogus'], self._invoke_on_put, content_type="application/octet-stream") def test_should_pass_delete_secret(self): self._assert_pass_rbac(['admin'], self._invoke_on_delete, user_id=self.user_id, project_id=self.external_project_id) def test_should_raise_delete_secret(self): """A non-admin user cannot delete other user's secret. User id is different from initial user who has created the secret. """ self._assert_fail_rbac([None, 'audit', 'observer', 'creator', 'bogus'], self._invoke_on_delete, user_id=self.user_id, project_id=self.external_project_id) def test_should_pass_delete_secret_for_owner(self): """Non-admin user can delete his/her own secret Secret creator_id should match with token user to establish ownership. 
""" self._assert_pass_rbac(['creator'], self._invoke_on_delete, user_id=self.creator_user_id, project_id=self.external_project_id) def _invoke_on_get(self): self.resource.on_get(self.req, self.resp) def _invoke_on_get_without_context(self): # Adding this to get code coverage around context check lines self.req.environ.pop('barbican.context') self.resource.on_get(self.req, self.resp, self.external_project_id) def _invoke_on_put(self): self.resource.on_put(self.req, self.resp) def _invoke_on_delete(self): self.resource.on_delete(self.req, self.resp) class WhenTestingContainerResource(BaseTestCase): """RBAC tests for ContainerController class. Container controller tests are quite similar to SecretController as policy logic is same. Just adding them here to make sure logic related to acl gathering data works as expected. """ def setUp(self): super(WhenTestingContainerResource, self).setUp() self.external_project_id = '12345project' self.container_id = '12345secret' self.user_id = '123456user' self.creator_user_id = '123456CreatorUser' # Force an error on GET and DELETE calls that pass RBAC, # as we are not testing such flows in this test module. self.container_repo = mock.MagicMock() fail_method = mock.MagicMock(return_value=None, side_effect=self._generate_get_error()) self.container_repo.get = fail_method self.container_repo.delete_entity_by_id = fail_method acl_read = models.ContainerACL( container_id=self.container_id, operation='read', project_access=True, user_ids=[self.user_id, 'anyRandomId']) self.acl_list = [acl_read] container = mock.MagicMock() container.to_dict_fields = mock.MagicMock(side_effect=IOError) container.id = self.container_id container.container_acls.__iter__.return_value = self.acl_list container.project.external_id = self.external_project_id container.creator_id = self.creator_user_id self.container_repo.get_container_by_id.return_value = container self.setup_container_repository_mock(self.container_repo) self.resource = ContainerResource(container) def test_rules_should_be_loaded(self): self.assertIsNotNone(self.policy_enforcer.rules) def test_should_pass_get_container(self): self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit'], self._invoke_on_get, user_id=self.user_id, project_id=self.external_project_id) def test_should_pass_get_container_with_no_context(self): """In unauthenticated flow, get container should work.""" self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit'], self._invoke_on_get_without_context, user_id=self.user_id, project_id=self.external_project_id) def test_should_raise_get_container_for_different_project_no_acl(self): """Raise error when container and token's project is different.""" self.acl_list.pop() # remove read acl from default setup # token project_id is different from secret's project id so should fail self._assert_fail_rbac(['admin', 'observer', 'creator', 'audit'], self._invoke_on_get, user_id=self.user_id, project_id='different_id') def test_should_pass_get_container_for_same_project_but_different_user( self): """Should pass if token and secret's project match. User id should not matter as long token and container's project match. 
""" self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit'], self._invoke_on_get, user_id='different_user_id', project_id=self.external_project_id) def test_should_pass_get_container_for_same_project_with_no_acl(self): self.acl_list.pop() # remove read acl from default setup self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit'], self._invoke_on_get, user_id=self.user_id, project_id=self.external_project_id) def test_should_raise_get_container_for_with_project_access_disabled(self): """Should raise authz error as container is marked private. As container is private so project users should not be able to access the secret (other than admin user). """ self.acl_list.pop() # remove read acl from default setup acl_read = models.ContainerACL( container_id=self.container_id, operation='read', project_access=False, user_ids=['anyRandomUserX', 'aclUser1']) self.acl_list.append(acl_read) self._assert_fail_rbac(['observer', 'creator', 'audit'], self._invoke_on_get, user_id=self.user_id, project_id=self.external_project_id) def test_pass_get_container_for_admin_user_project_access_disabled(self): """Should pass authz for admin user when container is marked private. For private container, admin user should still be able to access the secret. """ self.acl_list.pop() # remove read acl from default setup acl_read = models.ContainerACL( container_id=self.container_id, operation='read', project_access=False, user_ids=['anyRandomUserX', 'aclUser1']) self.acl_list.append(acl_read) self._assert_pass_rbac(['admin'], self._invoke_on_get, user_id=self.user_id, project_id=self.external_project_id) def test_should_pass_get_container_for_private_enabled_with_read_acl(self): """Should pass authz as user has read acl for private container. Even though container is private, user with read acl should be able to access the container. """ self.acl_list.pop() # remove read acl from default setup acl_read = models.ContainerACL( container_id=self.container_id, operation='read', project_access=False, user_ids=['anyRandomUserX', 'aclUser1']) self.acl_list.append(acl_read) self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit', 'bogusRole'], self._invoke_on_get, user_id='aclUser1', project_id=self.external_project_id) def test_should_pass_get_container_different_user_with_valid_read_acl( self): """Should allow when read ACL is defined for a user. Container's own project and token's project is different but read is allowed because of valid read ACL. User can read regardless of what is token's project as it has necessary ACL. """ self.acl_list.pop() # remove read acl from default setup acl_read = models.ContainerACL( container_id=self.container_id, operation='read', project_access=True, user_ids=['anyRandomUserX', 'aclUser1']) self.acl_list.append(acl_read) self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit', 'bogusRole'], self._invoke_on_get, user_id='aclUser1', project_id='different_project_id') def test_should_raise_get_container_for_different_user_with_no_read_acl( self): """Get secret fails when no read acl is defined. With different container and token's project, read is not allowed without a read ACL. 
""" self.acl_list.pop() # remove read acl from default setup acl_read = models.ContainerACL( container_id=self.container_id, operation='write', project_access=True, user_ids=['anyRandomUserX', 'aclUser1']) self.acl_list.append(acl_read) # token project_id is different from secret's project id but another # user (from different project) has read acl for secret so should pass self._assert_fail_rbac(['admin', 'observer', 'creator', 'audit'], self._invoke_on_get, user_id='aclUser1', project_id='different_project_id') def test_fail_get_container_for_creator_user_different_project(self): """Check for creator user rule for container get call. If token's user is creator of container but its scoped to different project, then he/she is not allowed access to container when project is marked private. """ self.acl_list.pop() # remove read acl from default setup acl_read = models.ContainerACL( container_id=self.container_id, operation='read', project_access=False, user_ids=['anyRandomUserX', 'aclUser1']) self.acl_list.append(acl_read) self._assert_fail_rbac(['creator'], self._invoke_on_get, user_id=self.creator_user_id, project_id='differet_project_id') def test_pass_get_container_for_creator_user_project_access_disabled(self): """Should pass authz for creator user when container is marked private. As container is private so user who created the container can still access it as long as user has 'creator' role in container project. """ self.acl_list.pop() # remove read acl from default setup acl_read = models.ContainerACL( container_id=self.container_id, operation='read', project_access=False, user_ids=['anyRandomUserX', 'aclUser1']) self.acl_list.append(acl_read) self._assert_pass_rbac(['creator'], self._invoke_on_get, user_id=self.creator_user_id, project_id=self.external_project_id) def test_should_raise_get_container(self): self._assert_fail_rbac([None, 'bogus'], self._invoke_on_get) def test_should_pass_delete_container(self): self._assert_pass_rbac(['admin'], self._invoke_on_delete, user_id=self.user_id, project_id=self.external_project_id) def test_should_raise_delete_container(self): """A non-admin user cannot delete other user's container. User id is different from initial user who has created the container. """ self._assert_fail_rbac([None, 'audit', 'observer', 'creator', 'bogus'], self._invoke_on_delete, user_id=self.user_id, project_id=self.external_project_id) def test_should_pass_delete_container_for_owner(self): """Non-admin user can delete his/her own container Container creator_id should match with token user to establish ownership. """ self._assert_pass_rbac(['creator'], self._invoke_on_delete, user_id=self.creator_user_id, project_id=self.external_project_id) def _invoke_on_get(self): self.resource.on_get(self.req, self.resp) def _invoke_on_get_without_context(self): # Adding this to get code coverage around context check lines self.req.environ.pop('barbican.context') self.resource.on_get(self.req, self.resp) def _invoke_on_put(self): self.resource.on_put(self.req, self.resp) def _invoke_on_delete(self): self.resource.on_delete(self.req, self.resp) class WhenTestingOrdersResource(BaseTestCase): """RBAC tests for the barbican.api.resources.OrdersResource class.""" def setUp(self): super(WhenTestingOrdersResource, self).setUp() self.external_project_id = '12345' # Force an error on GET calls that pass RBAC, as we are not testing # such flows in this test module. 
self.order_repo = mock.MagicMock() get_by_create_date = mock.MagicMock(return_value=None, side_effect=self ._generate_get_error()) self.order_repo.get_by_create_date = get_by_create_date self.setup_order_repository_mock(self.order_repo) self.setup_project_repository_mock() self.resource = OrdersResource(queue_resource=mock.MagicMock()) def test_rules_should_be_loaded(self): self.assertIsNotNone(self.policy_enforcer.rules) def test_should_pass_create_order(self): self._assert_pass_rbac(['admin', 'creator'], self._invoke_on_post, content_type='application/json') def test_should_raise_create_order(self): self._assert_fail_rbac([None, 'audit', 'observer', 'bogus'], self._invoke_on_post) def test_should_pass_get_orders(self): self._assert_pass_rbac(['admin', 'observer', 'creator'], self._invoke_on_get) def test_should_raise_get_orders(self): self._assert_fail_rbac([None, 'audit', 'bogus'], self._invoke_on_get) def _invoke_on_post(self): self.resource.on_post(self.req, self.resp) def _invoke_on_get(self): self.resource.on_get(self.req, self.resp) class WhenTestingOrderResource(BaseTestCase): """RBAC tests for the barbican.api.resources.OrderResource class.""" def setUp(self): super(WhenTestingOrderResource, self).setUp() self.external_project_id = '12345project' self.order_id = '12345order' # Force an error on GET and DELETE calls that pass RBAC, # as we are not testing such flows in this test module. self.order_repo = mock.MagicMock() fail_method = mock.MagicMock(return_value=None, side_effect=self._generate_get_error()) self.order_repo.get = fail_method self.order_repo.delete_entity_by_id = fail_method self.setup_order_repository_mock(self.order_repo) self.resource = OrderResource(self.order_id) def test_rules_should_be_loaded(self): self.assertIsNotNone(self.policy_enforcer.rules) def test_should_pass_get_order(self): self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit'], self._invoke_on_get) def test_should_raise_get_order(self): self._assert_fail_rbac([None, 'bogus'], self._invoke_on_get) def test_should_pass_delete_order(self): self._assert_pass_rbac(['admin'], self._invoke_on_delete) def test_should_raise_delete_order(self): self._assert_fail_rbac([None, 'audit', 'observer', 'creator', 'bogus'], self._invoke_on_delete) def _invoke_on_get(self): self.resource.on_get(self.req, self.resp) def _invoke_on_delete(self): self.resource.on_delete(self.req, self.resp) class WhenTestingConsumersResource(BaseTestCase): """RBAC tests for the barbican.api.resources.ConsumersResource class.""" def setUp(self): super(WhenTestingConsumersResource, self).setUp() self.external_project_id = '12345project' self.container_id = '12345container' # Force an error on GET calls that pass RBAC, as we are not testing # such flows in this test module. 
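        # Per the assertions below, creating and deleting container
        # consumers is expected to pass RBAC only for the 'admin' role,
        # while listing consumers is also open to 'observer', 'creator'
        # and 'audit'.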
self.consumer_repo = mock.MagicMock() get_by_container_id = mock.MagicMock(return_value=None, side_effect=self ._generate_get_error()) self.consumer_repo.get_by_container_id = get_by_container_id self.setup_project_repository_mock() self.setup_container_consumer_repository_mock(self.consumer_repo) self.setup_container_repository_mock() self.resource = ConsumersResource(container_id=self.container_id) def test_rules_should_be_loaded(self): self.assertIsNotNone(self.policy_enforcer.rules) def test_should_pass_create_consumer(self): self._assert_pass_rbac(['admin'], self._invoke_on_post, content_type='application/json') def test_should_raise_create_consumer(self): self._assert_fail_rbac([None, 'audit', 'observer', 'creator', 'bogus'], self._invoke_on_post, content_type='application/json') def test_should_pass_delete_consumer(self): self._assert_pass_rbac(['admin'], self._invoke_on_delete, content_type='application/json') def test_should_raise_delete_consumer(self): self._assert_fail_rbac([None, 'audit', 'observer', 'creator', 'bogus'], self._invoke_on_delete) def test_should_pass_get_consumers(self): self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit'], self._invoke_on_get, content_type='application/json') def test_should_raise_get_consumers(self): self._assert_fail_rbac([None, 'bogus'], self._invoke_on_get, content_type='application/json') def _invoke_on_post(self): self.resource.on_post(self.req, self.resp) def _invoke_on_delete(self): self.resource.on_delete(self.req, self.resp) def _invoke_on_get(self): self.resource.on_get(self.req, self.resp) class WhenTestingConsumerResource(BaseTestCase): """RBAC tests for the barbican.api.resources.ConsumerResource class.""" def setUp(self): super(WhenTestingConsumerResource, self).setUp() self.external_project_id = '12345project' self.consumer_id = '12345consumer' # Force an error on GET calls that pass RBAC, as we are not testing # such flows in this test module. self.consumer_repo = mock.MagicMock() fail_method = mock.MagicMock(return_value=None, side_effect=self._generate_get_error()) self.consumer_repo.get = fail_method self.setup_project_repository_mock() self.setup_container_consumer_repository_mock(self.consumer_repo) self.resource = ConsumerResource(consumer_id=self.consumer_id) def test_rules_should_be_loaded(self): self.assertIsNotNone(self.policy_enforcer.rules) def test_should_pass_get_consumer(self): self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit'], self._invoke_on_get) def test_should_raise_get_consumer(self): self._assert_fail_rbac([None, 'bogus'], self._invoke_on_get) def _invoke_on_get(self): self.resource.on_get(self.req, self.resp) class WhenTestingSecretStoresResource(BaseTestCase): """RBAC tests for the barbican.api.resources.SecretStoresResource class.""" def setUp(self): super(WhenTestingSecretStoresResource, self).setUp() self.external_project_id = '12345project' self.moc_enable_patcher = mock.patch( 'barbican.common.utils.is_multiple_backends_enabled') enable_check_method = self.moc_enable_patcher.start() enable_check_method.return_value = True self.addCleanup(self.moc_enable_patcher.stop) # Force an error on GET calls that pass RBAC, as we are not testing # such flows in this test module. 
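        # Per the assertions below, every secret-stores read (listing all
        # stores, the global default and the preferred store) is expected
        # to be admin-only; all other roles, including no role at all,
        # should be rejected.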
self.project_repo = mock.MagicMock() fail_method = mock.MagicMock(return_value=None, side_effect=self._generate_get_error()) self.project_repo.find_by_external_project_id = fail_method self.setup_project_repository_mock(self.project_repo) self.resource = SecretStoresResource() def test_rules_should_be_loaded(self): self.assertIsNotNone(self.policy_enforcer.rules) def test_should_pass_get_all_secret_stores(self): self._assert_pass_rbac(['admin'], self._invoke_on_get) def test_should_raise_get_all_secret_stores(self): self._assert_fail_rbac([None, 'creator', 'observer', 'audit'], self._invoke_on_get) def test_should_pass_get_global_default(self): self._assert_pass_rbac(['admin'], self._invoke_get_global_default) def test_should_raise_get_global_default(self): self._assert_fail_rbac([None, 'creator', 'observer', 'audit'], self._invoke_get_global_default) def test_should_pass_get_preferred(self): self._assert_pass_rbac(['admin'], self._invoke_get_preferred) def test_should_raise_get_preferred(self): self._assert_fail_rbac([None, 'creator', 'observer', 'audit'], self._invoke_get_preferred) def _invoke_on_get(self): self.resource.on_get(self.req, self.resp) def _invoke_get_global_default(self): with mock.patch('pecan.request', self.req): with mock.patch('pecan.response', self.resp): return self.resource.controller.get_global_default() def _invoke_get_preferred(self): with mock.patch('pecan.request', self.req): with mock.patch('pecan.response', self.resp): return self.resource.controller.get_preferred() class WhenTestingSecretStoreResource(BaseTestCase): """RBAC tests for the barbican.api.resources.SecretStoreResource class.""" def setUp(self): super(WhenTestingSecretStoreResource, self).setUp() self.external_project_id = '12345project' self.store_id = '123456SecretStoreId' self.moc_enable_patcher = mock.patch( 'barbican.common.utils.is_multiple_backends_enabled') enable_check_method = self.moc_enable_patcher.start() enable_check_method.return_value = True self.addCleanup(self.moc_enable_patcher.stop) # Force an error on GET calls that pass RBAC, as we are not testing # such flows in this test module. 
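        # Note: is_multiple_backends_enabled is patched to return True
        # above so the secret-stores controller is reachable; without
        # multiple backends enabled these endpoints are presumably not
        # exposed at all.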
self.project_repo = mock.MagicMock() fail_method = mock.MagicMock(return_value=None, side_effect=self._generate_get_error()) self.project_repo.find_by_external_project_id = fail_method self.setup_project_repository_mock(self.project_repo) secret_store_res = mock.MagicMock() secret_store_res.to_dict_fields = mock.MagicMock(side_effect=IOError) secret_store_res.id = self.store_id self.resource = SecretStoreResource(secret_store_res) def test_rules_should_be_loaded(self): self.assertIsNotNone(self.policy_enforcer.rules) def test_should_pass_get_a_secret_store(self): self._assert_pass_rbac(['admin'], self._invoke_on_get) def test_should_raise_get_a_secret_store(self): self._assert_fail_rbac([None, 'creator', 'observer', 'audit'], self._invoke_on_get) def _invoke_on_get(self): self.resource.on_get(self.req, self.resp) class WhenTestingPreferredSecretStoreResource(BaseTestCase): """RBAC tests for barbican.api.resources.PreferredSecretStoreResource""" def setUp(self): super(WhenTestingPreferredSecretStoreResource, self).setUp() self.external_project_id = '12345project' self.store_id = '123456SecretStoreId' self.moc_enable_patcher = mock.patch( 'barbican.common.utils.is_multiple_backends_enabled') enable_check_method = self.moc_enable_patcher.start() enable_check_method.return_value = True self.addCleanup(self.moc_enable_patcher.stop) # Force an error on POST/DELETE calls that pass RBAC, as we are not # testing such flows in this test module. self.project_repo = mock.MagicMock() fail_method = mock.MagicMock(return_value=None, side_effect=self._generate_get_error()) self.project_repo.find_by_external_project_id = fail_method self.setup_project_repository_mock(self.project_repo) self.resource = PreferredSecretStoreResource(mock.MagicMock()) def test_rules_should_be_loaded(self): self.assertIsNotNone(self.policy_enforcer.rules) def test_should_pass_set_preferred_secret_store(self): self._assert_pass_rbac(['admin'], self._invoke_on_post) def test_should_raise_set_preferred_secret_store(self): self._assert_fail_rbac([None, 'creator', 'observer', 'audit'], self._invoke_on_post) def _invoke_on_post(self): self.resource.on_post(self.req, self.resp) class WhenTestingSecretConsumersResource(BaseTestCase): """RBAC tests for barbican.api.resources.SecretConsumersResource""" def setUp(self): super(WhenTestingSecretConsumersResource, self).setUp() self.external_project_id = '12345project' self.secret_id = '12345secret' # Force an error on GET calls that pass RBAC, as we are not testing # such flows in this test module. 
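        # Secret consumers mirror the container-consumer cases above; the
        # repository method stubbed here is get_by_secret_id rather than
        # get_by_container_id, but the RBAC expectations are identical.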
self.consumer_repo = mock.MagicMock() get_by_secret_id = mock.MagicMock(return_value=None, side_effect=self ._generate_get_error()) self.consumer_repo.get_by_secret_id = get_by_secret_id self.setup_project_repository_mock() self.setup_secret_consumer_repository_mock(self.consumer_repo) self.setup_secret_repository_mock() self.resource = SecretConsumersResource(secret_id=self.secret_id) def test_rules_should_be_loaded(self): self.assertIsNotNone(self.policy_enforcer.rules) def test_should_pass_create_consumer(self): self._assert_pass_rbac(['admin'], self._invoke_on_post, content_type='application/json') def test_should_raise_create_consumer(self): self._assert_fail_rbac([None, 'audit', 'observer', 'creator', 'bogus'], self._invoke_on_post, content_type='application/json') def test_should_pass_delete_consumer(self): self._assert_pass_rbac(['admin'], self._invoke_on_delete, content_type='application/json') def test_should_raise_delete_consumer(self): self._assert_fail_rbac([None, 'audit', 'observer', 'creator', 'bogus'], self._invoke_on_delete) def test_should_pass_get_consumers(self): self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit'], self._invoke_on_get, content_type='application/json') def test_should_raise_get_consumers(self): self._assert_fail_rbac([None, 'bogus'], self._invoke_on_get, content_type='application/json') def _invoke_on_post(self): self.resource.on_post(self.req, self.resp) def _invoke_on_delete(self): self.resource.on_delete(self.req, self.resp) def _invoke_on_get(self): self.resource.on_get(self.req, self.resp) class WhenTestingSecretConsumerResource(BaseTestCase): """RBAC tests for barbican.api.resources.SecretConsumerResource""" def setUp(self): super(WhenTestingSecretConsumerResource, self).setUp() self.external_project_id = '12345project' self.consumer_id = '12345consumer' # Force an error on GET calls that pass RBAC, as we are not testing # such flows in this test module. self.consumer_repo = mock.MagicMock() fail_method = mock.MagicMock(return_value=None, side_effect=self._generate_get_error()) self.consumer_repo.get = fail_method self.setup_project_repository_mock() self.setup_secret_consumer_repository_mock(self.consumer_repo) self.resource = SecretConsumerResource(consumer_id=self.consumer_id) def test_rules_should_be_loaded(self): self.assertIsNotNone(self.policy_enforcer.rules) def test_should_pass_get_consumer(self): self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit'], self._invoke_on_get) def test_should_raise_get_consumer(self): self._assert_fail_rbac([None, 'bogus'], self._invoke_on_get) def _invoke_on_get(self): self.resource.on_get(self.req, self.resp) barbican-9.1.0.dev50/barbican/tests/api/test_resources.py0000664000175000017500000017210713616500636023525 0ustar sahidsahid00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ This test module focuses on typical-flow business logic tests with the API resource classes. 
For RBAC tests of these classes, see the 'resources_policy_test.py' module. """ import mimetypes import mock import pecan from testtools import testcase import webtest from barbican import api from barbican.api import app from barbican.api import controllers from barbican.common import exception as excep from barbican.common import hrefs from barbican.common import utils as barbican_utils import barbican.context from barbican.model import models from barbican.tests import utils def get_barbican_env(external_project_id): """Create and return a barbican.context for use with the RBAC decorator Injects the provided external_project_id. """ kwargs = {'roles': None, 'user_id': None, 'project_id': external_project_id, 'is_admin': True} ctx = barbican.context.RequestContext(**kwargs) ctx.policy_enforcer = None barbican_env = {'barbican.context': ctx} return barbican_env def create_secret(id_ref="id", name="name", algorithm=None, bit_length=None, mode=None, encrypted_datum=None, content_type=None, project_id=None): """Generate a Secret entity instance.""" info = { 'id': id_ref, 'name': name, 'algorithm': algorithm, 'bit_length': bit_length, 'mode': mode, 'project_id': project_id, } secret = models.Secret(info) secret.id = id_ref if encrypted_datum: secret.encrypted_data = [encrypted_datum] if content_type: content_meta = models.SecretStoreMetadatum('content_type', content_type) secret.secret_store_metadata['content_type'] = content_meta return secret def create_order_with_meta(id_ref="id", order_type="certificate", meta={}, status='PENDING'): """Generate an Order entity instance with Metadata.""" order = models.Order() order.id = id_ref order.type = order_type order.meta = meta order.status = status return order def validate_datum(test, datum): test.assertIsNone(datum.kek_meta_extended) test.assertIsNotNone(datum.kek_meta_project) test.assertTrue(datum.kek_meta_project.bind_completed) test.assertIsNotNone(datum.kek_meta_project.plugin_name) test.assertIsNotNone(datum.kek_meta_project.kek_label) def create_container(id_ref, project_id=None, external_project_id=None): """Generate a Container entity instance.""" container = models.Container() container.id = id_ref container.name = 'test name' container.type = 'rsa' container_secret = models.ContainerSecret() container_secret.container_id = id_ref container_secret.secret_id = '123' container.container_secrets.append(container_secret) if project_id: project = models.Project() project.id = project_id project.external_id = external_project_id container.project = project return container def create_container_consumer(container_id, project_id, id_ref): """Generate a ContainerConsumerMetadatum entity instance.""" data = { 'name': 'test name', 'URL': 'http://test/url' } consumer = models.ContainerConsumerMetadatum(container_id, project_id, data) consumer.id = id_ref return consumer def create_secret_consumer(secret_id, project_id, id_ref): """Generate a SecretConsumerMetadatum entity instance.""" consumer = models.SecretConsumerMetadatum( secret_id, project_id, "service", "resource_type", "resource_id", ) consumer.id = id_ref return consumer class SecretAllowAllMimeTypesDecoratorTest(utils.BaseTestCase): def setUp(self): super(SecretAllowAllMimeTypesDecoratorTest, self).setUp() self.mimetype_values = set(mimetypes.types_map.values()) @pecan.expose(generic=True) @barbican_utils.allow_all_content_types def _empty_pecan_exposed_function(self): pass def _empty_function(self): pass def test_mimetypes_successfully_added_to_mocked_function(self): empty_function = 
mock.MagicMock() empty_function._pecan = {} func = barbican_utils.allow_all_content_types(empty_function) cfg = func._pecan self.assertEqual(len(self.mimetype_values), len(cfg['content_types'])) def test_mimetypes_successfully_added_to_pecan_exposed_function(self): cfg = self._empty_pecan_exposed_function._pecan self.assertEqual(len(self.mimetype_values), len(cfg['content_types'])) def test_decorator_raises_if_function_not_pecan_exposed(self): self.assertRaises(AttributeError, barbican_utils.allow_all_content_types, self._empty_function) class FunctionalTest(utils.BaseTestCase, utils.MockModelRepositoryMixin, testcase.WithAttributes): def setUp(self): super(FunctionalTest, self).setUp() root = self.root config = {'app': {'root': root}} pecan.set_config(config, overwrite=True) self.app = webtest.TestApp(pecan.make_app(root)) def tearDown(self): super(FunctionalTest, self).tearDown() pecan.set_config({}, overwrite=True) @property def root(self): return controllers.versions.VersionController() class BaseSecretsResource(FunctionalTest): """Base test class for the Secrets resource.""" def setUp(self): super(BaseSecretsResource, self).setUp() self.app = webtest.TestApp(app.build_wsgi_app(self.root)) self.app.extra_environ = get_barbican_env(self.external_project_id) @property def root(self): self._init() class RootController(object): secrets = controllers.secrets.SecretsController() return RootController() def _init(self, payload=b'not-encrypted', payload_content_type='text/plain', payload_content_encoding=None): self.name = 'name' self.payload = payload self.payload_content_type = payload_content_type self.payload_content_encoding = payload_content_encoding self.secret_algorithm = 'AES' self.secret_bit_length = 256 self.secret_mode = 'CBC' self.secret_req = {'name': self.name, 'algorithm': self.secret_algorithm, 'bit_length': self.secret_bit_length, 'creator_id': None, 'mode': self.secret_mode} if payload: self.secret_req['payload'] = payload if payload_content_type: self.secret_req['payload_content_type'] = payload_content_type if payload_content_encoding: self.secret_req['payload_content_encoding'] = ( payload_content_encoding) # Set up mocked project self.external_project_id = 'keystone1234' self.project_entity_id = 'tid1234' self.project = models.Project() self.project.id = self.project_entity_id self.project.external_id = self.external_project_id # Set up mocked project repo self.project_repo = mock.MagicMock() self.project_repo.find_by_external_project_id.return_value = ( self.project) self.setup_project_repository_mock(self.project_repo) # Set up mocked secret self.secret = models.Secret() self.secret.id = utils.generate_test_valid_uuid() # Set up mocked secret repo self.secret_repo = mock.MagicMock() self.secret_repo.create_from.return_value = self.secret self.setup_secret_repository_mock(self.secret_repo) # Set up mocked encrypted datum repo self.datum_repo = mock.MagicMock() self.datum_repo.create_from.return_value = None self.setup_encrypted_datum_repository_mock(self.datum_repo) # Set up mocked kek datum self.kek_datum = models.KEKDatum() self.kek_datum.kek_label = "kek_label" self.kek_datum.bind_completed = False self.kek_datum.algorithm = '' self.kek_datum.bit_length = 0 self.kek_datum.mode = '' self.kek_datum.plugin_meta = '' # Set up mocked kek datum repo self.kek_repo = mock.MagicMock() self.kek_repo.find_or_create_kek_datum.return_value = self.kek_datum self.setup_kek_datum_repository_mock(self.kek_repo) # Set up mocked secret meta repo self.setup_secret_meta_repository_mock() 
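        # The setup_*_repository_mock helpers come from
        # utils.MockModelRepositoryMixin (inherited via FunctionalTest);
        # they swap the given mock (or a fresh MagicMock when called with
        # no argument) in for the module-level repository accessors, so the
        # controllers under test never touch a real database.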
# Set up mocked transport key self.transport_key = models.TransportKey( 'default_plugin_name', 'XXXABCDEF') self.transport_key_id = 'tkey12345' self.tkey_url = hrefs.convert_transport_key_to_href( self.transport_key.id) # Set up mocked transport key self.setup_transport_key_repository_mock() class WhenGettingPuttingOrDeletingSecretUsingSecretResource(FunctionalTest): def setUp(self): super( WhenGettingPuttingOrDeletingSecretUsingSecretResource, self ).setUp() self.app = webtest.TestApp(app.build_wsgi_app(self.root)) self.app.extra_environ = get_barbican_env(self.external_project_id) @property def root(self): self._init() class RootController(object): secrets = controllers.secrets.SecretsController() return RootController() def _init(self): self.project_id = 'projectid1234' self.external_project_id = 'keystone1234' self.name = 'name1234' secret_id = utils.generate_test_valid_uuid() datum_id = "iddatum1" kek_id = "idkek1" self.secret_algorithm = "AES" self.secret_bit_length = 256 self.secret_mode = "CBC" self.kek_project = models.KEKDatum() self.kek_project.id = kek_id self.kek_project.active = True self.kek_project.bind_completed = False self.kek_project.kek_label = "kek_label" self.datum = models.EncryptedDatum() self.datum.id = datum_id self.datum.secret_id = secret_id self.datum.kek_id = kek_id self.datum.kek_meta_project = self.kek_project self.datum.content_type = "text/plain" self.datum.cypher_text = "aaaa" # base64 value. self.secret = create_secret(id_ref=secret_id, name=self.name, algorithm=self.secret_algorithm, bit_length=self.secret_bit_length, mode=self.secret_mode, encrypted_datum=self.datum, content_type=self.datum.content_type) self.secret.secret_acls = [] self.secret.project = mock.MagicMock() self.secret.project.external_id = self.external_project_id # Set up mocked project self.project = models.Project() self.project.id = self.project_id self.project.external_id = self.external_project_id # Set up mocked project repo self.project_repo = mock.MagicMock() self.project_repo.get.return_value = self.project self.project_repo.find_by_external_project_id.return_value = ( self.project) self.setup_project_repository_mock(self.project_repo) # Set up mocked secret repo self.secret_repo = mock.Mock() self.secret_repo.get = mock.Mock(return_value=self.secret) self.secret_repo.get_secret_by_id = mock.Mock(return_value=self.secret) self.secret_repo.delete_entity_by_id = mock.Mock(return_value=None) self.setup_secret_repository_mock(self.secret_repo) # Set up mocked encrypted datum repo self.datum_repo = mock.MagicMock() self.datum_repo.create_from.return_value = None self.setup_encrypted_datum_repository_mock(self.datum_repo) # Set up mocked kek datum repo self.setup_kek_datum_repository_mock() # Set up mocked secret meta repo self.secret_meta_repo = mock.MagicMock() self.secret_meta_repo.get_metadata_for_secret.return_value = None self.setup_secret_meta_repository_mock(self.secret_meta_repo) # Set up mocked transport key self.transport_key_model = models.TransportKey( "default_plugin", "my transport key") # Set up mocked transport key repo self.transport_key_repo = mock.MagicMock() self.transport_key_repo.get.return_value = self.transport_key_model self.setup_transport_key_repository_mock(self.transport_key_repo) self.transport_key_id = 'tkey12345' @mock.patch('barbican.plugin.resources.get_transport_key_id_for_retrieval') def test_should_get_secret_as_json(self, mock_get_transport_key): mock_get_transport_key.return_value = None resp = self.app.get( 
'/secrets/{0}/'.format(self.secret.id), headers={'Accept': 'application/json', 'Accept-Encoding': 'gzip'} ) self.secret_repo.get_secret_by_id.assert_called_once_with( entity_id=self.secret.id, suppress_exception=True) self.assertEqual(200, resp.status_int) self.assertNotIn('content_encodings', resp.namespace) self.assertIn('content_types', resp.namespace) self.assertIn(self.datum.content_type, resp.namespace['content_types'].values()) self.assertNotIn('mime_type', resp.namespace) @testcase.attr('deprecated') @mock.patch('barbican.plugin.resources.get_secret') def test_should_get_secret_as_plain_based_on_content_type(self, mock_get_secret): data = 'unencrypted_data' mock_get_secret.return_value = data resp = self.app.get( '/secrets/{0}/payload/'.format(self.secret.id), headers={'Accept': 'text/plain'} ) self.secret_repo.get_secret_by_id.assert_called_once_with( entity_id=self.secret.id, suppress_exception=True) self.assertEqual(200, resp.status_int) self.assertEqual(data, resp.body.decode()) mock_get_secret.assert_called_once_with( 'text/plain', self.secret, self.project, None, None ) @mock.patch('barbican.plugin.resources.get_secret') def test_should_get_secret_as_plain_with_twsk(self, mock_get_secret): data = 'encrypted_data' mock_get_secret.return_value = data twsk = "trans_wrapped_session_key" resp = self.app.get( ('/secrets/{0}/payload/' '?trans_wrapped_session_key={1}&transport_key_id={2}') .format(self.secret.id, twsk, self.transport_key_id), headers={'Accept': 'text/plain'} ) self.secret_repo.get_secret_by_id.assert_called_once_with( entity_id=self.secret.id, suppress_exception=True) self.assertEqual(200, resp.status_int) self.assertEqual(data, resp.body.decode()) mock_get_secret.assert_called_once_with( 'text/plain', self.secret, self.project, twsk, self.transport_key_model.transport_key ) @testcase.attr('deprecated') @mock.patch('barbican.plugin.resources.get_secret') def test_should_get_secret_as_plain_with_twsk_based_on_content_type( self, mock_get_secret): data = 'encrypted_data' mock_get_secret.return_value = data twsk = "trans_wrapped_session_key" resp = self.app.get( ('/secrets/{0}/' '?trans_wrapped_session_key={1}&transport_key_id={2}') .format(self.secret.id, twsk, self.transport_key_id), headers={'Accept': 'text/plain'} ) self.secret_repo.get_secret_by_id.assert_called_once_with( entity_id=self.secret.id, suppress_exception=True) self.assertEqual(200, resp.status_int) self.assertEqual(data, resp.body.decode()) mock_get_secret.assert_called_once_with( 'text/plain', self.secret, self.project, twsk, self.transport_key_model.transport_key ) @mock.patch('barbican.plugin.resources.get_secret') def test_should_throw_exception_for_get_when_twsk_but_no_tkey_id( self, mock_get_secret): data = 'encrypted_data' mock_get_secret.return_value = data twsk = "trans_wrapped_session_key" resp = self.app.get( '/secrets/{0}/payload/?trans_wrapped_session_key={1}'.format( self.secret.id, twsk), headers={'Accept': 'text/plain'}, expect_errors=True ) self.secret_repo.get_secret_by_id.assert_called_once_with( entity_id=self.secret.id, suppress_exception=True) self.assertEqual(400, resp.status_int) @testcase.attr('deprecated') @mock.patch('barbican.plugin.resources.get_secret') def test_should_throw_exception_for_get_when_twsk_but_no_tkey_id_old_way( self, mock_get_secret): data = 'encrypted_data' mock_get_secret.return_value = data twsk = "trans_wrapped_session_key" resp = self.app.get( '/secrets/{0}/payload/?trans_wrapped_session_key={1}'.format( self.secret.id, twsk), headers={'Accept': 
'text/plain'}, expect_errors=True ) self.secret_repo.get_secret_by_id.assert_called_once_with( entity_id=self.secret.id, suppress_exception=True) self.assertEqual(400, resp.status_int) @mock.patch('barbican.plugin.resources.get_transport_key_id_for_retrieval') def test_should_get_secret_meta_for_binary(self, mock_get_transport_key): mock_get_transport_key.return_value = None self.datum.content_type = "application/octet-stream" self.secret.secret_store_metadata['content_type'].value = ( self.datum.content_type ) self.datum.cypher_text = 'aaaa' resp = self.app.get( '/secrets/{0}/'.format(self.secret.id), headers={'Accept': 'application/json', 'Accept-Encoding': 'gzip'} ) self.secret_repo.get_secret_by_id.assert_called_once_with( entity_id=self.secret.id, suppress_exception=True) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.namespace) self.assertIn('content_types', resp.namespace) self.assertIn(self.datum.content_type, resp.namespace['content_types'].values()) @mock.patch('barbican.plugin.resources.get_transport_key_id_for_retrieval') def test_should_get_secret_meta_for_binary_with_tkey( self, mock_get_transport_key_id): mock_get_transport_key_id.return_value = self.transport_key_id self.datum.content_type = "application/octet-stream" self.secret.secret_store_metadata['content_type'].value = ( self.datum.content_type ) self.datum.cypher_text = 'aaaa' resp = self.app.get( '/secrets/{0}/?transport_key_needed=true'.format( self.secret.id), headers={'Accept': 'application/json', 'Accept-Encoding': 'gzip'} ) self.secret_repo.get_secret_by_id.assert_called_once_with( entity_id=self.secret.id, suppress_exception=True) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.namespace) self.assertIn('content_types', resp.namespace) self.assertIn(self.datum.content_type, resp.namespace['content_types'].values()) self.assertIn('transport_key_ref', resp.namespace) self.assertEqual( hrefs.convert_transport_key_to_href(self.transport_key_id), resp.namespace['transport_key_ref'] ) @testcase.attr('deprecated') @mock.patch('barbican.plugin.resources.get_secret') def test_should_get_secret_as_binary_based_on_content_type( self, mock_get_secret): data = 'unencrypted_data' mock_get_secret.return_value = data self.datum.content_type = "application/octet-stream" self.datum.cypher_text = 'aaaa' resp = self.app.get( '/secrets/{0}/'.format(self.secret.id), headers={ 'Accept': 'application/octet-stream', 'Accept-Encoding': 'gzip' } ) self.assertEqual(data, resp.body.decode()) mock_get_secret.assert_called_once_with( 'application/octet-stream', self.secret, self.project, None, None ) @mock.patch('barbican.plugin.resources.store_secret') def test_should_put_secret_as_plain_with_tkey_id(self, mock_store_secret): self.secret.encrypted_data = [] self.secret.secret_store_metadata = {} resp = self.app.put( '/secrets/{0}/?transport_key_id={1}'.format( self.secret.id, self.transport_key_id), 'plain text', headers={'Accept': 'text/plain', 'Content-Type': 'text/plain'}, ) self.assertEqual(204, resp.status_int) mock_store_secret.assert_called_once_with( unencrypted_raw=b'plain text', content_type_raw='text/plain', content_encoding=None, secret_model=self.secret, project_model=self.project, transport_key_id=self.transport_key_id ) @mock.patch('barbican.plugin.resources.store_secret') def test_should_put_secret_as_binary_with_tkey_id(self, mock_store_secret): self.secret.encrypted_data = [] self.secret.secret_store_metadata = {} resp = self.app.put( '/secrets/{0}/?transport_key_id={1}'.format( 
self.secret.id, self.transport_key_id), 'plain text', headers={ 'Accept': 'text/plain', 'Content-Type': 'application/octet-stream' }, ) self.assertEqual(204, resp.status_int) mock_store_secret.assert_called_once_with( unencrypted_raw=b'plain text', content_type_raw='application/octet-stream', content_encoding=None, secret_model=self.secret, project_model=self.project, transport_key_id=self.transport_key_id ) class WhenAddingNavigationHrefs(utils.BaseTestCase): def setUp(self): super(WhenAddingNavigationHrefs, self).setUp() self.resource_name = 'orders' self.external_project_id = '12345' self.num_elements = 100 self.data = {} def test_add_nav_hrefs_adds_next_only(self): offset = 0 limit = 10 data_with_hrefs = hrefs.add_nav_hrefs( self.resource_name, offset, limit, self.num_elements, self.data) self.assertNotIn('previous', data_with_hrefs) self.assertIn('next', data_with_hrefs) def test_add_nav_hrefs_adds_both_next_and_previous(self): offset = 10 limit = 10 data_with_hrefs = hrefs.add_nav_hrefs( self.resource_name, offset, limit, self.num_elements, self.data) self.assertIn('previous', data_with_hrefs) self.assertIn('next', data_with_hrefs) def test_add_nav_hrefs_adds_previous_only(self): offset = 90 limit = 10 data_with_hrefs = hrefs.add_nav_hrefs( self.resource_name, offset, limit, self.num_elements, self.data) self.assertIn('previous', data_with_hrefs) self.assertNotIn('next', data_with_hrefs) class TestingJsonSanitization(utils.BaseTestCase): def test_json_sanitization_without_array(self): json_without_array = {"name": "name", "algorithm": "AES", "payload_content_type": " text/plain ", "mode": "CBC", "bit_length": 256, "payload": "not-encrypted"} self.assertTrue(json_without_array['payload_content_type'] .startswith(' '), "whitespace should be there") self.assertTrue(json_without_array['payload_content_type'] .endswith(' '), "whitespace should be there") api.strip_whitespace(json_without_array) self.assertFalse(json_without_array['payload_content_type'] .startswith(' '), "whitespace should be gone") self.assertFalse(json_without_array['payload_content_type'] .endswith(' '), "whitespace should be gone") def test_json_sanitization_with_array(self): json_with_array = {"name": "name", "algorithm": "AES", "payload_content_type": "text/plain", "mode": "CBC", "bit_length": 256, "payload": "not-encrypted", "an-array": [{"name": " item 1"}, {"name": "item2 "}]} self.assertTrue(json_with_array['an-array'][0]['name'] .startswith(' '), "whitespace should be there") self.assertTrue(json_with_array['an-array'][1]['name'] .endswith(' '), "whitespace should be there") api.strip_whitespace(json_with_array) self.assertFalse(json_with_array['an-array'][0]['name'] .startswith(' '), "whitespace should be gone") self.assertFalse(json_with_array['an-array'][1]['name'] .endswith(' '), "whitespace should be gone") class WhenCreatingContainerConsumersUsingResource(FunctionalTest): def setUp(self): super( WhenCreatingContainerConsumersUsingResource, self ).setUp() self.app = webtest.TestApp(app.build_wsgi_app(self.root)) self.app.extra_environ = get_barbican_env(self.external_project_id) @property def root(self): self._init() class RootController(object): containers = controllers.containers.ContainersController() return RootController() def _init(self): self.name = 'test container name' self.type = 'generic' self.secret_refs = [ { 'name': 'test secret 1', 'secret_ref': '1231' }, { 'name': 'test secret 2', 'secret_ref': '1232' }, { 'name': 'test secret 3', 'secret_ref': '1233' } ] self.consumer_ref = { 'name': 
'test_consumer1', 'URL': 'http://consumer/1' } self.project_internal_id = 'projectid1234' self.external_project_id = 'keystoneid1234' # Set up mocked project self.project = models.Project() self.project.id = self.project_internal_id self.project.external_id = self.external_project_id # Set up mocked project repo self.project_repo = mock.MagicMock() self.project_repo.get.return_value = self.project self.setup_project_repository_mock(self.project_repo) # Set up mocked quota enforcer self.quota_patch = mock.patch( 'barbican.common.quota.QuotaEnforcer.enforce', return_value=None) self.quota_patch.start() self.addCleanup(self.quota_patch.stop) # Set up mocked container self.container = create_container( id_ref=utils.generate_test_valid_uuid(), project_id=self.project_internal_id, external_project_id=self.external_project_id) # Set up mocked container repo self.container_repo = mock.MagicMock() self.container_repo.get_container_by_id.return_value = self.container self.setup_container_repository_mock(self.container_repo) # Set up secret repo self.secret_repo = mock.MagicMock() self.secret_repo.create_from.return_value = None self.setup_secret_repository_mock(self.secret_repo) # Set up container consumer repo self.consumer_repo = mock.MagicMock() self.consumer_repo.create_from.return_value = None self.setup_container_consumer_repository_mock(self.consumer_repo) self.container_req = {'name': self.name, 'type': self.type, 'secret_refs': self.secret_refs} def test_should_add_new_consumer(self): resp = self.app.post_json( '/containers/{0}/consumers/'.format(self.container.id), self.consumer_ref ) self.assertEqual(200, resp.status_int) self.assertNotIn(self.external_project_id, resp.headers['Location']) args, kwargs = self.consumer_repo.create_or_update_from.call_args consumer = args[0] self.assertIsInstance(consumer, models.ContainerConsumerMetadatum) def test_should_fail_consumer_bad_json(self): resp = self.app.post( '/containers/{0}/consumers/'.format(self.container.id), '', expect_errors=True ) self.assertEqual(415, resp.status_int) def test_should_404_when_container_ref_doesnt_exist(self): self.container_repo.get_container_by_id.return_value = None resp = self.app.post_json( '/containers/{0}/consumers/'.format('bad_id'), self.consumer_ref, expect_errors=True ) self.assertEqual(404, resp.status_int) class WhenGettingOrDeletingContainerConsumersUsingResource(FunctionalTest): def setUp(self): super( WhenGettingOrDeletingContainerConsumersUsingResource, self ).setUp() self.app = webtest.TestApp(app.build_wsgi_app(self.root)) self.app.extra_environ = get_barbican_env(self.external_project_id) @property def root(self): self._init() class RootController(object): containers = controllers.containers.ContainersController() return RootController() def _init(self): self.external_project_id = 'keystoneid1234' self.project_internal_id = 'projectid1234' # Set up mocked project self.project = models.Project() self.project.id = self.project_internal_id self.project.external_id = self.external_project_id # Set up mocked project repo self.project_repo = mock.MagicMock() self.project_repo.get.return_value = self.project self.setup_project_repository_mock(self.project_repo) # Set up mocked container self.container = create_container( id_ref=utils.generate_test_valid_uuid(), project_id=self.project_internal_id, external_project_id=self.external_project_id) # Set up mocked consumers self.consumer = create_container_consumer( self.container.id, self.project_internal_id, id_ref=utils.generate_test_valid_uuid()) 
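        # A second consumer so the container-delete tests below can verify
        # that every registered consumer is removed along with the
        # container.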
self.consumer2 = create_container_consumer( self.container.id, self.project_internal_id, id_ref=utils.generate_test_valid_uuid()) self.consumer_ref = { 'name': self.consumer.name, 'URL': self.consumer.URL } # Set up mocked container repo self.container_repo = mock.MagicMock() self.container_repo.get_container_by_id.return_value = self.container self.setup_container_repository_mock(self.container_repo) # Set up mocked container consumer repo self.consumer_repo = mock.MagicMock() self.consumer_repo.get_by_values.return_value = self.consumer self.consumer_repo.delete_entity_by_id.return_value = None self.setup_container_consumer_repository_mock(self.consumer_repo) # Set up mocked secret repo self.setup_secret_repository_mock() def test_should_get_consumer(self): ret_val = ([self.consumer], 0, 0, 1) self.consumer_repo.get_by_container_id.return_value = ret_val resp = self.app.get('/containers/{0}/consumers/'.format( self.container.id )) self.assertEqual(200, resp.status_int) self.consumer_repo.get_by_container_id.assert_called_once_with( self.container.id, limit_arg=None, offset_arg=0, suppress_exception=True ) self.assertEqual(self.consumer.name, resp.json['consumers'][0]['name']) self.assertEqual(self.consumer.URL, resp.json['consumers'][0]['URL']) def test_should_404_when_container_ref_doesnt_exist(self): self.container_repo.get_container_by_id.return_value = None resp = self.app.get('/containers/{0}/consumers/'.format( 'bad_id' ), expect_errors=True) self.assertEqual(404, resp.status_int) def test_should_get_consumer_by_id(self): self.consumer_repo.get.return_value = self.consumer resp = self.app.get('/containers/{0}/consumers/{1}/'.format( self.container.id, self.consumer.id )) self.assertEqual(200, resp.status_int) def test_should_404_with_bad_consumer_id(self): self.consumer_repo.get.return_value = None resp = self.app.get('/containers/{0}/consumers/{1}/'.format( self.container.id, 'bad_id' ), expect_errors=True) self.assertEqual(404, resp.status_int) def test_should_get_no_consumers(self): self.consumer_repo.get_by_container_id.return_value = ([], 0, 0, 0) resp = self.app.get('/containers/{0}/consumers/'.format( self.container.id )) self.assertEqual(200, resp.status_int) def test_should_delete_consumer(self): self.app.delete_json('/containers/{0}/consumers/'.format( self.container.id ), self.consumer_ref) self.consumer_repo.delete_entity_by_id.assert_called_once_with( self.consumer.id, self.external_project_id) def test_should_fail_deleting_consumer_bad_json(self): resp = self.app.delete( '/containers/{0}/consumers/'.format(self.container.id), '', expect_errors=True ) self.assertEqual(415, resp.status_int) def test_should_404_on_delete_when_consumer_not_found(self): old_return = self.consumer_repo.get_by_values.return_value self.consumer_repo.get_by_values.return_value = None resp = self.app.delete_json('/containers/{0}/consumers/'.format( self.container.id ), self.consumer_ref, expect_errors=True) self.consumer_repo.get_by_values.return_value = old_return self.assertEqual(404, resp.status_int) # Error response should have json content type self.assertEqual("application/json", resp.content_type) def test_should_404_on_delete_when_consumer_not_found_later(self): self.consumer_repo.delete_entity_by_id.side_effect = excep.NotFound() resp = self.app.delete_json('/containers/{0}/consumers/'.format( self.container.id ), self.consumer_ref, expect_errors=True) self.consumer_repo.delete_entity_by_id.side_effect = None self.assertEqual(404, resp.status_int) # Error response should have json 
content type self.assertEqual("application/json", resp.content_type) def test_should_delete_consumers_on_container_delete(self): consumers = [self.consumer, self.consumer2] ret_val = (consumers, 0, 0, 1) self.consumer_repo.get_by_container_id.return_value = ret_val resp = self.app.delete( '/containers/{0}/'.format(self.container.id) ) self.assertEqual(204, resp.status_int) # Verify consumers were deleted calls = [] for consumer in consumers: calls.append(mock.call(consumer.id, self.external_project_id)) self.consumer_repo.delete_entity_by_id.assert_has_calls( calls, any_order=True ) def test_should_pass_on_container_delete_with_missing_consumers(self): consumers = [self.consumer, self.consumer2] ret_val = (consumers, 0, 0, 1) self.consumer_repo.get_by_container_id.return_value = ret_val self.consumer_repo.delete_entity_by_id.side_effect = excep.NotFound resp = self.app.delete( '/containers/{0}/'.format(self.container.id) ) self.assertEqual(204, resp.status_int) # Verify consumers were deleted calls = [] for consumer in consumers: calls.append(mock.call(consumer.id, self.external_project_id)) self.consumer_repo.delete_entity_by_id.assert_has_calls( calls, any_order=True ) class WhenPerformingUnallowedOperationsOnContainerConsumers(FunctionalTest): def setUp(self): super( WhenPerformingUnallowedOperationsOnContainerConsumers, self ).setUp() self.app = webtest.TestApp(app.build_wsgi_app(self.root)) self.app.extra_environ = get_barbican_env(self.external_project_id) @property def root(self): self._init() class RootController(object): containers = controllers.containers.ContainersController() return RootController() def _init(self): self.name = 'test container name' self.type = 'generic' self.secret_refs = [ { 'name': 'test secret 1', 'secret_ref': '1231' }, { 'name': 'test secret 2', 'secret_ref': '1232' }, { 'name': 'test secret 3', 'secret_ref': '1233' } ] self.consumer_ref = { 'name': 'test_consumer1', 'URL': 'http://consumer/1' } self.external_project_id = 'keystoneid1234' self.project_internal_id = 'projectid1234' # Set up mocked project self.project = models.Project() self.project.id = self.project_internal_id self.project.external_id = self.external_project_id # Set up mocked project repo self.project_repo = mock.MagicMock() self.project_repo.get.return_value = self.project self.setup_project_repository_mock(self.project_repo) # Set up mocked container self.container = create_container( id_ref=utils.generate_test_valid_uuid(), project_id=self.project_internal_id, external_project_id=self.external_project_id) # Set up mocked container consumers self.consumer = create_container_consumer( self.container.id, self.project_internal_id, id_ref=utils.generate_test_valid_uuid()) self.consumer2 = create_container_consumer( self.container.id, self.project_internal_id, id_ref=utils.generate_test_valid_uuid()) self.consumer_ref = { 'name': self.consumer.name, 'URL': self.consumer.URL } # Set up container repo self.container_repo = mock.MagicMock() self.container_repo.get_container_by_id.return_value = self.container self.setup_container_repository_mock(self.container_repo) # Set up container consumer repo self.consumer_repo = mock.MagicMock() self.consumer_repo.get_by_values.return_value = self.consumer self.consumer_repo.delete_entity_by_id.return_value = None self.setup_container_consumer_repository_mock(self.consumer_repo) # Set up secret repo self.setup_secret_repository_mock() def test_should_not_allow_put_on_consumers(self): ret_val = ([self.consumer], 0, 0, 1) 
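        # Repository list calls return a (entities, offset, limit, total)
        # tuple, so the mocked return value has to use the same shape.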
self.consumer_repo.get_by_container_id.return_value = ret_val resp = self.app.put_json( '/containers/{0}/consumers/'.format(self.container.id), self.consumer_ref, expect_errors=True ) self.assertEqual(405, resp.status_int) def test_should_not_allow_post_on_consumer_by_id(self): self.consumer_repo.get.return_value = self.consumer resp = self.app.post_json( '/containers/{0}/consumers/{1}/'.format(self.container.id, self.consumer.id), self.consumer_ref, expect_errors=True ) self.assertEqual(405, resp.status_int) def test_should_not_allow_put_on_consumer_by_id(self): self.consumer_repo.get.return_value = self.consumer resp = self.app.put_json( '/containers/{0}/consumers/{1}/'.format(self.container.id, self.consumer.id), self.consumer_ref, expect_errors=True ) self.assertEqual(405, resp.status_int) def test_should_not_allow_delete_on_consumer_by_id(self): self.consumer_repo.get.return_value = self.consumer resp = self.app.delete( '/containers/{0}/consumers/{1}/'.format(self.container.id, self.consumer.id), expect_errors=True ) self.assertEqual(405, resp.status_int) class WhenOwnershipMismatchForContainerConsumer(FunctionalTest): def setUp(self): super( WhenOwnershipMismatchForContainerConsumer, self ).setUp() self.app = webtest.TestApp(app.build_wsgi_app(self.root)) self.app.extra_environ = get_barbican_env(self.external_project_id) @property def root(self): self._init() class RootController(object): containers = controllers.containers.ContainersController() return RootController() def _init(self): self.external_project_id = 'keystoneid1234' self.project_internal_id = 'projectid1234' # Set up mocked project self.project = models.Project() self.project.id = self.project_internal_id self.project.external_id = self.external_project_id # Set up mocked project repo self.project_repo = mock.MagicMock() self.project_repo.get.return_value = self.project self.setup_project_repository_mock(self.project_repo) # Set up mocked container self.container = create_container( id_ref=utils.generate_test_valid_uuid(), project_id=self.project_internal_id, external_project_id='differentProjectId') # Set up mocked consumers self.consumer = create_container_consumer(self.container.id, self.project_internal_id, id_ref='id2') self.consumer2 = create_container_consumer(self.container.id, self.project_internal_id, id_ref='id3') self.consumer_ref = { 'name': self.consumer.name, 'URL': self.consumer.URL } # Set up mocked container repo self.container_repo = mock.MagicMock() self.container_repo.get.return_value = self.container self.container_repo.get_container_by_id.return_value = self.container self.setup_container_repository_mock(self.container_repo) # Set up mocked container consumer repo self.consumer_repo = mock.MagicMock() self.consumer_repo.get_by_values.return_value = self.consumer self.consumer_repo.delete_entity_by_id.return_value = None self.setup_container_consumer_repository_mock(self.consumer_repo) # Set up mocked secret repo self.setup_secret_repository_mock() def test_consumer_check_ownership_mismatch(self): resp = self.app.delete_json( '/containers/{0}/consumers/'.format(self.container.id), self.consumer_ref, expect_errors=True) self.assertEqual(403, resp.status_int) class WhenCreatingSecretConsumersUsingResource(FunctionalTest): def setUp(self): super( WhenCreatingSecretConsumersUsingResource, self ).setUp() self.app = webtest.TestApp(app.build_wsgi_app(self.root)) self.app.extra_environ = get_barbican_env(self.external_project_id) @property def root(self): self._init() class RootController(object): 
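            # A minimal Pecan root: only the secrets controller is mounted,
            # which is all the /secrets/{id}/consumers routes need.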
secrets = controllers.secrets.SecretsController() return RootController() def _init(self): self.external_project_id = 'keystoneid1234' self.project_internal_id = 'projectid1234' # Set up mocked project self.project = models.Project() self.project.id = self.project_internal_id self.project.external_id = self.external_project_id # Set up mocked secret self.secret = models.Secret() self.secret.id = utils.generate_test_valid_uuid() self.secret.project = self.project self.secret.project_id = self.project_internal_id # Set up consumer ref self.consumer_ref = { "service": "service", "resource_type": "resource_type", "resource_id": "resource_id", } # Set up mocked project repo self.project_repo = mock.MagicMock() self.project_repo.get.return_value = self.project self.setup_project_repository_mock(self.project_repo) # Set up mocked quota enforcer self.quota_patch = mock.patch( 'barbican.common.quota.QuotaEnforcer.enforce', return_value=None) self.quota_patch.start() self.addCleanup(self.quota_patch.stop) # Set up mocked secret repo self.secret_repo = mock.MagicMock() self.secret_repo.get_secret_by_id.return_value = self.secret self.setup_secret_repository_mock(self.secret_repo) # Set up mocked secret meta repo self.secret_meta_repo = mock.MagicMock() self.secret_meta_repo.get_metadata_for_secret.return_value = None self.setup_secret_meta_repository_mock(self.secret_meta_repo) # Set up mocked secret consumer repo self.consumer_repo = mock.MagicMock() self.consumer_repo.create_from.return_value = None self.setup_secret_consumer_repository_mock(self.consumer_repo) def test_should_add_new_consumer(self): resp = self.app.post_json( '/secrets/{0}/consumers/'.format(self.secret.id), self.consumer_ref ) self.assertEqual(200, resp.status_int) self.assertNotIn(self.external_project_id, resp.headers['Location']) args, kwargs = self.consumer_repo.create_or_update_from.call_args consumer = args[0] self.assertIsInstance(consumer, models.SecretConsumerMetadatum) def test_should_fail_consumer_bad_json(self): resp = self.app.post( '/secrets/{0}/consumers/'.format(self.secret.id), '', expect_errors=True ) self.assertEqual(415, resp.status_int) def test_should_404_when_secret_ref_doesnt_exist(self): self.secret_repo.get_secret_by_id.return_value = None resp = self.app.post_json( '/secrets/{0}/consumers/'.format('bad_id'), self.consumer_ref, expect_errors=True ) self.assertEqual(404, resp.status_int) class WhenGettingOrDeletingSecretConsumersUsingResource(FunctionalTest): def setUp(self): super( WhenGettingOrDeletingSecretConsumersUsingResource, self ).setUp() self.app = webtest.TestApp(app.build_wsgi_app(self.root)) self.app.extra_environ = get_barbican_env(self.external_project_id) @property def root(self): self._init() class RootController(object): secrets = controllers.secrets.SecretsController() return RootController() def _init(self): self.external_project_id = 'keystoneid1234' self.project_internal_id = 'projectid1234' # Set up mocked project self.project = models.Project() self.project.id = self.project_internal_id self.project.external_id = self.external_project_id # Set up mocked secret self.secret = models.Secret() self.secret.id = utils.generate_test_valid_uuid() self.secret.project = self.project self.secret.project_id = self.project_internal_id # Set up mocked consumers self.consumer = create_secret_consumer( self.secret.id, self.project_internal_id, id_ref=utils.generate_test_valid_uuid()) self.consumer2 = create_secret_consumer( self.secret.id, self.project_internal_id, 
            id_ref=utils.generate_test_valid_uuid())
        self.consumer_ref = {
            "service": self.consumer.service,
            "resource_type": self.consumer.resource_type,
            "resource_id": self.consumer.resource_id,
        }
        # Set up mocked project repo
        self.project_repo = mock.MagicMock()
        self.project_repo.get.return_value = self.project
        self.setup_project_repository_mock(self.project_repo)
        # Set up mocked secret repo
        self.secret_repo = mock.MagicMock()
        self.secret_repo.get_secret_by_id.return_value = self.secret
        self.setup_secret_repository_mock(self.secret_repo)
        # Set up mocked secret meta repo
        self.secret_meta_repo = mock.MagicMock()
        self.secret_meta_repo.get_metadata_for_secret.return_value = None
        self.setup_secret_meta_repository_mock(self.secret_meta_repo)
        # Set up mocked secret consumer repo
        self.consumer_repo = mock.MagicMock()
        self.consumer_repo.get_by_values.return_value = self.consumer
        self.consumer_repo.delete_entity_by_id.return_value = None
        self.setup_secret_consumer_repository_mock(self.consumer_repo)

    def test_should_get_consumer(self):
        ret_val = ([self.consumer], 0, 0, 1)
        self.consumer_repo.get_by_secret_id.return_value = ret_val

        resp = self.app.get('/secrets/{0}/consumers/'.format(
            self.secret.id
        ))
        self.assertEqual(200, resp.status_int)

        self.consumer_repo.get_by_secret_id.assert_called_once_with(
            self.secret.id,
            limit_arg=None,
            offset_arg=0,
            suppress_exception=True
        )
        self.assertEqual(
            self.consumer.service,
            resp.json["consumers"][0]["service"],
        )
        self.assertEqual(
            self.consumer.resource_type,
            resp.json["consumers"][0]["resource_type"]
        )
        self.assertEqual(
            self.consumer.resource_id,
            resp.json["consumers"][0]["resource_id"]
        )

    def test_should_404_when_secret_ref_doesnt_exist(self):
        self.secret_repo.get_secret_by_id.return_value = None

        resp = self.app.get('/secrets/{0}/consumers/'.format(
            'bad_id'
        ), expect_errors=True)
        self.assertEqual(404, resp.status_int)

    def test_should_get_consumer_by_id(self):
        self.consumer_repo.get.return_value = self.consumer
        resp = self.app.get('/secrets/{0}/consumers/{1}/'.format(
            self.secret.id, self.consumer.id
        ))
        self.assertEqual(200, resp.status_int)

    def test_should_404_with_bad_consumer_id(self):
        self.consumer_repo.get.return_value = None
        resp = self.app.get('/secrets/{0}/consumers/{1}/'.format(
            self.secret.id, 'bad_id'
        ), expect_errors=True)
        self.assertEqual(404, resp.status_int)

    def test_should_get_no_consumers(self):
        self.consumer_repo.get_by_secret_id.return_value = ([], 0, 0, 0)
        resp = self.app.get('/secrets/{0}/consumers/'.format(
            self.secret.id
        ))
        self.assertEqual(200, resp.status_int)

    def test_should_delete_consumer(self):
        self.app.delete_json('/secrets/{0}/consumers/'.format(
            self.secret.id
        ), self.consumer_ref)

        self.consumer_repo.delete_entity_by_id.assert_called_once_with(
            self.consumer.id, self.external_project_id)

    def test_should_fail_deleting_consumer_bad_json(self):
        resp = self.app.delete(
            '/secrets/{0}/consumers/'.format(self.secret.id),
            '',
            expect_errors=True
        )
        self.assertEqual(415, resp.status_int)

    def test_should_404_on_delete_when_consumer_not_found(self):
        old_return = self.consumer_repo.get_by_values.return_value
        self.consumer_repo.get_by_values.return_value = None

        resp = self.app.delete_json('/secrets/{0}/consumers/'.format(
            self.secret.id
        ), self.consumer_ref, expect_errors=True)

        self.consumer_repo.get_by_values.return_value = old_return
        self.assertEqual(404, resp.status_int)
        # Error response should have json content type
        self.assertEqual("application/json", resp.content_type)

    def test_should_404_on_delete_when_consumer_not_found_later(self):
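        # The lookup succeeds but the delete itself raises NotFound (for
        # example, a concurrent request already removed the consumer); the
        # API is still expected to translate that into a JSON-bodied 404.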
        self.consumer_repo.delete_entity_by_id.side_effect = excep.NotFound()

        resp = self.app.delete_json('/secrets/{0}/consumers/'.format(
            self.secret.id
        ), self.consumer_ref, expect_errors=True)

        self.consumer_repo.delete_entity_by_id.side_effect = None
        self.assertEqual(404, resp.status_int)
        # Error response should have json content type
        self.assertEqual("application/json", resp.content_type)

    def test_should_delete_consumers_on_secret_delete(self):
        consumers = [self.consumer, self.consumer2]
        ret_val = (consumers, 0, 0, 1)
        self.consumer_repo.get_by_secret_id.return_value = ret_val

        resp = self.app.delete(
            '/secrets/{0}/'.format(self.secret.id)
        )
        self.assertEqual(204, resp.status_int)

        # Verify consumers were deleted
        calls = []
        for consumer in consumers:
            calls.append(mock.call(consumer.id, self.external_project_id))
        self.consumer_repo.delete_entity_by_id.assert_has_calls(
            calls, any_order=True
        )

    def test_should_pass_on_secret_delete_with_missing_consumers(self):
        consumers = [self.consumer, self.consumer2]
        ret_val = (consumers, 0, 0, 1)
        self.consumer_repo.get_by_secret_id.return_value = ret_val
        self.consumer_repo.delete_entity_by_id.side_effect = excep.NotFound

        resp = self.app.delete(
            '/secrets/{0}/'.format(self.secret.id)
        )
        self.assertEqual(204, resp.status_int)

        # Verify consumers were deleted
        calls = []
        for consumer in consumers:
            calls.append(mock.call(consumer.id, self.external_project_id))
        self.consumer_repo.delete_entity_by_id.assert_has_calls(
            calls, any_order=True
        )


class WhenPerformingUnallowedOperationsOnSecretConsumers(FunctionalTest):

    def setUp(self):
        super(
            WhenPerformingUnallowedOperationsOnSecretConsumers, self
        ).setUp()
        self.app = webtest.TestApp(app.build_wsgi_app(self.root))
        self.app.extra_environ = get_barbican_env(self.external_project_id)

    @property
    def root(self):
        self._init()

        class RootController(object):
            secrets = controllers.secrets.SecretsController()

        return RootController()

    def _init(self):
        self.external_project_id = 'keystoneid1234'
        self.project_internal_id = 'projectid1234'
        # Set up mocked project
        self.project = models.Project()
        self.project.id = self.project_internal_id
        self.project.external_id = self.external_project_id
        # Set up mocked secret
        self.secret = models.Secret()
        self.secret.id = utils.generate_test_valid_uuid()
        self.secret.project_id = self.project_internal_id
        # Set up mocked secret consumers
        self.consumer = create_secret_consumer(
            self.secret.id,
            self.project_internal_id,
            id_ref=utils.generate_test_valid_uuid())
        self.consumer_ref = {
            "service": self.consumer.service,
            "resource_type": self.consumer.resource_type,
            "resource_id": self.consumer.resource_id,
        }
        # Set up mocked project repo
        self.project_repo = mock.MagicMock()
        self.project_repo.get.return_value = self.project
        self.setup_project_repository_mock(self.project_repo)
        # Set up secret repo
        self.secret_repo = mock.MagicMock()
        self.secret_repo.get_secret_by_id.return_value = self.secret
        self.setup_secret_repository_mock(self.secret_repo)
        # Set up secret consumer repo
        self.consumer_repo = mock.MagicMock()
        self.consumer_repo.get_by_values.return_value = self.consumer
        self.consumer_repo.delete_entity_by_id.return_value = None
        self.setup_secret_consumer_repository_mock(self.consumer_repo)

    def test_should_not_allow_put_on_consumers(self):
        ret_val = ([self.consumer], 0, 0, 1)
        self.consumer_repo.get_by_secret_id.return_value = ret_val
        resp = self.app.put_json(
            '/secrets/{0}/consumers/'.format(self.secret.id),
            self.consumer_ref,
            expect_errors=True
        )
        self.assertEqual(405, resp.status_int)

    def test_should_not_allow_post_on_consumer_by_id(self):
        self.consumer_repo.get.return_value = self.consumer
        resp = self.app.post_json(
            '/secrets/{0}/consumers/{1}/'.format(self.secret.id,
                                                 self.consumer.id),
            self.consumer_ref,
            expect_errors=True
        )
        self.assertEqual(405, resp.status_int)

    def test_should_not_allow_put_on_consumer_by_id(self):
        self.consumer_repo.get.return_value = self.consumer
        resp = self.app.put_json(
            '/secrets/{0}/consumers/{1}/'.format(self.secret.id,
                                                 self.consumer.id),
            self.consumer_ref,
            expect_errors=True
        )
        self.assertEqual(405, resp.status_int)

    def test_should_not_allow_delete_on_consumer_by_id(self):
        self.consumer_repo.get.return_value = self.consumer
        resp = self.app.delete(
            '/secrets/{0}/consumers/{1}/'.format(self.secret.id,
                                                 self.consumer.id),
            expect_errors=True
        )
        self.assertEqual(405, resp.status_int)


class WhenOwnershipMismatchForSecretConsumer(FunctionalTest):

    def setUp(self):
        super(
            WhenOwnershipMismatchForSecretConsumer, self
        ).setUp()
        self.app = webtest.TestApp(app.build_wsgi_app(self.root))
        self.app.extra_environ = get_barbican_env(self.external_project_id)

    @property
    def root(self):
        self._init()

        class RootController(object):
            secrets = controllers.secrets.SecretsController()

        return RootController()

    def _init(self):
        self.external_project_id = 'keystoneid1234'
        self.project_internal_id = 'projectid1234'
        # Set up mocked project
        self.project = models.Project()
        self.project.id = self.project_internal_id
        self.project.external_id = self.external_project_id
        # Set up mocked secret
        self.secret = models.Secret()
        self.secret.id = utils.generate_test_valid_uuid()
        self.secret.project = models.Project()
        self.secret.project.external_id = "differentProjectId"
        # Set up mocked consumer
        self.consumer = create_secret_consumer(self.secret.id,
                                               self.project_internal_id,
                                               id_ref='consumerid1234')
        self.consumer_ref = {
            "service": self.consumer.service,
            "resource_type": self.consumer.resource_type,
            "resource_id": self.consumer.resource_id,
        }
        # Set up mocked project repo
        self.project_repo = mock.MagicMock()
        self.project_repo.get.return_value = self.project
        self.setup_project_repository_mock(self.project_repo)
        # Set up mocked secret repo
        self.secret_repo = mock.MagicMock()
        self.secret_repo.get.return_value = self.secret
        self.secret_repo.get_secret_by_id.return_value = self.secret
        self.setup_secret_repository_mock(self.secret_repo)
        # Set up mocked secret consumer repo
        self.consumer_repo = mock.MagicMock()
        self.consumer_repo.get_by_values.return_value = self.consumer
        self.consumer_repo.delete_entity_by_id.return_value = None
        self.setup_secret_consumer_repository_mock(self.consumer_repo)

    def test_consumer_check_ownership_mismatch(self):
        resp = self.app.delete_json(
            '/secrets/{0}/consumers/'.format(self.secret.id),
            self.consumer_ref, expect_errors=True)
        self.assertEqual(403, resp.status_int)
barbican-9.1.0.dev50/barbican/tests/api/test_transport_keys_resource.py0000664000175000017500000002672613616500636026506 0ustar sahidsahid00000000000000# Copyright (c) 2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and # limitations under the License. """ This test module focuses on typical-flow business logic tests with the transport key resource classes. """ import mock import pecan from six import moves import webtest from barbican.api import app from barbican.api import controllers from barbican.common import exception as excep import barbican.context from barbican.model import models from barbican.tests import utils def get_barbican_env(external_project_id): class NoopPolicyEnforcer(object): def authorize(self, *args, **kwargs): return kwargs = {'roles': None, 'user_id': None, 'project_id': external_project_id, 'is_admin': True, 'policy_enforcer': NoopPolicyEnforcer()} barbican_env = {'barbican.context': barbican.context.RequestContext(**kwargs)} return barbican_env SAMPLE_TRANSPORT_KEY = """ -----BEGIN CERTIFICATE----- MIIDlDCCAnygAwIBAgIBGDANBgkqhkiG9w0BAQsFADBCMR8wHQYDVQQKDBZ0b21j YXQgMjggZG9tYWluIHRyeSAzMR8wHQYDVQQDDBZDQSBTaWduaW5nIENlcnRpZmlj YXRlMB4XDTE0MDMyNzA0MTU0OFoXDTE2MDMxNjA0MTU0OFowRTEfMB0GA1UECgwW dG9tY2F0IDI4IGRvbWFpbiB0cnkgMzEiMCAGA1UEAwwZRFJNIFRyYW5zcG9ydCBD ZXJ0aWZpY2F0ZTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANEjiTHn xWKKnzgBzR8kHo5YKXDbYi01ar0pAiJQ8Xx4MXj3Uf6ckfxvJ7Icb2PhigAgINLe td8butAXM0164kHeEMJWI2TG/+2f42Kla2KeU0bdgKbw1egyZreDvhGk/3P46LQt LtRBCb5eQWS2gTFocgA5phzRQnmSS4BRTh1MnGxaFLZsPOXqZKptAYaeXyLG63vL woBwFVGoodHrRrpYpCd+D6JABBdUEgSCaYG9JBDC5ElSjJnBlCNrUZ2kxokxbsQp UHm70LV9c+5n0o1VLJSqnUDuOkoovVWytlKbz0dw0KiTUDjkb4F4D6s+IePV1ufJ 6cXvXCLLSQa42AcCAwEAAaOBkTCBjjAfBgNVHSMEGDAWgBSiQq7mBrAcTqqsPRvn l8pk4uZCWTBGBggrBgEFBQcBAQQ6MDgwNgYIKwYBBQUHMAGGKmh0dHA6Ly9hbGVl LXdvcmtwYy5yZWRoYXQuY29tOjgyODAvY2Evb2NzcDAOBgNVHQ8BAf8EBAMCBPAw EwYDVR0lBAwwCgYIKwYBBQUHAwIwDQYJKoZIhvcNAQELBQADggEBALmAtjactFHA d4nBFpwpwh3tGhkfwoSCuKThX54UXsJawQrx5gaxP0JE7YVLDRe4jn+RHjkXxdxX Xt4IugdTsPNq0nvWVAzwZwoGlJZjqghHpD3AB4E5DEoOnVnmJRLFLF0Xg/R5Sw3F j9wdVE/hGShrF+fOqNZhTG2Mf4f9TUR1Y8PtoBmtkwnFUoeiaI+Nq6Dd1Qw8ysar i/sOzOOjou4vcbYnrKnn2hlSgF6toza0BCGVA8fMyGBh16JtTR1REL7Bf0m3ZQDy 4hjmPjvUTN3YO2RlLVZXArhhmqcQzCl94P37pAEN/JhAIYvQ2PPM/ofK9XHc9u9j rQJGkMpu7ck= -----END CERTIFICATE-----""" def create_transport_key(id_ref="id", plugin_name="default_plugin", transport_key=None): """Generate a transport cert entity instance.""" tkey = models.TransportKey(plugin_name, transport_key) tkey.id = id_ref return tkey class FunctionalTest(utils.BaseTestCase): def setUp(self): super(FunctionalTest, self).setUp() root = self.root config = {'app': {'root': root}} pecan.set_config(config, overwrite=True) self.app = webtest.TestApp(pecan.make_app(root)) def tearDown(self): super(FunctionalTest, self).tearDown() pecan.set_config({}, overwrite=True) @property def root(self): return controllers.versions.VersionController() class WhenGettingTransKeysListUsingTransportKeysResource(FunctionalTest): def setUp(self): super( WhenGettingTransKeysListUsingTransportKeysResource, self ).setUp() self.app = webtest.TestApp(app.build_wsgi_app(self.root)) self.app.extra_environ = get_barbican_env(self.external_project_id) @property def root(self): self._init() class RootController(object): transport_keys = controllers.transportkeys.TransportKeysController( self.repo) return RootController() def _init(self): self.plugin_name = "default_plugin" self.external_project_id = 'keystoneid1234' self.params = {'offset': 2, 'limit': 2} self.transport_key = SAMPLE_TRANSPORT_KEY self.num_keys = 10 self.offset = 2 self.limit = 2 tk_params = {'plugin_name': self.plugin_name, 'transport_key': 
self.transport_key} self.tkeys = [create_transport_key( id_ref='id' + str(tkid), **tk_params) for tkid in moves.range(self.num_keys)] self.total = len(self.tkeys) self.repo = mock.MagicMock() self.repo.get_by_create_date.return_value = (self.tkeys, self.offset, self.limit, self.total) self.params = { 'offset': self.offset, 'limit': self.limit } def test_should_get_list_transport_keys(self): resp = self.app.get('/transport_keys/', self.params) self.repo.get_by_create_date.assert_called_once_with( plugin_name=None, offset_arg=u'{0}'.format(self.offset), limit_arg=u'{0}'.format(self.limit), suppress_exception=True ) self.assertIn('previous', resp.namespace) self.assertIn('next', resp.namespace) body = resp.body.decode('utf-8') url_nav_next = self._create_url(self.external_project_id, self.offset + self.limit, self.limit) self.assertEqual(1, body.count(url_nav_next)) url_nav_prev = self._create_url(self.external_project_id, 0, self.limit) self.assertEqual(1, body.count(url_nav_prev)) url_hrefs = self._create_url(self.external_project_id) self.assertEqual((self.num_keys + 2), body.count(url_hrefs)) def test_response_should_include_total(self): resp = self.app.get('/transport_keys/', self.params) self.assertIn('total', resp.namespace) self.assertEqual(self.total, resp.namespace['total']) def test_should_handle_no_transport_keys(self): del self.tkeys[:] resp = self.app.get('/transport_keys/', self.params) self.repo.get_by_create_date.assert_called_once_with( plugin_name=None, offset_arg=u'{0}'.format(self.offset), limit_arg=u'{0}'.format(self.limit), suppress_exception=True ) self.assertNotIn('previous', resp.namespace) self.assertNotIn('next', resp.namespace) def _create_url(self, external_project_id, offset_arg=None, limit_arg=None): if limit_arg: offset = int(offset_arg) limit = int(limit_arg) return '/transport_keys?limit={0}&offset={1}'.format( limit, offset) else: return '/transport_keys' class WhenCreatingTransKeysListUsingTransportKeysResource(FunctionalTest): def setUp(self): super( WhenCreatingTransKeysListUsingTransportKeysResource, self ).setUp() self.app = webtest.TestApp(app.build_wsgi_app(self.root)) self.app.extra_environ = get_barbican_env(self.external_project_id) @property def root(self): self._init() class RootController(object): transport_keys = controllers.transportkeys.TransportKeysController( self.repo) return RootController() def _init(self): self.plugin_name = "default_plugin" self.external_project_id = 'keystoneid1234' self.repo = mock.MagicMock() self.transport_key_req = { 'plugin_name': self.plugin_name, 'transport_key': SAMPLE_TRANSPORT_KEY } def test_should_add_new_transport_key(self): resp = self.app.post_json( '/transport_keys/', self.transport_key_req ) self.assertEqual(201, resp.status_int) args, kwargs = self.repo.create_from.call_args order = args[0] self.assertIsInstance(order, models.TransportKey) def test_should_raise_add_new_transport_key_no_secret(self): resp = self.app.post_json( '/transport_keys/', {}, expect_errors=True ) self.assertEqual(400, resp.status_int) def test_should_raise_add_new_transport_key_bad_json(self): resp = self.app.post( '/transport_keys/', '', expect_errors=True, content_type='application/json' ) self.assertEqual(400, resp.status_int) def test_should_raise_add_new_transport_key_no_content_type_header(self): resp = self.app.post( '/transport_keys/', self.transport_key_req, expect_errors=True, ) self.assertEqual(415, resp.status_int) class WhenGettingOrDeletingTransKeyUsingTransportKeyResource(FunctionalTest): def setUp(self): super( 
WhenGettingOrDeletingTransKeyUsingTransportKeyResource, self ).setUp() self.app = webtest.TestApp(app.build_wsgi_app(self.root)) self.app.extra_environ = get_barbican_env(self.external_project_id) @property def root(self): self._init() class RootController(object): transport_keys = controllers.transportkeys.TransportKeysController( self.repo) return RootController() def _init(self): self.external_project_id = 'keystoneid1234' self.transport_key = SAMPLE_TRANSPORT_KEY self.tkey_id = utils.generate_test_valid_uuid() self.tkey = create_transport_key( id_ref=self.tkey_id, plugin_name="default_plugin", transport_key=self.transport_key) self.repo = mock.MagicMock() self.repo.get.return_value = self.tkey def test_should_get_transport_key(self): self.app.get('/transport_keys/{0}/'.format(self.tkey.id)) self.repo.get.assert_called_once_with(entity_id=self.tkey.id) def test_should_throw_exception_for_get_when_trans_key_not_found(self): self.repo.get.return_value = None resp = self.app.get( '/transport_keys/{0}/'.format(utils.generate_test_valid_uuid()), expect_errors=True ) self.assertEqual(404, resp.status_int) def test_should_throw_exception_for_get_when_trans_key_invalid(self): resp = self.app.get( '/transport_keys/{0}/'.format("invalid_key_id"), expect_errors=True ) self.assertEqual(404, resp.status_int) def test_should_delete_transport_key(self): self.app.delete('/transport_keys/{0}/'.format(self.tkey.id)) self.repo.delete_entity_by_id.assert_called_once_with( entity_id=self.tkey.id, external_project_id=self.external_project_id) def test_should_throw_exception_for_delete_when_trans_key_not_found(self): self.repo.delete_entity_by_id.side_effect = excep.NotFound( "Test not found exception") resp = self.app.delete( '/transport_keys/{0}/'.format(self.tkey.id), expect_errors=True ) self.assertEqual(404, resp.status_int) barbican-9.1.0.dev50/barbican/tests/api/test_init.py0000664000175000017500000000663213616500636022455 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ This test module tests the barbican.api.__init__.py module functionality. 
""" import mock from oslo_serialization import jsonutils as json from barbican import api from barbican.common import exception from barbican.plugin.interface import secret_store from barbican.tests import utils class WhenInvokingLoadBodyFunction(utils.BaseTestCase): """Tests the load_body function.""" @mock.patch('pecan.abort') def test_should_abort_with_read_error(self, mock_pecan_abort): mock_pecan_abort.side_effect = ValueError('Abort!') req = mock.MagicMock() req.body_file = mock.MagicMock() req.body_file.read.side_effect = IOError('Dummy IOError') exception = self.assertRaises( ValueError, api.load_body, req) self.assertEqual('Abort!', str(exception)) @mock.patch('pecan.abort') def test_should_abort_with_validation_unsupported_field( self, mock_pecan_abort): mock_pecan_abort.side_effect = ValueError('Abort!') body = json.dumps({'key1': 'value1'}) req = mock.MagicMock() req.body_file = mock.MagicMock() req.body_file.read.return_value = body validator = mock.MagicMock() validator.validate.side_effect = exception.UnsupportedField('Field') exception_result = self.assertRaises( ValueError, api.load_body, req, validator=validator) self.assertEqual('Abort!', str(exception_result)) validator.validate.assert_called_once_with(json.loads(body)) class WhenInvokingGenerateSafeExceptionMessageFunction(utils.BaseTestCase): """Tests the generate_safe_exception_message function.""" def setUp(self): super(WhenInvokingGenerateSafeExceptionMessageFunction, self).setUp() def test_handle_secret_content_type_not_supported_exception(self): operation = 'operation' content_type = 'application/octet-stream' test_exception = secret_store.SecretContentTypeNotSupportedException( content_type) status, message = api.generate_safe_exception_message( operation, test_exception) self.assertEqual(400, status) self.assertEqual("operation issue seen - content-type of " "'application/octet-stream' not " "supported.", message) def test_handle_secret_content_encoding_not_supported_exception(self): operation = 'operation' content_encoding = 'application/octet-stream' test_excep = secret_store.SecretContentEncodingNotSupportedException( content_encoding) status, message = api.generate_safe_exception_message( operation, test_excep) self.assertEqual(400, status) self.assertEqual("operation issue seen - content-encoding of " "'application/octet-stream' not " "supported.", message) barbican-9.1.0.dev50/barbican/tests/api/__init__.py0000664000175000017500000000000013616500636022171 0ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/tests/api/controllers/0000775000175000017500000000000013616500640022433 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/tests/api/controllers/test_containers.py0000664000175000017500000010005613616500636026220 0ustar sahidsahid00000000000000# Copyright (c) 2015 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
import os import uuid from barbican.common import config from barbican.common import exception from barbican.model import repositories from barbican.tests.api.controllers import test_secrets as secret_helper from barbican.tests import utils containers_repo = repositories.get_container_repository() class SuccessfulContainerCreateMixin(object): def _assert_successful_container_create(self, resp, container_uuid): self.assertEqual(201, resp.status_int) # this will raise if the container uuid is not proper uuid.UUID(container_uuid) class WhenCreatingContainersUsingContainersResource( utils.BarbicanAPIBaseTestCase, SuccessfulContainerCreateMixin): def test_should_add_new_empty_container(self): container_name = 'test container name' container_type = 'generic' resp, container_uuid = create_container( self.app, name=container_name, container_type=container_type ) self._assert_successful_container_create(resp, container_uuid) container = containers_repo.get(container_uuid, self.project_id) self.assertEqual(container_name, container.name) self.assertEqual(container_type, container.type) def test_should_add_new_populated_container(self): secret_name = 'test secret 1' resp, _ = secret_helper.create_secret( self.app, name=secret_name ) self.assertEqual(201, resp.status_int) secret_ref = resp.json.get('secret_ref') container_name = 'test container name' container_type = 'generic' secret_refs = [ { 'name': secret_name, 'secret_ref': secret_ref } ] resp, container_uuid = create_container( self.app, name=container_name, container_type=container_type, secret_refs=secret_refs ) self._assert_successful_container_create(resp, container_uuid) container = containers_repo.get(container_uuid, self.project_id) self.assertEqual(container_name, container.name) self.assertEqual(container_type, container.type) def test_should_create_container_w_empty_name(self): # Name key missing container_type = 'generic' resp, container_uuid = create_container( self.app, container_type=container_type ) self._assert_successful_container_create(resp, container_uuid) # Name key is null request = { 'name': None, 'type': container_type, } resp = self.app.post_json( '/containers/', request, ) container_ref = resp.json.get('container_ref', '') _, container_uuid = os.path.split(container_ref) self._assert_successful_container_create(resp, container_uuid) def test_should_raise_container_bad_json(self): resp, container_uuid = create_container( self.app, expect_errors=True ) self.assertEqual(400, resp.status_int) def test_should_raise_container_bad_content_type_header(self): resp, container_uuid = create_container( self.app, name='test container name', container_type='generic', expect_errors=True, headers={'Content-Type': 'bad_content_type'} ) self.assertEqual(415, resp.status_int) def test_should_sanitize_location_from_response_header(self): resp, container_uuid = create_container( self.app, name='test container name', container_type='generic' ) self._assert_successful_container_create(resp, container_uuid) self.assertNotIn(self.project_id, resp.headers['Location']) def test_should_throw_exception_when_secret_ref_doesnt_exist(self): config.CONF.set_override("host_href", "http://localhost:9311") secret_refs = [ { 'name': 'bad secret', 'secret_ref': 'http://localhost:9311/secrets/does_not_exist' } ] resp, container_uuid = create_container( self.app, name='test container name', container_type='generic', secret_refs=secret_refs, expect_errors=True, ) self.assertEqual(404, resp.status_int) config.CONF.clear_override('host_href') class 
WhenGettingContainersListUsingContainersResource(
        utils.BarbicanAPIBaseTestCase,
        SuccessfulContainerCreateMixin):

    def setUp(self):
        super(WhenGettingContainersListUsingContainersResource, self).setUp()
        self.num_containers = 10
        self.offset = 2
        self.limit = 2
        self.params = {
            'offset': self.offset,
            'limit': self.limit
        }

    def _create_containers(self, type='generic'):
        for i in range(self.num_containers):
            resp, container_uuid = create_container(
                self.app,
                name='test container name {num}'.format(num=i),
                container_type=type
            )
            self._assert_successful_container_create(resp, container_uuid)

    def _create_url(self, offset_arg=None, limit_arg=None):
        if limit_arg:
            offset = int(offset_arg)
            limit = int(limit_arg)
            return '/containers?limit={limit}&offset={offset}'.format(
                limit=limit, offset=offset)
        else:
            return '/containers'

    def test_should_get_list_containers(self):
        self._create_containers()
        resp = self.app.get(
            '/containers/',
            self.params
        )
        self.assertEqual(200, resp.status_int)
        self.assertIn('previous', resp.namespace)
        self.assertIn('next', resp.namespace)

        url_nav_next = self._create_url(self.offset + self.limit,
                                        self.limit)
        self.assertEqual(1, resp.body.decode('utf-8').count(url_nav_next))

        url_nav_prev = self._create_url(0, self.limit)
        self.assertEqual(1, resp.body.decode('utf-8').count(url_nav_prev))

        url_hrefs = self._create_url()
        self.assertEqual((self.limit + 2),
                         resp.body.decode('utf-8').count(url_hrefs))

    def test_list_containers_by_type(self):
        # Creating containers to be retrieved later
        self._create_containers(type='generic')
        self._create_containers(type='certificate')
        self._create_containers(type='rsa')

        for type in ('generic', 'certificate', 'rsa'):
            params = {
                'limit': self.num_containers,
                'type': type
            }
            resp = self.app.get(
                '/containers/',
                params
            )
            self.assertEqual(200, resp.status_int)
            self.assertEqual(self.num_containers, resp.namespace.get('total'))

    def test_response_should_include_total(self):
        self._create_containers()
        resp = self.app.get(
            '/containers/',
            self.params
        )
        self.assertIn('total', resp.namespace)
        self.assertEqual(self.num_containers, resp.namespace['total'])

    def test_should_handle_no_containers(self):
        resp = self.app.get(
            '/containers/',
            self.params
        )
        self.assertEqual(0, resp.namespace['total'])
        self.assertNotIn('previous', resp.namespace)
        self.assertNotIn('next', resp.namespace)


class WhenGettingOrDeletingContainerUsingContainerResource(
        utils.BarbicanAPIBaseTestCase,
        SuccessfulContainerCreateMixin):

    def test_should_get_container(self):
        container_name = 'test container name'
        container_type = 'generic'

        resp, container_uuid = create_container(
            self.app,
            name=container_name,
            container_type=container_type
        )
        self._assert_successful_container_create(resp, container_uuid)

        resp = self.app.get('/containers/{container_id}/'.format(
            container_id=container_uuid
        ))
        self.assertEqual(200, resp.status_int)
        self.assertEqual(container_name, resp.json.get('name', ''))
        self.assertEqual(container_type, resp.json.get('type', ''))

    def test_should_delete_container(self):
        resp, container_uuid = create_container(
            self.app,
            name='test container name',
            container_type='generic'
        )
        self._assert_successful_container_create(resp, container_uuid)

        resp = self.app.delete('/containers/{container_id}/'.format(
            container_id=container_uuid
        ))
        self.assertEqual(204, resp.status_int)
        self.assertRaises(exception.NotFound, containers_repo.get,
                          container_uuid, self.project_id)

    def test_should_throw_exception_for_get_when_container_not_found(self):
        resp = self.app.get(
'/containers/{0}/'.format(utils.generate_test_valid_uuid()), expect_errors=True) self.assertEqual(404, resp.status_int) def test_should_throw_exception_for_get_when_invalid_container_id(self): resp = self.app.get('/containers/bad_id/', expect_errors=True) self.assertEqual(404, resp.status_int) def test_should_throw_exception_for_delete_when_container_not_found(self): resp = self.app.delete('/containers/bad_id/', expect_errors=True) self.assertEqual(404, resp.status_int) # Error response should have json content type self.assertEqual("application/json", resp.content_type) class WhenAddingOrRemovingContainerSecretsUsingContainersSecretsResource( utils.BarbicanAPIBaseTestCase, SuccessfulContainerCreateMixin): def test_should_add_container_secret(self): container_name = 'test container name' container_type = 'generic' resp, container_uuid = create_container( self.app, name=container_name, container_type=container_type ) self._assert_successful_container_create(resp, container_uuid) container = containers_repo.get(container_uuid, self.project_id) self.assertEqual(0, len(container.container_secrets)) secret_name = 'test secret 1' resp, _ = secret_helper.create_secret( self.app, name=secret_name ) self.assertEqual(201, resp.status_int) secret_ref = resp.json.get('secret_ref') resp, updated_container_uuid = create_container_secret( self.app, container_id=container_uuid, secret_ref=secret_ref, name=secret_name ) self._assert_successful_container_create(resp, container_uuid) container = containers_repo.get(container_uuid, self.project_id) self.assertEqual(1, len(container.container_secrets)) def test_should_add_container_secret_with_trailing_slash(self): resp, container_id = create_container( self.app, name='test container name', container_type='generic', ) self._assert_successful_container_create(resp, container_id) secret_name = 'test secret 1' resp, _ = secret_helper.create_secret( self.app, name=secret_name ) self.assertEqual(201, resp.status_int) request = { 'name': secret_name, 'secret_ref': resp.json.get('secret_ref') } resp = self.app.post_json( '/containers/{container_id}/secrets/'.format( container_id=container_id ), request, expect_errors=False, headers=None ) self.assertEqual(201, resp.status_int) def test_should_add_container_secret_without_name(self): container_name = 'test container name' container_type = 'generic' resp, container_uuid = create_container( self.app, name=container_name, container_type=container_type ) self._assert_successful_container_create(resp, container_uuid) container = containers_repo.get(container_uuid, self.project_id) self.assertEqual(0, len(container.container_secrets)) secret_name = 'test secret 1' resp, _ = secret_helper.create_secret( self.app, name=secret_name ) self.assertEqual(201, resp.status_int) secret_ref = resp.json.get('secret_ref') resp, updated_container_uuid = create_container_secret( self.app, container_id=container_uuid, secret_ref=secret_ref ) self._assert_successful_container_create(resp, container_uuid) container = containers_repo.get(container_uuid, self.project_id) self.assertEqual(1, len(container.container_secrets)) def test_should_add_container_secret_with_different_name(self): container_name = 'test container name' container_type = 'generic' resp, container_uuid = create_container( self.app, name=container_name, container_type=container_type ) self._assert_successful_container_create(resp, container_uuid) container = containers_repo.get(container_uuid, self.project_id) self.assertEqual(0, len(container.container_secrets)) secret_name 
= 'test secret 1' resp, _ = secret_helper.create_secret( self.app, name=secret_name ) self.assertEqual(201, resp.status_int) secret_ref = resp.json.get('secret_ref') resp, updated_container_uuid = create_container_secret( self.app, container_id=container_uuid, secret_ref=secret_ref, name=secret_name ) self._assert_successful_container_create(resp, container_uuid) container = containers_repo.get(container_uuid, self.project_id) self.assertEqual(1, len(container.container_secrets)) secret_name = 'test secret 2' resp, updated_container_uuid = create_container_secret( self.app, container_id=container_uuid, secret_ref=secret_ref, name=secret_name ) self._assert_successful_container_create(resp, container_uuid) container = containers_repo.get(container_uuid, self.project_id) self.assertEqual(2, len(container.container_secrets)) def test_should_not_add_when_secret_not_found(self): container_name = 'test container name' container_type = 'generic' resp, container_uuid = create_container( self.app, name=container_name, container_type=container_type ) self._assert_successful_container_create(resp, container_uuid) container = containers_repo.get(container_uuid, self.project_id) self.assertEqual(0, len(container.container_secrets)) secret_ref = '/secrets/bad_id' resp, updated_container_uuid = create_container_secret( self.app, container_id=container_uuid, secret_ref=secret_ref, expect_errors=True ) self.assertEqual(404, resp.status_int) def test_should_not_add_container_secret_with_invalid_name(self): container_name = 'test container name' container_type = 'generic' resp, container_uuid = create_container( self.app, name=container_name, container_type=container_type ) self._assert_successful_container_create(resp, container_uuid) container = containers_repo.get(container_uuid, self.project_id) self.assertEqual(0, len(container.container_secrets)) secret_name = 'test secret 1' resp, _ = secret_helper.create_secret( self.app, name=secret_name ) self.assertEqual(201, resp.status_int) secret_ref = resp.json.get('secret_ref') container_secret_name = "x" * 256 resp, updated_container_uuid = create_container_secret( self.app, container_id=container_uuid, secret_ref=secret_ref, name=container_secret_name, expect_errors=True ) self.assertEqual(400, resp.status_int) def test_should_not_add_container_secret_with_invalid_secret_ref(self): container_name = 'test container name' container_type = 'generic' resp, container_uuid = create_container( self.app, name=container_name, container_type=container_type ) self._assert_successful_container_create(resp, container_uuid) container = containers_repo.get(container_uuid, self.project_id) self.assertEqual(0, len(container.container_secrets)) secret_ref = "" resp, updated_container_uuid = create_container_secret( self.app, container_id=container_uuid, secret_ref=secret_ref, expect_errors=True ) self.assertEqual(400, resp.status_int) def test_should_add_different_secret_refs_with_duplicate_name(self): container_name = 'test container name' container_type = 'generic' resp, container_uuid = create_container( self.app, name=container_name, container_type=container_type ) self._assert_successful_container_create(resp, container_uuid) container = containers_repo.get(container_uuid, self.project_id) self.assertEqual(0, len(container.container_secrets)) secret_name = 'test secret 1' resp, _ = secret_helper.create_secret( self.app, name=secret_name ) self.assertEqual(201, resp.status_int) first_secret_ref = resp.json.get('secret_ref') secret_name = 'test secret 2' resp, _ = 
secret_helper.create_secret( self.app, name=secret_name ) self.assertEqual(201, resp.status_int) second_secret_ref = resp.json.get('secret_ref') container_secret_name = 'test container secret name' resp, updated_container_uuid = create_container_secret( self.app, container_id=container_uuid, secret_ref=first_secret_ref, name=container_secret_name ) self._assert_successful_container_create(resp, container_uuid) container = containers_repo.get(container_uuid, self.project_id) self.assertEqual(1, len(container.container_secrets)) resp, updated_container_uuid = create_container_secret( self.app, container_id=container_uuid, secret_ref=second_secret_ref, name=container_secret_name ) self._assert_successful_container_create(resp, container_uuid) container = containers_repo.get(container_uuid, self.project_id) self.assertEqual(2, len(container.container_secrets)) def test_should_not_allow_add_on_rsa_container(self): container_name = 'test container name' container_type = 'rsa' resp, container_uuid = create_container( self.app, name=container_name, container_type=container_type ) self._assert_successful_container_create(resp, container_uuid) secret_name = 'test secret 1' resp, _ = secret_helper.create_secret( self.app, name=secret_name ) self.assertEqual(201, resp.status_int) secret_ref = resp.json.get('secret_ref') resp, updated_container_uuid = create_container_secret( self.app, container_id=container_uuid, secret_ref=secret_ref, name=secret_name, expect_errors=True ) self.assertEqual(400, resp.status_int) def test_should_not_allow_add_on_certificate_container(self): container_name = 'test container name' container_type = 'certificate' resp, container_uuid = create_container( self.app, name=container_name, container_type=container_type ) self._assert_successful_container_create(resp, container_uuid) secret_name = 'test secret 1' resp, _ = secret_helper.create_secret( self.app, name=secret_name ) self.assertEqual(201, resp.status_int) secret_ref = resp.json.get('secret_ref') resp, updated_container_uuid = create_container_secret( self.app, container_id=container_uuid, secret_ref=secret_ref, name=secret_name, expect_errors=True ) self.assertEqual(400, resp.status_int) def test_should_not_allow_add_secret_when_exists_in_container(self): container_name = 'test container name' container_type = 'generic' resp, container_uuid = create_container( self.app, name=container_name, container_type=container_type ) self._assert_successful_container_create(resp, container_uuid) secret_name = 'test secret 1' resp, _ = secret_helper.create_secret( self.app, name=secret_name ) self.assertEqual(201, resp.status_int) secret_ref = resp.json.get('secret_ref') resp, updated_container_uuid = create_container_secret( self.app, container_id=container_uuid, secret_ref=secret_ref, name=secret_name ) self._assert_successful_container_create(resp, container_uuid) resp, updated_container_uuid = create_container_secret( self.app, container_id=container_uuid, secret_ref=secret_ref, name=secret_name, expect_errors=True ) self.assertEqual(409, resp.status_int) def test_should_delete_existing_container_secret(self): secret_name = 'test secret 1' resp, _ = secret_helper.create_secret( self.app, name=secret_name ) self.assertEqual(201, resp.status_int) secret_ref = resp.json.get('secret_ref') container_name = 'test container name' container_type = 'generic' secret_refs = [ { 'name': secret_name, 'secret_ref': secret_ref } ] resp, container_uuid = create_container( self.app, name=container_name, container_type=container_type, 
secret_refs=secret_refs ) self._assert_successful_container_create(resp, container_uuid) container = containers_repo.get(container_uuid, self.project_id) self.assertEqual(1, len(container.container_secrets)) resp = delete_container_secret(self.app, container_uuid, secret_ref, secret_name) self.assertEqual(204, resp.status_int) container = containers_repo.get(container_uuid, self.project_id) self.assertEqual(0, len(container.container_secrets)) def test_should_delete_container_secret_without_name(self): secret_name = 'test secret 1' resp, _ = secret_helper.create_secret( self.app, name=secret_name ) self.assertEqual(201, resp.status_int) secret_ref = resp.json.get('secret_ref') container_name = 'test container name' container_type = 'generic' secret_refs = [ { 'secret_ref': secret_ref } ] resp, container_uuid = create_container( self.app, name=container_name, container_type=container_type, secret_refs=secret_refs ) self._assert_successful_container_create(resp, container_uuid) container = containers_repo.get(container_uuid, self.project_id) self.assertEqual(1, len(container.container_secrets)) resp = delete_container_secret(self.app, container_uuid, secret_ref) self.assertEqual(204, resp.status_int) container = containers_repo.get(container_uuid, self.project_id) self.assertEqual(0, len(container.container_secrets)) def test_should_not_delete_container_secret_with_incorrect_name(self): secret_name = 'test secret 1' resp, _ = secret_helper.create_secret( self.app, name=secret_name ) self.assertEqual(201, resp.status_int) secret_ref = resp.json.get('secret_ref') container_name = 'test container name' container_type = 'generic' secret_refs = [ { 'name': secret_name, 'secret_ref': secret_ref } ] resp, container_uuid = create_container( self.app, name=container_name, container_type=container_type, secret_refs=secret_refs ) self._assert_successful_container_create(resp, container_uuid) container = containers_repo.get(container_uuid, self.project_id) self.assertEqual(1, len(container.container_secrets)) incorrect_name = 'test incorrect name' resp = delete_container_secret(self.app, container_uuid, secret_ref, incorrect_name, expect_errors=True) self.assertEqual(404, resp.status_int) container = containers_repo.get(container_uuid, self.project_id) self.assertEqual(1, len(container.container_secrets)) def test_should_delete_only_when_secret_exists(self): secret_ref = '/secrets/bad_id' container_name = 'test container name' container_type = 'generic' resp, container_uuid = create_container( self.app, name=container_name, container_type=container_type ) self._assert_successful_container_create(resp, container_uuid) resp = delete_container_secret(self.app, container_uuid, secret_ref, expect_errors=True) self.assertEqual(404, resp.status_int) def test_should_delete_only_when_secret_exists_in_container(self): secret_name = 'test secret 1' resp, _ = secret_helper.create_secret( self.app, name=secret_name ) self.assertEqual(201, resp.status_int) secret_ref = resp.json.get('secret_ref') container_name = 'test container name' container_type = 'generic' resp, container_uuid = create_container( self.app, name=container_name, container_type=container_type ) self._assert_successful_container_create(resp, container_uuid) resp = delete_container_secret(self.app, container_uuid, secret_ref, secret_name, expect_errors=True) self.assertEqual(404, resp.status_int) class WhenPerformingUnallowedOperationsOnContainers( utils.BarbicanAPIBaseTestCase, SuccessfulContainerCreateMixin): container_req = [ { 'name': 'test 
container name', 'type': 'generic', 'secret_refs': [] } ] secret_req = { 'name': 'test secret name', 'secret_ref': 'https://localhost/v1/secrets/1-2-3-4' } def test_should_not_allow_put_on_containers(self): resp = self.app.put_json( '/containers/', self.container_req, expect_errors=True ) self.assertEqual(405, resp.status_int) def test_should_not_allow_post_on_container_by_id(self): resp, container_uuid = create_container( self.app, name='test container name', container_type='generic' ) self._assert_successful_container_create(resp, container_uuid) resp = self.app.post_json( '/containers/{container_id}/'.format(container_id=container_uuid), self.container_req, expect_errors=True ) self.assertEqual(405, resp.status_int) def test_should_not_allow_put_on_container_by_id(self): resp, container_uuid = create_container( self.app, name='test container name', container_type='generic' ) self._assert_successful_container_create(resp, container_uuid) resp = self.app.put_json( '/containers/{container_id}/'.format(container_id=container_uuid), self.container_req, expect_errors=True ) self.assertEqual(405, resp.status_int) def test_should_not_allow_get_on_container_secrets(self): resp, container_uuid = create_container( self.app, name='test container name', container_type='generic' ) self._assert_successful_container_create(resp, container_uuid) resp = self.app.get( '/containers/{container_id}/secrets'.format( container_id=container_uuid), expect_errors=True ) self.assertEqual(405, resp.status_int) def test_should_not_allow_put_on_container_secrets(self): resp, container_uuid = create_container( self.app, name='test container name', container_type='generic' ) self._assert_successful_container_create(resp, container_uuid) resp = self.app.put_json( '/containers/{container_id}/secrets'.format( container_id=container_uuid), self.secret_req, expect_errors=True ) self.assertEqual(405, resp.status_int) # ----------------------- Helper Functions --------------------------- def create_container(app, name=None, container_type=None, secret_refs=None, expect_errors=False, headers=None): request = { 'name': name, 'type': container_type, 'secret_refs': secret_refs if secret_refs else [] } cleaned_request = {key: val for key, val in request.items() if val is not None} resp = app.post_json( '/containers/', cleaned_request, expect_errors=expect_errors, headers=headers ) created_uuid = None if resp.status_int == 201: container_ref = resp.json.get('container_ref', '') _, created_uuid = os.path.split(container_ref) return resp, created_uuid def create_container_secret(app, container_id=None, secret_ref=None, name=None, expect_errors=False, headers=None): request = { 'name': name, 'secret_ref': secret_ref } cleaned_request = {key: val for key, val in request.items() if val is not None} resp = app.post_json( '/containers/{container_id}/secrets'.format(container_id=container_id), cleaned_request, expect_errors=expect_errors, headers=headers ) updated_uuid = None if resp.status_int == 201: container_ref = resp.json.get('container_ref', '') _, updated_uuid = os.path.split(container_ref) return resp, updated_uuid def delete_container_secret(app, container_id=None, secret_ref=None, name=None, expect_errors=False, headers=None): request = { 'name': name, 'secret_ref': secret_ref } cleaned_request = {key: val for key, val in request.items() if val is not None} resp = app.delete_json( '/containers/{container_id}/secrets'.format(container_id=container_id), cleaned_request, expect_errors=expect_errors, headers=headers ) return resp 
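# Illustrative sketch (not part of the test suite): how the helper functions
# above typically compose inside one of these test cases. `self.app` comes
# from utils.BarbicanAPIBaseTestCase; the 'example' names are placeholders.
#
#     resp, container_uuid = create_container(
#         self.app, name='example container', container_type='generic')
#     assert resp.status_int == 201
#
#     resp, _ = secret_helper.create_secret(self.app, name='example secret')
#     secret_ref = resp.json.get('secret_ref')
#
#     resp, _ = create_container_secret(
#         self.app, container_id=container_uuid, secret_ref=secret_ref)
#     assert resp.status_int == 201
#
#     resp = delete_container_secret(
#         self.app, container_id=container_uuid, secret_ref=secret_ref)
#     assert resp.status_int == 204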
barbican-9.1.0.dev50/barbican/tests/api/controllers/test_acls.py0000664000175000017500000013050713616500636025001 0ustar sahidsahid00000000000000# Copyright (c) 2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import os from oslo_utils import uuidutils from barbican.api.controllers import acls from barbican.model import repositories from barbican.tests.api import test_resources_policy as test_policy from barbican.tests import utils class TestACLsWithContextMixin(test_policy.BaseTestCase): """Mixin for performing common acls operation used with policy logic.""" def _create_secret_with_creator_user(self, app, creator_user_id): # define creator user for new secret entry. app.extra_environ = { 'barbican.context': self._build_context(self.project_id, user=creator_user_id) } secret_id, _ = create_secret(app) return secret_id def _create_container_with_creator_user(self, app, creator_user_id): # define creator user for new container entry. app.extra_environ = { 'barbican.context': self._build_context(self.project_id, user=creator_user_id) } container_id, _ = create_container(app) return container_id def _set_acls_with_context(self, app, entity_type=None, op_type=None, entity_id=None, roles=None, user=None, enforce_policy=True, expect_errors=False): """Perform acl create/update/delete operation with policy logic. Before performing acl create/update/delete, provided input is used for setting custom barbican context. Operation is done under policy enforcement logic. """ policy_enforcer = self.policy_enforcer if enforce_policy else None app.extra_environ = { 'barbican.context': self._build_context( self.project_id, roles=roles, user=user, is_admin=False, policy_enforcer=policy_enforcer) } resp = None if op_type == 'create': resp = create_acls(app, entity_type, entity_id, read_user_ids=['u1', 'u2'], expect_errors=expect_errors) elif op_type == 'update': resp = update_acls(app, entity_type, entity_id, read_user_ids=['u1', 'u2'], partial_update=True, expect_errors=expect_errors) elif op_type == 'delete': resp = app.delete('/{0}/{1}/acl'.format(entity_type, entity_id), expect_errors=expect_errors) return resp class WhenTestingSecretACLsResource(utils.BarbicanAPIBaseTestCase, TestACLsWithContextMixin): def test_can_create_new_secret_acls(self): """Create secret acls and compare stored values with request data.""" secret_uuid, _ = create_secret(self.app) resp = create_acls( self.app, 'secrets', secret_uuid, read_user_ids=['u1', 'u2']) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertIn('/secrets/{0}/acl'.format(secret_uuid), resp.json['acl_ref']) acl_map = _get_acl_map(secret_uuid, is_secret=True) # Check project_access is True when not provided self.assertTrue(acl_map['read']['project_access']) def test_who_can_create_new_secret_acls(self): """Test who can create new secret ACLs as per policy rules. New secret ACLs can be created by user who created the secret. Other user with 'creator' role in secret project cannot create ACL if user is not creator of the secret. 
User with 'admin' role in secret project can create ACL for that secret. """ creator_user_id = 'creatorUserId' secret_uuid = self._create_secret_with_creator_user( self.app, creator_user_id) secret_uuid2 = self._create_secret_with_creator_user( self.app, creator_user_id) resp = self._set_acls_with_context( self.app, entity_type='secrets', op_type='create', entity_id=secret_uuid, roles=['creator'], user='NotSecretCreator', expect_errors=True) self.assertEqual(403, resp.status_int) resp = self._set_acls_with_context( self.app, entity_type='secrets', op_type='create', entity_id=secret_uuid, roles=['creator'], user=creator_user_id, expect_errors=False) self.assertEqual(200, resp.status_int) # test for user with 'admin' role in secret project resp = self._set_acls_with_context( self.app, entity_type='secrets', op_type='create', entity_id=secret_uuid2, roles=['admin'], user='AdminUser', expect_errors=False) self.assertEqual(200, resp.status_int)
def test_create_new_secret_acls_with_project_access_false(self): """Should allow creating acls for a new secret with project-access False.""" secret_uuid, _ = create_secret(self.app) resp = create_acls( self.app, 'secrets', secret_uuid, read_project_access=False) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertIn('/secrets/{0}/acl'.format(secret_uuid), resp.json['acl_ref']) acl_map = _get_acl_map(secret_uuid, is_secret=True) self.assertFalse(acl_map['read']['project_access'])
def test_new_secret_acls_with_invalid_project_access_value_should_fail( self): """Should fail if project-access flag is provided as string value.""" secret_uuid, _ = create_secret(self.app) resp = create_acls( self.app, 'secrets', secret_uuid, read_project_access="False", read_user_ids=['u1', 'u3', 'u4'], expect_errors=True) self.assertEqual(400, resp.status_int) resp = create_acls( self.app, 'secrets', secret_uuid, read_project_access="None", expect_errors=True) self.assertEqual(400, resp.status_int)
def test_get_secret_acls_with_complete_acl_data(self): """Read existing acls for a secret with complete acl data.""" secret_id, _ = create_secret(self.app) create_acls( self.app, 'secrets', secret_id, read_user_ids=['u1', 'u3'], read_project_access=False) resp = self.app.get( '/secrets/{0}/acl'.format(secret_id), expect_errors=False) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertIn('read', resp.json) self.assertFalse(resp.json['read']['project-access']) self.assertIsNotNone(resp.json['read']['created']) self.assertIsNotNone(resp.json['read']['updated']) self.assertEqual({'u1', 'u3'}, set(resp.json['read']['users']))
def test_get_secret_acls_with_project_access_data(self): """Read existing acls when only the project-access flag is set.""" secret_id, _ = create_secret(self.app) create_acls( self.app, 'secrets', secret_id, read_project_access=False) resp = self.app.get( '/secrets/{0}/acl'.format(secret_id), expect_errors=False) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertEqual([], resp.json['read']['users']) self.assertFalse(resp.json['read']['project-access']) self.assertIsNotNone(resp.json['read']['created']) self.assertIsNotNone(resp.json['read']['updated'])
def test_get_secret_acls_invalid_secret_should_fail(self): """Get secret acls should fail for invalid secret id. This test applies to all secret ACLs methods as secret entity is populated in same manner for get, put, patch, delete methods.
""" secret_id, _ = create_secret(self.app) create_acls( self.app, 'secrets', secret_id, read_project_access=True, read_user_ids=['u1', 'u3', 'u4']) resp = self.app.get( '/secrets/{0}/acl'.format(uuidutils.generate_uuid(dashed=False)), expect_errors=True) self.assertEqual(404, resp.status_int) def test_get_secret_acls_no_acls_defined_return_default_acl(self): """Get secret acls should pass when no acls defined for a secret.""" secret_id, _ = create_secret(self.app) resp = self.app.get( '/secrets/{0}/acl'.format(secret_id), expect_errors=True) self.assertEqual(200, resp.status_int) self.assertEqual(acls.DEFAULT_ACL, resp.json) def test_get_secret_acls_with_incorrect_uri_should_fail(self): """Get secret acls should fail when no acls defined for a secret.""" secret_id, _ = create_secret(self.app) resp = self.app.get( '/secrets/{0}/incorrect_acls'.format(secret_id), expect_errors=True) self.assertEqual(404, resp.status_int) def test_full_update_secret_acls_modify_project_access_value(self): """ACLs full update with userids where project-access flag modified.""" secret_uuid, _ = create_secret(self.app) create_acls( self.app, 'secrets', secret_uuid, read_user_ids=['u1', 'u2'], read_project_access=False) # update acls with no user input so it should delete existing users resp = update_acls( self.app, 'secrets', secret_uuid, partial_update=False, read_project_access=True) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertIn('/secrets/{0}/acl'.format(secret_uuid), resp.json['acl_ref']) acl_map = _get_acl_map(secret_uuid, is_secret=True) self.assertTrue(acl_map['read']['project_access']) self.assertIsNone(acl_map['read'].to_dict_fields().get('users')) def test_full_update_secret_acls_modify_users_only(self): """ACLs full update where specific operation acl is modified.""" secret_uuid, _ = create_secret(self.app) create_acls( self.app, 'secrets', secret_uuid, read_user_ids=['u1', 'u2'], read_project_access=False) resp = update_acls( self.app, 'secrets', secret_uuid, partial_update=False, read_user_ids=['u1', 'u3', 'u5']) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertIn('/secrets/{0}/acl'.format(secret_uuid), resp.json['acl_ref']) acl_map = _get_acl_map(secret_uuid, is_secret=True) self.assertTrue(acl_map['read']['project_access']) self.assertNotIn('u2', acl_map['read'].to_dict_fields()['users']) self.assertEqual({'u1', 'u3', 'u5'}, set(acl_map['read'].to_dict_fields()['users'])) def test_full_update_secret_acls_with_read_users_only(self): """Acls full update where specific operation acl is modified.""" secret_uuid, _ = create_secret(self.app) create_acls( self.app, 'secrets', secret_uuid, read_user_ids=['u1', 'u2']) acl_map = _get_acl_map(secret_uuid, is_secret=True) # ACL api does not support 'list' operation so making direct db update # in acl operation data to make sure full update removes this existing # ACL. 
secret_acl = acl_map['read'] secret_acl.operation = 'list' secret_acl.save() acl_map = _get_acl_map(secret_uuid, is_secret=True) # check 'list' operation is there in db self.assertIn('list', acl_map) resp = update_acls( self.app, 'secrets', secret_uuid, partial_update=False, read_user_ids=['u1', 'u3', 'u5']) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertIn('/secrets/{0}/acl'.format(secret_uuid), resp.json['acl_ref']) acl_map = _get_acl_map(secret_uuid, is_secret=True) # make sure 'list' operation is no longer present after full update self.assertNotIn('list', acl_map) self.assertTrue(acl_map['read']['project_access']) self.assertEqual({'u1', 'u3', 'u5'}, set(acl_map['read'].to_dict_fields()['users'])) self.assertNotIn('u2', acl_map['read'].to_dict_fields()['users'])
def test_partial_update_secret_acls_with_read_users_only(self): """Acls update where specific operation acl is modified.""" secret_uuid, _ = create_secret(self.app) create_acls( self.app, 'secrets', secret_uuid, read_user_ids=['u1', 'u2']) acl_map = _get_acl_map(secret_uuid, is_secret=True) secret_acl = acl_map['read'] secret_acl.operation = 'list' secret_acl.save() acl_map = _get_acl_map(secret_uuid, is_secret=True) # check 'list' operation is there in db self.assertIn('list', acl_map) resp = update_acls( self.app, 'secrets', secret_uuid, partial_update=True, read_user_ids=['u1', 'u3', 'u5']) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertIn('/secrets/{0}/acl'.format(secret_uuid), resp.json['acl_ref']) acl_map = _get_acl_map(secret_uuid, is_secret=True) # For partial update, the existing ACL for the other operation is not # touched. self.assertIn('list', acl_map) self.assertEqual({'u1', 'u2'}, set(acl_map['list'].to_dict_fields()['users'])) self.assertTrue(acl_map['read']['project_access']) self.assertEqual({'u1', 'u3', 'u5'}, set(acl_map['read'].to_dict_fields()['users']))
def test_partial_update_secret_acls_when_no_acls_defined_should_pass(self): """Acls partial update passes when no acls are defined for a secret. Partial update (PATCH) is applicable even when no explicit ACL has been set, as by default every secret has an implicit acl definition. If PUT is used, then a new ACL is created instead. """ secret_id, _ = create_secret(self.app) resp = update_acls( self.app, 'secrets', secret_id, partial_update=True, read_user_ids=['u1', 'u3', 'u5'], expect_errors=False) self.assertEqual(200, resp.status_int) acl_map = _get_acl_map(secret_id, is_secret=True) self.assertTrue(acl_map['read']['project_access'])
def test_who_can_update_secret_acls(self): """Test PATCH update existing secret ACLs as per policy rules. Existing secret ACLs can be updated by user who created the secret. Other user with 'creator' role in secret project cannot update ACL if user is not creator of the secret. User with 'admin' role in secret project can update ACL for that secret.
""" creator_user_id = 'creatorUserId' secret_uuid = self._create_secret_with_creator_user( self.app, creator_user_id) self._set_acls_with_context( self.app, entity_type='secrets', op_type='create', entity_id=secret_uuid, enforce_policy=False) resp = self._set_acls_with_context( self.app, entity_type='secrets', op_type='update', entity_id=secret_uuid, roles=['creator'], user='NotSecretCreator', expect_errors=True) self.assertEqual(403, resp.status_int) resp = self._set_acls_with_context( self.app, entity_type='secrets', op_type='update', entity_id=secret_uuid, roles=['creator'], user=creator_user_id) self.assertEqual(200, resp.status_int) # test for user with 'admin' role in secret project resp = self._set_acls_with_context( self.app, entity_type='secrets', op_type='update', entity_id=secret_uuid, roles=['admin'], user='AdminUser') self.assertEqual(200, resp.status_int) def test_partial_update_secret_acls_modify_project_access_values(self): """Acls partial update where project-access flag is modified.""" secret_uuid, _ = create_secret(self.app) create_acls( self.app, 'secrets', secret_uuid, read_user_ids=['u1', 'u2'], read_project_access=False) resp = update_acls( self.app, 'secrets', secret_uuid, partial_update=True, read_project_access=True) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertIn('/secrets/{0}/acl'.format(secret_uuid), resp.json['acl_ref']) acl_map = _get_acl_map(secret_uuid, is_secret=True) self.assertTrue(acl_map['read']['project_access']) self.assertEqual({'u1', 'u2'}, set(acl_map['read'].to_dict_fields()['users'])) def test_delete_secret_acls_with_valid_secret_id(self): """Delete existing acls for a given secret.""" secret_id, _ = create_secret(self.app) create_acls( self.app, 'secrets', secret_id, read_project_access=True) resp = self.app.delete( '/secrets/{0}/acl'.format(secret_id), expect_errors=False) content = resp.json self.assertIsNone(content) # make sure there is no response self.assertEqual(200, resp.status_int) acl_map = _get_acl_map(secret_id, is_secret=True) self.assertFalse(acl_map) def test_delete_secret_acls_no_acl_defined_should_pass(self): """Delete acls should pass when no acls are defined for a secret.""" secret_id, _ = create_secret(self.app) resp = self.app.delete( '/secrets/{0}/acl'.format(secret_id), expect_errors=False) self.assertEqual(200, resp.status_int) def test_who_can_delete_secret_acls(self): """Test who can delete existing secret ACLs as per policy rules. Existing secret ACLs can be deleted by user who created the secret. Other user with 'creator' role in secret project cannot delete ACL if user is not creator of the secret. User with 'admin' role in secret project can delete ACL for that secret. """ creator_user_id = 'creatorUserId' secret_uuid = self._create_secret_with_creator_user( self.app, creator_user_id) self._set_acls_with_context( self.app, entity_type='secrets', op_type='create', entity_id=secret_uuid, enforce_policy=False) resp = self._set_acls_with_context( self.app, entity_type='secrets', op_type='delete', entity_id=secret_uuid, roles=['creator'], user='NotSecretCreator', expect_errors=True) self.assertEqual(403, resp.status_int) resp = self._set_acls_with_context( self.app, entity_type='secrets', op_type='delete', entity_id=secret_uuid, roles=['creator'], user=creator_user_id) self.assertEqual(200, resp.status_int) # Create new secret ACLs again. 
self._set_acls_with_context( self.app, entity_type='secrets', op_type='create', entity_id=secret_uuid, enforce_policy=False) # test for user with 'admin' role in secret project resp = self._set_acls_with_context( self.app, entity_type='secrets', op_type='delete', entity_id=secret_uuid, roles=['admin'], user='AdminUser') self.assertEqual(200, resp.status_int)
def test_invoke_secret_acls_head_should_fail(self): """Should fail as HEAD request to secret acls URI is not supported.""" secret_id, _ = create_secret(self.app) resp = self.app.head( '/secrets/{0}/acl'.format(secret_id), expect_errors=True) self.assertEqual(405, resp.status_int)
def test_list_secrets_with_no_acls_and_acl_only_should_be_empty(self): """Returned list should be empty.""" creator_user_id = 'creatorUserID' self._create_secret_with_creator_user( self.app, creator_user_id) resp = self.app.get( '/secrets/?acl_only=TRUE') self.assertEqual(200, resp.status_int) self.assertEqual([], resp.json['secrets'])
def test_list_secrets_with_acls(self): """Returned list should not include secrets with no ACL for the user.""" creator_user_id = 'creatorUserID' secret_uuid_acl_1 = self._create_secret_with_creator_user( self.app, creator_user_id) secret_uuid_acl_2 = self._create_secret_with_creator_user( self.app, creator_user_id) secret_uuid_no_acl = self._create_secret_with_creator_user( self.app, creator_user_id) create_acls( self.app, 'secrets', secret_uuid_acl_1, read_user_ids=[creator_user_id], read_project_access=False) create_acls( self.app, 'secrets', secret_uuid_acl_2, read_user_ids=[creator_user_id], read_project_access=False) resp = self.app.get( '/secrets/?acl_only=TrUe') self.assertEqual(200, resp.status_int) secret_list = resp.json.get('secrets') self.assertEqual(2, len(secret_list)) self.assertNotIn(secret_uuid_no_acl, secret_list)
class WhenTestingContainerAclsResource(utils.BarbicanAPIBaseTestCase, TestACLsWithContextMixin):
def test_can_create_new_container_acls(self): """Create container acls and compare db values with request data.""" container_id, _ = create_container(self.app) resp = create_acls( self.app, 'containers', container_id, read_user_ids=['u1', 'u2']) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertIn('/containers/{0}/acl'.format(container_id), resp.json['acl_ref']) acl_map = _get_acl_map(container_id, is_secret=False) # Check project_access is True when not provided self.assertTrue(acl_map['read']['project_access']) self.assertEqual({'u1', 'u2'}, set(acl_map['read'].to_dict_fields()['users']))
def test_who_can_create_new_container_acls(self): """Test who can create new container ACLs as per policy rules. New container ACLs can be created by user who created the container. Other user with 'creator' role in container project cannot create ACL if user is not creator of the container. User with 'admin' role in container project can create ACL for that container.
""" creator_user_id = 'creatorUserId' container_id = self._create_container_with_creator_user( self.app, creator_user_id) container_id2 = self._create_container_with_creator_user( self.app, creator_user_id) resp = self._set_acls_with_context( self.app, entity_type='containers', op_type='create', entity_id=container_id, roles=['creator'], user='NotContainerCreator', expect_errors=True) self.assertEqual(403, resp.status_int) resp = self._set_acls_with_context( self.app, entity_type='containers', op_type='create', entity_id=container_id, roles=['creator'], user=creator_user_id, expect_errors=False) self.assertEqual(200, resp.status_int) # test for user with 'admin' role in container project resp = self._set_acls_with_context( self.app, entity_type='containers', op_type='create', entity_id=container_id2, roles=['admin'], user='AdminUser', expect_errors=False) self.assertEqual(200, resp.status_int) def test_create_new_container_acls_with_project_access_true(self): """Should allow creating acls for new container with project-access.""" container_id, _ = create_container(self.app) resp = create_acls( self.app, 'containers', container_id, read_project_access=True, read_user_ids=['u1', 'u3', 'u4']) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertIn('/containers/{0}/acl'.format(container_id), resp.json['acl_ref']) acl_map = _get_acl_map(container_id, is_secret=False) self.assertTrue(acl_map['read']['project_access']) def test_create_new_container_acls_with_project_access_false(self): """Should allow creating acls for new container with project-access.""" container_id, _ = create_container(self.app) resp = create_acls( self.app, 'containers', container_id, read_project_access=False, read_user_ids=['u1', 'u3', 'u4']) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertIn('/containers/{0}/acl'.format(container_id), resp.json['acl_ref']) acl_map = _get_acl_map(container_id, is_secret=False) self.assertFalse(acl_map['read']['project_access']) def test_container_acls_with_invalid_project_access_value_fail(self): """Should fail if project-access flag is provided as string value.""" container_id, _ = create_container(self.app) resp = create_acls( self.app, 'containers', container_id, read_project_access="False", read_user_ids=['u1', 'u3', 'u4'], expect_errors=True) self.assertEqual(400, resp.status_int) resp = create_acls( self.app, 'containers', container_id, read_project_access="None", expect_errors=True) self.assertEqual(400, resp.status_int) def test_get_container_acls_with_complete_acl_data(self): """Read existing acls for a with complete acl data.""" container_id, _ = create_container(self.app) create_acls( self.app, 'containers', container_id, read_user_ids=['u1', 'u3'], read_project_access=False) resp = self.app.get( '/containers/{0}/acl'.format(container_id), expect_errors=False) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertIn('read', resp.json) self.assertFalse(resp.json['read']['project-access']) self.assertIsNotNone(resp.json['read']['created']) self.assertIsNotNone(resp.json['read']['updated']) self.assertEqual({'u1', 'u3'}, set(resp.json['read']['users'])) def test_get_container_acls_with_project_access_data(self): """Read existing acls for acl when only project-access flag is set.""" container_id, _ = create_container(self.app) create_acls( self.app, 'containers', container_id, read_project_access=False) resp = self.app.get( '/containers/{0}/acl'.format(container_id), expect_errors=False) 
self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertEqual([], resp.json['read']['users']) self.assertFalse(resp.json['read']['project-access']) self.assertIsNotNone(resp.json['read']['created']) self.assertIsNotNone(resp.json['read']['updated'])
def test_get_container_acls_invalid_container_id_should_fail(self): """Get container acls should fail for invalid container id. This test applies to all container ACLs methods as container entity is populated in same manner for get, put, patch, delete methods. """ container_id, _ = create_container(self.app) create_acls( self.app, 'containers', container_id, read_project_access=True) resp = self.app.get( '/containers/{0}/acl'.format(uuidutils.generate_uuid()), expect_errors=True) self.assertEqual(404, resp.status_int)
def test_get_container_acls_invalid_non_uuid_secret_should_fail(self): """Get container acls should fail for invalid (non-uuid) id.""" container_id, _ = create_container(self.app) create_acls( self.app, 'containers', container_id, read_project_access=True) resp = self.app.get( '/containers/{0}/acl'.format('my_container_id'), expect_errors=True) self.assertEqual(404, resp.status_int)
def test_get_container_acls_no_acls_defined_return_default_acl(self): """Get container acls should pass when no acls defined for a container.""" container_id, _ = create_container(self.app) resp = self.app.get( '/containers/{0}/acl'.format(container_id), expect_errors=True) self.assertEqual(200, resp.status_int) self.assertEqual(acls.DEFAULT_ACL, resp.json)
def test_full_update_container_acls_modify_all_acls(self): """Acls update where only user ids list is modified.""" container_id, _ = create_container(self.app) create_acls( self.app, 'containers', container_id, read_project_access=False, read_user_ids=['u1', 'u2']) resp = update_acls( self.app, 'containers', container_id, partial_update=False, read_user_ids=['u1', 'u2', 'u5']) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertIn('/containers/{0}/acl'.format(container_id), resp.json['acl_ref']) acl_map = _get_acl_map(container_id, is_secret=False) # Check project_access is True when not provided self.assertTrue(acl_map['read']['project_access']) self.assertIn('u5', acl_map['read'].to_dict_fields()['users'])
def test_full_update_container_acls_modify_project_access_values(self): """Acls update where user ids and project-access flag is modified.""" container_id, _ = create_container(self.app) create_acls( self.app, 'containers', container_id, read_user_ids=['u1', 'u2']) resp = update_acls( self.app, 'containers', container_id, partial_update=False, read_project_access=False) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertIn('/containers/{0}/acl'.format(container_id), resp.json['acl_ref']) acl_map = _get_acl_map(container_id, is_secret=False) self.assertFalse(acl_map['read']['project_access']) self.assertIsNone(acl_map['read'].to_dict_fields().get('users'))
def test_full_update_container_acls_with_read_users_only(self): """Acls full update where specific operation acl is modified.""" container_id, _ = create_container(self.app) create_acls( self.app, 'containers', container_id, read_user_ids=['u1', 'u2']) acl_map = _get_acl_map(container_id, is_secret=False) # ACL api does not support 'list' operation, so make a direct db update # in acl operation data to make sure full update removes this existing # ACL.
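# Same direct-db setup as the secret ACL tests above: fabricate a 'list'
# ACL row that the public API itself cannot create.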
container_acl = acl_map['read'] container_acl.operation = 'list' container_acl.save() acl_map = _get_acl_map(container_id, is_secret=False) # check 'list' operation is there in db self.assertIn('list', acl_map) resp = update_acls( self.app, 'containers', container_id, partial_update=False, read_user_ids=['u1', 'u3', 'u5']) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertIn('/containers/{0}/acl'.format(container_id), resp.json['acl_ref']) acl_map = _get_acl_map(container_id, is_secret=False) # make sure 'list' operation is no longer present after full update self.assertNotIn('list', acl_map) self.assertTrue(acl_map['read']['project_access']) self.assertEqual({'u1', 'u3', 'u5'}, set(acl_map['read'].to_dict_fields()['users'])) self.assertNotIn('u2', acl_map['read'].to_dict_fields()['users'])
def test_partial_update_container_acls_with_read_users_only(self): """Acls update where specific operation acl is modified.""" container_id, _ = create_container(self.app) create_acls( self.app, 'containers', container_id, read_user_ids=['u1', 'u2']) acl_map = _get_acl_map(container_id, is_secret=False) secret_acl = acl_map['read'] secret_acl.operation = 'list' secret_acl.save() acl_map = _get_acl_map(container_id, is_secret=False) # check 'list' operation is there in db self.assertIn('list', acl_map) resp = update_acls( self.app, 'containers', container_id, partial_update=True, read_user_ids=['u1', 'u3', 'u5']) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertIn('/containers/{0}/acl'.format(container_id), resp.json['acl_ref']) acl_map = _get_acl_map(container_id, is_secret=False) # For partial update, the existing ACL for the other operation is not # touched. self.assertIn('list', acl_map) self.assertEqual({'u1', 'u2'}, set(acl_map['list'].to_dict_fields()['users'])) self.assertTrue(acl_map['read']['project_access']) self.assertEqual({'u1', 'u3', 'u5'}, set(acl_map['read'].to_dict_fields()['users']))
def test_partial_update_container_acls_when_no_acls_defined(self): """Acls partial update passes when no acls are defined for a container. Partial update (PATCH) is applicable even when no explicit ACL has been set, as by default every container has an implicit acl definition. If PUT is used, then a new ACL is created instead. """ container_id, _ = create_container(self.app) resp = update_acls( self.app, 'containers', container_id, partial_update=True, read_user_ids=['u1', 'u3', 'u5'], expect_errors=False) self.assertEqual(200, resp.status_int) acl_map = _get_acl_map(container_id, is_secret=False) self.assertTrue(acl_map['read']['project_access'])
def test_partial_update_container_acls_modify_project_access_values(self): """Acls partial update where project-access flag is modified.""" container_id, _ = create_container(self.app) create_acls( self.app, 'containers', container_id, read_user_ids=['u1', 'u2'], read_project_access=False) resp = update_acls( self.app, 'containers', container_id, partial_update=True, read_project_access=True) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertIn('/containers/{0}/acl'.format(container_id), resp.json['acl_ref']) acl_map = _get_acl_map(container_id, is_secret=False) self.assertTrue(acl_map['read']['project_access']) self.assertEqual({'u1', 'u2'}, set(acl_map['read'].to_dict_fields()['users']))
def test_who_can_update_container_acls(self): """Test PATCH update existing container ACLs as per policy rules. Existing container ACLs can be updated by user who created the container.
Other user with 'creator' role in container project cannot update ACL if user is not creator of the container. User with 'admin' role in container project can update ACL for that container. """ creator_user_id = 'creatorUserId' container_id = self._create_container_with_creator_user( self.app, creator_user_id) self._set_acls_with_context( self.app, entity_type='containers', op_type='create', entity_id=container_id, enforce_policy=False) resp = self._set_acls_with_context( self.app, entity_type='containers', op_type='update', entity_id=container_id, roles=['creator'], user='NotCreator', expect_errors=True) self.assertEqual(403, resp.status_int) resp = self._set_acls_with_context( self.app, entity_type='containers', op_type='update', entity_id=container_id, roles=['creator'], user=creator_user_id) self.assertEqual(200, resp.status_int) # test for user with 'admin' role in container project resp = self._set_acls_with_context( self.app, entity_type='containers', op_type='update', entity_id=container_id, roles=['admin'], user='AdminUser') self.assertEqual(200, resp.status_int) def test_delete_container_acls_with_valid_container_id(self): """Delete existing acls for a given container.""" container_id, _ = create_container(self.app) create_acls( self.app, 'containers', container_id, read_project_access=True) resp = self.app.delete( '/containers/{0}/acl'.format(container_id), expect_errors=False) content = resp.json self.assertIsNone(content) # make sure there is no response self.assertEqual(200, resp.status_int) acl_map = _get_acl_map(container_id, is_secret=False) self.assertFalse(acl_map) def test_delete_container_acls_no_acl_defined_should_pass(self): """Delete acls should pass when no acls are defined for a container.""" container_id, _ = create_container(self.app) resp = self.app.delete( '/containers/{0}/acl'.format(container_id), expect_errors=False) self.assertEqual(200, resp.status_int) def test_who_can_delete_container_acls(self): """Test who can delete existing container ACLs as per policy rules. Existing container ACLs can be deleted by user who created the container. Other user with 'creator' role in container project cannot delete ACL if user is not creator of the container. User with 'admin' role in container project can delete ACL for that container. """ creator_user_id = 'creatorUserId' container_id = self._create_container_with_creator_user( self.app, creator_user_id) self._set_acls_with_context( self.app, entity_type='containers', op_type='create', entity_id=container_id, enforce_policy=False) resp = self._set_acls_with_context( self.app, entity_type='containers', op_type='delete', entity_id=container_id, roles=['creator'], user='NotCreator', expect_errors=True) self.assertEqual(403, resp.status_int) resp = self._set_acls_with_context( self.app, entity_type='containers', op_type='delete', entity_id=container_id, roles=['creator'], user=creator_user_id) self.assertEqual(200, resp.status_int) # Create new container ACLs again. 
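# (As in the secret ACL test above, the delete removed the ACL rows, so
# they are recreated before testing the 'admin' role delete path below.)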
self._set_acls_with_context( self.app, entity_type='containers', op_type='create', entity_id=container_id, enforce_policy=False) # test for user with 'admin' role in container project resp = self._set_acls_with_context( self.app, entity_type='containers', op_type='delete', entity_id=container_id, roles=['admin'], user='AdminUser') self.assertEqual(200, resp.status_int)
def test_invoke_container_acls_head_should_fail(self): """HEAD request to container acls URI is not supported.""" container_id, _ = create_container(self.app) resp = self.app.head( '/containers/{0}/acl/'.format(container_id), expect_errors=True) self.assertEqual(405, resp.status_int)
# ----------------------- Helper Functions ---------------------------
def create_secret(app, name=None, algorithm=None, bit_length=None, mode=None, expiration=None, payload='not-encrypted', content_type='text/plain', content_encoding=None, transport_key_id=None, transport_key_needed=None, expect_errors=False): request = { 'name': name, 'algorithm': algorithm, 'bit_length': bit_length, 'mode': mode, 'expiration': expiration, 'payload': payload, 'payload_content_type': content_type, 'payload_content_encoding': content_encoding, 'transport_key_id': transport_key_id, 'transport_key_needed': transport_key_needed } cleaned_request = {key: val for key, val in request.items() if val is not None} resp = app.post_json( '/secrets/', cleaned_request, expect_errors=expect_errors ) created_uuid = None if resp.status_int == 201: secret_ref = resp.json.get('secret_ref', '') _, created_uuid = os.path.split(secret_ref) return created_uuid, resp
def create_container(app): _, resp = create_secret(app) secret_ref = resp.json['secret_ref'] request = { "name": "container name", "type": "generic", "secret_refs": [ { "name": "any_key", "secret_ref": secret_ref } ] } resp = app.post_json( '/containers/', request, expect_errors=False ) created_uuid = None if resp.status_int == 201: container_ref = resp.json.get('container_ref', '') _, created_uuid = os.path.split(container_ref) return created_uuid, resp
def create_acls(app, entity_type, entity_id, read_user_ids=None, read_project_access=None, expect_errors=False): return manage_acls(app, entity_type, entity_id, read_user_ids=read_user_ids, read_project_access=read_project_access, is_update=False, partial_update=False, expect_errors=expect_errors)
def update_acls(app, entity_type, entity_id, read_user_ids=None, read_project_access=None, partial_update=False, expect_errors=False): return manage_acls(app, entity_type, entity_id, read_user_ids=read_user_ids, read_project_access=read_project_access, is_update=True, partial_update=partial_update, expect_errors=expect_errors)
def manage_acls(app, entity_type, entity_id, read_user_ids=None, read_project_access=None, is_update=False, partial_update=None, expect_errors=False): request = {} _append_acl_to_request(request, 'read', read_user_ids, read_project_access) cleaned_request = {key: val for key, val in request.items() if val is not None} if is_update and partial_update: # patch for partial update resp = app.patch_json( '/{0}/{1}/acl'.format(entity_type, entity_id), cleaned_request, expect_errors=expect_errors) else: # put (for create or complete update) resp = app.put_json( '/{0}/{1}/acl'.format(entity_type, entity_id), cleaned_request, expect_errors=expect_errors) return resp
def _append_acl_to_request(req, operation, user_ids=None, project_access=None): op_dict = {} if user_ids is not None: op_dict['users'] = user_ids if project_access is not None: op_dict['project-access']
= project_access if op_dict: req[operation] = op_dict def _get_acl_map(entity_id, is_secret=True): """Provides map of operation: acl_entity for given entity id.""" if is_secret: acl_repo = repositories.get_secret_acl_repository() acl_map = {acl.operation: acl for acl in acl_repo.get_by_secret_id(entity_id)} else: acl_repo = repositories.get_container_acl_repository() acl_map = {acl.operation: acl for acl in acl_repo.get_by_container_id(entity_id)} return acl_map barbican-9.1.0.dev50/barbican/tests/api/controllers/test_consumers.py0000664000175000017500000006437513616500636026106 0ustar sahidsahid00000000000000# Copyright (c) 2017 IBM # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import os from barbican.tests import utils class WhenTestingContainerConsumersResource(utils.BarbicanAPIBaseTestCase): def setUp(self): super(WhenTestingContainerConsumersResource, self).setUp() self.container_name = "Im_a_container" self.container_type = "generic" self.consumer_a = { "URL": "http://test_a", "name": "consumer_a" } self.consumer_b = { "URL": "http://test_b", "name": "consumer_b" } self.consumer_c = { "URL": "http://test_c", "name": "consumer_c" } def test_can_create_new_consumer(self): resp, container_uuid = create_container( self.app, name=self.container_name, container_type=self.container_type ) self.assertEqual(201, resp.status_int) consumer_resp, consumer = create_container_consumer( self.app, container_id=container_uuid, name=self.consumer_a["name"], url=self.consumer_a["URL"] ) self.assertEqual(200, consumer_resp.status_int) self.assertEqual([self.consumer_a], consumer) def test_can_get_consumers(self): resp, container_uuid = create_container( self.app, name=self.container_name, container_type=self.container_type ) self.assertEqual(201, resp.status_int) consumer_resp, consumers = create_container_consumer( self.app, container_id=container_uuid, name=self.consumer_a["name"], url=self.consumer_a["URL"] ) self.assertEqual(200, consumer_resp.status_int) consumer_resp, consumers = create_container_consumer( self.app, container_id=container_uuid, name=self.consumer_b["name"], url=self.consumer_b["URL"] ) self.assertEqual(200, consumer_resp.status_int) consumer_resp, consumers = create_container_consumer( self.app, container_id=container_uuid, name=self.consumer_c["name"], url=self.consumer_c["URL"] ) self.assertEqual(200, consumer_resp.status_int) consumer_get_resp = self.app.get( '/containers/{container_id}/consumers/'.format( container_id=container_uuid)) self.assertEqual(200, consumer_get_resp.status_int) self.assertIn(consumers[0]["name"], consumer_get_resp.json["consumers"][0]["name"]) self.assertIn(consumers[0]["URL"], consumer_get_resp.json["consumers"][0]["URL"]) self.assertIn(consumers[1]["name"], consumer_get_resp.json["consumers"][1]["name"]) self.assertIn(consumers[1]["URL"], consumer_get_resp.json["consumers"][1]["URL"]) self.assertIn(consumers[2]["name"], consumer_get_resp.json["consumers"][2]["name"]) self.assertIn(consumers[2]["URL"], consumer_get_resp.json["consumers"][2]["URL"]) def 
test_can_get_consumers_with_limit_and_offset(self): resp, container_uuid = create_container( self.app, name=self.container_name, container_type=self.container_type ) self.assertEqual(201, resp.status_int) consumer_resp, consumers = create_container_consumer( self.app, container_id=container_uuid, name=self.consumer_a["name"], url=self.consumer_a["URL"] ) self.assertEqual(200, consumer_resp.status_int) consumer_resp, consumers = create_container_consumer( self.app, container_id=container_uuid, name=self.consumer_b["name"], url=self.consumer_b["URL"] ) self.assertEqual(200, consumer_resp.status_int) consumer_resp, consumers = create_container_consumer( self.app, container_id=container_uuid, name=self.consumer_c["name"], url=self.consumer_c["URL"] ) self.assertEqual(200, consumer_resp.status_int) consumer_get_resp = self.app.get( '/containers/{container_id}/consumers/?limit=1&offset=1'.format( container_id=container_uuid)) self.assertEqual(200, consumer_get_resp.status_int) container_url = resp.json["container_ref"] prev_cons = u"{container_url}/consumers?limit=1&offset=0".format( container_url=container_url) self.assertEqual(prev_cons, consumer_get_resp.json["previous"]) next_cons = u"{container_url}/consumers?limit=1&offset=2".format( container_url=container_url) self.assertEqual(next_cons, consumer_get_resp.json["next"]) self.assertEqual(self.consumer_b["name"], consumer_get_resp.json["consumers"][0]["name"]) self.assertEqual(self.consumer_b["URL"], consumer_get_resp.json["consumers"][0]["URL"]) self.assertEqual(3, consumer_get_resp.json["total"]) def test_can_delete_consumer(self): resp, container_uuid = create_container( self.app, name=self.container_name, container_type=self.container_type ) self.assertEqual(201, resp.status_int) consumer_resp, consumers = create_container_consumer( self.app, container_id=container_uuid, name=self.consumer_a["name"], url=self.consumer_a["URL"] ) self.assertEqual(200, consumer_resp.status_int) request = { 'name': self.consumer_a["name"], 'URL': self.consumer_a["URL"] } cleaned_request = {key: val for key, val in request.items() if val is not None} consumer_del_resp = self.app.delete_json( '/containers/{container_id}/consumers/'.format( container_id=container_uuid ), cleaned_request, headers={'Content-Type': 'application/json'}) self.assertEqual(200, consumer_del_resp.status_int) def test_can_get_no_consumers(self): resp, container_uuid = create_container( self.app, name=self.container_name, container_type=self.container_type ) self.assertEqual(201, resp.status_int) consumer_get_resp = self.app.get( '/containers/{container_id}/consumers/'.format( container_id=container_uuid)) self.assertEqual(200, consumer_get_resp.status_int) self.assertEqual([], consumer_get_resp.json["consumers"]) def test_fail_create_container_not_found(self): consumer_resp, consumers = create_container_consumer( self.app, container_id="bad_container_id", name=self.consumer_a["name"], url=self.consumer_a["URL"], expect_errors=True ) self.assertEqual(404, consumer_resp.status_int) def test_fail_get_container_not_found(self): consumer_get_resp = self.app.get( '/containers/{container_id}/consumers/'.format( container_id="bad_container_id"), expect_errors=True) self.assertEqual(404, consumer_get_resp.status_int) def test_fail_delete_container_not_found(self): request = { 'name': self.consumer_a["name"], 'URL': self.consumer_a["URL"] } cleaned_request = {key: val for key, val in request.items() if val is not None} consumer_del_resp = self.app.delete_json( 
'/containers/{container_id}/consumers/'.format( container_id="bad_container_id" ), cleaned_request, headers={'Content-Type': 'application/json'}, expect_errors=True) self.assertEqual(404, consumer_del_resp.status_int) def test_fail_delete_consumer_not_found(self): resp, container_uuid = create_container( self.app, name=self.container_name, container_type=self.container_type ) self.assertEqual(201, resp.status_int) request = { 'name': self.consumer_a["name"], 'URL': self.consumer_a["URL"] } cleaned_request = {key: val for key, val in request.items() if val is not None} consumer_del_resp = self.app.delete_json( '/containers/{container_id}/consumers/'.format( container_id=container_uuid ), cleaned_request, headers={'Content-Type': 'application/json'}, expect_errors=True) self.assertEqual(404, consumer_del_resp.status_int) def test_fail_create_no_name(self): resp, container_uuid = create_container( self.app, name=self.container_name, container_type=self.container_type ) self.assertEqual(201, resp.status_int) consumer_resp, consumer = create_container_consumer( self.app, container_id=container_uuid, url="http://theurl", expect_errors=True ) self.assertEqual(400, consumer_resp.status_int) def test_fail_create_no_url(self): resp, container_uuid = create_container( self.app, name=self.container_name, container_type=self.container_type ) self.assertEqual(201, resp.status_int) consumer_resp, consumer = create_container_consumer( self.app, container_id=container_uuid, name="thename", expect_errors=True ) self.assertEqual(400, consumer_resp.status_int) def test_fail_create_empty_name(self): resp, container_uuid = create_container( self.app, name=self.container_name, container_type=self.container_type ) self.assertEqual(201, resp.status_int) consumer_resp, consumer = create_container_consumer( self.app, container_id=container_uuid, name="", url="http://theurl", expect_errors=True ) self.assertEqual(400, consumer_resp.status_int) def test_fail_create_empty_url(self): resp, container_uuid = create_container( self.app, name=self.container_name, container_type=self.container_type ) self.assertEqual(201, resp.status_int) consumer_resp, consumer = create_container_consumer( self.app, container_id=container_uuid, name="thename", url="", expect_errors=True ) self.assertEqual(400, consumer_resp.status_int) # TODO(redrobot): Uncomment this after adding microversion # class WhenTestingSecretConsumersResource(utils.BarbicanAPIBaseTestCase): # # def setUp(self): # super(WhenTestingSecretConsumersResource, self).setUp() # # self.consumer_a = { # "service": "service_a", # "resource_type": "resource_type_a", # "resource_id": "resource_id_a", # } # # self.consumer_b = { # "service": "service_b", # "resource_type": "resource_type_b", # "resource_id": "resource_id_b", # } # # self.consumer_c = { # "service": "service_c", # "resource_type": "resource_type_c", # "resource_id": "resource_id_c", # } # # def test_can_create_new_consumer(self): # resp, secret_id = create_secret(self.app) # self.assertEqual(201, resp.status_int) # # consumer_resp, consumer = create_secret_consumer( # self.app, # secret_id=secret_id, # service=self.consumer_a["service"], # resource_type=self.consumer_a["resource_type"], # resource_id=self.consumer_a["resource_id"], # ) # # self.assertEqual(200, consumer_resp.status_int) # self.assertEqual([self.consumer_a], consumer) # # def test_can_get_consumers(self): # resp, secret_id = create_secret(self.app) # self.assertEqual(201, resp.status_int) # # consumer_resp, consumers = create_secret_consumer( # 
self.app, # secret_id=secret_id, # service=self.consumer_a["service"], # resource_type=self.consumer_a["resource_type"], # resource_id=self.consumer_a["resource_id"], # ) # self.assertEqual(200, consumer_resp.status_int) # # consumer_resp, consumers = create_secret_consumer( # self.app, # secret_id=secret_id, # service=self.consumer_b["service"], # resource_type=self.consumer_b["resource_type"], # resource_id=self.consumer_b["resource_id"], # ) # self.assertEqual(200, consumer_resp.status_int) # # consumer_resp, consumers = create_secret_consumer( # self.app, # secret_id=secret_id, # service=self.consumer_c["service"], # resource_type=self.consumer_c["resource_type"], # resource_id=self.consumer_c["resource_id"], # ) # self.assertEqual(200, consumer_resp.status_int) # # consumer_get_resp = self.app.get( # '/secrets/{secret_id}/consumers/'.format( # secret_id=secret_id)) # # self.assertEqual(200, consumer_get_resp.status_int) # self.assertIn(consumers[0]["service"], # consumer_get_resp.json["consumers"][0]["service"]) # self.assertIn(consumers[0]["resource_type"], # consumer_get_resp.json["consumers"][0]["resource_type"]) # self.assertIn(consumers[0]["resource_id"], # consumer_get_resp.json["consumers"][0]["resource_id"]) # self.assertIn(consumers[1]["service"], # consumer_get_resp.json["consumers"][1]["service"]) # self.assertIn(consumers[1]["resource_type"], # consumer_get_resp.json["consumers"][1]["resource_type"]) # self.assertIn(consumers[1]["resource_id"], # consumer_get_resp.json["consumers"][1]["resource_id"]) # self.assertIn(consumers[2]["service"], # consumer_get_resp.json["consumers"][2]["service"]) # self.assertIn(consumers[2]["resource_type"], # consumer_get_resp.json["consumers"][2]["resource_type"]) # self.assertIn(consumers[2]["resource_id"], # consumer_get_resp.json["consumers"][2]["resource_id"]) # # def test_can_get_consumers_with_limit_and_offset(self): # resp, secret_id = create_secret(self.app) # self.assertEqual(201, resp.status_int) # # consumer_resp, consumers = create_secret_consumer( # self.app, # secret_id=secret_id, # service=self.consumer_a["service"], # resource_type=self.consumer_a["resource_type"], # resource_id=self.consumer_a["resource_id"], # ) # self.assertEqual(200, consumer_resp.status_int) # # consumer_resp, consumers = create_secret_consumer( # self.app, # secret_id=secret_id, # service=self.consumer_b["service"], # resource_type=self.consumer_b["resource_type"], # resource_id=self.consumer_b["resource_id"], # ) # self.assertEqual(200, consumer_resp.status_int) # # consumer_resp, consumers = create_secret_consumer( # self.app, # secret_id=secret_id, # service=self.consumer_c["service"], # resource_type=self.consumer_c["resource_type"], # resource_id=self.consumer_c["resource_id"], # ) # self.assertEqual(200, consumer_resp.status_int) # # consumer_get_resp = self.app.get( # '/secrets/{secret_id}/consumers/?limit=1&offset=1'.format( # secret_id=secret_id)) # self.assertEqual(200, consumer_get_resp.status_int) # # secret_url = resp.json["secret_ref"] # # prev_cons = u"{secret_url}/consumers?limit=1&offset=0".format( # secret_url=secret_url) # self.assertEqual(prev_cons, consumer_get_resp.json["previous"]) # # next_cons = u"{secret_url}/consumers?limit=1&offset=2".format( # secret_url=secret_url) # self.assertEqual(next_cons, consumer_get_resp.json["next"]) # # self.assertEqual( # self.consumer_b["service"], # consumer_get_resp.json["consumers"][0]["service"] # ) # self.assertEqual( # self.consumer_b["resource_type"], # 
consumer_get_resp.json["consumers"][0]["resource_type"] # ) # self.assertEqual( # self.consumer_b["resource_id"], # consumer_get_resp.json["consumers"][0]["resource_id"] # ) # # self.assertEqual(3, consumer_get_resp.json["total"]) # # def test_can_delete_consumer(self): # resp, secret_id = create_secret(self.app) # self.assertEqual(201, resp.status_int) # # consumer_resp, consumers = create_secret_consumer( # self.app, # secret_id=secret_id, # service=self.consumer_a["service"], # resource_type=self.consumer_a["resource_type"], # resource_id=self.consumer_a["resource_id"], # ) # self.assertEqual(200, consumer_resp.status_int) # # request = { # "service": self.consumer_a["service"], # "resource_type": self.consumer_a["resource_type"], # "resource_id": self.consumer_a["resource_id"], # } # cleaned_request = {key: val for key, val in request.items() # if val is not None} # # consumer_del_resp = self.app.delete_json( # '/secrets/{secret_id}/consumers/'.format( # secret_id=secret_id # ), cleaned_request, headers={'Content-Type': 'application/json'}) # # self.assertEqual(200, consumer_del_resp.status_int) # # def test_can_get_no_consumers(self): # resp, secret_id = create_secret(self.app) # self.assertEqual(201, resp.status_int) # # consumer_get_resp = self.app.get( # '/secrets/{secret_id}/consumers/'.format( # secret_id=secret_id)) # # self.assertEqual(200, consumer_get_resp.status_int) # self.assertEqual([], consumer_get_resp.json["consumers"]) # # def test_fail_create_secret_not_found(self): # consumer_resp, consumers = create_secret_consumer( # self.app, # secret_id="bad_secret_id", # service=self.consumer_a["service"], # resource_type=self.consumer_a["resource_type"], # resource_id=self.consumer_a["resource_id"], # expect_errors=True # ) # self.assertEqual(404, consumer_resp.status_int) # # def test_fail_get_secret_not_found(self): # consumer_get_resp = self.app.get( # '/secrets/{secret_id}/consumers/'.format( # secret_id="bad_secret_id"), expect_errors=True) # # self.assertEqual(404, consumer_get_resp.status_int) # # def test_fail_delete_secret_not_found(self): # request = { # "service": self.consumer_a["service"], # "resource_type": self.consumer_a["resource_type"], # "resource_id": self.consumer_a["resource_id"], # } # cleaned_request = {key: val for key, val in request.items() # if val is not None} # # consumer_del_resp = self.app.delete_json( # '/secrets/{secret_id}/consumers/'.format( # secret_id="bad_secret_id" # ), cleaned_request, headers={'Content-Type': 'application/json'}, # expect_errors=True) # # self.assertEqual(404, consumer_del_resp.status_int) # # def test_fail_delete_consumer_not_found(self): # resp, secret_id = create_secret(self.app) # self.assertEqual(201, resp.status_int) # # request = { # "service": self.consumer_a["service"], # "resource_type": self.consumer_a["resource_type"], # "resource_id": self.consumer_a["resource_id"], # } # cleaned_request = {key: val for key, val in request.items() # if val is not None} # # consumer_del_resp = self.app.delete_json( # '/secrets/{secret_id}/consumers/'.format( # secret_id=secret_id # ), cleaned_request, headers={'Content-Type': 'application/json'}, # expect_errors=True) # # self.assertEqual(404, consumer_del_resp.status_int) # # def test_fail_create_no_service(self): # resp, secret_id = create_secret(self.app) # self.assertEqual(201, resp.status_int) # # consumer_resp, consumer = create_secret_consumer( # self.app, # secret_id=secret_id, # resource_type="resource_type", # resource_id="resource_id", # expect_errors=True # ) # 
self.assertEqual(400, consumer_resp.status_int) # # def test_fail_create_no_resource_type(self): # resp, secret_id = create_secret(self.app) # self.assertEqual(201, resp.status_int) # # consumer_resp, consumer = create_secret_consumer( # self.app, # secret_id=secret_id, # service="service", # resource_id="resource_id", # expect_errors=True # ) # self.assertEqual(400, consumer_resp.status_int) # # def test_fail_create_no_resource_id(self): # resp, secret_id = create_secret(self.app) # self.assertEqual(201, resp.status_int) # # consumer_resp, consumer = create_secret_consumer( # self.app, # secret_id=secret_id, # service="service", # resource_type="resource_type", # expect_errors=True # ) # self.assertEqual(400, consumer_resp.status_int) # # def test_fail_create_empty_service(self): # resp, secret_id = create_secret(self.app) # self.assertEqual(201, resp.status_int) # # consumer_resp, consumer = create_secret_consumer( # self.app, # secret_id=secret_id, # service="", # resource_type="resource_type", # resource_id="resource_id", # expect_errors=True # ) # self.assertEqual(400, consumer_resp.status_int) # # def test_fail_create_empty_resource_type(self): # resp, secret_id = create_secret(self.app) # self.assertEqual(201, resp.status_int) # # consumer_resp, consumer = create_secret_consumer( # self.app, # secret_id=secret_id, # service="service", # resource_type="", # resource_id="resource_id", # expect_errors=True # ) # self.assertEqual(400, consumer_resp.status_int) # # def test_fail_create_empty_resource_id(self): # resp, secret_id = create_secret(self.app) # self.assertEqual(201, resp.status_int) # # consumer_resp, consumer = create_secret_consumer( # self.app, # secret_id=secret_id, # service="service", # resource_type="resource_type", # resource_id="", # expect_errors=True # ) # self.assertEqual(400, consumer_resp.status_int) # ----------------------- Helper Functions --------------------------- def create_container(app, name=None, container_type=None, expect_errors=False, headers=None): request = { 'name': name, 'type': container_type } cleaned_request = {key: val for key, val in request.items() if val is not None} resp = app.post_json( '/containers/', cleaned_request, expect_errors=expect_errors, headers=headers ) created_uuid = None if resp.status_int == 201: container_ref = resp.json.get('container_ref', '') _, created_uuid = os.path.split(container_ref) return resp, created_uuid def create_container_consumer(app, container_id=None, name=None, url=None, expect_errors=False, headers=None): request = { 'name': name, 'URL': url } cleaned_request = {key: val for key, val in request.items() if val is not None} resp = app.post_json( '/containers/{container_id}/consumers/'.format( container_id=container_id), cleaned_request, expect_errors=expect_errors, headers=headers ) consumers = None if resp.status_int == 200: consumers = resp.json.get('consumers', '') return resp, consumers def create_secret(app, expect_errors=False): resp = app.post_json('/secrets/', {}, expect_errors=expect_errors) secret_id = None if resp.status_int == 201: secret_ref = resp.json.get('secret_ref', '') _, secret_id = os.path.split(secret_ref) return resp, secret_id def create_secret_consumer(app, secret_id=None, service=None, resource_type=None, resource_id=None, expect_errors=False, headers=None): request = { "service": service, "resource_type": resource_type, "resource_id": resource_id, } request = {k: v for k, v in request.items() if v is not None} resp = app.post_json( "/secrets/{}/consumers/".format(secret_id), 
request, expect_errors=expect_errors, headers=headers ) consumers = None if resp.status_int == 200: consumers = resp.json.get('consumers', '') return resp, consumers barbican-9.1.0.dev50/barbican/tests/api/controllers/test_secretmeta.py0000664000175000017500000003234713616500636026216 0ustar sahidsahid00000000000000# Copyright (c) 2017 IBM # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import os import mock from oslo_serialization import jsonutils as json from oslo_utils import uuidutils from barbican.tests import utils @utils.parameterized_test_case class WhenTestingSecretMetadataResource(utils.BarbicanAPIBaseTestCase): def setUp(self): super(WhenTestingSecretMetadataResource, self).setUp() self.valid_metadata = { "metadata": { "latitude": "30.393805", "longitude": "-97.724077" } } def test_create_secret_metadata(self): secret_resp, secret_uuid = create_secret(self.app) meta_resp = create_secret_metadata(self.app, self.valid_metadata, secret_resp) self.assertEqual(201, meta_resp.status_int) self.assertIsNotNone(meta_resp.json) def test_can_get_secret_metadata(self): secret_resp, secret_uuid = create_secret(self.app) meta_resp = create_secret_metadata(self.app, self.valid_metadata, secret_resp) self.assertEqual(201, meta_resp.status_int) get_resp = self.app.get('/secrets/%s/metadata' % secret_resp) self.assertEqual(200, get_resp.status_int) self.assertEqual(self.valid_metadata, get_resp.json) def test_get_secret_metadata_invalid_secret_should_fail(self): secret_resp, secret_uuid = create_secret(self.app) create_secret_metadata(self.app, self.valid_metadata, secret_resp) get_resp = self.app.get('/secrets/%s/metadata' % uuidutils.generate_uuid(dashed=False), expect_errors=True) self.assertEqual(404, get_resp.status_int) @utils.parameterized_test_case class WhenTestingSecretMetadatumResource(utils.BarbicanAPIBaseTestCase): def setUp(self): super(WhenTestingSecretMetadatumResource, self).setUp() self.valid_metadata = { "metadata": { "latitude": "30.393805", "longitude": "-97.724077" } } self.updated_valid_metadata = { "metadata": { "latitude": "30.393805", "longitude": "-97.724077", "access-limit": "2" } } self.valid_metadatum = { 'key': 'access-limit', 'value': '2' } @mock.patch('barbican.model.repositories.SecretUserMetadatumRepo.' 'get_metadata_for_secret') def test_can_create_secret_metadatum(self, mocked_get): secret_resp, secret_uuid = create_secret(self.app) mocked_get.return_value = self.valid_metadata['metadata'] meta_resp = create_secret_metadatum(self.app, self.valid_metadatum, secret_resp) self.assertEqual(201, meta_resp.status_int) self.assertIsNotNone(meta_resp.json) @mock.patch('barbican.model.repositories.SecretUserMetadatumRepo.' 
'get_metadata_for_secret') def test_conflict_create_same_key_secret_metadatum(self, mocked_get): secret_resp, secret_uuid = create_secret(self.app) mocked_get.return_value = self.valid_metadata['metadata'] latitude_metadatum = { "key": "latitude", "value": "30.393805" } meta_resp = create_secret_metadatum(self.app, latitude_metadatum, secret_resp, expect_errors=True) self.assertEqual(409, meta_resp.status_int) self.assertIsNotNone(meta_resp.json) @mock.patch('barbican.model.repositories.SecretUserMetadatumRepo.' 'get_metadata_for_secret') def test_can_delete_secret_metadatum(self, mocked_get): secret_resp, secret_uuid = create_secret(self.app) mocked_get.return_value = self.valid_metadata['metadata'] meta_resp = create_secret_metadatum(self.app, self.valid_metadatum, secret_resp) self.assertEqual(201, meta_resp.status_int) delete_resp = self.app.delete('/secrets/%s/metadata/access-limit' % secret_resp) self.assertEqual(204, delete_resp.status_int) @mock.patch('barbican.model.repositories.SecretUserMetadatumRepo.' 'get_metadata_for_secret') def test_can_get_secret_metadatum(self, mocked_get): secret_resp, secret_uuid = create_secret(self.app) mocked_get.return_value = self.valid_metadata['metadata'] meta_resp = create_secret_metadatum(self.app, self.valid_metadatum, secret_resp) self.assertEqual(201, meta_resp.status_int) mocked_get.return_value = self.updated_valid_metadata['metadata'] get_resp = self.app.get('/secrets/%s/metadata/access-limit' % secret_resp) self.assertEqual(200, get_resp.status_int) self.assertEqual(self.valid_metadatum, get_resp.json) @mock.patch('barbican.model.repositories.SecretUserMetadatumRepo.' 'get_metadata_for_secret') def test_get_secret_metadatum_not_found(self, mocked_get): secret_resp, secret_uuid = create_secret(self.app) mocked_get.return_value = self.valid_metadata['metadata'] meta_resp = create_secret_metadatum(self.app, self.valid_metadatum, secret_resp) self.assertEqual(201, meta_resp.status_int) mocked_get.return_value = self.updated_valid_metadata['metadata'] get_resp = self.app.get('/secrets/%s/metadata/nothere' % secret_resp, expect_errors=True) self.assertEqual(404, get_resp.status_int) @mock.patch('barbican.model.repositories.SecretUserMetadatumRepo.' 'get_metadata_for_secret') def test_can_update_secret_metadatum(self, mocked_get): secret_resp, secret_uuid = create_secret(self.app) mocked_get.return_value = self.valid_metadata['metadata'] meta_resp = create_secret_metadatum(self.app, self.valid_metadatum, secret_resp) self.assertEqual(201, meta_resp.status_int) new_metadatum = { 'key': 'access-limit', 'value': '5' } new_metadatum_json = json.dumps(new_metadatum) mocked_get.return_value = self.updated_valid_metadata['metadata'] put_resp = self.app.put('/secrets/%s/metadata/access-limit' % secret_resp, new_metadatum_json, headers={'Content-Type': 'application/json'}) self.assertEqual(200, put_resp.status_int) self.assertEqual(new_metadatum, put_resp.json) @mock.patch('barbican.model.repositories.SecretUserMetadatumRepo.' 
'get_metadata_for_secret') def test_can_update_secret_metadatum_not_found(self, mocked_get): secret_resp, secret_uuid = create_secret(self.app) mocked_get.return_value = self.valid_metadata['metadata'] meta_resp = create_secret_metadatum(self.app, self.valid_metadatum, secret_resp) self.assertEqual(201, meta_resp.status_int) new_metadatum = { 'key': 'newwwww', 'value': '5' } new_metadatum_json = json.dumps(new_metadatum) mocked_get.return_value = self.updated_valid_metadata['metadata'] put_resp = self.app.put('/secrets/%s/metadata/newwwww' % secret_resp, new_metadatum_json, headers={'Content-Type': 'application/json'}, expect_errors=True) self.assertEqual(404, put_resp.status_int) @mock.patch('barbican.model.repositories.SecretUserMetadatumRepo.' 'get_metadata_for_secret') def test_conflict_update_secret_metadatum(self, mocked_get): secret_resp, secret_uuid = create_secret(self.app) mocked_get.return_value = self.valid_metadata['metadata'] meta_resp = create_secret_metadatum(self.app, self.valid_metadatum, secret_resp) self.assertEqual(201, meta_resp.status_int) new_metadatum = { 'key': 'snoop', 'value': '5' } new_metadatum_json = json.dumps(new_metadatum) mocked_get.return_value = self.updated_valid_metadata['metadata'] put_resp = self.app.put('/secrets/%s/metadata/access-limit' % secret_resp, new_metadatum_json, headers={'Content-Type': 'application/json'}, expect_errors=True) self.assertEqual(409, put_resp.status_int) def test_returns_405_for_delete_on_metadata(self): secret_id, secret_resp = create_secret(self.app) resp = self.app.delete('/secrets/{0}/metadata/'.format(secret_id), expect_errors=True) self.assertEqual(405, resp.status_int) @mock.patch('barbican.model.repositories.SecretUserMetadatumRepo.' 'get_metadata_for_secret') def test_returns_405_for_head_on_metadatum(self, mocked_get): secret_id, secret_resp = create_secret(self.app) mocked_get.return_value = self.valid_metadata['metadata'] meta_resp = create_secret_metadatum(self.app, self.valid_metadatum, secret_id) self.assertEqual(201, meta_resp.status_int) resp = self.app.head('/secrets/{0}/metadata/access-limit'.format( secret_id), expect_errors=True) self.assertEqual(405, resp.status_int) # ----------------------- Helper Functions --------------------------- def create_secret(app, name=None, algorithm=None, bit_length=None, mode=None, expiration=None, payload='not-encrypted', content_type='text/plain', content_encoding=None, transport_key_id=None, transport_key_needed=None, expect_errors=False): request = { 'name': name, 'algorithm': algorithm, 'bit_length': bit_length, 'mode': mode, 'expiration': expiration, 'payload': payload, 'payload_content_type': content_type, 'payload_content_encoding': content_encoding, 'transport_key_id': transport_key_id, 'transport_key_needed': transport_key_needed } cleaned_request = {key: val for key, val in request.items() if val is not None} resp = app.post_json( '/secrets/', cleaned_request, expect_errors=expect_errors ) created_uuid = None if resp.status_int == 201: secret_ref = resp.json.get('secret_ref', '') _, created_uuid = os.path.split(secret_ref) return created_uuid, resp def create_secret_metadata(app, metadata, secret_uuid, expect_errors=False): request = {} for metadatum in metadata: request[metadatum] = metadata.get(metadatum) cleaned_request = {key: val for key, val in request.items() if val is not None} url = '/secrets/%s/metadata/' % secret_uuid resp = app.put_json( url, cleaned_request, expect_errors=expect_errors ) return resp def create_secret_metadatum(app, metadata, 
secret_uuid, remainder=None, update=False, expect_errors=False): request = {} for metadatum in metadata: request[metadatum] = metadata.get(metadatum) cleaned_request = {key: val for key, val in request.items() if val is not None} url = '/secrets/%s/metadata/' % secret_uuid if remainder: url = url + remainder if update: resp = app.put_json( url, cleaned_request, expect_errors=expect_errors ) else: resp = app.post_json( url, cleaned_request, expect_errors=expect_errors ) return resp barbican-9.1.0.dev50/barbican/tests/api/controllers/test_versions.py0000664000175000017500000001330613616500636025724 0ustar sahidsahid00000000000000# Copyright (c) 2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from barbican.api import controllers from barbican.common import utils as cmn_utils from barbican.tests import utils class WhenTestingVersionsResource(utils.BarbicanAPIBaseTestCase): root_controller = controllers.versions.VersionsController() def tearDown(self): super(WhenTestingVersionsResource, self).tearDown() cmn_utils.CONF.clear_override('host_href') def test_should_return_multiple_choices_on_get(self): resp = self.app.get('/') self.assertEqual(300, resp.status_int) def test_should_return_multiple_choices_on_get_if_json_accept_header(self): headers = {'Accept': 'application/json'} resp = self.app.get('/', headers=headers) self.assertEqual(300, resp.status_int) def test_should_redirect_if_json_home_accept_header_present(self): headers = {'Accept': 'application/json-home'} resp = self.app.get('/', headers=headers) self.assertEqual(302, resp.status_int) def test_should_return_version_json(self): resp = self.app.get('/') versions_response = resp.json['versions']['values'] v1_info = versions_response[0] # NOTE(jaosorior): I used assertIn instead of assertEqual because we # might start using decimal numbers in the future. So when that happens # this test will still be valid. 
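# For orientation, the version discovery document these assertions walk
# has roughly the following shape (a sketch inferred from the fields
# asserted in this module, not an exact server response):
#     {"versions": {"values": [{"id": "v1",
#                               "links": [{"href": "..."}],
#                               "media-types": [{"base": "application/json"}]}]}}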
self.assertIn('v1', v1_info['id']) self.assertEqual(1, len(v1_info['media-types'])) self.assertEqual('application/json', v1_info['media-types'][0]['base']) def test_when_host_href_is_not_set_in_conf(self): cmn_utils.CONF.set_override('host_href', '') host_hdr = 'http://myproxy.server.com:9311' utils.mock_pecan_request(self, host=host_hdr) dummy_root = 'http://mylocalhost:9999' resp = self.app.get(dummy_root) versions_response = resp.json['versions']['values'] for v_info in versions_response: self.assertIn(host_hdr, v_info['links'][0]['href']) self.assertNotIn(dummy_root, v_info['links'][0]['href']) def test_when_host_href_is_set_in_conf(self): host_href = 'http://myapp.server.com:9311/' cmn_utils.CONF.set_override('host_href', host_href) host_hdr = 'http://myproxy.server.com:9311' utils.mock_pecan_request(self, host=host_hdr) dummy_root = 'http://mylocalhost:9999' resp = self.app.get(dummy_root) versions_response = resp.json['versions']['values'] for v_info in versions_response: self.assertIn(host_href, v_info['links'][0]['href']) self.assertNotIn(dummy_root, v_info['links'][0]['href']) self.assertNotIn(host_hdr, v_info['links'][0]['href']) def test_when_host_href_is_general(self): host_href = 'http://myapp.server.com/key-manager' cmn_utils.CONF.set_override('host_href', host_href) host_hdr = 'http://myproxy.server.com:9311' utils.mock_pecan_request(self, host=host_hdr) dummy_root = 'http://mylocalhost:9999' resp = self.app.get(dummy_root) versions_response = resp.json['versions']['values'] for v_info in versions_response: self.assertIn(host_href, v_info['links'][0]['href']) self.assertNotIn(dummy_root, v_info['links'][0]['href']) self.assertNotIn(host_hdr, v_info['links'][0]['href']) def test_when_host_href_is_not_set_with_general_request_url(self): cmn_utils.CONF.set_override('host_href', '') host_hdr = 'http://myproxy.server.com/key-manager' utils.mock_pecan_request(self, host=host_hdr) dummy_root = 'http://mylocalhost:9999' resp = self.app.get(dummy_root) versions_response = resp.json['versions']['values'] for v_info in versions_response: self.assertIn(host_hdr, v_info['links'][0]['href']) self.assertNotIn(dummy_root, v_info['links'][0]['href']) class WhenTestingV1Resource(utils.BarbicanAPIBaseTestCase): def setUp(self): super(WhenTestingV1Resource, self).setUp() # For V1Controller, '/' URI maps to /v1 resource path self.root_controller = controllers.versions.V1Controller def test_get_for_json_accept_header(self): headers = {'Accept': 'application/json'} resp = self.app.get('/', headers=headers) # / refers to /v1 path self.assertEqual(200, resp.status_int) def test_get_for_json_home_accept_header(self): headers = {'Accept': 'application/json-home'} resp = self.app.get('/', headers=headers) # / refers to /v1 path self.assertEqual(200, resp.status_int) def test_get_response_should_return_version_json(self): resp = self.app.get('/') # / refers to /v1 path self.assertEqual(200, resp.status_int) v1_info = resp.json['version'] # NOTE(jaosorior): I used assertIn instead of assertEqual because we # might start using decimal numbers in the future. So when that happens # this test will still be valid. 
self.assertIn('v1', v1_info['id']) self.assertEqual(1, len(v1_info['media-types'])) self.assertEqual('application/json', v1_info['media-types'][0]['base']) barbican-9.1.0.dev50/barbican/tests/api/controllers/test_secretstores.py0000664000175000017500000003133413616500636026602 0ustar sahidsahid00000000000000# (c) Copyright 2015-2016 Hewlett Packard Enterprise Development LP # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from oslo_utils import uuidutils from barbican.model import models from barbican.model import repositories as repos from barbican.plugin.interface import secret_store from barbican.tests import utils class SecretStoresMixin(utils.MultipleBackendsTestCase): def _create_project(self): session = repos.get_project_repository().get_session() project = models.Project() project.external_id = ("keystone_project_id" + uuidutils.generate_uuid(dashed=False)) project.save(session=session) return project def _create_project_store(self, project_id, secret_store_id): proj_store_repo = repos.get_project_secret_store_repository() session = proj_store_repo.get_session() proj_model = models.ProjectSecretStore(project_id, secret_store_id) proj_s_store = proj_store_repo.create_from(proj_model, session) proj_s_store.save(session=session) return proj_s_store def _init_multiple_backends(self, enabled=True, global_default_index=0): store_plugin_names = ['store_crypto', 'kmip_plugin', 'store_crypto'] crypto_plugin_names = ['p11_crypto', '', 'simple_crypto'] self.init_via_conf_file(store_plugin_names, crypto_plugin_names, enabled=enabled, global_default_index=global_default_index) with mock.patch('barbican.plugin.crypto.p11_crypto.P11CryptoPlugin.' 
'_create_pkcs11'), \ mock.patch('kmip.pie.client.ProxyKmipClient'): secret_store.SecretStorePluginManager() class WhenTestingSecretStores(utils.BarbicanAPIBaseTestCase, SecretStoresMixin): def setUp(self): super(WhenTestingSecretStores, self).setUp() self.secret_store_repo = repos.get_secret_stores_repository() def test_should_get_all_secret_stores(self): g_index = 2 # global default index in plugins list self._init_multiple_backends(global_default_index=g_index) resp = self.app.get('/secret-stores', expect_errors=False) self.assertEqual(200, resp.status_int) secret_stores_data = resp.json.get('secret_stores') self.assertEqual(3, len(secret_stores_data)) for i, secret_data in enumerate(secret_stores_data): self.assertEqual(i == g_index, secret_data['global_default']) self.assertIsNotNone(secret_data['secret_store_ref']) self.assertIsNone(secret_data.get('id')) self.assertIsNone(secret_data.get('secret_store_id')) self.assertIsNotNone(secret_data['name']) self.assertIsNotNone(secret_data['secret_store_plugin']) self.assertIsNotNone(secret_data['created']) self.assertIsNotNone(secret_data['updated']) self.assertEqual(models.States.ACTIVE, secret_data['status']) def test_get_all_secret_stores_when_multiple_backends_not_enabled(self): self._init_multiple_backends(enabled=False) resp = self.app.get('/secret-stores', expect_errors=True) self.assertEqual(404, resp.status_int) resp = self.app.get('/secret-stores/any_valid_id', expect_errors=True) self.assertEqual(404, resp.status_int) def test_get_all_secret_stores_with_unsupported_http_method(self): self._init_multiple_backends() resp = self.app.put('/secret-stores', expect_errors=True) self.assertEqual(405, resp.status_int) resp = self.app.patch('/secret-stores', expect_errors=True) self.assertEqual(405, resp.status_int) def test_should_get_global_default(self): self._init_multiple_backends(global_default_index=1) resp = self.app.get('/secret-stores/global-default', expect_errors=False) self.assertEqual(200, resp.status_int) resp_data = resp.json self.assertTrue(resp_data['global_default']) self.assertIn('kmip', resp_data['name'].lower()) self.assertIsNotNone(resp_data['secret_store_ref']) self.assertIsNotNone(resp_data['secret_store_plugin']) self.assertIsNone(resp_data['crypto_plugin']) self.assertIsNotNone(resp_data['created']) self.assertIsNotNone(resp_data['updated']) self.assertEqual(models.States.ACTIVE, resp_data['status']) def test_get_global_default_when_multiple_backends_not_enabled(self): self._init_multiple_backends(enabled=False) with mock.patch('barbican.common.resources.' 
'get_or_create_project') as m1: resp = self.app.get('/secret-stores/global-default', expect_errors=True) self.assertFalse(m1.called) self.assertEqual(404, resp.status_int) def test_get_preferred_when_preferred_is_set(self): self._init_multiple_backends(global_default_index=1) secret_stores = self.secret_store_repo.get_all() project1 = self._create_project() self._create_project_store(project1.id, secret_stores[0].id) self.app.extra_environ = { 'barbican.context': self._build_context(project1.external_id) } resp = self.app.get('/secret-stores/preferred', expect_errors=False) self.assertEqual(200, resp.status_int) resp_data = resp.json self.assertEqual(secret_stores[0].name, resp_data['name']) self.assertEqual(secret_stores[0].global_default, resp_data['global_default']) self.assertIn('/secret-stores/{0}'.format(secret_stores[0].id), resp_data['secret_store_ref']) self.assertIsNotNone(resp_data['created']) self.assertIsNotNone(resp_data['updated']) self.assertEqual(models.States.ACTIVE, resp_data['status']) def test_get_preferred_when_preferred_is_not_set(self): self._init_multiple_backends(global_default_index=1) project1 = self._create_project() self.app.extra_environ = { 'barbican.context': self._build_context(project1.external_id) } resp = self.app.get('/secret-stores/preferred', expect_errors=True) self.assertEqual(404, resp.status_int) def test_get_preferred_when_multiple_backends_not_enabled(self): self._init_multiple_backends(enabled=False) with mock.patch('barbican.common.resources.' 'get_or_create_project') as m1: resp = self.app.get('/secret-stores/preferred', expect_errors=True) self.assertFalse(m1.called) self.assertEqual(404, resp.status_int) class WhenTestingSecretStore(utils.BarbicanAPIBaseTestCase, SecretStoresMixin): def setUp(self): super(WhenTestingSecretStore, self).setUp() self.secret_store_repo = repos.get_secret_stores_repository() def test_get_a_secret_store_when_no_error(self): self._init_multiple_backends() secret_stores = self.secret_store_repo.get_all() store = secret_stores[0] resp = self.app.get('/secret-stores/{0}'.format(store.id), expect_errors=False) self.assertEqual(200, resp.status_int) data = resp.json self.assertEqual(store.global_default, data['global_default']) self.assertEqual(store.name, data['name']) self.assertIn('/secret-stores/{0}'.format(store.id), data['secret_store_ref']) self.assertIsNotNone(data['secret_store_plugin']) self.assertIsNotNone(data['created']) self.assertIsNotNone(data['updated']) self.assertEqual(models.States.ACTIVE, data['status']) def test_invalid_uri_for_secret_stores_subresource(self): self._init_multiple_backends() resp = self.app.get('/secret-stores/invalid_uri', expect_errors=True) self.assertEqual(404, resp.status_int) def test_get_a_secret_store_with_unsupported_http_method(self): self._init_multiple_backends() secret_stores = self.secret_store_repo.get_all() store_id = secret_stores[0].id resp = self.app.put('/secret-stores/{0}'.format(store_id), expect_errors=True) self.assertEqual(405, resp.status_int) def test_invalid_uri_for_a_secret_store_subresource(self): self._init_multiple_backends() secret_stores = self.secret_store_repo.get_all() resp = self.app.get('/secret-stores/{0}/invalid_uri'. 
format(secret_stores[0].id), expect_errors=True) self.assertEqual(405, resp.status_int) class WhenTestingProjectSecretStore(utils.BarbicanAPIBaseTestCase, SecretStoresMixin): def setUp(self): super(WhenTestingProjectSecretStore, self).setUp() self.secret_store_repo = repos.get_secret_stores_repository() self.proj_store_repo = repos.get_project_secret_store_repository() def test_set_a_preferred_secret_store_when_no_error(self): self._init_multiple_backends() stores = self.secret_store_repo.get_all() proj_external_id = uuidutils.generate_uuid(dashed=False) # Get ids, as secret stores are not bound to the session after a REST call. store_ids = [store.id for store in stores] for store_id in store_ids: self.app.extra_environ = { 'barbican.context': self._build_context(proj_external_id) } resp = self.app.post('/secret-stores/{0}/preferred'. format(store_id), expect_errors=False) self.assertEqual(204, resp.status_int) # Now make sure the preferred store is set to this store id via a GET call resp = self.app.get('/secret-stores/preferred') self.assertIn(store_id, resp.json['secret_store_ref']) def test_unset_a_preferred_secret_store_when_no_error(self): self._init_multiple_backends() stores = self.secret_store_repo.get_all() proj_external_id = uuidutils.generate_uuid(dashed=False) # Get ids, as secret stores are not bound to the session after a REST call. store_ids = [store.id for store in stores] for store_id in store_ids: self.app.extra_environ = { 'barbican.context': self._build_context(proj_external_id) } resp = self.app.post('/secret-stores/{0}/preferred'. format(store_id), expect_errors=False) self.assertEqual(204, resp.status_int) # Unset the preferred store here resp = self.app.delete('/secret-stores/{0}/preferred'. format(store_id), expect_errors=False) self.assertEqual(204, resp.status_int) # Now make sure that there is no longer a preferred store set resp = self.app.get('/secret-stores/preferred', expect_errors=True) self.assertEqual(404, resp.status_int) def test_unset_a_preferred_store_when_not_found_error(self): self._init_multiple_backends() stores = self.secret_store_repo.get_all() proj_external_id = uuidutils.generate_uuid(dashed=False) self.app.extra_environ = { 'barbican.context': self._build_context(proj_external_id) } resp = self.app.delete('/secret-stores/{0}/preferred'. format(stores[0].id), expect_errors=True) self.assertEqual(404, resp.status_int) def test_preferred_secret_store_call_with_unsupported_http_method(self): self._init_multiple_backends() secret_stores = self.secret_store_repo.get_all() store_id = secret_stores[0].id proj_external_id = uuidutils.generate_uuid(dashed=False) self.app.extra_environ = { 'barbican.context': self._build_context(proj_external_id) } resp = self.app.put('/secret-stores/{0}/preferred'. format(store_id), expect_errors=True) self.assertEqual(405, resp.status_int) barbican-9.1.0.dev50/barbican/tests/api/controllers/test_quotas.py0000664000175000017500000001644713616500636025371 0ustar sahidsahid00000000000000# Copyright (c) 2015 Cisco Systems # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied.
# See the License for the specific language governing permissions and # limitations under the License. import unittest from barbican.tests import utils class WhenTestingQuotas(utils.BarbicanAPIBaseTestCase): def test_should_get_quotas(self): params = {} resp = self.app.get('/quotas', params) self.assertEqual(200, resp.status_int) quotas_list = resp.json.get('quotas') self.assertEqual({'consumers': -1, 'containers': -1, 'orders': -1, 'secrets': -1, 'cas': -1}, quotas_list) def test_should_get_specific_project_quotas(self): params = {} self.create_a_project_quotas() resp = self.app.get( '/project-quotas/{0}'.format(self.get_test_project_id()), params) self.assertEqual(200, resp.status_int) project_quotas = resp.json.get('project_quotas') self.assertEqual({'consumers': 105, 'containers': 103, 'orders': 102, 'secrets': 101, 'cas': 106}, project_quotas) def test_should_return_not_found_get_specific_project_quotas(self): params = {} resp = self.app.get( '/project-quotas/{0}'.format(self.get_test_project_id()), params, expect_errors=True) self.assertEqual(404, resp.status_int) def test_should_get_project_quotas_list(self): self.create_project_quotas() params = {} resp = self.app.get('/project-quotas', params) self.assertEqual(200, resp.status_int) project_quotas_list = resp.json.get('project_quotas') self.assertEqual(3, len(project_quotas_list)) self.assertIn('total', resp.json) def test_should_get_empty_project_quotas_list(self): params = {} resp = self.app.get('/project-quotas', params) self.assertEqual(200, resp.status_int) project_quotas_list = resp.json.get('project_quotas') self.assertEqual([], project_quotas_list) self.assertIn('total', resp.json) def test_pagination_attributes(self): for index in range(11): self.create_a_project_quotas(index) params = {'limit': '2', 'offset': '2'} resp = self.app.get('/project-quotas', params) self.assertEqual(200, resp.status_int) self.assertIn('previous', resp.json) self.assertIn('next', resp.json) previous_ref = resp.json.get('previous') next_ref = resp.json.get('next') self.assertIn('offset=0', previous_ref) self.assertIn('offset=4', next_ref) def test_should_put_project_quotas(self): request = {'project_quotas': {}} resp = self.app.put_json( '/project-quotas/{0}'.format(self.project_id), request, headers={'Content-Type': 'application/json'}) self.assertEqual(204, resp.status_int) def test_should_return_bad_value_put_project_quotas(self): request = '{"project_quotas": {"secrets": "foo"}}' resp = self.app.put( '/project-quotas/{0}'.format(self.project_id), request, headers={'Content-Type': 'application/json'}, expect_errors=True) self.assertEqual(400, resp.status_int) def test_should_return_bad_type_put_project_quotas(self): request = {'project_quotas': {}} resp = self.app.put_json( '/project-quotas/{0}'.format(self.project_id), request, headers={'Content-Type': 'application/foo'}, expect_errors=True) self.assertEqual(415, resp.status_int) def test_should_return_bad_data_put_project_quotas(self): """PUT with an invalid body should return 400 for /project-quotas/{project-id}""" params = {'bad': 'value'} resp = self.app.put( '/project-quotas/{0}'.format(self.project_id), params, headers={'Content-Type': 'application/json'}, expect_errors=True) self.assertEqual(400, resp.status_int) def test_should_return_no_payload_for_put_project_quotas(self): """PUT with an empty body should return 400 for /project-quotas/{project-id}""" params = {} resp = self.app.put( '/project-quotas/{0}'.format(self.project_id), params, headers={'Content-Type': 'application/json'}, expect_errors=True)
self.assertEqual(400, resp.status_int) def test_should_delete_specific_project_quotas(self): params = {} self.create_a_project_quotas() resp = self.app.delete( '/project-quotas/{0}'.format(self.get_test_project_id()), params) self.assertEqual(204, resp.status_int) def test_should_return_not_found_delete_specific_project_quotas(self): params = {} resp = self.app.delete( '/project-quotas/{0}'.format('dummy'), params, expect_errors=True) self.assertEqual(404, resp.status_int) def test_check_put_quotas_not_allowed(self): """PUT not allowed operation for /quotas""" params = {} resp = self.app.put('/quotas/', params, expect_errors=True) self.assertEqual(405, resp.status_int) def test_check_put_project_quotas_list_not_allowed(self): """PUT not allowed operation for /project-quotas""" params = {} resp = self.app.put('/project-quotas', params, expect_errors=True) self.assertEqual(405, resp.status_int) def test_check_post_project_quotas_not_allowed(self): """POST not allowed operation for /project-quotas/{project-id}""" params = {} resp = self.app.post( '/project-quotas/{0}'.format(self.project_id), params, expect_errors=True) self.assertEqual(405, resp.status_int) def test_check_post_project_quotas_list_not_allowed(self): """POST not allowed operation for /project-quotas""" params = {} resp = self.app.post('/project-quotas', params, expect_errors=True) self.assertEqual(405, resp.status_int) # ----------------------- Helper Functions --------------------------- def get_test_project_id(self, index=1): return 'project' + str(index) def create_a_project_quotas(self, index=1): project_id = self.get_test_project_id(index) parsed_project_quotas = { 'secrets': index * 100 + 1, 'orders': index * 100 + 2, 'containers': index * 100 + 3, 'consumers': index * 100 + 5, 'cas': index * 100 + 6} request = {'project_quotas': parsed_project_quotas} resp = self.app.put_json( '/project-quotas/{0}'.format(project_id), request) self.assertEqual(204, resp.status_int) def create_project_quotas(self): for index in [1, 2, 3]: self.create_a_project_quotas(index) if __name__ == '__main__': unittest.main() barbican-9.1.0.dev50/barbican/tests/api/controllers/__init__.py0000664000175000017500000000000013616500636024537 0ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/tests/api/controllers/test_secrets.py0000664000175000017500000007016313616500636025530 0ustar sahidsahid00000000000000# Copyright (c) 2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License.
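# NOTE: the tests below drive the /v1/secrets controller through a WSGI
# test app. For orientation, a one-step secret creation, as built by the
# create_secret() helper at the bottom of this module, has this general
# shape (field values here are illustrative only, not an exact request):
#
#     POST /secrets
#     Content-Type: application/json
#
#     {"name": "test", "payload": "not-encrypted",
#      "payload_content_type": "text/plain"}
#
# A successful create returns 201 with a "secret_ref" URL whose final
# path segment is the new secret's UUID.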
import base64 import os import mock from oslo_utils import timeutils from barbican.api.controllers import secrets from barbican.common import validators from barbican.model import models from barbican.model import repositories from barbican.tests import utils project_repo = repositories.get_project_repository() secrets_repo = repositories.get_secret_repository() tkey_repo = repositories.get_transport_key_repository() @utils.parameterized_test_case class WhenTestingSecretsResource(utils.BarbicanAPIBaseTestCase): def test_can_create_new_secret_one_step(self): resp, secret_uuid = create_secret( self.app, payload='not-encrypted', content_type='text/plain' ) self.assertEqual(201, resp.status_int) self.assertIsNotNone(secret_uuid) def test_can_create_new_secret_without_payload(self): resp, secret_uuid = create_secret(self.app, name='test') self.assertEqual(201, resp.status_int) secret = secrets_repo.get(secret_uuid, self.project_id) self.assertEqual('test', secret.name) self.assertEqual([], secret.encrypted_data) def test_can_create_new_secret_if_project_doesnt_exist(self): # Build new context new_project_context = self._build_context('test_project_id') self.app.extra_environ = {'barbican.context': new_project_context} # Create a generic secret resp, _ = create_secret(self.app, name='test_secret') self.assertEqual(201, resp.status_int) # Verify the new project was created project = project_repo.find_by_external_project_id('test_project_id') self.assertIsNotNone(project) def test_can_create_new_secret_with_payload_just_under_max(self): large_payload = 'A' * (validators.DEFAULT_MAX_SECRET_BYTES - 8) resp, _ = create_secret( self.app, payload=large_payload, content_type='text/plain' ) self.assertEqual(201, resp.status_int) def test_creating_new_secret_with_oversized_payload_should_fail(self): oversized_payload = 'A' * (validators.DEFAULT_MAX_SECRET_BYTES + 10) resp, _ = create_secret( self.app, payload=oversized_payload, content_type='text/plain', expect_errors=True ) self.assertEqual(413, resp.status_int) def test_create_new_secret_with_empty_payload_should_fail(self): resp, _ = create_secret( self.app, payload='', content_type='text/plain', expect_errors=True ) self.assertEqual(400, resp.status_int) def test_expiration_should_be_normalized_with_new_secret(self): target_expiration = '2114-02-28 12:14:44.180394-05:00' resp, secret_uuid = create_secret( self.app, expiration=target_expiration ) self.assertEqual(201, resp.status_int) # Verify that the system normalizes time to UTC secret = secrets_repo.get(secret_uuid, self.project_id) local_datetime = timeutils.parse_isotime(target_expiration) datetime_utc = timeutils.normalize_time(local_datetime) self.assertEqual(datetime_utc, secret.expiration) @mock.patch('barbican.plugin.resources.store_secret') def test_can_create_new_secret_meta_w_transport_key(self, mocked_store): transport_key_model = models.TransportKey('default_plugin', 'tkey1234') # TODO(jvrbanac): Look into removing this patch mocked_store.return_value = models.Secret(), transport_key_model # Make sure to add the transport key tkey_repo.create_from(transport_key_model) transport_key_id = transport_key_model.id resp, secret_uuid = create_secret( self.app, name='test', transport_key_needed='true' ) self.assertEqual(201, resp.status_int) self.assertIsNotNone(secret_uuid) self.assertIn(transport_key_id, resp.json.get('transport_key_ref')) @mock.patch('barbican.plugin.resources.store_secret') def test_can_create_new_secret_with_transport_key(self, mocked_store): # TODO(jvrbanac): Look into 
removing this patch mocked_store.return_value = models.Secret(), None # Create Transport Key (keeping for session scoping reasons) transport_key_model = models.TransportKey('default_plugin', 'tkey1234') transport_key_id = transport_key_model.id tkey_repo.create_from(transport_key_model) # Create a normal secret with the TransportKey resp, secret_uuid = create_secret( self.app, payload='not-encrypted', content_type='text/plain', transport_key_id=transport_key_id ) self.assertEqual(201, resp.status_int) # We're interested in the transport key values mocked_store.assert_called_once_with( unencrypted_raw='not-encrypted', content_type_raw='text/plain', content_encoding=None, secret_model=mock.ANY, project_model=mock.ANY, transport_key_id=transport_key_id, transport_key_needed=False ) def test_new_secret_fails_with_invalid_transport_key_ref(self): resp, _ = create_secret( self.app, payload='superdupersecret', content_type='text/plain', transport_key_id="non_existing_transport_key_id", transport_key_needed="true", expect_errors=True ) self.assertEqual(400, resp.status_int) def test_new_secret_w_unsupported_content_type_should_fail(self): resp, _ = create_secret( self.app, payload='something_here', content_type='bogus_content_type', expect_errors=True ) self.assertEqual(400, resp.status_int) @utils.parameterized_dataset({ 'no_encoding': [None, 'application/octet-stream'], 'bad_encoding': ['purple', 'application/octet-stream'], 'no_content_type': ['base64', None] }) def test_new_secret_fails_with_binary_payload_and(self, encoding=None, content_type=None): resp, _ = create_secret( self.app, payload='lOtfqHaUUpe6NqLABgquYQ==', content_type=content_type, content_encoding=encoding, expect_errors=True ) self.assertEqual(400, resp.status_int) def test_new_secret_fails_with_bad_payload(self): resp, _ = create_secret( self.app, payload='AAAAAAAAA', content_type='application/octet-stream', content_encoding='base64', expect_errors=True ) self.assertEqual(400, resp.status_int) class WhenGettingSecretsList(utils.BarbicanAPIBaseTestCase): def test_list_secrets_by_name(self): # Creating a secret to be retrieved later create_resp, _ = create_secret( self.app, name='secret mission' ) self.assertEqual(201, create_resp.status_int) params = {'name': 'secret mission'} get_resp = self.app.get('/secrets/', params) self.assertEqual(200, get_resp.status_int) secret_list = get_resp.json.get('secrets') self.assertEqual('secret mission', secret_list[0].get('name')) def test_list_secrets(self): # Creating a secret to be retrieved later create_resp, _ = create_secret( self.app, name='James Bond' ) self.assertEqual(201, create_resp.status_int) get_resp = self.app.get('/secrets/') self.assertEqual(200, get_resp.status_int,) self.assertIn('total', get_resp.json) secret_list = get_resp.json.get('secrets') self.assertGreater(len(secret_list), 0) def test_pagination_attributes(self): # Create a list of secrets greater than default limit (10) for _ in range(11): create_resp, _ = create_secret(self.app, name='Sterling Archer') self.assertEqual(201, create_resp.status_int) params = {'limit': '2', 'offset': '2'} get_resp = self.app.get('/secrets/', params) self.assertEqual(200, get_resp.status_int) self.assertIn('previous', get_resp.json) self.assertIn('next', get_resp.json) previous_ref = get_resp.json.get('previous') next_ref = get_resp.json.get('next') self.assertIn('offset=0', previous_ref) self.assertIn('offset=4', next_ref) def test_empty_list_of_secrets(self): params = {'name': 'Austin Powers'} get_resp = 
self.app.get('/secrets/', params) self.assertEqual(200, get_resp.status_int) secret_list = get_resp.json.get('secrets') self.assertEqual(0, len(secret_list)) # These should never exist in this scenario self.assertNotIn('previous', get_resp.json) self.assertNotIn('next', get_resp.json) def test_bad_date_filter_results_in_400(self): params = {'expiration': 'bogus'} get_resp = self.app.get('/secrets/', params, expect_errors=True) self.assertEqual(400, get_resp.status_int) def test_bad_sorting_results_in_400(self): params = {'sort': 'bogus'} get_resp = self.app.get('/secrets/', params, expect_errors=True) self.assertEqual(400, get_resp.status_int) class WhenGettingPuttingOrDeletingSecret(utils.BarbicanAPIBaseTestCase): def test_get_secret_as_plain(self): payload = 'this message will self destruct in 10 seconds' resp, secret_uuid = create_secret( self.app, payload=payload, content_type='text/plain' ) self.assertEqual(201, resp.status_int) headers = {'Accept': 'text/plain'} get_resp = self.app.get( '/secrets/{0}'.format(secret_uuid), headers=headers ) self.assertEqual(200, get_resp.status_int) self.assertEqual(payload, get_resp.body.decode('utf-8')) def test_get_secret_payload_with_pecan_default_accept_header(self): payload = 'a very interesting string' resp, secret_uuid = create_secret( self.app, payload=payload, content_type='text/plain' ) self.assertEqual(201, resp.status_int) headers = {'Accept': '*/*'} get_resp = self.app.get( '/secrets/{0}/payload'.format(secret_uuid), headers=headers ) self.assertEqual(200, get_resp.status_int) self.assertEqual(payload, get_resp.body.decode('utf-8')) def test_get_secret_payload_with_blank_accept_header(self): payload = 'a very interesting string' resp, secret_uuid = create_secret( self.app, payload=payload, content_type='text/plain' ) self.assertEqual(201, resp.status_int) headers = {'Accept': ''} get_resp = self.app.get( '/secrets/{0}/payload'.format(secret_uuid), headers=headers ) self.assertEqual(200, get_resp.status_int) self.assertEqual(payload, get_resp.body.decode('utf-8')) def test_get_secret_payload_with_no_accept_header(self): payload = 'a very interesting string' resp, secret_uuid = create_secret( self.app, payload=payload, content_type='text/plain' ) self.assertEqual(201, resp.status_int) headers = {} get_resp = self.app.get( '/secrets/{0}/payload'.format(secret_uuid), headers=headers ) self.assertEqual(200, get_resp.status_int) self.assertEqual(payload, get_resp.body.decode('utf-8')) def test_get_secret_is_decoded_for_binary(self): payload = 'a123' resp, secret_uuid = create_secret( self.app, payload=payload, content_type='application/octet-stream', content_encoding='base64' ) headers = { 'Accept': 'application/octet-stream', } get_resp = self.app.get( '/secrets/{0}'.format(secret_uuid), headers=headers ) decoded = b'k]\xb7' self.assertEqual(decoded, get_resp.body) def test_returns_404_on_get_when_not_found(self): """Test with valid uuid which is not present in DB.""" get_resp = self.app.get( '/secrets/{0}'.format(utils.generate_test_valid_uuid()), headers={'Accept': 'application/json'}, expect_errors=True ) self.assertEqual(404, get_resp.status_int) def test_returns_404_on_get_invalid_secret_id(self): """Test where uuid provided is not valid.""" get_resp = self.app.get( '/secrets/98c876d9-aaac-44e4-8ea8-invalid-id', headers={'Accept': 'application/json'}, expect_errors=True ) self.assertEqual(404, get_resp.status_int) def test_returns_404_on_get_payload_when_no_payload(self): resp, secret_uuid = create_secret(self.app) headers = { 'Accept': 
'text/plain', } get_resp = self.app.get( '/secrets/{0}/payload'.format(secret_uuid), headers=headers, expect_errors=True ) self.assertEqual(404, get_resp.status_int) def test_returns_404_on_get_with_bad_uuid(self): get_resp = self.app.get( '/secrets/98c876d9-aaac-44e4-8ea8-441932962b05X', headers={'Accept': 'application/json'}, expect_errors=True ) self.assertEqual(404, get_resp.status_int) def test_returns_406_with_get_bad_accept_header(self): resp, secret_uuid = create_secret( self.app, payload='blah', content_type='text/plain' ) self.assertEqual(201, resp.status_int) get_resp = self.app.get( '/secrets/{0}'.format(secret_uuid), headers={'Accept': 'golden gun', 'Accept-Encoding': 'gzip'}, expect_errors=True ) self.assertEqual(406, get_resp.status_int) def test_put_plain_text_secret(self): resp, secret_uuid = create_secret( self.app ) self.assertEqual(201, resp.status_int) message = 'Babou! Serpentine!' put_resp = self.app.put( '/secrets/{0}'.format(secret_uuid), message, headers={'Content-Type': 'text/plain'} ) self.assertEqual(204, put_resp.status_int) get_resp = self.app.get( '/secrets/{0}'.format(secret_uuid), headers={'Accept': 'text/plain'} ) self.assertEqual(200, get_resp.status_int) self.assertEqual(message, get_resp.body.decode('utf-8')) def test_put_binary_secret(self): resp, secret_uuid = create_secret( self.app ) self.assertEqual(201, resp.status_int) binary_string = b'a binary string' put_resp = self.app.put( '/secrets/{0}'.format(secret_uuid), binary_string, headers={'Content-Type': 'application/octet-stream'} ) self.assertEqual(204, put_resp.status_int) get_resp = self.app.get( '/secrets/{0}'.format(secret_uuid), headers={'Accept': 'application/octet-stream'} ) self.assertEqual(200, get_resp.status_int) self.assertEqual(binary_string, get_resp.body) def test_put_base64_secret(self): resp, secret_uuid = create_secret( self.app ) self.assertEqual(201, resp.status_int) payload = base64.b64encode(b'I had something for this') put_resp = self.app.put( '/secrets/{0}'.format(secret_uuid), payload, headers={ 'Content-Type': 'application/octet-stream', 'Content-Encoding': 'base64' } ) self.assertEqual(204, put_resp.status_int) get_resp = self.app.get( '/secrets/{0}'.format(secret_uuid), headers={ 'Accept': 'application/octet-stream', 'Content-Encoding': 'base64' } ) self.assertEqual(200, get_resp.status_int) self.assertEqual(base64.b64decode(payload), get_resp.body) def test_returns_400_with_put_unknown_encoding(self): resp, secret_uuid = create_secret( self.app ) self.assertEqual(201, resp.status_int) payload = base64.b64encode(b'I had something for this') put_resp = self.app.put( '/secrets/{0}'.format(secret_uuid), payload, headers={ 'Accept': 'text/plain', 'Content-Type': 'application/octet-stream', 'Content-Encoding': 'unknownencoding' }, expect_errors=True ) self.assertEqual(400, put_resp.status_int) def test_returns_415_with_put_unsupported_media_type(self): resp, secret_uuid = create_secret( self.app ) self.assertEqual(201, resp.status_int) put_resp = self.app.put( '/secrets/{0}'.format(secret_uuid), 'rampage', headers={ 'Content-Type': 'application/json' }, expect_errors=True ) self.assertEqual(415, put_resp.status_int) def test_returns_415_with_put_no_media_type(self): resp, secret_uuid = create_secret( self.app ) self.assertEqual(201, resp.status_int) put_resp = self.app.put( '/secrets/{0}'.format(secret_uuid), 'rampage again', headers={ 'Content-Type': '' }, expect_errors=True ) self.assertEqual(415, put_resp.status_int) def test_returns_404_put_secret_not_found(self): 
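# A syntactically valid UUID that is not present in the database
# should yield 404, not 400.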
put_resp = self.app.put( '/secrets/98c876d9-aaac-44e4-8ea8-441932962b05', 'some text', headers={'Content-Type': 'text/plain'}, expect_errors=True ) self.assertEqual(404, put_resp.status_int) def test_returns_409_put_to_existing_secret(self): resp, secret_uuid = create_secret( self.app, payload='blah', content_type='text/plain' ) self.assertEqual(201, resp.status_int) put_resp = self.app.put( '/secrets/{0}'.format(secret_uuid), 'do not want', headers={'Content-Type': 'text/plain'}, expect_errors=True ) self.assertEqual(409, put_resp.status_int) def test_returns_400_put_no_payload(self): resp, secret_uuid = create_secret( self.app ) self.assertEqual(201, resp.status_int) put_resp = self.app.put( '/secrets/{0}'.format(secret_uuid), headers={'Content-Type': 'text/plain'}, expect_errors=True ) self.assertEqual(400, put_resp.status_int) def test_returns_400_put_with_empty_payload(self): resp, secret_uuid = create_secret( self.app ) self.assertEqual(201, resp.status_int) put_resp = self.app.put( '/secrets/{0}'.format(secret_uuid), '', headers={'Content-Type': 'text/plain'}, expect_errors=True ) self.assertEqual(400, put_resp.status_int) def test_returns_413_put_with_text_too_large(self): resp, secret_uuid = create_secret( self.app ) self.assertEqual(201, resp.status_int) text_too_big = 'x' * 10050 put_resp = self.app.put( '/secrets/{0}'.format(secret_uuid), text_too_big, headers={'Content-Type': 'text/plain'}, expect_errors=True ) self.assertEqual(413, put_resp.status_int) def test_delete_secret(self): resp, secret_uuid = create_secret( self.app ) self.assertEqual(201, resp.status_int) delete_resp = self.app.delete( '/secrets/{0}/'.format(secret_uuid) ) self.assertEqual(204, delete_resp.status_int) def test_raise_404_for_delete_secret_not_found(self): delete_resp = self.app.delete( '/secrets/98c876d9-aaac-44e4-8ea8-441932962b05', expect_errors=True ) self.assertEqual(404, delete_resp.status_int) self.assertEqual('application/json', delete_resp.content_type) def test_delete_with_json_accept_header(self): resp, secret_uuid = create_secret( self.app ) self.assertEqual(201, resp.status_int) delete_resp = self.app.delete( '/secrets/{0}/'.format(secret_uuid), headers={'Accept': 'application/json'} ) self.assertEqual(204, delete_resp.status_int) @utils.parameterized_test_case class WhenPerformingUnallowedOperations(utils.BarbicanAPIBaseTestCase): def test_returns_405_for_put_json_on_secrets(self): test_json = { 'name': 'Barry', 'algorithm': 'AES', 'bit_length': 256, 'mode': 'CBC' } resp = self.app.put_json( '/secrets/', test_json, expect_errors=True ) self.assertEqual(405, resp.status_int) def test_returns_405_for_delete_on_secrets(self): resp = self.app.delete( '/secrets/', expect_errors=True ) self.assertEqual(405, resp.status_int) def test_returns_405_for_get_payload(self): created_resp, secret_uuid = create_secret( self.app ) resp = self.app.post( '/secrets/{0}/payload'.format(secret_uuid), 'Do you want ants? 
This is how you get ants!', headers={'Content-Type': 'text/plain'}, expect_errors=True ) self.assertEqual(405, resp.status_int) @utils.parameterized_dataset({ 'delete': ['delete'], 'put': ['put'], 'post': ['post'] }) def test_returns_405_for_calling_secret_payload_uri_with( self, http_verb=None ): created_resp, secret_uuid = create_secret( self.app ) self.assertEqual(201, created_resp.status_int) operation = getattr(self.app, http_verb) resp = operation( '/secrets/{0}/payload'.format(secret_uuid), 'boop', expect_errors=True ) self.assertEqual(405, resp.status_int) class WhenValidatingDateFilters(utils.BarbicanAPIBaseTestCase): def setUp(self): super(WhenValidatingDateFilters, self).setUp() self.controller = secrets.SecretsController() def test_validates_plain_timestamp(self): date_filter = '2016-01-01T00:00:00' self.assertTrue(self.controller._is_valid_date_filter(date_filter)) def test_validates_gt_and_lt_timestamps(self): date_filter = 'gt:2016-01-01T00:00:00,lt:2016-12-31T00:00:00' self.assertTrue(self.controller._is_valid_date_filter(date_filter)) def test_validates_gte_and_lte_timestamps(self): date_filter = 'gte:2016-01-01T00:00:00,lte:2016-12-31T00:00:00' self.assertTrue(self.controller._is_valid_date_filter(date_filter)) def test_validation_fails_with_two_plain_timestamps(self): date_filter = '2016-01-01T00:00:00,2016-01-02T00:00:00' self.assertFalse(self.controller._is_valid_date_filter(date_filter)) def test_validation_fails_with_two_gt_timestamps(self): date_filter = 'gt:2016-01-01T00:00:00,gt:2016-01-02T00:00:00' self.assertFalse(self.controller._is_valid_date_filter(date_filter)) def test_validation_fails_with_two_lt_timestamps(self): date_filter = 'lt:2016-01-01T00:00:00,lt:2016-01-02T00:00:00' self.assertFalse(self.controller._is_valid_date_filter(date_filter)) def test_validation_fails_with_two_gte_timestamps(self): date_filter = 'gte:2016-01-01T00:00:00,gte:2016-01-02T00:00:00' self.assertFalse(self.controller._is_valid_date_filter(date_filter)) def test_validation_fails_with_two_lte_timestamps(self): date_filter = 'lte:2016-01-01T00:00:00,lte:2016-01-02T00:00:00' self.assertFalse(self.controller._is_valid_date_filter(date_filter)) def test_validation_fails_with_plain_and_gte_timestamps(self): date_filter = '2016-01-01T00:00:00,gte:2016-01-02T00:00:00' self.assertFalse(self.controller._is_valid_date_filter(date_filter)) date_filter = 'gte:2016-01-01T00:00:00,2016-01-02T00:00:00' self.assertFalse(self.controller._is_valid_date_filter(date_filter)) def test_validation_fails_with_plain_and_lte_timestamps(self): date_filter = '2016-01-01T00:00:00,lte:2016-01-02T00:00:00' self.assertFalse(self.controller._is_valid_date_filter(date_filter)) date_filter = 'lte:2016-01-01T00:00:00,2016-01-02T00:00:00' self.assertFalse(self.controller._is_valid_date_filter(date_filter)) def test_validation_fails_with_gt_and_gte_timestamps(self): date_filter = 'gt:2016-01-01T00:00:00,gte:2016-01-02T00:00:00' self.assertFalse(self.controller._is_valid_date_filter(date_filter)) def test_validation_fails_with_lt_and_lte_timestamps(self): date_filter = 'lt:2016-01-01T00:00:00,lte:2016-01-02T00:00:00' self.assertFalse(self.controller._is_valid_date_filter(date_filter)) def test_validation_fails_with_bogus_timestamp(self): date_filter = 'bogus' self.assertFalse(self.controller._is_valid_date_filter(date_filter)) class WhenValidatingSortFilters(utils.BarbicanAPIBaseTestCase): def setUp(self): super(WhenValidatingSortFilters, self).setUp() self.controller = secrets.SecretsController() def 
test_validates_name_sorting(self): sorting = 'name' self.assertTrue(self.controller._is_valid_sorting(sorting)) def test_validation_fails_for_bogus_attribute(self): sorting = 'bogus' self.assertFalse(self.controller._is_valid_sorting(sorting)) def test_validation_fails_for_duplicate_keys(self): sorting = 'name,name:asc' self.assertFalse(self.controller._is_valid_sorting(sorting)) def test_validation_fails_for_too_many_colons(self): sorting = 'name:asc:foo' self.assertFalse(self.controller._is_valid_sorting(sorting)) # ----------------------- Helper Functions --------------------------- def create_secret(app, name=None, algorithm=None, bit_length=None, mode=None, expiration=None, payload=None, content_type=None, content_encoding=None, transport_key_id=None, transport_key_needed=None, expect_errors=False): # TODO(chellygel): Once test resources is split out, refactor this # and similar functions into a generalized helper module and reduce # duplication. request = { 'name': name, 'algorithm': algorithm, 'bit_length': bit_length, 'mode': mode, 'expiration': expiration, 'payload': payload, 'payload_content_type': content_type, 'payload_content_encoding': content_encoding, 'transport_key_id': transport_key_id, 'transport_key_needed': transport_key_needed } cleaned_request = {key: val for key, val in request.items() if val is not None} resp = app.post_json( '/secrets/', cleaned_request, expect_errors=expect_errors ) created_uuid = None if resp.status_int == 201: secret_ref = resp.json.get('secret_ref', '') _, created_uuid = os.path.split(secret_ref) return resp, created_uuid barbican-9.1.0.dev50/barbican/tests/api/controllers/test_orders.py0000664000175000017500000002341713616500636025356 0ustar sahidsahid00000000000000# Copyright (c) 2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
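# NOTE: the tests below exercise the /v1/orders controller. A key
# generation order, as built by the create_order() helper at the bottom
# of this module, has this general shape (an illustrative sketch based
# on generic_key_meta below, not an exact request):
#
#     POST /orders
#     {"type": "key",
#      "meta": {"name": "secretname", "algorithm": "AES",
#               "bit_length": 256, "mode": "cbc",
#               "payload_content_type": "application/octet-stream"}}
#
# A successful create returns 202 with an "order_ref" URL whose final
# path segment is the order's UUID.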
import os import uuid from barbican.model import models from barbican.model import repositories from barbican.tests import utils from oslo_utils import uuidutils order_repo = repositories.get_order_repository() project_repo = repositories.get_project_repository() ca_repo = repositories.get_ca_repository() project_ca_repo = repositories.get_project_ca_repository() container_repo = repositories.get_container_repository() generic_key_meta = { 'name': 'secretname', 'algorithm': 'AES', 'bit_length': 256, 'mode': 'cbc', 'payload_content_type': 'application/octet-stream' } class WhenCreatingOrdersUsingOrdersResource(utils.BarbicanAPIBaseTestCase): def test_can_create_a_new_order(self): resp, order_uuid = create_order( self.app, order_type='key', meta=generic_key_meta ) self.assertEqual(202, resp.status_int) # Make sure we get a valid uuid for the order uuid.UUID(order_uuid) order = order_repo.get(order_uuid, self.project_id) self.assertIsInstance(order, models.Order) def test_order_creation_should_allow_unknown_algorithm(self): meta = { 'bit_length': 128, 'algorithm': 'unknown' } resp, _ = create_order( self.app, order_type='key', meta=meta ) self.assertEqual(202, resp.status_int) def test_order_creation_should_fail_without_a_type(self): resp, _ = create_order( self.app, meta=generic_key_meta, expect_errors=True ) self.assertEqual(400, resp.status_int) def test_order_creation_should_fail_without_metadata(self): resp, _ = create_order( self.app, order_type='key', expect_errors=True ) self.assertEqual(400, resp.status_int) def test_order_create_should_fail_w_unsupported_payload_content_type(self): meta = { 'bit_length': 128, 'algorithm': 'aes', 'payload_content_type': 'something_unsupported' } resp, _ = create_order( self.app, order_type='key', meta=meta, expect_errors=True ) self.assertEqual(400, resp.status_int) def test_order_creation_should_fail_with_bogus_content(self): resp = self.app.post( '/orders/', 'random_stuff', headers={'Content-Type': 'application/json'}, expect_errors=True ) self.assertEqual(400, resp.status_int) def test_order_creation_should_fail_with_empty_dict(self): resp = self.app.post_json( '/orders/', {}, headers={'Content-Type': 'application/json'}, expect_errors=True ) self.assertEqual(400, resp.status_int) def test_order_creation_should_fail_without_content_type_header(self): resp = self.app.post( '/orders/', 'doesn\'t matter. 
headers are validated first', expect_errors=True, ) self.assertEqual(415, resp.status_int) class WhenGettingOrdersListUsingOrdersResource(utils.BarbicanAPIBaseTestCase): def test_can_get_a_list_of_orders(self): # Make sure we have at least one order created resp, order_uuid = create_order( self.app, order_type='key', meta=generic_key_meta ) self.assertEqual(202, resp.status_int) # Get the list of orders resp = self.app.get( '/orders/', headers={'Content-Type': 'application/json'} ) self.assertEqual(200, resp.status_int) self.assertIn('total', resp.json) self.assertGreater(len(resp.json.get('orders')), 0) def test_pagination_attributes_not_available_with_empty_order_list(self): params = {'name': 'no_orders_with_this_name'} resp = self.app.get( '/orders/', params ) self.assertEqual(200, resp.status_int) self.assertEqual(0, len(resp.json.get('orders'))) class WhenGettingOrDeletingOrders(utils.BarbicanAPIBaseTestCase): def test_can_get_order(self): # Make sure we have an order to retrieve create_resp, order_uuid = create_order( self.app, order_type='key', meta=generic_key_meta ) self.assertEqual(202, create_resp.status_int) # Retrieve the order get_resp = self.app.get('/orders/{0}/'.format(order_uuid)) self.assertEqual(200, get_resp.status_int) def test_can_delete_order(self): # Make sure we have an order to delete create_resp, order_uuid = create_order( self.app, order_type='key', meta=generic_key_meta ) self.assertEqual(202, create_resp.status_int) delete_resp = self.app.delete('/orders/{0}'.format(order_uuid)) self.assertEqual(204, delete_resp.status_int) def test_get_call_on_non_existent_order_should_give_404(self): bogus_uuid = uuidutils.generate_uuid() resp = self.app.get( '/orders/{0}'.format(bogus_uuid), expect_errors=True ) self.assertEqual(404, resp.status_int) def test_returns_404_on_get_with_bad_uuid(self): resp = self.app.get( '/orders/98c876d9-aaac-44e4-8ea8-441932962b05X', expect_errors=True ) self.assertEqual(404, resp.status_int) def test_delete_call_on_non_existent_order_should_give_404(self): bogus_uuid = uuidutils.generate_uuid() resp = self.app.delete( '/orders/{0}'.format(bogus_uuid), expect_errors=True ) self.assertEqual(404, resp.status_int) class WhenCreatingOrders(utils.BarbicanAPIBaseTestCase): def test_should_add_new_order(self): order_meta = { 'name': 'secretname', 'expiration': '2114-02-28T17:14:44.180394', 'algorithm': 'AES', 'bit_length': 256, 'mode': 'cbc', 'payload_content_type': 'application/octet-stream' } create_resp, order_uuid = create_order( self.app, order_type='key', meta=order_meta ) self.assertEqual(202, create_resp.status_int) order = order_repo.get(order_uuid, self.project_id) self.assertIsInstance(order, models.Order) self.assertEqual('key', order.type) self.assertEqual(order_meta, order.meta) def test_should_return_400_when_creating_with_empty_json(self): resp = self.app.post_json('/orders/', {}, expect_errors=True) self.assertEqual(400, resp.status_int) def test_should_return_415_when_creating_with_blank_body(self): resp = self.app.post('/orders/', '', expect_errors=True) self.assertEqual(415, resp.status_int) class WhenPerformingUnallowedOperations(utils.BarbicanAPIBaseTestCase): def test_should_not_allow_put_orders(self): resp = self.app.put_json('/orders/', expect_errors=True) self.assertEqual(405, resp.status_int) def test_should_not_allow_delete_orders(self): resp = self.app.delete('/orders/', expect_errors=True) self.assertEqual(405, resp.status_int) def test_should_not_allow_post_order_by_id(self): # Create generic order so we don't
get a 404 on POST resp, order_uuid = create_order( self.app, order_type='key', meta=generic_key_meta ) self.assertEqual(202, resp.status_int) resp = self.app.post_json( '/orders/{0}'.format(order_uuid), {}, expect_errors=True ) self.assertEqual(405, resp.status_int) # ----------------------- Helper Functions --------------------------- def create_order(app, order_type=None, meta=None, expect_errors=False): # TODO(jvrbanac): Once test resources is split out, refactor this # and similar functions into a generalized helper module and reduce # duplication. request = { 'type': order_type, 'meta': meta } cleaned_request = {key: val for key, val in request.items() if val is not None} resp = app.post_json( '/orders/', cleaned_request, expect_errors=expect_errors ) created_uuid = None if resp.status_int == 202: order_ref = resp.json.get('order_ref', '') _, created_uuid = os.path.split(order_ref) return resp, created_uuid def create_container(app, name=None, container_type=None, secret_refs=None, expect_errors=False, headers=None): request = { 'name': name, 'type': container_type, 'secret_refs': secret_refs if secret_refs else [] } cleaned_request = {key: val for key, val in request.items() if val is not None} resp = app.post_json( '/containers/', cleaned_request, expect_errors=expect_errors, headers=headers ) created_uuid = None if resp.status_int == 201: container_ref = resp.json.get('container_ref', '') _, created_uuid = os.path.split(container_ref) return resp, created_uuid barbican-9.1.0.dev50/barbican/tests/api/middleware/0000775000175000017500000000000013616500640022202 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/tests/api/middleware/test_simple.py0000664000175000017500000000161713616500636025116 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import unittest import mock from barbican.api.middleware import simple class WhenTestingSimpleMiddleware(unittest.TestCase): def setUp(self): self.app = mock.MagicMock() self.middle = simple.SimpleFilter(self.app) self.req = mock.MagicMock() def test_should_process_request(self): self.middle.process_request(self.req) barbican-9.1.0.dev50/barbican/tests/api/middleware/test_context.py0000664000175000017500000000420013616500636025300 0ustar sahidsahid00000000000000# Copyright (c) 2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
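# The context-middleware tests below share one mock.patch pattern: replace
# barbican.context.RequestContext for the duration of a request, then
# assert on the keyword arguments the middleware passed to it.  A
# self-contained sketch of that pattern (os.getcwd stands in here as an
# arbitrary patchable target):
import mock as _mock_sketch

with _mock_sketch.patch('os.getcwd') as _fake_getcwd:
    _fake_getcwd.return_value = '/stubbed'
    import os
    assert os.getcwd() == '/stubbed'
    _fake_getcwd.assert_called_once_with()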
import mock import oslotest.base as oslotest from barbican.api.middleware import context class TestUnauthenticatedContextMiddleware(oslotest.BaseTestCase): def setUp(self): super(TestUnauthenticatedContextMiddleware, self).setUp() self.app = mock.MagicMock() self.middleware = context.UnauthenticatedContextMiddleware(self.app) def test_role_defaults_to_admin(self): request = mock.MagicMock() request.headers = {'X-Project-Id': 'trace'} request.environ = {} with mock.patch('barbican.context.RequestContext') as rc: self.middleware.process_request(request) rc.assert_called_with( project_id='trace', is_admin=True, user=None, roles=['admin'], request_id=request.request_id, project_domain=None, domain=None, user_domain=None ) def test_role_used_from_header(self): request = mock.MagicMock() request.headers = {'X-Project-Id': 'trace', 'X-Roles': 'something'} request.environ = {} with mock.patch('barbican.context.RequestContext') as rc: self.middleware.process_request(request) rc.assert_called_with( project_id='trace', is_admin=False, user=None, roles=['something'], request_id=request.request_id, project_domain=None, domain=None, user_domain=None ) barbican-9.1.0.dev50/barbican/tests/api/middleware/__init__.py0000664000175000017500000000000013616500636024306 0ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/tests/tasks/0000775000175000017500000000000013616500640020441 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/tests/tasks/test_certificate_resources.py0000664000175000017500000012252713616500636026444 0ustar sahidsahid00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
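# The _assert_dict_equal helper below reports a mismatch by taking the
# symmetric difference of the two dicts' item sets.  A runnable sketch of
# that check:
_expected = {'foo': 1, 'bar': 2}
_actual = {'foo': 1, 'bar': 3}
_unmatched_items = set(_expected.items()) ^ set(_actual.items())
assert _unmatched_items == {('bar', 2), ('bar', 3)}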
import base64 import datetime from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.asymmetric import rsa from cryptography.hazmat.primitives import serialization import mock from OpenSSL import crypto from oslo_utils import encodeutils from barbican.common import exception as excep from barbican.common import hrefs from barbican.common import resources as res from barbican.model import models from barbican.model import repositories from barbican.plugin.interface import certificate_manager as cert_man from barbican.plugin.interface import secret_store from barbican.tasks import certificate_resources as cert_res from barbican.tasks import common from barbican.tests import database_utils from barbican.tests import utils container_repo = repositories.get_container_repository() secret_repo = repositories.get_secret_repository() ca_repo = repositories.get_ca_repository() project_ca_repo = repositories.get_project_ca_repository() preferred_ca_repo = repositories.get_preferred_ca_repository() project_repo = repositories.get_project_repository() order_repo = repositories.get_order_repository() class WhenPerformingPrivateOperations(utils.BaseTestCase, utils.MockModelRepositoryMixin): """Tests private methods within certificate_resources.py.""" def setUp(self): super(WhenPerformingPrivateOperations, self).setUp() self.order_plugin_meta_repo = mock.MagicMock() self.setup_order_plugin_meta_repository_mock( self.order_plugin_meta_repo) self.order_barbican_meta_repo = mock.MagicMock() self.setup_order_barbican_meta_repository_mock( self.order_barbican_meta_repo) def test_get_plugin_meta(self): class Value(object): def __init__(self, value): self.value = value class OrderModel(object): id = mock.ANY order_plugin_metadata = { "foo": Value(1), "bar": Value(2), } order_model = OrderModel() self.order_plugin_meta_repo.get_metadata_for_order.return_value = ( order_model.order_plugin_metadata ) result = cert_res._get_plugin_meta(order_model) self._assert_dict_equal(order_model.order_plugin_metadata, result) def test_get_plugin_meta_with_empty_dict(self): result = cert_res._get_plugin_meta(None) self._assert_dict_equal({}, result) def test_save_plugin_meta_w_mock_meta(self): # Test dict for plugin meta data. test_order_model = 'My order model' test_plugin_meta = {"foo": 1} cert_res._save_plugin_metadata( test_order_model, test_plugin_meta) self.order_plugin_meta_repo.save.assert_called_once_with( test_plugin_meta, test_order_model) def test_save_plugin_w_null_meta(self): test_order_model = 'My order model' # Test None for plugin meta data. cert_res._save_plugin_metadata( test_order_model, None) self.order_plugin_meta_repo.save.assert_called_once_with( {}, test_order_model) def test_get_barbican_meta_with_empty_dict(self): result = cert_res._get_barbican_meta(None) self._assert_dict_equal({}, result) def test_save_barbican_w_null_meta(self): test_order_model = 'My order model' # Test None for plugin meta data. 
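# (Mirroring test_save_plugin_w_null_meta above: _save_barbican_metadata
#  is expected to normalize a None meta argument to an empty dict --
#  roughly `barbican_meta = barbican_meta or {}` -- which is why the save
#  assertion below checks for {} rather than None.)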
cert_res._save_barbican_metadata( test_order_model, None) self.order_barbican_meta_repo.save.assert_called_once_with( {}, test_order_model) def _assert_dict_equal(self, expected, test): self.assertIsInstance(expected, dict) self.assertIsInstance(test, dict) if expected != test: if len(expected) != len(test): self.fail('Expected dict not same size as test dict') unmatched_items = set(expected.items()) ^ set(test.items()) if len(unmatched_items): self.fail('One or more items different ' 'between the expected and test dicts') class BaseCertificateRequestsTestCase(database_utils.RepositoryTestCase): """Base Certificate Case Test function """ def setUp(self): super(BaseCertificateRequestsTestCase, self).setUp() self.external_project_id = "56789" self.project = res.get_or_create_project(self.external_project_id) project_repo.save(self.project) self.barbican_meta_dto = mock.MagicMock() self.order_meta = {} self.plugin_meta = {} self.barbican_meta = {} self.result = cert_man.ResultDTO( cert_man.CertificateStatus.WAITING_FOR_CA ) self.result_follow_on = common.FollowOnProcessingStatusDTO() self.cert_plugin = mock.MagicMock() self.cert_plugin.issue_certificate_request.return_value = self.result self.cert_plugin.check_certificate_status.return_value = self.result self.store_plugin = mock.MagicMock() parsed_ca = { 'plugin_name': "cert_plugin", 'plugin_ca_id': "XXXX", 'name': "test ca", 'description': 'Test CA', 'ca_signing_certificate': 'ZZZZZ', 'intermediates': 'YYYYY' } self.ca = models.CertificateAuthority(parsed_ca) ca_repo.create_from(self.ca) self.ca_id = self.ca.id # second ca for testing parsed_ca = { 'plugin_name': "cert_plugin", 'plugin_ca_id': "XXXX2", 'name': "test ca2", 'description': 'Test CA2', 'ca_signing_certificate': 'ZZZZZ2', 'intermediates': 'YYYYY2' } self.ca2 = models.CertificateAuthority(parsed_ca) ca_repo.create_from(self.ca2) self.ca_id2 = self.ca2.id # data for preferred CA and global preferred CA tests # add those to the repo in those tests self.pref_ca = models.PreferredCertificateAuthority( self.project.id, self.ca_id) self.global_pref_ca = models.PreferredCertificateAuthority( self.project.id, self.ca_id) # data for stored key cases self.private_key = models.Secret() self.private_key.secret_type = 'PRIVATE' self.private_key.project_id = self.project.id secret_repo.create_from(self.private_key) self.public_key = models.Secret() self.public_key.secret_type = 'PUBLIC' self.public_key.project_id = self.project.id secret_repo.create_from(self.public_key) self.passphrase = models.Secret() self.passphrase.secret_type = 'PASSPHRASE' self.passphrase.project_id = self.project.id secret_repo.create_from(self.passphrase) self.private_key_value = None self.public_key_value = "public_key" self.passphrase_value = None self.parsed_container_with_passphrase = { 'name': 'container name', 'type': 'rsa', 'secret_refs': [ {'name': 'private_key', 'secret_ref': 'https://localhost/secrets/' + self.private_key.id}, {'name': 'public_key', 'secret_ref': 'https://localhost/secrets/' + self.public_key.id}, {'name': 'private_key_passphrase', 'secret_ref': 'https://localhost/secrets/' + self.passphrase.id} ] } self.parsed_container = { 'name': 'container name', 'type': 'rsa', 'secret_refs': [ {'name': 'private_key', 'secret_ref': 'https://localhost/secrets/' + self.private_key.id}, {'name': 'public_key', 'secret_ref': 'https://localhost/secrets/' + self.public_key.id} ] } self.container_with_passphrase = models.Container( self.parsed_container_with_passphrase) self.container_with_passphrase.project_id 
= self.project.id container_repo.create_from(self.container_with_passphrase) self.container = models.Container(self.parsed_container) self.container.project_id = self.project.id container_repo.create_from(self.container) repositories.commit() self.stored_key_meta = { cert_man.REQUEST_TYPE: cert_man.CertificateRequestType.STORED_KEY_REQUEST, "container_ref": "https://localhost/containers/" + self.container.id, "subject_dn": "cn=host.example.com,ou=dev,ou=us,o=example.com" } self.order = models.Order() self.order.meta = self.order_meta self.order.project_id = self.project.id self.order.order_barbican_meta = self.barbican_meta self.order.type = 'certificate' order_repo.create_from(self.order) self._config_cert_plugin() self._config_store_plugin() self._config_cert_event_plugin() self._config_save_meta_plugin() self._config_get_meta_plugin() self._config_save_barbican_meta_plugin() self._config_get_barbican_meta_plugin() self._config_barbican_meta_dto() def tearDown(self): super(BaseCertificateRequestsTestCase, self).tearDown() self.cert_plugin_patcher.stop() self.save_plugin_meta_patcher.stop() self.get_plugin_meta_patcher.stop() self.cert_event_plugin_patcher.stop() self.barbican_meta_dto_patcher.stop() self.save_barbican_barbican_meta_patcher.stop() self.get_barbican_plugin_meta_patcher.stop() self.store_plugin_patcher.stop() def stored_key_side_effect(self, *args, **kwargs): if args[0] == 'PRIVATE': return secret_store.SecretDTO( secret_store.SecretType.PRIVATE, self.private_key_value, None, 'application/octet-string', None) elif args[0] == 'PASSPHRASE': return secret_store.SecretDTO( secret_store.SecretType.PASSPHRASE, self.passphrase_value, None, 'application/octet-string', None) elif args[0] == 'PUBLIC': return secret_store.SecretDTO( secret_store.SecretType.PUBLIC, self.public_key_value, None, 'application/octet-string', None) else: return None def _test_should_return_waiting_for_ca(self, method_to_test): self.result.status = cert_man.CertificateStatus.WAITING_FOR_CA method_to_test( self.order, self.project, self.result_follow_on) self.assertEqual( common.RetryTasks.INVOKE_CERT_STATUS_CHECK_TASK, self.result_follow_on.retry_task) self.assertEqual( cert_res.ORDER_STATUS_REQUEST_PENDING.id, self.result_follow_on.status) self.assertEqual( cert_res.ORDER_STATUS_REQUEST_PENDING.message, self.result_follow_on.status_message) def _test_should_return_certificate_generated(self, method_to_test): self.result.status = cert_man.CertificateStatus.CERTIFICATE_GENERATED method_to_test( self.order, self.project, self.result_follow_on) self.assertEqual( common.RetryTasks.NO_ACTION_REQUIRED, self.result_follow_on.retry_task) self.assertEqual( cert_res.ORDER_STATUS_CERT_GENERATED.id, self.result_follow_on.status) self.assertEqual( cert_res.ORDER_STATUS_CERT_GENERATED.message, self.result_follow_on.status_message) def _test_should_raise_client_data_issue_seen(self, method_to_test): self.result.status = cert_man.CertificateStatus.CLIENT_DATA_ISSUE_SEEN self.assertRaises( cert_man.CertificateStatusClientDataIssue, method_to_test, self.order, self.project, self.result_follow_on ) def _test_should_raise_status_not_supported(self, method_to_test): self.result.status = "Legend of Link" self.assertRaises( cert_man.CertificateStatusNotSupported, method_to_test, self.order, self.project, self.result_follow_on ) def _config_cert_plugin(self): """Mock the certificate plugin manager.""" cert_plugin_config = { 'return_value.get_plugin.return_value': self.cert_plugin, 'return_value.get_plugin_by_name.return_value': 
self.cert_plugin, 'return_value.get_plugin_by_ca_id.return_value': self.cert_plugin } self.cert_plugin_patcher = mock.patch( 'barbican.plugin.interface.certificate_manager' '.CertificatePluginManager', **cert_plugin_config ) self.cert_plugin_patcher.start() def _config_store_plugin(self): """Mock the secret store plugin manager.""" store_plugin_config = { 'return_value.get_plugin_retrieve_delete.return_value': self.store_plugin } self.store_plugin_patcher = mock.patch( 'barbican.plugin.interface.secret_store' '.get_manager', **store_plugin_config ) self.store_plugin_patcher.start() def _config_cert_event_plugin(self): """Mock the certificate event plugin manager.""" self.cert_event_plugin_patcher = mock.patch( 'barbican.plugin.interface.certificate_manager' '._EVENT_PLUGIN_MANAGER' ) self.cert_event_plugin_patcher.start() def _config_save_meta_plugin(self): """Mock the save plugin meta function.""" self.save_plugin_meta_patcher = mock.patch( 'barbican.tasks.certificate_resources._save_plugin_metadata' ) self.mock_save_plugin = self.save_plugin_meta_patcher.start() def _config_get_meta_plugin(self): """Mock the get plugin meta function.""" get_plugin_config = {'return_value': self.plugin_meta} self.get_plugin_meta_patcher = mock.patch( 'barbican.tasks.certificate_resources._get_plugin_meta', **get_plugin_config ) self.get_plugin_meta_patcher.start() def _config_save_barbican_meta_plugin(self): """Mock the save barbican plugin meta function.""" self.save_barbican_barbican_meta_patcher = mock.patch( 'barbican.tasks.certificate_resources._save_barbican_metadata' ) self.mock_barbican_save_plugin = ( self.save_barbican_barbican_meta_patcher.start() ) def _config_get_barbican_meta_plugin(self): """Mock the get barbican plugin meta function.""" get_barbican_plugin_config = {'return_value': self.barbican_meta} self.get_barbican_plugin_meta_patcher = mock.patch( 'barbican.tasks.certificate_resources._get_barbican_meta', **get_barbican_plugin_config ) self.get_barbican_plugin_meta_patcher.start() def _config_barbican_meta_dto(self): """Mock the BarbicanMetaDTO.""" get_plugin_config = {'return_value': self.barbican_meta_dto} self.barbican_meta_dto_patcher = mock.patch( 'barbican.plugin.interface.certificate_manager' '.BarbicanMetaDTO', **get_plugin_config ) self.barbican_meta_dto_patcher.start() class WhenIssuingCertificateRequests(BaseCertificateRequestsTestCase): """Tests the 'issue_certificate_request()' function.""" def tearDown(self): super(WhenIssuingCertificateRequests, self).tearDown() def test_should_return_waiting_for_ca(self): self._test_should_return_waiting_for_ca( cert_res.issue_certificate_request) self._verify_issue_certificate_plugins_called() def test_should_return_waiting_for_ca_as_retry(self): # For a retry, the plugin-name to look up would have already been # saved into the barbican metadata for the order, so just make sure # we can retrieve it. 
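# (This retrieval works because _config_get_barbican_meta_plugin() patches
#  _get_barbican_meta with return_value=self.barbican_meta -- the very
#  same dict object -- so the in-place update below is immediately visible
#  to the code under test.  A sketch of the mechanism, with a placeholder
#  target path:
#      meta = {}
#      with mock.patch('pkg._get_barbican_meta', return_value=meta):
#          meta['plugin_name'] = 'foo-plugin'
#          # anything calling pkg._get_barbican_meta(...) now sees it
#  )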
self.barbican_meta.update({'plugin_name': 'foo-plugin'}) self._test_should_return_waiting_for_ca( cert_res.issue_certificate_request) self._verify_issue_certificate_plugins_called() def test_should_return_certificate_generated(self): self._test_should_return_certificate_generated( cert_res.issue_certificate_request) self._verify_issue_certificate_plugins_called() def test_should_raise_client_data_issue_seen(self): self._test_should_raise_client_data_issue_seen( cert_res.issue_certificate_request) def _do_pyopenssl_stored_key_request(self): self.order_meta.update(self.stored_key_meta) pkey = crypto.PKey() pkey.generate_key(crypto.TYPE_RSA, 2048) key_pem = crypto.dump_privatekey( crypto.FILETYPE_PEM, pkey) self.private_key_value = base64.b64encode(key_pem) self.public_key_value = "public_key" self.passphrase_value = None self.store_plugin.get_secret.side_effect = self.stored_key_side_effect self._test_should_return_waiting_for_ca( cert_res.issue_certificate_request) def test_should_return_for_pyopenssl_stored_key(self): self._do_pyopenssl_stored_key_request() self._verify_issue_certificate_plugins_called() self.assertIsNotNone( self.order.order_barbican_meta.get('generated_csr')) # TODO(alee-3) Add tests to validate the request based on the validator # code that dave-mccowan is adding. def test_should_return_for_openssl_stored_key_ca_id_passed_in(self): self.stored_key_meta['ca_id'] = self.ca_id2 self._do_pyopenssl_stored_key_request() self._verify_issue_certificate_plugins_called() self.assertIsNotNone( self.order.order_barbican_meta['generated_csr']) def test_should_return_for_openssl_stored_key_pref_ca_defined(self): preferred_ca_repo.create_from(self.pref_ca) self._do_pyopenssl_stored_key_request() self._verify_issue_certificate_plugins_called() self.assertIsNotNone( self.order.order_barbican_meta['generated_csr']) def test_should_return_for_openssl_stored_key_global_ca_defined(self): preferred_ca_repo.create_from(self.global_pref_ca) self._do_pyopenssl_stored_key_request() self._verify_issue_certificate_plugins_called() self.assertIsNotNone( self.order.order_barbican_meta['generated_csr']) def test_should_return_for_pyopenssl_stored_key_with_passphrase(self): self.order_meta.update(self.stored_key_meta) self.order_meta['container_ref'] = ( "https://localhost/containers/" + self.container_with_passphrase.id ) passphrase = "my secret passphrase" pkey = crypto.PKey() pkey.generate_key(crypto.TYPE_RSA, 2048) key_pem = crypto.dump_privatekey( crypto.FILETYPE_PEM, pkey, passphrase=passphrase.encode('utf-8') ) self.private_key_value = base64.b64encode(key_pem) self.public_key_value = "public_key" self.passphrase_value = base64.b64encode(passphrase.encode('utf-8')) self.store_plugin.get_secret.side_effect = self.stored_key_side_effect self._test_should_return_waiting_for_ca( cert_res.issue_certificate_request) self._verify_issue_certificate_plugins_called() self.assertIsNotNone( self.order.order_barbican_meta['generated_csr']) # TODO(alee-3) Add tests to validate the request based on the validator # code that dave-mccowan is adding. 
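# (The stored-key tests above hand the plugin a base64-encoded PEM blob.
#  The encoding round trip, runnable on its own where pyOpenSSL is
#  available:
#      import base64
#      from OpenSSL import crypto
#      pkey = crypto.PKey()
#      pkey.generate_key(crypto.TYPE_RSA, 2048)
#      pem = crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey)
#      payload = base64.b64encode(pem)
#      assert base64.b64decode(payload) == pem
#  )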
def test_should_return_for_pycrypto_stored_key_with_passphrase(self): self.order_meta.update(self.stored_key_meta) self.order_meta['container_ref'] = ( "https://localhost/containers/" + self.container_with_passphrase.id ) passphrase = "my secret passphrase" private_key = rsa.generate_private_key( public_exponent=65537, key_size=2048, backend=default_backend() ) public_key = private_key.public_key() private_key_pem = private_key.private_bytes( encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.PKCS8, encryption_algorithm=serialization. BestAvailableEncryption(encodeutils.safe_encode(passphrase)) ) self.private_key_value = base64.b64encode(private_key_pem) public_key_pem = public_key.public_bytes( encoding=serialization.Encoding.PEM, format=serialization.PublicFormat.PKCS1 ) self.public_key_value = base64.b64encode(public_key_pem) self.passphrase_value = base64.b64encode(passphrase.encode('utf-8')) self.store_plugin.get_secret.side_effect = self.stored_key_side_effect self._test_should_return_waiting_for_ca( cert_res.issue_certificate_request) self._verify_issue_certificate_plugins_called() self.assertIsNotNone( self.order.order_barbican_meta['generated_csr']) # TODO(alee-3) Add tests to validate the request based on the validator # code that dave-mccowan is adding. def test_should_return_for_pycrypto_stored_key_without_passphrase(self): self.order_meta.update(self.stored_key_meta) private_key = rsa.generate_private_key( public_exponent=65537, key_size=2048, backend=default_backend() ) public_key = private_key.public_key() private_key_pem = private_key.private_bytes( encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.PKCS8, encryption_algorithm=serialization.NoEncryption() ) self.private_key_value = base64.b64encode(private_key_pem) public_key_pem = public_key.public_bytes( encoding=serialization.Encoding.PEM, format=serialization.PublicFormat.PKCS1 ) self.public_key_value = base64.b64encode(public_key_pem) self.store_plugin.get_secret.side_effect = self.stored_key_side_effect self._test_should_return_waiting_for_ca( cert_res.issue_certificate_request) self._verify_issue_certificate_plugins_called() self.assertIsNotNone( self.order.order_barbican_meta['generated_csr']) # TODO(alee-3) Add tests to validate the request based on the validator # code that dave-mccowan is adding. 
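# (A passphrase-protected PEM like the one built above can be loaded back
#  with the same cryptography library -- a sketch reusing names from the
#  test above:
#      from cryptography.hazmat.primitives.serialization import (
#          load_pem_private_key)
#      key = load_pem_private_key(private_key_pem,
#                                 password=passphrase.encode('utf-8'),
#                                 backend=default_backend())
#  )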
def test_should_raise_for_pycrypto_stored_key_no_container(self): self.order_meta.update(self.stored_key_meta) private_key = rsa.generate_private_key( public_exponent=65537, key_size=2048, backend=default_backend() ) public_key = private_key.public_key() private_key_pem = private_key.private_bytes( encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.PKCS8, encryption_algorithm=serialization.NoEncryption() ) self.private_key_value = base64.b64encode(private_key_pem) public_key_pem = public_key.public_bytes( encoding=serialization.Encoding.PEM, format=serialization.PublicFormat.PKCS1 ) self.public_key_value = base64.b64encode(public_key_pem) self.store_plugin.get_secret.side_effect = self.stored_key_side_effect self.result.status = cert_man.CertificateStatus.WAITING_FOR_CA container_repo.delete_project_entities(self.project.id) self.assertRaises(excep.StoredKeyContainerNotFound, cert_res.issue_certificate_request, self.order, self.project, self.result_follow_on) def test_should_raise_for_pycrypto_stored_key_no_private_key(self): self.order_meta.update(self.stored_key_meta) private_key = rsa.generate_private_key( public_exponent=65537, key_size=2048, backend=default_backend() ) public_key = private_key.public_key() private_key_pem = private_key.private_bytes( encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.PKCS8, encryption_algorithm=serialization.NoEncryption() ) self.private_key_value = base64.b64encode(private_key_pem) public_key_pem = public_key.public_bytes( encoding=serialization.Encoding.PEM, format=serialization.PublicFormat.PKCS1 ) self.public_key_value = base64.b64encode(public_key_pem) self.store_plugin.get_secret.side_effect = self.stored_key_side_effect self.result.status = cert_man.CertificateStatus.WAITING_FOR_CA secret_repo.delete_entity_by_id( self.private_key.id, self.external_project_id) self.assertRaises(excep.StoredKeyPrivateKeyNotFound, cert_res.issue_certificate_request, self.order, self.project, self.result_follow_on) def test_should_return_for_pyopenssl_stored_key_with_extensions(self): self.order_meta.update(self.stored_key_meta) pkey = crypto.PKey() pkey.generate_key(crypto.TYPE_RSA, 2048) self.private_key_value = base64.b64encode(crypto.dump_privatekey( crypto.FILETYPE_PEM, pkey)) self.store_plugin.get_secret.side_effect = self.stored_key_side_effect self.order_meta['extensions'] = 'my ASN.1 extensions structure here' # TODO(alee-3) Add real extensions data here self.result.status = cert_man.CertificateStatus.WAITING_FOR_CA cert_res.issue_certificate_request(self.order, self.project, self.result_follow_on) self._verify_issue_certificate_plugins_called() self.assertIsNotNone(self.order.order_barbican_meta['generated_csr']) # TODO(alee-3) Add tests to validate the request based on the validator # code that dave-mccowan is adding. 
# TODO(alee-3) Add tests to validate the extensions in the request def test_should_raise_invalid_operation_seen(self): self.result.status = cert_man.CertificateStatus.INVALID_OPERATION self.assertRaises( cert_man.CertificateStatusInvalidOperation, cert_res.issue_certificate_request, self.order, self.project, self.result_follow_on ) def test_should_return_ca_unavailable_for_request(self): retry_msec = 123 status_msg = 'Test status' self.result.status = ( cert_man.CertificateStatus.CA_UNAVAILABLE_FOR_REQUEST) self.result.retry_msec = retry_msec self.result.status_message = status_msg order_ref = hrefs.convert_order_to_href(self.order.id) cert_res.issue_certificate_request(self.order, self.project, self.result_follow_on) self._verify_issue_certificate_plugins_called() epm = self.cert_event_plugin_patcher.target._EVENT_PLUGIN_MANAGER epm.notify_ca_is_unavailable.assert_called_once_with( self.project.id, order_ref, status_msg, retry_msec ) self._verify_issue_certificate_plugins_called() self.assertEqual( common.RetryTasks.INVOKE_SAME_TASK, self.result_follow_on.retry_task) self.assertEqual( cert_res.ORDER_STATUS_CA_UNAVAIL_FOR_ISSUE.id, self.result_follow_on.status) self.assertEqual( cert_res.ORDER_STATUS_CA_UNAVAIL_FOR_ISSUE.message, self.result_follow_on.status_message) def test_should_raise_status_not_supported(self): self._test_should_raise_status_not_supported( cert_res.issue_certificate_request) def _verify_issue_certificate_plugins_called(self): self.cert_plugin.issue_certificate_request.assert_called_once_with( self.order.id, self.order_meta, self.plugin_meta, self.barbican_meta_dto ) self.mock_save_plugin.assert_called_once_with( self.order, self.plugin_meta ) self.mock_barbican_save_plugin.assert_called_once_with( self.order, self.barbican_meta ) class WhenCheckingCertificateRequests(BaseCertificateRequestsTestCase): """Tests the 'check_certificate_request()' function.""" def setUp(self): super(WhenCheckingCertificateRequests, self).setUp() def tearDown(self): super(WhenCheckingCertificateRequests, self).tearDown() def test_should_return_waiting_for_ca(self): self._test_should_return_waiting_for_ca( cert_res.check_certificate_request) self._verify_check_certificate_plugins_called() def test_should_return_certificate_generated(self): self._test_should_return_certificate_generated( cert_res.check_certificate_request) self._verify_check_certificate_plugins_called() def test_should_raise_client_data_issue_seen(self): self._test_should_raise_client_data_issue_seen( cert_res.check_certificate_request) def test_should_raise_status_not_supported(self): self._test_should_raise_status_not_supported( cert_res.check_certificate_request) def test_should_return_ca_unavailable_for_request(self): retry_msec = 123 status_msg = 'Test status' self.result.status = ( cert_man.CertificateStatus.CA_UNAVAILABLE_FOR_REQUEST) self.result.retry_msec = retry_msec self.result.status_message = status_msg order_ref = hrefs.convert_order_to_href(self.order.id) cert_res.check_certificate_request(self.order, self.project, self.result_follow_on) self._verify_check_certificate_plugins_called() epm = self.cert_event_plugin_patcher.target._EVENT_PLUGIN_MANAGER epm.notify_ca_is_unavailable.assert_called_once_with( self.project.id, order_ref, status_msg, retry_msec ) self.assertEqual( common.RetryTasks.INVOKE_SAME_TASK, self.result_follow_on.retry_task) self.assertEqual( cert_res.ORDER_STATUS_CA_UNAVAIL_FOR_CHECK.id, self.result_follow_on.status) self.assertEqual( cert_res.ORDER_STATUS_CA_UNAVAIL_FOR_CHECK.message, 
self.result_follow_on.status_message) def _do_pyopenssl_stored_key_request(self): self.order_meta.update(self.stored_key_meta) pkey = crypto.PKey() pkey.generate_key(crypto.TYPE_RSA, 2048) key_pem = crypto.dump_privatekey( crypto.FILETYPE_PEM, pkey) self.private_key_value = base64.b64encode(key_pem) self.public_key_value = "public_key" self.passphrase_value = None self.store_plugin.get_secret.side_effect = self.stored_key_side_effect self._test_should_return_waiting_for_ca( cert_res.issue_certificate_request) self._test_should_return_certificate_generated( cert_res.check_certificate_request) def test_should_return_for_pyopenssl_stored_key(self): self._do_pyopenssl_stored_key_request() self._verify_check_certificate_plugins_called() self.assertIsNotNone( self.order.order_barbican_meta.get('generated_csr')) def _verify_check_certificate_plugins_called(self): self.cert_plugin.check_certificate_status.assert_called_once_with( self.order.id, self.order_meta, self.plugin_meta, self.barbican_meta_dto ) self.mock_save_plugin.assert_called_with( self.order, self.plugin_meta ) class WhenCreatingSubordinateCAs(database_utils.RepositoryTestCase): """Tests the 'create_subordinate_ca()' function.""" def setUp(self): super(WhenCreatingSubordinateCAs, self).setUp() self.project = res.get_or_create_project('12345') self.project2 = res.get_or_create_project('56789') self.subject_name = "cn=subca1 signing certificate, o=example.com" self.creator_id = "user12345" self.name = "Subordinate CA #1" self.description = "This is a test subordinate CA" self.plugin_name = "dogtag_plugin" # create parent ca expiration = (datetime.datetime.utcnow() + datetime.timedelta(minutes=10)) parsed_ca = {'plugin_name': self.plugin_name, 'plugin_ca_id': 'ca_master', 'expiration': expiration.isoformat(), 'name': 'Dogtag CA', 'description': 'Master CA for Dogtag plugin', 'ca_signing_certificate': 'XXXXX', 'intermediates': 'YYYYY'} self.parent_ca = models.CertificateAuthority(parsed_ca) ca_repo.create_from(self.parent_ca) self.parent_ca_ref = 'https://localhost:6311/cas/' + self.parent_ca.id self.new_ca_dict = { 'plugin_ca_id': 'ca_subordinate', 'expiration': expiration.isoformat(), 'name': 'Dogtag Subordinate CA', 'description': 'Subordinate CA for Dogtag plugin', 'ca_signing_certificate': 'XXXXX', 'intermediates': 'YYYYY', } # mock plugin and calls to plugin self.cert_plugin = mock.MagicMock() self.cert_plugin.supports_create_ca.return_value = True self.cert_plugin.create_ca.return_value = self.new_ca_dict self._config_cert_plugin() def tearDown(self): super(WhenCreatingSubordinateCAs, self).tearDown() self.cert_plugin_patcher.stop() def test_should_create_subordinate_ca(self): subca = cert_res.create_subordinate_ca( project_model=self.project, name=self.name, description=self.description, subject_dn=self.subject_name, parent_ca_ref=self.parent_ca_ref, creator_id=self.creator_id ) self.assertIsInstance(subca, models.CertificateAuthority) self.assertEqual(self.project.id, subca.project_id) self.assertEqual(self.creator_id, subca.creator_id) self.assertEqual(self.plugin_name, subca.plugin_name) def test_should_raise_invalid_parent_ca(self): self.parent_ca_ref = 'https://localhost:6311/cas/' + "BAD-CA-REF" self.assertRaises( excep.InvalidParentCA, cert_res.create_subordinate_ca, project_model=self.project, name=self.name, description=self.description, subject_dn=self.subject_name, parent_ca_ref=self.parent_ca_ref, creator_id=self.creator_id ) def test_should_raise_unauthorized_parent_ca(self): subca = cert_res.create_subordinate_ca( 
project_model=self.project2, name=self.name, description=self.description, subject_dn=self.subject_name, parent_ca_ref=self.parent_ca_ref, creator_id=self.creator_id ) subca_ref = hrefs.convert_certificate_authority_to_href(subca.id) self.assertRaises( excep.UnauthorizedSubCA, cert_res.create_subordinate_ca, project_model=self.project, name=self.name, description=self.description, subject_dn=self.subject_name, parent_ca_ref=subca_ref, creator_id=self.creator_id) def test_should_raise_subcas_not_supported(self): self.cert_plugin.supports_create_ca.return_value = False self.assertRaises( excep.SubCAsNotSupported, cert_res.create_subordinate_ca, project_model=self.project, name=self.name, description=self.description, subject_dn=self.subject_name, parent_ca_ref=self.parent_ca_ref, creator_id=self.creator_id ) def test_should_raise_subcas_not_created(self): self.cert_plugin.create_ca.return_value = None self.assertRaises( excep.SubCANotCreated, cert_res.create_subordinate_ca, project_model=self.project, name=self.name, description=self.description, subject_dn=self.subject_name, parent_ca_ref=self.parent_ca_ref, creator_id=self.creator_id ) def test_should_delete_subca(self): subca = cert_res.create_subordinate_ca( project_model=self.project, name=self.name, description=self.description, subject_dn=self.subject_name, parent_ca_ref=self.parent_ca_ref, creator_id=self.creator_id ) self.assertIsInstance(subca, models.CertificateAuthority) cert_res.delete_subordinate_ca(self.project.external_id, subca) self.cert_plugin.delete_ca.assert_called_once_with(subca.plugin_ca_id) def test_should_delete_subca_and_all_related_db_entities(self): subca = cert_res.create_subordinate_ca( project_model=self.project, name=self.name, description=self.description, subject_dn=self.subject_name, parent_ca_ref=self.parent_ca_ref, creator_id=self.creator_id ) project_ca = models.ProjectCertificateAuthority( self.project.id, subca.id ) project_ca_repo.create_from(project_ca) preferred_ca = models.PreferredCertificateAuthority( self.project.id, subca.id) preferred_ca_repo.create_from(preferred_ca) cert_res.delete_subordinate_ca(self.project.external_id, subca) self.cert_plugin.delete_ca.assert_called_once_with(subca.plugin_ca_id) def test_should_raise_when_delete_pref_subca_with_other_project_ca(self): subca = cert_res.create_subordinate_ca( project_model=self.project, name=self.name, description=self.description, subject_dn=self.subject_name, parent_ca_ref=self.parent_ca_ref, creator_id=self.creator_id ) project_ca = models.ProjectCertificateAuthority( self.project.id, subca.id ) project_ca_repo.create_from(project_ca) preferred_ca = models.PreferredCertificateAuthority( self.project.id, subca.id) preferred_ca_repo.create_from(preferred_ca) subca2 = cert_res.create_subordinate_ca( project_model=self.project, name=self.name, description=self.description, subject_dn=self.subject_name, parent_ca_ref=self.parent_ca_ref, creator_id=self.creator_id ) project_ca2 = models.ProjectCertificateAuthority( self.project.id, subca2.id ) project_ca_repo.create_from(project_ca2) self.assertRaises( excep.CannotDeletePreferredCA, cert_res.delete_subordinate_ca, self.project.external_id, subca ) def test_should_raise_cannot_delete_base_ca(self): self.assertRaises( excep.CannotDeleteBaseCA, cert_res.delete_subordinate_ca, self.project.external_id, self.parent_ca ) def test_should_raise_unauthorized_subca_delete(self): subca = cert_res.create_subordinate_ca( project_model=self.project, name=self.name, description=self.description, 
subject_dn=self.subject_name, parent_ca_ref=self.parent_ca_ref, creator_id=self.creator_id ) self.assertRaises( excep.UnauthorizedSubCA, cert_res.delete_subordinate_ca, self.project2.external_id, subca ) def _config_cert_plugin(self): """Mock the certificate plugin manager.""" cert_plugin_config = { 'return_value.get_plugin.return_value': self.cert_plugin, 'return_value.get_plugin_by_name.return_value': self.cert_plugin, 'return_value.get_plugin_by_ca_id.return_value': self.cert_plugin } self.cert_plugin_patcher = mock.patch( 'barbican.plugin.interface.certificate_manager' '.CertificatePluginManager', **cert_plugin_config ) self.cert_plugin_patcher.start() barbican-9.1.0.dev50/barbican/tests/tasks/test_resources.py0000664000175000017500000004651413616500636024103 0ustar sahidsahid00000000000000# Copyright (c) 2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import mock from oslo_utils import timeutils import six from barbican import i18n as u from barbican.model import models from barbican.tasks import common from barbican.tasks import resources from barbican.tests import utils class BaseOrderTestCase(utils.BaseTestCase, utils.MockModelRepositoryMixin): def setUp(self): super(BaseOrderTestCase, self).setUp() self.requestor = 'requestor1234' self.order = models.Order() self.order.id = "id1" self.order.requestor = self.requestor self.order.type = "key" self.meta = {'name': 'name', 'payload_content_type': 'application/octet-stream', 'algorithm': 'AES', 'bit_length': 256, 'expiration': timeutils.utcnow(), 'mode': 'CBC'} self.order.meta = self.meta self.external_project_id = 'keystone1234' self.project_id = 'projectid1234' self.project = models.Project() self.project.id = self.project_id self.project.external_id = self.external_project_id self.project_repo = mock.MagicMock() self.project_repo.get.return_value = self.project self.setup_project_repository_mock(self.project_repo) self.order.status = models.States.PENDING self.order.id = 'orderid1234' self.order.project_id = self.project_id self.order_repo = mock.MagicMock() self.order_repo.get.return_value = self.order self.setup_order_repository_mock(self.order_repo) self.setup_order_plugin_meta_repository_mock() self.setup_order_barbican_meta_repository_mock() self.secret = models.Secret() self.secret_repo = mock.MagicMock() self.secret_repo.create_from.return_value = None self.setup_secret_repository_mock(self.secret_repo) self.datum_repo = mock.MagicMock() self.datum_repo.create_from.return_value = None self.setup_encrypted_datum_repository_mock(self.datum_repo) self.setup_kek_datum_repository_mock() self.setup_secret_meta_repository_mock() self.container_repo = mock.MagicMock() self.container_repo.create_from.return_value = None self.setup_container_repository_mock(self.container_repo) self.container_secret_repo = mock.MagicMock() self.container_secret_repo.create_from.return_value = None self.setup_container_secret_repository_mock(self.container_secret_repo) self.container = models.Container() class 
WhenUsingOrderTaskHelper(BaseOrderTestCase): def setUp(self): super(WhenUsingOrderTaskHelper, self).setUp() self.result = common.FollowOnProcessingStatusDTO() self.helper = resources._OrderTaskHelper() def test_should_retrieve_entity(self): order_model = self.helper.retrieve_entity( self.order.id, self.external_project_id) self.assertEqual(self.order.id, order_model.id) self.order_repo.get.assert_called_once_with( entity_id=self.order.id, external_project_id=self.external_project_id) def test_should_handle_error(self): self.helper.handle_error(self.order, 'status_code', 'reason', ValueError()) self.assertEqual(models.States.ERROR, self.order.status) self.assertEqual('status_code', self.order.error_status_code) self.assertEqual('reason', self.order.error_reason) self.order_repo.save.assert_called_once_with(self.order) def test_should_handle_success_no_result(self): self.helper.handle_success(self.order, None) self.assertEqual(models.States.ACTIVE, self.order.status) self.assertIsNone(self.order.sub_status) self.assertIsNone(self.order.sub_status_message) self.order_repo.save.assert_called_once_with(self.order) def test_should_handle_success_result_no_follow_on_needed(self): self.helper.handle_success(self.order, self.result) self.assertEqual(models.States.ACTIVE, self.order.status) self.assertEqual('Unknown', self.order.sub_status) self.assertEqual('Unknown', self.order.sub_status_message) self.order_repo.save.assert_called_once_with(self.order) def test_should_handle_success_result_follow_on_needed(self): self.result.retry_task = common.RetryTasks.INVOKE_SAME_TASK self.result.status = 'status' self.result.status_message = 'status_message' self.helper.handle_success(self.order, self.result) self.assertEqual(models.States.PENDING, self.order.status) self.assertEqual('status', self.order.sub_status) self.assertEqual('status_message', self.order.sub_status_message) self.order_repo.save.assert_called_once_with(self.order) def test_should_handle_success_result_large_statuses_clipped(self): sub_status = 'z' * (models.SUB_STATUS_LENGTH + 1) sub_status_message = 'z' * (models.SUB_STATUS_MESSAGE_LENGTH + 1) self.result.status = sub_status self.result.status_message = sub_status_message self.helper.handle_success(self.order, self.result) self.assertEqual(sub_status[:-1], self.order.sub_status) self.assertEqual( sub_status_message[:-1], self.order.sub_status_message) self.order_repo.save.assert_called_once_with(self.order) class WhenBeginningKeyTypeOrder(BaseOrderTestCase): def setUp(self): super(WhenBeginningKeyTypeOrder, self).setUp() self.resource = resources.BeginTypeOrder() @mock.patch('barbican.plugin.resources.generate_secret') def test_should_process_key_order(self, mock_generate_secret): mock_generate_secret.return_value = self.secret self.resource.process(self.order.id, self.external_project_id) self.order_repo.get.assert_called_once_with( entity_id=self.order.id, external_project_id=self.external_project_id) self.assertEqual(self.order.status, models.States.ACTIVE) secret_info = self.order.to_dict_fields()['meta'] mock_generate_secret.assert_called_once_with( secret_info, secret_info.get('payload_content_type', 'application/octet-stream'), self.project ) def test_should_fail_during_retrieval(self): # Force an error during the order retrieval phase. 
self.order_repo.get = mock.MagicMock(return_value=None, side_effect=ValueError()) self.assertRaises( ValueError, self.resource.process, self.order.id, self.external_project_id, ) # Order state doesn't change because can't retrieve it to change it. self.assertEqual(models.States.PENDING, self.order.status) def test_should_fail_during_processing(self): # Force an error during the processing handler phase. self.project_repo.get = mock.MagicMock(return_value=None, side_effect=ValueError()) self.assertRaises( ValueError, self.resource.process, self.order.id, self.external_project_id, ) self.assertEqual(models.States.ERROR, self.order.status) self.assertEqual(500, self.order.error_status_code) self.assertEqual(u._('Process TypeOrder failure seen - please contact ' 'site administrator.'), self.order.error_reason) @mock.patch('barbican.plugin.resources.generate_secret') def test_should_fail_during_success_report_fail(self, mock_generate_secret): mock_generate_secret.return_value = self.secret # Force an error during the processing handler phase. self.order_repo.save = mock.MagicMock(return_value=None, side_effect=ValueError()) self.assertRaises( ValueError, self.resource.process, self.order.id, self.external_project_id, ) def test_should_fail_during_error_report_fail(self): # Force an error during the error-report handling after # error in processing handler phase. # Force an error during the processing handler phase. self.project_repo.get = mock.MagicMock(return_value=None, side_effect=TypeError()) # Force exception in the error-reporting phase. self.order_repo.save = mock.MagicMock(return_value=None, side_effect=ValueError()) # Should see the original exception (TypeError) instead of the # secondary one (ValueError). self.assertRaises( TypeError, self.resource.process, self.order.id, self.external_project_id, ) self.project_repo.get.assert_called_once_with(self.project_id) self.order_repo.save.assert_called_once_with(self.order) class WhenBeginningCertificateTypeOrder(BaseOrderTestCase): def setUp(self): super(WhenBeginningCertificateTypeOrder, self).setUp() self.order.type = models.OrderType.CERTIFICATE self.resource = resources.BeginTypeOrder() @mock.patch( 'barbican.tasks.certificate_resources.issue_certificate_request') def test_should_process_order_no_container( self, mock_issue_cert_request): mock_issue_cert_request.return_value = None result = self.resource.process_and_suppress_exceptions( self.order.id, self.external_project_id) self.order_repo.get.assert_called_once_with( entity_id=self.order.id, external_project_id=self.external_project_id) self.assertEqual(self.order.status, models.States.ACTIVE) mock_issue_cert_request.assert_called_once_with( self.order, self.project, mock.ANY ) self.assertIsNone(self.order.container_id) self.assertIsInstance(result, common.FollowOnProcessingStatusDTO) @mock.patch( 'barbican.tasks.certificate_resources.issue_certificate_request') def test_should_process_order_with_container( self, mock_issue_cert_request): mock_issue_cert_request.return_value = self.container result = self.resource.process( self.order.id, self.external_project_id) self.order_repo.get.assert_called_once_with( entity_id=self.order.id, external_project_id=self.external_project_id) self.assertEqual(self.order.status, models.States.ACTIVE) mock_issue_cert_request.assert_called_once_with( self.order, self.project, mock.ANY ) self.assertEqual(self.container.id, self.order.container_id) self.assertIsInstance(result, common.FollowOnProcessingStatusDTO) class 
WhenUpdatingOrder(BaseOrderTestCase): def setUp(self): super(WhenUpdatingOrder, self).setUp() self.updated_meta = 'updated' self.resource = resources.UpdateOrder() @mock.patch( 'barbican.tasks.certificate_resources.modify_certificate_request') def test_should_update_certificate_order(self, mock_modify_cert_request): self.order.type = models.OrderType.CERTIFICATE self.resource.process_and_suppress_exceptions( self.order.id, self.external_project_id, self.updated_meta) self.assertEqual(self.order.status, models.States.ACTIVE) mock_modify_cert_request.assert_called_once_with( self.order, self.updated_meta ) @mock.patch( 'barbican.tasks.certificate_resources.modify_certificate_request') def test_should_fail_during_processing(self, mock_mod_cert): mock_mod_cert.side_effect = ValueError('Abort!') self.order.type = models.OrderType.CERTIFICATE exception = self.assertRaises( ValueError, self.resource.process, self.order_id, self.external_project_id, self.meta ) self.assertEqual('Abort!', six.text_type(exception)) mock_mod_cert.assert_called_once_with(self.order, self.meta) self.assertEqual(models.States.ERROR, self.order.status) self.assertEqual(500, self.order.error_status_code) self.assertEqual(u._('Update Order failure seen - please contact ' 'site administrator.'), self.order.error_reason) class WhenBeginningAsymmetricTypeOrder(BaseOrderTestCase): def setUp(self): super(WhenBeginningAsymmetricTypeOrder, self).setUp() self.order.type = "asymmetric" self.resource = resources.BeginTypeOrder() @mock.patch('barbican.plugin.resources.generate_asymmetric_secret') def test_should_process_asymmetric_order(self, mock_generate_asymmetric_secret): mock_generate_asymmetric_secret.return_value = self.container self.resource.process(self.order.id, self.external_project_id) self.order_repo.get.assert_called_once_with( entity_id=self.order.id, external_project_id=self.external_project_id) self.assertEqual(self.order.status, models.States.ACTIVE) secret_info = self.order.to_dict_fields()['meta'] mock_generate_asymmetric_secret.assert_called_once_with( secret_info, secret_info.get('payload_content_type', 'application/octet-stream'), self.project ) def test_should_fail_during_retrieval(self): # Force an error during the order retrieval phase. self.order_repo.get = mock.MagicMock(return_value=None, side_effect=ValueError()) self.assertRaises( ValueError, self.resource.process, self.order.id, self.external_project_id, ) # Order state doesn't change because can't retrieve it to change it. self.assertEqual(models.States.PENDING, self.order.status) def test_should_fail_during_processing(self): # Force an error during the processing handler phase. self.project_repo.get = mock.MagicMock(return_value=None, side_effect=ValueError()) self.assertRaises( ValueError, self.resource.process, self.order.id, self.external_project_id, ) self.assertEqual(models.States.ERROR, self.order.status) self.assertEqual(500, self.order.error_status_code) self.assertEqual(u._('Process TypeOrder failure seen - please contact ' 'site administrator.'), self.order.error_reason) @mock.patch('barbican.plugin.resources.generate_asymmetric_secret') def test_should_fail_during_success_report_fail(self, mock_generate_asym_secret): mock_generate_asym_secret.return_value = self.container # Force an error during the processing handler phase. 
self.order_repo.save = mock.MagicMock(return_value=None, side_effect=ValueError()) self.assertRaises( ValueError, self.resource.process, self.order.id, self.external_project_id, ) def test_should_fail_during_error_report_fail(self): # Force an error during the error-report handling after # error in processing handler phase. # Force an error during the processing handler phase. self.project_repo.get = mock.MagicMock(return_value=None, side_effect=TypeError()) # Force exception in the error-reporting phase. self.order_repo.save = mock.MagicMock(return_value=None, side_effect=ValueError()) # Should see the original exception (TypeError) instead of the # secondary one (ValueError). self.assertRaises( TypeError, self.resource.process, self.order.id, self.external_project_id, ) self.project_repo.get.assert_called_once_with(self.project_id) self.order_repo.save.assert_called_once_with(self.order) class WhenCheckingCertificateStatus(BaseOrderTestCase): def setUp(self): super(WhenCheckingCertificateStatus, self).setUp() self.order.type = models.OrderType.CERTIFICATE self.resource = resources.CheckCertificateStatusOrder() @mock.patch( 'barbican.tasks.certificate_resources.check_certificate_request') def test_should_process_order_no_container( self, mock_check_cert_request): mock_check_cert_request.return_value = None result = self.resource.process_and_suppress_exceptions( self.order.id, self.external_project_id) self.order_repo.get.assert_called_once_with( entity_id=self.order.id, external_project_id=self.external_project_id) self.assertEqual(self.order.status, models.States.ACTIVE) mock_check_cert_request.assert_called_once_with( self.order, self.project, mock.ANY ) self.assertIsNone(self.order.container_id) self.assertIsInstance(result, common.FollowOnProcessingStatusDTO) @mock.patch( 'barbican.tasks.certificate_resources.check_certificate_request') def test_should_process_order_with_container( self, mock_check_cert_request): mock_check_cert_request.return_value = self.container self.resource.process(self.order.id, self.external_project_id) self.order_repo.get.assert_called_once_with( entity_id=self.order.id, external_project_id=self.external_project_id) self.assertEqual(self.order.status, models.States.ACTIVE) mock_check_cert_request.assert_called_once_with( self.order, self.project, mock.ANY ) self.assertEqual(self.container.id, self.order.container_id) def test_should_fail_with_bogus_order_type(self): self.order.type = 'bogus-type' self.assertRaises( NotImplementedError, self.resource.process, self.order.id, self.external_project_id, ) # Order state should be set to ERROR. self.assertEqual(models.States.ERROR, self.order.status) self.assertEqual( six.u('Check Certificate Order Status failure seen - ' 'please contact site administrator.'), self.order.error_reason) self.assertEqual(500, self.order.error_status_code) barbican-9.1.0.dev50/barbican/tests/tasks/test_keystone_consumer.py0000664000175000017500000003243213616500636025637 0ustar sahidsahid00000000000000# Copyright (c) 2014 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. 
barbican-9.1.0.dev50/barbican/tests/tasks/test_keystone_consumer.py0000664000175000017500000003243213616500636025637 0ustar sahidsahid00000000000000
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import mock
import sqlalchemy

from barbican.common import exception
from barbican.common import resources as c_resources
from barbican.model import models
from barbican.model import repositories as rep
from barbican.plugin.crypto import manager
from barbican.plugin import resources as plugin
from barbican.tasks import keystone_consumer as consumer
from barbican.tests import database_utils

from oslo_utils import uuidutils


class InitializeDatabaseMixin(object):

    def _init_memory_db_setup(self):
        # Force a refresh of the singleton plugin manager for each test.
        manager._PLUGIN_MANAGER = None
        manager.CONF.set_override('enabled_crypto_plugins',
                                  ['simple_crypto'],
                                  group='crypto')

        self.project_id1 = uuidutils.generate_uuid()
        self.project_id2 = uuidutils.generate_uuid(dashed=False)

        self.project1_data = c_resources.get_or_create_project(
            self.project_id1)
        self.assertIsNotNone(self.project1_data)

        self.project2_data = c_resources.get_or_create_project(
            self.project_id2)
        self.assertIsNotNone(self.project2_data)

    def _create_secret_for_project(self, project_data):
        secret_info = {"name": uuidutils.generate_uuid(dashed=False),
                       "algorithm": "aes",
                       "bit_length": 256,
                       "mode": "cbc",
                       "payload_content_type": "application/octet-stream"}
        new_secret = plugin.generate_secret(
            secret_info, secret_info.get('payload_content_type'),
            project_data)

        return new_secret


class WhenUsingKeystoneEventConsumer(database_utils.RepositoryTestCase,
                                     InitializeDatabaseMixin):
    """Test all but the process() method on KeystoneEventConsumer class.

    For unit testing the process() method, use the
    WhenUsingKeystoneEventConsumerProcessMethod class.
    """

    def setUp(self):
        super(WhenUsingKeystoneEventConsumer, self).setUp()

        self.kek_repo = rep.get_kek_datum_repository()
        self.project_repo = rep.get_project_repository()
        self.secret_meta_repo = rep.get_secret_meta_repository()
        self.secret_repo = rep.get_secret_repository()
        self.transport_key_repo = rep.get_transport_key_repository()

    def test_get_project_entities_lookup_call(self):
        self._init_memory_db_setup()
        secret = self._create_secret_for_project(self.project2_data)

        project2_id = self.project2_data.id
        self.assertIsNotNone(secret)

        db_secrets = self.secret_repo.get_project_entities(project2_id)
        self.assertEqual(1, len(db_secrets))
        self.assertEqual(secret.id, db_secrets[0].id)

        db_kek = self.kek_repo.get_project_entities(project2_id)
        self.assertEqual(1, len(db_kek))

        # secret_meta_repo does not implement
        # _build_get_project_entities_query, so it should raise an error
        self.assertRaises(NotImplementedError,
                          self.secret_meta_repo.get_project_entities,
                          project2_id)

        # transport_key_repo does not implement
        # _build_get_project_entities_query, so it should raise an error
        self.assertRaises(NotImplementedError,
                          self.transport_key_repo.get_project_entities,
                          project2_id)

    @mock.patch.object(models.Project, 'delete',
                       side_effect=sqlalchemy.exc.SQLAlchemyError)
    def test_delete_project_entities_alchemy_error_suppress_exception_true(
            self, mock_entity_delete):
        self._init_memory_db_setup()
        secret = self._create_secret_for_project(self.project1_data)
        self.assertIsNotNone(secret)

        project1_id = self.project1_data.id
        # sqlalchemy error is suppressed here
        no_error = self.project_repo.delete_project_entities(
            project1_id, suppress_exception=True)
        self.assertIsNone(no_error)

    @mock.patch.object(models.Project, 'delete',
                       side_effect=sqlalchemy.exc.SQLAlchemyError)
    def test_delete_project_entities_alchemy_error_suppress_exception_false(
            self, mock_entity_delete):
        self._init_memory_db_setup()
        secret = self._create_secret_for_project(self.project1_data)
        self.assertIsNotNone(secret)

        project1_id = self.project1_data.id
        # sqlalchemy error is not suppressed here
        self.assertRaises(exception.BarbicanException,
                          self.project_repo.delete_project_entities,
                          project1_id, suppress_exception=False)

    def test_delete_project_entities_not_impl_error_suppress_exception_true(
            self):
        self._init_memory_db_setup()
        secret = self._create_secret_for_project(self.project1_data)
        self.assertIsNotNone(secret)

        project1_id = self.project1_data.id
        # NotImplementedError is not suppressed regardless of related flag
        self.assertRaises(NotImplementedError,
                          self.secret_meta_repo.delete_project_entities,
                          project1_id, suppress_exception=True)

    def test_delete_project_entities_not_impl_error_suppress_exception_false(
            self):
        self._init_memory_db_setup()
        secret = self._create_secret_for_project(self.project1_data)
        self.assertIsNotNone(secret)

        project1_id = self.project1_data.id
        # NotImplementedError is not suppressed regardless of related flag
        self.assertRaises(NotImplementedError,
                          self.secret_meta_repo.delete_project_entities,
                          project1_id, suppress_exception=False)

    def test_invoke_handle_error(self):
        task = consumer.KeystoneEventConsumer()
        project = mock.MagicMock()
        project.project_id = 'project_id'
        status = 'status'
        message = 'message'
        exception_test = ValueError('Abort!')
        resource_type = 'type'
        operation_type = 'operation'

        task.handle_error(
            project, status, message, exception_test, project_id=None,
            resource_type=resource_type, operation_type=operation_type)


class WhenUsingKeystoneEventConsumerProcessMethod(
        database_utils.RepositoryTestCase,
        InitializeDatabaseMixin):
    """Test only the process() method on KeystoneEventConsumer class.

    For unit testing all but the process() method, use the
    WhenUsingKeystoneEventConsumer class.
    """

    def setUp(self):
        super(WhenUsingKeystoneEventConsumerProcessMethod, self).setUp()

        # Override the database start function as repositories.start() is
        # already invoked by the RepositoryTestCase base class setUp().
        # Similarly, override the clear function.
        self.task = consumer.KeystoneEventConsumer(
            db_start=mock.MagicMock(), db_clear=mock.MagicMock()
        )

    def test_project_entities_cleanup_for_no_matching_barbican_project(self):
        self._init_memory_db_setup()

        result = self.task.process(project_id=self.project_id1,
                                   resource_type='project',
                                   operation_type='deleted')
        self.assertIsNone(result, 'No return is expected as result')

    def test_project_entities_cleanup_for_missing_barbican_project(self):
        self._init_memory_db_setup()

        result = self.task.process(project_id=None,
                                   resource_type='project',
                                   operation_type='deleted')
        self.assertIsNone(result, 'No return is expected as result')

    @mock.patch.object(consumer.KeystoneEventConsumer, 'handle_success')
    def test_existing_project_entities_cleanup_for_plain_secret(
            self, mock_handle_success):
        self._init_memory_db_setup()
        secret = self._create_secret_for_project(self.project1_data)
        self.assertIsNotNone(secret)

        secret_id = secret.id
        project1_id = self.project1_data.id

        secret_repo = rep.get_secret_repository()
        db_secrets = secret_repo.get_project_entities(project1_id)
        self.assertEqual(1, len(db_secrets))
        self.assertEqual(secret.id, db_secrets[0].id)

        # Get secret_store_metadata for related secret
        self.assertGreater(len(db_secrets[0].secret_store_metadata), 0)

        secret_metadata_id = list(db_secrets[0].
                                  secret_store_metadata.values())[0].id
        self.assertIsNotNone(secret_metadata_id)

        # Get the db entry for secret_store_metadata by id to make sure it
        # is present before removal via the delete-project task.
        secret_meta_repo = rep.get_secret_meta_repository()
        db_secret_store_meta = secret_meta_repo.get(
            entity_id=secret_metadata_id)
        self.assertIsNotNone(db_secret_store_meta)

        kek_repo = rep.get_kek_datum_repository()
        db_kek = kek_repo.get_project_entities(project1_id)
        self.assertEqual(1, len(db_kek))

        result = self.task.process(project_id=self.project_id1,
                                   resource_type='project',
                                   operation_type='deleted')
        self.assertIsNone(result, 'No return is expected as result')

        mock_handle_success.assert_has_calls([])
        _, kwargs = mock_handle_success.call_args
        self.assertEqual(self.project_id1, kwargs['project_id'])
        self.assertEqual('project', kwargs['resource_type'])
        self.assertEqual('deleted', kwargs['operation_type'])

        # After project entities delete, make sure secret is not found
        ex = self.assertRaises(exception.NotFound, secret_repo.get,
                               entity_id=secret_id,
                               external_project_id=self.project_id1)
        self.assertIn(secret_id, str(ex))

        # After project entities delete, make sure kek data is not found
        entities = kek_repo.get_project_entities(project1_id)
        self.assertEqual(0, len(entities))

        project_repo = rep.get_project_repository()
        db_project = project_repo.get_project_entities(project1_id)
        self.assertEqual(0, len(db_project))

        # Should have deleted SecretStoreMetadatum via children delete
        self.assertRaises(exception.NotFound, secret_meta_repo.get,
                          entity_id=secret_metadata_id)

    @mock.patch.object(consumer.KeystoneEventConsumer, 'handle_error')
    @mock.patch.object(rep.ProjectRepo, 'delete_project_entities',
                       side_effect=exception.BarbicanException)
    def test_rollback_with_error_during_project_cleanup(self, mock_delete,
                                                        mock_handle_error):
        self._init_memory_db_setup()

        secret = self._create_secret_for_project(self.project1_data)
        self.assertIsNotNone(secret)

        secret_id = secret.id
        project1_id = self.project1_data.id

        secret_repo = rep.get_secret_repository()
        db_secrets = secret_repo.get_project_entities(project1_id)
        self.assertEqual(1, len(db_secrets))
        self.assertEqual(secret.id, db_secrets[0].id)

        kek_repo = rep.get_kek_datum_repository()
        db_kek = kek_repo.get_project_entities(project1_id)
        self.assertEqual(1, len(db_kek))

        # Commit changes made so far before creating rollback scenario
        rep.commit()

        self.assertRaises(exception.BarbicanException,
                          self.task.process, project_id=self.project_id1,
                          resource_type='project', operation_type='deleted')

        mock_handle_error.assert_called_once_with(
            self.project1_data,
            500,
            mock.ANY,
            mock.ANY,
            operation_type='deleted',
            project_id=mock.ANY,
            resource_type='project',
        )
        args, kwargs = mock_handle_error.call_args
        self.assertEqual(500, args[1])
        self.assertEqual(self.project_id1, kwargs['project_id'])
        self.assertEqual('project', kwargs['resource_type'])
        self.assertEqual('deleted', kwargs['operation_type'])

        # Make sure entities are still present after rollback
        db_secrets = secret_repo.get_project_entities(project1_id)
        self.assertEqual(1, len(db_secrets))
        self.assertEqual(secret_id, db_secrets[0].id)

        db_kek = kek_repo.get_project_entities(project1_id)
        self.assertEqual(1, len(db_kek))

        project_repo = rep.get_project_repository()
        db_project = project_repo.get_project_entities(project1_id)
        self.assertEqual(1, len(db_project))
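
# A minimal sketch (not Barbican code) of the suppress_exception contract
# exercised above: database errors may be swallowed or wrapped in
# BarbicanException, but NotImplementedError always propagates because it
# signals a missing query builder, not a runtime failure.
def _demo_suppress_exception_contract(delete_fn, suppress_exception=False):
    try:
        delete_fn()
    except NotImplementedError:
        raise  # never suppressed, regardless of the flag
    except Exception:
        if not suppress_exception:
            raise exception.BarbicanException('Failed to delete entities')
        # Otherwise the real implementation logs the error and carries on.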
barbican-9.1.0.dev50/barbican/tests/tasks/__init__.py0000664000175000017500000000000013616500636022545 0ustar sahidsahid00000000000000
barbican-9.1.0.dev50/barbican/tests/tasks/test_common.py0000664000175000017500000000334013616500636023347 0ustar sahidsahid00000000000000
# Copyright (c) 2015 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from barbican import i18n as u
from barbican.tasks import common
from barbican.tests import utils


class WhenUsingFollowOnProcessingStatusDTO(utils.BaseTestCase):
    """Test using the :class:`FollowOnProcessingStatusDTO` class."""

    def setUp(self):
        super(WhenUsingFollowOnProcessingStatusDTO, self).setUp()

        self.target = common.FollowOnProcessingStatusDTO()

    def test_should_have_expected_defaults(self):
        self.assertEqual(
            common.RetryTasks.NO_ACTION_REQUIRED, self.target.retry_task)
        self.assertEqual(u._('Unknown'), self.target.status)
        self.assertEqual(u._('Unknown'), self.target.status_message)
        self.assertEqual(common.RETRY_MSEC_DEFAULT, self.target.retry_msec)
        self.assertFalse(self.target.is_follow_on_needed())

    def test_should_indicate_no_follow_on_with_no_retry_task(self):
        self.target.retry_task = None
        self.assertFalse(self.target.is_follow_on_needed())

    def test_should_indicate_follow_on_when_retry_task_provided(self):
        self.target.retry_task = common.RetryTasks.INVOKE_SAME_TASK
        self.assertTrue(self.target.is_follow_on_needed())
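
# A minimal sketch (assuming only the behavior the three tests above
# assert) of the is_follow_on_needed() decision: a DTO requests follow-on
# processing exactly when retry_task is set and is not NO_ACTION_REQUIRED.
def _demo_is_follow_on_needed(retry_task):
    if retry_task is None:
        return False
    return retry_task != common.RetryTasks.NO_ACTION_REQUIRED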
barbican-9.1.0.dev50/barbican/tests/common/0000775000175000017500000000000013616500640020604 5ustar sahidsahid00000000000000
barbican-9.1.0.dev50/barbican/tests/common/test_quota.py0000664000175000017500000002750113616500636023360 0ustar sahidsahid00000000000000
# Copyright (c) 2015 Cisco Systems
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import six
import unittest

from barbican.common import exception as excep
from barbican.common import quota
from barbican.model import models
from barbican.tests import database_utils


class WhenTestingQuotaDriverFunctions(database_utils.RepositoryTestCase):

    def setUp(self):
        super(WhenTestingQuotaDriverFunctions, self).setUp()
        self.quota_driver = quota.QuotaDriver()

    def test_get_defaults(self):
        quotas = self.quota_driver._get_defaults()
        self.assertEqual(-1, quotas['secrets'])
        self.assertEqual(-1, quotas['orders'])
        self.assertEqual(-1, quotas['containers'])
        self.assertEqual(-1, quotas['consumers'])
        self.assertEqual(-1, quotas['cas'])

    def test_compute_effective_quotas_using_some_defaults(self):
        configured_quotas = {'consumers': None, 'containers': 66,
                             'orders': None, 'secrets': 55,
                             'cas': None}
        quotas = self.quota_driver._compute_effective_quotas(
            configured_quotas)
        expected_quotas = {'consumers': -1, 'containers': 66,
                           'orders': -1, 'secrets': 55,
                           'cas': -1}
        self.assertEqual(expected_quotas, quotas)

    def test_compute_effective_quotas_using_all_defaults(self):
        configured_quotas = {'consumers': None, 'containers': None,
                             'orders': None, 'secrets': None,
                             'cas': None}
        quotas = self.quota_driver._compute_effective_quotas(
            configured_quotas)
        expected_quotas = {'consumers': -1, 'containers': -1,
                           'orders': -1, 'secrets': -1,
                           'cas': -1}
        self.assertEqual(expected_quotas, quotas)

    def test_is_unlimited_true(self):
        self.assertTrue(self.quota_driver.is_unlimited_value(-1))

    def test_is_unlimited_false(self):
        self.assertFalse(self.quota_driver.is_unlimited_value(1))

    def test_is_disabled_true(self):
        self.assertTrue(self.quota_driver.is_disabled_value(0))

    def test_is_disabled_false(self):
        self.assertFalse(self.quota_driver.is_disabled_value(1))

    def test_should_get_project_quotas(self):
        self.create_a_test_project_quotas()
        project_quotas = self.quota_driver.get_project_quotas(
            self.get_test_project_id())
        self.assertEqual(
            {'project_quotas': self.get_test_parsed_project_quotas()},
            project_quotas)

    def test_should_return_not_found_get_project_quotas(self):
        project_quotas = self.quota_driver.get_project_quotas('dummy')
        self.assertIsNone(project_quotas)

    def test_should_get_project_quotas_list(self):
        self.create_a_test_project_quotas()
        project_quotas = self.quota_driver.get_project_quotas_list()
        self.assertEqual({'project_quotas': [{
            'project_id': u'project1',
            'project_quotas': {'consumers': 105,
                               'containers': 103,
                               'orders': 102,
                               'secrets': 101,
                               'cas': 106}}],
            'total': 1},
            project_quotas)

    def test_should_get_empty_project_quotas_list(self):
        project_quotas = self.quota_driver.get_project_quotas_list()
        self.assertEqual({'total': 0, 'project_quotas': []}, project_quotas)

    def test_should_delete_project_quotas(self):
        self.create_a_test_project_quotas()
        self.quota_driver.delete_project_quotas(
            self.get_test_project_id())

    def test_should_raise_not_found_delete_project_quotas(self):
        self.assertRaises(
            excep.NotFound,
            self.quota_driver.delete_project_quotas,
            'dummy')

    def test_get_project_quotas_with_partial_definition(self):
        self.create_a_test_project_quotas('partial')
        project_quotas = self.quota_driver.get_project_quotas(
            self.get_test_project_id('partial'))
        self.assertEqual(
            {'project_quotas':
                self.get_test_response_project_quotas('partial')},
            project_quotas)

    def test_get_project_quotas_using_empty_definition(self):
        self.create_a_test_project_quotas('none')
        project_quotas = self.quota_driver.get_project_quotas(
            self.get_test_project_id('none'))
        self.assertEqual(
            {'project_quotas':
                self.get_test_response_project_quotas('none')},
            project_quotas)

    def test_get_quotas_using_some_defaults(self):
        self.create_a_test_project_quotas('partial')
        quotas = self.quota_driver.get_quotas(
            self.get_test_project_id('partial'))
        expected_quotas = {'quotas': {'consumers': -1, 'containers': 66,
                                      'orders': -1, 'secrets': 55,
                                      'cas': -1}}
        self.assertEqual(expected_quotas, quotas)

    def test_get_quotas_using_all_defaults(self):
        quotas = self.quota_driver.get_quotas('not_configured')
        expected_quotas = {'quotas': {'consumers': -1, 'containers': -1,
                                      'orders': -1, 'secrets': -1,
                                      'cas': -1}}
        self.assertEqual(expected_quotas, quotas)

    # ----------------------- Helper Functions ---------------------------

    def get_test_project_id(self, index=1):
        if index == 'partial':
            return 'project_partial'
        elif index == 'none':
            return 'project_none'
        else:
            return 'project' + str(index)

    def get_test_parsed_project_quotas(self, index=1):
        if index == 'partial':
            parsed_project_quotas = {
                'secrets': 55,
                'containers': 66}
        elif index == 'none':
            parsed_project_quotas = {}
        else:
            parsed_project_quotas = {
                'secrets': index * 100 + 1,
                'orders': index * 100 + 2,
                'containers': index * 100 + 3,
                'consumers': index * 100 + 5,
                'cas': index * 100 + 6}
        return parsed_project_quotas

    def get_test_response_project_quotas(self, index=1):
        if index == 'partial':
            response_project_quotas = {
                'secrets': 55,
                'orders': None,
                'containers': 66,
                'consumers': None,
                'cas': None}
        elif index == 'none':
            response_project_quotas = {
                'secrets': None,
                'orders': None,
                'containers': None,
                'consumers': None,
                'cas': None}
        else:
            response_project_quotas = {
                'secrets': index * 100 + 1,
                'orders': index * 100 + 2,
                'containers': index * 100 + 3,
                'consumers': index * 100 + 5,
                'cas': index * 100 + 6}
        return response_project_quotas

    def create_a_test_project_quotas(self, index=1):
        project_id = self.get_test_project_id(index)
        parsed_project_quotas = self.get_test_parsed_project_quotas(index)
        self.quota_driver.set_project_quotas(project_id,
                                             parsed_project_quotas)

    def create_project_quotas(self):
        for index in [1, 2, 3]:
            self.create_a_test_project_quotas(index)


class DummyRepoForTestingQuotaEnforcement(object):

    def __init__(self, get_count_return_value):
        self.get_count_return_value = get_count_return_value

    def get_count(self, internal_project_id):
        return self.get_count_return_value
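
# A minimal sketch (names are illustrative) of the quota arithmetic the
# driver tests above pin down: a configured value of None falls back to
# the default, -1 means unlimited, and 0 disables the resource entirely.
def _demo_effective_quota(configured, default=-1):
    effective = default if configured is None else configured
    return {'effective': effective,
            'unlimited': effective == -1,
            'disabled': effective == 0}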
class WhenTestingQuotaEnforcingFunctions(database_utils.RepositoryTestCase):

    def setUp(self):
        super(WhenTestingQuotaEnforcingFunctions, self).setUp()
        self.quota_driver = quota.QuotaDriver()
        self.project = models.Project()
        self.project.id = 'my_internal_id'
        self.project.external_id = 'my_keystone_id'

    def test_should_pass_default_unlimited(self):
        test_repo = DummyRepoForTestingQuotaEnforcement(0)
        quota_enforcer = quota.QuotaEnforcer('secrets', test_repo)
        quota_enforcer.enforce(self.project)

    def test_should_raise_disabled_value(self):
        test_repo = DummyRepoForTestingQuotaEnforcement(0)
        quota_enforcer = quota.QuotaEnforcer('secrets', test_repo)
        disabled_project_quotas = {'consumers': 0, 'containers': 0,
                                   'orders': 0, 'secrets': 0, 'cas': 0}
        self.quota_driver.set_project_quotas(self.project.external_id,
                                             disabled_project_quotas)
        exception = self.assertRaises(
            excep.QuotaReached,
            quota_enforcer.enforce,
            self.project
        )
        self.assertIn('Quota reached for project', six.text_type(exception))
        self.assertIn('my_keystone_id', six.text_type(exception))
        self.assertIn('secrets', six.text_type(exception))
        self.assertIn(str(0), six.text_type(exception))

    def test_should_pass_below_limit(self):
        test_repo = DummyRepoForTestingQuotaEnforcement(4)
        quota_enforcer = quota.QuotaEnforcer('secrets', test_repo)
        five_project_quotas = {'consumers': 5, 'containers': 5,
                               'orders': 5, 'secrets': 5, 'cas': 5}
        self.quota_driver.set_project_quotas(self.project.external_id,
                                             five_project_quotas)
        quota_enforcer.enforce(self.project)

    def test_should_raise_equal_limit(self):
        test_repo = DummyRepoForTestingQuotaEnforcement(5)
        quota_enforcer = quota.QuotaEnforcer('secrets', test_repo)
        five_project_quotas = {'consumers': 5, 'containers': 5,
                               'orders': 5, 'secrets': 5, 'cas': 5}
        self.quota_driver.set_project_quotas(self.project.external_id,
                                             five_project_quotas)
        exception = self.assertRaises(
            excep.QuotaReached,
            quota_enforcer.enforce,
            self.project
        )
        self.assertIn('Quota reached for project', six.text_type(exception))
        self.assertIn('my_keystone_id', six.text_type(exception))
        self.assertIn('secrets', six.text_type(exception))
        self.assertIn(str(5), six.text_type(exception))

    def test_should_raise_above_limit(self):
        test_repo = DummyRepoForTestingQuotaEnforcement(6)
        quota_enforcer = quota.QuotaEnforcer('secrets', test_repo)
        five_project_quotas = {'consumers': 5, 'containers': 5,
                               'orders': 5, 'secrets': 5, 'cas': 5}
        self.quota_driver.set_project_quotas(self.project.external_id,
                                             five_project_quotas)
        exception = self.assertRaises(
            excep.QuotaReached,
            quota_enforcer.enforce,
            self.project
        )
        self.assertIn('Quota reached for project', six.text_type(exception))
        self.assertIn('my_keystone_id', six.text_type(exception))
        self.assertIn('secrets', six.text_type(exception))
        self.assertIn(str(5), six.text_type(exception))


if __name__ == '__main__':
    unittest.main()
barbican-9.1.0.dev50/barbican/tests/common/__init__.py0000664000175000017500000000000013616500636022710 0ustar sahidsahid00000000000000
barbican-9.1.0.dev50/barbican/tests/common/test_hrefs.py0000664000175000017500000000222513616500636023332 0ustar sahidsahid00000000000000
# Copyright (c) 2015, Cisco Systems
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from barbican.common import hrefs
from barbican.tests import utils as test_utils


class WhenTestingGetContainerID(test_utils.BaseTestCase):

    def test_get_container_id_passes(self):
        test_ref = 'https://localhost/v1/containers/good_container_ref'
        result = hrefs.get_container_id_from_ref(test_ref)
        self.assertEqual('good_container_ref', result)

    def test_get_container_id_raises(self):
        test_ref = 'bad_container_ref'
        self.assertRaises(IndexError,
                          hrefs.get_container_id_from_ref,
                          test_ref)
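
# A minimal sketch (not the real helper) of the behavior the two tests
# above imply: the container id is the final path segment of the ref, and
# a ref without any '/' separator triggers an IndexError.
def _demo_container_id_from_ref(container_ref):
    return container_ref.rsplit('/', 1)[1]  # IndexError if no '/' present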
barbican-9.1.0.dev50/barbican/tests/common/test_validators.py0000664000175000017500000021646413616500636024403 0ustar sahidsahid00000000000000
# Copyright (c) 2013-2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import datetime
import six
import unittest

from oslo_serialization import base64
import testtools

from barbican.common import exception as excep
from barbican.common import validators
from barbican.tests import certificate_utils as certs
from barbican.tests import keys
from barbican.tests import utils

VALID_EXTENSIONS = "valid extensions"
VALID_FULL_CMC = "valid CMC"


def get_symmetric_key_req():
    return {'name': 'mysymmetrickey',
            'payload_content_type': 'application/octet-stream',
            'payload_content_encoding': 'base64',
            'algorithm': 'aes',
            'bit_length': 256,
            'secret_type': 'symmetric',
            'payload': 'gF6+lLoF3ohA9aPRpt+6bQ=='}


def get_private_key_req():
    return {'name': 'myprivatekey',
            'payload_content_type': 'application/pkcs8',
            'payload_content_encoding': 'base64',
            'algorithm': 'rsa',
            'bit_length': 2048,
            'secret_type': 'private',
            'payload': base64.encode_as_text(keys.get_private_key_pem())}


def get_public_key_req():
    return {'name': 'mypublickey',
            'payload_content_type': 'application/octet-stream',
            'payload_content_encoding': 'base64',
            'algorithm': 'rsa',
            'bit_length': 2048,
            'secret_type': 'public',
            'payload': base64.encode_as_text(keys.get_public_key_pem())}


def get_certificate_req():
    return {'name': 'mycertificate',
            'payload_content_type': 'application/octet-stream',
            'payload_content_encoding': 'base64',
            'algorithm': 'rsa',
            'bit_length': 2048,
            'secret_type': 'certificate',
            'payload': base64.encode_as_text(keys.get_certificate_pem())}


def get_passphrase_req():
    return {'name': 'mypassphrase',
            'payload_content_type': 'text/plain',
            'secret_type': 'passphrase',
            'payload': 'mysecretpassphrase'}


def suite():
    suite = unittest.TestSuite()
    suite.addTest(WhenTestingSecretValidator())
    return suite


class WhenTestingValidatorsFunctions(utils.BaseTestCase):

    def test_secret_too_big_is_false_for_small_secrets(self):
        data = b'\xb0'
        is_too_big = validators.secret_too_big(data)
        self.assertFalse(is_too_big)

    def test_secret_too_big_is_true_for_big_secrets(self):
        data = b'\x01' * validators.CONF.max_allowed_secret_in_bytes
        data += b'\x01'
        is_too_big = validators.secret_too_big(data)
        self.assertTrue(is_too_big)

    def test_secret_too_big_is_true_for_big_unicode_secrets(self):
        beer = u'\U0001F37A'
        data = beer * (validators.CONF.max_allowed_secret_in_bytes // 4)
        data += u'1'
        is_too_big = validators.secret_too_big(data)
        self.assertTrue(is_too_big)


@utils.parameterized_test_case
class WhenTestingSecretValidator(utils.BaseTestCase):

    def setUp(self):
        super(WhenTestingSecretValidator, self).setUp()

        self.name = 'name'
        self.payload = 'not-encrypted'
        self.payload_content_type = 'text/plain'
        self.secret_algorithm = 'algo'
        self.secret_bit_length = 512
        self.secret_type = 'opaque'
        self.secret_mode = 'cytype'
        self.secret_req = {'name': self.name,
                           'payload_content_type': self.payload_content_type,
                           'algorithm': self.secret_algorithm,
                           'bit_length': self.secret_bit_length,
                           'secret_type': self.secret_type,
                           'mode': self.secret_mode,
                           'payload': self.payload}

        self.validator = validators.NewSecretValidator()

    def test_should_validate_all_fields(self):
        self.validator.validate(self.secret_req)

    def test_should_validate_no_name(self):
        del self.secret_req['name']
        self.validator.validate(self.secret_req)

    def test_should_validate_empty_name(self):
        self.secret_req['name'] = ' '
        self.validator.validate(self.secret_req)

    def test_should_validate_null_name(self):
        self.secret_req['name'] = None
        self.validator.validate(self.secret_req)

    def test_should_validate_no_payload(self):
        del self.secret_req['payload']
        del self.secret_req['payload_content_type']
        result = self.validator.validate(self.secret_req)

        self.assertNotIn('payload', result)

    def test_should_validate_payload_with_whitespace(self):
        self.secret_req['payload'] = ' ' + self.payload + ' '
        result = self.validator.validate(self.secret_req)

        self.assertEqual(self.payload, result['payload'])

    def test_should_validate_future_expiration(self):
        self.secret_req['expiration'] = '2114-02-28T19:14:44.180394'
        result = self.validator.validate(self.secret_req)

        self.assertIn('expiration', result)
        self.assertIsInstance(result['expiration'], datetime.datetime)

    def test_should_validate_future_expiration_no_t(self):
        self.secret_req['expiration'] = '2114-02-28 19:14:44.180394'
        result = self.validator.validate(self.secret_req)

        self.assertIn('expiration', result)
        self.assertIsInstance(result['expiration'], datetime.datetime)

    def test_should_validate_expiration_with_z(self):
        expiration = '2114-02-28 19:14:44.180394Z'
        self.secret_req['expiration'] = expiration
        result = self.validator.validate(self.secret_req)

        self.assertIn('expiration', result)
        self.assertIsInstance(result['expiration'], datetime.datetime)
        self.assertEqual(expiration[:-1], str(result['expiration']))

    def test_should_validate_expiration_with_tz(self):
        expiration = '2114-02-28 12:14:44.180394-05:00'
        self.secret_req['expiration'] = expiration
        result = self.validator.validate(self.secret_req)

        self.assertIn('expiration', result)
        self.assertIsInstance(result['expiration'], datetime.datetime)
        expected = expiration[:-6].replace('12', '17', 1)
        self.assertEqual(expected, str(result['expiration']))

    def test_should_validate_expiration_extra_whitespace(self):
        expiration = '2114-02-28 12:14:44.180394-05:00      '
        self.secret_req['expiration'] = expiration
        result = self.validator.validate(self.secret_req)

        self.assertIn('expiration', result)
        self.assertIsInstance(result['expiration'], datetime.datetime)
        expected = expiration[:-12].replace('12', '17', 1)
        self.assertEqual(expected, str(result['expiration']))

    def test_should_validate_empty_expiration(self):
        self.secret_req['expiration'] = ' '
        result = self.validator.validate(self.secret_req)

        self.assertIn('expiration', result)
        self.assertFalse(result['expiration'])

    def test_should_raise_numeric_name(self):
        self.secret_req['name'] = 123
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.secret_req,
        )
        self.assertEqual('name', exception.invalid_property)

    def test_should_raise_name_length_is_greater_than_max(self):
        self.secret_req['name'] = 'a' * 256
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.secret_req,
        )
        self.assertEqual('name', exception.invalid_property)

    def test_should_raise_negative_bit_length(self):
        self.secret_req['bit_length'] = -23
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.secret_req,
        )
        self.assertEqual('bit_length', exception.invalid_property)
        self.assertIn('bit_length', six.text_type(exception))

    def test_should_raise_non_integer_bit_length(self):
        self.secret_req['bit_length'] = "23"
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.secret_req,
        )
        self.assertEqual('bit_length', exception.invalid_property)
        self.assertIn('bit_length', six.text_type(exception))
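
    # Taken together, the bit_length tests in this class pin down the
    # schema: the value must be a true integer (the numeric string "23" is
    # rejected), strictly positive (0 and -23 fail), and no larger than
    # the schema maximum (32768 fails below). The exact bounds appear to
    # be enforced by the JSON schema in barbican.common.validators.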
    def test_should_raise_bit_length_less_than_min(self):
        self.secret_req['bit_length'] = 0
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.secret_req,
        )
        self.assertEqual('bit_length', exception.invalid_property)
        self.assertIn('bit_length', six.text_type(exception))

    def test_should_raise_bit_length_greater_than_max(self):
        self.secret_req['bit_length'] = 32768
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.secret_req,
        )
        self.assertEqual('bit_length', exception.invalid_property)
        self.assertIn('bit_length', six.text_type(exception))

    def test_should_raise_mode_length_greater_than_max(self):
        self.secret_req['mode'] = 'a' * 256
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.secret_req,
        )
        self.assertEqual('mode', exception.invalid_property)
        self.assertIn('mode', six.text_type(exception))

    def test_should_raise_mode_is_non_string(self):
        self.secret_req['mode'] = 123
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.secret_req,
        )
        self.assertEqual('mode', exception.invalid_property)
        self.assertIn('mode', six.text_type(exception))

    def test_validation_should_raise_with_empty_payload(self):
        self.secret_req['payload'] = ' '
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.secret_req,
        )
        self.assertEqual('payload', exception.invalid_property)
        self.assertIn('payload', six.text_type(exception))

    def test_should_raise_already_expired(self):
        self.secret_req['expiration'] = '2004-02-28T19:14:44.180394'
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.secret_req,
        )
        self.assertEqual('expiration', exception.invalid_property)
        self.assertIn('expiration', six.text_type(exception))

    def test_should_raise_expiration_nonsense(self):
        self.secret_req['expiration'] = 'nonsense'
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.secret_req,
        )
        self.assertEqual('expiration', exception.invalid_property)
        self.assertIn('expiration', six.text_type(exception))

    def test_should_raise_expiration_is_non_string(self):
        self.secret_req['expiration'] = 123
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.secret_req,
        )
        self.assertEqual('expiration', exception.invalid_property)
        self.assertIn('expiration', six.text_type(exception))

    def test_should_raise_expiration_greater_than_max(self):
        self.secret_req['expiration'] = 'a' * 256
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.secret_req,
        )
        self.assertEqual('expiration', exception.invalid_property)
        self.assertIn('expiration', six.text_type(exception))

    def test_should_raise_algorithm_is_non_string(self):
        self.secret_req['algorithm'] = 123
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.secret_req,
        )
        self.assertEqual('algorithm', exception.invalid_property)
        self.assertIn('algorithm', six.text_type(exception))

    def test_should_raise_algorithm_greater_than_max(self):
        self.secret_req['algorithm'] = 'a' * 256
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.secret_req,
        )
        self.assertEqual('algorithm', exception.invalid_property)
        self.assertIn('algorithm', six.text_type(exception))

    def test_should_raise_all_nulls(self):
        self.secret_req = {'name': None,
                           'algorithm': None,
                           'bit_length': None,
                           'mode': None}
        self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.secret_req,
        )

    def test_should_raise_all_empties(self):
        self.secret_req = {'name': '',
                           'algorithm': '',
                           'bit_length': '',
                           'mode': ''}
        self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.secret_req,
        )

    def test_should_raise_no_payload_content_type(self):
        del self.secret_req['payload_content_type']
        self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.secret_req,
        )

    def test_should_raise_with_message_w_bad_payload_content_type(self):
        self.secret_req['payload_content_type'] = 'plain/text'
        try:
            self.validator.validate(self.secret_req)
        except excep.InvalidObject as e:
            self.assertIsNotNone(e)
            self.assertIsNotNone(six.text_type(e))
        else:
            self.fail('No validation exception was raised')

    def test_should_validate_mixed_case_payload_content_type(self):
        self.secret_req['payload_content_type'] = 'TeXT/PlaiN'
        self.validator.validate(self.secret_req)

    def test_should_validate_upper_case_payload_content_type(self):
        self.secret_req['payload_content_type'] = 'TEXT/PLAIN'
        self.validator.validate(self.secret_req)

    def test_should_raise_with_mixed_case_wrong_payload_content_type(self):
        self.secret_req['payload_content_type'] = 'TeXT/PlaneS'
        self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.secret_req,
        )

    def test_should_raise_with_upper_case_wrong_payload_content_type(self):
        self.secret_req['payload_content_type'] = 'TEXT/PLANE'
        self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.secret_req,
        )

    def test_should_raise_payload_content_type_greater_than_max(self):
        self.secret_req['payload_content_type'] = 'a' * 256
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.secret_req,
        )
        self.assertEqual('payload_content_type', exception.invalid_property)
        self.assertIn('payload_content_type', six.text_type(exception))

    def test_should_raise_with_payload_content_encoding_greater_than_max(
            self):
        self.secret_req['payload_content_encoding'] = 'a' * 256
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.secret_req,
        )
        self.assertEqual('payload_content_encoding',
                         exception.invalid_property)
        self.assertIn('payload_content_encoding', six.text_type(exception))

    def test_should_raise_with_plain_text_and_encoding(self):
        self.secret_req['payload_content_encoding'] = 'base64'
        self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.secret_req,
        )

    def test_should_raise_with_wrong_encoding(self):
        self.secret_req['payload_content_type'] = 'application/octet-stream'
        self.secret_req['payload_content_encoding'] = 'unsupported'
        self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.secret_req,
        )

    def test_should_validate_with_supported_encoding(self):
        self.secret_req['payload_content_type'] = 'application/octet-stream'
        self.secret_req['payload_content_encoding'] = 'base64'
        self.secret_req['payload'] = 'bXktc2VjcmV0LWhlcmU='
        self.validator.validate(self.secret_req)

    def test_validation_should_validate_with_good_base64_payload(self):
        self.secret_req['payload_content_type'] = 'application/octet-stream'
        self.secret_req['payload_content_encoding'] = 'base64'
        self.secret_req['payload'] = 'bXktc2VjcmV0LWhlcmU='
        self.validator.validate(self.secret_req)

    def test_validation_should_raise_with_bad_base64_payload(self):
        self.secret_req['payload_content_type'] = 'application/octet-stream'
        self.secret_req['payload_content_encoding'] = 'base64'
        self.secret_req['payload'] = 'bad base 64'
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.secret_req,
        )
        self.assertEqual('payload', exception.invalid_property)

    def test_validation_should_raise_with_unicode_payload(self):
        self.secret_req['payload_content_type'] = 'application/octet-stream'
        self.secret_req['payload_content_encoding'] = 'base64'
        self.secret_req['payload'] = six.unichr(0x0080)
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.secret_req,
        )
        self.assertEqual('payload', exception.invalid_property)

    def test_should_pass_with_no_secret_type(self):
        request = dict(self.secret_req)
        del request['secret_type']
        self.validator.validate(request)

    def test_should_fail_with_unknown_secret_type(self):
        self.secret_req['secret_type'] = 'unknown_type'
        self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.secret_req,
        )

    @utils.parameterized_dataset({
        'symmetric': [get_symmetric_key_req()],
        'private': [get_private_key_req()],
        'public': [get_public_key_req()],
        'certificate': [get_certificate_req()],
        'passphrase': [get_passphrase_req()],
    })
    def test_should_pass_with_secret_type(self, request):
        self.validator.validate(request)

    @utils.parameterized_dataset({
        'symmetric': [get_symmetric_key_req(), 'foo'],
        'private': [get_private_key_req(), 'foo'],
        'public': [get_public_key_req(), 'foo'],
        'certificate': [get_certificate_req(), 'foo'],
        'passphrase': [get_passphrase_req(), 'base64'],
    })
    def test_should_fail_with_bad_encoding(self, request, content_encoding):
        request['payload_content_encoding'] = content_encoding
        self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            request,
        )

    @utils.parameterized_dataset({
        'symmetric': [get_symmetric_key_req(), 'text/plain'],
        'private': [get_private_key_req(), 'text/plain'],
        'public': [get_public_key_req(), 'text/plain'],
        'certificate': [get_certificate_req(), 'text/plain'],
        'passphrase': [get_passphrase_req(), 'application/octet-stream'],
    })
    def test_should_fail_with_bad_content_type(self, request, content_type):
        request['payload_content_type'] = content_type
        self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            request,
        )
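
# A minimal sketch (derived only from the secret validator tests above;
# not the real schema) of the payload rules they pin down: content types
# match case-insensitively, text/plain forbids a content encoding, and
# application/octet-stream requires base64.
def _demo_payload_rules(content_type, content_encoding):
    normalized = content_type.lower()  # 'TeXT/PlaiN' acts like 'text/plain'
    if normalized == 'text/plain':
        return content_encoding is None
    if normalized == 'application/octet-stream':
        return content_encoding == 'base64'
    return None  # other types (e.g. application/pkcs8) have their own rules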
class WhenTestingContainerValidator(utils.BaseTestCase):

    def setUp(self):
        super(WhenTestingContainerValidator, self).setUp()
        validators.CONF.set_override("host_href", "http://localhost:9311")

        self.name = 'name'
        self.type = 'generic'
        self.secret_refs = [
            {
                'name': 'testname',
                'secret_ref': 'http://localhost:9311/1231'
            },
            {
                'name': 'testname2',
                'secret_ref': 'http://localhost:9311/1232'
            }
        ]
        self.container_req = {'name': self.name,
                              'type': self.type,
                              'secret_refs': self.secret_refs}

        self.validator = validators.ContainerValidator()

    def test_should_validate_all_fields(self):
        self.validator.validate(self.container_req)

    def test_should_validate_no_name(self):
        del self.container_req['name']
        self.validator.validate(self.container_req)

    def test_should_validate_empty_name(self):
        self.container_req['name'] = ' '
        self.validator.validate(self.container_req)

    def test_should_raise_name_length_greater_than_max(self):
        self.container_req['name'] = 'a' * 256
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )
        self.assertEqual('name', exception.invalid_property)
        self.assertIn('name', six.text_type(exception))

    def test_should_raise_nonstring_secret_name(self):
        self.secret_refs[0]["name"] = 5
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )
        self.assertEqual('secret_refs', exception.invalid_property)

    def test_should_raise_secret_name_too_long(self):
        self.secret_refs[0]['name'] = 'a' * 256
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )
        self.assertEqual('secret_refs', exception.invalid_property)

    def test_should_raise_numeric_name(self):
        self.container_req['name'] = 123
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )
        self.assertEqual('name', exception.invalid_property)
        self.assertIn('name', exception.message)

    def test_should_raise_no_type(self):
        del self.container_req['type']
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )
        self.assertIn('type', six.text_type(exception))

    def test_should_raise_empty_type(self):
        self.container_req['type'] = ''
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )
        self.assertEqual('type', exception.invalid_property)

    def test_should_raise_not_supported_type(self):
        self.container_req['type'] = 'testtype'
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )
        self.assertEqual('type', exception.invalid_property)

    def test_should_raise_all_nulls(self):
        self.container_req = {'name': None,
                              'type': None,
                              'bit_length': None,
                              'secret_refs': None}
        self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )

    def test_should_raise_all_empties(self):
        self.container_req = {'name': '',
                              'type': '',
                              'secret_refs': []}
        self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )

    def test_should_validate_empty_secret_refs(self):
        self.container_req['secret_refs'] = []
        self.validator.validate(self.container_req)

    def test_should_raise_no_secret_ref_in_secret_refs(self):
        del self.container_req['secret_refs'][0]['secret_ref']
        self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )

    def test_should_raise_empty_secret_ref_in_secret_refs(self):
        self.container_req['secret_refs'][0]['secret_ref'] = ''
        self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )

    def test_should_raise_numeric_secret_ref_in_secret_refs(self):
        self.container_req['secret_refs'][0]['secret_ref'] = 123
        self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )

    def test_should_raise_duplicate_names_in_secret_refs(self):
        self.container_req['secret_refs'].append(
            self.container_req['secret_refs'][0])
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )
        self.assertEqual('secret_refs', exception.invalid_property)

    def test_should_raise_duplicate_secret_ids_in_secret_refs(self):
        secret_ref = self.container_req['secret_refs'][0]
        secret_ref['name'] = 'testname3'
        self.container_req['secret_refs'].append(secret_ref)
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )
        self.assertEqual('secret_refs', exception.invalid_property)

    def test_should_raise_duplicate_secret_ref_format_ids_in_secret_refs(
            self):
        """Test duplicate secret_id presence as part of single container.

        Here the same secret_id is represented in several different ref
        formats, and the secret_id is extracted from each of them.
        """
        secret_refs = [
            {
                'name': 'testname',
                'secret_ref': 'http://localhost:9311/v1/12345/secrets/1231'
            },
            {
                'name': 'testname2',
                'secret_ref': 'http://localhost:9311/v1/12345/secrets//1232'
            },
            {
                'name': 'testname3',
                'secret_ref': 'http://localhost:9311/v1/12345/secrets//1231/'
            }
        ]
        container_req = {'name': 'name',
                         'type': 'generic',
                         'secret_refs': secret_refs}
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            container_req,
        )
        self.assertEqual('secret_refs', exception.invalid_property)

    def test_ensure_unconfigured_secret_ref_hostname_cannot_be_passed_in(
            self):
        # Attempt to add some bogus secret refs.
        secret_refs = [
            {
                'name': 'super-secret-beer-ingredient',
                'secret_ref': 'http://kegsarecool.com:9311/1234/secrets/57890'
            },
            {
                'name': 'iShouldNotBeAbleToExist',
                'secret_ref': 'http://invalid.fqdn:9311/v1/secrets/FAD23'
            }
        ]
        container_req = {
            'name': 'test-container',
            'type': 'generic',
            'secret_refs': secret_refs
        }
        self.assertRaises(
            excep.UnsupportedField,
            self.validator.validate,
            container_req,
        )

    def test_ensure_secret_ref_starts_with_request_hostname(self):
        # Attempt to add a secret ref whose hostname does not match the
        # configured host_href.
        secret_refs = [
            {
                'name': 'pass_malicious_hostname',
                'secret_ref': 'http://www.malicious.com:8080/spoofForToken?'
                              'dummy=http://localhost:9311'
            },
        ]
        container_req = {
            'name': 'test-container',
            'type': 'generic',
            'secret_refs': secret_refs
        }
        self.assertRaises(
            excep.UnsupportedField,
            self.validator.validate,
            container_req,
        )


class WhenTestingRSAContainerValidator(utils.BaseTestCase):

    def setUp(self):
        super(WhenTestingRSAContainerValidator, self).setUp()
        validators.CONF.set_override("host_href", "http://localhost:9311")

        self.name = 'name'
        self.type = 'rsa'
        self.secret_refs = [
            {
                'name': 'public_key',
                'secret_ref': 'http://localhost:9311/1231'
            },
            {
                'name': 'private_key',
                'secret_ref': 'http://localhost:9311/1232'
            },
            {
                'name': 'private_key_passphrase',
                'secret_ref': 'http://localhost:9311/1233'
            }
        ]
        self.container_req = {'name': self.name,
                              'type': self.type,
                              'secret_refs': self.secret_refs}

        self.validator = validators.ContainerValidator()

    def test_should_validate_all_fields(self):
        self.validator.validate(self.container_req)

    def test_should_raise_no_names_in_secret_refs(self):
        del self.container_req['secret_refs'][0]['name']
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )
        self.assertEqual('secret_refs', exception.invalid_property)

    def test_should_raise_empty_names_in_secret_refs(self):
        self.container_req['secret_refs'][0]['name'] = ''
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )
        self.assertEqual('secret_refs', exception.invalid_property)

    def test_should_raise_unsupported_names_in_secret_refs(self):
        self.container_req['secret_refs'][0]['name'] = 'testttt'
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )
        self.assertEqual('secret_refs', exception.invalid_property)

    def test_should_raise_duplicate_secret_id_in_secret_refs(self):
        self.container_req['secret_refs'][0]['secret_ref'] = (
            self.container_req['secret_refs'][2]['secret_ref'])
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )
        self.assertEqual('secret_refs', exception.invalid_property)

    def test_should_raise_more_than_3_secret_refs_for_rsa_type(self):
        new_secret_ref = {
            'name': 'new secret ref',
            'secret_ref': 'http://localhost:9311/234234'
        }
        self.container_req['secret_refs'].append(new_secret_ref)
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )
        self.assertEqual('secret_refs', exception.invalid_property)

    def test_should_raise_if_required_name_missing(self):
        name = 'name'
        type = 'certificate'
        secret_refs = [
            {
                'name': 'private_key',
                'secret_ref': 'http://localhost:9311/123'
            },
            {
                'name': 'private_key_passphrase',
                'secret_ref': 'http://localhost:9311/123'
            }
        ]
        container_req = {'name': name, 'type': type,
                         'secret_refs': secret_refs}
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            container_req)
        self.assertEqual('secret_refs', exception.invalid_property)
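
# A minimal sketch (reflecting only what the container tests assert; not
# the real validator) of the typed-container rules: each type permits a
# fixed set of reference names, duplicates are rejected, and every ref
# must live under the configured host_href.
_DEMO_ALLOWED_REF_NAMES = {
    'rsa': {'public_key', 'private_key', 'private_key_passphrase'},
    'certificate': {'certificate', 'private_key',
                    'private_key_passphrase', 'intermediates'},
}


def _demo_refs_are_plausible(container_type, secret_refs, host_href):
    names = [ref['name'] for ref in secret_refs]
    return (set(names) <= _DEMO_ALLOWED_REF_NAMES[container_type]
            and len(names) == len(set(names))
            and all(ref['secret_ref'].startswith(host_href)
                    for ref in secret_refs))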
class WhenTestingCertificateContainerValidator(utils.BaseTestCase):

    def setUp(self):
        super(WhenTestingCertificateContainerValidator, self).setUp()
        validators.CONF.set_override("host_href", "http://localhost:9311")

        self.name = 'name'
        self.type = 'certificate'
        self.secret_refs = [
            {
                'name': 'certificate',
                'secret_ref': 'http://localhost:9311/S4dfsdrf'
            },
            {
                'name': 'private_key',
                'secret_ref': 'http://localhost:9311/1231'
            },
            {
                'name': 'private_key_passphrase',
                'secret_ref': 'http://localhost:9311/1232'
            },
            {
                'name': 'intermediates',
                'secret_ref': 'http://localhost:9311/1233'
            }
        ]
        self.container_req = {'name': self.name,
                              'type': self.type,
                              'secret_refs': self.secret_refs}

        self.validator = validators.ContainerValidator()

    def test_should_validate_all_fields(self):
        self.validator.validate(self.container_req)

    def test_should_raise_more_than_4_secret_refs_for_cert_type(self):
        new_secret_ref = {
            'name': 'new secret ref',
            'secret_ref': 'http://localhost:9311/234234'
        }
        self.container_req['secret_refs'].append(new_secret_ref)
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req)
        self.assertEqual('secret_refs', exception.invalid_property)

    def test_should_raise_unsupported_names_in_secret_refs(self):
        self.container_req['secret_refs'][0]['name'] = 'public_key'
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req)
        self.assertEqual('secret_refs', exception.invalid_property)

    def test_should_raise_if_required_name_missing(self):
        name = 'name'
        type = 'certificate'
        secret_refs = [
            {
                'name': 'private_key',
                'secret_ref': '123'
            },
            {
                'name': 'intermediates',
                'secret_ref': '123'
            }
        ]
        container_req = {'name': name, 'type': type,
                         'secret_refs': secret_refs}
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            container_req)
        self.assertEqual('secret_refs', exception.invalid_property)


class WhenTestingTransportKeyValidator(utils.BaseTestCase):

    def setUp(self):
        super(WhenTestingTransportKeyValidator, self).setUp()
        self.plugin_name = 'name'
        self.transport_key = 'abcdef'
        self.transport_req = {'plugin_name': self.plugin_name,
                              'transport_key': self.transport_key}

        self.validator = validators.NewTransportKeyValidator()

    def test_should_raise_with_invalid_json_data_type(self):
        self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            []
        )

    def test_should_raise_with_empty_transport_key(self):
        self.transport_req['transport_key'] = ''
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.transport_req
        )
        self.assertEqual('transport_key', exception.invalid_property)

    def test_should_raise_transport_key_is_non_string(self):
        self.transport_req['transport_key'] = 123
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.transport_req
        )
        self.assertEqual('transport_key', exception.invalid_property)

    def test_should_raise_transport_key_is_missing(self):
        del self.transport_req['transport_key']
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.transport_req
        )
        self.assertEqual('transport_key', exception.invalid_property)

    def test_should_raise_plugin_name_is_non_string(self):
        self.transport_req['plugin_name'] = 123
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.transport_req
        )
        self.assertEqual('plugin_name', exception.invalid_property)

    def test_should_raise_plugin_name_is_missing(self):
        del self.transport_req['plugin_name']
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.transport_req
        )
        self.assertEqual('plugin_name', exception.invalid_property)


class WhenTestingConsumerValidator(utils.BaseTestCase):

    def setUp(self):
        super(WhenTestingConsumerValidator, self).setUp()

        self.name = 'name'
        self.URL = 'http://my.url/resource/UUID'
        self.consumer_req = {'name': self.name,
                             'URL': self.URL}
        self.validator = validators.ContainerConsumerValidator()

    def test_should_raise_with_invalid_json_data_type(self):
        self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            []
        )

    def test_should_raise_with_missing_name(self):
        consumer_req = {'URL': self.URL}
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            consumer_req
        )
        self.assertIn('\'name\'', exception.args[0])

    def test_should_raise_with_missing_URL(self):
        consumer_req = {'name': self.name}
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            consumer_req
        )
        self.assertIn('\'URL\'', exception.args[0])

    def test_should_validate_all_fields(self):
        self.validator.validate(self.consumer_req)

    def test_name_too_long_should_raise_with_invalid_object(self):
        # Negative test to make sure the maxLength constraint on the name
        # field raises the proper exception when a value longer than 255
        # characters is passed in.
        longname = 'a' * 256
        consumer_req = {'name': longname, 'url': self.URL}
        self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            consumer_req
        )
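
# A minimal sketch (not the real schema) of what the consumer tests above
# pin down: 'name' and 'URL' are both required, and the name is capped at
# 255 characters; the real check appears to be a JSON-schema validation.
def _demo_consumer_req_is_plausible(consumer_req):
    return ('name' in consumer_req
            and 'URL' in consumer_req
            and len(consumer_req['name']) <= 255)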
self.validator.validate, self.key_order_req) self.assertEqual('meta', exception.invalid_property) def test_should_raise_with_no_algorithm_in_order_refs(self): del self.key_order_req['meta']['algorithm'] self.assertRaises(excep.InvalidObject, self.validator.validate, self.key_order_req) def test_should_raise_with_no_bit_length_in_order_refs(self): del self.key_order_req['meta']['bit_length'] exception = self.assertRaises(excep.InvalidObject, self.validator.validate, self.key_order_req) self.assertIn("bit_length' is required field for key type order", six.text_type(exception)) def test_should_raise_with_zero_bit_length_in_order_refs(self): self.key_order_req['meta']['bit_length'] = 0 exception = self.assertRaises(excep.InvalidObject, self.validator.validate, self.key_order_req) self.assertEqual('bit_length', exception.invalid_property) def test_should_raise_with_negative_bit_length_in_order_refs(self): self.key_order_req['meta']['bit_length'] = -1 exception = self.assertRaises(excep.InvalidObject, self.validator.validate, self.key_order_req) self.assertEqual('bit_length', exception.invalid_property) def test_should_raise_with_wrong_exp_meta_in_order_refs(self): self.key_order_req['meta']['algorithm'] = 'AES' self.key_order_req['meta']['expiration'] = '2014-02-28T19:14:44.180394' exception = self.assertRaises(excep.InvalidObject, self.validator.validate, self.key_order_req) self.assertEqual('expiration', exception.invalid_property) def test_should_not_raise_correct_hmac_order_refs(self): self.key_order_req['meta']['algorithm'] = 'hmacsha1' del self.key_order_req['meta']['mode'] result = self.validator.validate(self.key_order_req) self.assertIsNotNone(result) self.assertEqual('hmacsha1', result['meta']['algorithm']) def test_should_raise_with_payload_in_order(self): self.key_order_req['meta']['payload'] = 'payload' self.assertRaises(excep.InvalidObject, self.validator.validate, self.key_order_req) class WhenTestingAsymmetricTypeOrderValidator(utils.BaseTestCase): def setUp(self): super(WhenTestingAsymmetricTypeOrderValidator, self).setUp() self.type = 'asymmetric' self.meta = {"name": "secretname", "algorithm": "RSA", "bit_length": 256} self.asymmetric_order_req = {'type': self.type, 'meta': self.meta} self.validator = validators.TypeOrderValidator() def test_should_pass_good_meta_in_order_refs(self): result = self.validator.validate(self.asymmetric_order_req) self.assertIsNone(result['meta']['expiration']) def test_should_raise_with_no_algorithm_in_order_refs(self): del self.asymmetric_order_req['meta']['algorithm'] self.assertRaises(excep.InvalidObject, self.validator.validate, self.asymmetric_order_req) def test_should_raise_with_payload_in_order(self): self.asymmetric_order_req['meta']['payload'] = 'payload' self.assertRaises(excep.InvalidObject, self.validator.validate, self.asymmetric_order_req) def test_should_pass_with_wrong_algorithm_in_asymmetric_order_refs(self): # Note (atiwari): because validator should not check # algorithm but that should checked at crypto_plugin # supports method. 
self.asymmetric_order_req['meta']['algorithm'] = 'aes' result = self.validator.validate(self.asymmetric_order_req) self.assertIsNone(result['meta']['expiration']) def test_should_raise_with_no_bit_length_in_asymmetric_order_refs(self): del self.asymmetric_order_req['meta']['bit_length'] exception = self.assertRaises(excep.InvalidObject, self.validator.validate, self.asymmetric_order_req) self.assertIn( "bit_length' is required field for asymmetric key type order", six.text_type(exception)) def test_should_raise_with_zero_bit_length_in_asymmetric_order_refs(self): self.asymmetric_order_req['meta']['bit_length'] = 0 exception = self.assertRaises(excep.InvalidObject, self.validator.validate, self.asymmetric_order_req) self.assertEqual("bit_length", exception.invalid_property) def test_should_raise_with_negative_bit_len_in_asymmetric_order_refs(self): self.asymmetric_order_req['meta']['bit_length'] = -1 exception = self.assertRaises(excep.InvalidObject, self.validator.validate, self.asymmetric_order_req) self.assertEqual("bit_length", exception.invalid_property) class WhenTestingSimpleCMCOrderValidator(utils.BaseTestCase): def setUp(self): super(WhenTestingSimpleCMCOrderValidator, self).setUp() self.type = 'certificate' request_data = base64.encode_as_text(certs.create_good_csr()) self.meta = {'request_type': 'simple-cmc', 'request_data': request_data, 'requestor_name': 'Barbican User', 'requestor_email': 'barbican_user@example.com', 'requestor_phone': '555-1212'} self._set_order() self.validator = validators.TypeOrderValidator() def _set_order(self): self.order_req = {'type': self.type, 'meta': self.meta} def test_should_pass_good_data(self): self.validator.validate(self.order_req) def test_should_raise_with_no_metadata(self): self.order_req = {'type': self.type} self.assertRaises(excep.InvalidObject, self.validator.validate, self.order_req) def test_should_raise_with_bad_request_type(self): self.meta['request_type'] = 'bad_request_type' self._set_order() self.assertRaises(excep.InvalidCertificateRequestType, self.validator.validate, self.order_req) def test_should_raise_with_no_request_data(self): del self.meta['request_data'] self._set_order() self.assertRaises(excep.MissingMetadataField, self.validator.validate, self.order_req) def test_should_raise_with_pkcs10_data_with_bad_base64(self): self.meta['request_data'] = certs.create_bad_csr() self._set_order() self.assertRaises(excep.PayloadDecodingError, self.validator.validate, self.order_req) def test_should_raise_with_bad_pkcs10_data(self): request_data = base64.encode_as_text(certs.create_bad_csr()) self.meta['request_data'] = request_data self._set_order() self.assertRaises(excep.InvalidPKCS10Data, self.validator.validate, self.order_req) def test_should_raise_with_signed_wrong_key_pkcs10_data(self): self.meta['request_data'] = base64.encode_as_text( certs.create_csr_signed_with_wrong_key()) self._set_order() self.assertRaises(excep.InvalidPKCS10Data, self.validator.validate, self.order_req) def test_should_raise_with_unsigned_pkcs10_data(self): self.meta['request_data'] = base64.encode_as_text( certs.create_csr_that_has_not_been_signed()) self._set_order() self.assertRaises(excep.InvalidPKCS10Data, self.validator.validate, self.order_req) def test_should_raise_with_payload_in_order(self): self.meta['payload'] = 'payload' self.assertRaises(excep.InvalidObject, self.validator.validate, self.order_req) class WhenTestingFullCMCOrderValidator(utils.BaseTestCase): def setUp(self): super(WhenTestingFullCMCOrderValidator, self).setUp() 
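# For reference, the certificate order body these validator tests
# build has roughly this shape (values are illustrative):
#
#     {'type': 'certificate',
#      'meta': {'request_type': 'full-cmc',
#               'request_data': '<base64-encoded CMC blob>',
#               'requestor_name': '...'}}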
self.type = 'certificate' self.meta = {'request_type': 'full-cmc', 'request_data': VALID_FULL_CMC, 'requestor_name': 'Barbican User', 'requestor_email': 'barbican_user@example.com', 'requestor_phone': '555-1212'} self._set_order() self.validator = validators.TypeOrderValidator() def _set_order(self): self.order_req = {'type': self.type, 'meta': self.meta} def test_should_raise_not_yet_implemented(self): self.assertRaises(excep.FullCMCNotSupported, self.validator.validate, self.order_req) @testtools.skip("Feature not yet implemented") def test_should_pass_good_data(self): self.validator.validate(self.order_req) @testtools.skip("Feature not yet implemented") def test_should_raise_with_no_request_data(self): del self.meta['request_data'] self._set_order() self.assertRaises(excep.MissingMetadataField, self.validator.validate, self.order_req) @testtools.skip("Not yet implemented") def test_should_raise_with_bad_cmc_data(self): self.meta['request_data'] = 'Bad CMC Data' self._set_order() self.assertRaises(excep.InvalidCMCData, self.validator.validate, self.order_req) class WhenTestingCustomOrderValidator(utils.BaseTestCase): def setUp(self): super(WhenTestingCustomOrderValidator, self).setUp() self.type = 'certificate' self.meta = {'request_type': 'custom', 'ca_param_1': 'value_1', 'ca_param_2': 'value_2', 'requestor_name': 'Barbican User', 'requestor_email': 'barbican_user@example.com', 'requestor_phone': '555-1212'} self._set_order() self.validator = validators.TypeOrderValidator() def _set_order(self): self.order_req = {'type': self.type, 'meta': self.meta} def test_should_pass_good_data(self): self.validator.validate(self.order_req) def test_should_pass_with_no_request_type(self): # defaults to custom del self.meta['request_type'] self._set_order() self.validator.validate(self.order_req) class WhenTestingStoredKeyOrderValidator(utils.BaseTestCase): def setUp(self): super(WhenTestingStoredKeyOrderValidator, self).setUp() self.type = 'certificate' self.meta = {'request_type': 'stored-key', 'container_ref': 'https://localhost/v1/containers/good_container_ref', 'subject_dn': 'cn=barbican-server,o=example.com', 'requestor_name': 'Barbican User', 'requestor_email': 'barbican_user@example.com', 'requestor_phone': '555-1212'} self.order_req = {'type': self.type, 'meta': self.meta} self.validator = validators.TypeOrderValidator() def test_should_pass_good_data(self): self.validator.validate(self.order_req) def test_should_raise_with_no_container_ref(self): del self.meta['container_ref'] self.assertRaises(excep.MissingMetadataField, self.validator.validate, self.order_req) def test_should_raise_with_no_subject_dn(self): del self.meta['subject_dn'] self.assertRaises(excep.MissingMetadataField, self.validator.validate, self.order_req) def test_should_pass_with_profile_and_ca_id(self): self.meta['ca_id'] = 'my_ca_id' self.meta['profile'] = 'my_profile' self.validator.validate(self.order_req) def test_should_raise_with_profile_and_no_ca_id(self): self.meta['profile'] = 'my_profile' self.assertRaises(excep.MissingMetadataField, self.validator.validate, self.order_req) def test_should_raise_with_extensions_data(self): self.meta['extensions'] = VALID_EXTENSIONS self.assertRaises(excep.CertificateExtensionsNotSupported, self.validator.validate, self.order_req) @testtools.skip("Not yet implemented") def test_should_raise_with_bad_extensions_data(self): self.meta['extensions'] = 'Bad extensions data' self.assertRaises(excep.InvalidExtensionsData, self.validator.validate, self.order_req) def 
test_should_pass_with_one_cn_in_dn(self): self.meta['subject_dn'] = "CN=example1" self.validator.validate(self.order_req) def test_should_pass_with_two_cn_in_dn(self): self.meta['subject_dn'] = "CN=example1,CN=example2" self.validator.validate(self.order_req) def test_should_raise_with_blank_dn(self): self.meta['subject_dn'] = "" self.assertRaises(excep.InvalidSubjectDN, self.validator.validate, self.order_req) def test_should_raise_with_bad_subject_dn(self): self.meta['subject_dn'] = "Bad subject DN data" self.assertRaises(excep.InvalidSubjectDN, self.validator.validate, self.order_req) def test_should_raise_with_payload_in_order(self): self.meta['payload'] = 'payload' self.assertRaises(excep.InvalidObject, self.validator.validate, self.order_req) @utils.parameterized_test_case class WhenTestingAclValidator(utils.BaseTestCase): def setUp(self): super(WhenTestingAclValidator, self).setUp() self.validator = validators.ACLValidator() @utils.parameterized_dataset({ 'one_reader': [{'read': {'users': ['reader'], 'project-access': True}}], 'two_reader': [{'read': {'users': ['r1', 'r2'], 'project-access': True}}], 'private': [{'read': {'users': [], 'project-access': False}}], 'default_users': [{'read': {'project-access': False}}], 'default_creator': [{'read': {'users': ['reader']}}], 'almost_empty': [{'read': {}}], 'empty': [{}], }) def test_should_validate(self, acl_req): self.validator.validate(acl_req) @utils.parameterized_dataset({ 'foo': ['foo'], 'bad_op': [{'bad_op': {'users': ['reader'], 'project-access': True}}], 'bad_field': [{'read': {'bad_field': ['reader'], 'project-access': True}}], 'bad_user': [{'read': {'users': [27], 'project-access': True}}], 'missing_op': [{'project-access': False}], }) def test_should_raise(self, acl_req): self.assertRaises(excep.InvalidObject, self.validator.validate, acl_req) @utils.parameterized_dataset({ 'write': [{'write': {'users': ['writer'], 'project-access': True}}], 'list': [{'list': {'users': ['lister'], 'project-access': True}}], 'delete': [{'delete': {'users': ['deleter'], 'project-access': True}}], }) def test_should_raise_future(self, acl_req): self.assertRaises(excep.InvalidObject, self.validator.validate, acl_req) class WhenTestingProjectQuotasValidator(utils.BaseTestCase): def setUp(self): super(WhenTestingProjectQuotasValidator, self).setUp() self.good_project_quotas = {"project_quotas": {"secrets": 50, "orders": 10, "containers": 20, "cas": 30}} self.bad_project_quotas = {"bad key": "bad value"} self.validator = validators.ProjectQuotaValidator() def test_should_pass_good_data(self): self.validator.validate(self.good_project_quotas) def test_should_pass_empty_properties(self): self.validator.validate({"project_quotas": {}}) def test_should_raise_bad_data(self): self.assertRaises(excep.InvalidObject, self.validator.validate, self.bad_project_quotas) def test_should_raise_empty_dict(self): self.assertRaises(excep.InvalidObject, self.validator.validate, {}) def test_should_raise_secrets_non_int(self): self.good_project_quotas['project_quotas']['secrets'] = "abc" self.assertRaises(excep.InvalidObject, self.validator.validate, self.good_project_quotas) def test_should_raise_orders_non_int(self): self.good_project_quotas['project_quotas']['orders'] = "abc" self.assertRaises(excep.InvalidObject, self.validator.validate, self.good_project_quotas) def test_should_raise_containers_non_int(self): self.good_project_quotas['project_quotas']['containers'] = "abc" self.assertRaises(excep.InvalidObject, self.validator.validate, self.good_project_quotas) def 
test_should_raise_cas_non_int(self): self.good_project_quotas['project_quotas']['cas'] = "abc" self.assertRaises(excep.InvalidObject, self.validator.validate, self.good_project_quotas) @utils.parameterized_test_case class WhenTestingNewCAValidator(utils.BaseTestCase): def setUp(self): super(WhenTestingNewCAValidator, self).setUp() self.new_ca_req = {'name': 'New CA', 'subject_dn': 'cn=barbican-server,o=example.com', 'parent_ca_ref': 'https://localhost/v1/cas/parent_ca_id', 'description': 'This is a subCA'} self.validator = validators.NewCAValidator() def test_should_raise_with_empty_data(self): self.assertRaises( excep.InvalidObject, self.validator.validate, {} ) @utils.parameterized_dataset({ 'name': ['name'], 'subject_dn': ['subject_dn'], 'parent_ca_ref': ['parent_ca_ref'], }) def should_raise_if_any_required_parameter_is_missing(self, parameter): del self.new_ca_req[parameter] exception = self.assertRaises( excep.InvalidObject, self.validator.validate, self.new_ca_req ) self.assertEqual(parameter, exception.invalid_property) @utils.parameterized_dataset({ 'name': ['name'], 'subject_dn': ['subject_dn'], 'parent_ca_ref': ['parent_ca_ref'], }) def should_raise_if_any_required_parameter_is_empty(self, parameter): self.new_ca_req[parameter] = '' exception = self.assertRaises( excep.InvalidObject, self.validator.validate, self.new_ca_req ) self.assertEqual(parameter, exception.invalid_property) def test_should_pass_with_valid_data(self): self.validator.validate(self.new_ca_req) def test_should_raise_with_invalid_subject_dn(self): self.new_ca_req['subject_dn'] = 'I am an invalid subject_dn!' self.assertRaises( excep.InvalidSubjectDN, self.validator.validate, self.new_ca_req ) @utils.parameterized_test_case class WhenTestingSecretMetadataValidator(utils.BaseTestCase): def setUp(self): super(WhenTestingSecretMetadataValidator, self).setUp() self.top_key = 'metadata' self.key1 = 'city' self.value1 = 'Austin' self.key2 = 'state' self.value2 = 'Texas' self.key3 = 'country' self.value3 = 'USA' self.metadata_req = { self.top_key: { self.key1: self.value1, self.key2: self.value2, self.key3: self.value3 } } self.validator = validators.NewSecretMetadataValidator() def test_should_validate_all_fields(self): self.validator.validate(self.metadata_req) def test_should_validate_all_fields_and_make_key_lowercase(self): self.key1 = "DOgg" self.value1 = "poodle" self.metadata_req = { self.top_key: { self.key1: self.value1, self.key2: self.value2, self.key3: self.value3 } } metadata = self.validator.validate(self.metadata_req) self.assertNotIn("DOgg", metadata.keys()) self.assertIn("dogg", metadata.keys()) def test_should_validate_no_keys(self): del self.metadata_req[self.top_key][self.key1] del self.metadata_req[self.top_key][self.key2] del self.metadata_req[self.top_key][self.key3] self.validator.validate(self.metadata_req) def test_should_raise_invalid_key_no_metadata(self): del self.metadata_req[self.top_key] exception = self.assertRaises(excep.InvalidObject, self.validator.validate, self.metadata_req) self.assertIn("metadata' is a required property", six.text_type(exception)) def test_should_raise_invalid_key_non_string(self): self.key1 = 0 metadata_req = { self.top_key: { self.key1: self.value1 } } exception = self.assertRaises(excep.InvalidMetadataRequest, self.validator.validate, metadata_req) self.assertIn("Invalid Metadata. 
Keys and Values must be Strings.", six.text_type(exception)) def test_should_raise_invalid_key_non_url_safe_string(self): self.key1 = "key/01" metadata_req = { self.top_key: { self.key1: self.value1 } } exception = self.assertRaises(excep.InvalidMetadataKey, self.validator.validate, metadata_req) self.assertIn("Invalid Key. Key must be URL safe.", six.text_type(exception)) def test_should_raise_invalid_value_non_string(self): self.value1 = 0 metadata_req = { self.top_key: { self.key1: self.value1 } } exception = self.assertRaises(excep.InvalidMetadataRequest, self.validator.validate, metadata_req) self.assertIn("Invalid Metadata. Keys and Values must be Strings.", six.text_type(exception)) @utils.parameterized_test_case class WhenTestingSecretMetadatumValidator(utils.BaseTestCase): def setUp(self): super(WhenTestingSecretMetadatumValidator, self).setUp() self.key1 = 'key' self.value1 = 'city' self.key2 = 'value' self.value2 = 'Austin' self.metadata_req = { self.key1: self.value1, self.key2: self.value2 } self.validator = validators.NewSecretMetadatumValidator() def test_should_validate_all_fields(self): self.validator.validate(self.metadata_req) def test_should_validate_all_fields_and_make_key_lowercase(self): self.value1 = "DOgg" self.value2 = "poodle" self.metadata_req = { self.key1: self.value1, self.key2: self.value2 } metadata = self.validator.validate(self.metadata_req) self.assertEqual("dogg", metadata['key']) def test_should_raise_invalid_empty(self): del self.metadata_req[self.key1] del self.metadata_req[self.key2] exception = self.assertRaises(excep.InvalidObject, self.validator.validate, self.metadata_req) self.assertIn("Provided object does not match schema " "'SecretMetadatum'", six.text_type(exception)) def test_should_raise_invalid_key_no_key(self): del self.metadata_req[self.key2] exception = self.assertRaises(excep.InvalidObject, self.validator.validate, self.metadata_req) self.assertIn("Provided object does not match schema " "'SecretMetadatum'", six.text_type(exception)) def test_should_raise_invalid_key_no_value(self): del self.metadata_req[self.key1] exception = self.assertRaises(excep.InvalidObject, self.validator.validate, self.metadata_req) self.assertIn("Provided object does not match schema " "'SecretMetadatum'", six.text_type(exception)) def test_should_raise_invalid_key_non_string(self): self.value1 = 0 metadata_req = { self.key1: self.value1, self.key2: self.value2 } exception = self.assertRaises(excep.InvalidObject, self.validator.validate, metadata_req) self.assertIn("Provided object does not match schema " "'SecretMetadatum'", six.text_type(exception)) def test_should_raise_invalid_key_non_url_safe_string(self): self.value1 = "key/01" metadata_req = { self.key1: self.value1, self.key2: self.value2 } exception = self.assertRaises(excep.InvalidMetadataKey, self.validator.validate, metadata_req) self.assertIn("Invalid Key. 
Key must be URL safe.", six.text_type(exception)) def test_should_raise_invalid_value_non_string(self): self.value2 = 0 metadata_req = { self.key1: self.value1, self.key2: self.value2 } exception = self.assertRaises(excep.InvalidObject, self.validator.validate, metadata_req) self.assertIn("Provided object does not match schema " "'SecretMetadatum'", six.text_type(exception)) def test_should_raise_invalid_extra_sent_key(self): self.value2 = 0 metadata_req = { self.key1: self.value1, self.key2: self.value2, "extra_key": "extra_value" } exception = self.assertRaises(excep.InvalidObject, self.validator.validate, metadata_req) self.assertIn("Provided object does not match schema " "'SecretMetadatum'", six.text_type(exception)) class WhenTestingSecretConsumerValidator(utils.BaseTestCase): def setUp(self): super(WhenTestingSecretConsumerValidator, self).setUp() self.service = "service" self.resource_type = "resource_type" self.resource_id = "resource_id" self.consumer_req = { "service": self.service, "resource_type": self.resource_type, "resource_id": self.resource_id, } self.validator = validators.SecretConsumerValidator() def test_should_raise_with_invalid_json_data_type(self): self.assertRaises( excep.InvalidObject, self.validator.validate, [] ) def test_should_raise_with_missing_service(self): self.consumer_req.pop("service") exception = self.assertRaises( excep.InvalidObject, self.validator.validate, self.consumer_req ) self.assertIn('\'service\'', exception.args[0]) def test_should_raise_with_missing_resource_type(self): self.consumer_req.pop("resource_type") exception = self.assertRaises( excep.InvalidObject, self.validator.validate, self.consumer_req ) self.assertIn('\'resource_type\'', exception.args[0]) def test_should_raise_with_missing_resource_id(self): self.consumer_req.pop("resource_id") exception = self.assertRaises( excep.InvalidObject, self.validator.validate, self.consumer_req ) self.assertIn('\'resource_id\'', exception.args[0]) def test_should_validate_all_fields(self): self.validator.validate(self.consumer_req) def test_service_too_long_should_raise_with_invalid_object(self): # Negative test to make sure our maxLength parameter for the # service field raises the proper exception when a value greater # than 255 in this case is passed in. self.consumer_req["service"] = 'a' * 256 self.assertRaises( excep.InvalidObject, self.validator.validate, self.consumer_req ) def test_resource_type_too_long_should_raise_with_invalid_object(self): # Negative test to make sure our maxLength parameter for the # service field raises the proper exception when a value greater # than 255 in this case is passed in. self.consumer_req["resource_type"] = 'a' * 256 self.assertRaises( excep.InvalidObject, self.validator.validate, self.consumer_req ) if __name__ == '__main__': unittest.main() barbican-9.1.0.dev50/barbican/tests/common/test_utils.py0000664000175000017500000001666113616500636023374 0ustar sahidsahid00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
import mock from oslo_config import cfg import six from barbican.common import config from barbican.common import utils from barbican.tests import utils as test_utils class WhenTestingHostnameForRefsGetter(test_utils.BaseTestCase): def setUp(self): super(WhenTestingHostnameForRefsGetter, self).setUp() self.host = 'host' self.version = 'version' self.external_project_id = 'external_project_id' self.resource = 'resource' self._old_host = utils.CONF.host_href self._old_version = utils.API_VERSION utils.CONF.set_override('host_href', self.host) utils.API_VERSION = self.version def tearDown(self): super(WhenTestingHostnameForRefsGetter, self).tearDown() utils.CONF.clear_override('host_href') utils.API_VERSION = self._old_version def test_hostname_for_refs(self): uri = utils.hostname_for_refs(resource=self.resource) self.assertEqual("{0}/{1}/{2}".format(self.host, self.version, self.resource), uri) def test_hostname_for_refs_no_resource(self): uri = utils.hostname_for_refs() self.assertEqual("{0}/{1}".format(self.host, self.version), uri) class WhenTestingHostByWsgiRequestForRefsGetter(test_utils.BaseTestCase): def setUp(self): super(WhenTestingHostByWsgiRequestForRefsGetter, self).setUp() self._old_version = utils.API_VERSION self.host = 'http://my_host:9311' self.version = 'version1' self.external_project_id = 'external_project_id' self.resource = 'my_resource' test_utils.mock_pecan_request(self, host=self.host) utils.CONF.set_override('host_href', None) utils.API_VERSION = self.version def tearDown(self): super(WhenTestingHostByWsgiRequestForRefsGetter, self).tearDown() utils.CONF.clear_override('host_href') utils.API_VERSION = self._old_version def test_hostname_for_refs(self): uri = utils.hostname_for_refs(resource=self.resource) self.assertEqual("{0}/{1}/{2}".format(self.host, self.version, self.resource), uri) def test_blank_conf_hosthref_for_refs(self): utils.CONF.set_override('host_href', '') uri = utils.hostname_for_refs(resource=self.resource) self.assertEqual("{0}/{1}/{2}".format(self.host, self.version, self.resource), uri) def test_hostname_for_refs_no_resource(self): uri = utils.hostname_for_refs() self.assertEqual("{0}/{1}".format(self.host, self.version), uri) class WhenTestingAcceptEncodingGetter(test_utils.BaseTestCase): def setUp(self): super(WhenTestingAcceptEncodingGetter, self).setUp() self.req = mock.Mock() def test_parses_accept_encoding_header(self): self.req.get_header.return_value = '*' ae = utils.get_accepted_encodings(self.req) self.req.get_header.assert_called_once_with('Accept-Encoding') self.assertEqual(['*'], ae) def test_returns_none_for_empty_encoding(self): self.req.get_header.return_value = None ae = utils.get_accepted_encodings(self.req) self.assertIsNone(ae) def test_parses_single_accept_with_quality_value(self): self.req.get_header.return_value = 'base64;q=0.7' ae = utils.get_accepted_encodings(self.req) self.assertEqual(['base64'], ae) def test_parses_more_than_one_encoding(self): self.req.get_header.return_value = 'base64, gzip' ae = utils.get_accepted_encodings(self.req) self.assertEqual(['base64', 'gzip'], ae) def test_can_sort_by_quality_value(self): self.req.get_header.return_value = 'base64;q=0.5, gzip;q=0.6, compress' ae = utils.get_accepted_encodings(self.req) self.assertEqual(['compress', 'gzip', 'base64'], ae) def test_returns_none_on_invalid_quality_type(self): self.req.get_header.return_value = 'base64;q=three' ae = utils.get_accepted_encodings(self.req) self.assertIsNone(ae) def test_returns_none_on_quality_too_large(self): 
self.req.get_header.return_value = 'base64;q=1.1' ae = utils.get_accepted_encodings(self.req) self.assertIsNone(ae) def test_returns_none_on_quality_too_small(self): self.req.get_header.return_value = 'base64;q=-0.1' ae = utils.get_accepted_encodings(self.req) self.assertIsNone(ae) def test_ignores_encoding_with_zero_quality_value(self): self.req.get_header.return_value = 'base64;q=0.5, gzip;q=0.0, compress' ae = utils.get_accepted_encodings(self.req) self.assertEqual(['compress', 'base64'], ae) class WhenTestingGenerateFullClassnameForInstance(test_utils.BaseTestCase): def setUp(self): super(WhenTestingGenerateFullClassnameForInstance, self).setUp() self.instance = test_utils.DummyClassForTesting() def test_get_fullname_for_null_instance_raises_exception(self): self.assertRaises(ValueError, utils.generate_fullname_for, None) def test_get_fullname_for_string_doesnt_include_module(self): test_string = "foo" fullname = utils.generate_fullname_for(test_string) self.assertEqual(0, fullname.count(".")) self.assertNotIn(six.moves.builtins.__name__, fullname) def test_returns_class_name_on_null_module(self): self.instance.__class__.__module__ = None name = utils.generate_fullname_for(self.instance) self.assertEqual('DummyClassForTesting', name) def test_returns_qualified_name(self): self.instance.__class__.__module__ = 'dummy' name = utils.generate_fullname_for(self.instance) self.assertEqual('dummy.DummyClassForTesting', name) class TestConfigValues(test_utils.BaseTestCase): def setUp(self): super(TestConfigValues, self).setUp() self.barbican_config = config.CONF self.oslo_config = cfg.CONF def test_barbican_conf_values_made_visible_to_oslo_conf(self): """Check that oslo CONF values match the barbican config values. This test shows that values referenced via oslo_config.cfg.CONF are the same as those referenced via barbican.common.config.CONF. """ # Check that the 'admin_role' value referenced via # barbican.common.config.CONF is the same as via oslo_config.cfg.CONF self.assertEqual('admin', self.barbican_config._get('admin_role')) self.assertEqual('admin', self.barbican_config.admin_role) self.assertEqual('admin', self.oslo_config._get('admin_role')) self.assertEqual('admin', self.oslo_config.admin_role) # No error in getting the 'project' value via either config reading # mechanism self.assertEqual('barbican', self.barbican_config.project) self.assertEqual('barbican', self.oslo_config.project) barbican-9.1.0.dev50/barbican/tests/keys.py0000664000175000017500000005123713616500636020656 0ustar sahidsahid00000000000000# Copyright (c) 2015 Cisco Systems # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. def get_private_key_pem(): """Returns a private key in PKCS#8 format This key was created by issuing the following openssl commands: openssl genrsa -out private.pem 2048 openssl pkcs8 -topk8 -nocrypt -in private.pem -out private.pk8 The byte string returned by this function is the contents of the private.pk8 file.
""" return b"""-----BEGIN PRIVATE KEY----- MIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQCza2VoDXmBUMmw jFu9F6MM5q/AZ1WjnWA2YNdNy237TrGN/nobDDv8FBBpUPmHNZ04H1LyxFcP8ReF rcIXpifsReu2lAWaqRPxovu5CuAhfecKv+RhjLVLJ0I+MZIb72ROKpfZTmb7dhlF gGD3vkC51BCfhGVW35w52OY/23x5MeO4yvx5myPccnxMVQ42KuDrzKqjBlSjmBnc pGYx0JgCT+syFmHsl8rOkqCPPFLo24YQn+4/pr1AYwaZAbMTl9zoLtEQj6sxScuH cS9e8niptDxlsbLQgqGVaGdE117stC95QH7UvITbuYzdjZwBFc1Sgz8GZ/2hLSsH ujJiIQcvAgMBAAECggEAMOlUKbuSpigp85Ev6Sqqbnfs7Zy+Ae6DLg/UYgbVIq9f RABdtUXujFfD6ZIDlFKPW59ec4QG3/evm+e0g9HuDEE7cviDVphFMZhm2xkV5Mt3 0rxhPB6pxaUcL+w/kpH+XDjMUJdJB8A4P3Qx+xfIeWBQb8wd/ELVSgfRLRNeqYL0 0KXVs04/FOBEhqSiqi/oHYJ4gxNrSoINX71PHVbaEikIygzi4HZVyMut3LE6ceHz fSj71ftn+Ui0TzkLOb+NoBP31haHC/sfCrpKg7QtUP9q9dRq6dZcI17q5d7oEdET eDRKhT2vm7bx2bLGeF1w2H9B/V81upjiAah2RVnecQKBgQDsfHSjR1gd+SHw/2A9 SaXS1k9LeXLt+UbDQdbjYOsh5LoT+EN/utO70RyDYqjlhzqJzciKTuAW5SVPC6gQ uCppA29Kntq7x1+Lw/4wG947poXb60tLdg3BK5mBFTORk5ATqAwVq7t+2NtS5S/J unzs5xrRolDFnSX4KnvVl6Jj3QKBgQDCOXZTVXRPEFhnqnqLErZe6EJkySwG8wgt OdCmr660bocY1i9vV+RaM1iARHX6u/suMhkz+3KRinzxIG5gQsyiWmTpFV298W9v kRtmsCQDn2my90yv4e6sLI0ng7l/N3r7CwLLNIV/CqeyaN40suzE8AjgEga5jTua 6bP5m+x8ewKBgQCeuW3DxXfkLjnUumMK36qX11XDb5FvHjebiE5FsOBAkHdAPgp3 6ZqBXfoISSjZXakxotft1MDdPRGMe2NjTWjRsQd6iyJ+lHORqIusGJhRaxQ/Ji8U R/k1ZSETnXpORD+YodrylKA0pDKY8dDgUfXVP8wlVg9mg3JfnYweMTdCVQKBgQCx 133iNmgmkTfxzGci+wJkitVohdA7mMOO7daBGnKlImOvuUd784XTlhpecNF6wi/w D82GDKLOY3meLO0EVYYczxqBVqAccXtxM/RcJcMEUi6twcXFcuJhYvXpDbOHqlyA jIeFW9U1C6OcOGvm40Lr3UKzMa5Yrtq6MW4ri7uSCwKBgQDfdqVjT4uXmGwOh1z4 Pzv6GCoc+6GobXg4DvvCUjP9MR+2+5sX0AY/f+aVCD05/Nj0RqpAwUc03zZU5ZtL 2uNe6XDjEugfFtlzea6+rbD6KpFS+nxPJA8YyWYRpNhpRWGWQakHedr3BtMtGs0h pKNAQG72HKWtSfJQMXvn2RlicA== -----END PRIVATE KEY----- """ def get_private_key_der(): """Returns a private key in DER format This key was created by issuing the following openssl commands: openssl genrsa -out private.pem 2048 openssl pkcs8 -in private.pem -topk8 -nocrypt \ -outform DER -out private_pk8.der The byte string returned by this function is the contents of the private_pk8.der file. 
""" key_der = ( b'\x30\x82\x04\xbf\x02\x01\x00\x30\x0d\x06\x09\x2a\x86\x48\x86' b'\xf7\x0d\x01\x01\x01\x05\x00\x04\x82\x04\xa9\x30\x82\x04\xa5' b'\x02\x01\x00\x02\x82\x01\x01\x00\xb3\x6b\x65\x68\x0d\x79\x81' b'\x50\xc9\xb0\x8c\x5b\xbd\x17\xa3\x0c\xe6\xaf\xc0\x67\x55\xa3' b'\x9d\x60\x36\x60\xd7\x4d\xcb\x6d\xfb\x4e\xb1\x8d\xfe\x7a\x1b' b'\x0c\x3b\xfc\x14\x10\x69\x50\xf9\x87\x35\x9d\x38\x1f\x52\xf2' b'\xc4\x57\x0f\xf1\x17\x85\xad\xc2\x17\xa6\x27\xec\x45\xeb\xb6' b'\x94\x05\x9a\xa9\x13\xf1\xa2\xfb\xb9\x0a\xe0\x21\x7d\xe7\x0a' b'\xbf\xe4\x61\x8c\xb5\x4b\x27\x42\x3e\x31\x92\x1b\xef\x64\x4e' b'\x2a\x97\xd9\x4e\x66\xfb\x76\x19\x45\x80\x60\xf7\xbe\x40\xb9' b'\xd4\x10\x9f\x84\x65\x56\xdf\x9c\x39\xd8\xe6\x3f\xdb\x7c\x79' b'\x31\xe3\xb8\xca\xfc\x79\x9b\x23\xdc\x72\x7c\x4c\x55\x0e\x36' b'\x2a\xe0\xeb\xcc\xaa\xa3\x06\x54\xa3\x98\x19\xdc\xa4\x66\x31' b'\xd0\x98\x02\x4f\xeb\x32\x16\x61\xec\x97\xca\xce\x92\xa0\x8f' b'\x3c\x52\xe8\xdb\x86\x10\x9f\xee\x3f\xa6\xbd\x40\x63\x06\x99' b'\x01\xb3\x13\x97\xdc\xe8\x2e\xd1\x10\x8f\xab\x31\x49\xcb\x87' b'\x71\x2f\x5e\xf2\x78\xa9\xb4\x3c\x65\xb1\xb2\xd0\x82\xa1\x95' b'\x68\x67\x44\xd7\x5e\xec\xb4\x2f\x79\x40\x7e\xd4\xbc\x84\xdb' b'\xb9\x8c\xdd\x8d\x9c\x01\x15\xcd\x52\x83\x3f\x06\x67\xfd\xa1' b'\x2d\x2b\x07\xba\x32\x62\x21\x07\x2f\x02\x03\x01\x00\x01\x02' b'\x82\x01\x00\x30\xe9\x54\x29\xbb\x92\xa6\x28\x29\xf3\x91\x2f' b'\xe9\x2a\xaa\x6e\x77\xec\xed\x9c\xbe\x01\xee\x83\x2e\x0f\xd4' b'\x62\x06\xd5\x22\xaf\x5f\x44\x00\x5d\xb5\x45\xee\x8c\x57\xc3' b'\xe9\x92\x03\x94\x52\x8f\x5b\x9f\x5e\x73\x84\x06\xdf\xf7\xaf' b'\x9b\xe7\xb4\x83\xd1\xee\x0c\x41\x3b\x72\xf8\x83\x56\x98\x45' b'\x31\x98\x66\xdb\x19\x15\xe4\xcb\x77\xd2\xbc\x61\x3c\x1e\xa9' b'\xc5\xa5\x1c\x2f\xec\x3f\x92\x91\xfe\x5c\x38\xcc\x50\x97\x49' b'\x07\xc0\x38\x3f\x74\x31\xfb\x17\xc8\x79\x60\x50\x6f\xcc\x1d' b'\xfc\x42\xd5\x4a\x07\xd1\x2d\x13\x5e\xa9\x82\xf4\xd0\xa5\xd5' b'\xb3\x4e\x3f\x14\xe0\x44\x86\xa4\xa2\xaa\x2f\xe8\x1d\x82\x78' b'\x83\x13\x6b\x4a\x82\x0d\x5f\xbd\x4f\x1d\x56\xda\x12\x29\x08' b'\xca\x0c\xe2\xe0\x76\x55\xc8\xcb\xad\xdc\xb1\x3a\x71\xe1\xf3' b'\x7d\x28\xfb\xd5\xfb\x67\xf9\x48\xb4\x4f\x39\x0b\x39\xbf\x8d' b'\xa0\x13\xf7\xd6\x16\x87\x0b\xfb\x1f\x0a\xba\x4a\x83\xb4\x2d' b'\x50\xff\x6a\xf5\xd4\x6a\xe9\xd6\x5c\x23\x5e\xea\xe5\xde\xe8' b'\x11\xd1\x13\x78\x34\x4a\x85\x3d\xaf\x9b\xb6\xf1\xd9\xb2\xc6' b'\x78\x5d\x70\xd8\x7f\x41\xfd\x5f\x35\xba\x98\xe2\x01\xa8\x76' b'\x45\x59\xde\x71\x02\x81\x81\x00\xec\x7c\x74\xa3\x47\x58\x1d' b'\xf9\x21\xf0\xff\x60\x3d\x49\xa5\xd2\xd6\x4f\x4b\x79\x72\xed' b'\xf9\x46\xc3\x41\xd6\xe3\x60\xeb\x21\xe4\xba\x13\xf8\x43\x7f' b'\xba\xd3\xbb\xd1\x1c\x83\x62\xa8\xe5\x87\x3a\x89\xcd\xc8\x8a' b'\x4e\xe0\x16\xe5\x25\x4f\x0b\xa8\x10\xb8\x2a\x69\x03\x6f\x4a' b'\x9e\xda\xbb\xc7\x5f\x8b\xc3\xfe\x30\x1b\xde\x3b\xa6\x85\xdb' b'\xeb\x4b\x4b\x76\x0d\xc1\x2b\x99\x81\x15\x33\x91\x93\x90\x13' b'\xa8\x0c\x15\xab\xbb\x7e\xd8\xdb\x52\xe5\x2f\xc9\xba\x7c\xec' b'\xe7\x1a\xd1\xa2\x50\xc5\x9d\x25\xf8\x2a\x7b\xd5\x97\xa2\x63' b'\xdd\x02\x81\x81\x00\xc2\x39\x76\x53\x55\x74\x4f\x10\x58\x67' b'\xaa\x7a\x8b\x12\xb6\x5e\xe8\x42\x64\xc9\x2c\x06\xf3\x08\x2d' b'\x39\xd0\xa6\xaf\xae\xb4\x6e\x87\x18\xd6\x2f\x6f\x57\xe4\x5a' b'\x33\x58\x80\x44\x75\xfa\xbb\xfb\x2e\x32\x19\x33\xfb\x72\x91' b'\x8a\x7c\xf1\x20\x6e\x60\x42\xcc\xa2\x5a\x64\xe9\x15\x5d\xbd' b'\xf1\x6f\x6f\x91\x1b\x66\xb0\x24\x03\x9f\x69\xb2\xf7\x4c\xaf' b'\xe1\xee\xac\x2c\x8d\x27\x83\xb9\x7f\x37\x7a\xfb\x0b\x02\xcb' b'\x34\x85\x7f\x0a\xa7\xb2\x68\xde\x34\xb2\xec\xc4\xf0\x08\xe0' b'\x12\x06\xb9\x8d\x3b\x9a\xe9\xb3\xf9\x9b\xec\x7c\x7b\x02\x81' 
b'\x81\x00\x9e\xb9\x6d\xc3\xc5\x77\xe4\x2e\x39\xd4\xba\x63\x0a' b'\xdf\xaa\x97\xd7\x55\xc3\x6f\x91\x6f\x1e\x37\x9b\x88\x4e\x45' b'\xb0\xe0\x40\x90\x77\x40\x3e\x0a\x77\xe9\x9a\x81\x5d\xfa\x08' b'\x49\x28\xd9\x5d\xa9\x31\xa2\xd7\xed\xd4\xc0\xdd\x3d\x11\x8c' b'\x7b\x63\x63\x4d\x68\xd1\xb1\x07\x7a\x8b\x22\x7e\x94\x73\x91' b'\xa8\x8b\xac\x18\x98\x51\x6b\x14\x3f\x26\x2f\x14\x47\xf9\x35' b'\x65\x21\x13\x9d\x7a\x4e\x44\x3f\x98\xa1\xda\xf2\x94\xa0\x34' b'\xa4\x32\x98\xf1\xd0\xe0\x51\xf5\xd5\x3f\xcc\x25\x56\x0f\x66' b'\x83\x72\x5f\x9d\x8c\x1e\x31\x37\x42\x55\x02\x81\x81\x00\xb1' b'\xd7\x7d\xe2\x36\x68\x26\x91\x37\xf1\xcc\x67\x22\xfb\x02\x64' b'\x8a\xd5\x68\x85\xd0\x3b\x98\xc3\x8e\xed\xd6\x81\x1a\x72\xa5' b'\x22\x63\xaf\xb9\x47\x7b\xf3\x85\xd3\x96\x1a\x5e\x70\xd1\x7a' b'\xc2\x2f\xf0\x0f\xcd\x86\x0c\xa2\xce\x63\x79\x9e\x2c\xed\x04' b'\x55\x86\x1c\xcf\x1a\x81\x56\xa0\x1c\x71\x7b\x71\x33\xf4\x5c' b'\x25\xc3\x04\x52\x2e\xad\xc1\xc5\xc5\x72\xe2\x61\x62\xf5\xe9' b'\x0d\xb3\x87\xaa\x5c\x80\x8c\x87\x85\x5b\xd5\x35\x0b\xa3\x9c' b'\x38\x6b\xe6\xe3\x42\xeb\xdd\x42\xb3\x31\xae\x58\xae\xda\xba' b'\x31\x6e\x2b\x8b\xbb\x92\x0b\x02\x81\x81\x00\xdf\x76\xa5\x63' b'\x4f\x8b\x97\x98\x6c\x0e\x87\x5c\xf8\x3f\x3b\xfa\x18\x2a\x1c' b'\xfb\xa1\xa8\x6d\x78\x38\x0e\xfb\xc2\x52\x33\xfd\x31\x1f\xb6' b'\xfb\x9b\x17\xd0\x06\x3f\x7f\xe6\x95\x08\x3d\x39\xfc\xd8\xf4' b'\x46\xaa\x40\xc1\x47\x34\xdf\x36\x54\xe5\x9b\x4b\xda\xe3\x5e' b'\xe9\x70\xe3\x12\xe8\x1f\x16\xd9\x73\x79\xae\xbe\xad\xb0\xfa' b'\x2a\x91\x52\xfa\x7c\x4f\x24\x0f\x18\xc9\x66\x11\xa4\xd8\x69' b'\x45\x61\x96\x41\xa9\x07\x79\xda\xf7\x06\xd3\x2d\x1a\xcd\x21' b'\xa4\xa3\x40\x40\x6e\xf6\x1c\xa5\xad\x49\xf2\x50\x31\x7b\xe7' b'\xd9\x19\x62\x70') return key_der def get_public_key_pem(): """Returns a public key in PEM format This key was created by issuing the following openssl commands: openssl genrsa -out private.pem 2048 openssl rsa -in private.pem -pubout > public.pem The byte string returned by this function is the contents of the public.pem file. """ return b"""-----BEGIN PUBLIC KEY----- MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAs2tlaA15gVDJsIxbvRej DOavwGdVo51gNmDXTctt+06xjf56Gww7/BQQaVD5hzWdOB9S8sRXD/EXha3CF6Yn 7EXrtpQFmqkT8aL7uQrgIX3nCr/kYYy1SydCPjGSG+9kTiqX2U5m+3YZRYBg975A udQQn4RlVt+cOdjmP9t8eTHjuMr8eZsj3HJ8TFUONirg68yqowZUo5gZ3KRmMdCY Ak/rMhZh7JfKzpKgjzxS6NuGEJ/uP6a9QGMGmQGzE5fc6C7REI+rMUnLh3EvXvJ4 qbQ8ZbGy0IKhlWhnRNde7LQveUB+1LyE27mM3Y2cARXNUoM/Bmf9oS0rB7oyYiEH LwIDAQAB -----END PUBLIC KEY----- """ def get_public_key_der(): """Returns a public key in DER format This key was created by issuing the following openssl commands: openssl genrsa -out private.pem 2048 openssl rsa -in private.pem -pubout > public.pem The byte string returned by this function is the contents of the public.der file. 
""" key_der = ( b'\x30\x82\x01\x22\x30\x0d\x06\x09\x2a\x86\x48\x86\xf7\x0d\x01' b'\x01\x01\x05\x00\x03\x82\x01\x0f\x00\x30\x82\x01\x0a\x02\x82' b'\x01\x01\x00\xb3\x6b\x65\x68\x0d\x79\x81\x50\xc9\xb0\x8c\x5b' b'\xbd\x17\xa3\x0c\xe6\xaf\xc0\x67\x55\xa3\x9d\x60\x36\x60\xd7' b'\x4d\xcb\x6d\xfb\x4e\xb1\x8d\xfe\x7a\x1b\x0c\x3b\xfc\x14\x10' b'\x69\x50\xf9\x87\x35\x9d\x38\x1f\x52\xf2\xc4\x57\x0f\xf1\x17' b'\x85\xad\xc2\x17\xa6\x27\xec\x45\xeb\xb6\x94\x05\x9a\xa9\x13' b'\xf1\xa2\xfb\xb9\x0a\xe0\x21\x7d\xe7\x0a\xbf\xe4\x61\x8c\xb5' b'\x4b\x27\x42\x3e\x31\x92\x1b\xef\x64\x4e\x2a\x97\xd9\x4e\x66' b'\xfb\x76\x19\x45\x80\x60\xf7\xbe\x40\xb9\xd4\x10\x9f\x84\x65' b'\x56\xdf\x9c\x39\xd8\xe6\x3f\xdb\x7c\x79\x31\xe3\xb8\xca\xfc' b'\x79\x9b\x23\xdc\x72\x7c\x4c\x55\x0e\x36\x2a\xe0\xeb\xcc\xaa' b'\xa3\x06\x54\xa3\x98\x19\xdc\xa4\x66\x31\xd0\x98\x02\x4f\xeb' b'\x32\x16\x61\xec\x97\xca\xce\x92\xa0\x8f\x3c\x52\xe8\xdb\x86' b'\x10\x9f\xee\x3f\xa6\xbd\x40\x63\x06\x99\x01\xb3\x13\x97\xdc' b'\xe8\x2e\xd1\x10\x8f\xab\x31\x49\xcb\x87\x71\x2f\x5e\xf2\x78' b'\xa9\xb4\x3c\x65\xb1\xb2\xd0\x82\xa1\x95\x68\x67\x44\xd7\x5e' b'\xec\xb4\x2f\x79\x40\x7e\xd4\xbc\x84\xdb\xb9\x8c\xdd\x8d\x9c' b'\x01\x15\xcd\x52\x83\x3f\x06\x67\xfd\xa1\x2d\x2b\x07\xba\x32' b'\x62\x21\x07\x2f\x02\x03\x01\x00\x01') return key_der def get_encrypted_private_key_pem(): """Returns an encrypted private key in PKCS#8 format This key was created by issuing the following openssl commands: openssl genrsa -out private.pem 2048 echo password > passphrase.txt openssl pkcs8 -topk8 -passout file:passphrase.txt \ -in private.pem -out private_encrypted.pk8 The byte string returned by this function is the contents of the private_encrypted.pk8 file. """ return b"""-----BEGIN ENCRYPTED PRIVATE KEY----- MIIE6TAbBgkqhkiG9w0BBQMwDgQIssadeQrYhhACAggABIIEyDNw3SV2b19yy4Q/ kTbtJ/p2X2zKDqr7GgLeAowqqhcMfvprI7G8C0XtwxkR4SjMZUXNcmOwQB2kNKtK ZilCz6pSx81iUj4s1fU460XkhkIeV+F7aB2PsTG1oDfPCuzKFjT6EuSE6lFUH89r TRuHWMPseW7lrvEB5kNMFag5QxeKjsSCNkZWOT74o4fh3cEplgCEaA+nCclXU79m 5rhaa9e1SUpPuPlhnAIDkBtHcC38B+SOYKQxLdZT1f72oZ1ozWJ4bEhKxvnNu5J+ tCvgWOXMIEJVGgf8Cu58PoR18LyyAIk7zza+1LkCiyuLNgiz8a1sVw8uBcrVgD5R 8f4XgI/Yjb16Bmpp/0iEjNcURaby9GnCCEc+W/ivSJTnG3o1Xn00FO98l2aggNpt S8gxK05NeCtdWoFFjTeIXxnb1ct0Iep8RwuO+FnupAf6aw12Uqj4qYNvNiY/kBhS P/Yd3KznasrolUZ9+PVTMUI45UTMN/XhNvXrozMq9nItWTV7wHyEL3mrYipvcxrm SnLlAp2zkmSu923cHN1teLE99/rV2jaBM03ROqvYWaxjfOjxfwz6PhdE8G//kWd0 tf2Om+fyCkBRxo1sUcuiE79hJXgP5KJCMbPsDyG/aQk4oeS1nbn15AhthwiU7A13 h9X6asgV2H+4Ljf+tr1b8p3qj3CSljfzoVErLqoHagjVB45WktHhrWbUSRpXSvPo Hh0LY62qxTa67gKjwarH5hYr5IaH39iR9bcyuvzE+u9TJWvWmeLJ7UmesfVPZtSf /JTpvr0zu4C95lXKt4FdxOhGcWwDN1Zp+lCsF5ruBGc+/pEggiXi1qvW9xUny1Of 8NqdPxGPb4/zPHGaysypPsc6LiY3esI8wa7FnDsS4e79dWinD/BPWEa5N2jLm0Rr njkHTy0xtnw/a8Ofrtyy9V1tBBOCaswzGIZZj6oHyFCtAvjZuYa8TWVmSi6EqJKi lY5wSdQQXg3H0HnQYivtOY1YbfjtRkUB9e4xkSVhvYJpY1QWBtApdUGBsxsELkDC 6cv/Kxnd9U7dz9+VhD0hAdrhFqbWqOEGTWt7xE44yzWokdKQWu5FsTs6gyXsGPen ZgZlR5pjPNGbMdftW0M473YyvtzjrCuSVgJspCzpA9uo6wfejaFb4RF/tcWtXglE Q5FzfsO1OZr6nONraShj9N1kxGBXUUOtAjZI/zoTWk3yndxw3IpvPtDTg9ByCp7F RFUtDyrki+YAIAiTgPq7qwc1upjU7R1Zlg4jIe0RI9A73NyLwa4QhgO+HmRBt7At LLuUeCFKuXMBHzlDaMYwq5ZPOb8VcMkhUoug2YJIc4YOOHh5O0mYnat0vaYO+A58 DiuYgxKmO5+6+OMk2ovZgk1sFawR4rk9HUt8goUUptZ+hoHUVGtte5YcQniIOcds qY3ni/zwswHWQRaAu8Ej4qJKt1XwZo2K04xHhL90TMaY8NpLSMCfVqDDL409TqIj zHUfYl6N2Me4eKc8vl6Sm63g57NzLqTttD6KSn8v+OmUF5mOQwcLnr3nK7S+BQfI DLPY1Oh7Kec/M/d1080/Qv9YBAJhz50TLKoxXwVeH4OOvuaHVaotElMkr5QEkEXl gRgwkbMrQjg0II0O9g== -----END ENCRYPTED PRIVATE KEY-----""" def get_passphrase_txt(): """Returns the plain text 
string used to encrypt the private key This key was created by issuing the following commands: echo password > passphrase.txt The byte string returned by this function is the contents of the passphrase.txt file. """ return b"""password""" def get_csr_pem(): """Returns a Certificate Signing Request (CSR) in PEM format This key was created by issuing the following openssl commands: openssl genrsa -out private.pem 2048 openssl req -new -key private.pem -out csr.pem -subj '/CN=example.com' The byte string returned by this function is the contents of the csr.pem file. """ return b"""-----BEGIN CERTIFICATE REQUEST----- MIICWzCCAUMCAQAwFjEUMBIGA1UEAwwLZXhhbXBsZS5jb20wggEiMA0GCSqGSIb3 DQEBAQUAA4IBDwAwggEKAoIBAQCza2VoDXmBUMmwjFu9F6MM5q/AZ1WjnWA2YNdN y237TrGN/nobDDv8FBBpUPmHNZ04H1LyxFcP8ReFrcIXpifsReu2lAWaqRPxovu5 CuAhfecKv+RhjLVLJ0I+MZIb72ROKpfZTmb7dhlFgGD3vkC51BCfhGVW35w52OY/ 23x5MeO4yvx5myPccnxMVQ42KuDrzKqjBlSjmBncpGYx0JgCT+syFmHsl8rOkqCP PFLo24YQn+4/pr1AYwaZAbMTl9zoLtEQj6sxScuHcS9e8niptDxlsbLQgqGVaGdE 117stC95QH7UvITbuYzdjZwBFc1Sgz8GZ/2hLSsHujJiIQcvAgMBAAGgADANBgkq hkiG9w0BAQsFAAOCAQEAPJDIxzgtUDRgpfTbTOPDJYap+Lm4jYxsCuAFbYiQ43B+ c7RyzEFOB2anrldTm3XzNytHZAkRTnN4dH09p1K1Pyepv+weSv8rvN9OohfYgpcj wQqw8ksdGb3Q6oPnTgGxmWvV4PbzHmDnOvOiQ+wuBHWXYks6tdgU7iCZ1djYibmL 1j+XEvtstou8gu1lWhzH6tStwmA9udncg5rEvfDUDyvMN3T06QFqrlK9K1TXIlbM RvUDrBjINIOuEeZ/5czjBl1CX1Z1YqdunrPiCQM4+oUAtjyD6ZAsyAEXLKdSYtKZ hSZgIl7v+UAIM+9bhpVg15aTjRzfH2OsZodFIbsMDw== -----END CERTIFICATE REQUEST-----""" def get_certificate_pem(): """Returns an X509 certificate in PEM format This certificate was created by issuing the following openssl commands: openssl genrsa -out private.pem 2048 openssl req -new -x509 -key private.pem -out cert.pem \ -days 1000 -subj '/CN=example.com' The byte string returned by this function is the contents of the cert.pem file. """ return b"""-----BEGIN CERTIFICATE----- MIIC/zCCAeegAwIBAgIJAOLqXKJ9q9/nMA0GCSqGSIb3DQEBCwUAMBYxFDASBgNV BAMMC2V4YW1wbGUuY29tMB4XDTE1MDQxMTAyMTUyOVoXDTE4MDEwNTAyMTUyOVow FjEUMBIGA1UEAwwLZXhhbXBsZS5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw ggEKAoIBAQCza2VoDXmBUMmwjFu9F6MM5q/AZ1WjnWA2YNdNy237TrGN/nobDDv8 FBBpUPmHNZ04H1LyxFcP8ReFrcIXpifsReu2lAWaqRPxovu5CuAhfecKv+RhjLVL J0I+MZIb72ROKpfZTmb7dhlFgGD3vkC51BCfhGVW35w52OY/23x5MeO4yvx5myPc cnxMVQ42KuDrzKqjBlSjmBncpGYx0JgCT+syFmHsl8rOkqCPPFLo24YQn+4/pr1A YwaZAbMTl9zoLtEQj6sxScuHcS9e8niptDxlsbLQgqGVaGdE117stC95QH7UvITb uYzdjZwBFc1Sgz8GZ/2hLSsHujJiIQcvAgMBAAGjUDBOMB0GA1UdDgQWBBSUq2A0 b2Xo+sKvmKgN8Wq8l6j82jAfBgNVHSMEGDAWgBSUq2A0b2Xo+sKvmKgN8Wq8l6j8 2jAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQBjiuqhlzNVOVLrHDQy Gr0fTACFJdDREnuhZp4d91++DmMCT+bcTG0+GCp3rfFOuEWpJLLLPdSOnIsnibsO syKPXuBBX5kmdYIojbdjUTSwnhcx9JTAfKSmxXWSC0rnKCefAf44Mm6fqvoTyTbe GSQP6nHzc7eLaK/efcrMvYdct+TeTkHjqR8Lu4pjZvRdUQadQHhDyN+ONKdKD9Tr jvfPim0b7Aq885PjSN6Qo4Z9HXR6+nK+bTz9HyUATMfDGNQt0L3vyfVxbNOxkCBc YI4hFtGfkOzd6B7r2sY1wGKdTLHkuT4m4/9A/SOzvnH+epnJqIS9jw+1iRj8xcDA 6PNT -----END CERTIFICATE----- """ def get_certificate_der(): """Returns an X509 certificate in DER format This certificate was created by issuing the following openssl commands: openssl genrsa -out private.pem 2048 openssl req -new -x509 -key private.pem -out cert.pem \ -days 1000 -subj '/CN=example.com' openssl x509 -outform der -in cert.pem -out cert.der The byte string returned by this function is the contents of the cert.der file. 
""" cert_der = ( b'\x30\x82\x02\xff\x30\x82\x01\xe7\xa0\x03\x02\x01\x02\x02\x09' b'\x00\xe2\xea\x5c\xa2\x7d\xab\xdf\xe7\x30\x0d\x06\x09\x2a\x86' b'\x48\x86\xf7\x0d\x01\x01\x0b\x05\x00\x30\x16\x31\x14\x30\x12' b'\x06\x03\x55\x04\x03\x0c\x0b\x65\x78\x61\x6d\x70\x6c\x65\x2e' b'\x63\x6f\x6d\x30\x1e\x17\x0d\x31\x35\x30\x34\x31\x31\x30\x32' b'\x31\x35\x32\x39\x5a\x17\x0d\x31\x38\x30\x31\x30\x35\x30\x32' b'\x31\x35\x32\x39\x5a\x30\x16\x31\x14\x30\x12\x06\x03\x55\x04' b'\x03\x0c\x0b\x65\x78\x61\x6d\x70\x6c\x65\x2e\x63\x6f\x6d\x30' b'\x82\x01\x22\x30\x0d\x06\x09\x2a\x86\x48\x86\xf7\x0d\x01\x01' b'\x01\x05\x00\x03\x82\x01\x0f\x00\x30\x82\x01\x0a\x02\x82\x01' b'\x01\x00\xb3\x6b\x65\x68\x0d\x79\x81\x50\xc9\xb0\x8c\x5b\xbd' b'\x17\xa3\x0c\xe6\xaf\xc0\x67\x55\xa3\x9d\x60\x36\x60\xd7\x4d' b'\xcb\x6d\xfb\x4e\xb1\x8d\xfe\x7a\x1b\x0c\x3b\xfc\x14\x10\x69' b'\x50\xf9\x87\x35\x9d\x38\x1f\x52\xf2\xc4\x57\x0f\xf1\x17\x85' b'\xad\xc2\x17\xa6\x27\xec\x45\xeb\xb6\x94\x05\x9a\xa9\x13\xf1' b'\xa2\xfb\xb9\x0a\xe0\x21\x7d\xe7\x0a\xbf\xe4\x61\x8c\xb5\x4b' b'\x27\x42\x3e\x31\x92\x1b\xef\x64\x4e\x2a\x97\xd9\x4e\x66\xfb' b'\x76\x19\x45\x80\x60\xf7\xbe\x40\xb9\xd4\x10\x9f\x84\x65\x56' b'\xdf\x9c\x39\xd8\xe6\x3f\xdb\x7c\x79\x31\xe3\xb8\xca\xfc\x79' b'\x9b\x23\xdc\x72\x7c\x4c\x55\x0e\x36\x2a\xe0\xeb\xcc\xaa\xa3' b'\x06\x54\xa3\x98\x19\xdc\xa4\x66\x31\xd0\x98\x02\x4f\xeb\x32' b'\x16\x61\xec\x97\xca\xce\x92\xa0\x8f\x3c\x52\xe8\xdb\x86\x10' b'\x9f\xee\x3f\xa6\xbd\x40\x63\x06\x99\x01\xb3\x13\x97\xdc\xe8' b'\x2e\xd1\x10\x8f\xab\x31\x49\xcb\x87\x71\x2f\x5e\xf2\x78\xa9' b'\xb4\x3c\x65\xb1\xb2\xd0\x82\xa1\x95\x68\x67\x44\xd7\x5e\xec' b'\xb4\x2f\x79\x40\x7e\xd4\xbc\x84\xdb\xb9\x8c\xdd\x8d\x9c\x01' b'\x15\xcd\x52\x83\x3f\x06\x67\xfd\xa1\x2d\x2b\x07\xba\x32\x62' b'\x21\x07\x2f\x02\x03\x01\x00\x01\xa3\x50\x30\x4e\x30\x1d\x06' b'\x03\x55\x1d\x0e\x04\x16\x04\x14\x94\xab\x60\x34\x6f\x65\xe8' b'\xfa\xc2\xaf\x98\xa8\x0d\xf1\x6a\xbc\x97\xa8\xfc\xda\x30\x1f' b'\x06\x03\x55\x1d\x23\x04\x18\x30\x16\x80\x14\x94\xab\x60\x34' b'\x6f\x65\xe8\xfa\xc2\xaf\x98\xa8\x0d\xf1\x6a\xbc\x97\xa8\xfc' b'\xda\x30\x0c\x06\x03\x55\x1d\x13\x04\x05\x30\x03\x01\x01\xff' b'\x30\x0d\x06\x09\x2a\x86\x48\x86\xf7\x0d\x01\x01\x0b\x05\x00' b'\x03\x82\x01\x01\x00\x63\x8a\xea\xa1\x97\x33\x55\x39\x52\xeb' b'\x1c\x34\x32\x1a\xbd\x1f\x4c\x00\x85\x25\xd0\xd1\x12\x7b\xa1' b'\x66\x9e\x1d\xf7\x5f\xbe\x0e\x63\x02\x4f\xe6\xdc\x4c\x6d\x3e' b'\x18\x2a\x77\xad\xf1\x4e\xb8\x45\xa9\x24\xb2\xcb\x3d\xd4\x8e' b'\x9c\x8b\x27\x89\xbb\x0e\xb3\x22\x8f\x5e\xe0\x41\x5f\x99\x26' b'\x75\x82\x28\x8d\xb7\x63\x51\x34\xb0\x9e\x17\x31\xf4\x94\xc0' b'\x7c\xa4\xa6\xc5\x75\x92\x0b\x4a\xe7\x28\x27\x9f\x01\xfe\x38' b'\x32\x6e\x9f\xaa\xfa\x13\xc9\x36\xde\x19\x24\x0f\xea\x71\xf3' b'\x73\xb7\x8b\x68\xaf\xde\x7d\xca\xcc\xbd\x87\x5c\xb7\xe4\xde' b'\x4e\x41\xe3\xa9\x1f\x0b\xbb\x8a\x63\x66\xf4\x5d\x51\x06\x9d' b'\x40\x78\x43\xc8\xdf\x8e\x34\xa7\x4a\x0f\xd4\xeb\x8e\xf7\xcf' b'\x8a\x6d\x1b\xec\x0a\xbc\xf3\x93\xe3\x48\xde\x90\xa3\x86\x7d' b'\x1d\x74\x7a\xfa\x72\xbe\x6d\x3c\xfd\x1f\x25\x00\x4c\xc7\xc3' b'\x18\xd4\x2d\xd0\xbd\xef\xc9\xf5\x71\x6c\xd3\xb1\x90\x20\x5c' b'\x60\x8e\x21\x16\xd1\x9f\x90\xec\xdd\xe8\x1e\xeb\xda\xc6\x35' b'\xc0\x62\x9d\x4c\xb1\xe4\xb9\x3e\x26\xe3\xff\x40\xfd\x23\xb3' b'\xbe\x71\xfe\x7a\x99\xc9\xa8\x84\xbd\x8f\x0f\xb5\x89\x18\xfc' b'\xc5\xc0\xc0\xe8\xf3\x53') return cert_der barbican-9.1.0.dev50/barbican/tests/plugin/0000775000175000017500000000000013616500640020612 5ustar 
sahidsahid00000000000000barbican-9.1.0.dev50/barbican/tests/plugin/test_snakeoil_ca.py0000664000175000017500000004316113616500636024505 0ustar sahidsahid00000000000000# Copyright 2014 Hewlett-Packard Development Company, L.P. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import base64 import os from cryptography.hazmat.backends import default_backend from cryptography import x509 import fixtures import mock from OpenSSL import crypto from oslo_config import fixture as oslo_fixture import barbican.plugin.interface.certificate_manager as cm from barbican.plugin import snakeoil_ca from barbican.tests import certificate_utils from barbican.tests import utils class BaseTestCase(utils.BaseTestCase): def setUp(self): super(BaseTestCase, self).setUp() self.conf = self.useFixture(oslo_fixture.Config( conf=snakeoil_ca.CONF)).conf self.tmp_dir = self.useFixture(fixtures.TempDir()).path def tearDown(self): super(BaseTestCase, self).tearDown() class CaTestCase(BaseTestCase): def test_gen_cacert_no_file_storage(self): subject_dn = ( 'cn=Test CN,o=Test O,L=Test L,st=Test ST,ou=Test OU' ) ca = snakeoil_ca.SnakeoilCA(cert_path=None, key_path=None, key_size=512, subject_dn=subject_dn) subject = ca.cert.get_subject() self.assertIsNotNone(ca.key) self.assertEqual("Test ST", subject.ST) self.assertEqual("Test L", subject.L) self.assertEqual("Test O", subject.O) self.assertEqual("Test CN", subject.CN) self.assertEqual("Test OU", subject.OU) self.assertEqual( ca.chain, crypto.dump_certificate(crypto.FILETYPE_PEM, ca.cert)) def test_gen_cacert_with_file_storage(self): cert_path = self.tmp_dir + 'cert.pem' key_path = self.tmp_dir + 'key.pem' chain_path = self.tmp_dir + 'cert.chain' pkcs7_path = self.tmp_dir + 'cert.p7b' subject_dn = 'cn=Test CN,o=Test O,L=Test L,st=Test ST' ca = snakeoil_ca.SnakeoilCA( cert_path=cert_path, key_path=key_path, chain_path=chain_path, pkcs7_path=pkcs7_path, key_size=2048, subject_dn=subject_dn) subject = ca.cert.get_subject() self.assertEqual( ca.chain, crypto.dump_certificate(crypto.FILETYPE_PEM, ca.cert)) self.assertIsNotNone(ca.key) self.assertEqual("Test ST", subject.ST) self.assertEqual("Test L", subject.L) self.assertEqual("Test O", subject.O) self.assertEqual("Test CN", subject.CN) # Make sure we preserve existing keypairs ca = snakeoil_ca.SnakeoilCA( cert_path=cert_path, key_path=key_path, chain_path=chain_path, pkcs7_path=pkcs7_path ) subject = ca.cert.get_subject() self.assertEqual("Test ST", subject.ST) self.assertEqual("Test L", subject.L) self.assertEqual("Test O", subject.O) self.assertEqual("Test CN", subject.CN) def test_gen_sub_cacert_with_file_storage(self): cert_path = self.tmp_dir + 'cert.pem' key_path = self.tmp_dir + 'key.pem' chain_path = self.tmp_dir + 'cert.chain' pkcs7_path = self.tmp_dir + 'cert.p7b' subject_dn = 'cn=Test CN,o=Test O,L=Test L,st=Test ST' parent_ca = snakeoil_ca.SnakeoilCA( cert_path=cert_path, key_path=key_path, chain_path=chain_path, pkcs7_path=pkcs7_path, key_size=2048, subject_dn=subject_dn) self.assertIsNotNone(parent_ca) # 
create a sub-ca subject_dn = 'cn=Sub CA Test CN,o=Test O,L=Test L,st=Test ST' cert_path = self.tmp_dir + 'sub_cert.pem' key_path = self.tmp_dir + 'sub_key.pem' chain_path = self.tmp_dir + 'sub_cert.chain' pkcs7_path = self.tmp_dir + 'sub_cert.p7b' sub_ca = snakeoil_ca.SnakeoilCA( cert_path=cert_path, key_path=key_path, chain_path=chain_path, pkcs7_path=pkcs7_path, key_size=2048, subject_dn=subject_dn, parent_chain_path=parent_ca.chain_path, signing_dn=parent_ca.subject_dn, signing_key=parent_ca.key ) subject = sub_ca.cert.get_subject() self.assertEqual("Test ST", subject.ST) self.assertEqual("Test L", subject.L) self.assertEqual("Test O", subject.O) self.assertEqual("Sub CA Test CN", subject.CN) class CertManagerTestCase(BaseTestCase): def setUp(self): super(CertManagerTestCase, self).setUp() subject_dn = 'cn=Test CN,o=Test O,L=Test L,st=Test ST' self.ca = snakeoil_ca.SnakeoilCA(cert_path=None, key_path=None, key_size=512, subject_dn=subject_dn) def verify_sig(self, encoded_cert): cert = x509.load_der_x509_certificate(encoded_cert, default_backend()) crypto.verify( self.ca.cert, cert.signature, cert.tbs_certificate_bytes, 'sha256') def test_gen_cert_no_file_storage(self): req = certificate_utils.get_valid_csr_object() cm = snakeoil_ca.CertManager(self.ca) cert = cm.make_certificate(req) first_serial = cert.get_serial_number() cert_enc = crypto.dump_certificate(crypto.FILETYPE_ASN1, cert) self.verify_sig(cert_enc) cert = cm.make_certificate(req) self.assertNotEqual(first_serial, cert.get_serial_number()) self.verify_sig(cert_enc) cm = snakeoil_ca.CertManager(self.ca) cert = cm.make_certificate(req) def test_gen_cert_with_file_storage(self): req = certificate_utils.get_valid_csr_object() cm = snakeoil_ca.CertManager(self.ca) cert = cm.make_certificate(req) cert_enc = crypto.dump_certificate(crypto.FILETYPE_ASN1, cert) first_serial = cert.get_serial_number() self.verify_sig(cert_enc) cm = snakeoil_ca.CertManager(self.ca) cert = cm.make_certificate(req) self.assertNotEqual(first_serial, cert.get_serial_number()) class SnakeoilCAPluginTestCase(BaseTestCase): def setUp(self): super(SnakeoilCAPluginTestCase, self).setUp() self.ca_cert_path = os.path.join(self.tmp_dir, 'ca.cert') self.ca_key_path = os.path.join(self.tmp_dir, 'ca.key') self.ca_chain_path = os.path.join(self.tmp_dir, 'ca.chain') self.ca_pkcs7_path = os.path.join(self.tmp_dir, 'ca.pkcs7') self.db_dir = self.tmp_dir self.conf.snakeoil_ca_plugin.subca_cert_key_directory = os.path.join( self.tmp_dir, 'subca_cert_key_dir') self.subca_cert_key_directory = ( self.conf.snakeoil_ca_plugin.subca_cert_key_directory) self.plugin = snakeoil_ca.SnakeoilCACertificatePlugin( self.conf) self.order_id = mock.MagicMock() self.barbican_meta_dto = cm.BarbicanMetaDTO() def test_issue_certificate_request(self): req = certificate_utils.get_valid_csr_object() req_enc = crypto.dump_certificate_request(crypto.FILETYPE_PEM, req) req_enc = base64.b64encode(req_enc) order_meta = {'request_data': req_enc} resp = self.plugin.issue_certificate_request(self.order_id, order_meta, {}, self.barbican_meta_dto) crypto.load_certificate( crypto.FILETYPE_PEM, base64.b64decode(resp.certificate)) def test_issue_certificate_request_with_ca_id(self): req = certificate_utils.get_valid_csr_object() req_enc = crypto.dump_certificate_request(crypto.FILETYPE_PEM, req) req_enc = base64.b64encode(req_enc) order_meta = {'request_data': req_enc} plugin_meta = {'plugin_ca_id': self.plugin.get_default_ca_name()} self.barbican_meta_dto.plugin_ca_id = self.plugin.get_default_ca_name() resp 
= self.plugin.issue_certificate_request(self.order_id, order_meta, plugin_meta, self.barbican_meta_dto) crypto.load_certificate( crypto.FILETYPE_PEM, base64.b64decode(resp.certificate)) def test_issue_raises_with_invalid_ca_id(self): req = certificate_utils.get_valid_csr_object() req_enc = crypto.dump_certificate_request(crypto.FILETYPE_PEM, req) req_enc = base64.b64encode(req_enc) order_meta = {'request_data': req_enc} plugin_meta = {'plugin_ca_id': "invalid_ca_id"} self.barbican_meta_dto.plugin_ca_id = "invalid_ca_id" self.assertRaises( cm.CertificateGeneralException, self.plugin.issue_certificate_request, self.order_id, order_meta, plugin_meta, self.barbican_meta_dto) def test_issue_certificate_request_set_subject(self): req = certificate_utils.get_valid_csr_object() subj = req.get_subject() subj.countryName = 'US' subj.stateOrProvinceName = 'OR' subj.localityName = 'Testlandia' subj.organizationName = 'Testers Anon' subj.organizationalUnitName = 'Testers OU' subj.commonName = 'Testing' req_enc = crypto.dump_certificate_request(crypto.FILETYPE_PEM, req) req_enc = base64.b64encode(req_enc) order_meta = {'request_data': req_enc} resp = self.plugin.issue_certificate_request(self.order_id, order_meta, {}, self.barbican_meta_dto) cert = crypto.load_certificate( crypto.FILETYPE_PEM, base64.b64decode(resp.certificate)) cert_subj = cert.get_subject() self.assertEqual('US', cert_subj.C) self.assertEqual('OR', cert_subj.ST) self.assertEqual('Testlandia', cert_subj.L) self.assertEqual('Testers Anon', cert_subj.O) self.assertEqual('Testers OU', cert_subj.OU) self.assertEqual('Testing', cert_subj.CN) def test_issue_certificate_request_stored_key(self): req = certificate_utils.get_valid_csr_object() req_enc = crypto.dump_certificate_request(crypto.FILETYPE_PEM, req) self.barbican_meta_dto.generated_csr = req_enc resp = self.plugin.issue_certificate_request( self.order_id, {}, {}, self.barbican_meta_dto) crypto.load_certificate( crypto.FILETYPE_PEM, base64.b64decode(resp.certificate)) def test_no_request_data(self): res = self.plugin.issue_certificate_request( self.order_id, {}, {}, self.barbican_meta_dto) self.assertIs(cm.CertificateStatus.CLIENT_DATA_ISSUE_SEEN, res.status) self.assertEqual("No request_data specified", res.status_message) def test_get_default_ca_name(self): self.assertEqual("Snakeoil CA", self.plugin.get_default_ca_name()) def test_get_default_signing_cert(self): ca_cert = self.plugin.get_default_signing_cert() self.assertEqual( crypto.dump_certificate(crypto.FILETYPE_PEM, self.plugin.ca.cert), ca_cert) def test_get_default_intermediates_none(self): intermediates = self.plugin.get_default_intermediates() self.assertIsNone(intermediates) def test_not_implemented(self): self.assertRaises(NotImplementedError, self.plugin.modify_certificate_request, '', {}, {}, {}) self.assertRaises(NotImplementedError, self.plugin.cancel_certificate_request, '', {}, {}, {}) self.assertRaises(NotImplementedError, self.plugin.check_certificate_status, '', {}, {}, {}) def test_support_request_types(self): manager = cm.CertificatePluginManager() manager.extensions = [mock.MagicMock(obj=self.plugin)] cert_spec = { cm.REQUEST_TYPE: cm.CertificateRequestType.CUSTOM_REQUEST} self.assertEqual(self.plugin, manager.get_plugin(cert_spec)) self.assertTrue(self.plugin.supports(cert_spec)) cert_spec = { cm.REQUEST_TYPE: cm.CertificateRequestType.STORED_KEY_REQUEST} self.assertEqual(self.plugin, manager.get_plugin(cert_spec)) self.assertTrue(self.plugin.supports(cert_spec)) cert_spec = { cm.REQUEST_TYPE: 
cm.CertificateRequestType.FULL_CMC_REQUEST} self.assertRaises(cm.CertificatePluginNotFound, manager.get_plugin, cert_spec) self.assertFalse(self.plugin.supports(cert_spec)) def test_supports_create_ca(self): self.assertTrue(self.plugin.supports_create_ca()) def _create_subca(self): create_ca_dto = cm.CACreateDTO( name="sub ca1", description="subordinate ca", subject_dn="cn=subordinate ca signing cert, o=example.com", parent_ca_id=self.plugin.get_default_ca_name() ) return self.plugin.create_ca(create_ca_dto) def test_create_ca(self): subca_dict = self._create_subca() self.assertEqual("sub ca1", subca_dict.get(cm.INFO_NAME)) self.assertIsNotNone(subca_dict.get(cm.INFO_EXPIRATION)) self.assertIsNotNone(subca_dict.get(cm.PLUGIN_CA_ID)) ca_cert = subca_dict.get(cm.INFO_CA_SIGNING_CERT) self.assertIsNotNone(ca_cert) intermediates = subca_dict.get(cm.INFO_INTERMEDIATES) self.assertIsNotNone(intermediates) cacert = crypto.load_certificate(crypto.FILETYPE_PEM, ca_cert) subject = cacert.get_subject() self.assertEqual( "subordinate ca signing cert", subject.CN) pkcs7 = crypto.load_pkcs7_data(crypto.FILETYPE_PEM, intermediates) self.assertTrue(pkcs7.type_is_signed()) # TODO(alee) Verify that ca cert is signed by parent CA def test_issue_certificate_request_with_subca_id(self): subca_dict = self._create_subca() req = certificate_utils.get_valid_csr_object() req_enc = crypto.dump_certificate_request(crypto.FILETYPE_PEM, req) req_enc = base64.b64encode(req_enc) order_meta = {'request_data': req_enc} plugin_meta = {'plugin_ca_id': subca_dict.get(cm.PLUGIN_CA_ID)} self.barbican_meta_dto.plugin_ca_id = subca_dict.get(cm.PLUGIN_CA_ID) resp = self.plugin.issue_certificate_request(self.order_id, order_meta, plugin_meta, self.barbican_meta_dto) new_cert = crypto.load_certificate( crypto.FILETYPE_PEM, base64.b64decode(resp.certificate)) signing_cert = crypto.load_certificate( crypto.FILETYPE_PEM, subca_dict['ca_signing_certificate']) self.assertEqual(signing_cert.get_subject(), new_cert.get_issuer()) def test_delete_ca(self): subca_dict = self._create_subca() ca_id = subca_dict.get(cm.PLUGIN_CA_ID) self.assertIsNotNone(ca_id) cert_path = os.path.join(self.subca_cert_key_directory, ca_id + ".cert") key_path = os.path.join(self.subca_cert_key_directory, ca_id + ".key") self.assertTrue(os.path.exists(cert_path)) self.assertTrue(os.path.exists(key_path)) self.plugin.delete_ca(ca_id) self.assertFalse(os.path.exists(cert_path)) self.assertFalse(os.path.exists(key_path)) cas = self.plugin.get_ca_info() self.assertNotIn(ca_id, cas.keys()) def test_raises_no_parent_id_passed_in(self): create_ca_dto = cm.CACreateDTO( name="sub ca1", description="subordinate ca", subject_dn="cn=subordinate ca signing cert, o=example.com", ) self.assertRaises( cm.CertificateGeneralException, self.plugin.create_ca, create_ca_dto ) def test_raises_invalid_parent_id_passed_in(self): create_ca_dto = cm.CACreateDTO( name="sub ca1", description="subordinate ca", subject_dn="cn=subordinate ca signing cert, o=example.com", parent_ca_id="foo" ) self.assertRaises( cm.CertificateGeneralException, self.plugin.create_ca, create_ca_dto ) def test_get_ca_info(self): ca_info = self.plugin.get_ca_info() ca_dict = ca_info.get(self.plugin.ca.name) self.assertIsNotNone(ca_dict) self.assertEqual(self.plugin.ca.name, ca_dict.get(cm.INFO_NAME)) self.assertIsNotNone(ca_dict.get(cm.INFO_CA_SIGNING_CERT)) self.assertEqual(str, type(ca_dict.get(cm.INFO_EXPIRATION))) def test_get_ca_info_with_subca(self): subca_dict = self._create_subca() subca_id = 
subca_dict.get(cm.PLUGIN_CA_ID) ca_info = self.plugin.get_ca_info() self.assertIn(subca_id, ca_info.keys()) self.assertIn(self.plugin.get_default_ca_name(), ca_info.keys()) self.assertEqual(str, type(subca_dict.get(cm.INFO_EXPIRATION))) barbican-9.1.0.dev50/barbican/tests/plugin/util/0000775000175000017500000000000013616500640021567 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/tests/plugin/util/test_multiple_backends.py0000664000175000017500000006326013616500636026701 0ustar sahidsahid00000000000000# (c) Copyright 2016 Hewlett Packard Enterprise Development Company LP # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import collections import mock from oslo_utils import uuidutils from barbican.common import config from barbican.common import exception from barbican.model import models from barbican.model import repositories from barbican.plugin.crypto import base from barbican.plugin.crypto import manager as cm from barbican.plugin.crypto import p11_crypto from barbican.plugin.crypto import simple_crypto from barbican.plugin.interface import secret_store from barbican.plugin import kmip_secret_store as kss from barbican.plugin import store_crypto from barbican.plugin.util import multiple_backends from barbican.tests import utils as test_utils class MockedManager(object): NAME_PREFIX = "friendly_" def __init__(self, names, enabled=True, plugin_lookup_field='store_plugin'): ExtTuple = collections.namedtuple('ExtTuple', ['name', 'obj']) self.extensions = [] for name in names: m = mock.MagicMock() m.get_plugin_name.return_value = self.NAME_PREFIX + name new_extension = ExtTuple(name, m) self.extensions.append(new_extension) self.global_default_store_dict = None self.parsed_stores = multiple_backends.read_multiple_backends_config() class WhenReadingMultipleBackendsConfig(test_utils.MultipleBackendsTestCase): def test_successful_conf_read(self): ss_plugins = ['ss_p1', 'ss_p2', 'ss_p3'] cr_plugins = ['cr_p1', 'cr_p2', 'cr_p3'] self.init_via_conf_file(ss_plugins, cr_plugins, enabled=True, global_default_index=1) stores = multiple_backends.read_multiple_backends_config() self.assertEqual(len(ss_plugins), len(stores)) self.assertEqual('ss_p1', stores[0].store_plugin) self.assertEqual('cr_p1', stores[0].crypto_plugin) self.assertEqual(False, stores[0].global_default) self.assertEqual('ss_p2', stores[1].store_plugin) self.assertEqual('cr_p2', stores[1].crypto_plugin) self.assertTrue(stores[1].global_default) self.assertEqual('ss_p3', stores[2].store_plugin) self.assertEqual('cr_p3', stores[2].crypto_plugin) self.assertEqual(False, stores[2].global_default) def test_fail_when_store_plugin_name_missing(self): ss_plugins = ['ss_p1', 'ss_p3'] cr_plugins = ['cr_p1', 'cr_p2', 'cr_p3'] self.init_via_conf_file(ss_plugins, cr_plugins, enabled=True) self.assertRaises(exception.MultipleStorePluginValueMissing, multiple_backends.read_multiple_backends_config) def test_fail_when_store_plugin_name_is_blank(self): ss_plugins = ['ss_p1', '', 'ss_p3'] cr_plugins = ['cr_p1', 'cr_p2', 'cr_p3'] 
self.init_via_conf_file(ss_plugins, cr_plugins, enabled=True) self.assertRaises(exception.MultipleStorePluginValueMissing, multiple_backends.read_multiple_backends_config) def test_successful_conf_read_when_crypto_plugin_name_is_missing(self): ss_plugins = ['ss_p1', 'ss_p2', 'ss_p3'] cr_plugins = ['cr_p1', 'cr_p3'] self.init_via_conf_file(ss_plugins, cr_plugins, enabled=True) stores = multiple_backends.read_multiple_backends_config() self.assertEqual(len(ss_plugins), len(stores)) def test_conf_read_when_multiple_plugin_disabled(self): ss_plugins = ['ss_p1', 'ss_p2', 'ss_p3'] cr_plugins = ['cr_p1', 'cr_p3'] self.init_via_conf_file(ss_plugins, cr_plugins, enabled=False) stores = multiple_backends.read_multiple_backends_config() self.assertIsNone(stores) def test_successful_conf_read_when_crypto_plugin_name_is_blank(self): ss_plugins = ['ss_p1', 'ss_p2', 'ss_p3'] cr_plugins = ['cr_p1', '', 'cr_p3'] self.init_via_conf_file(ss_plugins, cr_plugins, enabled=True) stores = multiple_backends.read_multiple_backends_config() self.assertEqual(len(ss_plugins), len(stores)) self.assertEqual('', stores[1].crypto_plugin) def test_fail_when_global_default_not_specified(self): ss_plugins = ['ss_p1', 'ss_p2', 'ss_p3'] cr_plugins = ['cr_p1', 'cr_p2', 'cr_p3'] self.init_via_conf_file(ss_plugins, cr_plugins, enabled=True, global_default_index=-1) self.assertRaises(exception.MultipleStoreIncorrectGlobalDefault, multiple_backends.read_multiple_backends_config) def test_fail_when_stores_lookup_suffix_missing_when_enabled(self): ss_plugins = ['ss_p1', 'ss_p2', 'ss_p3'] cr_plugins = ['cr_p1', 'cr_p2', 'cr_p3'] self.init_via_conf_file(ss_plugins, cr_plugins, enabled=True, global_default_index=0) conf = config.get_module_config('secretstore') conf.set_override("stores_lookup_suffix", [], group='secretstore') self.assertRaises(exception.MultipleSecretStoreLookupFailed, multiple_backends.read_multiple_backends_config) def test_fail_when_secretstore_section_missing(self): ss_plugins = ['ss_p1', 'ss_p2', 'ss_p3'] cr_plugins = ['cr_p1', 'cr_p2', 'cr_p3'] self.init_via_conf_file(ss_plugins, cr_plugins, enabled=True, global_default_index=-1) ss_conf = config.get_module_config('secretstore') existing_value = ss_conf.secretstore.stores_lookup_suffix existing_value.append('unknown_section') ss_conf.set_override('stores_lookup_suffix', existing_value, 'secretstore') self.assertRaises(exception.MultipleStorePluginValueMissing, multiple_backends.read_multiple_backends_config) class WhenInvokingSyncSecretStores(test_utils.MultipleBackendsTestCase): def setUp(self): super(WhenInvokingSyncSecretStores, self).setUp() def test_successful_syncup_no_existing_secret_stores(self): ss_plugins = ['ss_p1', 'ss_p2', 'ss_p3', 'ss_p4', 'ss_p5'] cr_plugins = ['cr_p1', 'cr_p2', 'cr_p3', 'cr_p4', 'cr_p5'] self.init_via_conf_file(ss_plugins, cr_plugins, enabled=True) secretstore_manager = MockedManager(ss_plugins) crypto_manager = MockedManager(cr_plugins) multiple_backends.sync_secret_stores(secretstore_manager, crypto_manager) default_secret_store = multiple_backends.\ get_global_default_secret_store() self.assertEqual('ss_p1', default_secret_store.store_plugin) self.assertEqual('cr_p1', default_secret_store.crypto_plugin) self.assertEqual(MockedManager.NAME_PREFIX + 'cr_p1', default_secret_store.name) ss_db_entries = repositories.get_secret_stores_repository().get_all() self.assertEqual(5, len(ss_db_entries)) def test_syncup_with_existing_secret_stores(self): ss_plugins = ['ss_p1', 'ss_p2', 'ss_p3', 'ss_p4', 'ss_p5'] cr_plugins = ['cr_p1', '', 
'cr_p3', 'cr_p4', 'cr_p5'] self.init_via_conf_file(ss_plugins, cr_plugins, enabled=True) secretstore_manager = MockedManager(ss_plugins) crypto_manager = MockedManager(cr_plugins) multiple_backends.sync_secret_stores(secretstore_manager, crypto_manager) ss_db_entries = repositories.get_secret_stores_repository().get_all() self.assertEqual(5, len(ss_db_entries)) # check friendly name for the case when crypto plugin is not there ss_db_entry = self._get_secret_store_entry('ss_p2', None) self.assertIsNotNone(ss_db_entry) self.assertEqual(MockedManager.NAME_PREFIX + 'ss_p2', ss_db_entry.name) ss_plugins = ['ss_p3', 'ss_p4', 'ss_p5', 'ss_p6'] cr_plugins = ['cr_p3', 'cr_p4', 'cr_p5', 'cr_p6'] # update conf and re-run sync store self.init_via_conf_file(ss_plugins, cr_plugins, enabled=True) secretstore_manager = MockedManager(ss_plugins) crypto_manager = MockedManager(cr_plugins) multiple_backends.sync_secret_stores(secretstore_manager, crypto_manager) ss_db_entry = self._get_secret_store_entry('ss_p2', 'cr_p2') self.assertIsNone(ss_db_entry) ss_db_entry = self._get_secret_store_entry('ss_p6', 'cr_p6') self.assertIsNotNone(ss_db_entry) default_secret_store = multiple_backends.\ get_global_default_secret_store() self.assertEqual('ss_p3', default_secret_store.store_plugin) self.assertEqual('cr_p3', default_secret_store.crypto_plugin) self.assertEqual(MockedManager.NAME_PREFIX + 'cr_p3', default_secret_store.name) ss_db_entries = repositories.get_secret_stores_repository().get_all() self.assertEqual(4, len(ss_db_entries)) def test_syncup_modify_global_default(self): ss_plugins = ['ss_p1', 'ss_p2', 'ss_p3', 'ss_p4', 'ss_p5'] cr_plugins = ['cr_p1', 'cr_p2', 'cr_p3', 'cr_p4', 'cr_p5'] self.init_via_conf_file(ss_plugins, cr_plugins, enabled=True) secretstore_manager = MockedManager(ss_plugins) crypto_manager = MockedManager(cr_plugins) multiple_backends.sync_secret_stores(secretstore_manager, crypto_manager) global_secret_store = multiple_backends.\ get_global_default_secret_store() self.assertEqual('ss_p1', global_secret_store.store_plugin) self.assertEqual('cr_p1', global_secret_store.crypto_plugin) self.assertEqual(MockedManager.NAME_PREFIX + 'cr_p1', global_secret_store.name) ss_plugins = ['ss_p9', 'ss_p4', 'ss_p5'] cr_plugins = ['cr_p9', 'cr_p4', 'cr_p5'] # update conf and re-run sync store self.init_via_conf_file(ss_plugins, cr_plugins, enabled=True) secretstore_manager = MockedManager(ss_plugins) crypto_manager = MockedManager(cr_plugins) multiple_backends.sync_secret_stores(secretstore_manager, crypto_manager) global_secret_store = multiple_backends.\ get_global_default_secret_store() self.assertEqual('ss_p9', global_secret_store.store_plugin) self.assertEqual('cr_p9', global_secret_store.crypto_plugin) self.assertEqual(MockedManager.NAME_PREFIX + 'cr_p9', global_secret_store.name) def test_syncup_with_store_and_crypto_plugins_count_mismatch(self): ss_plugins = ['ss_p1', 'ss_p2', 'ss_p3', 'ss_p4'] cr_plugins = ['cr_p1', '', 'cr_p3'] self.init_via_conf_file(ss_plugins, cr_plugins, enabled=True) secretstore_manager = MockedManager(ss_plugins) crypto_manager = MockedManager(cr_plugins) multiple_backends.sync_secret_stores(secretstore_manager, crypto_manager) # empty crypto_plugin name maps to None in database entry ss_db_entry = self._get_secret_store_entry('ss_p2', None) self.assertIsNotNone(ss_db_entry) ss_db_entry = self._get_secret_store_entry('ss_p2', '') self.assertIsNone(ss_db_entry) # missing crypto plugin name maps to None in database entry ss_db_entry = self._get_secret_store_entry('ss_p4', 
None)
        self.assertIsNotNone(ss_db_entry)

    def test_syncup_delete_secret_store_with_preferred_project_using_it(self):
        """Removing a secret store will fail if it's defined as a
        preferred store.
        """
        ss_plugins = ['ss_p1', 'ss_p2', 'ss_p3', 'ss_p4']
        cr_plugins = ['cr_p1', 'cr_p2', 'cr_p3', 'cr_p4']
        self.init_via_conf_file(ss_plugins, cr_plugins, enabled=True)
        secretstore_manager = MockedManager(ss_plugins)
        crypto_manager = MockedManager(cr_plugins)
        multiple_backends.sync_secret_stores(secretstore_manager,
                                             crypto_manager)

        with mock.patch('barbican.model.repositories.'
                        'get_project_secret_store_repository') as ps_repo:
            # Mock two projects as using the preferred secret store
            ps_repo.get_count_by_secret_store.return_value = 2

            ss_plugins = ['ss_p3', 'ss_p4']
            cr_plugins = ['cr_p3', 'cr_p4']
            self.init_via_conf_file(ss_plugins, cr_plugins, enabled=True)
            secretstore_manager = MockedManager(ss_plugins)
            crypto_manager = MockedManager(cr_plugins)
            self.assertRaises(exception.MultipleStorePluginStillInUse,
                              multiple_backends.sync_secret_stores,
                              secretstore_manager,
                              crypto_manager)

    def test_get_global_default_store_when_multiple_backends_disabled(self):
        ss_plugins = ['ss_p1', 'ss_p2', 'ss_p3']
        cr_plugins = ['cr_p1', 'cr_p2', 'cr_p3']
        self.init_via_conf_file(ss_plugins, cr_plugins, enabled=False)

        default_store = multiple_backends.get_global_default_secret_store()
        self.assertIsNone(default_store)


class TestGetApplicablePlugins(test_utils.MultipleBackendsTestCase):

    def setUp(self):
        super(TestGetApplicablePlugins, self).setUp()

    def test_get_when_project_preferred_plugin_is_set(self):
        ss_plugins = ['ss_p1', 'ss_p2', 'ss_p3']
        cr_plugins = ['cr_p1', 'cr_p2', 'cr_p3']
        self.init_via_conf_file(ss_plugins, cr_plugins, enabled=True)
        ss_manager = MockedManager(ss_plugins)

        project_id = uuidutils.generate_uuid(dashed=False)
        with mock.patch('barbican.model.repositories.ProjectSecretStoreRepo.'
                        'get_secret_store_for_project') as pref_func:
            # set the preferred secret store to one of the values in config
            m_dict = {'store_plugin': 'ss_p3'}
            m_rec = mock.MagicMock()
            m_rec.secret_store.to_dict_fields.return_value = m_dict
            pref_func.return_value = m_rec

            objs = multiple_backends.get_applicable_store_plugins(
                ss_manager, project_id, None)

            self.assertIn(project_id, pref_func.call_args_list[0][0])
            self.assertIsInstance(objs, list)
            self.assertEqual(1, len(objs))
            self.assertIn('ss_p3', objs[0].get_plugin_name())

    def test_get_when_project_preferred_plugin_is_not_found_in_conf(self):
        ss_plugins = ['ss_p1', 'ss_p2', 'ss_p3']
        cr_plugins = ['cr_p1', 'cr_p2', 'cr_p3']
        self.init_via_conf_file(ss_plugins, cr_plugins, enabled=True)
        ss_manager = MockedManager(ss_plugins)

        project_id = uuidutils.generate_uuid(dashed=False)
        with mock.patch('barbican.model.repositories.ProjectSecretStoreRepo.'
'get_secret_store_for_project') as pref_func: # set preferred secret store value which is not defined in config m_dict = {'store_plugin': 'old_preferred_plugin'} m_rec = mock.MagicMock() m_rec.secret_store.to_dict_fields.return_value = m_dict pref_func.return_value = m_rec self.assertRaises(exception.MultipleStorePreferredPluginMissing, multiple_backends.get_applicable_store_plugins, ss_manager, project_id, None) self.assertIn(project_id, pref_func.call_args_list[0][0]) def test_get_when_project_preferred_plugin_not_set_then_default_used(self): ss_plugins = ['ss_p1', 'ss_p2', 'ss_p3'] cr_plugins = ['cr_p1', 'cr_p2', 'cr_p3'] # setting second plugin to be global default self.init_via_conf_file(ss_plugins, cr_plugins, enabled=True, global_default_index=1) cr_manager = MockedManager(cr_plugins, plugin_lookup_field='crypto_plugin') project_id = uuidutils.generate_uuid(dashed=False) with mock.patch('barbican.plugin.util.multiple_backends.' 'get_global_default_secret_store') as gd_func: m_dict = {'crypto_plugin': 'cr_p2'} gd_func.return_value.to_dict_fields.return_value = m_dict objs = multiple_backends.get_applicable_crypto_plugins(cr_manager, project_id, None) gd_func.assert_called_once_with() self.assertIsInstance(objs, list) self.assertEqual(1, len(objs)) self.assertIn('cr_p2', objs[0].get_plugin_name()) # call again with no project_id set objs = multiple_backends.get_applicable_crypto_plugins(cr_manager, None, None) gd_func.assert_called_once_with() self.assertIsInstance(objs, list) self.assertEqual(1, len(objs)) self.assertIn('cr_p2', objs[0].get_plugin_name()) def test_get_applicable_store_plugins_when_multiple_backend_not_enabled( self): ss_config = config.get_module_config('secretstore') ss_plugins = ['ss_p11', 'ss_p22', 'ss_p33', 'ss_p44'] ss_conf_plugins = ['ss_p1', 'ss_p2', 'ss_p3'] cr_conf_plugins = ['cr_p1', 'cr_p2', 'cr_p3'] self.init_via_conf_file(ss_conf_plugins, cr_conf_plugins, enabled=False) ss_manager = MockedManager(ss_plugins) ss_config.set_override("enabled_secretstore_plugins", ss_plugins, group='secretstore') objs = multiple_backends.get_applicable_store_plugins(ss_manager, None, None) self.assertEqual(4, len(objs)) @test_utils.parameterized_test_case class TestPluginsGenerateStoreAPIMultipleBackend( test_utils.MultipleBackendsTestCase): backend_dataset = { "db_backend": [{ 'store_plugins': ['store_crypto', 'kmip_plugin', 'store_crypto'], 'crypto_plugins': ['simple_crypto', '', 'p11_crypto'], 'default_store_class': store_crypto.StoreCryptoAdapterPlugin, 'default_crypto_class': simple_crypto.SimpleCryptoPlugin }], "kmip": [{ 'store_plugins': ['kmip_plugin', 'store_crypto', 'store_crypto'], 'crypto_plugins': ['', 'p11_crypto', 'simple_crypto'], 'default_store_class': kss.KMIPSecretStore, 'default_crypto_class': None }], "pkcs11": [{ 'store_plugins': ['store_crypto', 'store_crypto', 'kmip_plugin'], 'crypto_plugins': ['p11_crypto', 'simple_crypto', ''], 'default_store_class': store_crypto.StoreCryptoAdapterPlugin, 'default_crypto_class': p11_crypto.P11CryptoPlugin }] } def setUp(self): super(TestPluginsGenerateStoreAPIMultipleBackend, self).setUp() def _create_project(self): session = repositories.get_project_repository().get_session() project = models.Project() project.external_id = ("keystone_project_id" + uuidutils.generate_uuid(dashed=False)) project.save(session=session) return project def _create_project_store(self, project_id, secret_store_id): proj_store_repo = repositories.get_project_secret_store_repository() session = proj_store_repo.get_session() proj_model = 
models.ProjectSecretStore(project_id, secret_store_id)
        proj_s_store = proj_store_repo.create_from(proj_model, session)
        proj_s_store.save(session=session)
        return proj_s_store

    @test_utils.parameterized_dataset(backend_dataset)
    def test_no_preferred_default_plugin(self, dataset):
        """Check name, plugin and crypto class used for default secret store.

        The secret store name is the crypto plugin's friendly name when a
        crypto plugin is defined; otherwise a user-friendly name is derived
        from the store plugin's name.
        """
        self.init_via_conf_file(dataset['store_plugins'],
                                dataset['crypto_plugins'],
                                enabled=True)
        with mock.patch('barbican.plugin.crypto.p11_crypto.P11CryptoPlugin.'
                        '_create_pkcs11'), \
                mock.patch('kmip.pie.client.ProxyKmipClient'):
            manager = secret_store.SecretStorePluginManager()

        keySpec = secret_store.KeySpec(secret_store.KeyAlgorithm.AES, 128)
        plugin_found = manager.get_plugin_store(keySpec)
        self.assertIsInstance(plugin_found, dataset['default_store_class'])

        global_secret_store = multiple_backends.\
            get_global_default_secret_store()
        if dataset['default_crypto_class']:
            crypto_plugin = cm.get_manager().get_plugin_store_generate(
                base.PluginSupportTypes.ENCRYPT_DECRYPT)
            self.assertIsInstance(crypto_plugin,
                                  dataset['default_crypto_class'])
            # make sure the secret store name matches the crypto plugin's
            # friendly name, as the store_plugin class is not a direct
            # implementation of SecretStoreBase
            self.assertEqual(global_secret_store.name,
                             crypto_plugin.get_plugin_name())
        else:  # crypto class is not used
            # make sure the secret store name matches the store plugin's
            # friendly name
            self.assertEqual(global_secret_store.name,
                             plugin_found.get_plugin_name())
            # an error is raised when no crypto plugin is available
            self.assertRaises(base.CryptoPluginNotFound,
                              cm.get_manager().get_plugin_store_generate,
                              base.PluginSupportTypes.ENCRYPT_DECRYPT)

    @test_utils.parameterized_dataset(backend_dataset)
    def test_project_preferred_default_plugin(self, dataset):
        """Check project preferred behavior with different global defaults."""
        self.init_via_conf_file(dataset['store_plugins'],
                                dataset['crypto_plugins'],
                                enabled=True)

        with mock.patch('barbican.plugin.crypto.p11_crypto.P11CryptoPlugin.'
'_create_pkcs11'), \ mock.patch('kmip.pie.client.ProxyKmipClient'): manager = secret_store.SecretStorePluginManager() pkcs11_secret_store = self._get_secret_store_entry('store_crypto', 'p11_crypto') kmip_secret_store = self._get_secret_store_entry('kmip_plugin', None) db_secret_store = self._get_secret_store_entry('store_crypto', 'simple_crypto') project1 = self._create_project() project2 = self._create_project() project3 = self._create_project() # For project1 , make pkcs11 as preferred secret store self._create_project_store(project1.id, pkcs11_secret_store.id) # For project2 , make kmip as preferred secret store self._create_project_store(project2.id, kmip_secret_store.id) # For project3 , make db backend as preferred secret store self._create_project_store(project3.id, db_secret_store.id) keySpec = secret_store.KeySpec(secret_store.KeyAlgorithm.AES, 128) cm_manager = cm.get_manager() # For project1, verify store and crypto plugin instance used are pkcs11 # specific plugin_found = manager.get_plugin_store(keySpec, project_id=project1.id) self.assertIsInstance(plugin_found, store_crypto.StoreCryptoAdapterPlugin) crypto_plugin = cm.get_manager().get_plugin_store_generate( base.PluginSupportTypes.ENCRYPT_DECRYPT, project_id=project1.id) self.assertIsInstance(crypto_plugin, p11_crypto.P11CryptoPlugin) # For project2, verify store plugin instance is kmip specific # and there is no crypto plugin instance plugin_found = manager.get_plugin_store(keySpec, project_id=project2.id) self.assertIsInstance(plugin_found, kss.KMIPSecretStore) self.assertRaises( base.CryptoPluginNotFound, cm_manager.get_plugin_store_generate, base.PluginSupportTypes.ENCRYPT_DECRYPT, project_id=project2.id) # For project3, verify store and crypto plugin instance used are db # backend specific plugin_found = manager.get_plugin_store(keySpec, project_id=project3.id) self.assertIsInstance(plugin_found, store_crypto.StoreCryptoAdapterPlugin) crypto_plugin = cm.get_manager().get_plugin_store_generate( base.PluginSupportTypes.ENCRYPT_DECRYPT, project_id=project3.id) self.assertIsInstance(crypto_plugin, simple_crypto.SimpleCryptoPlugin) # Make sure for project with no preferred setting, uses global default project4 = self._create_project() plugin_found = manager.get_plugin_store(keySpec, project_id=project4.id) self.assertIsInstance(plugin_found, dataset['default_store_class']) barbican-9.1.0.dev50/barbican/tests/plugin/util/__init__.py0000664000175000017500000000000013616500636023673 0ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/tests/plugin/util/test_mime_types.py0000664000175000017500000002153013616500636025361 0ustar sahidsahid00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
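# A minimal usage sketch of the mime_types helpers exercised by the tests
# below. It is illustrative only (not invoked by the test suite), assumes
# barbican is importable, and uses only calls that appear in these tests.
def _demo_mime_type_helpers():
    from barbican.plugin.util import mime_types

    # Supported charset parameters on text/plain are normalized away.
    assert mime_types.normalize_content_type(
        'text/plain; charset=UTF-8') == 'text/plain'

    # base64 transfer encoding is honored only for binary content types.
    assert mime_types.is_base64_processing_needed(
        'application/octet-stream', 'base64')
    assert not mime_types.is_base64_processing_needed('text/plain', 'base64')

    # Binary payloads pass through as-is when no encoding is requested.
    assert mime_types.use_binary_content_as_is(
        'application/octet-stream', None)
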
from barbican.model import models from barbican.plugin.util import mime_types from barbican.tests import utils class WhenTestingIsBase64ProcessingNeeded(utils.BaseTestCase): def test_is_base64_needed(self): r = mime_types.is_base64_processing_needed('application/octet-stream', 'base64') self.assertTrue(r) def test_is_base64_plus_needed(self): r = mime_types.is_base64_processing_needed('application/octet-stream', 'base64;q=0.5, ' 'gzip;q=0.6, compress') self.assertTrue(r) def test_not_base64_needed_binary(self): r = mime_types.is_base64_processing_needed('application/octet-stream', None) self.assertFalse(r) def test_not_base64_needed_invalid_content_type(self): r = mime_types.is_base64_processing_needed('bababooey', 'base64') self.assertFalse(r) def test_not_base64_needed_text(self): r = mime_types.is_base64_processing_needed('text/plain', 'base64') self.assertFalse(r) class WhenTestingIsBase64ProcessingSupported(utils.BaseTestCase): def test_is_base64_supported_application_octet_stream(self): r = mime_types.is_base64_encoding_supported('application/octet-stream') self.assertTrue(r) def test_is_base64_supported_with_unsupported_values(self): mimes_where_base64_is_not_supported = ['text/plain', 'bogus'] for mime in mimes_where_base64_is_not_supported: r = mime_types.is_base64_encoding_supported(mime) self.assertFalse(r) class WhenTestingAllowBinaryContent(utils.BaseTestCase): def test_binary_content_without_encoding(self): r = mime_types.use_binary_content_as_is('application/octet-stream', None) self.assertTrue(r) def test_binary_content_with_valid_encoding(self): r = mime_types.use_binary_content_as_is('application/octet-stream', 'binary') self.assertTrue(r) def test_binary_content_with_encoding(self): r = mime_types.use_binary_content_as_is('application/octet-stream', 'binary;q=0.5, ' 'gzip;q=0.6, compress') self.assertTrue(r) def test_binary_content_with_base64_encoding(self): r = mime_types.use_binary_content_as_is('application/octet-stream', 'base64') self.assertFalse(r) def test_not_allow_with_invalid_content_type(self): r = mime_types.use_binary_content_as_is('invalid_content_type', 'binary') self.assertFalse(r) def test_plain_content_with_base64_encoding(self): r = mime_types.use_binary_content_as_is('text/plain', 'base64') self.assertFalse(r) class WhenTestingAugmentFieldsWithContentTypes(utils.BaseTestCase): def setUp(self): super(WhenTestingAugmentFieldsWithContentTypes, self).setUp() self.secret = models.Secret({}) self.secret.secret_id = "secret#1" self.datum = models.EncryptedDatum(self.secret) self.secret.encrypted_data = [self.datum] def test_static_supported_plain_text(self): for pt in mime_types.PLAIN_TEXT: self.assertEqual('text/plain', mime_types.INTERNAL_CTYPES[pt]) def test_static_supported_binary(self): for bin in mime_types.BINARY: self.assertIn(mime_types.INTERNAL_CTYPES[bin], mime_types.BINARY) def test_static_content_to_encodings(self): self.assertIn('text/plain', mime_types.CTYPES_TO_ENCODINGS) self.assertIsNone(mime_types.CTYPES_TO_ENCODINGS['text/plain']) self.assertIn('application/aes', mime_types.CTYPES_TO_ENCODINGS) self.assertIsNone(mime_types.CTYPES_TO_ENCODINGS['application/aes']) self.assertIn('application/octet-stream', mime_types.CTYPES_TO_ENCODINGS) self.assertIn('base64', mime_types.CTYPES_TO_ENCODINGS[ 'application/octet-stream']) self.assertIn('binary', mime_types.CTYPES_TO_ENCODINGS[ 'application/octet-stream']) def test_secret_with_matching_datum(self): for ct in mime_types.SUPPORTED: self._test_secret_and_datum_for_content_type(ct) def 
test_secret_with_non_matching_datum(self): self.datum.content_type = "bababooey" fields = mime_types.augment_fields_with_content_types(self.secret) self.assertNotIn("bababooey", fields) def _test_secret_and_datum_for_content_type(self, content_type): self.assertIn(content_type, mime_types.INTERNAL_CTYPES) # TODO(rm_work): This is deprecated and should eventually be removed self.datum.content_type = mime_types.INTERNAL_CTYPES[content_type] # Set up the Secret Metadata content_meta = models.SecretStoreMetadatum('content_type', self.datum.content_type) self.secret.secret_store_metadata['content_type'] = content_meta fields = mime_types.augment_fields_with_content_types(self.secret) self.assertIn('content_types', fields) content_types = fields['content_types'] self.assertIn('default', content_types) self.assertEqual(self.datum.content_type, content_types['default']) class WhenTestingNormalizationOfMIMETypes(utils.BaseTestCase): def test_plain_text_normalization(self): mimes = ['text/plain', ' text/plain ', 'text/plain;charset=utf-8', 'text/plain;charset=UTF-8', 'text/plain; charset=utf-8', 'text/plain; charset=UTF-8', 'text/plain; charset=utf-8', 'text/plain; charset=UTF-8', 'text/plain ; charset = utf-8', 'text/plain ; charset = UTF-8'] for mime in mimes: self._test_plain_text_mime_type(mime) def _test_plain_text_mime_type(self, mime): r = mime_types.normalize_content_type(mime) self.assertEqual('text/plain', r) def test_unsupported_charset_in_plain_text_mime(self): mime = 'text/plain; charset=ISO-8859-1' r = mime_types.normalize_content_type(mime) self.assertEqual(mime, r) def test_malformed_charset_in_plain_text_mime(self): mime = 'text/plain; charset is ISO-8859-1' r = mime_types.normalize_content_type(mime) self.assertEqual(mime, r) def test_binary_normalization(self): mime = 'application/octet-stream' r = mime_types.normalize_content_type(mime) self.assertEqual('application/octet-stream', r) def test_bogus_mime_normalization(self): mime = 'something/bogus' r = mime_types.normalize_content_type(mime) self.assertEqual('something/bogus', r) @utils.parameterized_test_case class WhenTestingIfContentTypeAndEncodingSupported(utils.BaseTestCase): @utils.parameterized_dataset({ 'plaintext_none': ['text/plain', None], 'octectstream_base64': ['application/octet-stream', 'base64'], 'pkcs8_base64': ['application/pkcs8', 'base64'] }) def test_content_type_encoding_supported(self, content_type, encoding): self.assertTrue(mime_types.is_content_type_with_encoding_supported( content_type, encoding)) @utils.parameterized_dataset({ 'plaintext_none': ['text/plain', 'base64'], 'octectstream_no_encoding': ['application/octet-stream', None], 'pkcs8_no_encoding': ['application/pkcs8', None], 'unknown_with_valid_encoding': ['application/unknown-content-type', 'base64'] }) def test_content_type_encoding_not_supported(self, content_type, encoding): self.assertFalse(mime_types.is_content_type_with_encoding_supported( content_type, encoding)) barbican-9.1.0.dev50/barbican/tests/plugin/util/test_utils.py0000664000175000017500000000407713616500636024355 0ustar sahidsahid00000000000000# Copyright (c) 2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from barbican.plugin.util import utils as plugin_utils from barbican.tests import utils as test_utils class ExtensionStub(object): def __init__(self): self.name = 'my_name' self.plugin_instance = 'my_instance' self.obj = None self.exc = None self.args = None self.kwargs = None def plugin(self, *args, **kwargs): if self.exc: raise self.exc self.args = args self.kwargs = kwargs return self.plugin_instance def set_raise_exception(self, exc): self.exc = exc class ManagerStub(object): def __init__(self, extensions): self.extensions = extensions class WhenInvokingInstantiatePlugins(test_utils.BaseTestCase): def setUp(self): super(WhenInvokingInstantiatePlugins, self).setUp() self.extension = ExtensionStub() self.manager = ManagerStub([self.extension]) def test_creates_plugin_instance(self): args = ('foo', 'bar') kwargs = {'foo': 1} plugin_utils.instantiate_plugins( self.manager, invoke_args=args, invoke_kwargs=kwargs) self.assertEqual('my_instance', self.extension.obj) self.assertEqual(args, self.extension.args) self.assertEqual(kwargs, self.extension.kwargs) def test_does_not_create_plugin_instance_due_to_error(self): self.extension.set_raise_exception(ValueError()) plugin_utils.instantiate_plugins(self.manager) self.assertIsNone(self.extension.obj) barbican-9.1.0.dev50/barbican/tests/plugin/util/test_translations.py0000664000175000017500000003235213616500636025733 0ustar sahidsahid00000000000000# Copyright (c) 2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
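# A minimal round-trip sketch of the translation helpers exercised by the
# tests below. Illustrative only (not invoked by the test suite); it assumes
# barbican is importable and uses only calls that appear in these tests.
def _demo_translations_round_trip():
    from barbican.plugin.interface import secret_store as s
    from barbican.plugin.util import translations

    # Normalization base64-encodes the payload for storage...
    normalized, content_type = translations.normalize_before_encryption(
        unencrypted='stuff',
        content_type='text/plain',
        content_encoding='',
        secret_type=s.SecretType.OPAQUE)

    # ...and denormalization decodes it again on retrieval.
    assert translations.denormalize_after_decryption(
        normalized, content_type) == 'stuff'
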
from oslo_serialization import base64 from barbican.plugin.interface import secret_store as s from barbican.plugin.util import translations from barbican.tests import keys from barbican.tests import utils @utils.parameterized_test_case class WhenNormalizingBeforeEncryption(utils.BaseTestCase): dataset_for_raised_exceptions = { 'non_encrypted_content': { 'exception': s.SecretNoPayloadProvidedException, 'unencrypted': None, 'secret_type': s.SecretType.OPAQUE, 'content_type': '', 'content_encoding': '' }, 'invalid_content_type': { 'exception': s.SecretContentTypeNotSupportedException, 'unencrypted': 'stuff', 'secret_type': s.SecretType.OPAQUE, 'content_type': 'nope', 'content_encoding': '' }, 'content_encoding_isnt_base64': { 'exception': s.SecretContentEncodingMustBeBase64, 'unencrypted': 'stuff', 'secret_type': s.SecretType.OPAQUE, 'content_type': 'application/octet-stream', 'content_encoding': 'other_stuff', 'enforce_text_only': True }, 'unsupported_content_encoding': { 'exception': s.SecretContentEncodingNotSupportedException, 'unencrypted': 'stuff', 'secret_type': s.SecretType.OPAQUE, 'content_type': 'application/octet-stream', 'content_encoding': 'other_stuff' } } dataset_for_normalization = { 'plain_text': { 'unencrypted': 'stuff', 'secret_type': s.SecretType.OPAQUE, 'content_type': 'text/plain', 'content_encoding': '', 'expected': base64.encode_as_bytes('stuff') }, 'binary_base64': { 'unencrypted': base64.encode_as_bytes('stuff'), 'secret_type': s.SecretType.OPAQUE, 'content_type': 'application/octet-stream', 'content_encoding': 'base64', 'expected': base64.encode_as_bytes('stuff') }, 'binary': { 'unencrypted': 'stuff', 'secret_type': s.SecretType.OPAQUE, 'content_type': 'application/octet-stream', 'content_encoding': None, 'expected': base64.encode_as_bytes('stuff') }, 'symmetric_base64': { 'unencrypted': base64.encode_as_bytes('stuff'), 'secret_type': s.SecretType.SYMMETRIC, 'content_type': 'application/octet-stream', 'content_encoding': 'base64', 'expected': base64.encode_as_bytes('stuff') }, 'symmetric': { 'unencrypted': 'stuff', 'secret_type': s.SecretType.SYMMETRIC, 'content_type': 'application/octet-stream', 'content_encoding': None, 'expected': base64.encode_as_bytes('stuff') }, 'private_base64': { 'unencrypted': base64.encode_as_bytes(keys.get_private_key_pem()), 'secret_type': s.SecretType.PRIVATE, 'content_type': 'application/octet-stream', 'content_encoding': 'base64', 'expected': base64.encode_as_bytes(keys.get_private_key_pem()) }, 'private': { 'unencrypted': keys.get_private_key_pem(), 'secret_type': s.SecretType.PRIVATE, 'content_type': 'application/octet-stream', 'content_encoding': None, 'expected': base64.encode_as_bytes(keys.get_private_key_pem()) }, 'public_base64': { 'unencrypted': base64.encode_as_bytes(keys.get_public_key_pem()), 'secret_type': s.SecretType.PUBLIC, 'content_type': 'application/octet-stream', 'content_encoding': 'base64', 'expected': base64.encode_as_bytes(keys.get_public_key_pem()) }, 'public': { 'unencrypted': keys.get_public_key_pem(), 'secret_type': s.SecretType.PUBLIC, 'content_type': 'application/octet-stream', 'content_encoding': None, 'expected': base64.encode_as_bytes(keys.get_public_key_pem()) }, 'certificate_base64': { 'unencrypted': base64.encode_as_bytes(keys.get_certificate_pem()), 'secret_type': s.SecretType.CERTIFICATE, 'content_type': 'application/octet-stream', 'content_encoding': 'base64', 'expected': base64.encode_as_bytes(keys.get_certificate_pem()) }, 'certificate': { 'unencrypted': keys.get_certificate_pem(), 
'secret_type': s.SecretType.CERTIFICATE, 'content_type': 'application/octet-stream', 'content_encoding': None, 'expected': base64.encode_as_bytes(keys.get_certificate_pem()) }, } def setUp(self): super(WhenNormalizingBeforeEncryption, self).setUp() # Aliasing to reduce the number of line continuations self.normalize = translations.normalize_before_encryption @utils.parameterized_dataset(dataset_for_normalization) def test_can_normalize(self, **kwargs): unencrypted, content_type = self.normalize( unencrypted=kwargs['unencrypted'], content_type=kwargs['content_type'], content_encoding=kwargs['content_encoding'], secret_type=kwargs['secret_type'] ) self.assertEqual(kwargs['expected'], unencrypted) self.assertEqual(kwargs['content_type'], content_type) def test_can_normalize_tmp_plain_text(self): unencrypted, content_type = self.normalize( unencrypted='stuff', content_type='text/plain', content_encoding='', secret_type=s.SecretType.OPAQUE ) self.assertEqual(base64.encode_as_bytes('stuff'), unencrypted) self.assertEqual('text/plain', content_type) def test_null_content_encoding_gets_passed_through(self): unencrypted, content_type = self.normalize( unencrypted='bam', content_type='application/octet-stream', content_encoding=None, secret_type=s.SecretType.OPAQUE ) self.assertEqual(base64.encode_as_bytes('bam'), unencrypted) self.assertEqual('application/octet-stream', content_type) def test_can_normalize_base64_str(self): unencrypted, content_type = self.normalize( unencrypted=base64.encode_as_bytes('stuff').decode('utf-8'), content_type='application/octet-stream', content_encoding='base64', secret_type=s.SecretType.OPAQUE ) self.assertEqual(base64.encode_as_bytes('stuff'), unencrypted) self.assertEqual('application/octet-stream', content_type) def test_can_normalize_base64_bytes(self): unencrypted, content_type = self.normalize( unencrypted=base64.encode_as_bytes('stuff'), content_type='application/octet-stream', content_encoding='base64', secret_type=s.SecretType.OPAQUE ) self.assertEqual(base64.encode_as_bytes('stuff'), unencrypted) self.assertEqual('application/octet-stream', content_type) @utils.parameterized_dataset(dataset_for_raised_exceptions) def test_normalize_raising_exceptions_with(self, exception, **kwargs): self.assertRaises(exception, self.normalize, **kwargs) class WhenAnalyzingBeforeDecryption(utils.BaseTestCase): def setUp(self): super(WhenAnalyzingBeforeDecryption, self).setUp() # Aliasing to reduce the number of line continuations self.analyze = translations.analyze_before_decryption def test_supported_content_type_doesnt_raise_exception(self): try: self.analyze('text/plain') except Exception as e: self.fail('Shouldn\'t have raised: {0}'.format(e)) def test_unsupported_content_type_raises_exception(self): exception = s.SecretAcceptNotSupportedException kwargs = {'content_type': 'nope!'} self.assertRaises(exception, self.analyze, **kwargs) @utils.parameterized_test_case class WhenDenormalizingAfterDecryption(utils.BaseTestCase): dataset_for_pem_denormalize = { 'private_key': { 'encoded_pem': base64.encode_as_bytes(keys.get_private_key_pem()), 'content_type': 'application/octet-stream' }, 'public_key': { 'encoded_pem': base64.encode_as_bytes(keys.get_public_key_pem()), 'content_type': 'application/octet-stream' }, 'certificate': { 'encoded_pem': base64.encode_as_bytes(keys.get_certificate_pem()), 'content_type': 'application/octet-stream' } } def setUp(self): super(WhenDenormalizingAfterDecryption, self).setUp() # Aliasing to reduce the number of line continuations 
self.denormalize = translations.denormalize_after_decryption def test_ascii_characters_to_utf8_with_plain_text(self): secret = 'bam' normalized_secret = base64.encode_as_bytes(secret) unencrypted = self.denormalize(normalized_secret, 'text/plain') self.assertEqual('bam', unencrypted) def test_ascii_characters_to_utf8_with_app_octet_stream(self): unencrypted = self.denormalize(base64.encode_as_bytes('bam'), 'application/octet-stream') self.assertEqual(b'bam', unencrypted) def test_non_ascii_character_with_plain_text_raises_exception(self): exception = s.SecretAcceptNotSupportedException kwargs = { 'unencrypted': base64.encode_as_bytes(b'\xff'), 'content_type': 'text/plain' } self.assertRaises(exception, self.denormalize, **kwargs) def test_content_type_not_text_or_binary_raises_exception(self): exception = s.SecretContentTypeNotSupportedException kwargs = { 'unencrypted': 'bam', 'content_type': 'other_content_type' } self.assertRaises(exception, self.denormalize, **kwargs) @utils.parameterized_dataset(dataset_for_pem_denormalize) def test_denormalize_pem(self, encoded_pem, content_type): denorm_secret = self.denormalize(encoded_pem, content_type) self.assertEqual(base64.decode_as_bytes(encoded_pem), denorm_secret) class WhenConvertingKeyFormats(utils.BaseTestCase): def test_passes_convert_private_pem_to_der(self): pem = keys.get_private_key_pem() expected_der = keys.get_private_key_der() der = translations.convert_pem_to_der( pem, s.SecretType.PRIVATE) self.assertEqual(expected_der, der) def test_passes_convert_private_der_to_pem(self): der = keys.get_private_key_der() expected_pem = keys.get_private_key_pem() pem = translations.convert_der_to_pem( der, s.SecretType.PRIVATE) self.assertEqual(expected_pem, pem) def test_passes_convert_public_pem_to_der(self): pem = keys.get_public_key_pem() expected_der = keys.get_public_key_der() der = translations.convert_pem_to_der( pem, s.SecretType.PUBLIC) self.assertEqual(expected_der, der) def test_passes_convert_public_der_to_pem(self): der = keys.get_public_key_der() expected_pem = keys.get_public_key_pem() pem = translations.convert_der_to_pem( der, s.SecretType.PUBLIC) self.assertEqual(expected_pem, pem) def test_passes_convert_certificate_pem_to_der(self): pem = keys.get_certificate_pem() expected_der = keys.get_certificate_der() der = translations.convert_pem_to_der( pem, s.SecretType.CERTIFICATE) self.assertEqual(expected_der, der) def test_passes_convert_certificate_der_to_pem(self): der = keys.get_certificate_der() expected_pem = keys.get_certificate_pem() pem = translations.convert_der_to_pem( der, s.SecretType.CERTIFICATE) self.assertEqual(expected_pem, pem) def test_passes_certificate_conversion(self): pem = keys.get_certificate_pem() der = translations.convert_pem_to_der( pem, s.SecretType.CERTIFICATE) converted_pem = translations.convert_der_to_pem( der, s.SecretType.CERTIFICATE) self.assertEqual(pem, converted_pem) def test_should_raise_to_pem_with_bad_secret_type(self): self.assertRaises(s.SecretGeneralException, translations.convert_der_to_pem, "der", "bad type") def test_should_raise_to_der_with_bad_secret_type(self): self.assertRaises(s.SecretGeneralException, translations.convert_pem_to_der, "pem", "bad type") barbican-9.1.0.dev50/barbican/tests/plugin/test_simple_certificate_manager.py0000664000175000017500000000636113616500636027563 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import testtools import barbican.plugin.interface.certificate_manager as cm import barbican.plugin.simple_certificate_manager as simple class WhenTestingSimpleCertificateManagerPlugin(testtools.TestCase): def setUp(self): super(WhenTestingSimpleCertificateManagerPlugin, self).setUp() self.plugin = simple.SimpleCertificatePlugin() def test_issue_certificate_request(self): result = self.plugin.issue_certificate_request(None, None, None, None) self.assertEqual(cm.CertificateStatus.WAITING_FOR_CA, result.status) def test_check_certificate_status(self): result = self.plugin.check_certificate_status(None, None, None, None) self.assertEqual( cm.CertificateStatus.CERTIFICATE_GENERATED, result.status) def test_modify_certificate_request(self): result = self.plugin.modify_certificate_request(None, None, None, None) self.assertEqual(cm.CertificateStatus.WAITING_FOR_CA, result.status) def test_cancel_certificate_request(self): result = self.plugin.cancel_certificate_request(None, None, None, None) self.assertEqual(cm.CertificateStatus.REQUEST_CANCELED, result.status) def test_supports(self): result = self.plugin.supports(None) self.assertTrue(result) def test_get_ca_info(self): result = self.plugin.get_ca_info() name = self.plugin.get_default_ca_name() self.assertIn(name, result) self.assertEqual(name, result[name][cm.INFO_NAME]) self.assertEqual(self.plugin.get_default_signing_cert(), result[name][cm.INFO_CA_SIGNING_CERT]) def test_supported_request_types(self): result = self.plugin.supported_request_types() supported_list = [cm.CertificateRequestType.CUSTOM_REQUEST, cm.CertificateRequestType.SIMPLE_CMC_REQUEST, cm.CertificateRequestType.FULL_CMC_REQUEST, cm.CertificateRequestType.STORED_KEY_REQUEST] self.assertEqual(supported_list, result) class WhenTestingSimpleCertificateEventManagerPlugin(testtools.TestCase): def setUp(self): super(WhenTestingSimpleCertificateEventManagerPlugin, self).setUp() self.plugin = simple.SimpleCertificateEventPlugin() def test_notify_ca_is_unavailable(self): # Test that eventing plugin method does not have side effects such as # raising exceptions. self.plugin.notify_ca_is_unavailable(None, None, None, None) def test_notify_certificate_is_ready(self): # Test that eventing plugin method does not have side effects such as # raising exceptions. self.plugin.notify_certificate_is_ready(None, None, None) barbican-9.1.0.dev50/barbican/tests/plugin/test_symantec.py0000664000175000017500000000655013616500636024061 0ustar sahidsahid00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
import mock import testtools try: import barbican.plugin.interface.certificate_manager as cm import barbican.plugin.symantec as sym imports_ok = True except ImportError: # Symantec imports probably not available imports_ok = False from barbican.tests import utils @testtools.skipIf(not imports_ok, "Symantec imports not available") class WhenTestingSymantecPlugin(utils.BaseTestCase): def setUp(self): super(WhenTestingSymantecPlugin, self).setUp() self.order_meta = { 'cert_type': 'ssl123', 'organization': 'Shinra Corp', 'phone': '555-555-5555', 'so many things...': 'more...' } self.error_msg = 'Error Message Here' self.symantec = sym.SymantecCertificatePlugin() self.barbican_plugin_dto = cm.BarbicanMetaDTO() self.symantec_patcher = mock.patch( 'barbican.plugin.symantec._ca_create_order' ) self.mock_create_order = self.symantec_patcher.start() def tearDown(self): super(WhenTestingSymantecPlugin, self).tearDown() if hasattr(self, 'mock_create_order'): self.mock_create_order.stop() def test_successful_issue_certificate_request(self): self.mock_create_order.return_value = (True, None, None) order_id = '1234' plugin_meta = {} result = self.symantec.issue_certificate_request( order_id, self.order_meta, plugin_meta, self.barbican_plugin_dto ) self.assertEqual("waiting for CA", result.status) def test_unsuccessful_certificate_request_can_retry(self): self.mock_create_order.return_value = (False, self.error_msg, True) order_id = '1234' plugin_meta = {} result = self.symantec.issue_certificate_request( order_id, self.order_meta, plugin_meta, self.barbican_plugin_dto ) self.assertEqual("client data issue seen", result.status) def test_unsuccessful_certificate_request_no_retry(self): self.mock_create_order.return_value = (False, self.error_msg, False) order_id = '12345' plugin_meta = {} result = self.symantec.issue_certificate_request( order_id, self.order_meta, plugin_meta, self.barbican_plugin_dto ) self.assertEqual("CA unavailable for request", result.status) def test_should_raise_unsupported_certificate_request(self): order_id = '1234' plugin_meta = {} self.assertRaises( NotImplementedError, self.symantec.check_certificate_status, order_id, self.order_meta, plugin_meta, self.barbican_plugin_dto ) barbican-9.1.0.dev50/barbican/tests/plugin/test_kmip.py0000664000175000017500000010632413616500636023176 0ustar sahidsahid00000000000000# Copyright (c) 2014 Johns Hopkins University Applied Physics Laboratory # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
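# A minimal sketch of the DTOs the KMIP secret store consumes, mirroring the
# fixtures and tests below. Illustrative only (not invoked by the test
# suite); the base64 payload is a hypothetical placeholder value.
def _demo_secret_dto():
    from barbican.plugin.interface import secret_store

    # The secret store API passes secrets around as SecretDTOs: a secret
    # type, a base64 payload, a KeySpec describing algorithm/bit length/mode,
    # and a content type.
    key_spec = secret_store.KeySpec(
        secret_store.KeyAlgorithm.AES, 128, 'mode')
    return secret_store.SecretDTO(
        secret_store.SecretType.SYMMETRIC,
        'dGVzdCBrZXkgbWF0ZXJpYWw=',  # hypothetical base64-encoded key bytes
        key_spec,
        'content_type',
        transport_key=None)
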
import base64 import ssl import stat import testtools import mock from barbican.plugin.interface import secret_store from barbican.tests import keys from barbican.tests import utils from kmip.core import enums from kmip.pie import client from kmip.pie import objects from barbican.plugin import kmip_secret_store as kss from barbican.plugin.util import translations def get_sample_opaque_secret(): return objects.OpaqueObject( base64.b64decode(utils.get_symmetric_key()), enums.OpaqueDataType.NONE) def get_sample_symmetric_key(key_b64=utils.get_symmetric_key(), key_length=128, algorithm=enums.CryptographicAlgorithm.AES): return objects.SymmetricKey( algorithm, key_length, base64.b64decode(key_b64)) def get_sample_public_key(pkcs1=False): if pkcs1: public_key_value = kss.get_public_key_der_pkcs1( keys.get_public_key_pem()) key_format_type = enums.KeyFormatType.PKCS_1 else: public_key_value = keys.get_public_key_der() key_format_type = enums.KeyFormatType.X_509 return objects.PublicKey( enums.CryptographicAlgorithm.RSA, 2048, public_key_value, key_format_type) def get_sample_private_key(pkcs1=False): if pkcs1: private_key_value = kss.get_private_key_der_pkcs1( keys.get_private_key_pem()) key_format_type = enums.KeyFormatType.PKCS_1 else: private_key_value = keys.get_private_key_der() key_format_type = enums.KeyFormatType.PKCS_8 return objects.PrivateKey( enums.CryptographicAlgorithm.RSA, 2048, private_key_value, key_format_type) def get_sample_certificate(): return objects.X509Certificate( keys.get_certificate_der()) @utils.parameterized_test_case class WhenTestingKMIPSecretStore(utils.BaseTestCase): """Test using the KMIP server backend for SecretStore.""" def setUp(self): super(WhenTestingKMIPSecretStore, self).setUp() self.expected_username = "sample_username" self.expected_password = "sample_password" CONF = kss.CONF CONF.kmip_plugin.username = self.expected_username CONF.kmip_plugin.password = self.expected_password CONF.kmip_plugin.keyfile = None CONF.kmip_plugin.pkcs1_only = False # get the latest protocol that SSL supports protocol_dict = ssl.__dict__.get('_PROTOCOL_NAMES') latest_protocol = protocol_dict.get(max(protocol_dict.keys())) if not latest_protocol.startswith('PROTOCOL_'): latest_protocol = 'PROTOCOL_' + latest_protocol CONF.kmip_plugin.ssl_version = latest_protocol self.secret_store = kss.KMIPSecretStore(CONF) self.credential = self.secret_store.credential self.symmetric_type = secret_store.SecretType.SYMMETRIC self.sample_secret_features = { 'key_format_type': enums.KeyFormatType.RAW, 'key_value': { 'bytes': bytearray(b'\x00\x00\x00') }, 'cryptographic_algorithm': enums.CryptographicAlgorithm.AES, 'cryptographic_length': 128 } self.symmetric_key_uuid = 'dde870ad-cea3-41a3-9bb9-e8ab579a2f91' self.public_key_uuid = 'cb908abb-d363-4d9f-8ef2-5e84d27dd25c' self.private_key_uuid = '2d4c0544-4ec6-45b7-81cd-b23c75744eac' self.sample_secret = get_sample_symmetric_key() self.secret_store.client.open = mock.MagicMock( spec=client.ProxyKmipClient.open) self.secret_store.client.close = mock.MagicMock( spec=client.ProxyKmipClient.close) self.secret_store.client.create = mock.MagicMock( return_value=self.symmetric_key_uuid) self.secret_store.client.create_key_pair = mock.MagicMock( return_value=(self.public_key_uuid, self.private_key_uuid)) self.secret_store.client.register = mock.MagicMock( return_value='uuid') self.secret_store.client.destroy = mock.MagicMock( return_value=None) self.secret_store.client.get = mock.MagicMock( return_value=self.sample_secret) # --------------- TEST CONFIG 
OPTIONS --------------------------------- def test_enable_pkcs1_only_config_option(self): CONF = kss.CONF CONF.kmip_plugin.pkcs1_only = True secret_store = kss.KMIPSecretStore(CONF) self.assertTrue(secret_store.pkcs1_only) @testtools.skipIf(not getattr(ssl, "PROTOCOL_TLSv1_2", None), "TLSv1.2 is not available on this system") def test_enable_tlsv12_config_option(self): ssl_version = "PROTOCOL_TLSv1_2" CONF = kss.CONF CONF.kmip_plugin.ssl_version = ssl_version kss.KMIPSecretStore(CONF) self.assertEqual(ssl_version, CONF.kmip_plugin.ssl_version) @testtools.skipIf(not getattr(ssl, "PROTOCOL_TLSv1", None), "TLSv1 is not available on this system") def test_enable_tlsv1_config_option(self): ssl_version = "PROTOCOL_TLSv1" CONF = kss.CONF CONF.kmip_plugin.ssl_version = ssl_version kss.KMIPSecretStore(CONF) self.assertEqual(ssl_version, CONF.kmip_plugin.ssl_version) # --------------- TEST GENERATE_SUPPORTS --------------------------------- def test_generate_supports_aes(self): key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.AES, None, 'mode') for x in [128, 192, 256]: key_spec.bit_length = x self.assertTrue(self.secret_store.generate_supports(key_spec)) def test_generate_supports_des(self): key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.DES, None, 'mode') for x in [56]: key_spec.bit_length = x self.assertTrue(self.secret_store.generate_supports(key_spec)) def test_generate_supports_desede(self): key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.DESEDE, None, 'mode') for x in [56, 112, 168]: key_spec.bit_length = x self.assertTrue(self.secret_store.generate_supports(key_spec)) def test_generate_supports_rsa(self): key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.RSA, None, 'mode') for x in [2048, 3072, 4096]: key_spec.bit_length = x self.assertTrue(self.secret_store.generate_supports(key_spec)) def test_generate_supports_dsa(self): key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.DSA, None, 'mode') for x in [2048, 3072]: key_spec.bit_length = x self.assertTrue(self.secret_store.generate_supports(key_spec)) def test_generate_supports_with_invalid_alg(self): key_spec = secret_store.KeySpec('invalid_alg', 56, 'mode') self.assertFalse(self.secret_store.generate_supports(key_spec)) def test_generate_supports_with_valid_alg_invalid_bit_length(self): key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.AES, 56, 'mode') self.assertFalse(self.secret_store.generate_supports(key_spec)) # ------------ TEST GENERATE_SYMMETRIC ----------------------------------- def test_generate_symmetric_key_assert_called(self): key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.AES, 128, 'mode') self.secret_store.generate_symmetric_key(key_spec) self.secret_store.client.create.assert_called_once_with( enums.CryptographicAlgorithm.AES, 128) def test_generate_symmetric_key_return_value(self): key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.AES, 128, 'mode') return_value = self.secret_store.generate_symmetric_key(key_spec) expected = {kss.KMIPSecretStore.KEY_UUID: self.symmetric_key_uuid} self.assertEqual(expected, return_value) def test_generate_symmetric_key_server_error_occurs(self): self.secret_store.client.create.side_effect = Exception key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.AES, 128, 'mode') self.assertRaises( secret_store.SecretGeneralException, self.secret_store.generate_symmetric_key, key_spec) def test_generate_symmetric_key_invalid_algorithm(self): key_spec = secret_store.KeySpec('invalid_algorithm', 128, 'mode') self.assertRaises( 
secret_store.SecretAlgorithmNotSupportedException, self.secret_store.generate_symmetric_key, key_spec) def test_generate_symmetric_key_valid_algorithm_invalid_bit_length(self): key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.AES, 56, 'mode') self.assertRaises( secret_store.SecretAlgorithmNotSupportedException, self.secret_store.generate_symmetric_key, key_spec) def test_generate_symmetric_key_not_symmetric_algorithm(self): key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.RSA, 2048, 'mode') self.assertRaises( kss.KMIPSecretStoreError, self.secret_store.generate_symmetric_key, key_spec) def test_generate_symmetric_key_error_opening_connection(self): self.secret_store.client.open.side_effect = Exception key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.AES, 128, 'mode') self.assertRaises( secret_store.SecretGeneralException, self.secret_store.generate_symmetric_key, key_spec) # ---------------- TEST GENERATE_ASYMMETRIC ------------------------------ def test_generate_asymmetric_key_assert_called(self): key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.RSA, 2048, 'mode') self.secret_store.generate_asymmetric_key(key_spec) self.secret_store.client.create_key_pair.assert_called_once_with( enums.CryptographicAlgorithm.RSA, 2048) def test_generate_asymmetric_key_return_value(self): key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.RSA, 2048, 'mode') return_value = self.secret_store.generate_asymmetric_key(key_spec) expected_private_key_meta = { kss.KMIPSecretStore.KEY_UUID: self.private_key_uuid} expected_public_key_meta = { kss.KMIPSecretStore.KEY_UUID: self.public_key_uuid} expected_passphrase_meta = None self.assertEqual( expected_private_key_meta, return_value.private_key_meta) self.assertEqual( expected_public_key_meta, return_value.public_key_meta) self.assertEqual( expected_passphrase_meta, return_value.passphrase_meta) def test_generate_asymmetric_key_server_error_occurs(self): self.secret_store.client.create_key_pair.side_effect = Exception key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.RSA, 2048, 'mode') self.assertRaises( secret_store.SecretGeneralException, self.secret_store.generate_asymmetric_key, key_spec) def test_generate_asymmetric_key_invalid_algorithm(self): key_spec = secret_store.KeySpec('invalid_algorithm', 160, 'mode') self.assertRaises( secret_store.SecretAlgorithmNotSupportedException, self.secret_store.generate_asymmetric_key, key_spec) def test_generate_asymmetric_key_valid_algorithm_invalid_bit_length(self): key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.RSA, 56, 'mode') self.assertRaises( secret_store.SecretAlgorithmNotSupportedException, self.secret_store.generate_asymmetric_key, key_spec) def test_generate_asymmetric_key_not_asymmetric_algorithm(self): key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.AES, 128, 'mode') self.assertRaises( secret_store.SecretAlgorithmNotSupportedException, self.secret_store.generate_asymmetric_key, key_spec) def test_generate_asymmetric_key_check_for_passphrase(self): key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.RSA, 2048, 'mode', 'passphrase') self.assertRaises( kss.KMIPSecretStoreActionNotSupported, self.secret_store.generate_asymmetric_key, key_spec) def test_generate_asymmetric_key_error_opening_connection(self): self.secret_store.client.open.side_effect = Exception key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.RSA, 2048, 'mode') self.assertRaises( secret_store.SecretGeneralException, self.secret_store.generate_asymmetric_key, 
key_spec) # ----------------- TEST STORE ------------------------------------------- def test_store_symmetric_secret_assert_called(self): key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.AES, 128, 'mode') sym_key = utils.get_symmetric_key() secret_dto = secret_store.SecretDTO(secret_store.SecretType.SYMMETRIC, sym_key, key_spec, 'content_type', transport_key=None) self.secret_store.store_secret(secret_dto) self.secret_store.client.register.assert_called_once_with( objects.SymmetricKey( enums.CryptographicAlgorithm.AES, 128, base64.b64decode(utils.get_symmetric_key()))) def test_store_symmetric_secret_return_value(self): key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.AES, 128, 'mode') sym_key = utils.get_symmetric_key() secret_dto = secret_store.SecretDTO(secret_store.SecretType.SYMMETRIC, sym_key, key_spec, 'content_type', transport_key=None) return_value = self.secret_store.store_secret(secret_dto) expected = {kss.KMIPSecretStore.KEY_UUID: 'uuid'} self.assertEqual(expected, return_value) def test_store_passphrase_secret_assert_called(self): key_spec = secret_store.KeySpec(None, None, None) passphrase = base64.b64encode(b"supersecretpassphrase") secret_dto = secret_store.SecretDTO(secret_store.SecretType.PASSPHRASE, passphrase, key_spec, 'content_type', transport_key=None) self.secret_store.store_secret(secret_dto) self.secret_store.client.register.assert_called_once_with( objects.SecretData( base64.b64decode(passphrase), enums.SecretDataType.PASSWORD)) def test_store_passphrase_secret_return_value(self): key_spec = secret_store.KeySpec(None, None, None) passphrase = b"supersecretpassphrase" secret_dto = secret_store.SecretDTO(secret_store.SecretType.PASSPHRASE, base64.b64encode(passphrase), key_spec, 'content_type', transport_key=None) return_value = self.secret_store.store_secret(secret_dto) expected = {kss.KMIPSecretStore.KEY_UUID: 'uuid'} self.assertEqual(expected, return_value) def test_store_opaque_secret_assert_called(self): key_spec = secret_store.KeySpec(None, None, None) opaque = base64.b64encode(b'\x00\x01\x02\x03\x04\x05\x06\x07') secret_dto = secret_store.SecretDTO(secret_store.SecretType.OPAQUE, opaque, key_spec, 'content_type', transport_key=None) self.secret_store.store_secret(secret_dto) self.secret_store.client.register.assert_called_once_with( objects.OpaqueObject( base64.b64decode(opaque), enums.OpaqueDataType.NONE)) def test_store_opaque_secret_return_value(self): key_spec = secret_store.KeySpec(None, None, None) opaque = b'\x00\x01\x02\x03\x04\x05\x06\x07' secret_dto = secret_store.SecretDTO(secret_store.SecretType.OPAQUE, base64.b64encode(opaque), key_spec, 'content_type', transport_key=None) return_value = self.secret_store.store_secret(secret_dto) expected = {kss.KMIPSecretStore.KEY_UUID: 'uuid'} self.assertEqual(expected, return_value) @utils.parameterized_dataset({ 'private_pkcs8': [secret_store.SecretType.PRIVATE, keys.get_private_key_pem(), enums.ObjectType.PRIVATE_KEY, keys.get_private_key_der(), False], 'private_pkcs1': [secret_store.SecretType.PRIVATE, keys.get_private_key_pem(), enums.ObjectType.PRIVATE_KEY, kss.get_private_key_der_pkcs1( keys.get_private_key_pem()), True], 'public_pkcs8': [secret_store.SecretType.PUBLIC, keys.get_public_key_pem(), enums.ObjectType.PUBLIC_KEY, keys.get_public_key_der(), False], 'public_pkcs1': [secret_store.SecretType.PUBLIC, keys.get_public_key_pem(), enums.ObjectType.PUBLIC_KEY, kss.get_public_key_der_pkcs1( keys.get_public_key_pem()), True], }) def test_store_asymmetric_key_secret_assert_called(self, 
barbican_type, barbican_key, kmip_type, kmip_key, pkcs1_only): key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.RSA, 2048) secret_value = base64.b64encode(barbican_key) secret_dto = secret_store.SecretDTO(barbican_type, secret_value, key_spec, 'content_type') self.secret_store.pkcs1_only = pkcs1_only self.secret_store.store_secret(secret_dto) secret_value = base64.b64decode(secret_value) if not pkcs1_only: secret_value = translations.convert_pem_to_der( secret_value, barbican_type) if kmip_type == enums.ObjectType.PUBLIC_KEY: if pkcs1_only: secret_value = kss.get_public_key_der_pkcs1(secret_value) secret = objects.PublicKey( enums.CryptographicAlgorithm.RSA, 2048, secret_value, enums.KeyFormatType.X_509) else: if pkcs1_only: secret_value = kss.get_private_key_der_pkcs1(secret_value) secret = objects.PrivateKey( enums.CryptographicAlgorithm.RSA, 2048, secret_value, enums.KeyFormatType.PKCS_8) self.secret_store.client.register.assert_called_once_with(secret) @utils.parameterized_dataset({ 'private_pkcs8': [secret_store.SecretType.PRIVATE, keys.get_private_key_pem(), False], 'private_pkcs1': [secret_store.SecretType.PRIVATE, keys.get_private_key_pem(), True], 'public_pkcs8': [secret_store.SecretType.PUBLIC, keys.get_public_key_pem(), False], 'public_pkcs1': [secret_store.SecretType.PUBLIC, keys.get_public_key_pem(), True], }) def test_store_asymmetric_key_secret_return_value(self, barbican_type, barbican_key, pkcs1_only): key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.RSA, 2048) secret_dto = secret_store.SecretDTO(barbican_type, base64.b64encode(barbican_key), key_spec, 'content_type') self.secret_store.pkcs1_only = pkcs1_only return_value = self.secret_store.store_secret(secret_dto) expected = {kss.KMIPSecretStore.KEY_UUID: 'uuid'} self.assertEqual(expected, return_value) @utils.parameterized_dataset({ 'rsa': [secret_store.KeyAlgorithm.RSA, 2048], 'no_key_spec': [None, None] }) def test_store_certificate_secret_assert_called( self, algorithm, bit_length): key_spec = secret_store.KeySpec(algorithm, bit_length) certificate_value = base64.b64encode(keys.get_certificate_pem()) secret_dto = secret_store.SecretDTO( secret_store.SecretType.CERTIFICATE, certificate_value, key_spec, 'content_type') self.secret_store.store_secret(secret_dto) self.secret_store.client.register.assert_called_once_with( objects.X509Certificate(translations.convert_pem_to_der( base64.b64decode(certificate_value), secret_store.SecretType.CERTIFICATE))) def test_store_certificate_secret_return_value(self): key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.RSA, 2048) secret_dto = secret_store.SecretDTO( secret_store.SecretType.CERTIFICATE, base64.b64encode(keys.get_certificate_pem()), key_spec, 'content_type') return_value = self.secret_store.store_secret(secret_dto) expected = {kss.KMIPSecretStore.KEY_UUID: 'uuid'} self.assertEqual(expected, return_value) def test_store_secret_server_error_occurs(self): self.secret_store.client.register.side_effect = Exception key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.AES, 128, 'mode') secret_dto = secret_store.SecretDTO(secret_store.SecretType.SYMMETRIC, utils.get_symmetric_key(), key_spec, 'content_type', transport_key=None) self.assertRaises( secret_store.SecretGeneralException, self.secret_store.store_secret, secret_dto) def test_store_secret_invalid_algorithm(self): key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.DSA, 128, 'mode') secret_dto = secret_store.SecretDTO(secret_store.SecretType.SYMMETRIC, "AAAA", key_spec, 'content_type', 
transport_key=None) self.assertRaises( secret_store.SecretAlgorithmNotSupportedException, self.secret_store.store_secret, secret_dto) def test_store_secret_valid_algorithm_invalid_bit_length(self): key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.AES, 56, 'mode') secret_dto = secret_store.SecretDTO(secret_store.SecretType.SYMMETRIC, "AAAA", key_spec, 'content_type', transport_key=None) self.assertRaises( secret_store.SecretAlgorithmNotSupportedException, self.secret_store.store_secret, secret_dto) def test_store_secret_error_opening_connection(self): self.secret_store.client.open.side_effect = Exception key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.AES, 128, 'mode') secret_dto = secret_store.SecretDTO(secret_store.SecretType.SYMMETRIC, utils.get_symmetric_key(), key_spec, 'content_type', transport_key=None) self.assertRaises( secret_store.SecretGeneralException, self.secret_store.store_secret, secret_dto) # --------------- TEST GET ----------------------------------------------- @utils.parameterized_dataset({ 'symmetric': [get_sample_symmetric_key(), secret_store.SecretType.SYMMETRIC, utils.get_symmetric_key(), False], 'hmac_sha1': [get_sample_symmetric_key( algorithm=enums.CryptographicAlgorithm.HMAC_SHA1), secret_store.SecretType.SYMMETRIC, utils.get_symmetric_key(), False], 'hmac_sha256': [get_sample_symmetric_key( algorithm=enums.CryptographicAlgorithm.HMAC_SHA256), secret_store.SecretType.SYMMETRIC, utils.get_symmetric_key(), False], 'hmac_sha384': [get_sample_symmetric_key( algorithm=enums.CryptographicAlgorithm.HMAC_SHA384), secret_store.SecretType.SYMMETRIC, utils.get_symmetric_key(), False], 'hmac_sha512': [get_sample_symmetric_key( algorithm=enums.CryptographicAlgorithm.HMAC_SHA512), secret_store.SecretType.SYMMETRIC, utils.get_symmetric_key(), False], 'triple_des': [get_sample_symmetric_key( key_b64=utils.get_triple_des_key(), key_length=192, algorithm=enums.CryptographicAlgorithm.TRIPLE_DES), secret_store.SecretType.SYMMETRIC, utils.get_triple_des_key(), False], 'opaque': [get_sample_opaque_secret(), secret_store.SecretType.OPAQUE, utils.get_symmetric_key(), False], 'public_key': [get_sample_public_key(), secret_store.SecretType.PUBLIC, base64.b64encode(keys.get_public_key_pem()), False], 'public_key_pkcs1': [get_sample_public_key(pkcs1=True), secret_store.SecretType.PUBLIC, base64.b64encode(keys.get_public_key_pem()), True], 'private_key': [get_sample_private_key(), secret_store.SecretType.PRIVATE, base64.b64encode(keys.get_private_key_pem()), False], 'private_key_pkcs1': [get_sample_private_key(pkcs1=True), secret_store.SecretType.PRIVATE, base64.b64encode(keys.get_private_key_pem()), True], 'certificate': [get_sample_certificate(), secret_store.SecretType.CERTIFICATE, base64.b64encode(keys.get_certificate_pem()), False] }) def test_get_secret(self, kmip_secret, secret_type, expected_secret, pkcs1_only): self.secret_store.pkcs1_only = pkcs1_only self.secret_store.client.get.return_value = kmip_secret uuid = utils.generate_test_uuid(0) metadata = {kss.KMIPSecretStore.KEY_UUID: uuid} secret_dto = self.secret_store.get_secret(secret_type, metadata) self.secret_store.client.get.assert_called_once_with(uuid) self.assertEqual(secret_store.SecretDTO, type(secret_dto)) self.assertEqual(secret_type, secret_dto.type) self.assertEqual(expected_secret, secret_dto.secret) def test_get_secret_symmetric_return_value_invalid_key_material_type(self): invalid_secret = self.sample_secret invalid_secret.value = list('invalid') self.secret_store.client.get.return_value = 
invalid_secret metadata = {kss.KMIPSecretStore.KEY_UUID: self.symmetric_key_uuid} self.assertRaises( secret_store.SecretGeneralException, self.secret_store.get_secret, self.symmetric_type, metadata) def test_get_secret_symmetric_server_error_occurs(self): self.secret_store.client.get.side_effect = Exception metadata = {kss.KMIPSecretStore.KEY_UUID: self.symmetric_key_uuid} self.assertRaises( secret_store.SecretGeneralException, self.secret_store.get_secret, self.symmetric_type, metadata) def test_get_secret_symmetric_error_opening_connection(self): self.secret_store.client.open.side_effect = Exception metadata = {kss.KMIPSecretStore.KEY_UUID: self.symmetric_key_uuid} self.assertRaises( secret_store.SecretGeneralException, self.secret_store.get_secret, self.symmetric_type, metadata) # ---------------- TEST DELETE ------------------------------------------- def test_delete_with_null_metadata_values(self): metadata = {kss.KMIPSecretStore.KEY_UUID: None} self.assertIsNone(self.secret_store.delete_secret(metadata)) def test_delete_secret_assert_called(self): metadata = {kss.KMIPSecretStore.KEY_UUID: self.symmetric_key_uuid} self.secret_store.delete_secret(metadata) self.secret_store.client.destroy.assert_called_once_with( self.symmetric_key_uuid) def test_delete_secret_return_value(self): metadata = {kss.KMIPSecretStore.KEY_UUID: self.symmetric_key_uuid} return_value = self.secret_store.delete_secret(metadata) self.assertIsNone(return_value) def test_delete_secret_server_error_occurs(self): self.secret_store.client.destroy.side_effect = Exception metadata = {kss.KMIPSecretStore.KEY_UUID: self.symmetric_key_uuid} self.assertRaises( secret_store.SecretGeneralException, self.secret_store.delete_secret, metadata) def test_delete_secret_error_opening_connection(self): self.secret_store.client.open.side_effect = Exception metadata = {kss.KMIPSecretStore.KEY_UUID: self.symmetric_key_uuid} self.assertRaises( secret_store.SecretGeneralException, self.secret_store.delete_secret, metadata) # -------------- TEST HELPER FUNCTIONS ----------------------------------- def test_credential(self): actual_credential = self.secret_store.credential self.assertEqual( self.expected_username, actual_credential.credential_value.username) self.assertEqual( self.expected_password, actual_credential.credential_value.password) def test_credential_None(self): CONF = kss.CONF CONF.kmip_plugin.username = None CONF.kmip_plugin.password = None CONF.kmip_plugin.keyfile = None secret_store = kss.KMIPSecretStore(CONF) self.assertIsNone(secret_store.credential) def test_map_type_ss_to_kmip_valid_type(self): ss_types = [secret_store.SecretType.SYMMETRIC, secret_store.SecretType.PUBLIC, secret_store.SecretType.PRIVATE] for ss_type in ss_types: self.assertIsNotNone( self.secret_store._map_type_ss_to_kmip(ss_type)) def test_map_type_ss_to_kmip_invalid_type(self): object_type, key_format_type = ( self.secret_store._map_type_ss_to_kmip('bad_type')) self.assertIsNone(object_type) self.assertIsNone(key_format_type) def test_validate_keyfile_permissions_good(self): config = {'return_value.st_mode': (stat.S_IRUSR | stat.S_IFREG)} with mock.patch('os.stat', **config): self.assertIsNone( self.secret_store._validate_keyfile_permissions('/some/path/')) def test_check_keyfile_permissions_bad(self): config = {'return_value.st_mode': (stat.S_IWOTH | stat.S_IFREG)} with mock.patch('os.stat', **config): self.assertRaises( kss.KMIPSecretStoreError, self.secret_store._validate_keyfile_permissions, '/some/path/') def 
test_checks_keyfile_permissions(self): config = {'return_value': True} func = ("barbican.plugin.kmip_secret_store." "KMIPSecretStore._validate_keyfile_permissions") with mock.patch(func, **config) as m: CONF = kss.CONF CONF.kmip_plugin.keyfile = '/some/path' kss.KMIPSecretStore(CONF) self.assertEqual(1, len(m.mock_calls)) def test_get_plugin_name(self): CONF = kss.CONF CONF.kmip_plugin.plugin_name = "Test KMIP Plugin" secret_store = kss.KMIPSecretStore(CONF) self.assertEqual("Test KMIP Plugin", secret_store.get_plugin_name()) barbican-9.1.0.dev50/barbican/tests/plugin/interface/0000775000175000017500000000000013616500640022552 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/tests/plugin/interface/test_certificate_manager.py0000664000175000017500000002662313616500636030155 0ustar sahidsahid00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import datetime import mock import testtools from barbican.common import utils as common_utils from barbican.model import models from barbican.plugin.interface import certificate_manager as cm from barbican.tests import database_utils from barbican.tests import utils class WhenTestingCertificateEventPluginManager(testtools.TestCase): def setUp(self): super(WhenTestingCertificateEventPluginManager, self).setUp() self.project_id = '1234' self.order_ref = 'http://www.mycerts.com/v1/orders/123456' self.container_ref = 'http://www.mycerts.com/v1/containers/654321' self.error_msg = 'Something is broken' self.retry_in_msec = 5432 self.plugin_returned = mock.MagicMock() self.plugin_name = common_utils.generate_fullname_for( self.plugin_returned) self.plugin_loaded = mock.MagicMock(obj=self.plugin_returned) self.manager = cm.get_event_plugin_manager() self.manager.extensions = [self.plugin_loaded] def test_get_plugin_by_name(self): self.assertEqual(self.plugin_returned, self.manager.get_plugin_by_name(self.plugin_name)) def test_notify_ca_is_unavailable(self): self.manager.notify_ca_is_unavailable( self.project_id, self.order_ref, self.error_msg, self.retry_in_msec) self.plugin_returned.notify_ca_is_unavailable.assert_called_once_with( self.project_id, self.order_ref, self.error_msg, self.retry_in_msec) def test_notify_certificate_is_ready(self): self.manager.notify_certificate_is_ready( self.project_id, self.order_ref, self.container_ref) pr = self.plugin_returned pr.notify_certificate_is_ready.assert_called_once_with( self.project_id, self.order_ref, self.container_ref) def test_invoke_certificate_plugins(self): self.manager._invoke_certificate_plugins( 'test_invoke_certificate_plugins', self.project_id, self.order_ref, self.container_ref) # The _invoke_certificate_plugins method should invoke on # self.plugin_returned the same method by name as the function # that invoked it...in this case it is this test method. 
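        # (Illustrative sketch, not part of the original test.) Given the
        # assertion below, the dispatch can be pictured as a getattr() loop
        # over the loaded extensions, invoking the named method on each
        # plugin; the real barbican implementation may differ in details:
        #
        #     def _invoke_certificate_plugins(self, method_name, *args):
        #         for ext in self.extensions:
        #             getattr(ext.obj, method_name)(*args)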
pr = self.plugin_returned pr.test_invoke_certificate_plugins.assert_called_once_with( self.project_id, self.order_ref, self.container_ref) def test_raises_error_with_no_plugin_by_name_found(self): self.manager.extensions = [] self.assertRaises( cm.CertificateEventPluginNotFound, self.manager.get_plugin_by_name, 'any-name-here' ) def test_raises_error_with_no_plugin_for_invoke_certificate_plugins(self): self.manager.extensions = [] self.assertRaises( cm.CertificateEventPluginNotFound, self.manager._invoke_certificate_plugins, self.project_id, self.order_ref, self.error_msg, self.retry_in_msec, ) class WhenTestingCertificatePluginManager(database_utils.RepositoryTestCase, utils.MockModelRepositoryMixin): def setUp(self): super(WhenTestingCertificatePluginManager, self).setUp() self.cert_spec = {} self.plugin_returned = mock.MagicMock() self.plugin_name = common_utils.generate_fullname_for( self.plugin_returned) types_list = [cm.CertificateRequestType.SIMPLE_CMC_REQUEST, cm.CertificateRequestType.CUSTOM_REQUEST] self.plugin_returned.supported_request_types.return_value = types_list self.plugin_returned.supports.return_value = True self.plugin_loaded = mock.MagicMock(obj=self.plugin_returned) expiration = (datetime.datetime.utcnow() + datetime.timedelta( days=cm.CA_INFO_DEFAULT_EXPIRATION_DAYS)) ca_info = { cm.INFO_NAME: "my_ca", cm.INFO_DESCRIPTION: "Certificate Authority my_ca", cm.INFO_CA_SIGNING_CERT: "Undefined", cm.INFO_INTERMEDIATES: "Undefined", cm.INFO_EXPIRATION: expiration.isoformat() } self.plugin_returned.get_ca_info.return_value = { 'plugin_ca_id1': ca_info } parsed_ca = { 'plugin_name': self.plugin_name, 'plugin_ca_id': 'plugin_ca_id1', 'name': self.plugin_name, 'description': 'Master CA for default plugin', 'ca_signing_certificate': 'ZZZZZ', 'intermediates': 'YYYYY' } self.ca = models.CertificateAuthority(parsed_ca) self.ca.id = 'ca_id' self.ca_repo = mock.MagicMock() self.ca_repo.get_by_create_date.return_value = ( self.ca, 0, 1, 1) self.ca_repo.create_from.return_value = None self.ca_repo.get.return_value = self.ca self.project = models.Project() self.project.id = '12345' self.setup_ca_repository_mock(self.ca_repo) self.plugin_loaded = mock.MagicMock(obj=self.plugin_returned) self.manager = cm.CertificatePluginManager() self.manager.extensions = [self.plugin_loaded] def test_get_plugin_by_name(self): self.assertEqual(self.plugin_returned, self.manager.get_plugin_by_name(self.plugin_name)) def test_get_plugin_by_ca_id(self): self.assertEqual(self.plugin_returned, self.manager.get_plugin_by_ca_id('ca_id')) def test_raises_error_with_no_plugin_by_ca_id_found(self): self.ca_repo.get.return_value = None self.assertRaises( cm.CertificatePluginNotFoundForCAID, self.manager.get_plugin_by_ca_id, 'any-name-here' ) def test_raises_error_with_no_plugin_by_name_found(self): self.manager.extensions = [] self.assertRaises( cm.CertificatePluginNotFound, self.manager.get_plugin_by_name, 'any-name-here' ) def test_get_plugin_no_request_type_provided(self): # no request_type defaults to "custom" self.assertEqual(self.plugin_returned, self.manager.get_plugin(self.cert_spec)) def test_get_plugin_request_type_supported(self): self.cert_spec = { cm.REQUEST_TYPE: cm.CertificateRequestType.SIMPLE_CMC_REQUEST} self.assertEqual(self.plugin_returned, self.manager.get_plugin(self.cert_spec)) def test_raises_error_get_plugin_request_type_not_supported(self): self.cert_spec = { cm.REQUEST_TYPE: cm.CertificateRequestType.FULL_CMC_REQUEST} self.assertRaises( cm.CertificatePluginNotFound, 
self.manager.get_plugin, self.cert_spec ) def test_raises_error_with_no_plugin_found(self): self.manager.extensions = [] self.assertRaises( cm.CertificatePluginNotFound, self.manager.get_plugin, self.cert_spec ) def test_get_plugin_with_ca_to_be_added(self): self.ca_repo.get_by_create_date.return_value = ( None, 0, 1, 0) self.assertEqual(self.plugin_returned, self.manager.get_plugin(self.cert_spec)) def test_refresh_ca_list(self): utc_now = datetime.datetime.utcnow() expired_time = utc_now - datetime.timedelta(days=1) expiration = utc_now + datetime.timedelta(days=1) ca1_info = { cm.INFO_NAME: "expired_ca_to_be_modified", cm.INFO_DESCRIPTION: "expired_ca to be modified", cm.INFO_CA_SIGNING_CERT: "XXXXXXX-expired-XXXXXX", cm.INFO_INTERMEDIATES: "YYYYYYY-expired-YYYYYYY", cm.INFO_EXPIRATION: expired_time.isoformat() } ca1_modified_info = { cm.INFO_NAME: "expired_ca_to_be_modified", cm.INFO_DESCRIPTION: "expired_ca to be modified", cm.INFO_CA_SIGNING_CERT: "XXXXXXX-no-longer-expired-XXXXXX", cm.INFO_INTERMEDIATES: "YYYYYYY-no-longer-expired-YYYYYYY", cm.INFO_EXPIRATION: expiration.isoformat() } ca2_info = { cm.INFO_NAME: "expired_ca_to_be_deleted", cm.INFO_DESCRIPTION: "expired ca to be deleted", cm.INFO_CA_SIGNING_CERT: "XXXX-expired-to-be-deleted-XXXX", cm.INFO_INTERMEDIATES: "YYYY-expired-to-be-deleted-YYYY", cm.INFO_EXPIRATION: expired_time.isoformat() } ca3_info = { cm.INFO_NAME: "new-ca-to-be-added", cm.INFO_DESCRIPTION: "new-ca-to-be-added", cm.INFO_CA_SIGNING_CERT: "XXXX-to-be-addeed-XXXX", cm.INFO_INTERMEDIATES: "YYYY-to-be-added-YYYY", cm.INFO_EXPIRATION: expiration.isoformat() } self.plugin_returned.get_ca_info.return_value = { 'plugin_ca_id_ca1': ca1_modified_info, 'plugin_ca_id_ca3': ca3_info } parsed_ca1 = dict(ca1_info) parsed_ca1[cm.PLUGIN_CA_ID] = 'plugin_ca_id_ca1' parsed_ca1['plugin_name'] = self.plugin_name ca1 = models.CertificateAuthority(parsed_ca1) ca1.id = "ca1_id" parsed_ca2 = dict(ca2_info) parsed_ca2[cm.PLUGIN_CA_ID] = 'plugin_ca_id_ca2' parsed_ca2['plugin_name'] = self.plugin_name ca2 = models.CertificateAuthority(parsed_ca2) ca2.id = "ca2_id" side_effect = [(None, 0, 4, 0), ([ca1, ca2], 0, 4, 2)] self.ca_repo.get_by_create_date.side_effect = side_effect self.manager.refresh_ca_table() self.plugin_returned.get_ca_info.assert_called_once_with() self.ca_repo.update_entity.assert_called_once_with( ca1, ca1_modified_info) self.ca_repo.delete_entity_by_id.assert_called_once_with( ca2.id, None) self.ca_repo.create_from.assert_has_calls([]) def test_refresh_ca_list_plugin_when_get_ca_info_raises(self): self.ca_repo.get_by_create_date.return_value = (None, 0, 4, 0) self.plugin_returned.get_ca_info.side_effect = Exception() self.manager.refresh_ca_table() self.plugin_returned.get_ca_info.assert_called_once_with() def test_refresh_ca_list_with_bad_ca_returned_from_plugin(self): ca3_info = { cm.INFO_DESCRIPTION: "PLUGIN FAIL: this-ca-has-no-info", } self.plugin_returned.get_ca_info.return_value = { 'plugin_ca_id_ca3': ca3_info } self.ca_repo.get_by_create_date.return_value = (None, 0, 4, 0) self.ca_repo.create_from.side_effect = Exception() self.manager.refresh_ca_table() self.plugin_returned.get_ca_info.assert_called_once_with() self.ca_repo.create_from.assert_has_calls([]) barbican-9.1.0.dev50/barbican/tests/plugin/interface/test_secret_store.py0000664000175000017500000002757313616500636026707 0ustar sahidsahid00000000000000# Copyright (c) 2014 Johns Hopkins University Applied Physics Laboratory # # Licensed under the Apache License, Version 2.0 (the "License"); # you may 
not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import mock import six from barbican.common import utils as common_utils from barbican.plugin.crypto import base from barbican.plugin.crypto import manager as cm from barbican.plugin.crypto import p11_crypto from barbican.plugin.interface import secret_store as str from barbican.plugin import kmip_secret_store as kss from barbican.plugin import store_crypto from barbican.tests import utils class TestSecretStore(str.SecretStoreBase): """Secret store plugin for testing support.""" def __init__(self, supported_alg_list): super(TestSecretStore, self).__init__() self.alg_list = supported_alg_list def get_plugin_name(self): raise NotImplementedError # pragma: no cover def generate_symmetric_key(self, key_spec): raise NotImplementedError # pragma: no cover def generate_asymmetric_key(self, key_spec): raise NotImplementedError # pragma: no cover def store_secret(self, secret_dto): raise NotImplementedError # pragma: no cover def get_secret(self, secret_metadata): raise NotImplementedError # pragma: no cover def generate_supports(self, key_spec): return key_spec.alg in self.alg_list def delete_secret(self, secret_metadata): raise NotImplementedError # pragma: no cover def store_secret_supports(self, key_spec): return key_spec.alg in self.alg_list class TestSecretStoreWithTransportKey(str.SecretStoreBase): """Secret store plugin for testing support. This plugin will override the relevant methods for key wrapping. 
""" def __init__(self, supported_alg_list): super(TestSecretStoreWithTransportKey, self).__init__() self.alg_list = supported_alg_list def get_plugin_name(self): raise NotImplementedError # pragma: no cover def generate_symmetric_key(self, key_spec): raise NotImplementedError # pragma: no cover def generate_asymmetric_key(self, key_spec): raise NotImplementedError # pragma: no cover def store_secret(self, secret_dto): raise NotImplementedError # pragma: no cover def get_secret(self, secret_metadata): raise NotImplementedError # pragma: no cover def generate_supports(self, key_spec): return key_spec.alg in self.alg_list def delete_secret(self, secret_metadata): raise NotImplementedError # pragma: no cover def store_secret_supports(self, key_spec): return key_spec.alg in self.alg_list def get_transport_key(self): return "transport key" def is_transport_key_current(self, transport_key): return True class WhenTestingSecretStorePluginManager(utils.BaseTestCase): def setUp(self): super(WhenTestingSecretStorePluginManager, self).setUp() self.manager = str.SecretStorePluginManager() def test_get_store_supported_plugin_no_plugin_name(self): plugin = TestSecretStore([str.KeyAlgorithm.AES]) plugin_mock = mock.MagicMock(obj=plugin) self.manager.extensions = [plugin_mock] keySpec = str.KeySpec(str.KeyAlgorithm.AES, 128) self.assertEqual(plugin, self.manager.get_plugin_store(keySpec)) def test_get_store_supported_plugin_with_plugin_name(self): plugin = TestSecretStore([str.KeyAlgorithm.AES]) plugin_mock = mock.MagicMock(obj=plugin) self.manager.extensions = [plugin_mock] plugin_found = self.manager.get_plugin_store( None, plugin_name=common_utils.generate_fullname_for(plugin)) self.assertEqual(plugin, plugin_found) def test_get_generate_supported_plugin(self): plugin = TestSecretStore([str.KeyAlgorithm.AES]) plugin_mock = mock.MagicMock(obj=plugin) self.manager.extensions = [plugin_mock] keySpec = str.KeySpec(str.KeyAlgorithm.AES, 128) self.assertEqual(plugin, self.manager.get_plugin_generate(keySpec)) def test_get_store_no_plugin_found(self): self.manager.extensions = [] keySpec = str.KeySpec(str.KeyAlgorithm.AES, 128) self.assertRaises( str.SecretStorePluginsNotConfigured, self.manager.get_plugin_store, keySpec, ) def test_get_store_no_plugin_found_by_name(self): plugin = TestSecretStore([str.KeyAlgorithm.AES]) plugin_mock = mock.MagicMock(obj=plugin) self.manager.extensions = [plugin_mock] keySpec = str.KeySpec(str.KeyAlgorithm.AES, 128) plugin_name = 'plugin' exception_result = self.assertRaises( str.SecretStorePluginNotFound, self.manager.get_plugin_store, keySpec, plugin_name=plugin_name ) self.assertEqual( 'Secret store plugin "{name}" not found.'.format(name=plugin_name), six.text_type(exception_result)) def test_get_generate_no_plugin_found(self): self.manager.extensions = [] keySpec = str.KeySpec(str.KeyAlgorithm.AES, 128) self.assertRaises( str.SecretStorePluginsNotConfigured, self.manager.get_plugin_generate, keySpec, ) def test_get_store_no_supported_plugin(self): plugin = TestSecretStore([]) plugin_mock = mock.MagicMock(obj=plugin) self.manager.extensions = [plugin_mock] keySpec = str.KeySpec(str.KeyAlgorithm.AES, 128) self.assertRaises( str.SecretStoreSupportedPluginNotFound, self.manager.get_plugin_store, keySpec, ) def test_get_generate_no_supported_plugin(self): plugin = TestSecretStore([]) plugin_mock = mock.MagicMock(obj=plugin) self.manager.extensions = [plugin_mock] keySpec = str.KeySpec(str.KeyAlgorithm.AES, 128) self.assertRaises( str.SecretGenerateSupportedPluginNotFound, 
self.manager.get_plugin_generate,
            keySpec,
        )

    def test_get_store_no_plugin_with_tkey_and_no_supports_storage(self):
        plugin = TestSecretStore([])
        plugin_mock = mock.MagicMock(obj=plugin)
        self.manager.extensions = [plugin_mock]
        keySpec = str.KeySpec(str.KeyAlgorithm.AES, 128)
        self.assertRaises(
            str.SecretStoreSupportedPluginNotFound,
            self.manager.get_plugin_store,
            key_spec=keySpec,
            transport_key_needed=True,
        )

    def test_get_store_plugin_with_tkey_and_no_supports_storage(self):
        plugin = TestSecretStoreWithTransportKey([])
        plugin_mock = mock.MagicMock(obj=plugin)
        self.manager.extensions = [plugin_mock]
        keySpec = str.KeySpec(str.KeyAlgorithm.AES, 128)
        self.assertRaises(
            str.SecretStoreSupportedPluginNotFound,
            self.manager.get_plugin_store,
            key_spec=keySpec,
            transport_key_needed=True,
        )

    def test_get_store_plugin_with_no_tkey_and_supports_storage(self):
        plugin = TestSecretStore([str.KeyAlgorithm.AES])
        plugin_mock = mock.MagicMock(obj=plugin)
        self.manager.extensions = [plugin_mock]
        keySpec = str.KeySpec(str.KeyAlgorithm.AES, 128)
        self.assertRaises(
            str.SecretStoreSupportedPluginNotFound,
            self.manager.get_plugin_store,
            key_spec=keySpec,
            transport_key_needed=True,
        )

    @mock.patch('barbican.common.utils.generate_fullname_for')
    def test_get_retrieve_plugin_raises_when_not_available(
            self, generate_full_name_for):
        plugin = TestSecretStore([str.KeyAlgorithm.AES])
        plugin_mock = mock.MagicMock(obj=plugin)
        self.manager.extensions = [plugin_mock]
        generate_full_name_for.return_value = "another plugin name"
        plugin_name = 'plugin name searched for'
        exception_result = self.assertRaises(
            str.StorePluginNotAvailableOrMisconfigured,
            self.manager.get_plugin_retrieve_delete,
            plugin_name=plugin_name,
        )
        self.assertIn(plugin_name, six.text_type(exception_result))

    def test_get_store_plugin_with_tkey_and_supports_storage(self):
        plugin1 = TestSecretStore([str.KeyAlgorithm.AES])
        plugin1_mock = mock.MagicMock(obj=plugin1)
        plugin2 = TestSecretStoreWithTransportKey([str.KeyAlgorithm.AES])
        plugin2_mock = mock.MagicMock(obj=plugin2)
        self.manager.extensions = [plugin1_mock, plugin2_mock]
        keySpec = str.KeySpec(str.KeyAlgorithm.AES, 128)
        self.assertEqual(plugin2, self.manager.get_plugin_store(
            key_spec=keySpec, transport_key_needed=True))
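# ---------------------------------------------------------------------
# (Illustrative sketch, not part of the original test module.) The
# multiple-backend tests below feed parallel lists of secret store and
# crypto plugin names into init_via_conf_file(). Roughly, that helper
# simulates a barbican.conf of the shape sketched here; the
# "[secretstore:...]" suffix names are illustrative, and passing
# global_default_index=1 corresponds to flagging the kmip section as
# the global default:
#
#   [secretstore]
#   enable_multiple_secret_stores = True
#   stores_lookup_suffix = software, kmip, simple
#
#   [secretstore:software]
#   secret_store_plugin = store_crypto
#   crypto_plugin = p11_crypto
#
#   [secretstore:kmip]
#   secret_store_plugin = kmip_plugin
#   global_default = True   # only when global_default_index=1
#
#   [secretstore:simple]
#   secret_store_plugin = store_crypto
#   crypto_plugin = simple_crypto
# ---------------------------------------------------------------------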
class TestSecretStorePluginManagerMultipleBackend(
        utils.MultipleBackendsTestCase):

    def test_plugin_created_as_per_multiple_backend_conf(self):
        """Check plugins are created as per multiple backend conf"""
        store_plugin_names = ['store_crypto', 'kmip_plugin', 'store_crypto']
        crypto_plugin_names = ['p11_crypto', '', 'simple_crypto']
        self.init_via_conf_file(store_plugin_names, crypto_plugin_names,
                                enabled=True)
        with mock.patch('barbican.plugin.crypto.p11_crypto.P11CryptoPlugin.'
                        '_create_pkcs11') as m_pkcs11, \
                mock.patch('kmip.pie.client.ProxyKmipClient') as m_kmip:
            manager = str.SecretStorePluginManager()
            # check pkcs11 and kmip plugin instantiation is actually invoked
            self.assertTrue(m_pkcs11.called)
            self.assertTrue(m_kmip.called)
            # check store crypto adapter is matched as it's defined first.
            keySpec = str.KeySpec(str.KeyAlgorithm.AES, 128)
            plugin_found = manager.get_plugin_store(keySpec)
            self.assertIsInstance(plugin_found,
                                  store_crypto.StoreCryptoAdapterPlugin)
            # check pkcs11 crypto is matched as it's defined first.
            crypto_plugin = cm.get_manager().get_plugin_store_generate(
                base.PluginSupportTypes.ENCRYPT_DECRYPT)
            self.assertIsInstance(crypto_plugin, p11_crypto.P11CryptoPlugin)

    def test_plugin_created_kmip_default_multiple_backend_conf(self):
        """Check plugins are created as per multiple backend conf

        Here KMIP plugin is marked as global default plugin
        """
        store_plugin_names = ['store_crypto', 'kmip_plugin', 'store_crypto']
        crypto_plugin_names = ['p11_crypto', '', 'simple_crypto']
        self.init_via_conf_file(store_plugin_names, crypto_plugin_names,
                                enabled=True, global_default_index=1)
        with mock.patch('barbican.plugin.crypto.p11_crypto.P11CryptoPlugin.'
                        '_create_pkcs11') as m_pkcs11, \
                mock.patch('kmip.pie.client.ProxyKmipClient') as m_kmip:
            manager = str.SecretStorePluginManager()
            # check pkcs11 and kmip plugin instantiation is actually invoked
            self.assertTrue(m_pkcs11.called)
            self.assertTrue(m_kmip.called)
            # check kmip store is matched as it's the global default store.
            keySpec = str.KeySpec(str.KeyAlgorithm.AES, 128)
            plugin_found = manager.get_plugin_store(keySpec)
            self.assertIsInstance(plugin_found, kss.KMIPSecretStore)
barbican-9.1.0.dev50/barbican/tests/plugin/interface/__init__.py0000664000175000017500000000000013616500636024656 0ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/tests/plugin/crypto/0000775000175000017500000000000013616500640022132 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/tests/plugin/crypto/test_manager.py0000664000175000017500000000762513616500636025164 0ustar sahidsahid00000000000000
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import threading

from barbican.common import utils as common_utils
from barbican.plugin.crypto import base
from barbican.plugin.crypto import manager as cm
from barbican.tests import utils


class MyThread(threading.Thread):
    def __init__(self, index, results):
        threading.Thread.__init__(self)
        self.index = index
        self.results = results

    def run(self):
        self.results[self.index] = cm.get_manager()


class WhenTestingManager(utils.BaseTestCase):

    def setUp(self):
        super(WhenTestingManager, self).setUp()

        self.plugin_returned = mock.MagicMock()
        self.plugin_type = base.PluginSupportTypes.ENCRYPT_DECRYPT
        self.plugin_returned.supports.return_value = True
        self.plugin_name = common_utils.generate_fullname_for(
            self.plugin_returned)
        self.plugin_loaded = mock.MagicMock(obj=self.plugin_returned)

        self.manager = cm.get_manager()
        self.manager.extensions = [self.plugin_loaded]

    def test_can_override_enabled_plugins(self):
        """Verify can override default configuration for plugin selection."""
        # Reset manager singleton otherwise we have test execution
        # order problems
        cm._PLUGIN_MANAGER = None

        cm.CONF.set_override(
            "enabled_crypto_plugins",
            ['foo_plugin'],
            group='crypto')

        manager_to_test = cm.get_manager()

        self.assertIsInstance(
            manager_to_test, cm._CryptoPluginManager)

        self.assertListEqual(['foo_plugin'], manager_to_test._names)

    def test_get_plugin_store_generate(self):
        self.assertEqual(
            self.plugin_returned,
            self.manager.get_plugin_store_generate(self.plugin_type))

    def test_raises_error_with_wrong_plugin_type(self):
        self.plugin_returned.supports.return_value = False
        self.assertRaises(
            base.CryptoPluginUnsupportedOperation,
            self.manager.get_plugin_store_generate,
            self.plugin_type)

    def test_raises_error_with_no_active_store_generate_plugin(self):
        self.manager.extensions = []
        self.assertRaises(
            base.CryptoPluginNotFound,
            self.manager.get_plugin_store_generate,
            self.plugin_type)

    def test_get_plugin_retrieve(self):
        self.assertEqual(
            self.plugin_returned,
            self.manager.get_plugin_retrieve(self.plugin_name))

    def test_raises_error_with_wrong_plugin_name(self):
        self.assertRaises(
            base.CryptoPluginUnsupportedOperation,
            self.manager.get_plugin_retrieve,
            'other-name')

    def test_raises_error_with_no_active_plugin_name(self):
        self.manager.extensions = []
        self.assertRaises(
            base.CryptoPluginNotFound,
            self.manager.get_plugin_retrieve,
            self.plugin_name)

    def test_get_manager_with_multi_threads(self):
        self.manager.extensions = []
        self.manager = None
        results = [None] * 10
        threads = []
        # setup 10 threads to call get_manager() at same time
        for i in range(10):
            t = MyThread(i, results)
            t.start()
            threads.append(t)
        # wait for every thread to finish before checking its result
        for t in threads:
            t.join()
        # verify all threads return one and same plugin manager
        for i in range(10):
            self.assertIsInstance(results[i], cm._CryptoPluginManager)
            self.assertEqual(results[0], results[i])
barbican-9.1.0.dev50/barbican/tests/plugin/crypto/__init__.py0000664000175000017500000000000013616500636024236 0ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/tests/plugin/crypto/test_pkcs11.py0000664000175000017500000003734113616500636024660 0ustar sahidsahid00000000000000
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and # limitations under the License. import mock import six from barbican.common import exception from barbican.plugin.crypto import pkcs11 from barbican.tests import utils if six.PY3: long = int class WhenTestingPKCS11(utils.BaseTestCase): def setUp(self): super(WhenTestingPKCS11, self).setUp() self.lib = mock.Mock() self.lib.C_Initialize.return_value = pkcs11.CKR_OK self.lib.C_Finalize.return_value = pkcs11.CKR_OK self.lib.C_OpenSession.side_effect = self._open_session self.lib.C_CloseSession.return_value = pkcs11.CKR_OK self.lib.C_GetSessionInfo.side_effect = self._get_session_user self.lib.C_Login.return_value = pkcs11.CKR_OK self.lib.C_FindObjectsInit.return_value = pkcs11.CKR_OK self.lib.C_FindObjects.side_effect = self._find_objects_one self.lib.C_FindObjectsFinal.return_value = pkcs11.CKR_OK self.lib.C_GenerateKey.side_effect = self._generate_key self.lib.C_GenerateRandom.side_effect = self._generate_random self.lib.C_SeedRandom.return_value = pkcs11.CKR_OK self.lib.C_EncryptInit.return_value = pkcs11.CKR_OK self.lib.C_Encrypt.side_effect = self._encrypt self.lib.C_DecryptInit.return_value = pkcs11.CKR_OK self.lib.C_Decrypt.side_effect = self._decrypt self.lib.C_WrapKey.side_effect = self._wrap_key self.lib.C_UnwrapKey.side_effect = self._unwrap_key self.lib.C_SignInit.return_value = pkcs11.CKR_OK self.lib.C_Sign.side_effect = self._sign self.lib.C_VerifyInit.return_value = pkcs11.CKR_OK self.lib.C_Verify.side_effect = self._verify self.lib.C_DestroyObject.return_value = pkcs11.CKR_OK self.ffi = pkcs11.build_ffi() setattr(self.ffi, 'dlopen', lambda x: self.lib) self.cfg_mock = mock.MagicMock(name='config mock') self.cfg_mock.library_path = '/dev/null' self.cfg_mock.login_passphrase = 'foobar' self.cfg_mock.rw_session = False self.cfg_mock.slot_id = 1 self.cfg_mock.encryption_mechanism = 'CKM_AES_CBC' self.cfg_mock.hmac_keywrap_mechanism = 'CKM_SHA256_HMAC' self.pkcs11 = pkcs11.PKCS11( self.cfg_mock.library_path, self.cfg_mock.login_passphrase, self.cfg_mock.rw_session, self.cfg_mock.slot_id, self.cfg_mock.encryption_mechanism, ffi=self.ffi, hmac_keywrap_mechanism=self.cfg_mock.hmac_keywrap_mechanism ) def _generate_random(self, session, buf, length): self.ffi.buffer(buf)[:] = b'0' * length return pkcs11.CKR_OK def _get_session_public(self, session, session_info_ptr): if self.cfg_mock.rw_session: session_info_ptr[0].state = pkcs11.CKS_RW_PUBLIC_SESSION else: session_info_ptr[0].state = pkcs11.CKS_RO_PUBLIC_SESSION return pkcs11.CKR_OK def _get_session_user(self, session, session_info_ptr): if self.cfg_mock.rw_session: session_info_ptr[0].state = pkcs11.CKS_RW_USER_FUNCTIONS else: session_info_ptr[0].state = pkcs11.CKS_RO_USER_FUNCTIONS return pkcs11.CKR_OK def _open_session(self, *args, **kwargs): args[4][0] = long(1) return pkcs11.CKR_OK def _find_objects_one(self, session, obj_handle_ptr, max_count, count): obj_handle_ptr[0] = long(2) count[0] = 1 return pkcs11.CKR_OK def _find_objects_two(self, session, obj_handle_ptr, max_count, count): obj_handle_ptr[0] = long(2) count[0] = 2 return pkcs11.CKR_OK def _find_objects_zero(self, session, obj_handle_ptr, max_count, count): count[0] = 0 return pkcs11.CKR_OK def _generate_key(self, session, mech, attributes, attributes_len, obj_handle_ptr): obj_handle_ptr[0] = long(3) return pkcs11.CKR_OK def _encrypt(self, session, pt, pt_len, ct, ct_len): if self.pkcs11.generate_iv: self.ffi.buffer(ct)[:] = pt[::-1] + b'0' * self.pkcs11.gcmtagsize else: self.ffi.buffer(ct)[:] = 
pt[::-1] + b'0' * (self.pkcs11.gcmtagsize * 2) return pkcs11.CKR_OK def _decrypt(self, session, ct, ct_len, pt, pt_len): tmp = ct[:-self.pkcs11.gcmtagsize][::-1] self.ffi.buffer(pt)[:len(tmp)] = tmp return pkcs11.CKR_OK def _wrap_key(self, *args, **kwargs): wrapped_key = args[4] wrapped_key_len = args[5] wrapped_key_len[0] = long(16) if wrapped_key != self.ffi.NULL: self.ffi.buffer(wrapped_key)[:] = b'0' * 16 return pkcs11.CKR_OK def _unwrap_key(self, *args, **kwargs): unwrapped_key = args[7] unwrapped_key[0] = long(1) return pkcs11.CKR_OK def _sign(self, *args, **kwargs): buf = args[3] buf_len = args[4] self.ffi.buffer(buf)[:] = b'0' * buf_len[0] return pkcs11.CKR_OK def _verify(self, *args, **kwargs): return pkcs11.CKR_OK def test_public_get_session(self): self.lib.C_GetSessionInfo.side_effect = self._get_session_public sess = self.pkcs11.get_session() self.assertEqual(1, sess) self.assertEqual(2, self.lib.C_OpenSession.call_count) self.assertEqual(2, self.lib.C_GetSessionInfo.call_count) self.assertEqual(1, self.lib.C_Login.call_count) self.assertEqual(1, self.lib.C_CloseSession.call_count) def test_user_get_session(self): self.pkcs11.get_session() self.assertEqual(2, self.lib.C_OpenSession.call_count) self.assertEqual(2, self.lib.C_GetSessionInfo.call_count) self.assertEqual(0, self.lib.C_Login.call_count) def test_seed_random(self): rd = "random-data" session = 'session' self.pkcs11._seed_random(session, rd) self.lib.C_SeedRandom.assert_called_once_with( session, mock.ANY, len(rd)) def test_generate_random(self): r = self.pkcs11.generate_random(32, mock.MagicMock()) self.assertEqual(b'0' * 32, r) self.assertEqual(2, self.lib.C_GenerateRandom.call_count) def test_rng_self_test_fail(self): def _bad_generate_random(session, buf, length): self.ffi.buffer(buf)[:] = b'\x00' * length return pkcs11.CKR_OK self.lib.C_GenerateRandom.side_effect = _bad_generate_random self.assertRaises(exception.P11CryptoPluginException, self.pkcs11._rng_self_test, mock.MagicMock()) def test_get_key_handle_one_key(self): key = self.pkcs11.get_key_handle('CKK_AES', 'foo', mock.MagicMock()) self.assertEqual(2, key) self.assertEqual(1, self.lib.C_FindObjectsInit.call_count) self.assertEqual(1, self.lib.C_FindObjects.call_count) self.assertEqual(1, self.lib.C_FindObjectsFinal.call_count) def test_get_key_handle_no_keys(self): self.lib.C_FindObjects.side_effect = self._find_objects_zero key = self.pkcs11.get_key_handle('CKK_AES', 'foo', mock.MagicMock()) self.assertIsNone(key) self.assertEqual(1, self.lib.C_FindObjectsInit.call_count) self.assertEqual(1, self.lib.C_FindObjects.call_count) self.assertEqual(1, self.lib.C_FindObjectsFinal.call_count) def test_get_key_handle_multiple_keys(self): self.lib.C_FindObjects.side_effect = self._find_objects_two self.assertRaises(exception.P11CryptoPluginKeyException, self.pkcs11.get_key_handle, 'CKK_AES', 'foo', mock.MagicMock()) self.assertEqual(1, self.lib.C_FindObjectsInit.call_count) self.assertEqual(1, self.lib.C_FindObjects.call_count) self.assertEqual(1, self.lib.C_FindObjectsFinal.call_count) def test_generate_session_key(self): key = self.pkcs11.generate_key('CKK_AES', 16, 'CKM_AES_KEY_GEN', mock.MagicMock(), encrypt=True) self.assertEqual(3, key) self.assertEqual(1, self.lib.C_GenerateKey.call_count) def test_generate_master_key(self): key = self.pkcs11.generate_key('CKK_AES', 16, 'CKM_AES_KEY_GEN', mock.MagicMock(), key_label='key', encrypt=True, master_key=True) self.assertEqual(3, key) self.assertEqual(1, self.lib.C_GenerateKey.call_count) def 
test_generate_key_no_flags(self): self.assertRaises(exception.P11CryptoPluginException, self.pkcs11.generate_key, 'CKK_AES', 16, mock.MagicMock(), mock.MagicMock()) def test_generate_master_key_no_label(self): self.assertRaises(ValueError, self.pkcs11.generate_key, 'CKK_AES', 16, mock.MagicMock(), mock.MagicMock(), encrypt=True, master_key=True) def test_encrypt_with_no_iv_generation(self): pt = b'0123456789ABCDEF' self.pkcs11.generate_iv = False ct = self.pkcs11._VENDOR_SAFENET_CKM_AES_GCM_encrypt( mock.MagicMock(), pt, mock.MagicMock() ) self.assertEqual(ct['ct'][:len(pt)], pt[::-1]) self.assertGreater(len(ct['iv']), 0) self.assertEqual(1, self.lib.C_GenerateRandom.call_count) self.assertEqual(1, self.lib.C_EncryptInit.call_count) self.assertEqual(1, self.lib.C_Encrypt.call_count) def test_encrypt_with_iv_generation(self): pt = b'0123456789ABCDEF' self.pkcs11.generate_iv = True ct = self.pkcs11._VENDOR_SAFENET_CKM_AES_GCM_encrypt( mock.MagicMock(), pt, mock.MagicMock() ) self.assertEqual(ct['ct'][:len(pt)], pt[::-1]) self.assertGreater(len(ct['iv']), 0) self.assertEqual(2, self.lib.C_GenerateRandom.call_count) self.assertEqual(1, self.lib.C_EncryptInit.call_count) self.assertEqual(1, self.lib.C_Encrypt.call_count) def test_decrypt(self): ct = b'c2VjcmV0a2V5BwcHBwcHBw==' iv = b'0' * self.pkcs11.noncesize pt = self.pkcs11.decrypt('VENDOR_SAFENET_CKM_AES_GCM', mock.MagicMock(), iv, ct, mock.MagicMock()) pt_len = len(ct) - self.pkcs11.gcmtagsize self.assertEqual(pt[:pt_len], ct[:-self.pkcs11.gcmtagsize][::-1]) self.assertEqual(1, self.lib.C_DecryptInit.call_count) self.assertEqual(1, self.lib.C_Decrypt.call_count) def test_decrypt_with_pad(self): ct = b'c2VjcmV0a2V5BwcHBwcHBw==' iv = b'0' * self.pkcs11.blocksize pt = self.pkcs11.decrypt('VENDOR_SAFENET_CKM_AES_GCM', mock.MagicMock(), iv, ct, mock.MagicMock()) pt_len = len(ct) - self.pkcs11.gcmtagsize - 3 self.assertEqual(pt[:pt_len], ct[3:-self.pkcs11.gcmtagsize][::-1]) self.assertEqual(1, self.lib.C_DecryptInit.call_count) self.assertEqual(1, self.lib.C_Decrypt.call_count) def test_decrypt_with_pad_new_iv(self): ct = b'c2VjcmV0a2V5BwcHBwcHBw==' iv = b'0' * self.pkcs11.gcmtagsize pt = self.pkcs11.decrypt('VENDOR_SAFENET_CKM_AES_GCM', mock.MagicMock(), iv, ct, mock.MagicMock()) pt_len = len(ct) - self.pkcs11.gcmtagsize self.assertEqual(pt[:pt_len], ct[:-self.pkcs11.gcmtagsize][::-1]) self.assertEqual(1, self.lib.C_DecryptInit.call_count) self.assertEqual(1, self.lib.C_Decrypt.call_count) def test_decrypt_with_pad_wrong_size(self): ct = b'c2VjcmV0a2V5BwcHBwcHBw==' iv = b'0' * self.pkcs11.blocksize pt = self.pkcs11.decrypt('VENDOR_SAFENET_CKM_AES_GCM', mock.MagicMock(), iv, ct, mock.MagicMock()) pt_len = len(ct) - self.pkcs11.gcmtagsize self.assertEqual(pt[:pt_len], ct[:-self.pkcs11.gcmtagsize][::-1]) self.assertEqual(1, self.lib.C_DecryptInit.call_count) self.assertEqual(1, self.lib.C_Decrypt.call_count) def test_decrypt_with_pad_wrong_length(self): ct = b'c2VjcmV0a2V5BwcHBwcHBw==' iv = b'0' * self.pkcs11.blocksize pt = self.pkcs11.decrypt('VENDOR_SAFENET_CKM_AES_GCM', mock.MagicMock(), iv, ct, mock.MagicMock()) pt_len = len(ct) - self.pkcs11.gcmtagsize self.assertEqual(pt[:pt_len], ct[:-self.pkcs11.gcmtagsize][::-1]) self.assertEqual(1, self.lib.C_DecryptInit.call_count) self.assertEqual(1, self.lib.C_Decrypt.call_count) def test_decrypt_with_too_large_pad(self): ct = b'c2VjcmV0a2V5BwcHBwcHBw==' iv = b'0' * self.pkcs11.blocksize pt = self.pkcs11.decrypt('VENDOR_SAFENET_CKM_AES_GCM', mock.MagicMock(), iv, ct, mock.MagicMock()) pt_len = len(ct) 
- self.pkcs11.gcmtagsize self.assertEqual(pt[:pt_len], ct[:-self.pkcs11.gcmtagsize][::-1]) self.assertEqual(1, self.lib.C_DecryptInit.call_count) self.assertEqual(1, self.lib.C_Decrypt.call_count) def test_wrap_key(self): wkek = self.pkcs11.wrap_key(mock.Mock(), mock.Mock(), mock.Mock()) self.assertGreater(len(wkek['iv']), 0) self.assertEqual(b'0' * 16, wkek['wrapped_key']) self.assertEqual(2, self.lib.C_GenerateRandom.call_count) self.assertEqual(2, self.lib.C_WrapKey.call_count) def test_unwrap_key(self): kek = self.pkcs11.unwrap_key(mock.Mock(), b'0' * 16, b'0' * 16, mock.Mock()) self.assertEqual(1, kek) self.assertEqual(self.lib.C_UnwrapKey.call_count, 1) def test_compute_hmac(self): buf = self.pkcs11.compute_hmac(mock.MagicMock(), mock.MagicMock(), mock.MagicMock()) self.assertEqual(32, len(buf)) self.assertEqual(1, self.lib.C_SignInit.call_count) self.assertEqual(1, self.lib.C_Sign.call_count) def test_verify_hmac(self): self.pkcs11.verify_hmac(mock.MagicMock(), mock.MagicMock(), mock.MagicMock(), mock.MagicMock()) self.assertEqual(1, self.lib.C_VerifyInit.call_count) self.assertEqual(1, self.lib.C_Verify.call_count) def test_destroy_object(self): self.pkcs11.destroy_object(mock.MagicMock(), mock.MagicMock()) self.assertEqual(1, self.lib.C_DestroyObject.call_count) def test_invalid_build_attributes(self): self.assertRaises(TypeError, self.pkcs11._build_attributes, [pkcs11.Attribute(pkcs11.CKA_CLASS, {})]) def test_finalize(self): self.pkcs11.finalize() self.assertEqual(1, self.lib.C_Finalize.call_count) def test_check_error(self): self.assertIsNone(self.pkcs11._check_error(pkcs11.CKR_OK)) def test_check_error_with_without_specific_handling(self): self.assertRaises(exception.P11CryptoPluginException, self.pkcs11._check_error, 5) def test_check_error_with_token_error(self): self.assertRaises(exception.P11CryptoTokenException, self.pkcs11._check_error, 0xe0) def test_converting_unicode_to_bytes(self): self.assertEqual(b'foo', pkcs11._to_bytes(u'foo')) def test_converting_default_str_type_to_bytes(self): self.assertEqual(b'foo', pkcs11._to_bytes('foo')) barbican-9.1.0.dev50/barbican/tests/plugin/crypto/test_crypto.py0000664000175000017500000004056013616500636025075 0ustar sahidsahid00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import os from cryptography import fernet from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import serialization import mock import six from barbican.model import models from barbican.plugin.crypto import base as plugin from barbican.plugin.crypto import simple_crypto as simple from barbican.tests import utils class WhenTestingSimpleCryptoPlugin(utils.BaseTestCase): def setUp(self): super(WhenTestingSimpleCryptoPlugin, self).setUp() self.plugin = simple.SimpleCryptoPlugin() def _get_mocked_kek_meta_dto(self): # For SimpleCryptoPlugin, per-project KEKs are stored in # kek_meta_dto.plugin_meta. 
SimpleCryptoPlugin does a get-or-create # on the plugin_meta field, so plugin_meta should be None initially. kek_meta_dto = plugin.KEKMetaDTO(mock.MagicMock()) kek_meta_dto.plugin_meta = None return self.plugin.bind_kek_metadata(kek_meta_dto) def test_encrypt_unicode_raises_value_error(self): unencrypted = u'unicode_beer\U0001F37A' encrypt_dto = plugin.EncryptDTO(unencrypted) secret = mock.MagicMock() secret.mime_type = 'text/plain' kek_meta_dto = self._get_mocked_kek_meta_dto() self.assertRaises( ValueError, self.plugin.encrypt, encrypt_dto, kek_meta_dto, mock.MagicMock(), ) def test_encrypt_with_unicode_kek_must_pass(self): """Test plan: Generate a kek Encrypt with master kek Convert to unicode call plugin.encrypt on unencrypted decrypt response cypher_text Compare with unencrypted """ project_kek = fernet.Fernet.generate_key() encryptor = fernet.Fernet(self.plugin.master_kek) ENC_project_kek = encryptor.encrypt(project_kek) UENC_project_kek = six.u(ENC_project_kek) kek_meta_dto = self._get_mocked_kek_meta_dto() kek_meta_dto.plugin_meta = UENC_project_kek unencrypted = b'PlainTextSecret' encrypt_dto = plugin.EncryptDTO(unencrypted) response_dto = self.plugin.encrypt(encrypt_dto, kek_meta_dto, mock.MagicMock()) project_encryptor = fernet.Fernet(project_kek) decrypted = project_encryptor.decrypt(response_dto.cypher_text) self.assertEqual(unencrypted, decrypted) def test_decrypt_kek_not_created(self): kek_meta_dto = mock.MagicMock() kek_meta_dto.plugin_meta = None self.assertRaises( ValueError, self.plugin.decrypt, mock.MagicMock(), kek_meta_dto, mock.MagicMock(), mock.MagicMock(), ) def test_byte_string_encryption(self): unencrypted = b'some_secret' encrypt_dto = plugin.EncryptDTO(unencrypted) kek_meta_dto = self._get_mocked_kek_meta_dto() response_dto = self.plugin.encrypt(encrypt_dto, kek_meta_dto, mock.MagicMock()) decrypt_dto = plugin.DecryptDTO(response_dto.cypher_text) decrypted = self.plugin.decrypt(decrypt_dto, kek_meta_dto, response_dto.kek_meta_extended, mock.MagicMock()) self.assertEqual(unencrypted, decrypted) def test_random_bytes_encryption(self): unencrypted = os.urandom(10) encrypt_dto = plugin.EncryptDTO(unencrypted) kek_meta_dto = self._get_mocked_kek_meta_dto() response_dto = self.plugin.encrypt(encrypt_dto, kek_meta_dto, mock.MagicMock()) decrypt_dto = plugin.DecryptDTO(response_dto.cypher_text) decrypted = self.plugin.decrypt(decrypt_dto, kek_meta_dto, response_dto.kek_meta_extended, mock.MagicMock()) self.assertEqual(unencrypted, decrypted) def test_generate_256_bit_key(self): secret = models.Secret() secret.bit_length = 256 secret.algorithm = "AES" kek_meta_dto = self._get_mocked_kek_meta_dto() generate_dto = plugin.GenerateDTO( secret.algorithm, secret.bit_length, secret.mode, None) response_dto = self.plugin.generate_symmetric( generate_dto, kek_meta_dto, mock.MagicMock() ) decrypt_dto = plugin.DecryptDTO(response_dto.cypher_text) key = self.plugin.decrypt(decrypt_dto, kek_meta_dto, response_dto.kek_meta_extended, mock.MagicMock()) self.assertEqual(32, len(key)) def test_generate_192_bit_key(self): secret = models.Secret() secret.bit_length = 192 secret.algorithm = "AES" kek_meta_dto = self._get_mocked_kek_meta_dto() generate_dto = plugin.GenerateDTO( secret.algorithm, secret.bit_length, None, None) response_dto = self.plugin.generate_symmetric( generate_dto, kek_meta_dto, mock.MagicMock() ) decrypt_dto = plugin.DecryptDTO(response_dto.cypher_text) key = self.plugin.decrypt(decrypt_dto, kek_meta_dto, response_dto.kek_meta_extended, mock.MagicMock()) 
self.assertEqual(24, len(key)) def test_generate_128_bit_key(self): secret = models.Secret() secret.bit_length = 128 secret.algorithm = "AES" kek_meta_dto = self._get_mocked_kek_meta_dto() generate_dto = plugin.GenerateDTO( secret.algorithm, secret.bit_length, None, None) response_dto = self.plugin.generate_symmetric( generate_dto, kek_meta_dto, mock.MagicMock() ) decrypt_dto = plugin.DecryptDTO(response_dto.cypher_text) key = self.plugin.decrypt(decrypt_dto, kek_meta_dto, response_dto.kek_meta_extended, mock.MagicMock()) self.assertEqual(16, len(key)) def test_supports_encrypt_decrypt(self): self.assertTrue( self.plugin.supports(plugin.PluginSupportTypes.ENCRYPT_DECRYPT) ) def test_supports_symmetric_key_generation(self): self.assertTrue( self.plugin.supports( plugin.PluginSupportTypes.SYMMETRIC_KEY_GENERATION, 'AES', 64) ) self.assertFalse( self.plugin.supports( plugin.PluginSupportTypes.SYMMETRIC_KEY_GENERATION, 'AES') ) self.assertTrue( self.plugin.supports( plugin.PluginSupportTypes.SYMMETRIC_KEY_GENERATION, 'hmacsha512', 128) ) self.assertFalse( self.plugin.supports( plugin.PluginSupportTypes.SYMMETRIC_KEY_GENERATION, 'hmacsha512', 12) ) self.assertFalse( self.plugin.supports( plugin.PluginSupportTypes.SYMMETRIC_KEY_GENERATION, 'Camillia', 128) ) def test_does_not_support_unknown_type(self): self.assertFalse( self.plugin.supports("SOMETHING_RANDOM") ) def test_bind_kek_metadata(self): kek_metadata_dto = mock.MagicMock() kek_metadata_dto = self.plugin.bind_kek_metadata(kek_metadata_dto) self.assertEqual('aes', kek_metadata_dto.algorithm) self.assertEqual(128, kek_metadata_dto.bit_length) self.assertEqual('cbc', kek_metadata_dto.mode) def test_supports_asymmetric_key_generation(self): self.assertTrue( self.plugin.supports( plugin.PluginSupportTypes.ASYMMETRIC_KEY_GENERATION, 'DSA', 1024) ) self.assertTrue( self.plugin.supports( plugin.PluginSupportTypes.ASYMMETRIC_KEY_GENERATION, "RSA", 1024) ) self.assertFalse( self.plugin.supports( plugin.PluginSupportTypes.ASYMMETRIC_KEY_GENERATION, "DSA", 512) ) self.assertFalse( self.plugin.supports( plugin.PluginSupportTypes.ASYMMETRIC_KEY_GENERATION, "RSA", 64) ) def test_generate_asymmetric_1024_bit_key(self): generate_dto = plugin.GenerateDTO('rsa', 1024, None, None) kek_meta_dto = self._get_mocked_kek_meta_dto() private_dto, public_dto, passwd_dto = self.plugin.generate_asymmetric( generate_dto, kek_meta_dto, mock.MagicMock()) decrypt_dto = plugin.DecryptDTO(private_dto.cypher_text) private_dto = self.plugin.decrypt(decrypt_dto, kek_meta_dto, private_dto.kek_meta_extended, mock.MagicMock()) decrypt_dto = plugin.DecryptDTO(public_dto.cypher_text) public_dto = self.plugin.decrypt(decrypt_dto, kek_meta_dto, public_dto.kek_meta_extended, mock.MagicMock()) # check we can reload the private and public keys private_key = serialization.load_pem_private_key( data=private_dto, password=None, backend=default_backend() ) public_key = serialization.load_pem_public_key( data=public_dto, backend=default_backend() ) self.assertEqual(1024, private_key.key_size) self.assertEqual(1024, public_key.key_size) public_key = public_key.public_bytes( encoding=serialization.Encoding.PEM, format=serialization.PublicFormat.PKCS1 ) # get the public key from the private key we recovered to compare recovered_key = private_key.public_key().public_bytes( encoding=serialization.Encoding.PEM, format=serialization.PublicFormat.PKCS1 ) self.assertTrue(public_key == recovered_key) def test_generate_1024_bit_RSA_key_with_passphrase(self): generate_dto = plugin.GenerateDTO('rsa', 
1024, None, 'changeme') kek_meta_dto = self._get_mocked_kek_meta_dto() private_dto, public_dto, passwd_dto = self.plugin.generate_asymmetric( generate_dto, kek_meta_dto, mock.MagicMock() ) decrypt_dto = plugin.DecryptDTO(private_dto.cypher_text) private_dto = self.plugin.decrypt(decrypt_dto, kek_meta_dto, private_dto.kek_meta_extended, mock.MagicMock()) decrypt_dto = plugin.DecryptDTO(public_dto.cypher_text) public_dto = self.plugin.decrypt(decrypt_dto, kek_meta_dto, public_dto.kek_meta_extended, mock.MagicMock()) # check we can reload the private and public keys private_key = serialization.load_pem_private_key( data=private_dto, password='changeme'.encode(), backend=default_backend() ) public_key = serialization.load_pem_public_key( data=public_dto, backend=default_backend() ) self.assertEqual(1024, private_key.key_size) self.assertEqual(1024, public_key.key_size) public_key = public_key.public_bytes( encoding=serialization.Encoding.PEM, format=serialization.PublicFormat.PKCS1 ) # get the public key from the private key we recovered to compare recovered_key = private_key.public_key().public_bytes( encoding=serialization.Encoding.PEM, format=serialization.PublicFormat.PKCS1 ) self.assertTrue(public_key == recovered_key) def test_generate_1024_bit_DSA_key_with_passphrase(self): generate_dto = plugin.GenerateDTO('dsa', 1024, None, 'changeme') kek_meta_dto = self._get_mocked_kek_meta_dto() private_dto, public_dto, passwd_dto = self.plugin.generate_asymmetric( generate_dto, kek_meta_dto, mock.MagicMock() ) decrypt_dto = plugin.DecryptDTO(private_dto.cypher_text) private_dto = self.plugin.decrypt(decrypt_dto, kek_meta_dto, private_dto.kek_meta_extended, mock.MagicMock()) decrypt_dto = plugin.DecryptDTO(public_dto.cypher_text) public_dto = self.plugin.decrypt(decrypt_dto, kek_meta_dto, public_dto.kek_meta_extended, mock.MagicMock()) # check we can reload the private and public keys private_key = serialization.load_der_private_key( data=private_dto, password='changeme'.encode(), backend=default_backend() ) public_key = serialization.load_der_public_key( data=public_dto, backend=default_backend() ) self.assertEqual(1024, private_key.key_size) self.assertEqual(1024, public_key.key_size) public_key = public_key.public_bytes( encoding=serialization.Encoding.DER, format=serialization.PublicFormat.SubjectPublicKeyInfo ) # get the public key from the private key we recovered to compare recovered_key = private_key.public_key().public_bytes( encoding=serialization.Encoding.DER, format=serialization.PublicFormat.SubjectPublicKeyInfo ) self.assertTrue(public_key == recovered_key) def test_generate_1024_DSA_key_in_pem_and_reconstruct_key_der(self): generate_dto = plugin.GenerateDTO('dsa', 1024, None, None) kek_meta_dto = self._get_mocked_kek_meta_dto() private_dto, public_dto, passwd_dto = self.plugin.generate_asymmetric( generate_dto, kek_meta_dto, mock.MagicMock() ) decrypt_dto = plugin.DecryptDTO(private_dto.cypher_text) private_dto = self.plugin.decrypt(decrypt_dto, kek_meta_dto, private_dto.kek_meta_extended, mock.MagicMock()) private_key = serialization.load_der_private_key( data=private_dto, password=None, backend=default_backend() ) self.assertEqual(1024, private_key.key_size) def test_generate_128_bit_hmac_key(self): secret = models.Secret() secret.bit_length = 128 secret.algorithm = "hmacsha256" kek_meta_dto = self._get_mocked_kek_meta_dto() generate_dto = plugin.GenerateDTO( secret.algorithm, secret.bit_length, None, None) response_dto = self.plugin.generate_symmetric( generate_dto, kek_meta_dto, 
mock.MagicMock() ) decrypt_dto = plugin.DecryptDTO(response_dto.cypher_text) key = self.plugin.decrypt(decrypt_dto, kek_meta_dto, response_dto.kek_meta_extended, mock.MagicMock()) self.assertEqual(16, len(key)) def test_get_plugin_name(self): self.assertIsNotNone(self.plugin.get_plugin_name()) barbican-9.1.0.dev50/barbican/tests/plugin/crypto/test_p11_crypto.py0000664000175000017500000003555713616500636025570 0ustar sahidsahid00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import mock import six from barbican.common import exception as ex from barbican.model import models from barbican.plugin.crypto import base as plugin_import from barbican.plugin.crypto import p11_crypto from barbican.plugin.crypto import pkcs11 from barbican.tests import utils if six.PY3: long = int def generate_random_effect(length, session): return b'0' * length class WhenTestingP11CryptoPlugin(utils.BaseTestCase): def setUp(self): super(WhenTestingP11CryptoPlugin, self).setUp() self.pkcs11 = mock.Mock() self.pkcs11.get_session.return_value = long(1) self.pkcs11.return_session.return_value = None self.pkcs11.generate_random.side_effect = generate_random_effect self.pkcs11.get_key_handle.return_value = long(2) self.pkcs11.encrypt.return_value = {'iv': b'0', 'ct': b'0'} self.pkcs11.decrypt.return_value = b'0' self.pkcs11.generate_key.return_value = long(3) self.pkcs11.wrap_key.return_value = {'iv': b'1', 'wrapped_key': b'1'} self.pkcs11.unwrap_key.return_value = long(4) self.pkcs11.compute_hmac.return_value = b'1' self.pkcs11.verify_hmac.return_value = None self.pkcs11.destroy_object.return_value = None self.pkcs11.finalize.return_value = None self.cfg_mock = mock.MagicMock(name='config mock') self.cfg_mock.p11_crypto_plugin.mkek_label = 'mkek_label' self.cfg_mock.p11_crypto_plugin.hmac_label = 'hmac_label' self.cfg_mock.p11_crypto_plugin.mkek_length = 32 self.cfg_mock.p11_crypto_plugin.slot_id = 1 self.cfg_mock.p11_crypto_plugin.rw_session = True self.cfg_mock.p11_crypto_plugin.pkek_length = 32 self.cfg_mock.p11_crypto_plugin.pkek_cache_ttl = 900 self.cfg_mock.p11_crypto_plugin.pkek_cache_limit = 10 self.cfg_mock.p11_crypto_plugin.encryption_mechanism = 'CKM_AES_CBC' self.cfg_mock.p11_crypto_plugin.seed_file = '' self.cfg_mock.p11_crypto_plugin.seed_length = 32 self.cfg_mock.p11_crypto_plugin.hmac_keywrap_mechanism = \ 'CKM_SHA256_HMAC' self.plugin_name = 'Test PKCS11 plugin' self.cfg_mock.p11_crypto_plugin.plugin_name = self.plugin_name self.plugin = p11_crypto.P11CryptoPlugin( conf=self.cfg_mock, pkcs11=self.pkcs11 ) def test_invalid_library_path(self): cfg = self.cfg_mock.p11_crypto_plugin cfg.library_path = None self.assertRaises(ValueError, p11_crypto.P11CryptoPlugin, conf=self.cfg_mock, pkcs11=self.pkcs11) def test_bind_kek_metadata_without_existing_key(self): kek_datum = models.KEKDatum() dto = plugin_import.KEKMetaDTO(kek_datum) dto = self.plugin.bind_kek_metadata(dto) self.assertEqual('AES', dto.algorithm) self.assertEqual(256, dto.bit_length) self.assertEqual('CBC', 
dto.mode) self.assertEqual(2, self.pkcs11.get_key_handle.call_count) self.assertEqual(1, self.pkcs11.generate_key.call_count) self.assertEqual(1, self.pkcs11.wrap_key.call_count) self.assertEqual(1, self.pkcs11.compute_hmac.call_count) def test_bind_kek_metadata_with_existing_key(self): kek_datum = models.KEKDatum() dto = plugin_import.KEKMetaDTO(kek_datum) dto.plugin_meta = '{}' dto = self.plugin.bind_kek_metadata(dto) self.assertEqual(0, self.pkcs11.generate_key.call_count) self.assertEqual(0, self.pkcs11.wrap_key.call_count) self.assertEqual(0, self.pkcs11.compute_hmac.call_count) def test_encrypt(self): payload = b'test payload' encrypt_dto = plugin_import.EncryptDTO(payload) kek_meta = mock.MagicMock() kek_meta.kek_label = 'pkek' kek_meta.plugin_meta = ('{"iv": "iv==",' '"hmac": "hmac",' '"wrapped_key": "wrappedkey==",' '"mkek_label": "mkek_label",' '"hmac_label": "hmac_label"}') response_dto = self.plugin.encrypt(encrypt_dto, kek_meta, mock.MagicMock()) self.assertEqual(b'0', response_dto.cypher_text) self.assertIn('iv', response_dto.kek_meta_extended) self.assertEqual(2, self.pkcs11.get_key_handle.call_count) self.assertEqual(2, self.pkcs11.get_session.call_count) self.assertEqual(1, self.pkcs11.verify_hmac.call_count) self.assertEqual(1, self.pkcs11.unwrap_key.call_count) self.assertEqual(1, self.pkcs11.encrypt.call_count) self.assertEqual(1, self.pkcs11.return_session.call_count) def test_encrypt_bad_session(self): self.pkcs11.get_session.return_value = mock.DEFAULT self.pkcs11.get_session.side_effect = ex.P11CryptoPluginException( 'Testing error handling' ) payload = b'test payload' encrypt_dto = plugin_import.EncryptDTO(payload) kek_meta = mock.MagicMock() kek_meta.kek_label = 'pkek' kek_meta.plugin_meta = ('{"iv": "iv==",' '"hmac": "hmac",' '"wrapped_key": "wrappedkey==",' '"mkek_label": "mkek_label",' '"hmac_label": "hmac_label"}') self.assertRaises(ex.P11CryptoPluginException, self.plugin._encrypt, encrypt_dto, kek_meta, mock.MagicMock()) self.assertEqual(2, self.pkcs11.get_key_handle.call_count) self.assertEqual(2, self.pkcs11.get_session.call_count) self.assertEqual(1, self.pkcs11.verify_hmac.call_count) self.assertEqual(1, self.pkcs11.unwrap_key.call_count) self.assertEqual(0, self.pkcs11.encrypt.call_count) self.assertEqual(0, self.pkcs11.return_session.call_count) def test_decrypt(self): ct = b'ctct' kek_meta_extended = '{"iv":"AAAA","mechanism":"CKM_AES_CBC"}' decrypt_dto = plugin_import.DecryptDTO(ct) kek_meta = mock.MagicMock() kek_meta.kek_label = 'pkek' kek_meta.plugin_meta = ('{"iv": "iv==",' '"hmac": "hmac",' '"wrapped_key": "c2VjcmV0a2V5BwcHBwcHBw==",' '"mkek_label": "mkek_label",' '"hmac_label": "hmac_label"}') pt = self.plugin.decrypt(decrypt_dto, kek_meta, kek_meta_extended, mock.MagicMock()) self.assertEqual(b'0', pt) self.assertEqual(2, self.pkcs11.get_key_handle.call_count) self.assertEqual(2, self.pkcs11.get_session.call_count) self.assertEqual(1, self.pkcs11.verify_hmac.call_count) self.assertEqual(1, self.pkcs11.unwrap_key.call_count) self.assertEqual(1, self.pkcs11.decrypt.call_count) self.assertEqual(1, self.pkcs11.return_session.call_count) def test_decrypt_bad_session(self): self.pkcs11.get_session.return_value = mock.DEFAULT self.pkcs11.get_session.side_effect = ex.P11CryptoPluginException( 'Testing error handling' ) ct = b'ctct' kek_meta_extended = '{"iv":"AAAA","mechanism":"CKM_AES_CBC"}' decrypt_dto = plugin_import.DecryptDTO(ct) kek_meta = mock.MagicMock() kek_meta.kek_label = 'pkek' kek_meta.plugin_meta = ('{"iv": "iv==",' '"hmac": "hmac",' 
'"wrapped_key": "wrappedkey==",' '"mkek_label": "mkek_label",' '"hmac_label": "hmac_label"}') self.assertRaises(ex.P11CryptoPluginException, self.plugin._decrypt, decrypt_dto, kek_meta, kek_meta_extended, mock.MagicMock()) self.assertEqual(2, self.pkcs11.get_key_handle.call_count) self.assertEqual(2, self.pkcs11.get_session.call_count) self.assertEqual(1, self.pkcs11.verify_hmac.call_count) self.assertEqual(1, self.pkcs11.unwrap_key.call_count) self.assertEqual(0, self.pkcs11.decrypt.call_count) self.assertEqual(0, self.pkcs11.return_session.call_count) def test_generate_symmetric(self): secret = models.Secret() secret.bit_length = 128 secret.algorithm = 'AES' generate_dto = plugin_import.GenerateDTO( secret.algorithm, secret.bit_length, None, None) kek_meta = mock.MagicMock() kek_meta.kek_label = 'pkek' kek_meta.plugin_meta = ('{"iv": "iv==",' '"hmac": "hmac",' '"wrapped_key": "wrappedkey==",' '"mkek_label": "mkek_label",' '"hmac_label": "hmac_label"}') response_dto = self.plugin.generate_symmetric(generate_dto, kek_meta, mock.MagicMock()) self.assertEqual(b'0', response_dto.cypher_text) self.assertIn('iv', response_dto.kek_meta_extended) self.assertEqual(2, self.pkcs11.get_key_handle.call_count) self.assertEqual(2, self.pkcs11.get_session.call_count) self.assertEqual(1, self.pkcs11.generate_random.call_count) self.assertEqual(1, self.pkcs11.verify_hmac.call_count) self.assertEqual(1, self.pkcs11.unwrap_key.call_count) self.assertEqual(1, self.pkcs11.encrypt.call_count) self.assertEqual(1, self.pkcs11.return_session.call_count) def test_generate_asymmetric_raises_error(self): self.assertRaises(NotImplementedError, self.plugin.generate_asymmetric, mock.MagicMock(), mock.MagicMock(), mock.MagicMock()) def test_supports_encrypt_decrypt(self): self.assertTrue( self.plugin.supports( plugin_import.PluginSupportTypes.ENCRYPT_DECRYPT ) ) def test_supports_symmetric_key_generation(self): self.assertTrue( self.plugin.supports( plugin_import.PluginSupportTypes.SYMMETRIC_KEY_GENERATION ) ) def test_does_not_supports_asymmetric_key_generation(self): self.assertFalse( self.plugin.supports( plugin_import.PluginSupportTypes.ASYMMETRIC_KEY_GENERATION ) ) def test_does_not_support_unknown_type(self): self.assertFalse( self.plugin.supports('SOMETHING_RANDOM') ) def test_missing_mkek(self): self.pkcs11.get_key_handle.return_value = None self.assertRaises(ex.P11CryptoKeyHandleException, self.plugin._get_master_key, self.plugin.mkek_key_type, 'bad_key_label') def test_cached_kek_expired(self): self.plugin.pkek_cache['expired_kek'] = p11_crypto.CachedKEK(4, 0) self.assertIsNone(self.plugin._pkek_cache_get('expired_kek')) def test_create_pkcs11(self): def _generate_random(session, buf, length): ffi.buffer(buf)[:] = b'0' * length return pkcs11.CKR_OK lib = mock.Mock() lib.C_Initialize.return_value = pkcs11.CKR_OK lib.C_OpenSession.return_value = pkcs11.CKR_OK lib.C_CloseSession.return_value = pkcs11.CKR_OK lib.C_GetSessionInfo.return_value = pkcs11.CKR_OK lib.C_Login.return_value = pkcs11.CKR_OK lib.C_GenerateRandom.side_effect = _generate_random lib.C_SeedRandom.return_value = pkcs11.CKR_OK ffi = pkcs11.build_ffi() setattr(ffi, 'dlopen', lambda x: lib) p11 = self.plugin._create_pkcs11(self.cfg_mock.p11_crypto_plugin, ffi) self.assertIsInstance(p11, pkcs11.PKCS11) # test for when plugin_conf.seed_file is not None self.cfg_mock.p11_crypto_plugin.seed_file = 'seed_file' d = '01234567' * 4 mo = mock.mock_open(read_data=d) with mock.patch(six.moves.builtins.__name__ + '.open', mo, create=True): p11 = 
self.plugin._create_pkcs11( self.cfg_mock.p11_crypto_plugin, ffi) self.assertIsInstance(p11, pkcs11.PKCS11) mo.assert_called_once_with('seed_file', 'rb') calls = [mock.call('seed_file', 'rb'), mock.call().__enter__(), mock.call().read(32), mock.call().__exit__(None, None, None)] self.assertEqual(mo.mock_calls, calls) lib.C_SeedRandom.assert_called_once_with(mock.ANY, mock.ANY, 32) self.cfg_mock.p11_crypto_plugin.seed_file = '' def test_call_pkcs11_with_token_error(self): self.plugin._encrypt = mock.Mock() self.plugin._encrypt.side_effect = [ex.P11CryptoTokenException( 'Testing error handling' ), 'test payload'] self.plugin._reinitialize_pkcs11 = mock.Mock() self.plugin._reinitialize_pkcs11.return_value = mock.DEFAULT self.plugin.encrypt(mock.MagicMock(), mock.MagicMock(), mock.MagicMock()) self.assertEqual(2, self.pkcs11.get_key_handle.call_count) self.assertEqual(1, self.pkcs11.get_session.call_count) self.assertEqual(0, self.pkcs11.return_session.call_count) self.assertEqual(2, self.plugin._encrypt.call_count) def test_reinitialize_pkcs11(self): pkcs11 = self.pkcs11 self.plugin._create_pkcs11 = mock.Mock() self.plugin._create_pkcs11.return_value = pkcs11 self.plugin._configure_object_cache = mock.Mock() self.plugin._configure_object_cache.return_value = mock.DEFAULT self.plugin._reinitialize_pkcs11() self.assertEqual(1, self.pkcs11.finalize.call_count) self.assertEqual(1, self.plugin._create_pkcs11.call_count) self.assertEqual(1, self.plugin._configure_object_cache.call_count) def test_get_plugin_name(self): self.assertEqual(self.plugin_name, self.plugin.get_plugin_name()) barbican-9.1.0.dev50/barbican/tests/plugin/test_resource.py0000664000175000017500000002101113616500636024052 0ustar sahidsahid00000000000000# Copyright (c) 2014 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
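# The tests below exercise barbican.plugin.resources, the broker layer
# between the API and whichever secret-store plugin is configured. As a
# rough sketch (simplified, not the exact internal call signatures), the
# store path looks like:
#
#     manager = secret_store.get_manager()
#     store_plugin = manager.get_plugin_store(key_spec=...)
#     secret_metadata = store_plugin.store_secret(secret_dto)
#
# which is why these tests mock out get_manager() and the repository
# factory functions to keep everything in memory.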
import base64 import mock import testtools from barbican.model import models from barbican.plugin.interface import secret_store from barbican.plugin import resources from barbican.plugin import store_crypto from barbican.tests import utils @utils.parameterized_test_case class WhenTestingPluginResource(testtools.TestCase, utils.MockModelRepositoryMixin): def setUp(self): super(WhenTestingPluginResource, self).setUp() self.plugin_resource = resources self.spec = {'algorithm': 'RSA', 'bit_length': 1024, 'passphrase': 'changeit' } self.content_type = 'application/octet-stream' self.project_model = mock.MagicMock() asymmetric_meta_dto = secret_store.AsymmetricKeyMetadataDTO() # Mock plug-in self.moc_plugin = mock.MagicMock() self.moc_plugin.generate_asymmetric_key.return_value = ( asymmetric_meta_dto) self.moc_plugin.store_secret.return_value = {} moc_plugin_config = { 'return_value.get_plugin_generate.return_value': self.moc_plugin, 'return_value.get_plugin_store.return_value': self.moc_plugin, 'return_value.get_plugin_retrieve_delete.return_value': self.moc_plugin } self.moc_plugin_patcher = mock.patch( 'barbican.plugin.interface.secret_store.get_manager', **moc_plugin_config ) self.moc_plugin_manager = self.moc_plugin_patcher.start() self.addCleanup(self.moc_plugin_patcher.stop) self.setup_project_repository_mock() self.secret_repo = mock.MagicMock() self.secret_repo.create_from.return_value = None self.setup_secret_repository_mock(self.secret_repo) self.container_repo = mock.MagicMock() self.container_repo.create_from.return_value = None self.setup_container_repository_mock(self.container_repo) self.container_secret_repo = mock.MagicMock() self.container_secret_repo.create_from.return_value = None self.setup_container_secret_repository_mock( self.container_secret_repo) self.secret_meta_repo = mock.MagicMock() self.secret_meta_repo.create_from.return_value = None self.setup_secret_meta_repository_mock(self.secret_meta_repo) def tearDown(self): super(WhenTestingPluginResource, self).tearDown() def test_store_secret_dto(self): spec = {'algorithm': 'AES', 'bit_length': 256, 'secret_type': 'symmetric'} secret = base64.b64encode(b'ABCDEFABCDEFABCDEFABCDEF') self.plugin_resource.store_secret( unencrypted_raw=secret, content_type_raw=self.content_type, content_encoding='base64', secret_model=models.Secret(spec), project_model=self.project_model) dto = self.moc_plugin.store_secret.call_args_list[0][0][0] self.assertEqual("symmetric", dto.type) self.assertEqual(secret, dto.secret) self.assertEqual(spec['algorithm'], dto.key_spec.alg) self.assertEqual(spec['bit_length'], dto.key_spec.bit_length) self.assertEqual(self.content_type, dto.content_type) @utils.parameterized_dataset({ 'general_secret_store': { 'moc_plugin': None }, 'store_crypto': { 'moc_plugin': mock.MagicMock(store_crypto.StoreCryptoAdapterPlugin) } }) def test_get_secret_dto(self, moc_plugin): def mock_secret_store_store_secret(dto): self.secret_dto = dto def mock_secret_store_get_secret(secret_type, secret_metadata): return self.secret_dto def mock_store_crypto_store_secret(dto, context): self.secret_dto = dto def mock_store_crypto_get_secret( secret_type, secret_metadata, context): return self.secret_dto if moc_plugin: self.moc_plugin = moc_plugin self.moc_plugin.store_secret.return_value = {} self.moc_plugin.store_secret.side_effect = ( mock_store_crypto_store_secret) self.moc_plugin.get_secret.side_effect = ( mock_store_crypto_get_secret) moc_plugin_config = { 'return_value.get_plugin_store.return_value': self.moc_plugin, 
'return_value.get_plugin_retrieve_delete.return_value': self.moc_plugin } self.moc_plugin_manager.configure_mock(**moc_plugin_config) else: self.moc_plugin.store_secret.side_effect = ( mock_secret_store_store_secret) self.moc_plugin.get_secret.side_effect = ( mock_secret_store_get_secret) raw_secret = b'ABCDEFABCDEFABCDEFABCDEF' spec = {'name': 'testsecret', 'algorithm': 'AES', 'bit_length': 256, 'secret_type': 'symmetric'} self.plugin_resource.store_secret( unencrypted_raw=base64.b64encode(raw_secret), content_type_raw=self.content_type, content_encoding='base64', secret_model=models.Secret(spec), project_model=self.project_model) secret = self.plugin_resource.get_secret( 'application/octet-stream', models.Secret(spec), None) self.assertEqual(raw_secret, secret) def test_generate_asymmetric_with_passphrase(self): """test asymmetric secret generation with passphrase.""" secret_container = self.plugin_resource.generate_asymmetric_secret( self.spec, self.content_type, self.project_model, ) self.assertEqual("rsa", secret_container.type) self.assertEqual(self.moc_plugin. generate_asymmetric_key.call_count, 1) self.assertEqual(self.container_repo. create_from.call_count, 1) self.assertEqual(self.container_secret_repo. create_from.call_count, 3) def test_generate_asymmetric_without_passphrase(self): """test asymmetric secret generation without passphrase.""" del self.spec['passphrase'] secret_container = self.plugin_resource.generate_asymmetric_secret( self.spec, self.content_type, self.project_model, ) self.assertEqual("rsa", secret_container.type) self.assertEqual(1, self.moc_plugin.generate_asymmetric_key.call_count) self.assertEqual(1, self.container_repo.create_from.call_count) self.assertEqual(2, self.container_secret_repo.create_from.call_count) def test_delete_secret_w_metadata(self): project_id = "some_id" secret_model = mock.MagicMock() secret_meta = mock.MagicMock() self.secret_meta_repo.get_metadata_for_secret.return_value = ( secret_meta) self.plugin_resource.delete_secret(secret_model=secret_model, project_id=project_id) self.secret_meta_repo.get_metadata_for_secret.assert_called_once_with( secret_model.id) self.moc_plugin.delete_secret.assert_called_once_with(secret_meta) self.secret_repo.delete_entity_by_id.assert_called_once_with( entity_id=secret_model.id, external_project_id=project_id) def test_delete_secret_w_out_metadata(self): project_id = "some_id" secret_model = mock.MagicMock() self.secret_meta_repo.get_metadata_for_secret.return_value = None self.plugin_resource.delete_secret(secret_model=secret_model, project_id=project_id) self.secret_meta_repo.get_metadata_for_secret.assert_called_once_with( secret_model.id) self.secret_repo.delete_entity_by_id.assert_called_once_with( entity_id=secret_model.id, external_project_id=project_id) barbican-9.1.0.dev50/barbican/tests/plugin/test_castellan_secret_store.py0000664000175000017500000001717413616500636026771 0ustar sahidsahid00000000000000# Copyright (c) 2018 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
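# The tests below cover the Castellan-backed Vault secret store. The
# plugin is a thin adapter over a Castellan key manager; as an
# illustrative (not authoritative) mapping of the calls asserted here:
#
#     key_manager.create_key(context, algorithm, bit_length)  -> key id
#     key_manager.store(context, opaque_data.OpaqueData(data)) -> key id
#     key_manager.get(context, key_id)                 -> managed object
#
# The returned key id is what Barbican persists as secret metadata under
# CastellanSecretStore.KEY_ID.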
from castellan.common import exception from castellan.common.objects import opaque_data import mock import barbican.plugin.castellan_secret_store as css import barbican.plugin.interface.secret_store as ss import barbican.plugin.vault_secret_store as vss from barbican.tests import utils key_ref1 = 'aff825be-6ede-4b1d-aeb0-aaec8e62aec6' key_ref2 = '9c94c9c7-16ea-43e8-8ebe-0de282c0e6d5' secret_passphrase = 'secret passphrase' class WhenTestingVaultSecretStore(utils.BaseTestCase): def setUp(self): super(WhenTestingVaultSecretStore, self).setUp() self.key_manager_mock = mock.MagicMock(name="key manager mock") self.key_manager_mock.create_key_pair.return_value = ( key_ref1, key_ref2 ) self.key_manager_mock.create_key.return_value = key_ref1 self.key_manager_mock.store.return_value = key_ref1 secret_object = opaque_data.OpaqueData(secret_passphrase) self.key_manager_mock.get.return_value = secret_object self.cfg_mock = mock.MagicMock(name='config mock') self.cfg_mock.vault_plugin = mock.MagicMock( use_ssl=False, root_token_id='12345' ) self.plugin = vss.VaultSecretStore(self.cfg_mock) self.plugin.key_manager = self.key_manager_mock self.plugin_name = "VaultSecretStore" def test_generate_symmetric_key(self): key_spec = ss.KeySpec(ss.KeyAlgorithm.AES, 128) response = self.plugin.generate_symmetric_key(key_spec) self.plugin.key_manager.create_key.assert_called_once_with( mock.ANY, ss.KeyAlgorithm.AES, 128 ) expected_response = {css.CastellanSecretStore.KEY_ID: key_ref1} self.assertEqual(response, expected_response) def test_generate_symmetric_key_raises_exception(self): key_spec = ss.KeySpec(ss.KeyAlgorithm.AES, 128) self.plugin.key_manager.create_key.side_effect = exception.Forbidden() self.assertRaises( ss.SecretGeneralException, self.plugin.generate_symmetric_key, key_spec ) def test_generate_asymmetric_key(self): key_spec = ss.KeySpec(ss.KeyAlgorithm.RSA, 2048) response = self.plugin.generate_asymmetric_key(key_spec) self.plugin.key_manager.create_key_pair.assert_called_once_with( mock.ANY, ss.KeyAlgorithm.RSA, 2048) self.assertIsInstance(response, ss.AsymmetricKeyMetadataDTO) self.assertEqual( response.public_key_meta[css.CastellanSecretStore.KEY_ID], key_ref2 ) self.assertEqual( response.private_key_meta[css.CastellanSecretStore.KEY_ID], key_ref1 ) def test_generate_asymmetric_throws_exception(self): key_spec = ss.KeySpec(ss.KeyAlgorithm.RSA, 2048) self.plugin.key_manager.create_key_pair.side_effect = ( exception.Forbidden() ) self.assertRaises( ss.SecretGeneralException, self.plugin.generate_asymmetric_key, key_spec ) def test_generate_asymmetric_throws_passphrase_exception(self): key_spec = ss.KeySpec( alg=ss.KeyAlgorithm.RSA, bit_length=2048, passphrase="some passphrase" ) self.assertRaises( ss.GeneratePassphraseNotSupportedException, self.plugin.generate_asymmetric_key, key_spec ) def test_store_secret(self): payload = 'encrypt me!!' key_spec = mock.MagicMock() content_type = mock.MagicMock() transport_key = None secret_dto = ss.SecretDTO(ss.SecretType.SYMMETRIC, payload, key_spec, content_type, transport_key) response = self.plugin.store_secret(secret_dto) data = opaque_data.OpaqueData(secret_dto.secret) self.plugin.key_manager.store.assert_called_once_with( mock.ANY, data ) expected_response = {css.CastellanSecretStore.KEY_ID: key_ref1} self.assertEqual(response, expected_response) def test_store_secret_raises_exception(self): payload = 'encrypt me!!' 
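        # The plugin is expected to wrap the raw payload in a Castellan
        # OpaqueData object before handing it to key_manager.store(), and
        # to return the resulting key reference as its secret metadata,
        # as the assertions below verify.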
key_spec = mock.MagicMock() content_type = mock.MagicMock() transport_key = None secret_dto = ss.SecretDTO(ss.SecretType.SYMMETRIC, payload, key_spec, content_type, transport_key) self.plugin.key_manager.store.side_effect = exception.Forbidden() self.assertRaises( ss.SecretGeneralException, self.plugin.store_secret, secret_dto ) def test_get_secret(self): secret_metadata = { css.CastellanSecretStore.KEY_ID: key_ref1, "content_type": "application/octet-stream" } response = self.plugin.get_secret( ss.SecretType.SYMMETRIC, secret_metadata ) self.assertIsInstance(response, ss.SecretDTO) self.assertEqual(ss.SecretType.SYMMETRIC, response.type) self.assertEqual(secret_passphrase, response.secret) self.plugin.key_manager.get.assert_called_once_with( mock.ANY, key_ref1 ) def test_get_secret_throws_exception(self): secret_metadata = {css.CastellanSecretStore.KEY_ID: key_ref1} self.plugin.key_manager.get.side_effect = exception.Forbidden() self.assertRaises( ss.SecretGeneralException, self.plugin.get_secret, ss.SecretType.SYMMETRIC, secret_metadata ) def test_delete_secret(self): secret_metadata = {css.CastellanSecretStore.KEY_ID: key_ref1} self.plugin.delete_secret(secret_metadata) self.plugin.key_manager.delete.assert_called_once_with( mock.ANY, key_ref1 ) def test_delete_secret_throws_exception(self): secret_metadata = {css.CastellanSecretStore.KEY_ID: key_ref1} self.plugin.key_manager.delete.side_effect = exception.Forbidden() self.assertRaises( ss.SecretGeneralException, self.plugin.delete_secret, secret_metadata ) def test_delete_secret_throws_key_error(self): secret_metadata = {css.CastellanSecretStore.KEY_ID: key_ref1} self.plugin.key_manager.delete.side_effect = KeyError() self.plugin.delete_secret(secret_metadata) self.plugin.key_manager.delete.assert_called_once_with( mock.ANY, key_ref1 ) def test_store_secret_supports(self): self.assertTrue( self.plugin.generate_supports(mock.ANY) ) def test_generate_supports(self): self.assertTrue( self.plugin.generate_supports(mock.ANY) ) def test_get_plugin_name(self): self.assertEqual(self.plugin_name, self.plugin.get_plugin_name()) barbican-9.1.0.dev50/barbican/tests/plugin/test_store_crypto.py0000664000175000017500000007126113616500636024773 0ustar sahidsahid00000000000000# Copyright (c) 2014 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
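# The tests below cover the store_crypto adapter, which exposes crypto
# plugins (encrypt/decrypt/generate) through the secret-store interface.
# A rough sketch of the store path under test (simplified names):
#
#     kek_model, kek_meta_dto = _find_or_create_kek_objects(plugin, project)
#     response_dto = plugin.encrypt(EncryptDTO(secret), kek_meta_dto, pid)
#     _store_secret_and_datum(context, secret_model, kek_model, response_dto)
#
# The private helpers are patched out in most tests so the adapter logic
# can be asserted in isolation.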
import base64 import mock import testtools from barbican.common import utils from barbican.model import models from barbican.plugin.crypto import base from barbican.plugin.interface import secret_store from barbican.plugin import store_crypto from barbican.tests import keys from barbican.tests import utils as test_utils def get_private_dto(): spec = secret_store.KeySpec(secret_store.KeyAlgorithm.RSA, 2048) return secret_store.SecretDTO(secret_store.SecretType.PRIVATE, base64.b64encode( keys.get_private_key_pem()), spec, 'application/pkcs8') def get_public_dto(): spec = secret_store.KeySpec(secret_store.KeyAlgorithm.RSA, 2048) return secret_store.SecretDTO(secret_store.SecretType.PUBLIC, base64.b64encode( keys.get_public_key_pem()), spec, 'application/octet-stream') def get_certificate_dto(): spec = secret_store.KeySpec(secret_store.KeyAlgorithm.RSA, 2048) return secret_store.SecretDTO(secret_store.SecretType.CERTIFICATE, base64.b64encode( keys.get_certificate_pem()), spec, 'application/octet-stream') class TestSecretStoreBase(testtools.TestCase, test_utils.MockModelRepositoryMixin): """Define common configurations for testing store_crypto.py.""" def setUp(self): super(TestSecretStoreBase, self).setUp() self.patchers = [] # List of patchers utilized in this test class. self.project_id = '12345' self.content_type = 'application/octet-stream' self.content_encoding = 'base64' self.secret = base64.b64encode(b'secret') self.decrypted_secret = b'decrypted_secret' self.cypher_text = b'cypher_text' self.kek_meta_extended = 'kek-meta-extended' self.spec_aes = secret_store.KeySpec('AES', 64, 'CBC') self.spec_rsa = secret_store.KeySpec( 'RSA', 1024, passphrase='changeit') self.project_model = mock.MagicMock() self.project_model.id = 'project-model-id' self.project_model.external_id = self.project_id self.secret_dto = secret_store.SecretDTO( secret_store.SecretType.OPAQUE, self.secret, secret_store.KeySpec(), self.content_type ) self.response_dto = base.ResponseDTO( self.cypher_text, kek_meta_extended=self.kek_meta_extended) self.private_key_dto = base.ResponseDTO(self.cypher_text) self.public_key_dto = base.ResponseDTO(self.cypher_text) self.passphrase_dto = base.ResponseDTO(self.cypher_text) self.kek_meta_project_model = models.KEKDatum() self.kek_meta_project_model.plugin_name = 'plugin-name' self.kek_meta_project_model.kek_label = 'kek-meta-label' self.kek_meta_project_model.algorithm = 'kek-meta-algo' self.kek_meta_project_model.bit_length = 1024 self.kek_meta_project_model.mode = 'kek=meta-mode' self.kek_meta_project_model.plugin_meta = 'kek-meta-plugin-meta' self.encrypted_datum_model = models.EncryptedDatum() self.encrypted_datum_model.kek_meta_project = ( self.kek_meta_project_model) self.encrypted_datum_model.cypher_text = base64.b64encode( b'cypher_text') self.encrypted_datum_model.content_type = 'content_type' self.encrypted_datum_model.kek_meta_extended = 'extended_meta' self.secret_model = models.Secret( { 'algorithm': 'myalg', 'bit_length': 1024, 'mode': 'mymode' } ) self.secret_model.id = 'secret-model-id' self.secret_model.encrypted_data = [self.encrypted_datum_model] self.context = store_crypto.StoreCryptoContext( secret_model=self.secret_model, project_model=self.project_model, content_type=self.content_type) def tearDown(self): super(TestSecretStoreBase, self).tearDown() for patcher in self.patchers: patcher.stop() def init_patchers(self): self._config_get_secret_repository() self._config_get_encrypted_datum_repository() self._config_get_kek_datum_repository() def 
_start_patcher(self, patcher): mock = patcher.start() self.patchers.append(patcher) return mock def _config_get_secret_repository(self): """Mock the get_secret_repository() factory function.""" self.secret_repo = mock.MagicMock() self.secret_repo.create_from.return_value = self.secret_model self.setup_secret_repository_mock(self.secret_repo) def _config_get_encrypted_datum_repository(self): """Mock the get_encrypted_datum_repository() factory function.""" self.datum_repo = mock.MagicMock() self.datum_repo.create_from.return_value = None self.setup_encrypted_datum_repository_mock(self.datum_repo) def _config_get_kek_datum_repository(self): """Mock the get_kek_datum_repository() factory function.""" kek_model = self.kek_meta_project_model self.kek_repo = mock.MagicMock() self.kek_repo.find_or_create_kek_datum.return_value = kek_model self.setup_kek_datum_repository_mock(self.kek_repo) @test_utils.parameterized_test_case class WhenTestingStoreCrypto(TestSecretStoreBase): dataset_for_pem = { 'private': [get_private_dto()], 'public': [get_public_dto()], 'certificate': [get_certificate_dto()] } def setUp(self): super(WhenTestingStoreCrypto, self).setUp() self.init_patchers() self._config_crypto_plugin() self._config_private_methods() self.plugin_to_test = store_crypto.StoreCryptoAdapterPlugin() def test_store_secret_with_context_type(self): """Test storing a secret.""" response_dict = self.plugin_to_test.store_secret( self.secret_dto, self.context) self.assertIsNone(response_dict) # Verify encrypt plugin and method where invoked. encrypt_mock = self.encrypting_plugin.encrypt self.assertEqual(1, encrypt_mock.call_count) args, kwargs = encrypt_mock.call_args test_encrypt_dto, test_kek_meta_dto, test_project_id = tuple(args) self.assertIsInstance(test_encrypt_dto, base.EncryptDTO) self.assertEqual(b'secret', test_encrypt_dto.unencrypted) self.assertEqual(self.kek_meta_dto, test_kek_meta_dto) self.assertEqual(self.project_id, test_project_id) def test_store_secret_without_context_type(self): """Test storing a secret.""" self.context.content_type = None self.plugin_to_test.store_secret( self.secret_dto, self.context) self.assertEqual(self.content_type, self.context.content_type) @test_utils.parameterized_dataset(dataset_for_pem) def test_store_pem_secret(self, secret_dto): """Test storing a secret that is PEM encoded.""" response_dict = self.plugin_to_test.store_secret( secret_dto, self.context) self.assertIsNone(response_dict) raw_content = base64.b64decode(secret_dto.secret) # Verify encrypt plugin and method where invoked. encrypt_mock = self.encrypting_plugin.encrypt self.assertEqual(1, encrypt_mock.call_count) args, kwargs = encrypt_mock.call_args test_encrypt_dto, test_kek_meta_dto, test_project_id = tuple(args) self.assertIsInstance(test_encrypt_dto, base.EncryptDTO) self.assertEqual(raw_content, test_encrypt_dto.unencrypted) self.assertEqual(self.kek_meta_dto, test_kek_meta_dto) self.assertEqual(self.project_id, test_project_id) def test_get_secret(self): """Test getting a secret.""" secret_dto = self.plugin_to_test.get_secret( secret_store.SecretType.OPAQUE, None, # Secret metadata is not relevant to store_crypto process. self.context) # Verify response. 
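        # Note: base64.encodestring() (a deprecated alias of
        # encodebytes() on Python 3) appends a trailing newline, which is
        # why the expected value below is rstrip'ed before comparison.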
        self.assertIsInstance(secret_dto, secret_store.SecretDTO)
        self.assertEqual(secret_store.SecretType.OPAQUE, secret_dto.type)
        self.assertEqual(
            base64.encodestring(self.decrypted_secret).rstrip(b'\n'),
            secret_dto.secret)
        self.assertEqual(
            self.encrypted_datum_model.content_type, secret_dto.content_type)
        self.assertIsInstance(secret_dto.key_spec, secret_store.KeySpec)
        self.assertEqual(
            self.secret_model.algorithm, secret_dto.key_spec.alg)
        self.assertEqual(
            self.secret_model.bit_length, secret_dto.key_spec.bit_length)
        self.assertEqual(
            self.secret_model.mode, secret_dto.key_spec.mode)

        # Verify decrypt plugin and method were invoked.
        decrypt_mock = self.retrieving_plugin.decrypt
        self.assertEqual(1, decrypt_mock.call_count)
        args, kwargs = decrypt_mock.call_args
        (
            test_decrypt,
            test_kek_meta,
            test_kek_meta_extended,
            test_project_id
        ) = tuple(args)
        self.assertIsInstance(test_decrypt, base.DecryptDTO)
        self.assertEqual(
            base64.b64decode(self.encrypted_datum_model.cypher_text),
            test_decrypt.encrypted)
        self.assertIsInstance(test_kek_meta, base.KEKMetaDTO)
        self.assertEqual(
            self.kek_meta_project_model.plugin_name,
            test_kek_meta.plugin_name)
        self.assertEqual(
            self.encrypted_datum_model.kek_meta_extended,
            test_kek_meta_extended)
        self.assertEqual(self.project_id, test_project_id)

    @test_utils.parameterized_dataset(dataset_for_pem)
    def test_get_secret_encoding(self, input_secret_dto):
        """Test getting a secret that should be returned in PEM format."""
        secret = input_secret_dto.secret
        key_spec = input_secret_dto.key_spec
        secret_type = input_secret_dto.type

        decrypt_mock = self.retrieving_plugin.decrypt
        decrypt_mock.return_value = base64.decodestring(secret)

        secret_model = self.context.secret_model
        secret_model.algorithm = key_spec.alg
        secret_model.bit_length = key_spec.bit_length
        secret_model.mode = key_spec.mode

        secret_dto = self.plugin_to_test.get_secret(
            secret_type,
            None,  # Secret metadata is not relevant to store_crypto process.
            self.context)

        # Verify response.
        self.assertIsInstance(secret_dto, secret_store.SecretDTO)
        self.assertEqual(secret, secret_dto.secret)
        self.assertEqual(secret_type, secret_dto.type)
        self.assertIsInstance(secret_dto.key_spec, secret_store.KeySpec)
        self.assertEqual(
            secret_model.algorithm, secret_dto.key_spec.alg)
        self.assertEqual(
            secret_model.bit_length, secret_dto.key_spec.bit_length)
        self.assertEqual(
            secret_model.mode, secret_dto.key_spec.mode)

    def test_generate_symmetric_key(self):
        """test symmetric secret generation."""
        generation_type = base.PluginSupportTypes.SYMMETRIC_KEY_GENERATION
        self._config_determine_generation_type_private_method(
            generation_type)

        response_dict = self.plugin_to_test.generate_symmetric_key(
            self.spec_aes, self.context)
        self.assertIsNone(response_dict)

        # Verify KEK objects finder was invoked.
        method_target = self.find_or_create_kek_objects_patcher.target
        method_mock = method_target._find_or_create_kek_objects
        self.assertEqual(1, method_mock.call_count)

        # Verify generating plugin and method were invoked.
        self._verify_generating_plugin_args(
            self.generating_plugin.generate_symmetric,
            self.spec_aes.alg,
            self.spec_aes.bit_length)

        # Verify secret save was invoked.
        method_target = self.store_secret_and_datum_patcher.target
        method_mock = method_target._store_secret_and_datum
        self.assertEqual(1, method_mock.call_count)

    def test_generate_asymmetric_key_with_passphrase(self):
        """test asymmetric secret generation with passphrase."""
        self._test_generate_asymmetric_key(passphrase='passphrase')

    def test_generate_asymmetric_key_without_passphrase(self):
        """test asymmetric secret generation without passphrase."""
        self._test_generate_asymmetric_key(passphrase=None)

    def test_generate_supports(self):
        """test generate_supports."""
        # False return if KeySpec == None
        self.assertFalse(self.plugin_to_test.generate_supports(None))

        # AES KeySpec should be supported.
        key_spec = secret_store.KeySpec(alg='AES', bit_length=64, mode='CBC')
        self.assertTrue(self.plugin_to_test.generate_supports(key_spec))
        key_spec = secret_store.KeySpec(alg='aes', bit_length=64, mode='CBC')
        self.assertTrue(self.plugin_to_test.generate_supports(key_spec))

        # RSA KeySpec should be supported.
        key_spec = secret_store.KeySpec(alg='RSA', bit_length=2048)
        self.assertTrue(self.plugin_to_test.generate_supports(key_spec))

        # Camellia KeySpec should not be supported.
        self.key_spec = secret_store.KeySpec('Camellia', 64)
        self.assertFalse(self.plugin_to_test.generate_supports(self.key_spec))

        # Bogus KeySpec should not be supported.
        key_spec = secret_store.KeySpec(alg='bogus', bit_length=2048)
        self.assertFalse(self.plugin_to_test.generate_supports(key_spec))

    def test_store_secret_supports(self):
        # All spec types are supported for storage.
        key_spec = secret_store.KeySpec(
            alg='anyalg', bit_length=64, mode='CBC')
        self.assertTrue(self.plugin_to_test.store_secret_supports(key_spec))

    def test_delete_secret(self):
        """Delete is not implemented, so just verify passes."""
        self.plugin_to_test.delete_secret(None)

    def test_should_raise_secret_not_found_get_secret_with_no_model(self):
        self.context.secret_model = None

        self.assertRaises(
            secret_store.SecretNotFoundException,
            self.plugin_to_test.get_secret,
            secret_store.SecretType.OPAQUE,
            None,  # get_secret() doesn't use the secret metadata argument
            self.context
        )

    def test_should_raise_secret_not_found_get_secret_no_encrypted_data(self):
        self.context.secret_model.encrypted_data = []

        self.assertRaises(
            secret_store.SecretNotFoundException,
            self.plugin_to_test.get_secret,
            secret_store.SecretType.OPAQUE,
            None,  # get_secret() doesn't use the secret metadata argument
            self.context
        )

    def test_should_raise_algorithm_not_supported_generate_symmetric_key(self):
        generation_type = base.PluginSupportTypes.ASYMMETRIC_KEY_GENERATION
        self._config_determine_generation_type_private_method(
            generation_type)

        self.assertRaises(
            secret_store.SecretAlgorithmNotSupportedException,
            self.plugin_to_test.generate_symmetric_key,
            self.spec_aes,
            self.context
        )

    def test_should_raise_algo_not_supported_generate_asymmetric_key(self):
        generation_type = base.PluginSupportTypes.SYMMETRIC_KEY_GENERATION
        self._config_determine_generation_type_private_method(
            generation_type)

        self.assertRaises(
            secret_store.SecretAlgorithmNotSupportedException,
            self.plugin_to_test.generate_asymmetric_key,
            self.spec_rsa,
            self.context
        )

    def _test_generate_asymmetric_key(self, passphrase=None):
        """test asymmetric secret generation with passphrase parameter."""
        self.spec_rsa.passphrase = passphrase

        generation_type = base.PluginSupportTypes.ASYMMETRIC_KEY_GENERATION
        self._config_determine_generation_type_private_method(
            generation_type)

        response_dto = self.plugin_to_test.generate_asymmetric_key(
            self.spec_rsa, self.context)

        # Verify
response. self.assertIsInstance( response_dto, secret_store.AsymmetricKeyMetadataDTO) self.assertIsNone(response_dto.private_key_meta) self.assertIsNone(response_dto.public_key_meta) self.assertIsNone(response_dto.passphrase_meta) # Verify KEK objects finder was invoked. method_target = self.find_or_create_kek_objects_patcher.target method_mock = method_target._find_or_create_kek_objects self.assertEqual(1, method_mock.call_count) # Verify generating plugin and method where invoked. self._verify_generating_plugin_args( self.generating_plugin.generate_asymmetric, self.spec_rsa.alg, self.spec_rsa.bit_length) # Assert the secret save was called the proper number of times. call_count = 2 if passphrase: call_count = 3 method_target = self.store_secret_and_datum_patcher.target method_mock = method_target._store_secret_and_datum self.assertEqual(call_count, method_mock.call_count) def _verify_generating_plugin_args(self, generate_mock, alg, bit_length): """Verify generating plugin and method where invoked.""" self.assertEqual(1, generate_mock.call_count) args, kwargs = generate_mock.call_args test_generate_dto, test_kek_meta_dto, test_project_id = tuple(args) self.assertIsInstance(test_generate_dto, base.GenerateDTO) self.assertEqual(alg, test_generate_dto.algorithm) self.assertEqual(bit_length, test_generate_dto.bit_length) self.assertEqual(self.kek_meta_dto, test_kek_meta_dto) self.assertEqual(self.project_id, test_project_id) return generate_mock def _config_crypto_plugin(self): """Mock the crypto plugin.""" # Create encrypting and generating plugins (the same plugin does both) response_dto = self.response_dto self.generating_plugin = mock.MagicMock() self.encrypting_plugin = self.generating_plugin self.generating_plugin.encrypt.return_value = response_dto self.generating_plugin.generate_symmetric.return_value = response_dto self.generating_plugin.generate_asymmetric.return_value = ( self.private_key_dto, self.public_key_dto, self.passphrase_dto ) # Create secret retrieving plugin self.retrieving_plugin = mock.MagicMock() self.retrieving_plugin.decrypt.return_value = self.decrypted_secret gen_plugin_config = { 'get_plugin_store_generate.return_value': self.generating_plugin, 'get_plugin_retrieve.return_value': self.retrieving_plugin, } self.gen_plugin_patcher = mock.patch( 'barbican.plugin.crypto.manager._PLUGIN_MANAGER', **gen_plugin_config ) self._start_patcher(self.gen_plugin_patcher) def _config_private_methods(self): """Mock store_crypto's private methods.""" # Mock _find_or_create_kek_objects(). self.kek_meta_dto = mock.MagicMock() find_or_create_kek_objects_config = { 'return_value': ( self.kek_meta_project_model, self.kek_meta_dto), } self.find_or_create_kek_objects_patcher = mock.patch( 'barbican.plugin.store_crypto._find_or_create_kek_objects', **find_or_create_kek_objects_config ) self._start_patcher(self.find_or_create_kek_objects_patcher) # Mock _store_secret_and_datum(). 
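        # Patching this helper keeps the tests database-free: the adapter
        # under test is asserted purely through call counts rather than
        # through persisted Secret/EncryptedDatum rows.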
self.store_secret_and_datum_patcher = mock.patch( 'barbican.plugin.store_crypto._store_secret_and_datum' ) self._start_patcher(self.store_secret_and_datum_patcher) def _config_determine_generation_type_private_method(self, type_to_return): """Mock _determine_generation_type().""" determine_generation_type_config = { 'return_value': type_to_return, } self.determine_generation_type_patcher = mock.patch( 'barbican.plugin.store_crypto._determine_generation_type', **determine_generation_type_config ) self._start_patcher(self.determine_generation_type_patcher) class WhenTestingStoreCryptoDetermineGenerationType(testtools.TestCase): """Tests store_crypto.py's _determine_generation_type() function.""" def test_symmetric_algorithms(self): for algorithm in base.PluginSupportTypes.SYMMETRIC_ALGORITHMS: self.assertEqual( base.PluginSupportTypes.SYMMETRIC_KEY_GENERATION, store_crypto._determine_generation_type(algorithm)) # Case doesn't matter. self.assertEqual( base.PluginSupportTypes.SYMMETRIC_KEY_GENERATION, store_crypto._determine_generation_type('AeS')) def test_asymmetric_algorithms(self): for algorithm in base.PluginSupportTypes.ASYMMETRIC_ALGORITHMS: self.assertEqual( base.PluginSupportTypes.ASYMMETRIC_KEY_GENERATION, store_crypto._determine_generation_type(algorithm)) # Case doesn't matter. self.assertEqual( base.PluginSupportTypes.ASYMMETRIC_KEY_GENERATION, store_crypto._determine_generation_type('RsA')) def test_should_raise_not_supported_no_algorithm(self): self.assertRaises( secret_store.SecretAlgorithmNotSupportedException, store_crypto._determine_generation_type, None ) def test_should_raise_not_supported_bogus_algorithm(self): self.assertRaises( secret_store.SecretAlgorithmNotSupportedException, store_crypto._determine_generation_type, 'bogus' ) class WhenTestingStoreCryptoFindOrCreateKekObjects(TestSecretStoreBase): """Tests store_crypto.py's _find_or_create_kek_objects() function.""" def setUp(self): super(WhenTestingStoreCryptoFindOrCreateKekObjects, self).setUp() self.init_patchers() self._config_private_methods() def test_kek_bind_completed(self): self.kek_meta_project_model.bind_completed = True plugin_inst = self kek_model, kek_meta_dto = store_crypto._find_or_create_kek_objects( plugin_inst, self.project_model) # Verify returns. self.assertEqual(self.kek_meta_project_model, kek_model) self.assertIsInstance(kek_meta_dto, base.KEKMetaDTO) # Verify the KEK repository interactions. self._verify_kek_repository_interactions(plugin_inst) def test_kek_bind_not_completed(self): self.kek_meta_project_model.bind_completed = False test_kek_metadata = 'metadata' plugin_inst = mock.MagicMock() plugin_inst.bind_kek_metadata.return_value = test_kek_metadata kek_model, kek_meta_dto = store_crypto._find_or_create_kek_objects( plugin_inst, self.project_model) # Verify returns. self.assertEqual(self.kek_meta_project_model, kek_model) self.assertEqual(test_kek_metadata, kek_meta_dto) # Verify the KEK repository interactions. self._verify_kek_repository_interactions(plugin_inst) # Verify bind operations. 
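        # When bind_completed is False, bind_kek_metadata() must be
        # called on the plugin, its return value used as the KEK
        # metadata, and the KEK datum flagged as bound and saved, as the
        # assertions below verify.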
self.assertEqual( 1, plugin_inst.bind_kek_metadata.call_count) self.assertEqual( 1, self.bind_completed_mock.call_count) self.assertEqual( 1, self.kek_repo.save.call_count) args, kwargs = self.kek_repo.save.call_args kek_model = args[0] self.assertEqual(self.kek_meta_project_model, kek_model) def test_kek_raise_no_kek_bind_not_completed(self): self.kek_meta_project_model.bind_completed = False plugin_inst = mock.MagicMock() plugin_inst.bind_kek_metadata.return_value = None self.assertRaises( base.CryptoKEKBindingException, store_crypto._find_or_create_kek_objects, plugin_inst, self.project_model) def _verify_kek_repository_interactions(self, plugin_inst): """Verify the KEK repository interactions.""" self.assertEqual( 1, self.kek_repo.find_or_create_kek_datum.call_count) args, kwargs = self.kek_repo.find_or_create_kek_datum.call_args test_project_model = args[0] test_full_plugin_name = args[1] self.assertEqual(self.project_model, test_project_model) plugin_name = utils.generate_fullname_for(plugin_inst) self.assertEqual(plugin_name, test_full_plugin_name) def _config_private_methods(self): """Mock store_crypto's private methods.""" # Mock _indicate_bind_completed(). indicate_bind_completed_config = { 'return_value': None } self.indicate_bind_completed_patcher = mock.patch( 'barbican.plugin.store_crypto._indicate_bind_completed', **indicate_bind_completed_config) self.bind_completed_mock = self._start_patcher( self.indicate_bind_completed_patcher) class WhenTestingStoreCryptoStoreSecretAndDatum(TestSecretStoreBase): """Tests store_crypto.py's _store_secret_and_datum() function.""" def setUp(self): super(WhenTestingStoreCryptoStoreSecretAndDatum, self).setUp() self.init_patchers() def test_without_existing_secret(self): self.secret_model.id = None store_crypto._store_secret_and_datum( self.context, self.secret_model, self.kek_meta_project_model, self.response_dto) # Verify the repository interactions. self._verify_secret_repository_interactions() self._verify_encrypted_datum_repository_interactions() def test_with_existing_secret(self): store_crypto._store_secret_and_datum( self.context, self.secret_model, self.kek_meta_project_model, self.response_dto) # Verify the repository interactions. self._verify_encrypted_datum_repository_interactions() # Verify **not** these repository interactions. 
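        # A secret model that already has an id is treated as existing,
        # so only a new EncryptedDatum row should be written and
        # secret_repo.create_from() must not be called again.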
self.assertEqual( 0, self.secret_repo.create_from.call_count) def _verify_secret_repository_interactions(self): """Verify the secret repository interactions.""" self.assertEqual( 1, self.secret_repo.create_from.call_count) args, kwargs = self.secret_repo.create_from.call_args test_secret_model = args[0] self.assertEqual(self.secret_model, test_secret_model) def _verify_encrypted_datum_repository_interactions(self): """Verify the encrypted datum repository interactions.""" self.assertEqual( 1, self.datum_repo.create_from.call_count) args, kwargs = self.datum_repo.create_from.call_args test_datum_model = args[0] self.assertIsInstance(test_datum_model, models.EncryptedDatum) self.assertEqual( self.content_type, test_datum_model.content_type) self.assertEqual( base64.encodestring(self.cypher_text).rstrip(b'\n'), test_datum_model.cypher_text) self.assertEqual( self.response_dto.kek_meta_extended, test_datum_model.kek_meta_extended) class WhenTestingStoreCryptoIndicateBindCompleted(TestSecretStoreBase): """Tests store_crypto.py's _indicate_bind_completed() function.""" def test_bind_operation(self): kek_meta_dto = base.KEKMetaDTO(self.kek_meta_project_model) self.kek_meta_project_model.bind_completed = False store_crypto._indicate_bind_completed( kek_meta_dto, self.kek_meta_project_model) self.assertTrue(self.kek_meta_project_model.bind_completed) self.assertEqual( self.kek_meta_project_model.algorithm, kek_meta_dto.algorithm) self.assertEqual( self.kek_meta_project_model.bit_length, kek_meta_dto.bit_length) self.assertEqual( self.kek_meta_project_model.mode, kek_meta_dto.mode) self.assertEqual( self.kek_meta_project_model.plugin_meta, kek_meta_dto.plugin_meta) barbican-9.1.0.dev50/barbican/tests/plugin/test_dogtag.py0000664000175000017500000010644413616500636023506 0ustar sahidsahid00000000000000# Copyright (c) 2014 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
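# The tests below cover the Dogtag KRA/CA plugins and are skipped when
# the pki client packages are unavailable (see the guarded imports that
# follow). As a rough, illustrative mapping of plugin operations onto
# pki.key.KeyClient calls, based on the mocks asserted in these tests:
#
#     generate_symmetric_key  -> keyclient.generate_symmetric_key(...)
#     store_secret            -> keyclient.archive_key(..., payload, ...)
#     get_secret              -> keyclient.retrieve_key(key_id, twsk)
#
# Exact signatures live in the dogtag plugin module; the mocks here only
# assert the arguments Barbican is expected to pass.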
import base64 import datetime import os import tempfile from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.asymmetric import rsa from cryptography.hazmat.primitives import serialization import mock from requests import exceptions as request_exceptions import testtools from barbican.tests import keys from barbican.tests import utils try: import barbican.plugin.dogtag as dogtag_import import barbican.plugin.interface.certificate_manager as cm import barbican.plugin.interface.secret_store as sstore import pki import pki.cert as dogtag_cert import pki.key as dogtag_key imports_ok = True except ImportError: # dogtag imports probably not available imports_ok = False @testtools.skipIf(not imports_ok, "Dogtag imports not available") class WhenTestingDogtagKRAPlugin(utils.BaseTestCase): def setUp(self): super(WhenTestingDogtagKRAPlugin, self).setUp() self.keyclient_mock = mock.MagicMock(name="KeyClient mock") self.patcher = mock.patch('pki.crypto.NSSCryptoProvider') self.patcher.start() # create nss db for test only self.nss_dir = tempfile.mkdtemp() self.plugin_name = "Test Dogtag KRA plugin" self.cfg_mock = mock.MagicMock(name='config mock') self.cfg_mock.dogtag_plugin = mock.MagicMock( nss_db_path=self.nss_dir, plugin_name=self.plugin_name, retries=3) self.plugin = dogtag_import.DogtagKRAPlugin(self.cfg_mock) self.plugin.keyclient = self.keyclient_mock def tearDown(self): super(WhenTestingDogtagKRAPlugin, self).tearDown() self.patcher.stop() os.rmdir(self.nss_dir) def test_get_plugin_name(self): self.assertEqual(self.plugin_name, self.plugin.get_plugin_name()) def test_generate_symmetric_key(self): key_spec = sstore.KeySpec(sstore.KeyAlgorithm.AES, 128) self.plugin.generate_symmetric_key(key_spec) self.keyclient_mock.generate_symmetric_key.assert_called_once_with( mock.ANY, sstore.KeyAlgorithm.AES.upper(), 128, mock.ANY) def test_generate_asymmetric_key(self): key_spec = sstore.KeySpec(sstore.KeyAlgorithm.RSA, 2048) self.plugin.generate_asymmetric_key(key_spec) self.keyclient_mock.generate_asymmetric_key.assert_called_once_with( mock.ANY, sstore.KeyAlgorithm.RSA.upper(), 2048, mock.ANY) def test_generate_non_supported_algorithm(self): key_spec = sstore.KeySpec(sstore.KeyAlgorithm.EC, 192) self.assertRaises( dogtag_import.DogtagPluginAlgorithmException, self.plugin.generate_symmetric_key, key_spec ) def test_raises_error_with_no_pem_path(self): m = mock.MagicMock() m.dogtag_plugin = mock.MagicMock(pem_path=None, nss_db_path='/tmp') self.assertRaises( ValueError, dogtag_import.DogtagKRAPlugin, m, ) def test_store_secret(self): payload = 'encrypt me!!' 
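        # With no transport key, the plugin should archive the payload
        # directly as a KRA "passPhrase" data type, as asserted below.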
key_spec = mock.MagicMock() content_type = mock.MagicMock() transport_key = None secret_dto = sstore.SecretDTO(sstore.SecretType.SYMMETRIC, payload, key_spec, content_type, transport_key) self.plugin.store_secret(secret_dto) self.keyclient_mock.archive_key.assert_called_once_with( mock.ANY, "passPhrase", payload, key_algorithm=None, key_size=None) def test_store_secret_with_tkey_id(self): payload = 'data wrapped in PKIArchiveOptions object' key_spec = mock.MagicMock() content_type = mock.MagicMock() transport_key = mock.MagicMock() secret_dto = sstore.SecretDTO(sstore.SecretType.SYMMETRIC, payload, key_spec, content_type, transport_key) self.plugin.store_secret(secret_dto) self.keyclient_mock.archive_pki_options.assert_called_once_with( mock.ANY, "passPhrase", payload, key_algorithm=None, key_size=None) def test_get_secret(self): secret_metadata = { dogtag_import.DogtagKRAPlugin.ALG: sstore.KeyAlgorithm.AES, dogtag_import.DogtagKRAPlugin.BIT_LENGTH: 256, dogtag_import.DogtagKRAPlugin.KEY_ID: 'key1' } self.plugin.get_secret(sstore.SecretType.SYMMETRIC, secret_metadata) self.keyclient_mock.retrieve_key.assert_called_once_with('key1', None) def test_get_secret_with_twsk(self): twsk = mock.MagicMock() secret_metadata = { dogtag_import.DogtagKRAPlugin.ALG: sstore.KeyAlgorithm.AES, dogtag_import.DogtagKRAPlugin.BIT_LENGTH: 256, dogtag_import.DogtagKRAPlugin.KEY_ID: 'key1', 'trans_wrapped_session_key': twsk } self.plugin.get_secret(sstore.SecretType.SYMMETRIC, secret_metadata) self.keyclient_mock.retrieve_key.assert_called_once_with('key1', twsk) def test_get_private_key(self): test_key = rsa.generate_private_key( public_exponent=65537, key_size=2048, backend=default_backend() ) key_data = dogtag_key.KeyData() key_data.data = test_key.private_bytes( serialization.Encoding.DER, serialization.PrivateFormat.PKCS8, serialization.NoEncryption()) self.keyclient_mock.retrieve_key.return_value = key_data secret_metadata = { dogtag_import.DogtagKRAPlugin.ALG: sstore.KeyAlgorithm.RSA, dogtag_import.DogtagKRAPlugin.BIT_LENGTH: 2048, dogtag_import.DogtagKRAPlugin.KEY_ID: 'key1', dogtag_import.DogtagKRAPlugin.CONVERT_TO_PEM: 'true' } result = self.plugin.get_secret(sstore.SecretType.PRIVATE, secret_metadata) self.assertEqual( test_key.private_bytes( serialization.Encoding.PEM, serialization.PrivateFormat.PKCS8, serialization.NoEncryption()), result.secret ) def test_get_public_key(self): test_public_key = rsa.generate_private_key( public_exponent=65537, key_size=2048, backend=default_backend()).public_key() key_info = dogtag_key.KeyInfo() key_info.public_key = test_public_key.public_bytes( serialization.Encoding.DER, serialization.PublicFormat.PKCS1) self.keyclient_mock.get_key_info.return_value = key_info secret_metadata = { dogtag_import.DogtagKRAPlugin.ALG: sstore.KeyAlgorithm.RSA, dogtag_import.DogtagKRAPlugin.BIT_LENGTH: 2048, dogtag_import.DogtagKRAPlugin.KEY_ID: 'key1', dogtag_import.DogtagKRAPlugin.CONVERT_TO_PEM: 'true' } result = self.plugin.get_secret(sstore.SecretType.PUBLIC, secret_metadata) self.assertEqual( test_public_key.public_bytes( serialization.Encoding.PEM, serialization.PublicFormat.PKCS1), result.secret ) def test_store_passphrase_for_using_in_private_key_retrieval(self): key_spec = sstore.KeySpec(sstore.KeyAlgorithm.RSA, 2048, passphrase="password123") # Mock the response for passphrase archival request_response = dogtag_key.KeyRequestResponse() request_info = dogtag_key.KeyRequestInfo() request_info.key_url = "https://example_url/1" request_response.request_info = request_info 
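        # The plugin is expected to parse the trailing path segment of
        # key_url ("https://example_url/1" -> '1') and expose it as the
        # passphrase key id in the private key metadata.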
        self.keyclient_mock.archive_key.return_value = request_response
        asym_key_DTO = self.plugin.generate_asymmetric_key(key_spec)

        self.assertEqual(
            '1',
            asym_key_DTO.private_key_meta[
                dogtag_import.DogtagKRAPlugin.PASSPHRASE_KEY_ID]
        )

        self.keyclient_mock.generate_asymmetric_key.assert_called_once_with(
            mock.ANY, sstore.KeyAlgorithm.RSA.upper(), 2048, mock.ANY)

    def test_supports_symmetric_aes_key_generation(self):
        key_spec = sstore.KeySpec(sstore.KeyAlgorithm.AES, 256)
        self.assertTrue(
            self.plugin.generate_supports(key_spec)
        )

    def test_supports_asymmetric_rsa_key_generation(self):
        key_spec = sstore.KeySpec(sstore.KeyAlgorithm.RSA, 2048)
        self.assertTrue(
            self.plugin.generate_supports(key_spec)
        )

    def test_supports_asymmetric_ec_key_generation(self):
        key_spec = sstore.KeySpec(sstore.KeyAlgorithm.EC, 156)
        self.assertFalse(
            self.plugin.generate_supports(key_spec)
        )

    def test_supports_symmetric_dh_key_generation(self):
        key_spec = sstore.KeySpec(sstore.KeyAlgorithm.DIFFIE_HELLMAN, 156)
        self.assertFalse(
            self.plugin.generate_supports(key_spec)
        )

    def test_does_not_support_unknown_type(self):
        key_spec = sstore.KeySpec("SOMETHING_RANDOM", 156)
        self.assertFalse(
            self.plugin.generate_supports(key_spec)
        )


@testtools.skipIf(not imports_ok, "Dogtag imports not available")
class WhenTestingDogtagCAPlugin(utils.BaseTestCase):

    def setUp(self):
        super(WhenTestingDogtagCAPlugin, self).setUp()
        self.certclient_mock = mock.MagicMock(name="CertClient mock")
        self.patcher = mock.patch('pki.crypto.NSSCryptoProvider')
        self.patcher2 = mock.patch('pki.client.PKIConnection')
        self.patcher.start()
        self.patcher2.start()

        # create nss db for test only
        self.nss_dir = tempfile.mkdtemp()

        # create expiration file for test
        fh, self.expiration_data_path = tempfile.mkstemp()
        exp_time = datetime.datetime.utcnow() + datetime.timedelta(days=2)
        # os.write() requires bytes on Python 3, so encode the timestamp
        os.write(fh, exp_time.strftime(
            "%Y-%m-%d %H:%M:%S.%f").encode('UTF-8'))
        os.close(fh)

        # create host CA file for test
        fh, self.host_ca_path = tempfile.mkstemp()
        os.write(fh, b"host_ca_aid")
        os.close(fh)

        self.approved_profile_id = "caServerCert"
        CONF = dogtag_import.CONF
        CONF.dogtag_plugin.nss_db_path = self.nss_dir
        CONF.dogtag_plugin.ca_expiration_data_path = self.expiration_data_path
        CONF.dogtag_plugin.ca_host_aid_path = self.host_ca_path
        CONF.dogtag_plugin.auto_approved_profiles = [self.approved_profile_id]
        CONF.dogtag_plugin.dogtag_host = "localhost"
        CONF.dogtag_plugin.dogtag_port = 8443
        CONF.dogtag_plugin.simple_cmc_profile = "caOtherCert"
        self.cfg = CONF

        self.plugin = dogtag_import.DogtagCAPlugin(CONF)
        self.plugin.certclient = self.certclient_mock
        self.order_id = mock.MagicMock()
        self.profile_id = mock.MagicMock()

        # request generated
        self.request_id_mock = mock.MagicMock()
        self.request = dogtag_cert.CertRequestInfo()
        self.request.request_id = self.request_id_mock
        self.request.request_status = dogtag_cert.CertRequestStatus.COMPLETE
        self.cert_id_mock = mock.MagicMock()
        self.request.cert_id = self.cert_id_mock

        # cert generated
        self.cert = mock.MagicMock()
        self.cert.encoded = keys.get_certificate_pem()
        self.cert.pkcs7_cert_chain = keys.get_certificate_der()

        # for cancel/modify
        self.review_response = mock.MagicMock()

        # modified request
        self.modified_request = mock.MagicMock()
        self.modified_request_id_mock = mock.MagicMock()
        self.modified_request.request_id = self.modified_request_id_mock
        self.modified_request.request_status = (
            dogtag_cert.CertRequestStatus.COMPLETE)
        self.modified_request.cert_id = self.cert_id_mock

        self.barbican_meta_dto = cm.BarbicanMetaDTO()

    def tearDown(self):
        super(WhenTestingDogtagCAPlugin,
self).tearDown() self.patcher2.stop() self.patcher.stop() os.rmdir(self.nss_dir) os.remove(self.host_ca_path) os.remove(self.expiration_data_path) def _process_approved_profile_request(self, order_meta, plugin_meta): enrollment_result = dogtag_cert.CertEnrollmentResult( self.request, self.cert) enrollment_results = [enrollment_result] self.certclient_mock.enroll_cert.return_value = enrollment_results result_dto = self.plugin.issue_certificate_request( self.order_id, order_meta, plugin_meta, self.barbican_meta_dto) self.certclient_mock.enroll_cert.assert_called_once_with( self.approved_profile_id, order_meta) self.assertEqual(cm.CertificateStatus.CERTIFICATE_GENERATED, result_dto.status, "result_dto status incorrect") self.assertEqual(base64.b64encode(keys.get_certificate_pem()), result_dto.certificate) self.assertEqual( self.request_id_mock, plugin_meta.get(dogtag_import.DogtagCAPlugin.REQUEST_ID) ) def _process_non_approved_profile_request(self, order_meta, plugin_meta, profile_id, inputs=None): if inputs is None: inputs = { 'cert_request_type': 'pkcs10', 'cert_request': base64.b64decode( order_meta.get('request_data')) } # mock CertRequestInfo enrollment_result = dogtag_cert.CertRequestInfo() enrollment_result.request_id = self.request_id_mock enrollment_result.request_status = ( dogtag_cert.CertRequestStatus.PENDING) # mock CertRequestInfoCollection enrollment_results = dogtag_cert.CertRequestInfoCollection() enrollment_results.cert_request_info_list = ( [enrollment_result]) self.certclient_mock.create_enrollment_request.return_value = ( enrollment_result) self.certclient_mock.submit_enrollment_request.return_value = ( enrollment_results) result_dto = self.plugin.issue_certificate_request( self.order_id, order_meta, plugin_meta, self.barbican_meta_dto) self.certclient_mock.create_enrollment_request.assert_called_once_with( profile_id, inputs) self.certclient_mock.submit_enrollment_request.assert_called_once_with( enrollment_result) self.assertEqual(cm.CertificateStatus.WAITING_FOR_CA, result_dto.status, "result_dto status incorrect") self.assertEqual( self.request_id_mock, plugin_meta.get(dogtag_import.DogtagCAPlugin.REQUEST_ID) ) def test_issue_simple_cmc_request(self): order_meta = { cm.REQUEST_TYPE: cm.CertificateRequestType.SIMPLE_CMC_REQUEST, 'request_data': base64.b64encode(keys.get_csr_pem()) } plugin_meta = {} self._process_non_approved_profile_request( order_meta, plugin_meta, self.cfg.dogtag_plugin.simple_cmc_profile) def test_issue_full_cmc_request(self): order_meta = { cm.REQUEST_TYPE: cm.CertificateRequestType.FULL_CMC_REQUEST, 'request_data': 'Full CMC data ...' 
} plugin_meta = {} self.assertRaises( dogtag_import.DogtagPluginNotSupportedException, self.plugin.issue_certificate_request, self.order_id, order_meta, plugin_meta, self.barbican_meta_dto ) def test_issue_stored_key_request(self): order_meta = { cm.REQUEST_TYPE: cm.CertificateRequestType.STORED_KEY_REQUEST, 'request_data': base64.b64encode(keys.get_csr_pem()) } plugin_meta = {} self._process_non_approved_profile_request( order_meta, plugin_meta, self.cfg.dogtag_plugin.simple_cmc_profile) def test_issue_custom_key_request(self): order_meta = { cm.REQUEST_TYPE: cm.CertificateRequestType.CUSTOM_REQUEST, dogtag_import.DogtagCAPlugin.PROFILE_ID: self.approved_profile_id, } plugin_meta = {} self._process_approved_profile_request(order_meta, plugin_meta) def test_issue_no_cert_request_type_provided(self): order_meta = { dogtag_import.DogtagCAPlugin.PROFILE_ID: self.approved_profile_id} plugin_meta = {} self._process_approved_profile_request(order_meta, plugin_meta) def test_issue_bad_cert_request_type_provided(self): order_meta = { cm.REQUEST_TYPE: 'BAD_REQUEST_TYPE', dogtag_import.DogtagCAPlugin.PROFILE_ID: self.profile_id, } plugin_meta = {} self.assertRaises( dogtag_import.DogtagPluginNotSupportedException, self.plugin.issue_certificate_request, self.order_id, order_meta, plugin_meta, self.barbican_meta_dto ) def test_issue_return_data_error_with_no_profile_id(self): order_meta = {} plugin_meta = {} result_dto = self.plugin.issue_certificate_request( self.order_id, order_meta, plugin_meta, self.barbican_meta_dto) self.assertEqual(result_dto.status, cm.CertificateStatus.CLIENT_DATA_ISSUE_SEEN, "result_dto status incorrect") self.assertEqual(result_dto.status_message, "No profile_id specified") def test_issue_return_data_error_with_request_rejected(self): order_meta = { dogtag_import.DogtagCAPlugin.PROFILE_ID: self.approved_profile_id} plugin_meta = {} self.request.request_status = dogtag_cert.CertRequestStatus.REJECTED enrollment_result = dogtag_cert.CertEnrollmentResult( self.request, None) enrollment_results = [enrollment_result] self.certclient_mock.enroll_cert.return_value = enrollment_results result_dto = self.plugin.issue_certificate_request( self.order_id, order_meta, plugin_meta, self.barbican_meta_dto) self.certclient_mock.enroll_cert.assert_called_once_with( self.approved_profile_id, order_meta) self.assertEqual(cm.CertificateStatus.CLIENT_DATA_ISSUE_SEEN, result_dto.status, "result_dto status incorrect") self.assertEqual( self.request_id_mock, plugin_meta.get(dogtag_import.DogtagCAPlugin.REQUEST_ID)) def test_issue_return_canceled_with_request_canceled(self): order_meta = { dogtag_import.DogtagCAPlugin.PROFILE_ID: self.approved_profile_id} plugin_meta = {} self.request.request_status = dogtag_cert.CertRequestStatus.CANCELED enrollment_result = dogtag_cert.CertEnrollmentResult( self.request, None) enrollment_results = [enrollment_result] self.certclient_mock.enroll_cert.return_value = enrollment_results result_dto = self.plugin.issue_certificate_request( self.order_id, order_meta, plugin_meta, self.barbican_meta_dto) self.certclient_mock.enroll_cert.assert_called_once_with( self.approved_profile_id, order_meta) self.assertEqual(cm.CertificateStatus.REQUEST_CANCELED, result_dto.status, "result_dto status incorrect") self.assertEqual( self.request_id_mock, plugin_meta.get(dogtag_import.DogtagCAPlugin.REQUEST_ID), ) def test_issue_return_waiting_with_request_pending(self): order_meta = { dogtag_import.DogtagCAPlugin.PROFILE_ID: "otherProfile", 'cert_request': 
base64.b64encode(keys.get_csr_pem())} plugin_meta = {} inputs = { 'cert_request': keys.get_csr_pem(), dogtag_import.DogtagCAPlugin.PROFILE_ID: "otherProfile" } self._process_non_approved_profile_request( order_meta, plugin_meta, "otherProfile", inputs) def test_issue_raises_error_request_complete_no_cert(self): order_meta = { dogtag_import.DogtagCAPlugin.PROFILE_ID: self.approved_profile_id} plugin_meta = {} enrollment_result = dogtag_cert.CertEnrollmentResult( self.request, None) enrollment_results = [enrollment_result] self.certclient_mock.enroll_cert.return_value = enrollment_results self.assertRaises( cm.CertificateGeneralException, self.plugin.issue_certificate_request, self.order_id, order_meta, plugin_meta, self.barbican_meta_dto ) self.assertEqual( self.request_id_mock, plugin_meta.get(dogtag_import.DogtagCAPlugin.REQUEST_ID) ) def test_issue_raises_error_request_unknown_status(self): order_meta = { dogtag_import.DogtagCAPlugin.PROFILE_ID: self.approved_profile_id} plugin_meta = {} self.request.request_status = "unknown_status" enrollment_result = dogtag_cert.CertEnrollmentResult( self.request, None) enrollment_results = [enrollment_result] self.certclient_mock.enroll_cert.return_value = enrollment_results self.assertRaises( cm.CertificateGeneralException, self.plugin.issue_certificate_request, self.order_id, order_meta, plugin_meta, self.barbican_meta_dto ) self.assertEqual( self.request_id_mock, plugin_meta.get(dogtag_import.DogtagCAPlugin.REQUEST_ID) ) def test_issue_return_client_error_bad_request_exception(self): order_meta = { dogtag_import.DogtagCAPlugin.PROFILE_ID: self.approved_profile_id} plugin_meta = {} self.certclient_mock.enroll_cert.side_effect = ( pki.BadRequestException("bad request")) result_dto = self.plugin.issue_certificate_request( self.order_id, order_meta, plugin_meta, self.barbican_meta_dto) self.certclient_mock.enroll_cert.assert_called_once_with( self.approved_profile_id, order_meta) self.assertEqual(cm.CertificateStatus.CLIENT_DATA_ISSUE_SEEN, result_dto.status, "result_dto status incorrect") def test_issue_raises_error_pki_exception(self): order_meta = { dogtag_import.DogtagCAPlugin.PROFILE_ID: self.approved_profile_id} plugin_meta = {} self.certclient_mock.enroll_cert.side_effect = ( pki.PKIException("generic enrollment error")) self.assertRaises( cm.CertificateGeneralException, self.plugin.issue_certificate_request, self.order_id, order_meta, plugin_meta, self.barbican_meta_dto ) def test_issue_return_ca_unavailable(self): order_meta = { dogtag_import.DogtagCAPlugin.PROFILE_ID: self.approved_profile_id} plugin_meta = {} self.certclient_mock.enroll_cert.side_effect = ( request_exceptions.RequestException()) result_dto = self.plugin.issue_certificate_request( self.order_id, order_meta, plugin_meta, self.barbican_meta_dto) self.certclient_mock.enroll_cert.assert_called_once_with( self.approved_profile_id, order_meta) self.assertEqual(cm.CertificateStatus.CA_UNAVAILABLE_FOR_REQUEST, result_dto.status, "result_dto status incorrect") def test_cancel_request(self): order_meta = mock.ANY plugin_meta = {dogtag_import.DogtagCAPlugin.REQUEST_ID: self.request_id_mock} self.certclient_mock.cancel_request.return_value = None self.certclient_mock.review_request.return_value = self.review_response result_dto = self.plugin.cancel_certificate_request( self.order_id, order_meta, plugin_meta, self.barbican_meta_dto) self.certclient_mock.cancel_request.assert_called_once_with( self.request_id_mock, self.review_response) 
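        # The cancel flow this test pins down is two-step against the
        # Dogtag cert client -- roughly (sketch of the call order only):
        #
        #     review = certclient.review_request(request_id)
        #     certclient.cancel_request(request_id, review)
        #
        # so the review response captured above must be forwarded to
        # cancel_request().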
self.assertEqual(cm.CertificateStatus.REQUEST_CANCELED, result_dto.status, "result_dto_status incorrect") def test_cancel_no_request_found(self): order_meta = mock.ANY plugin_meta = {dogtag_import.DogtagCAPlugin.REQUEST_ID: self.request_id_mock} self.certclient_mock.review_request.side_effect = ( pki.RequestNotFoundException("request_not_found")) result_dto = self.plugin.cancel_certificate_request( self.order_id, order_meta, plugin_meta, self.barbican_meta_dto) self.certclient_mock.review_request.assert_called_once_with( self.request_id_mock) self.assertEqual(cm.CertificateStatus.CLIENT_DATA_ISSUE_SEEN, result_dto.status, "result_dto_status incorrect") def test_cancel_conflicting_operation(self): order_meta = mock.ANY plugin_meta = {dogtag_import.DogtagCAPlugin.REQUEST_ID: self.request_id_mock} self.certclient_mock.review_request.return_value = self.review_response self.certclient_mock.cancel_request.side_effect = ( pki.ConflictingOperationException("conflicting_operation")) result_dto = self.plugin.cancel_certificate_request( self.order_id, order_meta, plugin_meta, self.barbican_meta_dto) self.certclient_mock.cancel_request.assert_called_once_with( self.request_id_mock, self.review_response) self.assertEqual(cm.CertificateStatus.INVALID_OPERATION, result_dto.status, "result_dto_status incorrect") def test_cancel_ca_unavailable(self): order_meta = mock.ANY plugin_meta = {dogtag_import.DogtagCAPlugin.REQUEST_ID: self.request_id_mock} self.certclient_mock.review_request.side_effect = ( request_exceptions.RequestException("request_exception")) result_dto = self.plugin.cancel_certificate_request( self.order_id, order_meta, plugin_meta, self.barbican_meta_dto) self.assertEqual(cm.CertificateStatus.CA_UNAVAILABLE_FOR_REQUEST, result_dto.status, "result_dto_status incorrect") def test_cancel_raise_error_no_request_id(self): order_meta = mock.ANY plugin_meta = {} self.assertRaises( cm.CertificateGeneralException, self.plugin.cancel_certificate_request, self.order_id, order_meta, plugin_meta, self.barbican_meta_dto ) def test_check_status(self): order_meta = mock.ANY plugin_meta = {dogtag_import.DogtagCAPlugin.REQUEST_ID: self.request_id_mock} self.certclient_mock.get_request.return_value = self.request self.certclient_mock.get_cert.return_value = self.cert result_dto = self.plugin.check_certificate_status( self.order_id, order_meta, plugin_meta, self.barbican_meta_dto) self.certclient_mock.get_request.assert_called_once_with( self.request_id_mock) self.certclient_mock.get_cert.assert_called_once_with( self.cert_id_mock) self.assertEqual(cm.CertificateStatus.CERTIFICATE_GENERATED, result_dto.status, "result_dto_status incorrect") self.assertEqual(keys.get_certificate_pem(), result_dto.certificate) def test_check_status_raise_error_no_request_id(self): order_meta = mock.ANY plugin_meta = {} self.assertRaises( cm.CertificateGeneralException, self.plugin.check_certificate_status, self.order_id, order_meta, plugin_meta, self.barbican_meta_dto ) def test_check_status_rejected(self): order_meta = mock.ANY plugin_meta = {dogtag_import.DogtagCAPlugin.REQUEST_ID: self.request_id_mock} self.request.request_status = dogtag_cert.CertRequestStatus.REJECTED self.certclient_mock.get_request.return_value = self.request result_dto = self.plugin.check_certificate_status( self.order_id, order_meta, plugin_meta, self.barbican_meta_dto) self.certclient_mock.get_request.assert_called_once_with( self.request_id_mock) self.assertEqual(cm.CertificateStatus.CLIENT_DATA_ISSUE_SEEN, result_dto.status, "result_dto_status 
incorrect") self.assertIsNone(result_dto.certificate) def test_check_status_canceled(self): order_meta = mock.ANY plugin_meta = {dogtag_import.DogtagCAPlugin.REQUEST_ID: self.request_id_mock} self.request.request_status = dogtag_cert.CertRequestStatus.CANCELED self.certclient_mock.get_request.return_value = self.request result_dto = self.plugin.check_certificate_status( self.order_id, order_meta, plugin_meta, self.barbican_meta_dto) self.certclient_mock.get_request.assert_called_once_with( self.request_id_mock) self.assertEqual(cm.CertificateStatus.REQUEST_CANCELED, result_dto.status, "result_dto_status incorrect") self.assertIsNone(result_dto.certificate) def test_check_status_pending(self): order_meta = mock.ANY plugin_meta = {dogtag_import.DogtagCAPlugin.REQUEST_ID: self.request_id_mock} self.request.request_status = dogtag_cert.CertRequestStatus.PENDING self.certclient_mock.get_request.return_value = self.request result_dto = self.plugin.check_certificate_status( self.order_id, order_meta, plugin_meta, self.barbican_meta_dto) self.certclient_mock.get_request.assert_called_once_with( self.request_id_mock) self.assertEqual(cm.CertificateStatus.WAITING_FOR_CA, result_dto.status, "result_dto_status incorrect") self.assertIsNone(result_dto.certificate) def test_check_status_raises_error_complete_no_cert(self): order_meta = mock.ANY plugin_meta = {dogtag_import.DogtagCAPlugin.REQUEST_ID: self.request_id_mock} self.certclient_mock.get_request.return_value = self.request self.certclient_mock.get_cert.return_value = None self.assertRaises( cm.CertificateGeneralException, self.plugin.check_certificate_status, self.order_id, order_meta, plugin_meta, self.barbican_meta_dto ) def test_modify_request(self): order_meta = { cm.REQUEST_TYPE: cm.CertificateRequestType.SIMPLE_CMC_REQUEST, 'request_data': base64.b64encode(keys.get_csr_pem()) } plugin_meta = {dogtag_import.DogtagCAPlugin.REQUEST_ID: self.request_id_mock} self._process_non_approved_profile_request( order_meta, plugin_meta, self.cfg.dogtag_plugin.simple_cmc_profile) self.certclient_mock.cancel_request.return_value = None self.certclient_mock.review_request.return_value = self.review_response result_dto = self.plugin.modify_certificate_request( self.order_id, order_meta, plugin_meta, self.barbican_meta_dto) self.certclient_mock.cancel_request.assert_called_once_with( self.request_id_mock, self.review_response) self.assertEqual(cm.CertificateStatus.WAITING_FOR_CA, result_dto.status, "result_dto_status incorrect") def test_modify_no_request_found(self): order_meta = mock.ANY plugin_meta = {dogtag_import.DogtagCAPlugin.REQUEST_ID: self.request_id_mock} self.certclient_mock.review_request.side_effect = ( pki.RequestNotFoundException("request_not_found")) result_dto = self.plugin.modify_certificate_request( self.order_id, order_meta, plugin_meta, self.barbican_meta_dto) self.certclient_mock.review_request.assert_called_once_with( self.request_id_mock) self.assertEqual(cm.CertificateStatus.CLIENT_DATA_ISSUE_SEEN, result_dto.status, "result_dto_status incorrect") def test_modify_conflicting_operation(self): order_meta = mock.ANY plugin_meta = {dogtag_import.DogtagCAPlugin.REQUEST_ID: self.request_id_mock} self.certclient_mock.review_request.return_value = self.review_response self.certclient_mock.cancel_request.side_effect = ( pki.ConflictingOperationException("conflicting_operation")) result_dto = self.plugin.modify_certificate_request( self.order_id, order_meta, plugin_meta, self.barbican_meta_dto) 
self.certclient_mock.cancel_request.assert_called_once_with( self.request_id_mock, self.review_response) self.assertEqual(cm.CertificateStatus.INVALID_OPERATION, result_dto.status, "result_dto_status incorrect") def test_modify_ca_unavailable(self): order_meta = mock.ANY plugin_meta = {dogtag_import.DogtagCAPlugin.REQUEST_ID: self.request_id_mock} self.certclient_mock.review_request.side_effect = ( request_exceptions.RequestException("request_exception")) result_dto = self.plugin.modify_certificate_request( self.order_id, order_meta, plugin_meta, self.barbican_meta_dto) self.assertEqual(cm.CertificateStatus.CA_UNAVAILABLE_FOR_REQUEST, result_dto.status, "result_dto_status incorrect") def test_modify_raise_error_no_request_id(self): order_meta = mock.ANY plugin_meta = {} self.assertRaises( cm.CertificateGeneralException, self.plugin.modify_certificate_request, self.order_id, order_meta, plugin_meta, self.barbican_meta_dto ) barbican-9.1.0.dev50/barbican/tests/plugin/__init__.py0000664000175000017500000000000013616500636022716 0ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/tests/test_hacking.py0000664000175000017500000002311613616500636022341 0ustar sahidsahid00000000000000# Copyright 2016 GohighSec # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import textwrap import ddt import mock import pep8 from barbican.hacking import checks from barbican.tests import utils @ddt.ddt class HackingTestCase(utils.BaseTestCase): """Hacking test cases This class tests the hacking checks in barbican.hacking.checks by passing strings to the check methods like the pep8/flake8 parser would. The parser loops over each line in the file and then passes the parameters to the check method. The parameter names in the check method dictate what type of object is passed to the check method. The parameter types are:: logical_line: A processed line with the following modifications: - Multi-line statements converted to a single line. - Stripped left and right. - Contents of strings replaced with "xxx" of same length. - Comments removed. physical_line: Raw line of text from the input file. lines: a list of the raw lines from the input file tokens: the tokens that contribute to this logical line line_number: line number in the input file total_lines: number of lines in the input file blank_lines: blank lines before this one indent_char: indentation character in this file (" " or "\t") indent_level: indentation (with tabs expanded to multiples of 8) previous_indent_level: indentation on previous line previous_logical: previous logical line filename: Path of the file being run through pep8 When running a test on a check method the return will be False/None if there is no violation in the sample input. If there is an error a tuple is returned with a position in the line, and a message. So to check the result just assertTrue if the check is expected to fail and assertFalse if it should pass. """ # We are patching pep8 so that only the check under test is actually # installed. 
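    # Illustrative sketch only (not one of barbican.hacking.checks): a
    # logical-line check is a plain function whose parameter names tell
    # pep8 what to pass in, and which yields (offset, message) pairs:
    #
    #     def check_no_print(logical_line):
    #         """B999 - hypothetical check flagging print() calls."""
    #         pos = logical_line.find('print(')
    #         if pos != -1:
    #             yield pos, 'B999: use oslo_log instead of print()'
    #
    # _run_check() registers exactly one such checker with
    # pep8.register_check() and runs it over a dedented code sample,
    # returning the report's deferred errors.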
@mock.patch('pep8._checks', {'physical_line': {}, 'logical_line': {}, 'tree': {}}) def _run_check(self, code, checker, filename=None): pep8.register_check(checker) lines = textwrap.dedent(code).strip().splitlines(True) checker = pep8.Checker(filename=filename, lines=lines) checker.check_all() checker.report._deferred_print.sort() return checker.report._deferred_print def _assert_has_errors(self, code, checker, expected_errors=None, filename=None): actual_errors = [e[:3] for e in self._run_check(code, checker, filename)] self.assertEqual(expected_errors or [], actual_errors) def _assert_has_no_errors(self, code, checker, filename=None): self._assert_has_errors(code, checker, filename=filename) def test_logging_format_no_tuple_arguments(self): checker = checks.CheckLoggingFormatArgs code = """ import logging LOG = logging.getLogger() LOG.info("Message without a second argument.") LOG.critical("Message with %s arguments.", 'two') LOG.debug("Volume %s caught fire and is at %d degrees C and" " climbing.", 'volume1', 500) """ self._assert_has_no_errors(code, checker) @ddt.data(*checks.CheckLoggingFormatArgs.LOG_METHODS) def test_logging_with_tuple_argument(self, log_method): checker = checks.CheckLoggingFormatArgs code = """ import logging LOG = logging.getLogger() LOG.{0}("Volume %s caught fire and is at %d degrees C and " "climbing.", ('volume1', 500)) """ self._assert_has_errors(code.format(log_method), checker, expected_errors=[(4, 21, 'B310')]) def test_str_on_exception(self): checker = checks.CheckForStrUnicodeExc code = """ def f(a, b): try: p = str(a) + str(b) except ValueError as e: p = str(e) return p """ errors = [(5, 16, 'B314')] self._assert_has_errors(code, checker, expected_errors=errors) def test_no_str_unicode_on_exception(self): checker = checks.CheckForStrUnicodeExc code = """ def f(a, b): try: p = unicode(a) + str(b) except ValueError as e: p = e return p """ self._assert_has_no_errors(code, checker) def test_unicode_on_exception(self): checker = checks.CheckForStrUnicodeExc code = """ def f(a, b): try: p = str(a) + str(b) except ValueError as e: p = unicode(e) return p """ errors = [(5, 20, 'B314')] self._assert_has_errors(code, checker, expected_errors=errors) def test_str_on_multiple_exceptions(self): checker = checks.CheckForStrUnicodeExc code = """ def f(a, b): try: p = str(a) + str(b) except ValueError as e: try: p = unicode(a) + unicode(b) except ValueError as ve: p = str(e) + str(ve) p = e return p """ errors = [(8, 20, 'B314'), (8, 29, 'B314')] self._assert_has_errors(code, checker, expected_errors=errors) def test_str_unicode_on_multiple_exceptions(self): checker = checks.CheckForStrUnicodeExc code = """ def f(a, b): try: p = str(a) + str(b) except ValueError as e: try: p = unicode(a) + unicode(b) except ValueError as ve: p = str(e) + unicode(ve) p = str(e) return p """ errors = [(8, 20, 'B314'), (8, 33, 'B314'), (9, 16, 'B314')] self._assert_has_errors(code, checker, expected_errors=errors) def test_dict_constructor_with_list_copy(self): self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy( " dict([(i, connect_info[i])")))) self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy( " attrs = dict([(k, _from_json(v))")))) self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy( " type_names = dict((value, key) for key, value in")))) self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy( " dict((value, key) for key, value in")))) self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy( "foo(param=dict((k, 
v) for k, v in bar.items()))")))) self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy( " dict([[i,i] for i in range(3)])")))) self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy( " dd = dict([i,i] for i in range(3))")))) self.assertEqual(0, len(list(checks.dict_constructor_with_list_copy( " create_kwargs = dict(snapshot=snapshot,")))) self.assertEqual(0, len(list(checks.dict_constructor_with_list_copy( " self._render_dict(xml, data_el, data.__dict__)")))) def test_no_xrange(self): self.assertEqual(1, len(list(checks.no_xrange("xrange(45)")))) self.assertEqual(0, len(list(checks.no_xrange("range(45)")))) def test_validate_assertTrue(self): test_value = True self.assertEqual(0, len(list(checks.validate_assertTrue( "assertTrue(True)")))) self.assertEqual(1, len(list(checks.validate_assertTrue( "assertEqual(True, %s)" % test_value)))) def test_validate_assertIsNone(self): test_value = None self.assertEqual(0, len(list(checks.validate_assertIsNone( "assertIsNone(None)")))) self.assertEqual(1, len(list(checks.validate_assertIsNone( "assertEqual(None, %s)" % test_value)))) def test_validate_assertIsNotNone(self): test_value = None self.assertEqual(0, len(list(checks.validate_assertIsNotNone( "assertIsNotNone(NotNone)")))) self.assertEqual(1, len(list(checks.validate_assertIsNotNone( "assertNotEqual(None, %s)" % test_value)))) self.assertEqual(1, len(list(checks.validate_assertIsNotNone( "assertIsNot(None, %s)" % test_value)))) def test_no_log_warn_check(self): self.assertEqual(0, len(list(checks.no_log_warn_check( "LOG.warning('This should not trigger LOG.warn" "hacking check.')")))) self.assertEqual(1, len(list(checks.no_log_warn_check( "LOG.warn('We should not use LOG.warn')")))) barbican-9.1.0.dev50/barbican/tests/__init__.py0000664000175000017500000000000013616500636021420 0ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/tests/certificate_utils.py0000664000175000017500000000526713616500636023407 0ustar sahidsahid00000000000000# Copyright (c) 2015 Cisco Systems # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ The following functions were created for testing purposes. 
""" from OpenSSL import crypto def create_key_pair(type, bits): key_pair = crypto.PKey() key_pair.generate_key(type, bits) return key_pair def get_valid_csr_object(): """Create a valid X509Req object""" key_pair = create_key_pair(crypto.TYPE_RSA, 2048) csr = crypto.X509Req() subject = csr.get_subject() setattr(subject, "CN", "host.example.net") csr.set_pubkey(key_pair) csr.sign(key_pair, "sha256") return csr def create_good_csr(): """Generate a CSR that will pass validation.""" csr = get_valid_csr_object() pem = crypto.dump_certificate_request(crypto.FILETYPE_PEM, csr) return pem def create_csr_that_has_not_been_signed(): """Generate a CSR that has not been signed.""" key_pair = create_key_pair(crypto.TYPE_RSA, 2048) csr = crypto.X509Req() subject = csr.get_subject() setattr(subject, "CN", "host.example.net") csr.set_pubkey(key_pair) pem = crypto.dump_certificate_request(crypto.FILETYPE_PEM, csr) return pem def create_csr_signed_with_wrong_key(): """Generate a CSR that has been signed by the wrong key.""" key_pair1 = create_key_pair(crypto.TYPE_RSA, 2048) key_pair2 = create_key_pair(crypto.TYPE_RSA, 2048) csr = crypto.X509Req() subject = csr.get_subject() setattr(subject, "CN", "host.example.net") # set public key from key pair 1 csr.set_pubkey(key_pair1) # sign with public key from key pair 2 csr.sign(key_pair2, "sha256") pem = crypto.dump_certificate_request(crypto.FILETYPE_PEM, csr) return pem def create_bad_csr(): """Generate a CSR that will not parse.""" return b"Bad PKCS10 Data" def create_csr_with_bad_subject_dn(): """Generate a CSR that has a bad subject dn.""" key_pair = create_key_pair(crypto.TYPE_RSA, 2048) csr = crypto.X509Req() subject = csr.get_subject() # server certs require attribute 'CN' setattr(subject, "UID", "bar") csr.set_pubkey(key_pair) csr.sign(key_pair, "sha256") pem = crypto.dump_certificate_request(crypto.FILETYPE_PEM, csr) return pem barbican-9.1.0.dev50/barbican/tests/cmd/0000775000175000017500000000000013616500640020057 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/tests/cmd/test-status.py0000664000175000017500000000201413616500636022733 0ustar sahidsahid00000000000000# Copyright (c) 2018 NEC, Corp. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_upgradecheck.upgradecheck import Code from barbican.cmd import status from barbican.tests import utils class TestUpgradeChecks(utils.BaseTestCase): def setUp(self): super(TestUpgradeChecks, self).setUp() self.cmd = status.Checks() def test__check_placeholder(self): check_result = self.cmd._check_placeholder() self.assertEqual( Code.SUCCESS, check_result.code) barbican-9.1.0.dev50/barbican/tests/cmd/test_cmd.py0000664000175000017500000000557413616500636022253 0ustar sahidsahid00000000000000# Copyright (c) 2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import mock import sys from barbican.cmd import retry_scheduler from barbican.cmd import worker from barbican.tests import database_utils from barbican.tests.queue import test_keystone_listener from barbican.tests import utils class WhenInvokingRetryServiceCommand(utils.BaseTestCase): """Test the retry scheduler functionality.""" def setUp(self): super(WhenInvokingRetryServiceCommand, self).setUp() sys.argv = ['barbican-retry'] @mock.patch('barbican.common.config') @mock.patch('barbican.queue.init') @mock.patch('oslo_service.service.launch') @mock.patch('barbican.queue.retry_scheduler.PeriodicServer') def test_should_launch_service( self, mock_periodic_server, mock_service_launch, mock_queue_init, mock_config): retry_scheduler.main() self.assertEqual(1, mock_queue_init.call_count) self.assertEqual(1, mock_service_launch.call_count) self.assertEqual(1, mock_periodic_server.call_count) @mock.patch('oslo_log.log.setup') @mock.patch('sys.exit') def test_should_fail_run_command( self, mock_sys_exit, mock_log_setup): mock_log_setup.side_effect = RuntimeError() retry_scheduler.main() self.assertEqual(1, mock_sys_exit.call_count) class WhenInvokingWorkerCommand(test_keystone_listener.UtilMixin, utils.BaseTestCase): """Test the asynchronous worker functionality.""" def setUp(self): super(WhenInvokingWorkerCommand, self).setUp() database_utils.setup_in_memory_db() sys.argv = ['barbican-worker'] @mock.patch('barbican.queue.init') @mock.patch('barbican.queue.get_server') @mock.patch('oslo_service.service.launch') def test_should_launch_service( self, mock_service_launch, mock_queue_task_server, mock_queue_init): self.opt_in_group('queue', asynchronous_workers=3) worker.main() self.assertEqual(1, mock_queue_init.call_count) self.assertEqual(1, mock_service_launch.call_count) # check keyword argument for number of worker matches workers_kwarg = {'restart_method': 'mutate', 'workers': 3} self.assertEqual(workers_kwarg, mock_service_launch.call_args[1]) barbican-9.1.0.dev50/barbican/tests/cmd/test_barbican_manage.py0000664000175000017500000002452213616500636024553 0ustar sahidsahid00000000000000# Copyright (c) 2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
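# Orientation note (sketch derived from the argv lists in the tests
# below, not an exhaustive CLI reference): barbican-manage is exercised
# here in two command families, e.g.
#
#     barbican-manage db upgrade --db-url <url>
#     barbican-manage db clean --min-days 180 --verbose
#     barbican-manage hsm gen_mkek --library-path <lib> --label <label>
#
# Each test monkeypatches sys.argv and asserts that the expected
# library-level function is called with the mapped arguments.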
import fixtures import mock from barbican.cmd import barbican_manage as manager from barbican.tests import utils class TestBarbicanManageBase(utils.BaseTestCase): def setUp(self): super(TestBarbicanManageBase, self).setUp() def clear_conf(): manager.CONF.reset() manager.CONF.unregister_opt(manager.category_opt) clear_conf() self.addCleanup(clear_conf) self.useFixture(fixtures.MonkeyPatch( 'oslo_log.log.setup', lambda barbican_test, version='test': None)) manager.CONF.set_override('sql_connection', 'mockdburl') def _main_test_helper(self, argv, func_name=None, *exp_args, **exp_kwargs): self.useFixture(fixtures.MonkeyPatch('sys.argv', argv)) manager.main() func_name.assert_called_once_with(*exp_args, **exp_kwargs) class TestBarbicanManage(TestBarbicanManageBase): """Test barbican-manage functionality.""" @mock.patch('barbican.model.migration.commands.generate') def test_db_revision(self, mock_generate): self._main_test_helper( ['barbican.cmd.barbican_manage', 'db', 'revision', '--db-url', 'mockdb', '--message', 'mockmsg'], mock_generate, autogenerate=False, message='mockmsg', sql_url='mockdb') @mock.patch('barbican.model.migration.commands.generate') def test_db_revision_autogenerate(self, mock_generate): self._main_test_helper( ['barbican.cmd.barbican_manage', 'db', 'revision', '--db-url', 'mockdb', '--message', 'mockmsg', '--autogenerate'], mock_generate, autogenerate=True, message='mockmsg', sql_url='mockdb') @mock.patch('barbican.model.migration.commands.generate') def test_db_revision_no_dburl(self, mock_generate): self._main_test_helper( ['barbican.cmd.barbican_manage', 'db', 'revision', '--message', 'mockmsg'], mock_generate, autogenerate=False, message='mockmsg', sql_url='mockdburl') @mock.patch('barbican.model.migration.commands.upgrade') def test_db_upgrade(self, mock_upgrade): self._main_test_helper( ['barbican.cmd.barbican_manage', 'db', 'upgrade', '--db-url', 'mockdb'], mock_upgrade, to_version='head', sql_url='mockdb') @mock.patch('barbican.model.migration.commands.upgrade') def test_db_upgrade_no_dburl(self, mock_upgrade): self._main_test_helper( ['barbican.cmd.barbican_manage', 'db', 'upgrade'], mock_upgrade, to_version='head', sql_url='mockdburl') @mock.patch('barbican.model.migration.commands.history') def test_db_history(self, mock_history): self._main_test_helper( ['barbican.cmd.barbican_manage', 'db', 'history', '--db-url', 'mockdb'], mock_history, False, sql_url='mockdb') @mock.patch('barbican.model.migration.commands.history') def test_db_history_no_dburl(self, mock_history): self._main_test_helper( ['barbican.cmd.barbican_manage', 'db', 'history'], mock_history, False, sql_url='mockdburl') @mock.patch('barbican.model.clean.clean_command') def test_db_clean_no_args(self, mock_clean_command): manager.CONF.set_override('log_file', 'mock_log_file') self._main_test_helper( ['barbican.cmd.barbican_manage', 'db', 'clean'], func_name=mock_clean_command, sql_url='mockdburl', min_num_days=90, do_clean_unassociated_projects=False, do_soft_delete_expired_secrets=False, verbose=False, log_file='mock_log_file') manager.CONF.clear_override('log_file') @mock.patch('barbican.model.clean.clean_command') def test_db_clean_with_args(self, mock_clean_command): manager.CONF.set_override('log_file', 'mock_log_file') self._main_test_helper( ['barbican.cmd.barbican_manage', 'db', 'clean', '--db-url', 'somewhere', '--min-days', '180', '--clean-unassociated-projects', '--soft-delete-expired-secrets', '--verbose', '--log-file', '/tmp/whatevs'], func_name=mock_clean_command, sql_url='somewhere', 
min_num_days=180, do_clean_unassociated_projects=True, do_soft_delete_expired_secrets=True, verbose=True, log_file='/tmp/whatevs') manager.CONF.clear_override('log_file') @mock.patch('barbican.model.sync.sync_secret_stores') def test_db_sync_secret_stores_no_args(self, mock_sync_command): manager.CONF.set_override('log_file', 'mock_log_file') self._main_test_helper( ['barbican.cmd.barbican_manage', 'db', 'sync_secret_stores'], func_name=mock_sync_command, sql_url='mockdburl', verbose=False, log_file='mock_log_file') manager.CONF.clear_override('log_file') @mock.patch('barbican.model.sync.sync_secret_stores') def test_db_sync_secret_stores_with_args(self, mock_sync_command): manager.CONF.set_override('log_file', 'mock_log_file') self._main_test_helper( ['barbican.cmd.barbican_manage', 'db', 'sync_secret_stores', '--db-url', 'somewhere', '--verbose', '--log-file', '/tmp/whatevs'], func_name=mock_sync_command, sql_url='somewhere', verbose=True, log_file='/tmp/whatevs') manager.CONF.clear_override('log_file') @mock.patch('barbican.model.migration.commands.current') def test_db_current(self, mock_current): self._main_test_helper( ['barbican.cmd.barbican_manage', 'db', 'current', '--db-url', 'mockdb'], mock_current, False, sql_url='mockdb') @mock.patch('barbican.model.migration.commands.current') def test_db_current_no_dburl(self, mock_current): self._main_test_helper( ['barbican.cmd.barbican_manage', 'db', 'current'], mock_current, False, sql_url='mockdburl') @mock.patch('barbican.plugin.crypto.pkcs11.PKCS11') def test_hsm_check_mkek(self, mock_pkcs11): mock_pkcs11.return_value.get_session.return_value = 1 mock_pkcs11.return_value.get_key_handle.return_value = 1 mock_getkey = mock_pkcs11.return_value.get_key_handle self._main_test_helper( ['barbican.cmd.barbican_manage', 'hsm', 'check_mkek', '--library-path', 'mocklib', '--passphrase', 'mockpassewd', '--label', 'mocklabel'], mock_getkey, 'CKK_AES', 'mocklabel', 1) @mock.patch('barbican.plugin.crypto.pkcs11.PKCS11') def test_hsm_gen_mkek(self, mock_pkcs11): mock_pkcs11.return_value.get_session.return_value = 1 mock_pkcs11.return_value.get_key_handle.return_value = None mock_pkcs11.return_value.generate_key.return_value = 0 mock_genkey = mock_pkcs11.return_value.generate_key self._main_test_helper( ['barbican.cmd.barbican_manage', 'hsm', 'gen_mkek', '--library-path', 'mocklib', '--passphrase', 'mockpassewd', '--label', 'mocklabel'], mock_genkey, 'CKK_AES', 32, 'CKM_AES_KEY_GEN', 1, 'mocklabel', encrypt=True, wrap=True, master_key=True) @mock.patch('barbican.plugin.crypto.pkcs11.PKCS11') def test_hsm_gen_hmac(self, mock_pkcs11): mock_pkcs11.return_value.get_session.return_value = 1 mock_pkcs11.return_value.get_key_handle.return_value = None mock_pkcs11.return_value.generate_key.return_value = 0 mock_genkey = mock_pkcs11.return_value.generate_key self._main_test_helper( ['barbican.cmd.barbican_manage', 'hsm', 'gen_hmac', '--library-path', 'mocklib', '--passphrase', 'mockpassewd', '--label', 'mocklabel'], mock_genkey, 'CKK_AES', 32, 'CKM_AES_KEY_GEN', 1, 'mocklabel', sign=True, master_key=True) @mock.patch('barbican.plugin.crypto.pkcs11.PKCS11') def test_hsm_gen_mkek_non_default_length(self, mock_pkcs11): mock_pkcs11.return_value.get_session.return_value = 1 mock_pkcs11.return_value.get_key_handle.return_value = None mock_pkcs11.return_value.generate_key.return_value = 0 mock_genkey = mock_pkcs11.return_value.generate_key self._main_test_helper( ['barbican.cmd.barbican_manage', 'hsm', 'gen_mkek', '--length', '48', '--library-path', 'mocklib', 
'--passphrase', 'mockpassewd', '--label', 'mocklabel'], mock_genkey, 'CKK_AES', 48, 'CKM_AES_KEY_GEN', 1, 'mocklabel', encrypt=True, wrap=True, master_key=True) @mock.patch('barbican.plugin.crypto.pkcs11.PKCS11') def test_hsm_check_hmac(self, mock_pkcs11): mock_pkcs11.return_value.get_session.return_value = 1 mock_pkcs11.return_value.get_key_handle.return_value = 1 mock_getkey = mock_pkcs11.return_value.get_key_handle self._main_test_helper( ['barbican.cmd.barbican_manage', 'hsm', 'check_hmac', '--library-path', 'mocklib', '--passphrase', 'mockpassewd', '--label', 'mocklabel'], mock_getkey, 'CKK_AES', 'mocklabel', 1) @mock.patch('barbican.plugin.crypto.pkcs11.PKCS11') def test_hsm_gen_hmac_non_default_length(self, mock_pkcs11): mock_pkcs11.return_value.get_session.return_value = 1 mock_pkcs11.return_value.get_key_handle.return_value = None mock_pkcs11.return_value.generate_key.return_value = 0 mock_genkey = mock_pkcs11.return_value.generate_key self._main_test_helper( ['barbican.cmd.barbican_manage', 'hsm', 'gen_hmac', '--length', '48', '--library-path', 'mocklib', '--passphrase', 'mockpassewd', '--label', 'mocklabel'], mock_genkey, 'CKK_AES', 48, 'CKM_AES_KEY_GEN', 1, 'mocklabel', sign=True, master_key=True) barbican-9.1.0.dev50/barbican/tests/cmd/__init__.py0000664000175000017500000000000013616500636022163 0ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/tests/cmd/test_db_cleanup.py0000664000175000017500000004460513616500636023602 0ustar sahidsahid00000000000000# Copyright (c) 2016 IBM # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
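# Orientation note (sketch of the helper conventions defined below):
# tests in this module create fixtures through two small helpers --
#
#     @_create_project("some keystone id")   # injects project=<Project>
#     def test_something(self, project):
#         secret = _setup_entry('secret', project=project)
#
# where _setup_entry('secret', ...) dispatches to
# barbican.tests.database_utils.create_secret(...) on the current
# session.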
from barbican.model import clean from barbican.model import models from barbican.model import repositories as repos from barbican.tests import database_utils as utils from oslo_db import exception as db_exc from sqlalchemy.exc import IntegrityError import datetime import mock def _create_project(project_name): """Wrapper to create a project and clean""" def project_decorator(test_func): def project_wrapper(self, *args, **kwargs): project = utils.create_project(external_id=project_name) kwargs['project'] = project test_result = test_func(self, *args, **kwargs) project.delete() return test_result return project_wrapper return project_decorator def _entry_exists(entry): """Check to see if entry should exist in the database""" model = entry.__class__ entry_id = entry.id session = repos.get_session() query = session.query(model).filter(model.id == entry_id) count = query.count() return count >= 1 def _entry_is_soft_deleted(entry): model = entry.__class__ entry_id = entry.id session = repos.get_session() query = session.query(model) result = query.filter(model.id == entry_id).first().deleted return result def _setup_entry(name, *args, **kwargs): func_name = "create_" + name if not hasattr(utils, func_name): raise Exception("Cannot create an entry called %s", name) func = getattr(utils, func_name) kwargs['session'] = repos.get_session() entry = func(*args, **kwargs) return entry class WhenTestingDBCleanUpCommand(utils.RepositoryTestCase): def tearDown(self): super(WhenTestingDBCleanUpCommand, self).tearDown() repos.rollback() @_create_project("my keystone id") def test_soft_deleted_secret_orders(self, project): """Test that secrets without child order get deleted""" # Create a secret tied to an order and one secret that is not secret1 = _setup_entry('secret', project=project) secret2 = _setup_entry('secret', project=project) order = _setup_entry('order', project=project, secret=secret1) # Delete secrets secret1.delete() secret2.delete() clean.cleanup_parent_with_no_child(models.Secret, models.Order) # Assert that only secret2 is removed self.assertTrue(_entry_exists(secret1)) self.assertFalse(_entry_exists(secret2)) # delete order and secret order.delete() clean.cleanup_all() self.assertFalse(_entry_exists(order)) self.assertFalse(_entry_exists(secret2)) def test_cleanup_soft_deletes_transport_keys(self): """Test Cleaning up soft deleted transport keys""" # create transport key transport_key = _setup_entry('transport_key') # delete transport key transport_key.delete() clean.cleanup_all() self.assertFalse(_entry_exists(transport_key)) @_create_project("my keystone id") def test_cleanup_soft_deletes_secrets(self, project): """Test cleaning up secrets and secret_meta""" # create secret and secret_meta secret = _setup_entry('secret', project=project) secret_metadatum = _setup_entry('secret_metadatum', secret=secret) secret_user_metadatum = _setup_entry('secret_user_metadatum', secret=secret) kek_datum = _setup_entry('kek_datum', project=project) enc_datum = _setup_entry('encrypted_datum', secret=secret, kek_datum=kek_datum) # delete secret, it should automatically delete # secret_metadatum, enc_datum, and secret_user_metadatum # kek_datum should still exist secret.delete() clean.cleanup_all() self.assertFalse(_entry_exists(secret)) self.assertFalse(_entry_exists(secret_metadatum)) self.assertFalse(_entry_exists(secret_user_metadatum)) self.assertFalse(_entry_exists(enc_datum)) self.assertTrue(_entry_exists(kek_datum)) @_create_project("my keystone id") def test_cleanup_soft_deletes_containers(self, 
project): """Test cleaning up containers and secrets""" # create container, secret, and container_secret container = _setup_entry('container', project=project) secret = _setup_entry('secret', project=project) container_secret = _setup_entry('container_secret', container=container, secret=secret) # delete container secret and container container.delete() clean.cleanup_all() # check that container secret and container are deleted # but secret still exists self.assertFalse(_entry_exists(container_secret)) self.assertFalse(_entry_exists(container)) self.assertTrue(_entry_exists(secret)) # cleanup secrets secret.delete() clean.cleanup_all() self.assertFalse(_entry_exists(secret)) @_create_project("my keystone id") def test_cleanup_container_with_order_child(self, project): container = _setup_entry('container', project=project) secret = _setup_entry('secret', project=project) secret_container = _setup_entry('container_secret', container=container, secret=secret) order = _setup_entry('order', project=project, secret=secret, container=container) container.delete() clean.cleanup_all() # only the secret_container should be removed from the database # since it is a child of the container self.assertFalse(_entry_exists(secret_container)) self.assertTrue(_entry_exists(secret)) self.assertTrue(_entry_exists(order)) # container should still exist since child order still exists self.assertTrue(_entry_exists(container)) order.delete() clean.cleanup_all() # assert that only the secret exists self.assertFalse(_entry_exists(order)) self.assertFalse(_entry_exists(container)) self.assertTrue(_entry_exists(secret)) secret.delete() clean.cleanup_all() # the secret should now be able to be removed self.assertFalse(_entry_exists(secret)) @_create_project("my clean order keystone id") def test_cleanup_orders(self, project): """Test cleaning up an order and it's children""" # create order, order meta, and plugin meta, and retry task order = _setup_entry('order', project=project) order_barbican_meta_data = _setup_entry('order_meta_datum', order=order) order_plugin_metadata = _setup_entry('order_plugin_metadatum', order=order) order_retry_task = _setup_entry('order_retry', order=order) # soft delete order and retry task, # it should automatically delete the children order.delete() order_retry_task.delete() clean.cleanup_all() # assert everything has been cleaned up self.assertFalse(_entry_exists(order)) self.assertFalse(_entry_exists(order_plugin_metadata)) self.assertFalse(_entry_exists(order_retry_task)) self.assertFalse(_entry_exists(order_barbican_meta_data)) @_create_project("my clean order with child keystone id") def test_cleanup_order_with_child(self, project): """Test cleaning up an order with a child""" # create order and retry task order = _setup_entry('order', project=project) order_retry_task = _setup_entry('order_retry', order=order) # soft delete order and retry task, # it should automatically delete the children order.delete() clean.cleanup_all() # assert that the order was not cleaned due to child self.assertTrue(_entry_exists(order)) self.assertTrue(_entry_exists(order_retry_task)) order_retry_task.delete() clean.cleanup_all() # assert everything has been cleaned up self.assertFalse(_entry_exists(order)) self.assertFalse(_entry_exists(order_retry_task)) @_create_project("my keystone id") def test_cleanup_soft_deletion_date(self, project): """Test cleaning up entries within date""" secret = _setup_entry('secret', project=project) order = order = _setup_entry('order', project=project, secret=secret) 
current_time = datetime.datetime.utcnow() tomorrow = current_time + datetime.timedelta(days=1) yesterday = current_time - datetime.timedelta(days=1) secret.delete() order.delete() # Assert that nothing is deleted due to date clean.cleanup_softdeletes(models.Order, threshold_date=yesterday) clean.cleanup_parent_with_no_child(models.Secret, models.Order, threshold_date=yesterday) self.assertTrue(_entry_exists(secret)) self.assertTrue(_entry_exists(order)) # Assert that everything is deleted due to date clean.cleanup_softdeletes(models.Order, threshold_date=tomorrow) clean.cleanup_parent_with_no_child(models.Secret, models.Order, threshold_date=tomorrow) self.assertFalse(_entry_exists(secret)) self.assertFalse(_entry_exists(order)) @_create_project("my keystone id") def test_soft_deleting_expired_secrets(self, project): """Test soft deleting secrets that are expired""" current_time = datetime.datetime.utcnow() tomorrow = current_time + datetime.timedelta(days=1) yesterday = current_time - datetime.timedelta(days=1) not_expired_secret = _setup_entry('secret', project=project) expired_secret = _setup_entry('secret', project=project) not_expired_secret.expiration = tomorrow expired_secret.expiration = yesterday # Create children for expired secret expired_secret_store_metadatum = _setup_entry('secret_metadatum', secret=expired_secret) expired_secret_user_metadatum = _setup_entry('secret_user_metadatum', secret=expired_secret) kek_datum = _setup_entry('kek_datum', project=project) expired_enc_datum = _setup_entry('encrypted_datum', secret=expired_secret, kek_datum=kek_datum) container = _setup_entry('container', project=project) expired_container_secret = _setup_entry('container_secret', container=container, secret=expired_secret) expired_acl_secret = _setup_entry('acl_secret', secret=expired_secret, user_ids=["fern", "chris"]) clean.soft_delete_expired_secrets(current_time) self.assertTrue(_entry_is_soft_deleted(expired_secret)) self.assertFalse(_entry_is_soft_deleted(not_expired_secret)) # Make sure the children of the expired secret are soft deleted as well self.assertTrue(_entry_is_soft_deleted(expired_enc_datum)) self.assertTrue(_entry_is_soft_deleted(expired_container_secret)) self.assertTrue(_entry_is_soft_deleted(expired_secret_store_metadatum)) self.assertTrue(_entry_is_soft_deleted(expired_secret_user_metadatum)) self.assertFalse(_entry_exists(expired_acl_secret)) def test_cleaning_unassociated_projects(self): """Test cleaning projects that have no child entries""" childless_project = _setup_entry('project', external_id="childless project") project_with_children = _setup_entry( 'project', external_id="project with children") project_children_list = list() project_children_list.append( _setup_entry('kek_datum', project=project_with_children)) project_children_list.append( _setup_entry('secret', project=project_with_children)) container = _setup_entry('container', project=project_with_children) project_children_list.append(container) project_children_list.append( _setup_entry('container_consumer_meta', container=container)) cert_authority = _setup_entry('certificate_authority', project=project_with_children) project_children_list.append(cert_authority) project_children_list.append( _setup_entry('preferred_cert_authority', cert_authority=cert_authority)) project_children_list.append( _setup_entry('project_cert_authority', certificate_authority=cert_authority)) project_children_list.append(_setup_entry('project_quotas', project=project_with_children)) clean.cleanup_unassociated_projects() 
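        # (An "unassociated" project is one with no surviving child rows:
        # childless_project qualifies immediately, while
        # project_with_children stays pinned by its kek/secret/container/
        # CA/quota children until each child is deleted and cleaned up
        # below.)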
self.assertTrue(_entry_exists(project_with_children)) self.assertFalse(_entry_exists(childless_project)) container.delete() for child in project_children_list: child.delete() clean.cleanup_all() clean.cleanup_unassociated_projects() self.assertFalse(_entry_exists(project_with_children)) @mock.patch('barbican.model.clean.cleanup_all') @mock.patch('barbican.model.clean.soft_delete_expired_secrets') @mock.patch('barbican.model.clean.cleanup_unassociated_projects') @mock.patch('barbican.model.clean.repo') @mock.patch('barbican.model.clean.log') @mock.patch('barbican.model.clean.CONF') def test_clean_up_command(self, mock_conf, mock_log, mock_repo, mock_clean_unc_projects, mock_soft_del_expire_secrets, mock_clean_all): """Tests the clean command""" test_sql_url = "mysql+pymysql://notrealuser:datab@127.0.0.1/barbican't" min_num_days = 91 do_clean_unassociated_projects = True do_soft_delete_expired_secrets = True verbose = True test_log_file = "/tmp/sometempfile" clean.clean_command(test_sql_url, min_num_days, do_clean_unassociated_projects, do_soft_delete_expired_secrets, verbose, test_log_file) set_calls = [mock.call('debug', True), mock.call('log_file', test_log_file), mock.call('sql_connection', test_sql_url)] mock_conf.set_override.assert_has_calls(set_calls) clear_calls = [mock.call('debug'), mock.call('log_file'), mock.call('sql_connection')] mock_conf.clear_override.assert_has_calls(clear_calls) self.assertTrue(mock_repo.setup_database_engine_and_factory.called) self.assertTrue(mock_repo.commit.called) self.assertTrue(mock_repo.clear.called) self.assertTrue(mock_clean_unc_projects.called) self.assertTrue(mock_soft_del_expire_secrets) self.assertTrue(mock_clean_all) @mock.patch('barbican.model.clean.cleanup_all') @mock.patch('barbican.model.clean.soft_delete_expired_secrets') @mock.patch('barbican.model.clean.cleanup_unassociated_projects') @mock.patch('barbican.model.clean.repo') @mock.patch('barbican.model.clean.log') @mock.patch('barbican.model.clean.CONF') def test_clean_up_command_with_false_args( self, mock_conf, mock_log, mock_repo, mock_clean_unc_projects, mock_soft_del_expire_secrets, mock_clean_all): """Tests the clean command with false args""" test_sql_url = None min_num_days = -1 do_clean_unassociated_projects = False do_soft_delete_expired_secrets = False verbose = None test_log_file = None clean.clean_command(test_sql_url, min_num_days, do_clean_unassociated_projects, do_soft_delete_expired_secrets, verbose, test_log_file) mock_conf.set_override.assert_not_called() mock_conf.clear_override.assert_not_called() self.assertTrue(mock_repo.setup_database_engine_and_factory.called) self.assertTrue(mock_repo.commit.called) self.assertTrue(mock_repo.clear.called) self.assertTrue(mock_clean_all) self.assertFalse(mock_clean_unc_projects.called) self.assertFalse(mock_soft_del_expire_secrets.called) @mock.patch('barbican.model.clean.cleanup_all', side_effect=IntegrityError("", "", "", "")) @mock.patch('barbican.model.clean.repo') @mock.patch('barbican.model.clean.log') @mock.patch('barbican.model.clean.CONF') def test_clean_up_command_with_exception( self, mock_conf, mock_log, mock_repo, mock_clean_all): """Tests that the clean command throws exceptions""" args = ("sql", 2, False, False, False, "/tmp/nope") self.assertRaises(IntegrityError, clean.clean_command, *args) self.assertTrue(mock_repo.rollback.called) @_create_project("my integrity error keystone id") def test_db_cleanup_raise_integrity_error(self, project): """Test that an integrity error is thrown This test tests the invalid 
scenario where the secret meta was not marked for deletion during the secret deletion. We want to make sure an integrity error is thrown during clean up. """ # create secret secret = _setup_entry('secret', project=project) secret_metadatum = _setup_entry('secret_metadatum', secret=secret) # delete parent but not child and assert integrity error secret.deleted = True secret_metadatum.deleted = False self.assertRaises(db_exc.DBReferenceError, clean.cleanup_all) barbican-9.1.0.dev50/barbican/__init__.py0000664000175000017500000000000013616500636020256 0ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/locale/0000775000175000017500000000000013616500640017411 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/locale/zh_CN/0000775000175000017500000000000013616500640020412 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/locale/zh_CN/LC_MESSAGES/0000775000175000017500000000000013616500640022177 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/locale/zh_CN/LC_MESSAGES/barbican.po0000664000175000017500000010337713616500636024320 0ustar sahidsahid00000000000000# Translations template for barbican. # Copyright (C) 2015 ORGANIZATION # This file is distributed under the same license as the barbican project. # # Translators: # DuanXin <1145833162@qq.com>, 2015 # OpenStack Infra , 2015. #zanata # Andreas Jaeger , 2016. #zanata # Jeremy Liu , 2016. #zanata msgid "" msgstr "" "Project-Id-Version: barbican VERSION\n" "Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" "POT-Creation-Date: 2020-01-04 06:24+0000\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "PO-Revision-Date: 2016-09-17 01:29+0000\n" "Last-Translator: Jeremy Liu \n" "Language: zh_CN\n" "Plural-Forms: nplurals=1; plural=0;\n" "Generated-By: Babel 2.0\n" "X-Generator: Zanata 4.3.3\n" "Language-Team: Chinese (China)\n" msgid "'algorithm' is required field for {0} type order" msgstr "'algorithm'是{0}类型的order所必须的字段" msgid "'bit_length' is required field for {0} type order" msgstr "'bit-length'是{0}类型的order所必须的字段" msgid "'expiration' is before current time" msgstr "'expiration'早于当前时间" msgid "'payload' not allowed for asymmetric type order" msgstr "'payload'不被非对称密钥类型的order允许" msgid "'payload' not allowed for certificate type order" msgstr "'payload'不被证书类型的order允许" msgid "'payload' not allowed for key type order" msgstr "'payload'不被key类型的order允许" msgid "A Content-Type of '{content_type}' for secrets is not supported" msgstr "不支持秘密内容类型'{content_type}'" msgid "A new project preferred CA must be set before this one can be deleted." msgstr "在删除项目首选CA之前必须先设置一个新的首选CA。" msgid "Address of the KMIP server" msgstr "KMIP服务器的地址" msgid "" "Allow unauthenticated users to access the API with read-only privileges. " "This only applies when using ContextMiddleware." msgstr "" "当使用上下文中间件ContextMiddleware的时候将允许未授权的用户拥有API接口的只读" "权限" msgid "An object with the specified identifier was not found." msgstr "找不到具有指定标识的对象。" msgid "An unknown exception occurred" msgstr "发生未知异常" msgid "" "An unsupported algorithm {algorithm} was passed to the " "'generate_symmetric_key' method" msgstr "一个不支持的算法{algorithm}被传到'generate_symmetric_key'方法" msgid "Apparent RNG self-test failure." 
msgstr "明显的RNG自我测试失败。" msgid "Bad Container Reference {ref}" msgstr "错误的容器引用{ref}" msgid "Bad format" msgstr "错误的格式" msgid "Bad key file permissions found, expected 400 for path: {file_path}" msgstr "发现错误的密钥文件权限,路径:{file_path}期望400" #, python-format msgid "CA specified by ca_id %(ca_id)s not defined for project: %(project_id)s" msgstr "指定的CA %(ca_id)s 未在项目:%(project_id)s中定义" msgid "Cannot generate a fullname for a null instance" msgstr "不能为空实例生成全名" msgid "Cannot modify order type." msgstr "不能修改order类型。" msgid "Certificate event plugin \"{name}\" not found." msgstr "证书事件插件“{name}”未发现。" msgid "Certificate event plugin not found." msgstr "证书事件插件未发现。" msgid "Certificate not found for cert_id: {cert_id}" msgstr "未找到cert_id为{cert_id}的证书" msgid "Certificate plugin \"{name}\" not found." msgstr "证书插件“{name}”未发现" msgid "Certificate plugin not found for \"{ca_id}\"." msgstr "未发现CA:“{ca_id}”的证书插件。" msgid "Certificate plugin not found or configured." msgstr "证书插件未发现,或未配置。" msgid "Certificate status of {status} not supported" msgstr "证书状态{status}不支持" msgid "Check Certificate Order Status" msgstr "检查证书order状态" msgid "" "Conflict. A secret with that name and ID is already stored in this " "container. The same secret can exist in a container as long as the name is " "unique." msgstr "" "冲突。具有相同名字和ID的秘密在该容器中已经存在。只有秘密的名字独一无二,秘密" "才能存储在容器中。" msgid "Conflict. Key in request is already in the secret metadata" msgstr "冲突。请求中的Key已经存在于秘密元数据中" #, python-format msgid "" "Container %(container_id)s does not exist for stored key certificate " "generation." msgstr "用于生成stored key certificate的容器%(container_id)s不存在。" #, python-format msgid "" "Container %(container_id)s does not reference a private key needed for " "stored key certificate generation." msgstr "用于生成stored key certificate的私钥未被容器%(container_id)s所引用。" msgid "Container Not Found" msgstr "未发现容器" msgid "Container Secret creation" msgstr "创建容器秘密" msgid "Container Secret deletion" msgstr "删除容器的秘密" msgid "Container Wrong Type" msgstr "错误的容器类型" msgid "Container creation" msgstr "创建容器" msgid "Container deletion" msgstr "删除容器" msgid "Container retrieval" msgstr "检索容器" msgid "ContainerACL(s) Update" msgstr "更新容器访问控制列表" msgid "ContainerACL(s) deletion" msgstr "删除容器访问控制列表" msgid "ContainerACL(s) retrieval" msgstr "检索容器访问控制列表" msgid "ContainerConsumer creation" msgstr "创建容器内的用户" msgid "ContainerConsumer deletion" msgstr "删除容器内的用户" msgid "ContainerConsumer retrieval" msgstr "检索容器内的用户" msgid "ContainerConsumers(s) retrieval" msgstr "检索容器的消费者" msgid "Containers(s) retrieval" msgstr "检索容器" msgid "Content-Type of '{content_type}' is not supported for PUT." msgstr "内容类型'{content_type}'不支持更新" msgid "Could not find key labeled {0}" msgstr "无法找到标签为{0}的密钥" msgid "Could not find {entity_name}" msgstr "未找到{entity_name}" msgid "Could not generate private key" msgstr "无法生成私钥" msgid "Creation not allowed because a quota has been reached" msgstr "不允许创建因为已达到配额" msgid "Crypto plugin not found." msgstr "密码插件未发现。" msgid "" "DSA keys should not have a passphrase in the database, for being used during " "retrieval." msgstr "DSA密钥不应在数据库中有密码,因为在检索过程被使用了。" msgid "Data supplied was not valid." msgstr "提供的数据无效。" msgid "" "Define the number of max threads to be used for notification server " "processing functionality." 
msgstr "定义通知服务器处理功能所使用的最大线程数量。" msgid "Directory in which to store certs/keys for subcas" msgstr "为子CA存储证书/密钥的目录" #, python-format msgid "Dogtag plugin does not support %s request type" msgstr "Dogtag插件不支持%s类型的请求" msgid "Domain of Symantec API" msgstr "Symantec API的域名" msgid "Duplicate reference names are not allowed" msgstr "不允许重复的引用名称" msgid "Duplicate secret ids are not allowed" msgstr "不允许重复的secret id" msgid "Encoding type must be 'base64' for text-based payloads." msgstr "基本文本的负载编码方式必须是'base64'。" msgid "" "Encryption using session key is not supported when retrieving a " "{secret_type} key." msgstr "当检索一个{secret_type}类型的密钥时,不支持使用会话密钥进行加密。" msgid "Entity ID {entity_id} not found" msgstr "未找到Entity ID:{entity_id}" msgid "" "Error configuring registry database with supplied sql_connection. Got error: " "{error}" msgstr "使用提供的sql_connection配置注册数据库出错。捕获错误:{error}" #, python-format msgid "Error deleting project entities for project_id=%s" msgstr "为id为%s的项目删除实体出错" msgid "Error while attempting to decode payload." msgstr "试图解码负载时出错。" msgid "Errors in creating subordinate CA: %(name)" msgstr "创建子CA:%(name)时出错" msgid "Exception thrown by enroll_cert: {message}" msgstr "enroll_cert抛出异常:{message}" msgid "Extension namespace to search for eventing plugins." msgstr "用于搜索事件插件的扩展命名空间" msgid "Extension namespace to search for plugins." msgstr "用于搜索插件的扩展命名空间" msgid "Extensions are not yet supported. Specify a valid profile instead." msgstr "暂不支持扩展。请指定一个有效的信息。" msgid "Failed to bind kek metadata for plugin: {name}" msgstr "为插件:{name}绑定kek元数据失败" msgid "Failed to validate JSON information: " msgstr "验证json信息失败:" msgid "Feature not implemented for '{0}' order type" msgstr "'{0}' order类型未实现的特性" msgid "Feature not implemented for PKCS11" msgstr "特性未被PKCS11实现" #, python-format msgid "" "Feature not implemented for value set on field '%(field)s' on schema " "'%(schema)s': %(reason)s" msgstr "模式'%(schema)s'的'%(field)s'字段所设置的值的特性为实现:%(reason)s" msgid "File path to concatenated \"certification authority\" certificates" msgstr "串联“certification authority”证书的文件路径" msgid "File path to local client certificate" msgstr "本地客户端证书的文件路径" msgid "File path to local client certificate keyfile" msgstr "本地客户端证书密钥文件的路径" msgid "Flag for Read/Write Sessions" msgstr "读/写会话的标志" msgid "Full CMC Requests are not yet supported." msgstr "Full CMC请求暂不支持。" msgid "General exception" msgstr "常规异常" msgid "HSM Slot ID" msgstr "HSM插槽ID" msgid "HSM returned response code: {code}" msgstr "HSM返回响应码:{code}" msgid "Hostname for the Dogtag instance" msgstr "Dogtag实例的主机名称" msgid "If 'payload' is supplied, 'payload_content_type' must also be supplied." msgstr "如果提供了'payload',那么也必须提供'payload_content_type'。" msgid "If 'payload' specified, must be non empty" msgstr "如果指定了'payload','payload'就不能为空" #, python-format msgid "Invalid CA_ID: %(ca_id)s" msgstr "无效的CA ID: %(ca_id)s" msgid "Invalid CMC Data" msgstr "无效的CMC数据" msgid "Invalid Certificate Request Type" msgstr "无效的证书请求类型" msgid "Invalid Key. Key must be URL safe." msgstr "无效的key。key必须是URL安全的。" msgid "Invalid Metadata. Keys and Values must be Strings." msgstr "无效的元数据。键值对必须是字符串。" #, python-format msgid "Invalid PKCS10 Data: %(reason)s" msgstr "无效的PKCS10数据:%(reason)s" #, python-format msgid "Invalid Parent CA: %(parent_ca_ref)s" msgstr "无效的父CA:%(parent_ca_ref)s" msgid "Invalid algorithm passed in" msgstr "传入的算法无效" #, python-format msgid "Invalid container: %(reason)s" msgstr "无效的容器:%(reason)s" msgid "Invalid date for 'expiration'" msgstr "无效的'expiration'数据" msgid "Invalid extensions data." 
msgstr "无效的扩展数据。" msgid "Invalid operation requested - Reason: {reason}" msgstr "无效的操作请求。原因:{reason}" msgid "Invalid payload for payload_content_encoding" msgstr "payload_content_encoding对应的payload无效" msgid "Invalid request_status returned by CA" msgstr "CA返回了无效的请求状态" msgid "Invalid request_status {status} for request_id {request_id}" msgstr "请求:{request_id}的无效的请求状态{status}" msgid "Invalid status '{status}' for {entity_name}." msgstr "无效的{entity_name}状态'{status}'" #, python-format msgid "Invalid subject DN: %(subject_dn)s" msgstr "无效的subject DN:%(subject_dn)s" msgid "KEK not yet created." msgstr "KEK还未被创建。" msgid "KMIP plugin action not support." msgstr "不支持KMIP插件行为" msgid "" "KMIP plugin does not currently support protecting the private key with a " "passphrase" msgstr "KMIP插件当前不支持使用密码保护私钥" msgid "Key encryption key to be used by Simple Crypto Plugin" msgstr "被简单密码插件所使用的KEK" msgid "" "Keystone notification queue topic name. This name needs to match one of " "values mentioned in Keystone deployment's 'notification_topics' " "configuration e.g. notification_topics=notifications, " "barbican_notificationsMultiple servers may listen on a topic and messages " "will be dispatched to one of the servers in a round-robin fashion. That's " "why Barbican service should have its own dedicated notification queue so " "that it receives all of Keystone notifications." msgstr "" "keystone通知队列话题名称。这个名称需要同部署keystone时配置" "的'notification_topics'其中之一匹配,例如notification_topics=notifications," "barbican_notificationsMultiple服务会监听一个话题并且消息会通过轮询机制被分配" "到其中一个服务。那就是为什么barbican服务需要有自己专用的通知队列才能够接收到" "所有来自keystone的通知。" msgid "List of automatically approved enrollment profiles" msgstr "自动通过的注册信息列表" msgid "List of certificate plugins to load." msgstr "待加载的证书插件列表。" msgid "List of crypto plugins to load." msgstr "待加载的密码插件列表。" msgid "List of secret store plugins to load." msgstr "待加载的证书存储插件列表" msgid "Malformed JSON" msgstr "有缺陷的 JSON" msgid "Master KEK length in bytes." msgstr "Master KEK字节长度" msgid "Missing X-Project-Id" msgstr "缺少 X-Project-Id" msgid "Missing required argument." msgstr "缺少必须的参数" #, python-format msgid "Missing required metadata field for %(required)s" msgstr "%(required)s缺少所需的元数据" msgid "Modify request: unable to cancel: {message}" msgstr "修改请求:无法取消:{message}" msgid "More than one key found for label" msgstr "找到不止一个该标签对应的key" msgid "Must be a positive integer that is a multiple of 8" msgstr "必须是8的整数倍的正整数" msgid "Must supply Non-None {0} argument for CertificateAuthority entry." msgstr "必须为CertificateAuthority入口提供非空的{0}参数" msgid "" "Must supply non-None {0} argument for CertificateAuthorityMetadatum entry." msgstr "必须为CertificateAuthorityMetadatum入口提供非空的{0}参数" msgid "Must supply non-None {0} argument for ContainerACL entry." msgstr "必须为ContainerACL入口提供非空的{0}参数" msgid "Must supply non-None {0} argument for ContainerACLUser entry." msgstr "必须为ContainerACLUser入口提供非空的{0}参数" msgid "Must supply non-None {0} argument for OrderBarbicanMetadatum entry." msgstr "必须为OrderBarbicanMetadatum入口提供非空的{0}参数" msgid "Must supply non-None {0} argument for OrderPluginMetadatum entry." msgstr "必须为OrderPluginMetadatum 入口提供非空的{0}参数" msgid "" "Must supply non-None {0} argument for PreferredCertificateAuthority entry." msgstr "必须为PreferredCertificateAuthority入口提供非空的{0}参数" msgid "" "Must supply non-None {0} argument for ProjectCertificateAuthority entry." msgstr "必须为ProjectCertificateAuthority入口提供非空的{0}参数" msgid "Must supply non-None {0} argument for ProjectQuotas entry." 
msgstr "必须为ProjectQuotas入口提供非空的{0}参数" msgid "Must supply non-None {0} argument for SecretACL entry." msgstr "必须为SecretACL入口提供非空的{0}参数" msgid "Must supply non-None {0} argument for SecretACLUser entry." msgstr "必须为SecretACLUser入口提供非空的{0}参数" msgid "Must supply non-None {0} argument for SecretStoreMetadatum entry." msgstr "必须为SecretStoreMetadatum入口提供非空的{0}参数。" msgid "Must supply non-None {0} argument for SecretUserMetadatum entry." msgstr "必须为SecretUserMetadatum入口提供非空的{0}参数。" msgid "Must supply non-None {0} argument for TransportKey entry." msgstr "必须为TransportKey入口提供非空的{0}参数" msgid "Must supply non-None {entity_name}." msgstr "必须提供非空的{entity_name}" msgid "Must supply {entity_name} with id=None (i.e. new entity)." msgstr "必须提供id为None的{entity_name}(i.e. new entity)。" msgid "No SQL connection configured" msgstr "没有配置SQL连接" msgid "No container found with container-ID {id}" msgstr "未发现container-ID为{id}的容器" msgid "No data supplied to process." msgstr "没有提供数据供处理。" msgid "No entities of type {entity_name} found" msgstr "未发现类型{entity_name}的实体" msgid "No key handle was found" msgstr "未发现密钥操作" msgid "No plugin was found that could support your request" msgstr "没有找到可以支持你的请求的插件" msgid "No profile_id specified" msgstr "未指定profile_id" msgid "No request found for request_id {request_id} for order {order_id}" msgstr "未找到order:{order_id}的request_id为{request_id}的请求" msgid "No request returned in enrollment_results" msgstr "注册结果未返回请求" msgid "No request_data specified" msgstr "未指定请求数据" msgid "No secret found with secret-ID {id}" msgstr "未发现secret-ID为{id}的秘密" msgid "No secret information found" msgstr "未发现秘密的信息" msgid "No secret information provided to encrypt." msgstr "未提供用于加密的秘密信息。" msgid "No secret store plugins have been configured" msgstr "为配置秘密存储插件" #, python-format msgid "" "No support for value set on field '%(field)s' on schema '%(schema)s': " "%(reason)s" msgstr "模式'%(schema)s'的'%(field)s'字段所设置的值不支持:%(reason)s" #, python-format msgid "No token was found in slot %(slot_id)s" msgstr "未找到插槽%(slot_id)s的口令" msgid "No {entity_name} found with keystone-ID {id}" msgstr "未发现keystone-ID为{id}的{entity_name}" msgid "No {entity} found with ID {id}" msgstr "未找到ID为{id}的{entity}" msgid "Not Allowed. Sorry, only the creator of a consumer can delete it." msgstr "不被允许。对不起,只有消费者的创建者才能删除它。" msgid "" "Not Found. Multiple backends support is not enabled in service configuration." msgstr "未发现。服务配置未启动多后端支持。" msgid "Not Found. No preferred secret store defined for this project." msgstr "未发现。该项目没有定义首选秘密存储。" msgid "Not Found. Secret store not found." msgstr "未发现。未发现秘密存储。" msgid "Not Found. Sorry but your secret has no payload." msgstr "未发现。抱歉你的秘密没有负载。" msgid "Not Found. Transport Key not found." msgstr "未发现。传输密钥未发现。" msgid "Number of asynchronous worker processes" msgstr "异步任务处理者的数量" msgid "Only 'generic' containers can be modified." msgstr "只有通用的容器能够被修改" msgid "Only PENDING orders can be updated. Order is in the{0} state." msgstr "只有PENGDING状态的order能被更新。当前order是{0}状态。" msgid "Only subordinate CAs can be deleted." msgstr "只有子CA才能被删除。" msgid "Only support PKCS#1 encoding of asymmetric keys" msgstr "只支持非对称密钥的PKCS#1编码" msgid "Operation is not supported." msgstr "操作不支持。" msgid "Operation not supported by Dogtag Plugin" msgstr "Dogtag插件不支持的操作" msgid "Order creation" msgstr "创建order" msgid "Order deletion" msgstr "删除order" msgid "Order retrieval" msgstr "检索order" msgid "Order type \"{order_type}\" not implemented." msgstr "order类型“{order_type}”未实现。" msgid "Order type \"{order_type}\" not supported." 
msgstr "order类型“{order_type}”不支持" msgid "Order update" msgstr "更新order" msgid "Order update is not supported." msgstr "不支持命令更新。" msgid "Order(s) retrieval" msgstr "检索order(s)" msgid "Passphrase encryption is not supported for DSA algorithm" msgstr "DSA算法不支持密码加密。" msgid "" "Passphrase encryption is not supported for symmetric key generating " "algorithms." msgstr "对称密钥生成算法不支持密码加密。" msgid "Password for authenticating with KMIP server" msgstr "KMIP服务器的认证密码" msgid "Password for the NSS certificate databases" msgstr "NSS证书数据库的密码" msgid "Password to login to PKCS11 session" msgstr "登录到PKCS11会话的密码" msgid "Path to CA certificate key file" msgstr "CA证书密钥文件的路径" msgid "Path to CA chain pkcs7 file" msgstr "CA链pkcs7文件的路径" msgid "Path to PEM file for authentication" msgstr "认证所需的PEM文件的路径" msgid "Path to the NSS certificate database" msgstr "NSS证书数据库的路径" msgid "Path to vendor PKCS11 library" msgstr "提供PKCS11库的路径" msgid "Plugin does not support generation of subordinate CAs" msgstr "插件不支持生成子CA" msgid "Port for the Dogtag instance" msgstr "Dogtag实例的端口" msgid "Port for the KMIP server" msgstr "KMIP服务器的端口" msgid "Problem decoding payload" msgstr "解码负载时出错" msgid "Problem seen during certificate processing - Reason: {reason}" msgstr "证书处理似乎有问题。原因:{reason}" msgid "Problem seen during crypto processing - Reason: {reason}" msgstr "加密处理似乎出现问题。原因:{reason}" msgid "Problem with data in certificate request - Reason: {reason}" msgstr "证书请求中的数据有问题。原因:{reason}" msgid "Process TypeOrder" msgstr "处理TypeOrder" msgid "Process TypeOrder failure seen - please contact site administrator." msgstr "处理order失败。请联系站点管理员。" msgid "Profile for simple CMC requests" msgstr "simple-CMC请求的信息" msgid "Project KEK Cache Item Limit" msgstr "项目KEK缓存条目限制" msgid "Project KEK Cache Time To Live, in seconds" msgstr "项目KEK生存期缓存时间,以秒为单位" msgid "Project KEK length in bytes." msgstr "项目KEK字节长度" msgid "Project Quotas" msgstr "项目配额" msgid "Project cleanup via Keystone notifications" msgstr "通过keystone通知项目清理" #, python-format msgid "Provided Transport key %(transport_key_id)s could not be found" msgstr "无法找到所提供的传输key %(transport_key_id)s" msgid "Provided field value is not supported" msgstr "不支持所提供的字段值" msgid "Provided information too large to process" msgstr "提供的信息量太大无法处理" msgid "" "Provided object does not match schema '{schema}': {reason}. Invalid " "property: '{property}'" msgstr "提供的对象与模式'{schema}'不匹配:{reason}。无效的属性:'{property}'" msgid "Provided transport key was not found." msgstr "无法找到所提供的传输key。" msgid "Queue namespace" msgstr "队列命名空间" msgid "Queue topic name" msgstr "队列话题名称" #, python-format msgid "" "Quota reached for project %(external_project_id)s. Only %(quota)s " "%(resource_type)s are allowed." msgstr "" "项目%(external_project_id)s的配额已经达到。只有%(quota)s %(resource_type)s被" "允许。" msgid "Quotas" msgstr "配额" msgid "Read Error" msgstr "读取错误" msgid "Removing preferred secret store" msgstr "移除首选秘密存储" msgid "" "Request {request_id} reports status_complete, but no cert_id has been " "returned" msgstr "请求{request_id}报告了status_complete状态,但是未返回cert_id。" msgid "Requested algorithm is not supported" msgstr "请求的算法不支持" msgid "Role used to identify an authenticated user as administrator." msgstr "一种角色,用来确定该已认证用户是管理员。" msgid "SSL version, maps to the module ssl's constants" msgstr "SSL版本,映射到模块ssl的常量" msgid "Seconds (float) to wait before starting retry scheduler" msgstr "在开始重新调度之前需要等待数秒" msgid "Seconds (float) to wait between periodic schedule events" msgstr "周期性调度事件之间等待数秒" msgid "Secret '{secret_name}' with reference '{secret_ref}' doesn't exist." 
msgstr "秘密:'{secret_name}'及其URL:'{secret_ref}'不存在" msgid "Secret Accept of '{accept}' not supported" msgstr "不支持的Secret Accept '{accept}'" msgid "Secret Content-Encoding of '{content_encoding}' not supported" msgstr "不支持的秘密的内容编码'{content_encoding}'" msgid "Secret algorithm of '{algorithm}' not supported" msgstr "秘密的算法'{algorithm}'不支持" msgid "Secret already has data, cannot modify it." msgstr "秘密已经有数据,不能修改。" msgid "Secret creation" msgstr "创建秘密" msgid "Secret deletion" msgstr "删除秘密" msgid "Secret metadata creation" msgstr "创建秘密元数据" msgid "Secret metadata expected but not received." msgstr "预期到机密的元数据,但没有收到。" msgid "Secret metadata retrieval" msgstr "检索秘密元数据" msgid "Secret metadatum creation" msgstr "创建秘密元数据" msgid "Secret metadatum removal" msgstr "移除秘密元数据" msgid "Secret metadatum retrieval" msgstr "检索秘密元数据" msgid "Secret metadatum update" msgstr "更新秘密元数据" msgid "Secret object type {object_type} is not supported" msgstr "秘密对象类型{object_type}不支持" msgid "Secret payload retrieval" msgstr "检索秘密负载" msgid "Secret provided doesn't exist." msgstr "提供的秘密不存在" msgid "Secret provided for '{secret_name}' doesn't exist." msgstr "为 '{机密名}'提供的机密不存在。" msgid "Secret provided is not in the container" msgstr "提供的秘密不在这个容器中" msgid "Secret retrieval" msgstr "检索秘密" msgid "Secret store plugin \"{name}\" not found." msgstr "秘密存储插件“{name}”未发现" msgid "Secret store plugin not found." msgstr "秘密存储插件未发现。" msgid "Secret store supported plugin not found." msgstr "秘密存储支持的插件未发现。" msgid "Secret type can not be converted to DER" msgstr "无法将秘密的类型转换为DER" msgid "Secret type can not be converted to PEM" msgstr "无法将秘密的类型转换为PEM" msgid "Secret update" msgstr "更新秘密" msgid "Secret(s) retrieval" msgstr "检索秘密" msgid "SecretACL(s) Update" msgstr "更新秘密访问控制列表" msgid "SecretACL(s) deletion" msgstr "删除秘密访问控制列表" msgid "SecretACL(s) retrieval" msgstr "检索秘密访问控制列表" msgid "Secret_ref does not match the configured hostname, please try again" msgstr "Secret_ref与配置的hostname不匹配,请重试" msgid "" "Secrets of type {secret_type} should not have a passphrase in the database, " "for being used during retrieval." msgstr "{secret_type}类型的秘密不应在数据库中有密码,因为在检索过程被使用了。" msgid "Server name for RPC task processing server" msgstr "RPC任务服务器的名称" msgid "Setting preferred secret store" msgstr "设置首选秘密存储" msgid "Signing key incorrect" msgstr "密钥签发错误" msgid "Status: {status}, Reason: {reason}, Message: {message}" msgstr "状态:{status},原因:{reason},消息:{message}" msgid "Subordinate CA is not owned by this project" msgstr "该项目没有子CA" msgid "Symantec password for authentication" msgstr "认证的Symantec密码" msgid "Symantec username for authentication" msgstr "认证的Symantec用户名" msgid "System" msgstr "系统" msgid "" "Text-based binary secret payloads must specify a content-encoding of 'base64'" msgstr "基于文本的二进制秘密负载必须指定内容编码为'base64'" msgid "The ca_id provided in the request is invalid" msgstr "请求中所提供的ca_id无效" msgid "The ca_id provided in the request is not defined for this project" msgstr "请求中提供的ca_id在这个项目中没有定义" msgid "" "The default exchange under which topics are scoped. May be overridden by an " "exchange name specified in the transport_url option." msgstr "" "话题被限定在默认的交换名称下。可能会被transport_url参数中指定的交换名称覆盖。" msgid "" "The minimum required reference name is 'certificate' for Certificate type" msgstr "证书类型所需要的最少的引用名称是'certificate'" msgid "" "The minimum required reference names are 'public_key' and'private_key' for " "RSA type" msgstr "RSA类型最少需要的引用名称是'public_key'和'private_key'" msgid "" "The request returned a 413 Request Entity Too Large. 
This generally means " "that rate limiting or a quota threshold was breached." msgstr "该请求返回了“413请求实体过大”。这通常意味着违背了速度限制或配额阈值。" msgid "" "The requested Store Plugin {plugin_name} is not currently available. This is " "probably a server misconfiguration." msgstr "请求的存储插件{plugin_name}目前不可用。这可能是服务器配置错误。" msgid "The version you requested wasn't found" msgstr "未发现你请求的版本" msgid "There was an error with the PKCS#11 library." msgstr "PKCS#11库有一个错误。" msgid "Time in days for CA entries to expire" msgstr "CA条目失效的天数" msgid "Transport Key Creation" msgstr "创建传输密钥" msgid "Transport Key deletion" msgstr "删除传输密钥" msgid "Transport Key retrieval" msgstr "检索传输密钥" msgid "Transport Key(s) retrieval" msgstr "检索传输密钥" msgid "" "Transport key wrapped session key has been provided to wrap secrets for " "retrieval, but the transport key id has not been provided." msgstr "" "已经提供了包含会话密钥的传输密钥用于加密待检索的秘密,但是未提供传输密钥的" "id。" msgid "Tried to register crypto plugin with null or empty name." msgstr "尝试使用空的名字注册密码插件。" msgid "True enables keystone notification listener functionality." msgstr "True使能keystone-notification-listener功能。" msgid "True enables queuing, False invokes workers synchronously" msgstr "True使能队列,False引用同步workers" msgid "" "True enables requeue feature in case of notification processing error. " "Enable this only when underlying transport supports this feature." msgstr "" "通知处理出错时True使能重新排队特性。只有当基础传输支持这个特性时才可以使能" "它。" msgid "URI provided invalid query string parameters." msgstr "URI包含了无效的字符串请求参数。" msgid "Unable to decode request data." msgstr "无法解码请求数据。" msgid "Unencrypted data must be a byte type, but was {unencrypted_type}" msgstr "未加密的数据必须是字节类型,但却是{unencrypted_type}" msgid "Unknown" msgstr "未知" msgid "Unknown attribute type provided." msgstr "提供的属性类型未知" msgid "Update Order" msgstr "更新order" msgid "Update Order failure seen - please contact site administrator." msgstr "更新order失败。请联系站点管理员。" msgid "Username for authenticating with KMIP server" msgstr "KMIP服务器的认证用户名" msgid "Version of tasks invoked via notifications" msgstr "通过通知引用的任务的版本" msgid "Version of tasks invoked via queue" msgstr "通过队列引用的任务的版本" msgid "Version retrieval" msgstr "检索版本" msgid "Working directory for Dogtag plugin" msgstr "Dogtag插件的工作目录" msgid "Wrong payload content-type" msgstr "错误的负载内容类型" msgid "content-encoding of '{content_encoding}' not supported" msgstr "不支持的内容编码'{content_encoding}'" msgid "content-type of '{content_type}' not supported" msgstr "不支持内容类型'{content_type}'" msgid "failure seen - please contact site administrator." 
msgstr "错误。请联系站点管理员。" msgid "key_label must be set for master_keys" msgstr "必须为主密钥设置密钥标签" msgid "library_path is required" msgstr "需要library_path" msgid "no request found for this order" msgstr "未找到这个order的请求" msgid "nss_password is required" msgstr "需要nss_password" msgid "" "only 'private_key', 'certificate' , 'private_key_passphrase', or " "'intermediates' reference names are allowed for Certificate type" msgstr "" "证书类型允许的引用名称只" "有'private_key','certificate','private_key_passphrase'或者'internediates'" msgid "" "only 'private_key', 'public_key' and 'private_key_passphrase' reference " "names are allowed for RSA type" msgstr "" "RSA类型允许的引用名称只" "有'private_key','public_key'和'private_key_passphrase'" msgid "password is required" msgstr "需要密码" msgid "payload must be provided when payload_content_type is specified" msgstr "当指定了'payload_content_type',就必须提供'payload'" msgid "payload_content_encoding is not one of {supported}" msgstr "payload_content_encoding不在{supported}当中" msgid "payload_content_type is not one of {supported}" msgstr "payload_content_type不在{supported}当中" msgid "pem_path is required" msgstr "需要pem_path" msgid "plugin_name must be provided" msgstr "必须提供plugin_name" msgid "request_id {req_id} returns COMPLETE but no cert returned" msgstr "请求{req_id}返回了完成状态,但是没有证书被返回" msgid "transport_key must be provided" msgstr "必须提供transport_key" msgid "url is required" msgstr "需要url" msgid "username is required" msgstr "需要用户名" msgid "{entity_name} is missing query build method for get project entities." msgstr "{entity_name}缺少用于获取项目实体的查询构建方法。" msgid "{entity_name} status is required." msgstr "{entity_name}状态是必须的。" msgid "" "{operation} attempt not allowed - please review your user/project privileges" msgstr "{operation}尝试不允许 - 请检查您的用户或者项目权限" msgid "{operation} failure seen - please contact site administrator." msgstr "{operation}失败 - 请联系网站管理员。" msgid "{operation} issue seen - {reason}." msgstr "{operation}出现问题 - {reason}。" msgid "{request} not found for {operation} for order_id {order_id}" msgstr "未找到order_id为{order_id}的{operation}操作的{request}请求" msgid "{schema_name}' within '{parent_schema_name}" msgstr "带有{parent_schema_name}的{schema_name}'" barbican-9.1.0.dev50/barbican/locale/en_GB/0000775000175000017500000000000013616500640020363 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/locale/en_GB/LC_MESSAGES/0000775000175000017500000000000013616500640022150 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/locale/en_GB/LC_MESSAGES/barbican.po0000664000175000017500000014260613616500636024267 0ustar sahidsahid00000000000000# OpenStack Infra , 2015. #zanata # Andi Chandler , 2016. #zanata # Andreas Jaeger , 2016. #zanata # Andi Chandler , 2017. #zanata # Andi Chandler , 2018. #zanata # Andi Chandler , 2019. #zanata msgid "" msgstr "" "Project-Id-Version: barbican VERSION\n" "Report-Msgid-Bugs-To: https://bugs.launchpad.net/openstack-i18n/\n" "POT-Creation-Date: 2020-01-04 06:24+0000\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "PO-Revision-Date: 2019-12-21 01:03+0000\n" "Last-Translator: Andi Chandler \n" "Language-Team: English (United Kingdom)\n" "Language: en_GB\n" "X-Generator: Zanata 4.3.3\n" "Plural-Forms: nplurals=2; plural=(n != 1)\n" msgid "" "# The maximum overflow size of the pool used by SQLAlchemy. When the number " "of checked-out connections reaches the size set in sql_pool_size, additional " "connections will be returned up to this limit. 
It follows then that the " "total number of simultaneous connections the pool will allow is " "sql_pool_size + sql_pool_max_overflow. Can be set to -1 to indicate no " "overflow limit, so no limit will be placed on the total number of concurrent " "connections. Comment out to allow SQLAlchemy to select the default." msgstr "" "# The maximum overflow size of the pool used by SQLAlchemy. When the number " "of checked-out connections reaches the size set in sql_pool_size, additional " "connections will be returned up to this limit. It follows then that the " "total number of simultaneous connections the pool will allow is " "sql_pool_size + sql_pool_max_overflow. Can be set to -1 to indicate no " "overflow limit, so no limit will be placed on the total number of concurrent " "connections. Comment out to allow SQLAlchemy to select the default." msgid "'algorithm' is required field for {0} type order" msgstr "'algorithm' is required field for {0} type order" msgid "'bit_length' is required field for {0} type order" msgstr "'bit_length' is required field for {0} type order" msgid "'expiration' is before current time" msgstr "'expiration' is before current time" msgid "'payload' not allowed for asymmetric type order" msgstr "'payload' not allowed for asymmetric type order" msgid "'payload' not allowed for certificate type order" msgstr "'payload' not allowed for certificate type order" msgid "'payload' not allowed for key type order" msgstr "'payload' not allowed for key type order" msgid "" ">>>>> Task exception seen for synchronous task invocation, so handling " "exception to mimic asynchronous behavior." msgstr "" ">>>>> Task exception seen for synchronous task invocation, so handling " "exception to mimic asynchronous behaviour." msgid "A Content-Type of '{content_type}' for secrets is not supported" msgstr "A Content-Type of '{content_type}' for secrets is not supported" #, python-format msgid "A defined SQL constraint check failed: %(error)s" msgstr "A defined SQL constraint check failed: %(error)s" msgid "A new project preferred CA must be set before this one can be deleted." msgstr "A new project preferred CA must be set before this one can be deleted." msgid "" "Accepts a class imported from the sqlalchemy.pool module, and handles the " "details of building the pool for you. If commented out, SQLAlchemy will " "select based on the database dialect. Other options are QueuePool (for " "SQLAlchemy-managed connections) and NullPool (to disabled SQLAlchemy " "management of connections). See http://docs.sqlalchemy.org/en/latest/core/" "pooling.html for more details" msgstr "" "Accepts a class imported from the sqlalchemy.pool module, and handles the " "details of building the pool for you. If commented out, SQLAlchemy will " "select based on the database dialect. Other options are QueuePool (for " "SQLAlchemy-managed connections) and NullPool (to disabled SQLAlchemy " "management of connections). See http://docs.sqlalchemy.org/en/latest/core/" "pooling.html for more details" msgid "Address of the KMIP server" msgstr "Address of the KMIP server" msgid "" "Allow unauthenticated users to access the API with read-only privileges. " "This only applies when using ContextMiddleware." msgstr "" "Allow unauthenticated users to access the API with read-only privileges. " "This only applies when using ContextMiddleware." msgid "" "Always set CKA_SENSITIVE=CK_TRUE including CKA_EXTRACTABLE=CK_TRUE keys." msgstr "" "Always set CKA_SENSITIVE=CK_TRUE including CKA_EXTRACTABLE=CK_TRUE keys." 
msgid "Amount of data to read from file for seed" msgstr "Amount of data to read from file for seed" msgid "An object with the specified identifier was not found." msgstr "An object with the specified identifier was not found." msgid "An unknown exception occurred" msgstr "An unknown exception occurred" msgid "" "An unsupported algorithm {algorithm} was passed to the " "'generate_symmetric_key' method" msgstr "" "An unsupported algorithm {algorithm} was passed to the " "'generate_symmetric_key' method" msgid "Apparent RNG self-test failure." msgstr "Apparent RNG self-test failure." msgid "Bad Container Reference {ref}" msgstr "Bad Container Reference {ref}" msgid "Bad format" msgstr "Bad format" msgid "Bad key file permissions found, expected 400 for path: {file_path}" msgstr "Bad key file permissions found, expected 400 for path: {file_path}" #, python-format msgid "CA specified by ca_id %(ca_id)s not defined for project: %(project_id)s" msgstr "" "CA specified by ca_id %(ca_id)s not defined for project: %(project_id)s" msgid "Cannot generate a fullname for a null instance" msgstr "Cannot generate a fullname for a null instance" msgid "Cannot modify order type." msgstr "Cannot modify order type." msgid "Certificate event plugin \"{name}\" not found." msgstr "Certificate event plugin \"{name}\" not found." msgid "Certificate event plugin not found." msgstr "Certificate event plugin not found." msgid "Certificate not found for cert_id: {cert_id}" msgstr "Certificate not found for cert_id: {cert_id}" msgid "Certificate plugin \"{name}\" not found." msgstr "Certificate plugin \"{name}\" not found." msgid "Certificate plugin not found for \"{ca_id}\"." msgstr "Certificate plugin not found for \"{ca_id}\"." msgid "Certificate plugin not found or configured." msgstr "Certificate plugin not found or configured." msgid "Certificate status of {status} not supported" msgstr "Certificate status of {status} not supported" msgid "Check Certificate Order Status" msgstr "Check Certificate Order Status" msgid "" "Conflict. A secret with that name and ID is already stored in this " "container. The same secret can exist in a container as long as the name is " "unique." msgstr "" "Conflict. A secret with that name and ID is already stored in this " "container. The same secret can exist in a container as long as the name is " "unique." msgid "Conflict. Key in request is already in the secret metadata" msgstr "Conflict. Key in request is already in the secret metadata" msgid "Consumer not found." msgstr "Consumer not found." #, python-format msgid "" "Container %(container_id)s does not exist for stored key certificate " "generation." msgstr "" "Container %(container_id)s does not exist for stored key certificate " "generation." #, python-format msgid "" "Container %(container_id)s does not reference a private key needed for " "stored key certificate generation." msgstr "" "Container %(container_id)s does not reference a private key needed for " "stored key certificate generation." 
msgid "Container Not Found" msgstr "Container Not Found" msgid "Container Secret creation" msgstr "Container Secret creation" msgid "Container Secret deletion" msgstr "Container Secret deletion" msgid "Container Wrong Type" msgstr "Container Wrong Type" msgid "Container creation" msgstr "Container creation" msgid "Container deletion" msgstr "Container deletion" msgid "Container retrieval" msgstr "Container retrieval" msgid "ContainerACL(s) Update" msgstr "ContainerACL(s) Update" msgid "ContainerACL(s) deletion" msgstr "ContainerACL(s) deletion" msgid "ContainerACL(s) retrieval" msgstr "ContainerACL(s) retrieval" msgid "ContainerConsumer creation" msgstr "ContainerConsumer creation" msgid "ContainerConsumer deletion" msgstr "ContainerConsumer deletion" msgid "ContainerConsumer retrieval" msgstr "ContainerConsumer retrieval" msgid "ContainerConsumers(s) retrieval" msgstr "ContainerConsumers(s) retrieval" msgid "Containers(s) retrieval" msgstr "Containers(s) retrieval" msgid "Content-Type of '{content_type}' is not supported for PUT." msgstr "Content-Type of '{content_type}' is not supported for PUT." msgid "" "Could not find a secret store plugin for generating secret with algorithm " "'{alg}' and bit-length '{len}'." msgstr "" "Could not find a secret store plugin for generating secret with algorithm " "'{alg}' and bit-length '{len}'." msgid "" "Could not find a secret store plugin for storing secret with algorithm " "'{alg}' and bit-length '{len}'." msgstr "" "Could not find a secret store plugin for storing secret with algorithm " "'{alg}' and bit-length '{len}'." msgid "" "Could not find an enabled crypto plugin backend that supports the requested " "operation: {operation}" msgstr "" "Could not find an enabled crypto plugin backend that supports the requested " "operation: {operation}" msgid "Could not find key labeled {0}" msgstr "Could not find key labelled {0}" msgid "Could not find {entity_name}" msgstr "Could not find {entity_name}" msgid "Could not generate private key" msgstr "Could not generate private key" msgid "Create the Barbican database on service startup." msgstr "Create the Barbican database on service startup." msgid "Creation not allowed because a quota has been reached" msgstr "Creation not allowed because a quota has been reached" msgid "Crypto plugin not found." msgstr "Crypto plugin not found." msgid "" "DSA keys should not have a passphrase in the database, for being used during " "retrieval." msgstr "" "DSA keys should not have a passphrase in the database, for being used during " "retrieval." msgid "Data supplied was not valid." msgstr "Data supplied was not valid." msgid "Default page size for the 'limit' paging URL parameter." msgstr "Default page size for the 'limit' paging URL parameter." msgid "" "Define the number of max threads to be used for notification server " "processing functionality." msgstr "" "Define the number of max threads to be used for notification server " "processing functionality." msgid "Directory in which to store certs/keys for subcas" msgstr "Directory in which to store certs/keys for subcas" #, python-format msgid "Dogtag plugin does not support %s request type" msgstr "Dogtag plugin does not support %s request type" msgid "Domain of Symantec API" msgstr "Domain of Symantec API" msgid "Duplicate reference names are not allowed" msgstr "Duplicate reference names are not allowed" msgid "Duplicate secret ids are not allowed" msgstr "Duplicate secret ids are not allowed" msgid "Encoding type must be 'base64' for text-based payloads." 
msgstr "Encoding type must be 'base64' for text-based payloads." msgid "" "Encryption using session key is not supported when retrieving a " "{secret_type} key." msgstr "" "Encryption using session key is not supported when retrieving a " "{secret_type} key." msgid "Entity ID {entity_id} not found" msgstr "Entity ID {entity_id} not found" msgid "" "Error configuring registry database with supplied sql_connection. Got error: " "{error}" msgstr "" "Error configuring registry database with supplied sql_connection. Got error: " "{error}" #, python-format msgid "Error deleting project entities for project_id=%s" msgstr "Error deleting project entities for project_id=%s" msgid "Error while attempting to decode payload." msgstr "Error while attempting to decode payload." msgid "Errors in creating subordinate CA: %(name)" msgstr "Errors in creating subordinate CA: %(name)" #, python-format msgid "" "Errors returned by CA when attempting to create subordinate CA: %(reason)s" msgstr "" "Errors returned by CA when attempting to create subordinate CA: %(reason)s" #, python-format msgid "" "Errors returned by CA when attempting to delete subordinate CA: %(reason)s" msgstr "" "Errors returned by CA when attempting to delete subordinate CA: %(reason)s" msgid "Exception thrown by enroll_cert: {message}" msgstr "Exception thrown by enroll_cert: {message}" msgid "Extension namespace to search for eventing plugins." msgstr "Extension namespace to search for eventing plugins." msgid "Extension namespace to search for plugins." msgstr "Extension namespace to search for plugins." msgid "Extensions are not yet supported. Specify a valid profile instead." msgstr "Extensions are not yet supported. Specify a valid profile instead." msgid "Failed to bind kek metadata for plugin: {name}" msgstr "Failed to bind KEK metadata for plugin: {name}" msgid "Failed to validate JSON information: " msgstr "Failed to validate JSON information: " msgid "Feature not implemented for '{0}' order type" msgstr "Feature not implemented for '{0}' order type" msgid "Feature not implemented for PKCS11" msgstr "Feature not implemented for PKCS11" #, python-format msgid "" "Feature not implemented for value set on field '%(field)s' on schema " "'%(schema)s': %(reason)s" msgstr "" "Feature not implemented for value set on field '%(field)s' on schema " "'%(schema)s': %(reason)s" msgid "File path to concatenated \"certification authority\" certificates" msgstr "File path to concatenated \"certification authority\" certificates" msgid "File path to local client certificate" msgstr "File path to local client certificate" msgid "File path to local client certificate keyfile" msgstr "File path to local client certificate keyfile" msgid "File to pull entropy for seeding RNG" msgstr "File to pull entropy for seeding RNG" msgid "Flag for Read/Write Sessions" msgstr "Flag for Read/Write Sessions" msgid "" "Flag to enable multiple secret store plugin backend support. Default is False" msgstr "" "Flag to enable multiple secret store plugin backend support. Default is False" msgid "" "Flag to indicate if this plugin is global default plugin for deployment. " "Default is False." msgstr "" "Flag to indicate if this plugin is global default plugin for deployment. " "Default is False." msgid "Full CMC Requests are not yet supported." msgstr "Full CMC Requests are not yet supported." msgid "General exception" msgstr "General exception" msgid "Generate IVs for CKM_AES_GCM mechanism." msgstr "Generate IVs for CKM_AES_GCM mechanism." 
msgid "Generating keys encrypted with passphrases is not supported" msgstr "Generating keys encrypted with passphrases is not supported" msgid "HMAC Key Generation Algorithm" msgstr "HMAC Key Generation Algorithm" msgid "HMAC Key Type" msgstr "HMAC Key Type" msgid "HMAC key wrap mechanism" msgstr "HMAC key wrap mechanism" msgid "HSM Slot ID" msgstr "HSM Slot ID" msgid "HSM returned response code: {code}" msgstr "HSM returned response code: {code}" msgid "" "Host name, for use in HATEOAS-style references Note: Typically this would be " "the load balanced endpoint that clients would use to communicate back with " "this service. If a deployment wants to derive host from wsgi request instead " "then make this blank. Blank is needed to override default config value which " "is 'http://localhost:9311'" msgstr "" "Host name, for use in HATEOAS-style references Note: Typically this would be " "the load balanced endpoint that clients would use to communicate back with " "this service. If a deployment wants to derive host from wsgi request instead " "then make this blank. Blank is needed to override default config value which " "is 'http://localhost:9311'" msgid "Hostname for the Dogtag instance" msgstr "Hostname for the Dogtag instance" msgid "If 'payload' is supplied, 'payload_content_type' must also be supplied." msgstr "" "If 'payload' is supplied, 'payload_content_type' must also be supplied." msgid "If 'payload' specified, must be non empty" msgstr "If 'payload' specified, must be non empty" msgid "In section '{0}', secret_store_plugin value is missing" msgstr "In section '{0}', secret_store_plugin value is missing" msgid "Internal name used to identify crypto_plugin." msgstr "Internal name used to identify crypto_plugin." msgid "Internal name used to identifysecretstore_plugin" msgstr "Internal name used to identify secretstore_plugin" msgid "Interval between retries of opening a SQL connection." msgstr "Interval between retries of opening a SQL connection." #, python-format msgid "Invalid CA_ID: %(ca_id)s" msgstr "Invalid CA_ID: %(ca_id)s" msgid "Invalid CMC Data" msgstr "Invalid CMC Data" msgid "Invalid Certificate Request Type" msgstr "Invalid Certificate Request Type" msgid "Invalid Key. Key must be URL safe." msgstr "Invalid Key. Key must be URL safe." msgid "Invalid Metadata. Keys and Values must be Strings." msgstr "Invalid Metadata. Keys and Values must be Strings." #, python-format msgid "Invalid PKCS10 Data: %(reason)s" msgstr "Invalid PKCS10 Data: %(reason)s" #, python-format msgid "Invalid Parent CA: %(parent_ca_ref)s" msgstr "Invalid Parent CA: %(parent_ca_ref)s" msgid "Invalid algorithm passed in" msgstr "Invalid algorithm passed in" #, python-format msgid "Invalid container: %(reason)s" msgstr "Invalid container: %(reason)s" msgid "Invalid date for 'expiration'" msgstr "Invalid date for 'expiration'" msgid "Invalid extensions data." msgstr "Invalid extensions data." msgid "Invalid operation requested - Reason: {reason}" msgstr "Invalid operation requested - Reason: {reason}" msgid "Invalid payload for payload_content_encoding" msgstr "Invalid payload for payload_content_encoding" msgid "Invalid request_status returned by CA" msgstr "Invalid request_status returned by CA" msgid "Invalid request_status {status} for request_id {request_id}" msgstr "Invalid request_status {status} for request_id {request_id}" msgid "Invalid status '{status}' for {entity_name}." msgstr "Invalid status '{status}' for {entity_name}." 
#, python-format msgid "Invalid subject DN: %(subject_dn)s" msgstr "Invalid subject DN: %(subject_dn)s" msgid "KEK not yet created." msgstr "KEK not yet created." msgid "KMIP plugin action not support." msgstr "KMIP plugin action not support." msgid "" "KMIP plugin does not currently support protecting the private key with a " "passphrase" msgstr "" "KMIP plugin does not currently support protecting the private key with a " "passphrase" msgid "Key archival failed. Error returned from KRA." msgstr "Key archival failed. Error returned from KRA." msgid "Key encryption key to be used by Simple Crypto Plugin" msgstr "Key encryption key to be used by Simple Crypto Plugin" msgid "Key generation failed. Error returned from KRA." msgstr "Key generation failed. Error returned from KRA." msgid "" "Keystone notification queue topic name. This name needs to match one of " "values mentioned in Keystone deployment's 'notification_topics' " "configuration e.g. notification_topics=notifications, " "barbican_notificationsMultiple servers may listen on a topic and messages " "will be dispatched to one of the servers in a round-robin fashion. That's " "why Barbican service should have its own dedicated notification queue so " "that it receives all of Keystone notifications." msgstr "" "Keystone notification queue topic name. This name needs to match one of " "values mentioned in Keystone deployment's 'notification_topics' " "configuration e.g. notification_topics=notifications, " "barbican_notificationsMultiple servers may listen on a topic and messages " "will be dispatched to one of the servers in a round-robin fashion. That's " "why Barbican service should have its own dedicated notification queue so " "that it receives all of Keystone notifications." msgid "List available secret stores" msgstr "List available secret stores" msgid "List of automatically approved enrollment profiles" msgstr "List of automatically approved enrollment profiles" msgid "List of certificate plugins to load." msgstr "List of certificate plugins to load." msgid "List of crypto plugins to load." msgstr "List of crypto plugins to load." msgid "List of secret store plugins to load." msgstr "List of secret store plugins to load." msgid "" "List of suffix to use for looking up plugins which are supported with " "multiple backend support." msgstr "" "List of suffix to use for looking up plugins which are supported with " "multiple backend support." msgid "Malformed JSON" msgstr "Malformed JSON" msgid "Master HMAC Key label (as stored in the HSM)" msgstr "Master HMAC Key label (as stored in the HSM)" msgid "Master KEK label (as stored in the HSM)" msgstr "Master KEK label (as stored in the HSM)" msgid "Master KEK length in bytes." msgstr "Master KEK length in bytes." msgid "Maximum allowed http request size against the barbican-api." msgstr "Maximum allowed HTTP request size against the Barbican-api." msgid "Maximum allowed secret size in bytes." msgstr "Maximum allowed secret size in bytes." msgid "" "Maximum number of database connection retries during startup. Set to -1 to " "specify an infinite retry count." msgstr "" "Maximum number of database connection retries during startup. Set to -1 to " "specify an infinite retry count." msgid "Maximum page size for the 'limit' paging URL parameter." msgstr "Maximum page size for the 'limit' paging URL parameter." msgid "Missing X-Project-Id" msgstr "Missing X-Project-Id" msgid "Missing required argument." msgstr "Missing required argument." 
#, python-format msgid "Missing required metadata field for %(required)s" msgstr "Missing required metadata field for %(required)s" msgid "Modify request: unable to cancel: {message}" msgstr "Modify request: unable to cancel: {message}" msgid "More than one key found for label" msgstr "More than one key found for label" msgid "Must be a positive integer that is a multiple of 8" msgstr "Must be a positive integer that is a multiple of 8" msgid "Must supply Non-None {0} argument for CertificateAuthority entry." msgstr "Must supply Non-None {0} argument for CertificateAuthority entry." msgid "Must supply non-Blank {0} argument for SecretStores entry." msgstr "Must supply non-Blank {0} argument for SecretStores entry." msgid "" "Must supply non-None {0} argument for CertificateAuthorityMetadatum entry." msgstr "" "Must supply non-None {0} argument for CertificateAuthorityMetadatum entry." msgid "Must supply non-None {0} argument for ContainerACL entry." msgstr "Must supply non-None {0} argument for ContainerACL entry." msgid "Must supply non-None {0} argument for ContainerACLUser entry." msgstr "Must supply non-None {0} argument for ContainerACLUser entry." msgid "Must supply non-None {0} argument for OrderBarbicanMetadatum entry." msgstr "Must supply non-None {0} argument for OrderBarbicanMetadatum entry." msgid "Must supply non-None {0} argument for OrderPluginMetadatum entry." msgstr "Must supply non-None {0} argument for OrderPluginMetadatum entry." msgid "" "Must supply non-None {0} argument for PreferredCertificateAuthority entry." msgstr "" "Must supply non-None {0} argument for PreferredCertificateAuthority entry." msgid "" "Must supply non-None {0} argument for ProjectCertificateAuthority entry." msgstr "" "Must supply non-None {0} argument for ProjectCertificateAuthority entry." msgid "Must supply non-None {0} argument for ProjectQuotas entry." msgstr "Must supply non-None {0} argument for ProjectQuotas entry." msgid "Must supply non-None {0} argument for ProjectSecretStore entry." msgstr "Must supply non-None {0} argument for ProjectSecretStore entry." msgid "Must supply non-None {0} argument for SecretACL entry." msgstr "Must supply non-None {0} argument for SecretACL entry." msgid "Must supply non-None {0} argument for SecretACLUser entry." msgstr "Must supply non-None {0} argument for SecretACLUser entry." msgid "Must supply non-None {0} argument for SecretConsumerMetadatum entry." msgstr "Must supply non-None {0} argument for SecretConsumerMetadatum entry." msgid "Must supply non-None {0} argument for SecretStoreMetadatum entry." msgstr "Must supply non-None {0} argument for SecretStoreMetadatum entry." msgid "Must supply non-None {0} argument for SecretUserMetadatum entry." msgstr "Must supply non-None {0} argument for SecretUserMetadatum entry." msgid "Must supply non-None {0} argument for TransportKey entry." msgstr "Must supply non-None {0} argument for TransportKey entry." msgid "Must supply non-None {entity_name}." msgstr "Must supply non-None {entity_name}." msgid "Must supply {entity_name} with id=None (i.e. new entity)." msgstr "Must supply {entity_name} with id=None (i.e. new entity)." msgid "No SQL connection configured" msgstr "No SQL connection configured" msgid "No container found with container-ID {id}" msgstr "No container found with container-ID {id}" msgid "No data supplied to process." msgstr "No data supplied to process." 
msgid "No entities of type {entity_name} found" msgstr "No entities of type {entity_name} found" msgid "No key handle was found" msgstr "No key handle was found" msgid "No plugin was found that could support your request" msgstr "No plugin was found that could support your request" msgid "No profile_id specified" msgstr "No profile_id specified" msgid "No request found for request_id {request_id} for order {order_id}" msgstr "No request found for request_id {request_id} for order {order_id}" msgid "No request returned in enrollment_results" msgstr "No request returned in enrollment_results" msgid "No request_data specified" msgstr "No request_data specified" msgid "No secret found with secret-ID {id}" msgstr "No secret found with secret-ID {id}" msgid "No secret information found" msgstr "No secret information found" msgid "No secret information provided to encrypt." msgstr "No secret information provided to encrypt." msgid "No secret store plugins have been configured" msgstr "No secret store plugins have been configured" #, python-format msgid "" "No support for value set on field '%(field)s' on schema '%(schema)s': " "%(reason)s" msgstr "" "No support for value set on field '%(field)s' on schema '%(schema)s': " "%(reason)s" #, python-format msgid "No token was found in slot %(slot_id)s" msgstr "No token was found in slot %(slot_id)s" msgid "No {entity_name} found with keystone-ID {id}" msgstr "No {entity_name} found with keystone-ID {id}" msgid "No {entity} found with ID {id}" msgstr "No {entity} found with ID {id}" msgid "Not Allowed. Sorry, only the creator of a consumer can delete it." msgstr "Not Allowed. Sorry, only the creator of a consumer can delete it." msgid "" "Not Found. Multiple backends support is not enabled in service configuration." msgstr "" "Not Found. Multiple backends support is not enabled in service configuration." msgid "Not Found. No preferred secret store defined for this project." msgstr "Not Found. No preferred secret store defined for this project." msgid "Not Found. Provided consumer id is invalid." msgstr "Not Found. Provided consumer id is invalid." msgid "Not Found. Provided container id is invalid." msgstr "Not Found. Provided container id is invalid." msgid "Not Found. Provided secret id is invalid." msgstr "Not Found. Provided secret id is invalid." msgid "Not Found. Provided transport key id is invalid." msgstr "Not Found. Provided transport key id is invalid." msgid "Not Found. Secret store not found." msgstr "Not Found. Secret store not found." msgid "Not Found. Sorry but your secret has no payload." msgstr "Not Found. Sorry but your secret has no payload." msgid "Not Found. Transport Key not found." msgstr "Not Found. Transport Key not found." msgid "Number of CAs allowed per project" msgstr "Number of CAs allowed per project" msgid "Number of asynchronous worker processes" msgstr "Number of asynchronous worker processes" msgid "Number of consumers allowed per project" msgstr "Number of consumers allowed per project" msgid "Number of containers allowed per project" msgstr "Number of containers allowed per project" msgid "Number of orders allowed per project" msgstr "Number of orders allowed per project" msgid "Number of secrets allowed per project" msgstr "Number of secrets allowed per project" msgid "Only 'generic' containers can be modified." msgstr "Only 'generic' containers can be modified." msgid "Only PENDING orders can be updated. Order is in the{0} state." msgstr "Only PENDING orders can be updated. Order is in the{0} state." 
msgid "Only subordinate CAs can be deleted." msgstr "Only subordinate CAs can be deleted." msgid "Only support PKCS#1 encoding of asymmetric keys" msgstr "Only support PKCS#1 encoding of asymmetric keys" msgid "Operation is not supported." msgstr "Operation is not supported." msgid "Operation not supported by Dogtag Plugin" msgstr "Operation not supported by Dogtag Plugin" msgid "Order creation" msgstr "Order creation" msgid "Order deletion" msgstr "Order deletion" msgid "Order not found." msgstr "Order not found." msgid "Order retrieval" msgstr "Order retrieval" msgid "Order type \"{order_type}\" not implemented." msgstr "Order type \"{order_type}\" not implemented." msgid "Order type \"{order_type}\" not supported." msgstr "Order type \"{order_type}\" not supported." msgid "Order update" msgstr "Order update" msgid "Order update is not supported." msgstr "Order update is not supported." msgid "Order(s) retrieval" msgstr "Order(s) retrieval" msgid "Passphrase encryption is not supported for DSA algorithm" msgstr "Passphrase encryption is not supported for DSA algorithm" msgid "" "Passphrase encryption is not supported for symmetric key generating " "algorithms." msgstr "" "Passphrase encryption is not supported for symmetric key generating " "algorithms." msgid "Password for authenticating with KMIP server" msgstr "Password for authenticating with KMIP server" msgid "Password for the NSS certificate databases" msgstr "Password for the NSS certificate databases" msgid "Password to login to PKCS11 session" msgstr "Password to login to PKCS11 session" msgid "Path to CA certificate chain file" msgstr "Path to CA certificate chain file" msgid "Path to CA certificate file" msgstr "Path to CA certificate file" msgid "Path to CA certificate key file" msgstr "Path to CA certificate key file" msgid "Path to CA chain pkcs7 file" msgstr "Path to CA chain pkcs7 file" msgid "Path to PEM file for authentication" msgstr "Path to PEM file for authentication" msgid "Path to the NSS certificate database" msgstr "Path to the NSS certificate database" msgid "Path to vendor PKCS11 library" msgstr "Path to vendor PKCS11 library" msgid "" "Period in seconds after which SQLAlchemy should reestablish its connection " "to the database. MySQL uses a default `wait_timeout` of 8 hours, after which " "it will drop idle connections. This can result in 'MySQL Gone Away' " "exceptions. If you notice this, you can lower this value to ensure that " "SQLAlchemy reconnects before MySQL can drop the connection." msgstr "" "Period in seconds after which SQLAlchemy should re-establish its connection " "to the database. MySQL uses a default `wait_timeout` of 8 hours, after which " "it will drop idle connections. This can result in 'MySQL Gone Away' " "exceptions. If you notice this, you can lower this value to ensure that " "SQLAlchemy reconnects before MySQL can drop the connection." msgid "Placeholder" msgstr "Placeholder" msgid "Plugin does not support generation of subordinate CAs" msgstr "Plugin does not support generation of subordinate CAs" msgid "" "Plugin lookup property 'stores_lookup_suffix' is not defined in service " "configuration" msgstr "" "Plugin lookup property 'stores_lookup_suffix' is not defined in service " "configuration" msgid "Port for the Dogtag instance" msgstr "Port for the Dogtag instance" msgid "Port for the KMIP server" msgstr "Port for the KMIP server" msgid "" "Preferred Secret Store plugin '{store_name}' is not currently set in service " "configuration. 
This is probably a server misconfiguration." msgstr "" "Preferred Secret Store plugin '{store_name}' is not currently set in service " "configuration. This is probably a server misconfiguration." msgid "Problem decoding payload" msgstr "Problem decoding payload" msgid "Problem seen during certificate processing - Reason: {reason}" msgstr "Problem seen during certificate processing - Reason: {reason}" msgid "Problem seen during crypto processing - Reason: {reason}" msgstr "Problem seen during crypto processing - Reason: {reason}" msgid "Problem with data in certificate request - Reason: {reason}" msgstr "Problem with data in certificate request - Reason: {reason}" msgid "Process TypeOrder" msgstr "Process TypeOrder" msgid "Process TypeOrder failure seen - please contact site administrator." msgstr "Process TypeOrder failure seen - please contact site administrator." msgid "Profile for simple CMC requests" msgstr "Profile for simple CMC requests" msgid "Project KEK Cache Item Limit" msgstr "Project KEK Cache Item Limit" msgid "Project KEK Cache Time To Live, in seconds" msgstr "Project KEK Cache Time To Live, in seconds" msgid "Project KEK length in bytes." msgstr "Project KEK length in bytes." msgid "Project Quotas" msgstr "Project Quotas" msgid "Project cleanup via Keystone notifications" msgstr "Project clean-up via Keystone notifications" msgid "Project quotas not found." msgstr "Project quotas not found." #, python-format msgid "Provided Transport key %(transport_key_id)s could not be found" msgstr "Provided Transport key %(transport_key_id)s could not be found" msgid "Provided field value is not supported" msgstr "Provided field value is not supported" msgid "Provided information too large to process" msgstr "Provided information too large to process" msgid "" "Provided object does not match schema '{schema}': {reason}. Invalid " "property: '{property}'" msgstr "" "Provided object does not match schema '{schema}': {reason}. Invalid " "property: '{property}'" msgid "Provided transport key was not found." msgstr "Provided transport key was not found." msgid "Queue namespace" msgstr "Queue namespace" msgid "Queue topic name" msgstr "Queue topic name" #, python-format msgid "" "Quota reached for project %(external_project_id)s. Only %(quota)s " "%(resource_type)s are allowed." msgstr "" "Quota reached for project %(external_project_id)s. Only %(quota)s " "%(resource_type)s are allowed." msgid "Quotas" msgstr "Quotas" msgid "Read Error" msgstr "Read Error" msgid "Removing preferred secret store" msgstr "Removing preferred secret store" msgid "" "Request {request_id} reports status_complete, but no cert_id has been " "returned" msgstr "" "Request {request_id} reports status_complete, but no cert_id has been " "returned" msgid "Requested algorithm is not supported" msgstr "Requested algorithm is not supported" msgid "Retries when storing or generating secrets" msgstr "Retries when storing or generating secrets" msgid "Retrieve global default secret store" msgstr "Retrieve global default secret store" msgid "Retrieve project preferred store" msgstr "Retrieve project preferred store" msgid "Role used to identify an authenticated user as administrator." msgstr "Role used to identify an authenticated user as administrator." msgid "" "SQLAlchemy connection string for the reference implementation registry " "server. Any valid SQLAlchemy connection string is fine. See: http://www." "sqlalchemy.org/docs/05/reference/sqlalchemy/connections.html#sqlalchemy." "create_engine. 
Note: For absolute addresses, use '////' slashes after " "'sqlite:'." msgstr "" "SQLAlchemy connection string for the reference implementation registry " "server. Any valid SQLAlchemy connection string is fine. See: http://www." "sqlalchemy.org/docs/05/reference/sqlalchemy/connections.html#sqlalchemy." "create_engine. Note: For absolute addresses, use '////' slashes after " "'sqlite:'." msgid "SSL Enabled/Disabled" msgstr "SSL Enabled/Disabled" msgid "SSL version, maps to the module ssl's constants" msgstr "SSL version, maps to the module ssl's constants" msgid "Seconds (float) to wait before starting retry scheduler" msgstr "Seconds (float) to wait before starting retry scheduler" msgid "Seconds (float) to wait between periodic schedule events" msgstr "Seconds (float) to wait between periodic schedule events" msgid "Secret '{secret_name}' with reference '{secret_ref}' doesn't exist." msgstr "Secret '{secret_name}' with reference '{secret_ref}' doesn't exist." msgid "Secret Accept of '{accept}' not supported" msgstr "Secret Accept of '{accept}' not supported" msgid "Secret Content-Encoding of '{content_encoding}' not supported" msgstr "Secret Content-Encoding of '{content_encoding}' not supported" msgid "" "Secret Store plugin '{store_name}' is still in use and can not be removed. " "Its missing in service configuration. This is probably a server " "misconfiguration." msgstr "" "Secret Store plugin '{store_name}' is still in use and cannot be removed. " "It is missing from the service configuration. This is probably a server " "misconfiguration." msgid "Secret algorithm of '{algorithm}' not supported" msgstr "Secret algorithm of '{algorithm}' not supported" msgid "Secret already has data, cannot modify it." msgstr "Secret already has data, cannot modify it." msgid "Secret creation" msgstr "Secret creation" msgid "Secret deletion" msgstr "Secret deletion" msgid "Secret encryption mechanism" msgstr "Secret encryption mechanism" msgid "Secret generate supported plugin not found." msgstr "Secret generate supported plugin not found." msgid "Secret metadata creation" msgstr "Secret metadata creation" msgid "Secret metadata expected but not received." msgstr "Secret metadata expected but not received." msgid "Secret metadata not found." msgstr "Secret metadata not found." msgid "Secret metadata retrieval" msgstr "Secret metadata retrieval" msgid "Secret metadatum creation" msgstr "Secret metadatum creation" msgid "Secret metadatum removal" msgstr "Secret metadatum removal" msgid "Secret metadatum retrieval" msgstr "Secret metadatum retrieval" msgid "Secret metadatum update" msgstr "Secret metadatum update" msgid "Secret not found." msgstr "Secret not found." msgid "Secret object type {object_type} is not supported" msgstr "Secret object type {object_type} is not supported" msgid "Secret payload retrieval" msgstr "Secret payload retrieval" msgid "Secret provided doesn't exist." msgstr "Secret provided doesn't exist." msgid "Secret provided for '{secret_name}' doesn't exist." msgstr "Secret provided for '{secret_name}' doesn't exist." msgid "Secret provided is not in the container" msgstr "Secret provided is not in the container" msgid "Secret retrieval" msgstr "Secret retrieval" msgid "Secret store plugin \"{name}\" not found." msgstr "Secret store plugin \"{name}\" not found." msgid "Secret store plugin not found." msgstr "Secret store plugin not found." msgid "Secret store retrieval" msgstr "Secret store retrieval" msgid "Secret store supported plugin not found."
msgstr "Secret store supported plugin not found." msgid "Secret type can not be converted to DER" msgstr "Secret type can not be converted to DER" msgid "Secret type can not be converted to PEM" msgstr "Secret type can not be converted to PEM" msgid "Secret update" msgstr "Secret update" msgid "Secret(s) retrieval" msgstr "Secret(s) retrieval" msgid "SecretACL(s) Update" msgstr "SecretACL(s) Update" msgid "SecretACL(s) deletion" msgstr "SecretACL(s) deletion" msgid "SecretACL(s) retrieval" msgstr "SecretACL(s) retrieval" msgid "Secret_ref does not match the configured hostname, please try again" msgstr "Secret_ref does not match the configured hostname, please try again" msgid "Secrets container not found." msgstr "Secrets container not found." msgid "" "Secrets of type {secret_type} should not have a passphrase in the database, " "for being used during retrieval." msgstr "" "Secrets of type {secret_type} should not have a passphrase in the database, " "for being used during retrieval." msgid "Server name for RPC task processing server" msgstr "Server name for RPC task processing server" msgid "Setting preferred secret store" msgstr "Setting preferred secret store" msgid "" "Show SQLAlchemy pool-related debugging output in logs (sets DEBUG log level " "output) if specified." msgstr "" "Show SQLAlchemy pool-related debugging output in logs (sets DEBUG log level " "output) if specified." msgid "Signing key incorrect" msgstr "Signing key incorrect" msgid "" "Size of pool used by SQLAlchemy. This is the largest number of connections " "that will be kept persistently in the pool. Can be set to 0 to indicate no " "size limit. To disable pooling, use a NullPool with sql_pool_class instead. " "Comment out to allow SQLAlchemy to select the default." msgstr "" "Size of pool used by SQLAlchemy. This is the largest number of connections " "that will be kept persistently in the pool. Can be set to 0 to indicate no " "size limit. To disable pooling, use a NullPool with sql_pool_class instead. " "Comment out to allow SQLAlchemy to select the default." msgid "Status: {status}, Reason: {reason}, Message: {message}" msgstr "Status: {status}, Reason: {reason}, Message: {message}" msgid "Subordinate CA is not owned by this project" msgstr "Subordinate CA is not owned by this project" msgid "Symantec password for authentication" msgstr "Symantec password for authentication" msgid "Symantec username for authentication" msgstr "Symantec username for authentication" msgid "System" msgstr "System" msgid "" "Text-based binary secret payloads must specify a content-encoding of 'base64'" msgstr "" "Text-based binary secret payloads must specify a content-encoding of 'base64'" msgid "The ca_id provided in the request is invalid" msgstr "The ca_id provided in the request is invalid" msgid "The ca_id provided in the request is not defined for this project" msgstr "The ca_id provided in the request is not defined for this project" msgid "" "The default exchange under which topics are scoped. May be overridden by an " "exchange name specified in the transport_url option." msgstr "" "The default exchange under which topics are scoped. May be overridden by an " "exchange name specified in the transport_url option." 
msgid "" "The minimum required reference name is 'certificate' for Certificate type" msgstr "" "The minimum required reference name is 'certificate' for Certificate type" msgid "" "The minimum required reference names are 'public_key' and'private_key' for " "RSA type" msgstr "" "The minimum required reference names are 'public_key' and'private_key' for " "RSA type" msgid "" "The request returned a 413 Request Entity Too Large. This generally means " "that rate limiting or a quota threshold was breached." msgstr "" "The request returned a 413 Request Entity Too Large. This generally means " "that rate limiting or a quota threshold was breached." msgid "" "The requested Store Plugin {plugin_name} is not currently available. This is " "probably a server misconfiguration." msgstr "" "The requested Store Plugin {plugin_name} is not currently available. This is " "probably a server misconfiguration." msgid "The version you requested wasn't found" msgstr "The version you requested wasn't found" msgid "" "There are {count} plugins with global default as True in service " "configuration. Only one plugin can have this as True" msgstr "" "There are {count} plugins with global default as True in service " "configuration. Only one plugin can have this as True" msgid "" "There is no plugin defined with global default as True. One of plugin must " "be identified as global default" msgstr "" "There is no plugin defined with global default as True. One of plugin must " "be identified as global default" msgid "There was an error with the PKCS#11 library." msgstr "There was an error with the PKCS#11 library." msgid "Time in days for CA entries to expire" msgstr "Time in days for CA entries to expire" msgid "Transport Key Creation" msgstr "Transport Key Creation" msgid "Transport Key deletion" msgstr "Transport Key deletion" msgid "Transport Key retrieval" msgstr "Transport Key retrieval" msgid "Transport Key(s) retrieval" msgstr "Transport Key(s) retrieval" msgid "" "Transport key wrapped session key has been provided to wrap secrets for " "retrieval, but the transport key id has not been provided." msgstr "" "Transport key wrapped session key has been provided to wrap secrets for " "retrieval, but the transport key id has not been provided." msgid "Tried to register crypto plugin with null or empty name." msgstr "Tried to register crypto plugin with null or empty name." msgid "True enables keystone notification listener functionality." msgstr "True enables Keystone notification listener functionality." msgid "True enables queuing, False invokes workers synchronously" msgstr "True enables queuing, False invokes workers synchronously" msgid "" "True enables requeue feature in case of notification processing error. " "Enable this only when underlying transport supports this feature." msgstr "" "True enables re-queue feature in case of notification processing error. " "Enable this only when underlying transport supports this feature." msgid "URI provided invalid query string parameters." msgstr "URI provided invalid query string parameters." msgid "Unable to decode request data." msgstr "Unable to decode request data." msgid "Unencrypted data must be a byte type, but was {unencrypted_type}" msgstr "Unencrypted data must be a byte type, but was {unencrypted_type}" msgid "Unexpected content type. Expected content types are: {expected}" msgstr "Unexpected content type. Expected content types are: {expected}" msgid "Unknown" msgstr "Unknown" msgid "Unknown attribute type provided." 
msgstr "Unknown attribute type provided." msgid "Unsupported decryption mechanism" msgstr "Unsupported decryption mechanism" msgid "Update Order" msgstr "Update Order" msgid "Update Order failure seen - please contact site administrator." msgstr "Update Order failure seen - please contact site administrator." msgid "User friendly plugin name" msgstr "User friendly plugin name" msgid "Username for authenticating with KMIP server" msgstr "Username for authenticating with KMIP server" msgid "Version of tasks invoked via notifications" msgstr "Version of tasks invoked via notifications" msgid "Version of tasks invoked via queue" msgstr "Version of tasks invoked via queue" msgid "Version retrieval" msgstr "Version retrieval" msgid "Working directory for Dogtag plugin" msgstr "Working directory for Dogtag plugin" msgid "Wrong payload content-type" msgstr "Wrong payload content-type" msgid "content-encoding of '{content_encoding}' not supported" msgstr "content-encoding of '{content_encoding}' not supported" msgid "content-type of '{content_type}' not supported" msgstr "content-type of '{content_type}' not supported" msgid "failure seen - please contact site administrator." msgstr "failure seen - please contact site administrator." msgid "key_label must be set for master_keys" msgstr "key_label must be set for master_keys" msgid "library_path is required" msgstr "library_path is required" msgid "no request found for this order" msgstr "no request found for this order" msgid "nss_password is required" msgstr "nss_password is required" msgid "" "only 'private_key', 'certificate' , 'private_key_passphrase', or " "'intermediates' reference names are allowed for Certificate type" msgstr "" "only 'private_key', 'certificate' , 'private_key_passphrase', or " "'intermediates' reference names are allowed for Certificate type" msgid "" "only 'private_key', 'public_key' and 'private_key_passphrase' reference " "names are allowed for RSA type" msgstr "" "only 'private_key', 'public_key' and 'private_key_passphrase' reference " "names are allowed for RSA type" msgid "password is required" msgstr "password is required" msgid "payload must be provided when payload_content_type is specified" msgstr "payload must be provided when payload_content_type is specified" msgid "payload_content_encoding is not one of {supported}" msgstr "payload_content_encoding is not one of {supported}" msgid "payload_content_type is not one of {supported}" msgstr "payload_content_type is not one of {supported}" msgid "pem_path is required" msgstr "pem_path is required" msgid "plugin_name must be provided" msgstr "plugin_name must be provided" msgid "request_id {req_id} returns COMPLETE but no cert returned" msgstr "request_id {req_id} returns COMPLETE but no cert returned" msgid "retrieve a secret from plugin: {plugin}" msgstr "retrieve a secret from plugin: {plugin}" msgid "" "store or generate a secret of type {secret_type} with algorithm {algorithm}, " "bit length {bit_length}, and mode {mode}" msgstr "" "store or generate a secret of type {secret_type} with algorithm {algorithm}, " "bit length {bit_length}, and mode {mode}" msgid "transport_key must be provided" msgstr "transport_key must be provided" msgid "url is required" msgstr "URL is required" msgid "username is required" msgstr "username is required" msgid "{entity_name} is missing query build method for get project entities." msgstr "{entity_name} is missing query build method for get project entities." msgid "{entity_name} status is required." 
msgstr "{entity_name} status is required." msgid "" "{operation} attempt not allowed - please review your user/project privileges" msgstr "" "{operation} attempt not allowed - please review your user/project privileges" msgid "{operation} failure seen - please contact site administrator." msgstr "{operation} failure seen - please contact site administrator." msgid "{operation} issue seen - {reason}." msgstr "{operation} issue seen - {reason}." msgid "{request} not found for {operation} for order_id {order_id}" msgstr "{request} not found for {operation} for order_id {order_id}" msgid "{schema_name}' within '{parent_schema_name}" msgstr "{schema_name}' within '{parent_schema_name}" barbican-9.1.0.dev50/barbican/hacking/0000775000175000017500000000000013616500640017556 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/hacking/checks.py0000664000175000017500000002325213616500636021401 0ustar sahidsahid00000000000000# Copyright (c) 2016, GohighSec # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import ast import re import six import pep8 """ Guidelines for writing new hacking checks - Use only for Barbican specific tests. OpenStack general tests should be submitted to the common 'hacking' module. - Pick numbers in the range B3xx. Find the current test with the highest allocated number and then pick the next value. - Keep the test method code in the source file ordered based on the B3xx value. - List the new rule in the top level HACKING.rst file - Add test cases for each new rule to barbican/tests/test_hacking.py """ oslo_namespace_imports = re.compile(r"from[\s]*oslo[.](.*)") dict_constructor_with_list_copy_re = re.compile(r".*\bdict\((\[)?(\(|\[)") assert_no_xrange_re = re.compile(r"\s*xrange\s*\(") assert_True = re.compile(r".*assertEqual\(True, .*\)") assert_None = re.compile(r".*assertEqual\(None, .*\)") assert_Not_Equal = re.compile(r".*assertNotEqual\(None, .*\)") assert_Is_Not = re.compile(r".*assertIsNot\(None, .*\)") no_log_warn = re.compile(r".*LOG.warn\(.*\)") class BaseASTChecker(ast.NodeVisitor): """Provides a simple framework for writing AST-based checks. Subclasses should implement visit_* methods like any other AST visitor implementation. When they detect an error for a particular node the method should call ``self.add_error(offending_node)``. Details about where in the code the error occurred will be pulled from the node object. Subclasses should also provide a class variable named CHECK_DESC to be used for the human readable error message. """ CHECK_DESC = 'No check message specified' def __init__(self, tree, filename): """This object is created automatically by pep8. 
:param tree: an AST tree :param filename: name of the file being analyzed (ignored by our checks) """ self._tree = tree self._errors = [] def run(self): """Called automatically by pep8.""" self.visit(self._tree) return self._errors def add_error(self, node, message=None): """Add an error caused by a node to the list of errors for pep8.""" message = message or self.CHECK_DESC error = (node.lineno, node.col_offset, message, self.__class__) self._errors.append(error) def _check_call_names(self, call_node, names): if isinstance(call_node, ast.Call): if isinstance(call_node.func, ast.Name): if call_node.func.id in names: return True return False class CheckLoggingFormatArgs(BaseASTChecker): """Check for improper use of logging format arguments. LOG.debug("Volume %s caught fire and is at %d degrees C and climbing.", ('volume1', 500)) The format arguments should not be a tuple as it is easy to miss. """ CHECK_DESC = 'B310 Log method arguments should not be a tuple.' LOG_METHODS = [ 'debug', 'info', 'warn', 'warning', 'error', 'exception', 'critical', 'fatal', 'trace', 'log' ] def _find_name(self, node): """Return the fully qualified name or a Name or Attribute.""" if isinstance(node, ast.Name): return node.id elif (isinstance(node, ast.Attribute) and isinstance(node.value, (ast.Name, ast.Attribute))): method_name = node.attr obj_name = self._find_name(node.value) if obj_name is None: return None return obj_name + '.' + method_name elif isinstance(node, six.string_types): return node else: # could be Subscript, Call or many more return None def visit_Call(self, node): """Look for the 'LOG.*' calls.""" # extract the obj_name and method_name if isinstance(node.func, ast.Attribute): obj_name = self._find_name(node.func.value) if isinstance(node.func.value, ast.Name): method_name = node.func.attr elif isinstance(node.func.value, ast.Attribute): obj_name = self._find_name(node.func.value) method_name = node.func.attr else: # could be Subscript, Call or many more return super(CheckLoggingFormatArgs, self).generic_visit(node) # obj must be a logger instance and method must be a log helper if (obj_name != 'LOG' or method_name not in self.LOG_METHODS): return super(CheckLoggingFormatArgs, self).generic_visit(node) # the call must have arguments if not len(node.args): return super(CheckLoggingFormatArgs, self).generic_visit(node) # any argument should not be a tuple for arg in node.args: if isinstance(arg, ast.Tuple): self.add_error(arg) return super(CheckLoggingFormatArgs, self).generic_visit(node) class CheckForStrUnicodeExc(BaseASTChecker): """Checks for the use of str() or unicode() on an exception. This currently only handles the case where str() or unicode() is used in the scope of an exception handler. If the exception is passed into a function, returned from an assertRaises, or used on an exception created in the same scope, this does not catch it. """ CHECK_DESC = ('B314 str() and unicode() cannot be used on an ' 'exception. 
Remove or use six.text_type()') def __init__(self, tree, filename): super(CheckForStrUnicodeExc, self).__init__(tree, filename) self.name = [] self.already_checked = [] # Python 2 def visit_TryExcept(self, node): for handler in node.handlers: if handler.name: self.name.append(handler.name.id) super(CheckForStrUnicodeExc, self).generic_visit(node) self.name = self.name[:-1] else: super(CheckForStrUnicodeExc, self).generic_visit(node) # Python 3 def visit_ExceptHandler(self, node): if node.name: self.name.append(node.name) super(CheckForStrUnicodeExc, self).generic_visit(node) self.name = self.name[:-1] else: super(CheckForStrUnicodeExc, self).generic_visit(node) def visit_Call(self, node): if self._check_call_names(node, ['str', 'unicode']): if node not in self.already_checked: self.already_checked.append(node) if isinstance(node.args[0], ast.Name): if node.args[0].id in self.name: self.add_error(node.args[0]) super(CheckForStrUnicodeExc, self).generic_visit(node) def check_oslo_namespace_imports(logical_line, physical_line, filename): """'oslo_' should be used instead of 'oslo.' B317 """ if pep8.noqa(physical_line): return if re.match(oslo_namespace_imports, logical_line): msg = ("B317: '%s' must be used instead of '%s'.") % ( logical_line.replace('oslo.', 'oslo_'), logical_line) yield(0, msg) def dict_constructor_with_list_copy(logical_line): """Use a dict comprehension instead of a dict constructor B318 """ msg = ("B318: Must use a dict comprehension instead of a dict constructor" " with a sequence of key-value pairs." ) if dict_constructor_with_list_copy_re.match(logical_line): yield (0, msg) def no_xrange(logical_line): """Do not use 'xrange' B319 """ if assert_no_xrange_re.match(logical_line): yield(0, "B319: Do not use xrange().") def validate_assertTrue(logical_line): """Use 'assertTrue' instead of 'assertEqual' B312 """ if re.match(assert_True, logical_line): msg = ("B312: Unit tests should use assertTrue(value) instead" " of using assertEqual(True, value).") yield(0, msg) def validate_assertIsNone(logical_line): """Use 'assertIsNone' instead of 'assertEqual' B311 """ if re.match(assert_None, logical_line): msg = ("B311: Unit tests should use assertIsNone(value) instead" " of using assertEqual(None, value).") yield(0, msg) def no_log_warn_check(logical_line): """Disallow 'LOG.warn' B320 """ msg = ("B320: LOG.warn is deprecated, please use LOG.warning!") if re.match(no_log_warn, logical_line): yield(0, msg) def validate_assertIsNotNone(logical_line): """Use 'assertIsNotNone' B321 """ if re.match(assert_Not_Equal, logical_line) or \ re.match(assert_Is_Not, logical_line): msg = ("B321: Unit tests should use assertIsNotNone(value) instead" " of using assertNotEqual(None, value) or" " assertIsNot(None, value).") yield(0, msg) def factory(register): register(CheckForStrUnicodeExc) register(CheckLoggingFormatArgs) register(check_oslo_namespace_imports) register(dict_constructor_with_list_copy) register(no_xrange) register(validate_assertTrue) register(validate_assertIsNone) register(no_log_warn_check) register(validate_assertIsNotNone) barbican-9.1.0.dev50/barbican/hacking/__init__.py0000664000175000017500000000000013616500636021662 0ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/cmd/0000775000175000017500000000000013616500640016715 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/cmd/pkcs11_migrate_kek_signatures.py0000664000175000017500000001323413616500636025207 0ustar sahidsahid00000000000000#!/usr/bin/env python # Licensed under the Apache License, Version 
2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import argparse import base64 import six import traceback from oslo_db.sqlalchemy import session from oslo_serialization import jsonutils as json from sqlalchemy import orm from sqlalchemy.orm import scoping from barbican.common import utils from barbican.model import models from barbican.plugin.crypto import p11_crypto from barbican.plugin.crypto.pkcs11 import P11CryptoPluginException # Use config values from p11_crypto CONF = p11_crypto.CONF class KekSignatureMigrator(object): def __init__(self, db_connection, library_path, login, slot_id): self.dry_run = False self.db_engine = session.create_engine(db_connection) self._session_creator = scoping.scoped_session( orm.sessionmaker( bind=self.db_engine, autocommit=True ) ) self.crypto_plugin = p11_crypto.P11CryptoPlugin(CONF) self.plugin_name = utils.generate_fullname_for(self.crypto_plugin) self.pkcs11 = self.crypto_plugin.pkcs11 self.session = self.pkcs11.get_session() def recalc_kek_hmac(self, project, kek): with self.db_session.begin(): meta_dict = json.loads(kek.plugin_meta) iv = base64.b64decode(meta_dict['iv']) wrapped_key = base64.b64decode(meta_dict['wrapped_key']) hmac = base64.b64decode(meta_dict['hmac']) kek_data = iv + wrapped_key hmac_key = self.pkcs11.get_key_handle( meta_dict['hmac_label'], self.session) # Verify if hmac signature validates with new method try: self.pkcs11.verify_hmac(hmac_key, hmac, kek_data, self.session) sig_good = True except P11CryptoPluginException as e: if 'CKR_SIGNATURE_INVALID' in six.text_type(e): sig_good = False else: raise if sig_good: msg = 'Skipping KEK {}, good signature' print(msg.format(kek.kek_label)) return # Previous method failed. # Verify if hmac signature validates with old method try: self.pkcs11.verify_hmac( hmac_key, hmac, wrapped_key, self.session ) sig_bad = True except P11CryptoPluginException as e: if 'CKR_SIGNATURE_INVALID' in six.text_type(e): sig_bad = False else: raise if not sig_bad: msg = "Skipping KEK {}, can not validate with either method!" 
print(msg.format(kek.kek_label)) return if self.dry_run: msg = 'KEK {} needs recalculation' print(msg.format(kek.kek_label)) return # Calculate new HMAC new_hmac = self.pkcs11.compute_hmac( hmac_key, kek_data, self.session ) # Update KEK plugin_meta with new hmac signature meta_dict['hmac'] = base64.b64encode(new_hmac) kek.plugin_meta = p11_crypto.json_dumps_compact(meta_dict) def get_keks_for_project(self, project): keks = [] with self.db_session.begin() as transaction: print('Retrieving KEKs for Project {}'.format(project.id)) query = transaction.session.query(models.KEKDatum) query = query.filter_by(project_id=project.id) query = query.filter_by(plugin_name=self.plugin_name) keks = query.all() return keks def get_projects(self): print('Retrieving all available projects') projects = [] with self.db_session.begin() as transaction: projects = transaction.session.query(models.Project).all() return projects @property def db_session(self): return self._session_creator() def execute(self, dry_run=True): self.dry_run = dry_run if self.dry_run: print('-- Running in dry-run mode --') projects = self.get_projects() for project in projects: keks = self.get_keks_for_project(project) for kek in keks: try: self.recalc_kek_hmac(project, kek) except Exception: print('Error occurred! SQLAlchemy automatically rolled-' 'back the transaction') traceback.print_exc() def main(): script_desc = ( 'Utility to migrate existing project KEK signatures to include IV.' ) parser = argparse.ArgumentParser(description=script_desc) parser.add_argument( '--dry-run', action='store_true', help='Displays changes that will be made (Non-destructive)' ) args = parser.parse_args() migrator = KekSignatureMigrator( db_connection=CONF.sql_connection, library_path=CONF.p11_crypto_plugin.library_path, login=CONF.p11_crypto_plugin.login, slot_id=CONF.p11_crypto_plugin.slot_id ) migrator.execute(args.dry_run) if __name__ == '__main__': main() barbican-9.1.0.dev50/barbican/cmd/pkcs11_kek_rewrap.py0000664000175000017500000001573113616500636022617 0ustar sahidsahid00000000000000#!/usr/bin/env python # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
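# NOTE: this script is normally run through the barbican-manage wrapper
# rather than directly (see the deprecation warning printed in main()
# below). A minimal usage sketch, assuming the "hsm rewrap_pkek"
# subcommand spelling provided by barbican-manage:
#
#     barbican-manage hsm rewrap_pkek --dry-run   # preview changes only
#     barbican-manage hsm rewrap_pkek             # rewrap project KEKs
#
# Configuration (sql_connection and the [p11_crypto_plugin] section) is
# read from the standard barbican.conf.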
import argparse import base64 import traceback from oslo_db.sqlalchemy import session from oslo_serialization import jsonutils as json from sqlalchemy import orm from sqlalchemy.orm import scoping from barbican.common import utils from barbican.model import models from barbican.plugin.crypto import p11_crypto # Use config values from p11_crypto CONF = p11_crypto.CONF class KekRewrap(object): def __init__(self, conf): self.dry_run = False self.db_engine = session.create_engine(conf.sql_connection) self._session_creator = scoping.scoped_session( orm.sessionmaker( bind=self.db_engine, autocommit=True ) ) self.crypto_plugin = p11_crypto.P11CryptoPlugin(conf) self.pkcs11 = self.crypto_plugin.pkcs11 self.plugin_name = utils.generate_fullname_for(self.crypto_plugin) self.hsm_session = self.pkcs11.get_session() self.new_mkek_label = self.crypto_plugin.mkek_label self.new_hmac_label = self.crypto_plugin.hmac_label self.new_mkek_type = self.crypto_plugin.mkek_key_type self.new_hmac_type = self.crypto_plugin.hmac_key_type self.new_mkek = self.crypto_plugin._get_master_key( self.new_mkek_type, self.new_mkek_label) self.new_mkhk = self.crypto_plugin._get_master_key( self.new_hmac_type, self.new_hmac_label) def rewrap_kek(self, project, kek): with self.db_session.begin(): meta_dict = json.loads(kek.plugin_meta) # check if old and new mkek and hmac labels are the same # if so, skip this kek. if (self.new_mkek_label == meta_dict['mkek_label'] and self.new_hmac_label == meta_dict['hmac_label']): return if self.dry_run: msg = 'Would have unwrapped key with {} and rewrapped with {}' print(msg.format(meta_dict['mkek_label'], self.new_mkek_label)) print('Would have updated KEKDatum in db {}'.format(kek.id)) print('Rewrapping KEK {}'.format(kek.id)) print('Pre-change IV: {}, Wrapped Key: {}'.format( meta_dict['iv'], meta_dict['wrapped_key'])) return session = self.hsm_session # TODO(alee) We never store the mkek and hmac key types in the db # record for the KEK metadata. Therefore, for now assume that the # key types will not change. 
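# The rewrap below mirrors the code that follows, in five steps:
#   1. fetch handles for the old MKEK and HMAC keys named in this KEK's
#      plugin metadata,
#   2. verify the stored HMAC over (iv + wrapped_key),
#   3. unwrap the project KEK with the old MKEK,
#   4. wrap it with the new MKEK and compute a fresh HMAC with the new
#      HMAC key,
#   5. destroy the transient unwrapped key and write the updated
#      metadata back to the KEKDatum row.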
# Get KEK's master keys kek_mkek = self.pkcs11.get_key_handle( self.new_mkek_type, meta_dict['mkek_label'], session ) kek_mkhk = self.pkcs11.get_key_handle( self.new_hmac_type, meta_dict['hmac_label'], session ) # Decode data iv = base64.b64decode(meta_dict['iv']) wrapped_key = base64.b64decode(meta_dict['wrapped_key']) hmac = base64.b64decode(meta_dict['hmac']) # Verify HMAC kek_data = iv + wrapped_key self.pkcs11.verify_hmac(kek_mkhk, hmac, kek_data, session) # Unwrap KEK current_kek = self.pkcs11.unwrap_key(kek_mkek, iv, wrapped_key, session) # Wrap KEK with new master keys new_kek = self.pkcs11.wrap_key(self.new_mkek, current_kek, session) # Compute HMAC for rewrapped KEK new_kek_data = new_kek['iv'] + new_kek['wrapped_key'] new_hmac = self.pkcs11.compute_hmac(self.new_mkhk, new_kek_data, session) # Destroy unwrapped KEK self.pkcs11.destroy_object(current_kek, session) # Build updated meta dict updated_meta = meta_dict.copy() updated_meta['mkek_label'] = self.new_mkek_label updated_meta['hmac_label'] = self.new_hmac_label updated_meta['iv'] = base64.b64encode(new_kek['iv']) updated_meta['wrapped_key'] = base64.b64encode( new_kek['wrapped_key']) updated_meta['hmac'] = base64.b64encode(new_hmac) print('Post-change IV: {}, Wrapped Key: {}'.format( updated_meta['iv'], updated_meta['wrapped_key'])) # Update KEK metadata in DB kek.plugin_meta = p11_crypto.json_dumps_compact(updated_meta) def get_keks_for_project(self, project): keks = [] with self.db_session.begin() as transaction: print('Retrieving KEKs for Project {}'.format(project.id)) query = transaction.session.query(models.KEKDatum) query = query.filter_by(project_id=project.id) query = query.filter_by(plugin_name=self.plugin_name) keks = query.all() return keks def get_projects(self): print('Retrieving all available projects') projects = [] with self.db_session.begin() as transaction: projects = transaction.session.query(models.Project).all() return projects @property def db_session(self): return self._session_creator() def execute(self, dry_run=True): self.dry_run = dry_run if self.dry_run: print('-- Running in dry-run mode --') projects = self.get_projects() for project in projects: keks = self.get_keks_for_project(project) for kek in keks: try: self.rewrap_kek(project, kek) except Exception: print('Error occurred! SQLAlchemy automatically rolled-' 'back the transaction') traceback.print_exc() def main(): script_desc = 'Utility to re-wrap project KEKs after rotating an MKEK.' parser = argparse.ArgumentParser(description=script_desc) parser.add_argument( '--dry-run', action='store_true', help='Displays changes that will be made (Non-destructive)' ) args = parser.parse_args() print("Warning: Calling this utility directly is deprecated. " "Please use barbican-manage instead") rewrapper = KekRewrap(CONF) rewrapper.execute(args.dry_run) rewrapper.pkcs11.return_session(rewrapper.hsm_session) if __name__ == '__main__': main() barbican-9.1.0.dev50/barbican/cmd/retry_scheduler.py0000664000175000017500000000431713616500636022504 0ustar sahidsahid00000000000000#!/usr/bin/env python # Copyright (c) 2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ Barbican worker server, running a periodic retry/scheduler process. """ import eventlet import os import sys # Oslo messaging RPC server uses eventlet. eventlet.monkey_patch() # 'Borrowed' from the Glance project: # If ../barbican/__init__.py exists, add ../ to Python search path, so that # it will override what happens to be installed in /usr/(local/)lib/python... possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]), os.pardir, os.pardir)) if os.path.exists(os.path.join(possible_topdir, 'barbican', '__init__.py')): sys.path.insert(0, possible_topdir) from barbican.common import config from barbican import queue from barbican.queue import retry_scheduler from barbican import version from oslo_log import log from oslo_service import service def fail(returncode, e): sys.stderr.write("ERROR: {0}\n".format(e)) sys.exit(returncode) def main(): try: CONF = config.CONF CONF(sys.argv[1:], project='barbican', version=version.version_info.version_string) # Import and configure logging. log.setup(CONF, 'barbican-retry-scheduler') LOG = log.getLogger(__name__) LOG.debug("Booting up Barbican worker retry/scheduler node...") # Queuing initialization (as a client only). queue.init(CONF, is_server_side=False) service.launch( CONF, retry_scheduler.PeriodicServer(), restart_method='mutate' ).wait() except RuntimeError as e: fail(1, e) if __name__ == '__main__': main() barbican-9.1.0.dev50/barbican/cmd/db_manage.py0000664000175000017500000001623213616500636021175 0ustar sahidsahid00000000000000#!/usr/bin/env python # Copyright 2010-2015 OpenStack LLC. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import argparse import os import sys sys.path.insert(0, os.getcwd()) from barbican.common import config from barbican.model import clean from barbican.model.migration import commands from oslo_log import log # Import and configure logging. CONF = config.CONF log.setup(CONF, 'barbican') LOG = log.getLogger(__name__) class DatabaseManager(object): """Database Manager class. Builds and executes a CLI parser to manage the Barbican database. This extends the Alembic commands. 
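    For example, to purge soft-deleted rows using the same invocation as
    the functional tests in barbican/cmd/functionaltests/test_db_manage.py:

        python barbican/cmd/db_manage.py clean -m 0 -p -e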
""" def __init__(self, conf): self.conf = conf self.parser = self.get_main_parser() self.subparsers = self.parser.add_subparsers( title='subcommands', description='Action to perform') self.add_revision_args() self.add_upgrade_args() self.add_history_args() self.add_current_args() self.add_clean_args() def get_main_parser(self): """Create top-level parser and arguments.""" parser = argparse.ArgumentParser(description='Barbican DB manager.') parser.add_argument('--dburl', '-d', default=self.conf.sql_connection, help='URL to the database.') return parser def add_revision_args(self): """Create 'revision' command parser and arguments.""" create_parser = self.subparsers.add_parser('revision', help='Create a ' 'new DB version file.') create_parser.add_argument('--message', '-m', default='DB change', help='the message for the DB change') create_parser.add_argument('--autogenerate', help='autogenerate from models', action='store_true') create_parser.set_defaults(func=self.revision) def add_upgrade_args(self): """Create 'upgrade' command parser and arguments.""" create_parser = self.subparsers.add_parser('upgrade', help='Upgrade to a ' 'future version DB ' 'version file') create_parser.add_argument('--version', '-v', default='head', help='the version to upgrade to, or else ' 'the latest/head if not specified.') create_parser.set_defaults(func=self.upgrade) def add_history_args(self): """Create 'history' command parser and arguments.""" create_parser = self.subparsers.add_parser( 'history', help='List changeset scripts in chronological order.') create_parser.add_argument('--verbose', '-V', action="store_true", help='Show full information about the ' 'revisions.') create_parser.set_defaults(func=self.history) def add_current_args(self): """Create 'current' command parser and arguments.""" create_parser = self.subparsers.add_parser( 'current', help='Display the current revision for a database.') create_parser.add_argument('--verbose', '-V', action="store_true", help='Show full information about the ' 'revision.') create_parser.set_defaults(func=self.current) def add_clean_args(self): """Create 'clean' command parser and arguments.""" create_parser = self.subparsers.add_parser( 'clean', help='Clean up soft deletions in the database') create_parser.add_argument( '--min-days', '-m', type=int, default=90, help='minimum number of days to keep soft deletions. default is' ' %(default)s days.') create_parser.add_argument('--clean-unassociated-projects', '-p', action="store_true", help='Remove projects that have no ' 'associated resources.') create_parser.add_argument('--soft-delete-expired-secrets', '-e', action="store_true", help='Soft delete expired secrets.') create_parser.add_argument('--verbose', '-V', action='store_true', help='Show full information about the' ' cleanup') create_parser.add_argument('--log-file', '-L', default=CONF.log_file, type=str, help='Set log file location. 
' 'Default value for log_file can be ' 'found in barbican.conf') create_parser.set_defaults(func=self.clean) def revision(self, args): """Process the 'revision' Alembic command.""" commands.generate(autogenerate=args.autogenerate, message=args.message, sql_url=args.dburl) def upgrade(self, args): """Process the 'upgrade' Alembic command.""" LOG.debug("Performing database schema migration...") commands.upgrade(to_version=args.version, sql_url=args.dburl) def history(self, args): commands.history(args.verbose, sql_url=args.dburl) def current(self, args): commands.current(args.verbose, sql_url=args.dburl) def clean(self, args): clean.clean_command( sql_url=args.dburl, min_num_days=args.min_days, do_clean_unassociated_projects=args.clean_unassociated_projects, do_soft_delete_expired_secrets=args.soft_delete_expired_secrets, verbose=args.verbose, log_file=args.log_file) def execute(self): """Parse the command line arguments.""" args = self.parser.parse_args() # Perform other setup here... args.func(args) def _exception_is_successful_exit(thrown_exception): return (isinstance(thrown_exception, SystemExit) and (thrown_exception.code is None or thrown_exception.code == 0)) def main(): try: dm = DatabaseManager(CONF) dm.execute() except Exception as ex: if not _exception_is_successful_exit(ex): LOG.exception('Problem seen trying to run barbican db manage') sys.stderr.write("ERROR: {0}\n".format(ex)) sys.exit(1) if __name__ == '__main__': main() barbican-9.1.0.dev50/barbican/cmd/worker.py0000664000175000017500000000417413616500636020613 0ustar sahidsahid00000000000000#!/usr/bin/env python # Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ Barbican worker server. """ import eventlet import os import sys # Oslo messaging RPC server uses eventlet. eventlet.monkey_patch() # 'Borrowed' from the Glance project: # If ../barbican/__init__.py exists, add ../ to Python search path, so that # it will override what happens to be installed in /usr/(local/)lib/python... possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]), os.pardir, os.pardir)) if os.path.exists(os.path.join(possible_topdir, 'barbican', '__init__.py')): sys.path.insert(0, possible_topdir) from barbican.common import config from barbican import queue from barbican.queue import server from barbican import version from oslo_log import log from oslo_service import service def fail(returncode, e): sys.stderr.write("ERROR: {0}\n".format(e)) sys.exit(returncode) def main(): try: CONF = config.CONF CONF(sys.argv[1:], project='barbican', version=version.version_info.version_string) # Import and configure logging. 
log.setup(CONF, 'barbican') LOG = log.getLogger(__name__) LOG.debug("Booting up Barbican worker node...") # Queuing initialization queue.init(CONF) service.launch( CONF, server.TaskServer(), workers=CONF.queue.asynchronous_workers, restart_method='mutate' ).wait() except RuntimeError as e: fail(1, e) if __name__ == '__main__': main() barbican-9.1.0.dev50/barbican/cmd/status.py0000664000175000017500000000341113616500636020616 0ustar sahidsahid00000000000000# Copyright (c) 2018 NEC, Corp. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg from oslo_upgradecheck import upgradecheck from barbican.i18n import _ class Checks(upgradecheck.UpgradeCommands): """Upgrade checks for the barbican-status upgrade check command Upgrade checks should be added as separate methods in this class and added to _upgrade_checks tuple. """ def _check_placeholder(self): # This is just a placeholder for upgrade checks, it should be # removed when the actual checks are added return upgradecheck.Result(upgradecheck.Code.SUCCESS) # The format of the check functions is to return an # oslo_upgradecheck.upgradecheck.Result # object with the appropriate # oslo_upgradecheck.upgradecheck.Code and details set. # If the check hits warnings or failures then those should be stored # in the returned Result's "details" attribute. The # summary will be rolled up at the end of the check() method. _upgrade_checks = ( # In the future there should be some real checks added here (_('Placeholder'), _check_placeholder), ) def main(): return upgradecheck.main( cfg.CONF, project='barbican', upgrade_command=Checks()) barbican-9.1.0.dev50/barbican/cmd/keystone_listener.py0000664000175000017500000000477613616500636023060 0ustar sahidsahid00000000000000#!/usr/bin/env python # Copyright 2014 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ Barbican Keystone notification listener server. """ import eventlet import os import sys # Oslo messaging notification server uses eventlet. # # To have remote debugging, thread module needs to be disabled. # eventlet.monkey_patch(thread=False) eventlet.monkey_patch() # 'Borrowed' from the Glance project: # If ../barbican/__init__.py exists, add ../ to Python search path, so that # it will override what happens to be installed in /usr/(local/)lib/python... 
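# e.g. running this file from a source checkout makes the in-tree
# 'barbican' package shadow any copy installed under site-packages.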
possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]), os.pardir, os.pardir)) if os.path.exists(os.path.join(possible_topdir, 'barbican', '__init__.py')): sys.path.insert(0, possible_topdir) from barbican.common import config from barbican import queue from barbican.queue import keystone_listener from barbican import version from oslo_log import log from oslo_service import service def fail(returncode, e): sys.stderr.write("ERROR: {0}\n".format(e)) sys.exit(returncode) def main(): try: config.setup_remote_pydev_debug() CONF = config.CONF CONF(sys.argv[1:], project='barbican', version=version.version_info.version_string) # Import and configure logging. log.setup(CONF, 'barbican') LOG = log.getLogger(__name__) LOG.info("Booting up Barbican Keystone listener node...") # Queuing initialization queue.init(CONF) if getattr(getattr(CONF, queue.KS_NOTIFICATIONS_GRP_NAME), 'enable'): service.launch( CONF, keystone_listener.MessageServer(CONF), restart_method='mutate' ).wait() else: LOG.info("Exiting as Barbican Keystone listener is not enabled...") except RuntimeError as e: fail(1, e) if __name__ == '__main__': sys.exit(main()) barbican-9.1.0.dev50/barbican/cmd/functionaltests/0000775000175000017500000000000013616500640022142 5ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/cmd/functionaltests/__init__.py0000664000175000017500000000000013616500636024246 0ustar sahidsahid00000000000000barbican-9.1.0.dev50/barbican/cmd/functionaltests/test_db_manage.py0000664000175000017500000002606513616500636025466 0ustar sahidsahid00000000000000# Copyright (c) 2016 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import os import time from testtools import testcase from barbican.common import config as barbican_config from barbican.tests import utils from functionaltests.api import base from functionaltests.api.v1.behaviors import container_behaviors from functionaltests.api.v1.behaviors import secret_behaviors from functionaltests.api.v1.models import container_models from functionaltests.api.v1.models import secret_models from functionaltests.common import config from oslo_db.sqlalchemy import session # Import and configure logging. 
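# These tests drive db_manage.py against a running deployment: secrets and
# containers are created through the API as two project admins (admin_a and
# admin_b from the functional-test config), the clean command is run via
# os.system, and the database is then queried directly to verify which
# rows survived.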
BCONF = barbican_config.CONF CONF = config.get_config() admin_a = CONF.rbac_users.admin_a admin_b = CONF.rbac_users.admin_b class DBManageTestCase(base.TestCase): def setUp(self): super(DBManageTestCase, self).setUp() self.sbehaviors = secret_behaviors.SecretBehaviors(self.client) self.cbehaviors = container_behaviors.ContainerBehaviors(self.client) db_url = BCONF.sql_connection time.sleep(5) # Setup session for tests to query DB engine = session.create_engine(db_url) self.conn = engine.connect() def tearDown(self): super(DBManageTestCase, self).tearDown() self.conn.close() self.sbehaviors.delete_all_created_secrets() self.cbehaviors.delete_all_created_containers() def _create_secret_list(self, user, delete=False, expiration="2050-02-28T19:14:44.180394"): secret_defaults_data = { "name": "AES key", "expiration": expiration, "algorithm": "aes", "bit_length": 256, "mode": "cbc", "payload": "gF6+lLoF3ohA9aPRpt+6bQ==", "payload_content_type": "application/octet-stream", "payload_content_encoding": "base64", } secret_list = [] for i in range(0, 5): secret_model = secret_models.SecretModel(**secret_defaults_data) resp, secret_ref = self.sbehaviors.create_secret(secret_model, user_name=user) self.assertEqual(resp.status_code, 201) self.assertIsNotNone(secret_ref) secret_list.append(secret_ref) if delete is True: self._delete_secret_list(secret_list, user) return secret_list def _create_container_uuid_list( self, user, secret_expiration="2050-02-28T19:14:44.180394", delete_secret=False, delete_container=False): secret_list = self._create_secret_list( user=user, expiration=secret_expiration ) container_data = { "name": "containername", "type": "generic", "secret_refs": [ { "name": "secret", "secret_ref": secret_list[0] } ] } container_list = [] for i in range(0, 5): container_model = container_models.ContainerModel(**container_data) post_container_resp, c_ref = self.cbehaviors.create_container( container_model, user_name=user) self.assertEqual(post_container_resp.status_code, 201) self.assertIsNotNone(c_ref) container_list.append(c_ref) if delete_container is True: self._delete_container_list(container_list, user) if delete_secret is True: self._delete_secret_list(secret_list) return container_list def _delete_secret_list(self, secret_list, user): for secret in secret_list: del_resp = self.sbehaviors.delete_secret(secret, user_name=user) self.assertEqual(del_resp.status_code, 204) def _delete_container_list(self, container_list, user): for container in container_list: del_resp = self.cbehaviors.delete_container(container, user_name=user) self.assertEqual(del_resp.status_code, 204) def _get_uuid(self, ref): uuid = ref.split('/')[-1] return uuid @testcase.attr('positive') def test_active_secret_not_deleted(self): """Verify that active secrets are not removed""" project_a_secrets = self._create_secret_list(user=admin_a) project_b_secrets = self._create_secret_list(user=admin_b) os.system("python barbican/cmd/db_manage.py clean -m 0 -p -e") results = self.conn.execute("select * from secrets") secret_list = [] for row in results: secret_list.append(str(row[0])) for secret in project_a_secrets: secret_uuid = self._get_uuid(secret) self.assertIn(secret_uuid, secret_list) for secret in project_b_secrets: secret_uuid = self._get_uuid(secret) self.assertIn(secret_uuid, secret_list) @testcase.attr('positive') def test_soft_deleted_secrets_are_removed(self): """Test that soft deleted secrets are removed""" project_a_secrets = self._create_secret_list(user=admin_a, delete=True) project_b_secrets = 
    @testcase.attr('positive')
    def test_soft_deleted_secrets_are_removed(self):
        """Test that soft deleted secrets are removed"""
        project_a_secrets = self._create_secret_list(user=admin_a,
                                                     delete=True)
        project_b_secrets = self._create_secret_list(user=admin_b,
                                                     delete=True)

        os.system("python barbican/cmd/db_manage.py clean -m 0 -p -e")

        results = self.conn.execute("select * from secrets")
        secret_list = []
        for row in results:
            secret_list.append(str(row[0]))

        for secret in project_a_secrets:
            secret_uuid = self._get_uuid(secret)
            self.assertNotIn(secret_uuid, secret_list)

        for secret in project_b_secrets:
            secret_uuid = self._get_uuid(secret)
            self.assertNotIn(secret_uuid, secret_list)

    @testcase.attr('positive')
    def test_expired_secrets_are_not_removed_from_db(self):
        """Test expired secrets are left in a soft deleted state.

        The clean command sets its threshold date when it starts.
        Expired secrets are soft deleted during the run, so their
        deleted_at date ends up later than the threshold date and
        they are not purged.
        """
        current_time = utils.create_timestamp_w_tz_and_offset(seconds=10)
        project_a_secrets = self._create_secret_list(user=admin_a,
                                                     expiration=current_time)
        project_b_secrets = self._create_secret_list(user=admin_b,
                                                     expiration=current_time)

        time.sleep(10)

        os.system("python barbican/cmd/db_manage.py clean -m 0 -p -e")

        results = self.conn.execute("select * from secrets")
        secret_list = []
        for row in results:
            secret_list.append(str(row[0]))

        for secret in project_a_secrets:
            secret_uuid = self._get_uuid(secret)
            self.assertIn(secret_uuid, secret_list)

        for secret in project_b_secrets:
            secret_uuid = self._get_uuid(secret)
            self.assertIn(secret_uuid, secret_list)

    @testcase.attr('positive')
    def test_no_soft_deleted_secrets_in_db(self):
        """Test that no soft deleted secrets are in db"""
        os.system("python barbican/cmd/db_manage.py clean -m 0 -p -e")

        results = self.conn.execute("select * from secrets where deleted=1")
        secret_list = []
        for row in results:
            secret_list.append(str(row[0]))

        self.assertEqual(len(secret_list), 0)

    @testcase.attr('positive')
    def test_active_containers_not_deleted(self):
        """Active containers are not deleted"""
        project_a_containers = self._create_container_uuid_list(
            user=admin_a)
        project_b_containers = self._create_container_uuid_list(
            user=admin_b)

        os.system("python barbican/cmd/db_manage.py clean -m 0 -p -e")

        results = self.conn.execute("select * from containers")
        container_list = []
        for row in results:
            container_list.append(str(row[0]))

        for container in project_a_containers:
            container_uuid = self._get_uuid(container)
            self.assertIn(container_uuid, container_list)

        for container in project_b_containers:
            container_uuid = self._get_uuid(container)
            self.assertIn(container_uuid, container_list)

    @testcase.attr('positive')
    def test_cleanup_soft_deleted_containers(self):
        """Soft deleted containers are deleted"""
        project_a_delete_containers = self._create_container_uuid_list(
            user=admin_a,
            delete_container=True)
        project_b_delete_containers = self._create_container_uuid_list(
            user=admin_b,
            delete_container=True)

        os.system("python barbican/cmd/db_manage.py clean -m 0 -p -e")

        results = self.conn.execute("select * from containers")
        container_list = []
        for row in results:
            container_list.append(str(row[0]))

        for container in project_a_delete_containers:
            container_uuid = self._get_uuid(container)
            self.assertNotIn(container_uuid, container_list)

        for container in project_b_delete_containers:
            container_uuid = self._get_uuid(container)
            self.assertNotIn(container_uuid, container_list)
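    # The expiration tests above and below share one timing technique:
    # entries are created with an expiration ten seconds in the future via
    # create_timestamp_w_tz_and_offset(seconds=10), the test sleeps past
    # that moment, and only then runs the clean command, so the rows are
    # genuinely expired by the time the cleanup executes.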
    @testcase.attr('positive')
    def test_containers_with_expired_secrets_are_deleted(self):
        """Containers with expired secrets are deleted"""
        current_time = utils.create_timestamp_w_tz_and_offset(seconds=10)

        project_a_delete_containers = self._create_container_uuid_list(
            user=admin_a,
            delete_container=True,
            secret_expiration=current_time)
        project_b_delete_containers = self._create_container_uuid_list(
            user=admin_b,
            delete_container=True,
            secret_expiration=current_time)

        time.sleep(10)

        os.system("python barbican/cmd/db_manage.py clean -m 0 -p -e")

        results = self.conn.execute("select * from containers")
        container_list = []
        for row in results:
            container_list.append(str(row[0]))

        for container in project_a_delete_containers:
            container_uuid = self._get_uuid(container)
            self.assertNotIn(container_uuid, container_list)

        for container in project_b_delete_containers:
            container_uuid = self._get_uuid(container)
            self.assertNotIn(container_uuid, container_list)
barbican-9.1.0.dev50/barbican/cmd/functionaltests/.testr.conf0000664000175000017500000000050013616500636024230 0ustar sahidsahid00000000000000[DEFAULT]
test_command=
    OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} \
    OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} \
    OS_LOG_CAPTURE=${OS_LOG_CAPTURE:-1} \
    ${PYTHON:-python} -m coverage run -a -m subunit.run discover -s ./cmd -t . $LISTOPT $IDOPTION
test_id_option=--load-list $IDFILE
test_list_option=--list
barbican-9.1.0.dev50/barbican/cmd/barbican_manage.py0000664000175000017500000004326113616500636022353 0ustar sahidsahid00000000000000#!/usr/bin/env python

# Copyright 2010-2015 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
    CLI interface for barbican management
"""

from __future__ import print_function

import argparse
import sys

from oslo_config import cfg
from oslo_log import log as logging

from barbican.cmd import pkcs11_kek_rewrap as pkcs11_rewrap
from barbican.common import config
from barbican.model import clean
from barbican.model.migration import commands
from barbican.model import sync
from barbican.plugin.crypto import pkcs11
import barbican.version

CONF = cfg.CONF
LOG = logging.getLogger(__name__)


# Decorators for actions
def args(*args, **kwargs):
    def _decorator(func):
        func.__dict__.setdefault('args', []).insert(0, (args, kwargs))
        return func
    return _decorator
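# A note on the args() factory above: decorators apply bottom-up, and each
# @args(...) call prepends its (args, kwargs) tuple via insert(0, ...), so
# func.__dict__['args'] ends up listing the options in source order.  A
# dispatcher (not shown in this section) can then feed each stored tuple
# to argparse's parser.add_argument(*args, **kwargs) when it builds the
# subcommand parsers for the command classes below.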
class DbCommands(object):
    """Class for managing barbican database"""

    description = "Subcommands for managing barbican database"

    clean_description = "Clean up soft deletions in the database"

    @args('--db-url', '-d', metavar='', dest='dburl',
          help='barbican database URL')
    @args('--min-days', '-m', metavar='', dest='min_days', type=int,
          default=90, help='minimum number of days to keep soft deletions. '
                           'default is %(default)s days.')
    @args('--verbose', '-V', action='store_true', dest='verbose',
          default=False, help='Show verbose information about the clean up.')
    @args('--log-file', '-L', metavar='', type=str, default=None,
          dest='log_file',
          help='Set log file location. '
               'Default value for log_file can be found in barbican.conf')
    @args('--clean-unassociated-projects', '-p', action='store_true',
          dest='do_clean_unassociated_projects', default=False,
          help='Remove projects that have no associated resources.')
    @args('--soft-delete-expired-secrets', '-e', action='store_true',
          dest='do_soft_delete_expired_secrets', default=False,
          help='Soft delete secrets that are expired.')
    def clean(self, dburl=None, min_days=None, verbose=None, log_file=None,
              do_clean_unassociated_projects=None,
              do_soft_delete_expired_secrets=None):
        """Clean soft deletions in the database"""
        if dburl is None:
            dburl = CONF.sql_connection
        if log_file is None:
            log_file = CONF.log_file

        clean.clean_command(
            sql_url=dburl,
            min_num_days=min_days,
            do_clean_unassociated_projects=do_clean_unassociated_projects,
            do_soft_delete_expired_secrets=do_soft_delete_expired_secrets,
            verbose=verbose,
            log_file=log_file)

    revision_description = "Create a new database version file"

    @args('--db-url', '-d', metavar='', dest='dburl',
          help='barbican database URL')
    @args('--message', '-m', metavar='', default='DB change',
          help='the message for the DB change')
    @args('--autogenerate', action="store_true", dest='autogen',
          default=False, help='autogenerate from models')
    def revision(self, dburl=None, message=None, autogen=None):
        """Process the 'revision' Alembic command."""
        if dburl is None:
            commands.generate(autogenerate=autogen, message=str(message),
                              sql_url=CONF.sql_connection)
        else:
            commands.generate(autogenerate=autogen, message=str(message),
                              sql_url=str(dburl))

    upgrade_description = "Upgrade to a future database version"

    @args('--db-url', '-d', metavar='', dest='dburl',
          help='barbican database URL')
    @args('--version', '-v', metavar='', default='head',
          help='the version to upgrade to, or else '
               'the latest/head if not specified.')
    def upgrade(self, dburl=None, version=None):
        """Process the 'upgrade' Alembic command."""
        if dburl is None:
            commands.upgrade(to_version=str(version),
                             sql_url=CONF.sql_connection)
        else:
            commands.upgrade(to_version=str(version), sql_url=str(dburl))

    history_description = "Show database changeset history"

    @args('--db-url', '-d', metavar='', dest='dburl',
          help='barbican database URL')
    @args('--verbose', '-V', action='store_true', dest='verbose',
          default=False, help='Show full information about the revisions.')
    def history(self, dburl=None, verbose=None):
        """Process the 'history' Alembic command."""
        if dburl is None:
            commands.history(verbose, sql_url=CONF.sql_connection)
        else:
            commands.history(verbose, sql_url=str(dburl))

    current_description = "Show current revision of database"

    @args('--db-url', '-d', metavar='', dest='dburl',
          help='barbican database URL')
    @args('--verbose', '-V', action='store_true', dest='verbose',
          default=False, help='Show full information about the revisions.')
    def current(self, dburl=None, verbose=None):
        """Process the 'current' Alembic command."""
        if dburl is None:
            commands.current(verbose, sql_url=CONF.sql_connection)
        else:
            commands.current(verbose, sql_url=str(dburl))
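    # revision, upgrade, history, and current are thin wrappers around the
    # corresponding Alembic operations in barbican.model.migration.commands;
    # each falls back to CONF.sql_connection whenever --db-url is omitted.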
    sync_secret_stores_description = ("Sync secret_stores with "  # nosec
                                      "barbican.conf")

    @args('--db-url', '-d', metavar='', dest='dburl',
          help='barbican database URL')
    @args('--verbose', '-V', action='store_true', dest='verbose',
          default=False, help='Show verbose information about the clean up.')
    @args('--log-file', '-L', metavar='', type=str, default=None,
          dest='log_file',
          help='Set log file location. '
               'Default value for log_file can be found in barbican.conf')
    def sync_secret_stores(self, dburl=None, verbose=None, log_file=None):
        """Sync secret_stores table with barbican.conf"""
        if dburl is None:
            dburl = CONF.sql_connection
        if log_file is None:
            log_file = CONF.log_file

        sync.sync_secret_stores(
            sql_url=dburl,
            verbose=verbose,
            log_file=log_file)


class HSMCommands(object):
    """Class for managing HSM/PKCS11 plugin"""

    description = "Subcommands for managing HSM/PKCS11"

    check_mkek_description = "Checks if an MKEK label is available"

    @args('--library-path', metavar='', dest='libpath',
          default='/usr/lib/libCryptoki2_64.so',
          help='Path to vendor PKCS11 library')
    @args('--slot-id', metavar='', dest='slotid', default=1,
          help='HSM Slot id (Should correspond to a configured PKCS11 slot, '
               'default is 1)')
    @args('--passphrase', metavar='', default=None, required=True,
          help='Password to login to PKCS11 session')
    @args('--label', '-L', metavar='