barbican-2.0.0/0000775000567000056710000000000012701406024014407 5ustar jenkinsjenkins00000000000000barbican-2.0.0/functionaltests/0000775000567000056710000000000012701406024017634 5ustar jenkinsjenkins00000000000000barbican-2.0.0/functionaltests/pretty_tox.sh0000664000567000056710000000032412701405673022421 0ustar jenkinsjenkins00000000000000#!/usr/bin/env bash set -o pipefail TESTRARGS=$1 python setup.py testr --testr-args="--subunit $TESTRARGS" | subunit-trace --no-failure-debug -f retval=$? echo -e "\nSlowest Tests:\n" testr slowest exit $retvalbarbican-2.0.0/functionaltests/__init__.py0000664000567000056710000000000012701405673021744 0ustar jenkinsjenkins00000000000000barbican-2.0.0/functionaltests/run_tests.sh0000775000567000056710000000260512701405673022235 0ustar jenkinsjenkins00000000000000#!/bin/bash # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # How many seconds to wait for the API to be responding before giving up API_RESPONDING_TIMEOUT=20 if ! timeout ${API_RESPONDING_TIMEOUT} sh -c "while ! curl -s http://127.0.0.1:9311/ 2>/dev/null | grep -q 'v1' ; do sleep 1; done"; then echo "The Barbican API failed to respond within ${API_RESPONDING_TIMEOUT} seconds" exit 1 fi echo "Successfully contacted the Barbican API" # run the tests sequentially testr init testr run --subunit | subunit-trace --no-failure-debug -f retval=$? 
testr slowest # run the tests in parallel SKIP=^\(\?\!\.\*\(ProjectQuotasPagingTestCase\|QuotaEnforcementTestCase\|ListingCAsTestCase\|ProjectCATestCase\|GlobalPreferredCATestCase\|CertificateAuthoritiesTestCase\)\) testr init testr run $SKIP --parallel --subunit | subunit-trace --no-failure-debug -f retval=$(($retval || $?)) testr slowest exit $retval barbican-2.0.0/functionaltests/.testr.conf0000664000567000056710000000045512701405673021737 0ustar jenkinsjenkins00000000000000[DEFAULT] test_command= OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} \ OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} \ OS_LOG_CAPTURE=${OS_LOG_CAPTURE:-1} \ ${PYTHON:-python} -m subunit.run discover -s ./api -t . $LISTOPT $IDOPTION test_id_option=--load-list $IDFILE test_list_option=--list barbican-2.0.0/functionaltests/post_test_hook.sh0000775000567000056710000000151412701405673023251 0ustar jenkinsjenkins00000000000000#!/bin/bash # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # This script is executed inside post_test_hook function in devstack gate. 
# Install packages from test-requirements.txt sudo pip install -r /opt/stack/new/barbican/test-requirements.txt cd /opt/stack/new/barbican/functionaltests echo 'Running Functional Tests' sudo ./run_tests.sh barbican-2.0.0/functionaltests/common/0000775000567000056710000000000012701406024021124 5ustar jenkinsjenkins00000000000000barbican-2.0.0/functionaltests/common/auth.py0000664000567000056710000000643412701405673022457 0ustar jenkinsjenkins00000000000000""" Copyright 2015 Rackspace Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
""" from keystoneclient.v2_0 import client as v2_client from keystoneclient.v3 import client as v3_client from requests import auth STORED_AUTHENTICATION = None class FunctionalTestAuth(auth.AuthBase): def __init__(self, endpoint, version, username, password, project_name): self.endpoint = endpoint self.version = version self.username = username self.password = password self.project_name = project_name self._client = None @property def service_catalog(self): if not self._client: self.authenticate() return self.stored_auth.get(self.username, {}).get('service_catalog') @property def auth_client(self): if not self._client: self.authenticate() return self._client @property def stored_auth(self): global STORED_AUTHENTICATION if not STORED_AUTHENTICATION: STORED_AUTHENTICATION = {} return STORED_AUTHENTICATION def _auth_with_keystone_client(self): if self.version.lower() == 'v2': self._client = v2_client.Client( username=self.username, password=self.password, tenant_name=self.project_name, auth_url=self.endpoint ) return (self._client.auth_token, self._client.tenant_id) elif self.version.lower() == 'v3': self._client = v3_client.Client( username=self.username, password=self.password, project_name=self.project_name, auth_url=self.endpoint ) return (self._client.auth_token, self._client.project_id) else: raise Exception('Unknown authentication version') def authenticate(self): creds = self.stored_auth.get(self.username) if not creds: token, project_id = self._auth_with_keystone_client() self.stored_auth[self.username] = { 'token': token, 'project_id': project_id, 'service_catalog': self._client.service_catalog, 'user_id': self._client.auth_user_id } return self.stored_auth[self.username] def get_user_id(self): """Return the UID used by keystone to uniquely identify the user""" return self.authenticate()['user_id'] def get_project_id(self): """Return the UID used by keystone to identify the user's project""" return self.authenticate()['project_id'] def __call__(self, r): 
creds = self.authenticate() # modify and return the request r.headers['X-Project-Id'] = creds.get('project_id') r.headers['X-Auth-Token'] = creds.get('token') return r barbican-2.0.0/functionaltests/common/__init__.py0000664000567000056710000000000012701405673023234 0ustar jenkinsjenkins00000000000000barbican-2.0.0/functionaltests/common/config.py0000664000567000056710000001065012701405674022757 0ustar jenkinsjenkins00000000000000""" Copyright 2015 Rackspace Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import os from oslo_config import cfg TEST_CONF = None def setup_config(config_file=''): global TEST_CONF TEST_CONF = cfg.ConfigOpts() identity_group = cfg.OptGroup(name='identity') identity_options = [ cfg.StrOpt('uri', default='http://localhost:5000/v3'), cfg.StrOpt('version', default='v3'), cfg.StrOpt('username', default='admin'), cfg.StrOpt('password', default='secretadmin', secret=True), cfg.StrOpt('project_name', default='admin'), cfg.StrOpt('domain_name', default='Default'), cfg.StrOpt('region', default='RegionOne'), cfg.StrOpt('service_admin', default='service-admin'), cfg.StrOpt('service_admin_project', default='service'), cfg.StrOpt('service_admin_password', default='secretservice', secret=True)] TEST_CONF.register_group(identity_group) TEST_CONF.register_opts(identity_options, group=identity_group) rbac_users_group = cfg.OptGroup(name='rbac_users') rbac_users_options = [ cfg.StrOpt('project_a', default='project_a'), cfg.StrOpt('project_b', default='project_b'), cfg.StrOpt('admin_a', 
default='project_a_admin'), cfg.StrOpt('admin_a_password', default='barbican', secret=True), cfg.StrOpt('creator_a', default='project_a_creator'), cfg.StrOpt('creator_a_password', default='barbican', secret=True), cfg.StrOpt('observer_a', default='project_a_observer'), cfg.StrOpt('observer_a_password', default='barbican', secret=True), cfg.StrOpt('auditor_a', default='project_a_auditor'), cfg.StrOpt('auditor_a_password', default='barbican', secret=True), cfg.StrOpt('admin_b', default='project_b_admin'), cfg.StrOpt('admin_b_password', default='barbican', secret=True), cfg.StrOpt('creator_b', default='project_b_creator'), cfg.StrOpt('creator_b_password', default='barbican', secret=True), cfg.StrOpt('observer_b', default='project_b_observer'), cfg.StrOpt('observer_b_password', default='barbican', secret=True), cfg.StrOpt('auditor_b', default='project_b_auditor'), cfg.StrOpt('auditor_b_password', default='barbican', secret=True), ] TEST_CONF.register_group(rbac_users_group) TEST_CONF.register_opts(rbac_users_options, group=rbac_users_group) keymanager_group = cfg.OptGroup(name='keymanager') keymanager_options = [ cfg.StrOpt('service_type', default='key-manager'), cfg.StrOpt('service_name', default='barbican'), cfg.StrOpt('region_name', default='RegionOne'), cfg.StrOpt('endpoint_type', default='public'), cfg.IntOpt('timeout', default=10), cfg.StrOpt('override_url', default=''), cfg.StrOpt('override_url_version', default='') ] TEST_CONF.register_group(keymanager_group) TEST_CONF.register_opts(keymanager_options, group=keymanager_group) quotas_group = cfg.OptGroup(name='quotas') quotas_options = [ cfg.IntOpt('quota_secrets', default=-1), cfg.IntOpt('quota_orders', default=-1), cfg.IntOpt('quota_containers', default=-1), cfg.IntOpt('quota_consumers', default=-1), cfg.IntOpt('quota_cas', default=-1) ] TEST_CONF.register_group(quotas_group) TEST_CONF.register_opts(quotas_options, group=quotas_group) # Figure out which config to load config_to_load = [] local_config = 
'./etc/barbican/barbican-functional.conf' if os.path.isfile(config_file): config_to_load.append(config_file) elif os.path.isfile(local_config): config_to_load.append(local_config) else: config_to_load.append('/etc/barbican/barbican-functional.conf') # Actually parse config TEST_CONF( (), # Required to load a anonymous config default_config_files=config_to_load ) def get_config(): if not TEST_CONF: setup_config() return TEST_CONF barbican-2.0.0/functionaltests/common/client.py0000664000567000056710000003114312701405674022770 0ustar jenkinsjenkins00000000000000""" Copyright 2014 Rackspace Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
""" import logging import os import requests from six.moves import urllib from tempest_lib.common.utils import misc as misc_utils from functionaltests.common import auth from functionaltests.common import config LOG = logging.getLogger(__name__) CONF = config.get_config() class BarbicanClient(object): def __init__(self, api_version='v1'): self.timeout = CONF.keymanager.timeout self.api_version = api_version self.default_headers = { 'Content-Type': 'application/json', 'Accept': 'application/json' } self.region = CONF.identity.region self._default_user_name = CONF.identity.username self._auth = {} self._auth[CONF.identity.username] = auth.FunctionalTestAuth( endpoint=CONF.identity.uri, version=CONF.identity.version, username=CONF.identity.username, password=CONF.identity.password, project_name=CONF.identity.project_name) self._auth[CONF.identity.service_admin] = auth.FunctionalTestAuth( endpoint=CONF.identity.uri, version=CONF.identity.version, username=CONF.identity.service_admin, password=CONF.identity.service_admin_password, project_name=CONF.identity.service_admin_project) self._auth[CONF.rbac_users.admin_a] = auth.FunctionalTestAuth( endpoint=CONF.identity.uri, version=CONF.identity.version, username=CONF.rbac_users.admin_a, password=CONF.rbac_users.admin_a_password, project_name=CONF.rbac_users.project_a) self._auth[CONF.rbac_users.creator_a] = auth.FunctionalTestAuth( endpoint=CONF.identity.uri, version=CONF.identity.version, username=CONF.rbac_users.creator_a, password=CONF.rbac_users.creator_a_password, project_name=CONF.rbac_users.project_a) self._auth[CONF.rbac_users.observer_a] = auth.FunctionalTestAuth( endpoint=CONF.identity.uri, version=CONF.identity.version, username=CONF.rbac_users.observer_a, password=CONF.rbac_users.observer_a_password, project_name=CONF.rbac_users.project_a) self._auth[CONF.rbac_users.auditor_a] = auth.FunctionalTestAuth( endpoint=CONF.identity.uri, version=CONF.identity.version, username=CONF.rbac_users.auditor_a, 
password=CONF.rbac_users.auditor_a_password, project_name=CONF.rbac_users.project_a) self._auth[CONF.rbac_users.admin_b] = auth.FunctionalTestAuth( endpoint=CONF.identity.uri, version=CONF.identity.version, username=CONF.rbac_users.admin_b, password=CONF.rbac_users.admin_b_password, project_name=CONF.rbac_users.project_b) self._auth[CONF.rbac_users.creator_b] = auth.FunctionalTestAuth( endpoint=CONF.identity.uri, version=CONF.identity.version, username=CONF.rbac_users.creator_b, password=CONF.rbac_users.creator_b_password, project_name=CONF.rbac_users.project_b) self._auth[CONF.rbac_users.observer_b] = auth.FunctionalTestAuth( endpoint=CONF.identity.uri, version=CONF.identity.version, username=CONF.rbac_users.observer_b, password=CONF.rbac_users.observer_b_password, project_name=CONF.rbac_users.project_b) self._auth[CONF.rbac_users.auditor_b] = auth.FunctionalTestAuth( endpoint=CONF.identity.uri, version=CONF.identity.version, username=CONF.rbac_users.auditor_b, password=CONF.rbac_users.auditor_b_password, project_name=CONF.rbac_users.project_b) def _attempt_to_stringify_content(self, content, content_tag): if content is None: return content try: # NOTE(jaosorior): The content is decoded as ascii since the # logging module has problems with utf-8 strings and will end up # trying to decode this as ascii. return content.decode('ascii') except UnicodeDecodeError: # NOTE(jaosorior): Since we are using base64 as default and this is # only for logging (in order to debug); Lets not put too much # effort in this and just use encoded string. 
return content.encode('base64') def stringify_request(self, request_kwargs, response): format_kwargs = { 'code': response.status_code, 'method': request_kwargs.get('method'), 'url': request_kwargs.get('url'), 'headers': response.request.headers, } format_kwargs['body'] = self._attempt_to_stringify_content( request_kwargs.get('data'), 'body') format_kwargs['response_body'] = self._attempt_to_stringify_content( response.content, 'response_body') return ('{code} {method} {url}\n' 'Request Headers: {headers}\n' 'Request Body: {body}\n' 'Response: {response_body}').format(**format_kwargs) def log_request(self, request_kwargs, response, user_name): test_name = misc_utils.find_test_caller() str_request = self.stringify_request(request_kwargs, response) if user_name is None: user_info = '' else: user_info = "(user={0})".format(user_name) LOG.info('Request %s (%s)\n %s', user_info, test_name, str_request) def _status_is_2xx_success(self, status_code): return status_code >= 200 and status_code < 300 def attempt_to_deserialize(self, response, model_type): if (self._status_is_2xx_success(response.status_code) and model_type and hasattr(model_type, 'json_to_obj')): return model_type.json_to_obj(response.content) return None def attempt_to_serialize(self, model): if model and hasattr(model, 'obj_to_json'): return model.obj_to_json() def _get_url_w_trailing_slash(self, url): """Returns the given URL with a trailing slash Given a URL, this function will return it with a trailing slash. If there is already a trailing slash, then it will return the same URL that was given. Note that the instances where this is being used, actually need a trailing slash. Be careful not to use this when it's not needed. """ # NOTE(jaosorior): The urljoin needs this in order to actually append # a URL to another. If a URL, say http://localhost/v1 doesn't have a # slash in the end, the last fragment will be replaced with the second # parameter given to urljoin; Which is not what we want. 
if url[-1] != "/": return url + "/" return url def _get_base_url_from_config(self, include_version): if include_version: base_url = urllib.parse.urljoin( CONF.keymanager.override_url, CONF.keymanager.override_url_version) else: base_url = CONF.keymanager.override_url return self._get_url_w_trailing_slash(base_url) def get_base_url(self, include_version=True): if CONF.keymanager.override_url: return self._get_base_url_from_config(include_version) auth = self._auth[self._default_user_name] endpoint = auth.service_catalog.get_endpoints( service_type=CONF.keymanager.service_type, service_name=CONF.keymanager.service_name, region_name=CONF.keymanager.region_name, endpoint_type=CONF.keymanager.endpoint_type) if auth.version.lower() == 'v2': base_url = endpoint['key-manager'][0].get('publicURL') else: base_url = endpoint['key-manager'][0].get('url') # Make sure we handle the edge cases around Keystone providing # endpoints with or without versions if include_version and self.api_version not in base_url: base_url = urllib.parse.urljoin(base_url, self.api_version) elif not include_version and self.api_version in base_url: base_url, _ = os.path.split(base_url) return self._get_url_w_trailing_slash(base_url) def get_list_of_models(self, item_list, model_type): """Takes a list of barbican objects and creates a list of models :param item_list: the json returned from a barbican GET request for a list of objects :param model_type: The model used in the creation of the list of models :return A list of models and the refs for next and previous lists. 
""" models, next_ref, prev_ref = [], None, None for item in item_list: if 'next' == item: next_ref = item_list.get('next') elif 'previous' == item: prev_ref = item_list.get('previous') elif item in ('secrets', 'orders', 'containers', 'consumers', 'project_quotas'): for entity in item_list.get(item): models.append(model_type(**entity)) return models, next_ref, prev_ref def request(self, method, url, data=None, extra_headers=None, omit_headers=None, use_auth=True, response_model_type=None, request_model=None, params=None, user_name=None): """Prepares and sends http request through Requests.""" if url and 'http' not in url: url = urllib.parse.urljoin(self.get_base_url(), url) # Duplicate Base headers and add extras (if needed) headers = {} headers.update(self.default_headers) if extra_headers: headers.update(extra_headers) if omit_headers: for header in omit_headers: try: del headers[header] except KeyError: # key error means we tried to delete a nonexistent # entry - we don't care about that pass # Attempt to serialize model if required if request_model: data = self.attempt_to_serialize(request_model) # Prepare call arguments call_kwargs = { 'method': method, 'url': url, 'headers': headers, 'data': data, 'timeout': self.timeout, 'params': params } if use_auth: if not user_name: user_name = self._default_user_name call_kwargs['auth'] = self._auth[user_name] response = requests.request(**call_kwargs) # Attempt to deserialize the response response.model = self.attempt_to_deserialize(response, response_model_type) self.log_request(call_kwargs, response, user_name) return response def get(self, *args, **kwargs): """Proxies the request method specifically for http GET methods.""" return self.request('GET', *args, **kwargs) def post(self, *args, **kwargs): """Proxies the request method specifically for http POST methods.""" return self.request('POST', *args, **kwargs) def put(self, *args, **kwargs): """Proxies the request method specifically for http PUT methods.""" return 
self.request('PUT', *args, **kwargs) def delete(self, *args, **kwargs): """Proxies the request method specifically for http DELETE methods.""" return self.request('DELETE', *args, **kwargs) def patch(self, *args, **kwargs): """Proxies the request method specifically for http PATCH methods.""" return self.request('PATCH', *args, **kwargs) def get_user_id_from_name(self, user_name): if user_name and self._auth[user_name]: return self._auth[user_name].get_user_id() else: return None def get_project_id_from_name(self, user_name): if user_name and self._auth[user_name]: return self._auth[user_name].get_project_id() else: return None barbican-2.0.0/functionaltests/api/0000775000567000056710000000000012701406024020405 5ustar jenkinsjenkins00000000000000barbican-2.0.0/functionaltests/api/__init__.py0000664000567000056710000000000012701405673022515 0ustar jenkinsjenkins00000000000000barbican-2.0.0/functionaltests/api/base.py0000664000567000056710000004022312701405673021703 0ustar jenkinsjenkins00000000000000""" Copyright 2014 Rackspace Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
""" import abc import fixtures import logging import os import uuid import oslotest.base as oslotest import six from testtools import testcase from barbican.tests import utils from functionaltests.common import client from functionaltests.common import config CONF = config.get_config() class TestCase(oslotest.BaseTestCase): max_payload_size = 10000 max_sized_payload = 'a' * max_payload_size oversized_payload = 'a' * (max_payload_size + 1) max_field_size = 255 max_sized_field = 'a' * max_field_size oversized_field = 'a' * (max_field_size + 1) log_format = ('%(asctime)s %(process)d %(levelname)-8s ' '[%(name)s] %(message)s') @classmethod def setUpClass(cls): cls.LOG = logging.getLogger(cls._get_full_case_name()) super(TestCase, cls).setUpClass() def setUp(self): self.LOG.info('Starting: %s', self._testMethodName) super(TestCase, self).setUp() self.client = client.BarbicanClient() stdout_capture = os.environ.get('OS_STDOUT_CAPTURE') stderr_capture = os.environ.get('OS_STDERR_CAPTURE') log_capture = os.environ.get('OS_LOG_CAPTURE') if ((stdout_capture and stdout_capture.lower() == 'true') or stdout_capture == '1'): stdout = self.useFixture(fixtures.StringStream('stdout')).stream self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout)) if ((stderr_capture and stderr_capture.lower() == 'true') or stderr_capture == '1'): stderr = self.useFixture(fixtures.StringStream('stderr')).stream self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr)) if ((log_capture and log_capture.lower() == 'true') or log_capture == '1'): self.useFixture(fixtures.LoggerFixture(nuke_handlers=False, format=self.log_format, level=logging.DEBUG)) def tearDown(self): super(TestCase, self).tearDown() self.LOG.info('Finished: %s\n', self._testMethodName) @classmethod def _get_full_case_name(cls): name = '{module}:{case_name}'.format( module=cls.__module__, case_name=cls.__name__ ) return name @six.add_metaclass(abc.ABCMeta) class PagingTestCase(TestCase): def setUp(self): super(PagingTestCase, 
self).setUp() self._all_fetched_resources = [] def tearDown(self): super(PagingTestCase, self).tearDown() def _set_filter_field(self, model): filter = str(uuid.uuid4()) self.set_filter_field(filter, model) return filter def _validate_resource_group(self, resources=[], next_ref=None, prev_ref=None, expected_size=0, next_ref_should_be_none=True, prev_ref_should_be_none=True): """Validate the returned group of resources. Will check for: 1. there is a returned group (ie not None) 2. size of the returned group 3. no duplicates within the returned group 4. no duplicates across multiple calls 5. valid next resource ref 6. valid previous resource ref :param resources: the list of resources :param expected_size: the expected size of the list :param next_ref: next href :param prev_ref: previous href :param next_ref_should_be_none: should next href be none? :param next_ref_should_be_none: should prev href be none? :param all_fetched_resources: running list of all resources (used to detect duplicates across multiple calls) """ self.assertIsNotNone(resources) self.assertEqual(len(resources), expected_size) self.assertEqual(next_ref_should_be_none, next_ref is None) self.assertEqual(prev_ref_should_be_none, prev_ref is None) # check for duplicates within this group self.assertEqual(len(resources), len(set(resources))) # check for duplicates across calls if len(self._all_fetched_resources): duplicates = [entity for entity in resources if entity in self._all_fetched_resources] self.assertEqual(len(duplicates), 0) # add to our running list of resource refs self._all_fetched_resources.extend(resources) @abc.abstractmethod def create_model(self): pass @abc.abstractmethod def create_resources(self, count=0, model=None): pass @abc.abstractmethod def get_resources(self, limit=10, offset=0, filter=""): pass @abc.abstractmethod def set_filter_field(self, filter, model): pass @testcase.attr('positive') def test_paging_with_limits_and_offsets(self): """Covers resource paging limit and 
offset attributes.""" test_model = self.create_model() number_of_resource_groups = 5 resources_per_group = 10 filter = self._set_filter_field(test_model) # create a number of resources self.create_resources( count=number_of_resource_groups * resources_per_group, model=test_model) # validate all groups of resources for i in range(1, number_of_resource_groups + 1): resp, resources, next_ref, prev_ref = self.get_resources( limit=resources_per_group, offset=(i - 1) * resources_per_group, filter=filter) self.assertEqual(200, resp.status_code) check_next = i == number_of_resource_groups check_prev = i == 1 self._validate_resource_group(resources=resources, next_ref=next_ref, prev_ref=prev_ref, expected_size=resources_per_group, next_ref_should_be_none=check_next, prev_ref_should_be_none=check_prev) @testcase.attr('positive') def test_paging_with_offset_zero_and_varying_limits(self): """Covers listing resources with limit attribute. Use limits from 1 to twice the number of resources we expect. Always use offset=0 so we start from the beginning. """ res_count = 25 test_model = self.create_model() filter = self._set_filter_field(test_model) self.create_resources(count=res_count, model=test_model) minimum_limit = 1 maximum_limit = res_count * 2 offset = 0 for limit in range(minimum_limit, maximum_limit): resp, resources, next_ref, prev_ref = self.get_resources( limit=limit, offset=offset, filter=filter) self.assertEqual(200, resp.status_code) check_next = limit >= res_count check_prev = offset == 0 self._validate_resource_group(resources=resources, next_ref=next_ref, prev_ref=prev_ref, expected_size=min(limit, res_count), next_ref_should_be_none=check_next, prev_ref_should_be_none=check_prev) @testcase.attr('positive') def test_paging_exceeding_paging_max_limit(self): """Covers case of listing resources with a limit that exceeds max. Create a number of resources over the max paging limit, then try to get them all in one call. 
        It should only return the max, with a next link to get the rest.
        """
        max_allowable_limit = 100
        number_of_resources = max_allowable_limit + 10
        test_model = self.create_model()
        # NOTE: 'filter' shadows the builtin; kept for consistency with the
        # rest of this class's helpers.
        filter = self._set_filter_field(test_model)
        self.create_resources(count=number_of_resources, model=test_model)
        resp, resources, next_ref, prev_ref = self.get_resources(
            limit=number_of_resources, offset=0, filter=filter)
        self.assertEqual(200, resp.status_code)
        # Server should cap the page at max_allowable_limit even though we
        # asked for more.
        self._validate_resource_group(resources=resources, next_ref=next_ref,
                                      prev_ref=prev_ref,
                                      expected_size=max_allowable_limit,
                                      next_ref_should_be_none=False,
                                      prev_ref_should_be_none=True)
        limit, offset = utils.get_limit_and_offset_from_ref(next_ref)

        # new offset and limit should both be the same as the max limit
        self.assertEqual(str(max_allowable_limit), limit)
        self.assertEqual(str(max_allowable_limit), offset)

        # now get the rest
        resp, resources, next_ref, prev_ref = self.get_resources(
            limit=limit, offset=offset, filter=filter)
        self.assertEqual(200, resp.status_code)
        expected_size = number_of_resources - max_allowable_limit
        self._validate_resource_group(resources=resources, next_ref=next_ref,
                                      prev_ref=prev_ref,
                                      expected_size=expected_size,
                                      next_ref_should_be_none=True,
                                      prev_ref_should_be_none=False)

    @testcase.attr('positive')
    def test_paging_next_option_start_in_middle(self):
        """Covers getting a list of resources and using the next reference."""
        number_of_resources = 150
        test_model = self.create_model()
        filter = self._set_filter_field(test_model)
        self.create_resources(count=number_of_resources, model=test_model)

        # First set of resources
        limit = number_of_resources // 10
        offset = number_of_resources // 2
        resp, resources, next_ref, prev_ref = self.get_resources(
            limit=limit, offset=offset, filter=filter)
        self.assertEqual(200, resp.status_code)
        self._validate_resource_group(resources=resources, next_ref=next_ref,
                                      prev_ref=prev_ref,
                                      expected_size=limit,
                                      next_ref_should_be_none=False,
                                      prev_ref_should_be_none=False)
        limit, offset = utils.get_limit_and_offset_from_ref(next_ref)

        # Next set of resources
        resp, resources, next_ref, prev_ref = self.get_resources(
            limit=limit, offset=offset, filter=filter)
        # NOTE(review): limit from the ref is a string, hence int(limit) here.
        self._validate_resource_group(resources=resources, next_ref=next_ref,
                                      prev_ref=prev_ref,
                                      expected_size=int(limit),
                                      next_ref_should_be_none=False,
                                      prev_ref_should_be_none=False)

    @testcase.attr('positive')
    def test_paging_with_default_limit_and_varying_offsets(self):
        """Covers listing resources with various offsets.

        Use offsets from 0 to the number of resources we expect. Always use
        default limit.
        """
        number_of_resources = 15
        test_model = self.create_model()
        filter = self._set_filter_field(test_model)
        self.create_resources(count=number_of_resources, model=test_model)

        minimum_offset = 0
        maximum_offset = number_of_resources
        limit = 10

        for offset in range(minimum_offset, maximum_offset):
            resp, resources, next_ref, prev_ref = self.get_resources(
                limit=limit, offset=offset, filter=filter)
            self.assertEqual(200, resp.status_code)
            # next link is absent only when this page reaches the end;
            # prev link is absent only on the very first page
            check_next = offset + limit >= number_of_resources
            check_prev = offset == 0
            expected_size = min(limit, number_of_resources - offset)
            self._validate_resource_group(resources=resources,
                                          next_ref=next_ref,
                                          prev_ref=prev_ref,
                                          expected_size=expected_size,
                                          next_ref_should_be_none=check_next,
                                          prev_ref_should_be_none=check_prev)

    @testcase.attr('positive')
    def test_resources_get_paging_prev_option_start_in_middle(self):
        """Covers getting a list of resources and using the prev reference."""
        number_of_resources = 150
        test_model = self.create_model()
        filter = self._set_filter_field(test_model)
        self.create_resources(count=number_of_resources, model=test_model)

        # First set of resources
        limit = number_of_resources // 10
        offset = number_of_resources // 2
        resp, resources, next_ref, prev_ref = self.get_resources(
            limit=limit, offset=offset, filter=filter)
        self.assertEqual(200, resp.status_code)
        self._validate_resource_group(resources=resources, next_ref=next_ref,
                                      prev_ref=prev_ref,
                                      expected_size=limit,
                                      next_ref_should_be_none=False,
                                      prev_ref_should_be_none=False)
        limit, offset = utils.get_limit_and_offset_from_ref(prev_ref)

        # Previous set of resources
        resp, resources, next_ref, prev_ref = self.get_resources(
            limit=limit, offset=offset, filter=filter)
        self._validate_resource_group(resources=resources, next_ref=next_ref,
                                      prev_ref=prev_ref,
                                      expected_size=int(limit),
                                      next_ref_should_be_none=False,
                                      prev_ref_should_be_none=False)

    @testcase.attr('positive')
    def test_paging_with_non_integer_limits_and_offsets(self):
        """Covers resource paging limit and offset attributes."""
        test_model = self.create_model()
        number_of_resources = 25

        # create a number of resources
        filter = self._set_filter_field(test_model)
        self.create_resources(count=number_of_resources, model=test_model)

        # pass in non-integer values for limit and offset
        resp, resources, next_ref, prev_ref = self.get_resources(
            limit='not-an-int-limit',
            offset='not-an-int-offset',
            filter=filter)
        self.assertEqual(200, resp.status_code)
        # server is expected to fall back to the default page size (10)
        self._validate_resource_group(resources=resources, next_ref=next_ref,
                                      prev_ref=prev_ref,
                                      expected_size=10,
                                      next_ref_should_be_none=False,
                                      prev_ref_should_be_none=True)

    @testcase.attr('positive')
    def test_paging_with_default_limit_and_large_offsets(self):
        """Covers resource paging limit and offset attributes."""
        test_model = self.create_model()
        number_of_resources = 25

        # create a number of resources
        filter = self._set_filter_field(test_model)
        self.create_resources(count=number_of_resources, model=test_model)

        large_offset = 265613988875874769338781322035779626829233452653394495
        limit = 10

        # pass in a huge (but integer) offset with the default limit
        resp, resources, next_ref, prev_ref = self.get_resources(
            limit=limit,
            offset=large_offset,
            filter=filter)
        self.assertEqual(200, resp.status_code)
        # NOTE(review): server appears to ignore the out-of-range offset and
        # return the first page — confirm against API behavior.
        self._validate_resource_group(resources=resources, next_ref=next_ref,
                                      prev_ref=prev_ref,
                                      expected_size=10,
                                      next_ref_should_be_none=False,
                                      prev_ref_should_be_none=True)
barbican-2.0.0/functionaltests/api/v1/0000775000567000056710000000000012701406024020733 5ustar  jenkinsjenkins00000000000000barbican-2.0.0/functionaltests/api/v1/behaviors/0000775000567000056710000000000012701406024022715 5ustar  jenkinsjenkins00000000000000barbican-2.0.0/functionaltests/api/v1/behaviors/order_behaviors.py0000664000567000056710000001601012701405673026453 0ustar  jenkinsjenkins00000000000000"""
Copyright 2014-2015 Rackspace

Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
"""
from functionaltests.api.v1.behaviors import base_behaviors
from functionaltests.api.v1.behaviors import container_behaviors
from functionaltests.api.v1.behaviors import secret_behaviors
from functionaltests.api.v1.models import order_models


class OrderBehaviors(base_behaviors.BaseBehaviors):
    """HTTP behaviors for the /orders resource, with cleanup tracking."""

    def create_order(self, model, extra_headers=None, use_auth=True,
                     user_name=None, admin=None):
        """Create an order from the data in the model.

        :param model: The data used to create the order
        :param extra_headers: Optional HTTP headers to add to the request
        :param use_auth: Boolean to determine whether auth headers are sent
        :param user_name: the user used to do the create
        :param admin: the admin of the group to which user_name belongs
        :return: The create response and href for the order
        """
        # create the order
        resp = self.client.post('orders', request_model=model,
                                extra_headers=extra_headers,
                                user_name=user_name, use_auth=use_auth)

        # handle expected JSON parsing errors for unauthenticated requests
        if resp.status_code == 401 and not use_auth:
            return resp, None

        returned_data = self.get_json(resp)
        order_ref = returned_data.get('order_ref')

        # remember this order and its admin for our housekeeping cleanup
        if order_ref:
            if admin is None:
                admin = user_name
            self.created_entities.append((order_ref, admin))

        return resp, order_ref

    def get_order(self, order_ref, extra_headers=None, user_name=None,
                  use_auth=True):
        """Get an order from an href.

        :param order_ref: The href for an order
        :param extra_headers: Optional HTTP headers to add to the request
        :param user_name: the user used to do the get
        :param use_auth: Boolean to determine whether auth headers are sent
        :return: The response from the get
        """
        return self.client.get(order_ref,
                               response_model_type=order_models.OrderModel,
                               extra_headers=extra_headers,
                               user_name=user_name, use_auth=use_auth)

    def get_orders(self, limit=10, offset=0, filter=None, extra_headers=None,
                   user_name=None, use_auth=True):
        """Get a list of orders.

        :param limit: limits number of returned orders (default 10)
        :param offset: represents how many records to skip before retrieving
                       the list (default 0)
        :param filter: optional filter to limit the returned orders to those
                       whose metadata contains the filter.
        :param extra_headers: Optional HTTP headers to add to the request
        :param user_name: the user used to do the get
        :param use_auth: Boolean to determine whether auth headers are sent
        :return the response, a list of orders and the next/prev hrefs
        """
        params = {'limit': limit, 'offset': offset}
        if filter:
            params['meta'] = filter
        resp = self.client.get('orders', params=params,
                               extra_headers=extra_headers,
                               user_name=user_name, use_auth=use_auth)

        # handle expected JSON parsing errors for unauthenticated requests
        if resp.status_code == 401 and not use_auth:
            return resp, None, None, None

        orders_list = self.get_json(resp)
        orders, next_ref, prev_ref = self.client.get_list_of_models(
            orders_list, order_models.OrderModel)

        return resp, orders, next_ref, prev_ref

    def delete_order(self, order_ref, extra_headers=None, expected_fail=False,
                     user_name=None, use_auth=True):
        """Delete an order.

        :param order_ref: HATEOAS ref of the order to be deleted
        :param extra_headers: Optional HTTP headers to add to the request
        :param expected_fail: Flag telling the delete whether or not this
                              operation is expected to fail (ie coming from
                              a negative test).  We need this to determine
                              whether or not this delete should also remove
                              an entity from our internal list for
                              housekeeping.
        :param user_name: the user used to do the delete
        :param use_auth: Boolean to determine whether auth headers are sent
        :return A request response object
        """
        resp = self.client.delete(order_ref, extra_headers=extra_headers,
                                  user_name=user_name, use_auth=use_auth)

        if not expected_fail:
            # drop the order from the housekeeping list so cleanup does not
            # try to delete it a second time
            for item in self.created_entities:
                if item[0] == order_ref:
                    self.created_entities.remove(item)

        return resp

    def delete_all_created_orders(self):
        """Delete all orders and other entities created by orders.
        """
        container_client = container_behaviors.ContainerBehaviors(self.client)
        secret_client = secret_behaviors.SecretBehaviors(self.client)
        orders_to_delete = [order for order in self.created_entities]

        for (order_ref, admin) in orders_to_delete:
            order_resp = self.get_order(order_ref, user_name=admin)

            # If order has secrets
            if order_resp.model.secret_ref:
                secret_client.delete_secret(order_resp.model.secret_ref,
                                            user_name=admin)

            # If containers supported
            container_attr_exists = getattr(order_resp.model,
                                            "container_ref", None)
            if container_attr_exists and order_resp.model.container_ref:
                container_resp = container_client.get_container(
                    order_resp.model.container_ref, user_name=admin)

                # remove secrets in the containers in the orders
                if container_resp.model.secret_refs:
                    for secret in container_resp.model.secret_refs:
                        secret_client.delete_secret(secret.secret_ref,
                                                    user_name=admin)

                container_client.delete_container(
                    order_resp.model.container_ref, user_name=admin)

            self.delete_order(order_ref, user_name=admin)
barbican-2.0.0/functionaltests/api/v1/behaviors/secretmeta_behaviors.py0000664000567000056710000001162712701405673027475 0ustar  jenkinsjenkins00000000000000"""
Copyright 2016 IBM

Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
""" import json from functionaltests.api.v1.behaviors import base_behaviors class SecretMetadataBehaviors(base_behaviors.BaseBehaviors): def create_or_update_metadata(self, secret_ref, data, extra_headers=None, use_auth=True, user_name=None, admin=None): meta_ref = '%s/metadata' % secret_ref data = json.dumps(data) resp = self.client.put(meta_ref, data=data, extra_headers=extra_headers, use_auth=use_auth, user_name=user_name) # handle expected JSON parsing errors for unauthenticated requests if resp.status_code == 401 and not use_auth: return resp, None returned_data = self.get_json(resp) metadata_ref = returned_data.get('metadata_ref') if metadata_ref: if admin is None: admin = user_name self.created_entities.append((metadata_ref, admin)) return resp, metadata_ref def get_metadata(self, secret_ref, extra_headers=None, use_auth=True, user_name=None, admin=None): meta_ref = '%s/metadata' % secret_ref resp = self.client.get(meta_ref, extra_headers=extra_headers, user_name=user_name, use_auth=use_auth) # handle expected JSON parsing errors for unauthenticated requests if resp.status_code == 401 and not use_auth: return resp, None returned_data = self.get_json(resp) metadata_ref = returned_data.get('metadata_ref') if metadata_ref: if admin is None: admin = user_name self.created_entities.append((metadata_ref, admin)) return resp def create_metadatum(self, secret_ref, data, extra_headers=None, use_auth=True, user_name=None, admin=None): meta_key_ref = '%s/%s' % (secret_ref, 'metadata') data = json.dumps(data) resp = self.client.post(meta_key_ref, data=data, extra_headers=extra_headers, use_auth=use_auth, user_name=user_name) # handle expected JSON parsing errors for unauthenticated requests if resp.status_code == 401 and not use_auth: return resp, None returned_data = self.get_json(resp) metadata_ref = returned_data.get('metadata_ref') if metadata_ref: if admin is None: admin = user_name self.created_entities.append((metadata_ref, admin)) return resp, metadata_ref def 
update_metadatum(self, secret_ref, metadata_key, data, extra_headers=None, use_auth=True, user_name=None, admin=None): meta_key_ref = '%s/%s/%s' % (secret_ref, 'metadata', metadata_key) data = json.dumps(data) resp = self.client.put(meta_key_ref, data=data, extra_headers=extra_headers, use_auth=use_auth, user_name=user_name) # handle expected JSON parsing errors for unauthenticated requests if resp.status_code == 401 and not use_auth: return resp, None return resp def get_metadatum(self, secret_ref, metadata_key, extra_headers=None, use_auth=True, user_name=None, admin=None): meta_key_ref = '%s/%s/%s' % (secret_ref, 'metadata', metadata_key) resp = self.client.get(meta_key_ref, extra_headers=extra_headers, user_name=user_name, use_auth=use_auth) # handle expected JSON parsing errors for unauthenticated requests if resp.status_code == 401 and not use_auth: return resp, None returned_data = self.get_json(resp) metadata_ref = returned_data.get('metadata_ref') if metadata_ref: if admin is None: admin = user_name self.created_entities.append((metadata_ref, admin)) return resp def delete_metadatum(self, secret_ref, metadata_key, extra_headers=None, use_auth=True, user_name=None, admin=None): meta_key_ref = '%s/%s/%s' % (secret_ref, 'metadata', metadata_key) resp = self.client.delete(meta_key_ref, extra_headers=extra_headers, user_name=user_name, use_auth=use_auth) return resp barbican-2.0.0/functionaltests/api/v1/behaviors/secret_behaviors.py0000664000567000056710000002121012701405674026624 0ustar jenkinsjenkins00000000000000""" Copyright 2014-2015 Rackspace Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the specific language governing permissions and
limitations under the License.
"""
from functionaltests.api.v1.behaviors import base_behaviors
from functionaltests.api.v1.models import secret_models


class SecretBehaviors(base_behaviors.BaseBehaviors):
    """HTTP behaviors for the /secrets resource, with cleanup tracking."""

    def create_secret(self, model, extra_headers=None, omit_headers=None,
                      use_auth=True, user_name=None, admin=None):
        """Create a secret from the data in the model.

        :param model: The metadata used to create the secret
        :param extra_headers: Optional HTTP headers to add to the request
        :param omit_headers: headers to delete before making the request
        :param use_auth: Boolean for whether to send authentication headers
        :param user_name: The user name used to create the secret
        :param admin: The user with permissions to delete the secrets
        :return: A tuple containing the response from the create and the
                 href to the newly created secret
        """
        resp = self.client.post('secrets', request_model=model,
                                extra_headers=extra_headers,
                                omit_headers=omit_headers,
                                use_auth=use_auth, user_name=user_name)

        # handle expected JSON parsing errors for unauthenticated requests
        if resp.status_code == 401 and not use_auth:
            return resp, None

        returned_data = self.get_json(resp)
        secret_ref = returned_data.get('secret_ref')
        if secret_ref:
            if admin is None:
                admin = user_name
            self.created_entities.append((secret_ref, admin))

        return resp, secret_ref

    def update_secret_payload(self, secret_ref, payload, payload_content_type,
                              payload_content_encoding=None,
                              extra_headers=None, omit_headers=None,
                              use_auth=True, user_name=None):
        """Updates a secret's payload data.

        :param secret_ref: HATEOAS ref of the secret to be updated
        :param payload: new payload to be sent to server
        :param payload_content_type: value for the Content-Type header
        :param payload_content_encoding: value for the Content-Encoding header
        :param extra_headers: Optional HTTP headers to add to the request
        :param omit_headers: headers to delete before making the request
        :param use_auth: Boolean for whether to send authentication headers
        :param user_name: The user name used to update the secret
        :return: the response from the PUT update
        """
        # only send Content-Encoding when one was supplied
        if payload_content_encoding is None:
            headers = {'Content-Type': payload_content_type}
        else:
            headers = {'Content-Type': payload_content_type,
                       'Content-Encoding': payload_content_encoding}
        if extra_headers:
            headers.update(extra_headers)

        return self.client.put(secret_ref, data=payload,
                               extra_headers=headers,
                               omit_headers=omit_headers,
                               use_auth=use_auth, user_name=user_name)

    def get_secret(self, secret_ref, payload_content_type,
                   payload_content_encoding=None, extra_headers=None,
                   omit_headers=None, use_auth=True, user_name=None):
        """Retrieves a secret's payload via the /payload sub-resource."""
        # NOTE(review): unlike update_secret_payload, this always includes
        # Accept-Encoding even when the encoding is None — confirm intended.
        headers = {'Accept': payload_content_type,
                   'Accept-Encoding': payload_content_encoding}
        if extra_headers:
            headers.update(extra_headers)

        return self.client.get(secret_ref + '/payload',
                               extra_headers=headers,
                               omit_headers=omit_headers,
                               use_auth=use_auth, user_name=user_name)

    def get_secret_based_on_content_type(self, secret_ref,
                                         payload_content_type,
                                         payload_content_encoding=None,
                                         extra_headers=None,
                                         omit_headers=None,
                                         user_name=None):
        """Retrieves a secret's payload based on the content type

        NOTE: This way will be deprecated in subsequent versions of the API.
        """
        headers = {'Accept': payload_content_type,
                   'Accept-Encoding': payload_content_encoding}
        if extra_headers:
            headers.update(extra_headers)

        return self.client.get(secret_ref, extra_headers=headers,
                               omit_headers=omit_headers,
                               user_name=user_name)

    def get_secret_metadata(self, secret_ref, extra_headers=None,
                            omit_headers=None, use_auth=True, user_name=None):
        """Retrieves a secret's metadata.

        :param secret_ref: HATEOAS ref of the secret to be retrieved
        :param extra_headers: Optional HTTP headers to add to the request
        :param omit_headers: headers to delete before making the request
        :param use_auth: Boolean for whether to send authentication headers
        :param user_name: The user name used to get the metadata
        :return: A request response object
        """
        return self.client.get(
            secret_ref, extra_headers=extra_headers,
            omit_headers=omit_headers,
            response_model_type=secret_models.SecretModel,
            use_auth=use_auth, user_name=user_name)

    def get_secrets(self, limit=10, offset=0, filter=None, extra_headers=None,
                    omit_headers=None, use_auth=True, user_name=None):
        """Handles getting a list of secrets.

        :param limit: limits number of returned secrets
        :param offset: represents how many records to skip before retrieving
                       the list
        :param filter: optional filter to limit the returned secrets to
                       those whose name matches the filter.
        :param extra_headers: Optional HTTP headers to add to the request
        :param omit_headers: headers to delete before making the request
        :param use_auth: Boolean for whether to send authentication headers
        :param user_name: The user name used to list the secrets
        """
        params = {'limit': limit, 'offset': offset}
        if filter:
            params['name'] = filter
        resp = self.client.get('secrets', params=params,
                               extra_headers=extra_headers,
                               omit_headers=omit_headers,
                               use_auth=use_auth, user_name=user_name)

        # handle expected JSON parsing errors for unauthenticated requests
        if resp.status_code == 401 and not use_auth:
            return resp, None, None, None

        secrets_list = self.get_json(resp)
        secrets, next_ref, prev_ref = self.client.get_list_of_models(
            secrets_list, secret_models.SecretModel)

        return resp, secrets, next_ref, prev_ref

    def delete_secret(self, secret_ref, extra_headers=None, omit_headers=None,
                      expected_fail=False, use_auth=True, user_name=None):
        """Delete a secret.

        :param secret_ref: HATEOAS ref of the secret to be deleted
        :param extra_headers: Optional HTTP headers to add to the request
        :param omit_headers: headers to delete before making the request
        :param expected_fail: If test is expected to fail the deletion
        :param use_auth: Boolean for whether to send authentication headers
        :param user_name: The user name used to delete the secret
        :return A request response object
        """
        resp = self.client.delete(secret_ref, extra_headers=extra_headers,
                                  omit_headers=omit_headers,
                                  use_auth=use_auth, user_name=user_name)

        if not expected_fail:
            # drop the secret from the housekeeping list
            for item in self.created_entities:
                if item[0] == secret_ref:
                    self.created_entities.remove(item)

        return resp

    def delete_all_created_secrets(self):
        """Delete all of the secrets that we have created."""
        # iterate over a copy because delete_secret mutates the list
        entities = list(self.created_entities)
        for (secret_ref, admin) in entities:
            self.delete_secret(secret_ref, user_name=admin)
barbican-2.0.0/functionaltests/api/v1/behaviors/acl_behaviors.py0000664000567000056710000001017212701405673026102 0ustar
jenkinsjenkins00000000000000"""
Copyright 2015 Cisco Systems

Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
"""
from functionaltests.api.v1.behaviors import base_behaviors
from functionaltests.api.v1.models import acl_models


class AclBehaviors(base_behaviors.BaseBehaviors):
    """HTTP behaviors for the /acl sub-resource of secrets and containers."""

    def create_acl(self, entity_ref, model, extra_headers=None,
                   use_auth=True, user_name=None):
        """Create an acl from the data in the model.

        :param entity_ref: ref of secret or container for acl
        :param model: The metadata used to create the acl
        :param extra_headers: Headers used to create the acl
        :param use_auth: Boolean for whether to send authentication headers
        :param user_name: The user name used to create the acl
        :return: the response from the PUT request
        """
        acl_ref = '{0}/acl'.format(entity_ref)
        resp = self.client.put(acl_ref, request_model=model,
                               extra_headers=extra_headers,
                               use_auth=use_auth, user_name=user_name)
        # NOTE(review): tracked for cleanup unconditionally, even if the PUT
        # failed — other behaviors only record on success; confirm intended.
        self.created_entities.append((acl_ref, user_name))
        return resp

    def get_acl(self, acl_ref, extra_headers=None, use_auth=True,
                user_name=None):
        """Handles getting a single acl

        :param acl_ref: Reference to the acl to be retrieved
        :param extra_headers: Headers used to get the acl
        :param use_auth: Boolean for whether to send authentication headers
        :param user_name: The user name used to get the acl
        :return: The response of the GET.
        """
        resp = self.client.get(
            acl_ref, response_model_type=acl_models.AclModel,
            use_auth=use_auth, user_name=user_name)
        return resp

    def update_acl(self, acl_ref, model, extra_headers=None,
                   use_auth=True, user_name=None):
        """Update an acl from the data in the model.

        :param acl_ref: Reference of the acl to be updated
        :param model: The metadata used to update the acl
        :param extra_headers: Headers used to update the acl
        :param use_auth: Boolean for whether to send authentication headers
        :param user_name: The user name used to update the acl
        :return: the response from the PATCH request
        """
        resp = self.client.patch(
            acl_ref, request_model=model, extra_headers=extra_headers,
            response_model_type=acl_models.AclModel,
            use_auth=use_auth, user_name=user_name)
        return resp

    def delete_acl(self, acl_ref, extra_headers=None, expected_fail=False,
                   use_auth=True, user_name=None):
        """Handles deleting an acl.

        :param acl_ref: Reference of the acl to be deleted
        :param extra_headers: Any additional headers needed.
        :param expected_fail: If there is a negative test, this should be
                              marked true if you are trying to delete an acl
                              that does not exist.
        :param use_auth: Boolean for whether to send authentication headers
        :param user_name: The user name used to delete the acl
        :return: Response of the delete.
        """
        resp = self.client.delete(acl_ref, extra_headers,
                                  use_auth=use_auth, user_name=user_name)
        if not expected_fail:
            # drop the acl from the housekeeping list
            for item in self.created_entities:
                if item[0] == acl_ref:
                    self.created_entities.remove(item)
        return resp

    def delete_all_created_acls(self):
        """Delete all of the acls that we have created."""
        # iterate over a copy because delete_acl mutates the list
        entities = list(self.created_entities)
        for (acl_ref, user_name) in entities:
            self.delete_acl(acl_ref, user_name=user_name)
barbican-2.0.0/functionaltests/api/v1/behaviors/__init__.py0000664000567000056710000000000012701405673025025 0ustar  jenkinsjenkins00000000000000barbican-2.0.0/functionaltests/api/v1/behaviors/quota_behaviors.py0000664000567000056710000001310212701405673026470 0ustar  jenkinsjenkins00000000000000"""
Copyright 2015 Cisco Systems

Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
""" from functionaltests.api.v1.behaviors import base_behaviors from functionaltests.api.v1.models import quota_models class QuotaBehaviors(base_behaviors.BaseBehaviors): def get_quotas(self, extra_headers=None, use_auth=True, user_name=None): """Handles getting quotas :param extra_headers: extra HTTP headers for the REST request :param use_auth: Boolean for whether to send authentication headers :param user_name: The user name used for REST command :return: a request Response object """ resp = self.client.get( 'quotas', response_model_type=quota_models.QuotasResponseModel, extra_headers=extra_headers, use_auth=use_auth, user_name=user_name) return resp def get_project_quotas_list(self, limit=10, offset=0, extra_headers=None, use_auth=True, user_name=None): """Handles getting project quotas :param limit: limits number of returned orders (default 10) :param offset: represents how many records to skip before retrieving the list (default 0) :param extra_headers: extra HTTP headers for the REST request :param use_auth: Boolean for whether to send authentication headers :param user_name: The user name used for REST command :return: the response, a list of project quotas and the next/prev refs """ params = {'limit': limit, 'offset': offset} resp = self.client.get( 'project-quotas', response_model_type=quota_models.ProjectQuotaListModel, params=params, extra_headers=extra_headers, use_auth=use_auth, user_name=user_name) # handle expected JSON parsing errors for unauthenticated requests if resp.status_code == 401 and not use_auth: return resp, None, None, None project_quotas_list = self.get_json(resp) project_quotas, next_ref, prev_ref = self.client.get_list_of_models( project_quotas_list, quota_models.ProjectQuotaListItemModel) return resp, project_quotas, next_ref, prev_ref def get_project_quotas(self, project_id, extra_headers=None, use_auth=True, user_name=None): """Handles getting project quotas :param extra_headers: extra HTTP headers for the REST request :param 
use_auth: Boolean for whether to send authentication headers :param user_name: The user name used for REST command :return: a request Response object """ resp = self.client.get( 'project-quotas/' + project_id, response_model_type=quota_models.ProjectQuotaOneModel, extra_headers=extra_headers, use_auth=use_auth, user_name=user_name) # handle expected JSON parsing errors for unauthenticated requests if resp.status_code == 401 and not use_auth: return resp, None, None, None return resp def set_project_quotas(self, project_id, request_model, extra_headers=None, use_auth=True, user_name=None): """Handles setting project quotas :param project_id: id of project whose quotas are to be set :param extra_headers: extra HTTP headers for the REST request :param use_auth: Boolean for whether to send authentication headers :param user_name: The user name used for REST command :return: a request Response object """ resp = self.client.put( 'project-quotas/' + project_id, request_model=request_model, extra_headers=extra_headers, use_auth=use_auth, user_name=user_name) if resp.status_code == 204: self.created_entities.append((project_id, user_name)) return resp def delete_project_quotas(self, project_id, extra_headers=None, expected_fail=False, use_auth=True, user_name=None): """Handles deleting project quotas :param project_id: id of project whose quotas are to be deleted :param extra_headers: extra HTTP headers for the REST request :param use_auth: Boolean for whether to send authentication headers :param user_name: The user name used for REST command :return: a request Response object """ resp = self.client.delete('project-quotas/' + project_id, extra_headers=extra_headers, use_auth=use_auth, user_name=user_name) if resp.status_code == 204: for item in self.created_entities: if item[0] == project_id: self.created_entities.remove(item) return resp def delete_all_created_quotas(self): """Delete all of the project_quotas that we have created.""" entities = list(self.created_entities) 
        # NOTE(review): loop variable is named 'acl_ref' but these are
        # project ids (looks copy/pasted from AclBehaviors) — rename
        # candidate.
        for (acl_ref, user_name) in entities:
            self.delete_project_quotas(acl_ref, user_name=user_name)
barbican-2.0.0/functionaltests/api/v1/behaviors/consumer_behaviors.py0000664000567000056710000001137312701405673027202 0ustar  jenkinsjenkins00000000000000"""
Copyright 2014-2015 Rackspace

Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
"""
from functionaltests.api.v1.behaviors import base_behaviors
from functionaltests.api.v1.models import consumer_model


class ConsumerBehaviors(base_behaviors.BaseBehaviors):
    """HTTP behaviors for the container /consumers sub-resource."""

    def create_consumer(self, model, container_ref, extra_headers=None,
                        user_name=None, admin=None, use_auth=True):
        """Register a consumer to a container.

        :param model: The metadata for the consumer
        :param container_ref: Full reference to a container
        :param extra_headers: Any additional headers to pass to the request
        :param user_name: The user name used to create the consumer
        :param admin: The user with permission to delete the consumer
        :param use_auth: Boolean for whether to send authentication headers
        :return: A tuple containing the response from the create and the
                 href to the newly registered consumer
        """
        url = '{0}/consumers'.format(container_ref)

        resp = self.client.post(url, request_model=model,
                                extra_headers=extra_headers,
                                user_name=user_name, use_auth=use_auth)

        # handle expected JSON parsing errors for unauthenticated requests
        if resp.status_code == 401 and not use_auth:
            return resp, None

        # only track for cleanup when the registration succeeded
        if resp.status_code == 200:
            if admin is None:
                admin = user_name
            self.created_entities.append((container_ref, model, admin))

        returned_data = self.get_json(resp)
        consumer_data = returned_data.get('consumers')
        return resp, consumer_data

    def get_consumers(self, container_ref, limit=10, offset=0,
                      extra_headers=None, user_name=None, use_auth=True):
        """Gets a list of consumers on a container.

        :param container_ref: Full reference to a container
        :param limit: limits number of returned consumers
        :param offset: represents how many records to skip before retrieving
                       the list
        :param extra_headers: Any additional headers to pass to the request
        :param user_name: The user name used to get the consumer
        :param use_auth: Boolean for whether to send authentication headers
        :return: The response from the get and refs to the next/previous list
                 of consumers
        """
        url = '{0}/consumers'.format(container_ref)

        params = {'limit': limit, 'offset': offset}
        resp = self.client.get(url, params=params,
                               extra_headers=extra_headers,
                               user_name=user_name, use_auth=use_auth)

        # handle expected JSON parsing errors for unauthenticated requests
        if resp.status_code == 401 and not use_auth:
            return resp, None, None, None

        consumer_list = self.get_json(resp)

        consumers, next_ref, prev_ref = self.client.get_list_of_models(
            consumer_list, consumer_model.ConsumerModel)

        return resp, consumers, next_ref, prev_ref

    def delete_consumer(self, model, container_ref, extra_headers=None,
                        user_name=None, use_auth=True):
        """Deletes a consumer from a container.

        :param model: The metadata for the consumer
        :param container_ref: Full reference to a container
        :param extra_headers: Any additional headers to pass to the request
        :param user_name: The user name used to delete the consumer
        :param use_auth: Boolean for whether to send authentication headers
        :return: The response from the delete
        """
        url = '{0}/consumers'.format(container_ref)

        resp = self.client.delete(url, request_model=model,
                                  extra_headers=extra_headers,
                                  user_name=user_name, use_auth=use_auth)

        # handle expected JSON parsing errors for unauthenticated requests
        if resp.status_code == 401 and not use_auth:
            return resp, None

        if resp.status_code != 200:
            return resp, None

        returned_data = self.get_json(resp)
        consumer_data = returned_data['consumers']
        return resp, consumer_data

    def delete_all_created_consumers(self):
        """Delete all of the consumers that we have created."""
        # iterate over a copy because delete_consumer's callers mutate
        # created_entities
        entities = list(self.created_entities)

        for (container_ref, model, admin) in entities:
            self.delete_consumer(model, container_ref, user_name=admin)
barbican-2.0.0/functionaltests/api/v1/behaviors/container_behaviors.py0000664000567000056710000001225112701405674027326 0ustar  jenkinsjenkins00000000000000"""
Copyright 2014-2015 Rackspace

Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
"""
from functionaltests.api.v1.behaviors import base_behaviors
from functionaltests.api.v1.models import container_models


class ContainerBehaviors(base_behaviors.BaseBehaviors):
    """HTTP behaviors for the /containers resource, with cleanup tracking."""

    def create_container(self, model, extra_headers=None, user_name=None,
                         admin=None):
        """Create a container from the data in the model.
:param model: The metadata used to create the container :param extra_headers: Headers used to create the container :param user_name: The user name used to create the container :param admin: The user with permissions to delete the container :return: A tuple containing the response from the create and the href to the newly created container """ resp = self.client.post('containers', request_model=model, extra_headers=extra_headers, user_name=user_name) returned_data = self.get_json(resp) container_ref = returned_data.get('container_ref') if container_ref: if admin is None: admin = user_name self.created_entities.append((container_ref, admin)) return resp, container_ref def get_container(self, container_ref, extra_headers=None, user_name=None): """Handles getting a single container :param container_ref: Reference to the container to be retrieved :param extra_headers: Headers used to get the container :param user_name: The user name used to get the container :return: The response of the GET. """ resp = self.client.get( container_ref, response_model_type=container_models.ContainerModel, user_name=user_name) return resp def get_containers(self, limit=10, offset=0, filter=None, extra_headers=None, user_name=None): """Handles getting a list of containers. :param limit: limits number of returned containers :param offset: represents how many records to skip before retrieving the list :param name_filter: allows you to filter results based on name :param extra_headers: Extra headers used to retrieve a list of containers :param user_name: The user name used to get the list :return: Returns the response, a list of container models, and references to the next and previous list of containers. 
""" params = {'limit': limit, 'offset': offset} if filter: params['name'] = filter resp = self.client.get('containers', params=params, extra_headers=extra_headers, user_name=user_name) container_list = self.get_json(resp) containers, next_ref, prev_ref = self.client.get_list_of_models( container_list, container_models.ContainerModel) return resp, containers, next_ref, prev_ref def delete_container(self, container_ref, extra_headers=None, expected_fail=False, user_name=None): """Handles deleting a containers. :param container_ref: Reference of the container to be deleted :param extra_headers: Any additional headers needed. :param expected_fail: If there is a negative test, this should be marked true if you are trying to delete a container that does not exist. :param user_name: The user name used to delete the container :return: Response of the delete. """ resp = self.client.delete(container_ref, extra_headers, user_name=user_name) if not expected_fail: for item in self.created_entities: if item[0] == container_ref: self.created_entities.remove(item) return resp def delete_all_created_containers(self): """Delete all of the containers that we have created.""" entities = list(self.created_entities) for (container_ref, admin) in entities: self.delete_container(container_ref, user_name=admin) def update_container(self, container_ref, user_name=None): """Attempt to update a container (which is an invalid operation) Update (HTTP PUT) is not supported against a container resource, so issuing this call should fail. :param container_ref: Reference of the container to be updated :param user_name: The user name used to update the container :return: Response of the update. """ resp = self.client.put(container_ref, user_name=user_name) return resp barbican-2.0.0/functionaltests/api/v1/behaviors/ca_behaviors.py0000664000567000056710000001711012701405673025725 0ustar jenkinsjenkins00000000000000""" Copyright 2015 Red Hat, Inc. 
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from functionaltests.api.v1.behaviors import base_behaviors from functionaltests.api.v1.models import ca_models class CABehaviors(base_behaviors.BaseBehaviors): def get_ca(self, ca_ref, extra_headers=None, use_auth=True, user_name=None): """Handles getting a CA :param ca_ref: href for a CA :param extra_headers: extra HTTP headers for the GET request :param use_auth: Boolean for whether to send authentication headers :param user_name: The user name used for request :return: a request Response object """ return self.client.get(ca_ref, response_model_type=ca_models.CAModel, extra_headers=extra_headers, use_auth=use_auth, user_name=user_name) def get_cacert(self, ca_ref, payload_content_encoding=None, extra_headers=None, use_auth=True, user_name=None): """Retrieve the CA signing certificate. """ headers = {'Accept': 'application/octet-stream', 'Accept-Encoding': payload_content_encoding} if extra_headers: headers.update(extra_headers) return self.client.get(ca_ref + '/cacert', extra_headers=headers, use_auth=use_auth, user_name=user_name) def get_cas(self, limit=10, offset=0, user_name=None): """Handles getting a list of CAs. 
:param limit: limits number of returned CAs :param offset: represents how many records to skip before retrieving the list :return: the response, a list of cas, total number of cas, next and prev references """ resp = self.client.get('cas', user_name=user_name, params={'limit': limit, 'offset': offset}) # TODO(alee) refactor to use he client's get_list_of_models() resp_json = self.get_json(resp) cas, total, next_ref, prev_ref = [], 0, None, None for item in resp_json: if 'next' == item: next_ref = resp_json.get('next') elif 'previous' == item: prev_ref = resp_json.get('previous') elif 'cas' == item: cas = resp_json.get('cas') elif 'total' == item: total = resp_json.get('total') return resp, cas, total, next_ref, prev_ref def create_ca(self, model, headers=None, use_auth=True, user_name=None, admin=None): """Create a subordinate CA from the data in the model. :param model: The metadata used to create the subCA :param use_auth: Boolean for whether to send authentication headers :param user_name: The user name used to create the subCA :param admin: The user with permissions to delete the subCA :return: A tuple containing the response from the create and the href to the newly created subCA """ resp = self.client.post('cas', request_model=model, extra_headers=headers, use_auth=use_auth, user_name=user_name) # handle expected JSON parsing errors for unauthenticated requests if resp.status_code == 401 and not use_auth: return resp, None returned_data = self.get_json(resp) ca_ref = returned_data.get('ca_ref') if ca_ref: if admin is None: admin = user_name self.created_entities.append((ca_ref, admin)) return resp, ca_ref def delete_ca(self, ca_ref, extra_headers=None, expected_fail=False, use_auth=True, user_name=None): """Delete a secret. 
:param ca_ref: HATEOAS ref of the secret to be deleted :param extra_headers: Optional HTTP headers to add to the request :param expected_fail: If test is expected to fail the deletion :param use_auth: Boolean for whether to send authentication headers :param user_name: The user name used to delete the entity :return A request response object """ resp = self.client.delete(ca_ref, extra_headers=extra_headers, use_auth=use_auth, user_name=user_name) if not expected_fail: for item in self.created_entities: if item[0] == ca_ref: self.created_entities.remove(item) return resp def delete_all_created_cas(self): """Delete all of the cas that we have created.""" entities = list(self.created_entities) for (ca_ref, admin) in entities: self.delete_ca(ca_ref, user_name=admin) def add_ca_to_project(self, ca_ref, headers=None, use_auth=True, user_name=None): resp = self.client.post(ca_ref + '/add-to-project', extra_headers=headers, use_auth=use_auth, user_name=user_name) return resp def remove_ca_from_project(self, ca_ref, headers=None, use_auth=True, user_name=None): resp = self.client.post(ca_ref + '/remove-from-project', extra_headers=headers, use_auth=use_auth, user_name=user_name) return resp def set_preferred(self, ca_ref, headers=None, use_auth=True, user_name=None): resp = self.client.post(ca_ref + '/set-preferred', extra_headers=headers, use_auth=use_auth, user_name=user_name) return resp def get_preferred(self, extra_headers=None, use_auth=True, user_name=None): resp = self.client.get('cas/preferred', response_model_type=ca_models.CAModel, extra_headers=extra_headers, use_auth=use_auth, user_name=user_name) return resp def set_global_preferred(self, ca_ref, headers=None, use_auth=True, user_name=None): resp = self.client.post(ca_ref + '/set-global-preferred', extra_headers=headers, use_auth=use_auth, user_name=user_name) return resp def unset_global_preferred(self, headers=None, use_auth=True, user_name=None): resp = self.client.post('cas/unset-global-preferred', 
extra_headers=headers, use_auth=use_auth, user_name=user_name) return resp def get_global_preferred(self, extra_headers=None, use_auth=True, user_name=None): resp = self.client.get('cas/global-preferred', response_model_type=ca_models.CAModel, extra_headers=extra_headers, use_auth=use_auth, user_name=user_name) return resp barbican-2.0.0/functionaltests/api/v1/behaviors/base_behaviors.py0000664000567000056710000000352612701405673026262 0ustar jenkinsjenkins00000000000000""" Copyright 2014-2015 Rackspace Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import logging import os class BaseBehaviors(object): def __init__(self, client): self.LOG = logging.getLogger(type(self).__name__) self.client = client self.created_entities = [] def get_json(self, response): json_data = dict() try: json_data = response.json() except ValueError as e: self.LOG.exception(e) self.LOG.error("Error converting response to JSON: %s", e.message) self.LOG.error("Response Content: %s", response.content) return json_data def get_id_from_href(self, href): """Returns the id from reference. The id must be the last item in the href. :param href: The href containing the id. 
:returns the id portion of the href """ item_id = None if href and len(href) > 0: base, item_id = os.path.split(href) return item_id def get_user_id_from_name(self, user_name): """From a configured user name, get the unique user id from keystone""" return self.client.get_user_id_from_name(user_name) def get_project_id_from_name(self, user_name): """From a configured user name, get the project id from keystone""" return self.client.get_project_id_from_name(user_name) barbican-2.0.0/functionaltests/api/v1/__init__.py0000664000567000056710000000000012701405673023043 0ustar jenkinsjenkins00000000000000barbican-2.0.0/functionaltests/api/v1/models/0000775000567000056710000000000012701406024022216 5ustar jenkinsjenkins00000000000000barbican-2.0.0/functionaltests/api/v1/models/acl_models.py0000664000567000056710000000144212701405673024704 0ustar jenkinsjenkins00000000000000""" Copyright 2015 Cisco Systems Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from functionaltests.api.v1.models.base_models import BaseModel class AclModel(BaseModel): def __init__(self, acl_ref=None, read=None): super(AclModel, self).__init__() self.acl_ref = acl_ref self.read = read barbican-2.0.0/functionaltests/api/v1/models/container_models.py0000664000567000056710000000313412701405673026127 0ustar jenkinsjenkins00000000000000""" Copyright 2014-2015 Rackspace Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from functionaltests.api.v1.models.base_models import BaseModel class SecretRefModel(BaseModel): def __init__(self, name=None, secret_ref=None): self.name = name self.secret_ref = secret_ref class ContainerModel(BaseModel): def __init__(self, name=None, type=None, secret_refs=[], container_ref=None, consumers=None, status=None, updated=None, created=None, creator_id=None): self.name = name self.type = type self.secret_refs = secret_refs self.container_ref = container_ref self.consumers = consumers self.status = status self.updated = updated self.created = created self.creator_id = creator_id @classmethod def dict_to_obj(cls, input_dict): secret_refs = [SecretRefModel(**secret_ref) for secret_ref in input_dict.get('secret_refs', [])] return cls(input_dict.get('name'), input_dict.get('type'), secret_refs, container_ref=input_dict.get('container_ref')) barbican-2.0.0/functionaltests/api/v1/models/quota_models.py0000664000567000056710000000504512701405673025301 0ustar jenkinsjenkins00000000000000""" Copyright 2015 Cisco Systems Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
""" from functionaltests.api.v1.models.base_models import BaseModel class QuotasModel(BaseModel): def __init__(self, secrets=None, orders=None, containers=None, consumers=None, cas=None): super(QuotasModel, self).__init__() self.secrets = secrets self.orders = orders self.containers = containers self.consumers = consumers self.cas = cas class QuotasResponseModel(BaseModel): def __init__(self, quotas=None): super(QuotasResponseModel, self).__init__() self.quotas = quotas @classmethod def dict_to_obj(cls, input_dict): quotas = QuotasModel(**input_dict.get('quotas')) return cls(quotas=quotas) class ProjectQuotaRequestModel(BaseModel): def __init__(self, project_quotas=None): super(ProjectQuotaRequestModel, self).__init__() self.project_quotas = project_quotas @classmethod def dict_to_obj(cls, input_dict): project_quotas = QuotasModel(**input_dict.get('project_quotas')) return cls(project_quotas=project_quotas) class ProjectQuotaOneModel(BaseModel): def __init__(self, project_quotas=None): super(ProjectQuotaOneModel, self).__init__() self.project_quotas = QuotasModel(**project_quotas) class ProjectQuotaListItemModel(BaseModel): def __init__(self, project_id=None, project_quotas=None): super(ProjectQuotaListItemModel, self).__init__() self.project_id = project_id self.project_quotas = QuotasModel(**project_quotas) class ProjectQuotaListModel(BaseModel): def __init__(self, project_quotas=None): super(ProjectQuotaListModel, self).__init__() self.project_quotas = project_quotas @classmethod def dict_to_obj(cls, input_dict): project_quotas = [ProjectQuotaListItemModel(**project_quotas_item) for project_quotas_item in input_dict.get('project_quotas', [])] return cls(project_quotas=project_quotas) barbican-2.0.0/functionaltests/api/v1/models/order_models.py0000664000567000056710000000317512701405673025265 0ustar jenkinsjenkins00000000000000""" Copyright 2014-2015 Rackspace Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in 
compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from functionaltests.api.v1.models.base_models import BaseModel class OrderModel(BaseModel): def __init__(self, type=None, name=None, status=None, secret_ref=None, expiration=None, updated=None, created=None, meta=None, payload_content_type=None, order_ref=None, container_ref=None, error_status_code=None, error_reason=None, sub_status=None, sub_status_message=None, creator_id=None): super(OrderModel, self).__init__() self.type = type self.name = name self.status = status self.sub_status = sub_status self.sub_status_message = sub_status_message self.secret_ref = secret_ref self.expiration = expiration self.updated = updated self.created = created self.meta = meta self.payload_content_type = payload_content_type self.order_ref = order_ref self.container_ref = container_ref self.error_status_code = error_status_code self.error_reason = error_reason self.creator_id = creator_id barbican-2.0.0/functionaltests/api/v1/models/__init__.py0000664000567000056710000000000012701405673024326 0ustar jenkinsjenkins00000000000000barbican-2.0.0/functionaltests/api/v1/models/secret_models.py0000664000567000056710000000317612701405673025440 0ustar jenkinsjenkins00000000000000""" Copyright 2014-2015 Rackspace Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from functionaltests.api.v1.models.base_models import BaseModel class SecretModel(BaseModel): def __init__(self, name=None, expiration=None, algorithm=None, secret_ref=None, bit_length=None, mode=None, secret_type=None, payload_content_type=None, payload=None, content_types=None, payload_content_encoding=None, status=None, updated=None, created=None, creator_id=None, metadata=None): super(SecretModel, self).__init__() self.name = name self.expiration = expiration self.algorithm = algorithm self.bit_length = bit_length self.mode = mode self.secret_type = secret_type self.payload_content_type = payload_content_type self.payload = payload self.content_types = content_types self.payload_content_encoding = payload_content_encoding self.secret_ref = secret_ref self.status = status self.updated = updated self.created = created self.creator_id = creator_id self.metadata = metadata barbican-2.0.0/functionaltests/api/v1/models/consumer_model.py0000664000567000056710000000166612701405673025625 0ustar jenkinsjenkins00000000000000""" Copyright 2014-2015 Rackspace Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
""" from functionaltests.api.v1.models.base_models import BaseModel class ConsumerModel(BaseModel): def __init__(self, name=None, URL=None, created=None, updated=None, status=None): super(ConsumerModel, self).__init__() self.name = name self.URL = URL self.created = created self.updated = updated self.status = status barbican-2.0.0/functionaltests/api/v1/models/ca_models.py0000664000567000056710000000257112701405673024534 0ustar jenkinsjenkins00000000000000""" Copyright 2015 Red Hat Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from functionaltests.api.v1.models.base_models import BaseModel class CAModel(BaseModel): def __init__(self, expiration=None, ca_id=None, ca_ref=None, status=None, updated=None, created=None, plugin_name=None, plugin_ca_id=None, meta=None, parent_ca_ref=None, subject_dn=None, name=None, description=None): super(CAModel, self).__init__() self.expiration = expiration self.ca_id = ca_id self.ca_ref = ca_ref self.status = status self.updated = updated self.created = created self.plugin_name = plugin_name self.plugin_ca_id = plugin_ca_id self.meta = meta self.parent_ca_ref = parent_ca_ref self.subject_dn = subject_dn self.name = name self.description = description barbican-2.0.0/functionaltests/api/v1/models/base_models.py0000664000567000056710000000562312701405673025064 0ustar jenkinsjenkins00000000000000""" Copyright 2014-2015 Rackspace Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import json import logging LOG = logging.getLogger(__name__) class BaseModel(object): """Base class for models. To allow simple (de)serialization we will use __dict__ to create """ def override_values(self, **kwargs): for attr_name, attr_value in kwargs.items(): if hasattr(self, attr_name): setattr(self, attr_name, attr_value) def obj_to_json(self): """Convert this object to a JSON string. :return A string of JSON containing the fields in this object """ return json.dumps(self.obj_to_dict()) def obj_to_dict(self): """Create a dict of the values for this model object. If there are fields that are not set in this object then those will NOT have entries in the returned dict. :return A dict representing this model """ the_dict = self.__dict__ retval = self._remove_empty_fields_from_dict(the_dict) return retval def _remove_empty_fields_from_dict(self, dictionary): """Remove k,v pairs with empty values from a dictionary. :param dictionary: a dictionary of stuff :return: the same dictionary where all k,v pairs with empty values have been removed. """ # NOTE(jaosorior): deleting a key from the incoming dictionary actually # affects the model object. So we do a copy to avoid this. resulting_dict = dictionary.copy() # Dumping the keys to a list as we'll be changing the dict size empty_keys = [k for k, v in dictionary.items() if v is None] for k in empty_keys: del resulting_dict[k] return resulting_dict @classmethod def json_to_obj(cls, serialized_str): """Create a model from a JSON string. 
:param serialized_str: the JSON string :return a secret object """ try: json_dict = json.loads(serialized_str) return cls.dict_to_obj(json_dict) except TypeError as e: LOG.error('Couldn\'t deserialize input: %s\n Because: %s', serialized_str, e) @classmethod def dict_to_obj(cls, input_dict): """Create an object from a dict. :param input_dict: A dict of fields. :return a model object build from the passed in dict. """ return cls(**input_dict) barbican-2.0.0/functionaltests/api/v1/smoke/0000775000567000056710000000000012701406024022051 5ustar jenkinsjenkins00000000000000barbican-2.0.0/functionaltests/api/v1/smoke/test_containers.py0000664000567000056710000002174012701405673025644 0ustar jenkinsjenkins00000000000000# Copyright (c) 2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
from testtools import testcase from barbican.tests import utils from functionaltests.api import base from functionaltests.api.v1.behaviors import container_behaviors from functionaltests.api.v1.behaviors import secret_behaviors from functionaltests.api.v1.models import container_models from functionaltests.api.v1.models import secret_models create_secret_defaults_data = { "name": "AES key", "expiration": "2018-02-28T19:14:44.180394", "algorithm": "aes", "bit_length": 256, "mode": "cbc", "payload": "gF6+lLoF3ohA9aPRpt+6bQ==", "payload_content_type": "application/octet-stream", "payload_content_encoding": "base64", } create_container_defaults_data = { "name": "containername", "type": "generic", "secret_refs": [ { "name": "secret1", }, { "name": "secret2", }, { "name": "secret3" } ] } create_container_rsa_data = { "name": "rsacontainer", "type": "rsa", "secret_refs": [ { "name": "public_key", }, { "name": "private_key", }, { "name": "private_key_passphrase" } ] } create_container_empty_data = { "name": None, "type": "generic", "secret_refs": [] } @utils.parameterized_test_case class ContainersTestCase(base.TestCase): def setUp(self): super(ContainersTestCase, self).setUp() self.secret_behaviors = secret_behaviors.SecretBehaviors(self.client) self.behaviors = container_behaviors.ContainerBehaviors( self.client) # Set up three secrets secret_ref_1 = self._create_a_secret() secret_ref_2 = self._create_a_secret() secret_ref_3 = self._create_a_secret() create_container_defaults_data[ 'secret_refs'][0]['secret_ref'] = secret_ref_1 create_container_defaults_data[ 'secret_refs'][1]['secret_ref'] = secret_ref_2 create_container_defaults_data[ 'secret_refs'][2]['secret_ref'] = secret_ref_3 create_container_rsa_data[ 'secret_refs'][0]['secret_ref'] = secret_ref_1 create_container_rsa_data[ 'secret_refs'][1]['secret_ref'] = secret_ref_2 create_container_rsa_data[ 'secret_refs'][2]['secret_ref'] = secret_ref_3 self.secret_id_1 = secret_ref_1.split('/')[-1] self.secret_id_2 = 
secret_ref_2.split('/')[-1] self.secret_id_3 = secret_ref_3.split('/')[-1] def tearDown(self): self.secret_behaviors.delete_all_created_secrets() self.behaviors.delete_all_created_containers() super(ContainersTestCase, self).tearDown() def _create_a_secret(self): secret_model = secret_models.SecretModel(**create_secret_defaults_data) resp, secret_ref = self.secret_behaviors.create_secret(secret_model) self.assertEqual(resp.status_code, 201) self.assertIsNotNone(secret_ref) return secret_ref def _get_a_secret(self, secret_id): resp = self.client.get('secrets/{0}'.format(secret_id)) self.assertEqual(resp.status_code, 200) return resp.json() @testcase.attr('positive') def test_container_create_empty(self): """Covers creating an empty generic container.""" test_model = container_models.ContainerModel( **create_container_empty_data) resp, container_ref = self.behaviors.create_container( test_model) self.assertEqual(resp.status_code, 201) self.assertGreater(len(container_ref), 0) @testcase.attr('positive') def test_container_create_defaults(self): """Covers creating a container with three secret refs.""" test_model = container_models.ContainerModel( **create_container_defaults_data) resp, container_ref = self.behaviors.create_container(test_model) self.assertEqual(resp.status_code, 201) self.assertGreater(len(container_ref), 0) @testcase.attr('positive') def test_container_create_rsa(self): """Create an RSA container with expected secret refs.""" test_model = container_models.ContainerModel( **create_container_rsa_data) resp, container_ref = self.behaviors.create_container(test_model) self.assertEqual(resp.status_code, 201) self.assertGreater(len(container_ref), 0) @utils.parameterized_dataset({ 'alphanumeric': ['a2j3j6ll9'], 'punctuation': ['~!@#$%^&*()_+`-={}[]|:;<>,.?'], 'len_255': [str(bytearray().zfill(255))], 'uuid': ['54262d9d-4bc7-4821-8df0-dc2ca8e112bb'], 'empty': [''] }) @testcase.attr('positive') def test_container_get_defaults_w_valid_name(self, name): 
"""Covers getting a generic container with a three secrets.""" test_model = container_models.ContainerModel( **create_container_defaults_data) overrides = {'name': name} test_model.override_values(**overrides) secret_refs = [] for secret_ref in test_model.secret_refs: secret_refs.append(secret_ref['secret_ref']) resp, container_ref = self.behaviors.create_container(test_model) self.assertEqual(resp.status_code, 201) self.assertGreater(len(container_ref), 0) get_resp = self.behaviors.get_container(container_ref) # Verify the response data self.assertEqual(get_resp.status_code, 200) self.assertEqual(get_resp.model.name, test_model.name) self.assertEqual(get_resp.model.container_ref, container_ref) self.assertEqual(get_resp.model.type, test_model.type) # Verify the secret refs in the response self.assertEqual(len(get_resp.model.secret_refs), 3) self.assertIn(get_resp.model.secret_refs[0].secret_ref, secret_refs) self.assertIn(get_resp.model.secret_refs[1].secret_ref, secret_refs) self.assertIn(get_resp.model.secret_refs[2].secret_ref, secret_refs) @testcase.attr('positive') def test_container_get_rsa(self): """Covers getting an rsa container.""" test_model = container_models.ContainerModel( **create_container_rsa_data) secret_refs = [] for secret_ref in test_model.secret_refs: secret_refs.append(secret_ref['secret_ref']) resp, container_ref = self.behaviors.create_container(test_model) self.assertEqual(resp.status_code, 201) self.assertGreater(len(container_ref), 0) get_resp = self.behaviors.get_container( container_ref) # Verify the response data self.assertEqual(get_resp.status_code, 200) self.assertEqual(get_resp.model.name, "rsacontainer") self.assertEqual(get_resp.model.container_ref, container_ref) self.assertEqual(get_resp.model.type, "rsa") # Verify the secret refs in the response self.assertEqual(len(get_resp.model.secret_refs), 3) self.assertIn(get_resp.model.secret_refs[0].secret_ref, secret_refs) self.assertIn(get_resp.model.secret_refs[1].secret_ref, 
secret_refs) self.assertIn(get_resp.model.secret_refs[2].secret_ref, secret_refs) @testcase.attr('positive') def test_containers_get_defaults(self): """Covers getting a list of containers.""" limit = 10 offset = 0 test_model = container_models.ContainerModel( **create_container_defaults_data) for i in range(11): resp, container_ref = self.behaviors.create_container(test_model) self.assertEqual(resp.status_code, 201) self.assertGreater(len(container_ref), 0) resp, containers, next_ref, prev_ref = self.behaviors.get_containers( limit=limit, offset=offset ) self.assertEqual(resp.status_code, 200) self.assertEqual(len(containers), limit) self.assertIsNone(prev_ref) self.assertIsNotNone(next_ref) def test_container_delete_defaults(self): """Covers deleting a container.""" test_model = container_models.ContainerModel( **create_container_defaults_data) resp, container_ref = self.behaviors.create_container(test_model) self.assertEqual(resp.status_code, 201) self.assertGreater(len(container_ref), 0) del_resp = self.behaviors.delete_container(container_ref) self.assertEqual(del_resp.status_code, 204) self.assertEqual(len(del_resp.content), 0) barbican-2.0.0/functionaltests/api/v1/smoke/__init__.py0000664000567000056710000000000012701405673024161 0ustar jenkinsjenkins00000000000000barbican-2.0.0/functionaltests/api/v1/smoke/test_secrets.py0000664000567000056710000002016612701405673025150 0ustar jenkinsjenkins00000000000000# Copyright (c) 2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. 
# See the License for the specific language governing permissions and # limitations under the License. import binascii import copy from testtools import testcase from barbican.tests import utils from functionaltests.api import base from functionaltests.api.v1.behaviors import secret_behaviors from functionaltests.api.v1.models import secret_models default_secret_create_data = { "name": "AES key", "expiration": "2018-02-28T19:14:44.180394", "algorithm": "aes", "bit_length": 256, "mode": "cbc", "payload": "gF6+lLoF3ohA9aPRpt+6bQ==", "payload_content_type": "application/octet-stream", "payload_content_encoding": "base64", } # Any field with None will be created in the model with None as the value # but will be omitted in the final request to the server. # # secret_create_nones_data is effectively an empty json request to the server. default_secret_create_all_none_data = { "name": None, "expiration": None, "algorithm": None, "bit_length": None, "mode": None, "payload": None, "payload_content_type": None, "payload_content_encoding": None, } default_secret_create_emptystrings_data = { "name": '', "expiration": '', "algorithm": '', "bit_length": '', "mode": '', "payload": '', "payload_content_type": '', "payload_content_encoding": '', } default_secret_create_two_phase_data = { "name": "AES key", "expiration": "2018-02-28T19:14:44.180394", "algorithm": "aes", "bit_length": 256, "mode": "cbc", } @utils.parameterized_test_case class SecretsTestCase(base.TestCase): def setUp(self): super(SecretsTestCase, self).setUp() self.behaviors = secret_behaviors.SecretBehaviors(self.client) # make a local mutable copies of the default data to prevent # possible data contamination if (when?) the data contains # any nested dicts. 
self.create_default_data = copy.deepcopy(default_secret_create_data) self.create_all_none_data = copy.deepcopy( default_secret_create_all_none_data) self.create_emptystrings = copy.deepcopy( default_secret_create_emptystrings_data) self.create_two_phase_data = copy.deepcopy( default_secret_create_two_phase_data) def tearDown(self): self.behaviors.delete_all_created_secrets() super(SecretsTestCase, self).tearDown() @testcase.attr('positive') def test_secret_create_no_expiration(self): """Covers creating a secret without an expiration.""" test_model = secret_models.SecretModel(**self.create_default_data) test_model.expiration = None resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 201) resp = self.behaviors.get_secret_metadata(secret_ref) self.assertEqual(resp.status_code, 200) self.assertEqual(resp.model.status, "ACTIVE") self.assertGreater(resp.model.secret_ref, 0) @utils.parameterized_dataset({ 'alphanumeric': ['1f34ds'], 'punctuation': ['~!@#$%^&*()_+`-={}[]|:;<>,.?'], 'uuid': ['54262d9d-4bc7-4821-8df0-dc2ca8e112bb'], 'len_255': [str(bytearray().zfill(255))], 'empty': [''], 'null': [None] }) @testcase.attr('positive') def test_secret_get_metadata_w_valid_name(self, name): """Covers getting and checking a secret's metadata.""" test_model = secret_models.SecretModel(**self.create_default_data) test_model.name = name resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 201) resp = self.behaviors.get_secret_metadata(secret_ref) self.assertEqual(resp.status_code, 200) self.assertEqual(resp.model.status, "ACTIVE") self.assertEqual(resp.model.name, test_model.name) self.assertEqual(resp.model.mode, test_model.mode) self.assertEqual(resp.model.algorithm, test_model.algorithm) self.assertEqual(resp.model.bit_length, test_model.bit_length) @testcase.attr('positive') def test_secret_create(self): """Covers single phase secret creation. 
Verify that a secret gets created with the correct http response code and a secret reference. """ test_model = secret_models.SecretModel(**self.create_default_data) resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 201) self.assertIsNotNone(secret_ref) @testcase.attr('positive') def test_secret_delete(self): """Covers deleting a secret.""" test_model = secret_models.SecretModel(**self.create_default_data) resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 201) del_resp = self.behaviors.delete_secret(secret_ref) self.assertEqual(del_resp.status_code, 204) @testcase.attr('positive') def test_secret_delete_minimal_secret_w_no_metadata(self): """Covers deleting a secret with nones data.""" test_model = secret_models.SecretModel(**self.create_all_none_data) resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 201) del_resp = self.behaviors.delete_secret(secret_ref) self.assertEqual(del_resp.status_code, 204) @testcase.attr('positive') def test_secret_get(self): """Covers getting a secret's payload data.""" test_model = secret_models.SecretModel(**self.create_default_data) resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 201) get_resp = self.behaviors.get_secret(secret_ref, test_model.payload_content_type) self.assertEqual(get_resp.status_code, 200) self.assertIn(test_model.payload, binascii.b2a_base64(get_resp.content)) @testcase.attr('positive') def test_secret_update_two_phase(self): """Covers updating a secret's payload data.""" # Create test_model = secret_models.SecretModel(**self.create_two_phase_data) resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 201) # Update payload = "gF6+lLoF3ohA9aPRpt+6bQ==" payload_content_type = "application/octet-stream" payload_content_encoding = "base64" update_resp = self.behaviors.update_secret_payload( 
secret_ref, payload=payload, payload_content_type=payload_content_type, payload_content_encoding=payload_content_encoding) self.assertEqual(update_resp.status_code, 204) # Get/Check Updated sec_resp = self.behaviors.get_secret( secret_ref=secret_ref, payload_content_type=payload_content_type) self.assertEqual(sec_resp.status_code, 200) self.assertIn('gF6+lLoF3ohA9aPRpt+6bQ==', binascii.b2a_base64(sec_resp.content)) @testcase.attr('positive') def test_secrets_get_multiple_secrets(self): """Covers getting a list of secrets. Creates 11 secrets then returns a list of 2 secrets """ test_model = secret_models.SecretModel(**self.create_default_data) limit = 2 offset = 0 for i in range(0, 11): self.behaviors.create_secret(test_model) resp, secrets_list, next_ref, prev_ref = self.behaviors.get_secrets( limit=limit, offset=offset) self.assertEqual(resp.status_code, 200) self.assertEqual(len(secrets_list), limit) self.assertIsNone(prev_ref) self.assertIsNotNone(next_ref) barbican-2.0.0/functionaltests/api/v1/smoke/test_orders.py0000664000567000056710000001440312701405673024773 0ustar jenkinsjenkins00000000000000# Copyright (c) 2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
import time from testtools import testcase from barbican.tests import utils from functionaltests.api import base from functionaltests.api.v1.behaviors import order_behaviors from functionaltests.api.v1.models import order_models def get_default_order_create_data(): return {'type': 'key', "meta": { "name": "barbican functional test secret name", "algorithm": "aes", "bit_length": 256, "mode": "cbc", } } # Any field with None will be created in the model with None as the value # but will be omitted in the final request (via the requests package) # to the server. # # Given that fact, order_create_nones_data is effectively an empty json request # to the server. def get_default_order_create_all_none_data(): return { 'type': None, "meta": { "name": None, "algorithm": None, "bit_length": None, "mode": None, } } @utils.parameterized_test_case class OrdersTestCase(base.TestCase): def setUp(self): super(OrdersTestCase, self).setUp() self.behaviors = order_behaviors.OrderBehaviors(self.client) self.create_default_data = get_default_order_create_data() self.create_all_none_data = get_default_order_create_all_none_data() def tearDown(self): self.behaviors.delete_all_created_orders() super(OrdersTestCase, self).tearDown() def wait_for_order(self, order_resp, order_ref): # Make sure we have an active order time_count = 1 while order_resp.model.status != "ACTIVE" and time_count <= 4: time.sleep(1) time_count += 1 order_resp = self.behaviors.get_order(order_ref) @testcase.attr('positive') def test_order_create(self): """Covers simple order creation.""" # first create an order test_model = order_models.OrderModel(**self.create_default_data) create_resp, order_ref = self.behaviors.create_order(test_model) # verify that the order was created successfully self.assertEqual(create_resp.status_code, 202) self.assertIsNotNone(order_ref) @testcase.attr('positive') def test_order_get_metadata(self): """Covers order metadata. Assumes that the order status will be active or pending. 
""" # first create an order test_model = order_models.OrderModel(**self.create_default_data) create_resp, order_ref = self.behaviors.create_order(test_model) # verify that the order was created successfully self.assertEqual(create_resp.status_code, 202) self.assertIsNotNone(order_ref) # given the order href, retrieve the order order_resp = self.behaviors.get_order(order_ref) # verify that the get was successful self.assertEqual(order_resp.status_code, 200) self.assertTrue(order_resp.model.status == "ACTIVE" or order_resp.model.status == "PENDING") # verify the metadata self.assertEqual(order_resp.model.meta.get('name'), test_model.meta.get('name')) self.assertEqual(order_resp.model.meta.get('mode'), test_model.meta.get('mode')) self.assertEqual(order_resp.model.meta.get('algorithm'), test_model.meta.get('algorithm')) self.assertEqual(order_resp.model.meta.get('bit_length'), test_model.meta.get('bit_length')) @testcase.attr('positive') def test_order_get(self): """Covers getting an order. Assumes that the order status will be active or pending. 
""" # create an order test_model = order_models.OrderModel(**self.create_default_data) create_resp, order_ref = self.behaviors.create_order(test_model) self.assertEqual(create_resp.status_code, 202) self.assertIsNotNone(order_ref) # get the order order_resp = self.behaviors.get_order(order_ref) # verify the order self.assertEqual(order_resp.status_code, 200) self.assertIsNotNone(order_resp.model.order_ref) self.assertEqual(order_resp.model.type, 'key') self.assertTrue(order_resp.model.status == "ACTIVE" or order_resp.model.status == "PENDING") @testcase.attr('positive') def test_order_delete(self): """Covers simple order deletion.""" # create an order test_model = order_models.OrderModel(**self.create_default_data) create_resp, order_ref = self.behaviors.create_order(test_model) self.assertEqual(create_resp.status_code, 202) self.assertIsNotNone(order_ref) # delete the order delete_resp = self.behaviors.delete_order(order_ref) # verify the delete self.assertEqual(delete_resp.status_code, 204) @testcase.attr('positive') def test_orders_get(self): """Covers getting a list of orders.""" # create 11 orders test_model = order_models.OrderModel(**self.create_default_data) for i in range(0, 11): create_resp, order_ref = self.behaviors.create_order(test_model) self.assertEqual(create_resp.status_code, 202) self.assertIsNotNone(order_ref) # get a list of orders limit = 7 offset = 0 resp, orders_list, next_ref, prev_ref = self.behaviors.get_orders( limit=limit, offset=offset) # verify that the get for the list was successful self.assertEqual(resp.status_code, 200) self.assertEqual(len(orders_list), limit) self.assertIsNotNone(next_ref) self.assertIsNone(prev_ref) barbican-2.0.0/functionaltests/api/v1/smoke/test_consumers.py0000664000567000056710000002070512701405673025515 0ustar jenkinsjenkins00000000000000# Copyright (c) 2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import copy from testtools import testcase from functionaltests.api import base from functionaltests.api.v1.behaviors import consumer_behaviors from functionaltests.api.v1.behaviors import container_behaviors from functionaltests.api.v1.behaviors import secret_behaviors from functionaltests.api.v1.models import consumer_model from functionaltests.api.v1.models import container_models from functionaltests.api.v1.models import secret_models create_secret_data = { "name": "AES key", "expiration": "2018-02-28T19:14:44.180394", "algorithm": "aes", "bit_length": 256, "mode": "cbc", "payload": "gF6+lLoF3ohA9aPRpt+6bQ==", "payload_content_type": "application/octet-stream", "payload_content_encoding": "base64", } default_consumer_data = { "name": "consumername", "URL": "consumerURL" } create_container_data = { "name": "containername", "type": "generic", "secret_refs": [ { "name": "secret1", }, { "name": "secret2", } ] } class ConsumersTestCase(base.TestCase): default_data = default_consumer_data def _create_a_secret(self): secret_model = secret_models.SecretModel(**create_secret_data) resp, secret_ref = self.secret_behaviors.create_secret(secret_model) self.assertEqual(resp.status_code, 201) self.assertIsNotNone(secret_ref) return secret_ref def setUp(self): super(ConsumersTestCase, self).setUp() self.secret_behaviors = secret_behaviors.SecretBehaviors(self.client) self.container_behaviors = container_behaviors.ContainerBehaviors( self.client ) self.consumer_behaviors = consumer_behaviors.ConsumerBehaviors( self.client ) self.consumer_data = 
copy.deepcopy(self.default_data) # Set up two secrets secret_ref_1 = self._create_a_secret() secret_ref_2 = self._create_a_secret() # Create a container with our secrets create_container_data['secret_refs'][0]['secret_ref'] = secret_ref_1 create_container_data['secret_refs'][1]['secret_ref'] = secret_ref_2 container_model = container_models.ContainerModel( **create_container_data ) resp, container_ref = self.container_behaviors.create_container( container_model ) self.assertEqual(resp.status_code, 201) self.assertIsNotNone(container_ref) self.container_ref = container_ref def tearDown(self): self.secret_behaviors.delete_all_created_secrets() self.container_behaviors.delete_all_created_containers() super(ConsumersTestCase, self).tearDown() @testcase.attr('positive') def test_create_consumer_defaults(self): """Covers consumer creation. All of the data needed to create the consumer is provided in a single POST. """ test_model = consumer_model.ConsumerModel(**self.consumer_data) resp, consumer_data = self.consumer_behaviors.create_consumer( test_model, self.container_ref) self.assertEqual(resp.status_code, 200) self.assertIsNotNone(consumer_data) @testcase.attr('positive') def test_get_consumer_defaults(self): """Tests getting a list of consumers for a container.""" # Create first consumer test_model = consumer_model.ConsumerModel(**self.consumer_data) resp, consumer_data = self.consumer_behaviors.create_consumer( test_model, self.container_ref) self.assertEqual(resp.status_code, 200) self.assertIsNotNone(consumer_data) # Create second consumer test_model.name = "consumername2" test_model.URL = "consumerURL2" resp, consumer_data = self.consumer_behaviors.create_consumer( test_model, self.container_ref) self.assertEqual(resp.status_code, 200) self.assertIsNotNone(consumer_data) # Get list of consumers resp, consumers, nref, pref = self.consumer_behaviors.get_consumers( self.container_ref ) self.assertEqual(resp.status_code, 200) self.assertIn("consumername", 
consumers[0].name) self.assertIn("consumername2", consumers[1].name) @testcase.attr('positive') def test_delete_consumer_defaults(self): """Covers consumer deletion. A consumer is first created, and then the consumer is deleted and verified to no longer exist. """ test_model = consumer_model.ConsumerModel(**self.consumer_data) resp, consumer_data = self.consumer_behaviors.create_consumer( test_model, self.container_ref) self.assertEqual(resp.status_code, 200) self.assertIsNotNone(consumer_data) # Delete the consumer resp, consumer_data = self.consumer_behaviors.delete_consumer( test_model, self.container_ref ) self.assertEqual(resp.status_code, 200) self.assertIsNotNone(consumer_data) self.assertNotIn(test_model.name, consumer_data) self.assertNotIn(test_model.URL, consumer_data) @testcase.attr('positive') def test_recreate_consumer_defaults(self): """Covers consumer recreation.""" test_model = consumer_model.ConsumerModel(**self.consumer_data) resp, consumer_data = self.consumer_behaviors.create_consumer( test_model, self.container_ref) self.assertEqual(resp.status_code, 200) self.assertIsNotNone(consumer_data) # Delete the consumer resp, consumer_data = self.consumer_behaviors.delete_consumer( test_model, self.container_ref ) self.assertEqual(resp.status_code, 200) self.assertIsNotNone(consumer_data) self.assertNotIn(test_model.name, consumer_data) self.assertNotIn(test_model.URL, consumer_data) # Register the consumer again test_model = consumer_model.ConsumerModel(**self.consumer_data) resp, consumer_data = self.consumer_behaviors.create_consumer( test_model, self.container_ref) self.assertEqual(resp.status_code, 200) self.assertIsNotNone(consumer_data) @testcase.attr('positive') def test_create_consumer_defaults_is_idempotent(self): """Covers checking that create consumer is idempotent.""" # Register the consumer once test_model = consumer_model.ConsumerModel(**self.consumer_data) resp, consumer_data = self.consumer_behaviors.create_consumer( test_model, 
self.container_ref) self.assertEqual(resp.status_code, 200) self.assertIsNotNone(consumer_data) # Register the consumer again, without deleting it first test_model = consumer_model.ConsumerModel(**self.consumer_data) resp, consumer_data = self.consumer_behaviors.create_consumer( test_model, self.container_ref) count = consumer_data.count(self.consumer_data) self.assertEqual(resp.status_code, 200) self.assertIsNotNone(consumer_data) self.assertIn(self.consumer_data, consumer_data) self.assertEqual(1, count) @testcase.attr('positive') def test_create_consumer_change_host_header(self, **kwargs): """Create a consumer with a (possibly) malicious host name header.""" test_model = consumer_model.ConsumerModel(**self.consumer_data) malicious_hostname = 'some.bad.server.com' changed_host_header = {'Host': malicious_hostname} resp, consumer_data = self.consumer_behaviors.create_consumer( test_model, self.container_ref, extra_headers=changed_host_header) self.assertEqual(resp.status_code, 200) self.assertIsNotNone(consumer_data) # get Location field from result and assert that it is NOT the # malicious one. regex = '.*{0}.*'.format(malicious_hostname) self.assertNotRegexpMatches(resp.headers['location'], regex) barbican-2.0.0/functionaltests/api/v1/smoke/test_versions.py0000664000567000056710000000356512701405673025354 0ustar jenkinsjenkins00000000000000# Copyright (c) 2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
from functionaltests.api import base class VersionDiscoveryTestCase(base.TestCase): def test_version_get_as_unauthenticated(self): """Covers retrieving version as unauthenticated user.""" self._do_version_test(use_auth=False) def test_version_get_as_authenticated(self): """Covers retrieving version as authenticated user.""" self._do_version_test(use_auth=True) def _do_version_test(self, use_auth=False): """Get version string with or without authentication. :param use_auth: True to use authentication, False otherwise. Default is False """ url_without_version = self.client.get_base_url(include_version=False) resp = self.client.get(url_without_version, use_auth=use_auth) body = resp.json() self.assertEqual(resp.status_code, 300) versions_response = body['versions']['values'] v1_info = versions_response[0] # NOTE(jaosorior): I used assertIn instead of assertEqual because we # might start using decimal numbers in the future. So when that happens # this test will still be valid. self.assertIn('v1', v1_info['id']) self.assertEqual(len(v1_info['media-types']), 1) self.assertEqual(v1_info['media-types'][0]['base'], 'application/json') barbican-2.0.0/functionaltests/api/v1/functional/0000775000567000056710000000000012701406024023075 5ustar jenkinsjenkins00000000000000barbican-2.0.0/functionaltests/api/v1/functional/test_secrets_rbac.py0000664000567000056710000003011412701405673027155 0ustar jenkinsjenkins00000000000000# Copyright (c) 2015 Cisco Systems # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. 
# See the License for the specific language governing permissions and # limitations under the License. import base64 from barbican.tests import utils from functionaltests.api import base from functionaltests.api.v1.behaviors import secret_behaviors from functionaltests.api.v1.models import secret_models from functionaltests.common import config CONF = config.get_config() admin_a = CONF.rbac_users.admin_a creator_a = CONF.rbac_users.creator_a observer_a = CONF.rbac_users.observer_a auditor_a = CONF.rbac_users.auditor_a test_data_rbac_store_secret = { 'with_admin_a': {'user': admin_a, 'admin': admin_a, 'expected_return': 201}, 'with_creator_a': {'user': creator_a, 'admin': admin_a, 'expected_return': 201}, 'with_observer_a': {'user': observer_a, 'admin': admin_a, 'expected_return': 403}, 'with_auditor_a': {'user': auditor_a, 'admin': admin_a, 'expected_return': 403}, } test_data_rbac_update_secret = { 'with_admin_a': {'user': admin_a, 'admin': admin_a, 'expected_return': 204}, 'with_creator_a': {'user': creator_a, 'admin': admin_a, 'expected_return': 204}, 'with_observer_a': {'user': observer_a, 'admin': admin_a, 'expected_return': 403}, 'with_auditor_a': {'user': auditor_a, 'admin': admin_a, 'expected_return': 403}, } test_data_rbac_get_secret_metadata = { 'with_admin_a': {'user': admin_a, 'admin': admin_a, 'expected_return': 200}, 'with_creator_a': {'user': creator_a, 'admin': admin_a, 'expected_return': 200}, 'with_observer_a': {'user': observer_a, 'admin': admin_a, 'expected_return': 200}, 'with_auditor_a': {'user': auditor_a, 'admin': admin_a, 'expected_return': 200}, } test_data_rbac_get_decrypted_secret = { 'with_admin_a': {'user': admin_a, 'admin': admin_a, 'expected_return': 200}, 'with_creator_a': {'user': creator_a, 'admin': admin_a, 'expected_return': 200}, 'with_observer_a': {'user': observer_a, 'admin': admin_a, 'expected_return': 200}, 'with_auditor_a': {'user': auditor_a, 'admin': admin_a, 'expected_return': 403}, } test_data_rbac_get_list_of_secrets 
= { 'with_admin_a': {'user': admin_a, 'admin': admin_a, 'expected_return': 200}, 'with_creator_a': {'user': creator_a, 'admin': admin_a, 'expected_return': 200}, 'with_observer_a': {'user': observer_a, 'admin': admin_a, 'expected_return': 200}, 'with_auditor_a': {'user': auditor_a, 'admin': admin_a, 'expected_return': 403}, } test_data_rbac_delete_secret = { 'with_admin_a': {'user': admin_a, 'admin': admin_a, 'expected_return': 204}, 'with_creator_a': {'user': creator_a, 'admin': admin_a, 'expected_return': 403}, 'with_observer_a': {'user': observer_a, 'admin': admin_a, 'expected_return': 403}, 'with_auditor_a': {'user': auditor_a, 'admin': admin_a, 'expected_return': 403}, } def get_default_secret_data(): return { "name": "AES key", "expiration": "2050-02-28T19:14:44.180394", "algorithm": "aes", "bit_length": 256, "mode": "cbc", "payload": get_default_payload(), "payload_content_type": get_default_payload_content_type(), "payload_content_encoding": get_default_payload_content_encoding(), } def get_default_two_phase_secret_data(): return { "name": "AES key", "expiration": "2050-02-28T19:14:44.180394", "algorithm": "aes", "bit_length": 256, "mode": "cbc", } def get_default_two_phase_payload_data(): return { "payload": get_default_payload(), "payload_content_type": get_default_payload_content_type(), "payload_content_encoding": get_default_payload_content_encoding(), } def get_default_payload(): return 'Z0Y2K2xMb0Yzb2hBOWFQUnB0KzZiUT09' def get_default_payload_content_encoding(): return 'base64' def get_default_payload_content_type(): return 'application/octet-stream' def get_container_req(secret_ref): return {"name": "testcontainer", "type": "generic", "secret_refs": [{'name': 'secret1', 'secret_ref': secret_ref}]} @utils.parameterized_test_case class RBACSecretsTestCase(base.TestCase): """Functional tests exercising RBAC Policies""" def setUp(self): super(RBACSecretsTestCase, self).setUp() self.secret_behaviors = secret_behaviors.SecretBehaviors(self.client) def 
tearDown(self): self.secret_behaviors.delete_all_created_secrets() super(RBACSecretsTestCase, self).tearDown() @utils.parameterized_dataset(test_data_rbac_store_secret) def test_rbac_store_secret(self, user, admin, expected_return): """Test RBAC for secret store Issue a secret creation and verify that that the correct http return code comes back for the specified user. :param user: the user who will attempt to do the store :param admin: the admin of the group containing the user :param expected_return: the expected http return code """ test_model = secret_models.SecretModel(**get_default_secret_data()) resp, secret_ref = self.secret_behaviors.create_secret( test_model, user_name=user, admin=admin) self.assertEqual(expected_return, resp.status_code) self.assertEqual(expected_return == 201, secret_ref is not None) @utils.parameterized_dataset(test_data_rbac_update_secret) def test_rbac_update_secret(self, user, admin, expected_return): """Test RBAC for secret update Issue a secret update and verify that that the correct http return code comes back for the specified user. The initial secret will be stored with the admin user to ensure that it gets created successfully. We don't want the secret store to fail since we are only testing secret update here. 
:param user: the user who will attempt to do the update :param admin: the admin of the group containing the user :param expected_return: the expected http return code """ secret_ref = self._create_initial_secret( admin=admin, secret_data=get_default_two_phase_secret_data()) resp = self.secret_behaviors.update_secret_payload( secret_ref, user_name=user, **get_default_two_phase_payload_data()) self.assertEqual(expected_return, resp.status_code) @utils.parameterized_dataset(test_data_rbac_get_secret_metadata) def test_rbac_get_secret_metadata(self, user, admin, expected_return): """Test RBAC for secret get metadata Issue a secret get metadata and verify that that the correct http return code comes back for the specified user. The initial secret will be stored with the admin user to ensure that it gets created successfully. We don't want the secret store to fail since we are only testing secret get metadata here. :param user: the user who will attempt to do the get metadata :param admin: the admin of the group containing the user :param expected_return: the expected http return code """ secret_ref = self._create_initial_secret(admin=admin) resp = self.secret_behaviors.get_secret_metadata( secret_ref, user_name=user) self.assertEqual(expected_return, resp.status_code) self.assertEqual(expected_return == 200, resp.content is not None) @utils.parameterized_dataset(test_data_rbac_get_decrypted_secret) def test_rbac_get_decrypted_secret(self, user, admin, expected_return): """Test RBAC for secret get decrypted secret Issue a secret get decrypted data and verify that that the correct http return code comes back for the specified user. The initial secret will be stored with the admin user to ensure that it gets created successfully. We don't want the secret store to fail since we are only testing get decrypted secret here. 
:param user: the user who will attempt to get the decrypted secret :param admin: the admin of the group containing the user :param expected_return: the expected http return code """ secret_ref = self._create_initial_secret(admin=admin) resp = self.secret_behaviors.get_secret( secret_ref, payload_content_type='application/octet-stream', user_name=user) self.assertEqual(expected_return, resp.status_code) self.assertEqual(expected_return == 200, resp.content == base64.b64decode( get_default_payload())) @utils.parameterized_dataset(test_data_rbac_get_list_of_secrets) def test_rbac_get_list_of_secrets(self, user, admin, expected_return): """Test RBAC for get secret list Issue a get secret list and verify that that the correct http return code comes back for the specified user. Some initial secrets will be stored with the admin user to ensure that they get created successfully. We don't want the secret stores to fail since we are only testing get secret list. :param user: the user who will attempt to get the list of secrets :param admin: the admin of the group containing the user :param expected_return: the expected http return code """ for i in range(3): secret_ref = self._create_initial_secret(admin=admin) self.assertIsNotNone(secret_ref) resp, secrets, next, prev = self.secret_behaviors.get_secrets( limit=10, offset=0, user_name=user) self.assertEqual(expected_return, resp.status_code) self.assertIsNotNone(secrets) @utils.parameterized_dataset(test_data_rbac_delete_secret) def test_rbac_delete_secret(self, user, admin, expected_return): """Test RBAC for secret delete Issue a secret delete and verify that that the correct http return code comes back for the specified user. The initial secret will be stored with the admin user to ensure that it gets created successfully. We don't want the secret store to fail since we are only testing secret delete here. 
:param user: the user who will attempt to do the delete :param admin: the admin of the group containing the user :param expected_return: the expected http return code """ secret_ref = self._create_initial_secret(admin=admin) resp = self.secret_behaviors.delete_secret( secret_ref, user_name=user) self.assertEqual(expected_return, resp.status_code) def _create_initial_secret(self, admin=admin_a, secret_data=get_default_secret_data()): """Utility function to create a secret Some tests require a secret to exist before they test certain things, so this function can be used to do that setup. :param admin: the admin user who will create store the secret :param secret_data: the data for the secret :return: href to the newly stored secret """ test_model = secret_models.SecretModel(**secret_data) resp, secret_ref = self.secret_behaviors.create_secret( test_model, user_name=admin, admin=admin) self.assertEqual(201, resp.status_code) return secret_ref barbican-2.0.0/functionaltests/api/v1/functional/test_containers.py0000664000567000056710000004324712701405674026677 0ustar jenkinsjenkins00000000000000# Copyright (c) 2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
import copy

from testtools import testcase

from barbican.tests import utils
from functionaltests.api import base
from functionaltests.api.v1.behaviors import container_behaviors
from functionaltests.api.v1.behaviors import secret_behaviors
from functionaltests.api.v1.models import container_models
from functionaltests.api.v1.models import secret_models


def get_default_container_create_data(secret):
    """Build a minimal generic-container request body wrapping *secret*.

    :param secret: href of an already-stored secret to reference
    :return: dict suitable for ContainerModel(**data)
    """
    return {
        "type": "generic",
        "name": "generic name",
        "secret_refs": [
            {"name": "a secret", "secret_ref": secret},
        ]
    }


# Template for a generic container referencing three named secrets; the
# 'secret_ref' values are filled in per-test by BaseContainerTestCase.setUp.
create_container_data = {
    "name": "containername",
    "type": "generic",
    "secret_refs": [
        {"name": "secret1"},
        {"name": "secret2"},
        {"name": "secret3"},
    ]
}

# Template for an RSA container; these ref names are the ones the API
# expects for that container type.
create_container_rsa_data = {
    "name": "rsacontainer",
    "type": "rsa",
    "secret_refs": [
        {"name": "public_key"},
        {"name": "private_key"},
        {"name": "private_key_passphrase"},
    ]
}

# String values the API should accept anywhere a free-form name is allowed.
accepted_str_values = {
    'alphanumeric': ['a2j3j6ll9'],
    'punctuation': ['~!@#$%^&*()_+`-={}[]|:;<>,.?'],
    'len_255': [str(bytearray().zfill(255))],
    'uuid': ['54262d9d-4bc7-4821-8df0-dc2ca8e112bb'],
    'empty': ['']
}


class BaseContainerTestCase(base.TestCase):

    # Subclasses override this to switch the request-body template.
    default_data_template = create_container_data

    def setUp(self):
        """Store three secrets and build a request body referencing them."""
        super(BaseContainerTestCase, self).setUp()
        self.secret_behaviors = secret_behaviors.SecretBehaviors(self.client)
        self.behaviors = container_behaviors.ContainerBehaviors(self.client)

        # Three stored secrets are enough to populate every template above.
        self.secret_ref_1 = self._create_a_secret()
        self.secret_ref_2 = self._create_a_secret()
        self.secret_ref_3 = self._create_a_secret()

        # Deep-copy the class-level template so individual tests can mutate
        # self.default_data without contaminating other tests.
        self.default_data = copy.deepcopy(self.default_data_template)
        refs = self.default_data['secret_refs']
        refs[0]['secret_ref'] = self.secret_ref_1
        refs[1]['secret_ref'] = self.secret_ref_2
        refs[2]['secret_ref'] = self.secret_ref_3

    def tearDown(self):
        """Delete every secret and container created during the test."""
        self.secret_behaviors.delete_all_created_secrets()
        self.behaviors.delete_all_created_containers()
        super(BaseContainerTestCase, self).tearDown()
def _create_a_secret(self): secret_defaults_data = { "name": "AES key", "expiration": "2018-02-28T19:14:44.180394", "algorithm": "aes", "bit_length": 256, "mode": "cbc", "payload": "gF6+lLoF3ohA9aPRpt+6bQ==", "payload_content_type": "application/octet-stream", "payload_content_encoding": "base64", } secret_model = secret_models.SecretModel(**secret_defaults_data) resp, secret_ref = self.secret_behaviors.create_secret(secret_model) self.assertEqual(resp.status_code, 201) self.assertIsNotNone(secret_ref) return secret_ref @utils.parameterized_test_case class GenericContainersTestCase(BaseContainerTestCase): @testcase.attr('positive') def test_create_defaults_none_secret_name(self): """Covers creating a container with None as a secret name.""" test_model = container_models.ContainerModel(**self.default_data) test_model.name = None resp, container_ref = self.behaviors.create_container(test_model) self.assertEqual(resp.status_code, 201) @utils.parameterized_dataset({'0': [0], '1': [1], '50': [50]}) @testcase.attr('positive') def test_create_defaults_size(self, num_secrets): """Covers creating containers of various sizes.""" test_model = container_models.ContainerModel(**self.default_data) for i in range(0, num_secrets): secret_ref = self._create_a_secret() test_model.secret_refs.append({ 'name': 'other_secret{0}'.format(i), 'secret_ref': secret_ref }) resp, container_ref = self.behaviors.create_container(test_model) self.assertEqual(resp.status_code, 201) @utils.parameterized_dataset(accepted_str_values) @testcase.attr('positive') def test_create_defaults_name(self, name): """Covers creating generic containers with various names.""" test_model = container_models.ContainerModel(**self.default_data) test_model.name = name resp, container_ref = self.behaviors.create_container(test_model) self.assertEqual(resp.status_code, 201) @utils.parameterized_dataset(accepted_str_values) @testcase.attr('positive') def test_create_defaults_secret_name(self, name=None): """Covers 
creating containers with various secret ref names.""" test_model = container_models.ContainerModel(**self.default_data) test_model.secret_refs = [{ 'name': name, 'secret_ref': self.secret_ref_1 }] resp, container_ref = self.behaviors.create_container(test_model) self.assertEqual(resp.status_code, 201) get_resp = self.behaviors.get_container(container_ref) self.assertEqual(get_resp.status_code, 200) self.assertEqual(get_resp.model.secret_refs[0].name, name) @testcase.attr('negative') def test_create_defaults_invalid_type(self): """Container creating should fail with an invalid container type.""" test_model = container_models.ContainerModel(**self.default_data) test_model.type = 'bad_type' resp, container_ref = self.behaviors.create_container(test_model) self.assertEqual(resp.status_code, 400) @testcase.attr('negative') def test_create_defaults_duplicate_secret_refs(self): """Covers creating a container with a duplicated secret ref.""" test_model = container_models.ContainerModel(**self.default_data) test_model.secret_refs[0]['secret_ref'] = self.secret_ref_1 test_model.secret_refs[1]['secret_ref'] = self.secret_ref_1 test_model.secret_refs[2]['secret_ref'] = self.secret_ref_1 resp, container_ref = self.behaviors.create_container(test_model) self.assertEqual(resp.status_code, 400) @testcase.attr('negative') def test_get_non_existent_container(self): """A get on a container that does not exist should return a 404.""" resp = self.behaviors.get_container("not_a_ref") self.assertEqual(resp.status_code, 404) @testcase.attr('negative') def test_delete_non_existent_container(self): """A delete on a container that does not exist should return a 404.""" resp = self.behaviors.delete_container("not_a_ref", expected_fail=True) self.assertEqual(resp.status_code, 404) @testcase.attr('positive') def test_create_change_host_header(self, **kwargs): """Create a container with a (possibly) malicious host name header.""" test_model = container_models.ContainerModel(**self.default_data) 
malicious_hostname = 'some.bad.server.com' changed_host_header = {'Host': malicious_hostname} resp, container_ref = self.behaviors.create_container( test_model, extra_headers=changed_host_header) self.assertEqual(resp.status_code, 201) # get Location field from result and assert that it is NOT the # malicious one. regex = '.*{0}.*'.format(malicious_hostname) self.assertNotRegexpMatches(resp.headers['location'], regex) @utils.parameterized_test_case class RSAContainersTestCase(BaseContainerTestCase): default_data_template = create_container_rsa_data @testcase.attr('positive') def test_create_rsa_no_passphrase(self): """Covers creating an rsa container without a passphrase.""" pub_key_ref = {'name': 'public_key', 'secret_ref': self.secret_ref_1} priv_key_ref = {'name': 'private_key', 'secret_ref': self.secret_ref_2} test_model = container_models.ContainerModel(**self.default_data) test_model.secret_refs = [pub_key_ref, priv_key_ref] resp, container_ref = self.behaviors.create_container(test_model) self.assertEqual(resp.status_code, 201) @utils.parameterized_dataset(accepted_str_values) @testcase.attr('positive') def test_create_rsa_name(self, name): """Covers creating rsa containers with various names.""" test_model = container_models.ContainerModel(**self.default_data) test_model.name = name resp, container_ref = self.behaviors.create_container(test_model) self.assertEqual(resp.status_code, 201) @testcase.attr('negative') def test_create_rsa_invalid_key_names(self): """Covers creating an RSA container with incorrect names.""" test_model = container_models.ContainerModel(**self.default_data) test_model.secret_refs = [ { "name": "secret1", "secret_ref": self.secret_ref_1 }, { "name": "secret2", "secret_ref": self.secret_ref_2 }, { "name": "secret3", "secret_ref": self.secret_ref_3 } ] resp, container_ref = self.behaviors.create_container(test_model) self.assertEqual(resp.status_code, 400) @testcase.attr('negative') def test_create_rsa_no_public_key(self): """Creating 
an rsa container without a public key should fail. RSA containers must have at least a public key and private key. """ test_model = container_models.ContainerModel(**self.default_data) test_model.secret_refs[0]['name'] = 'secret_1' resp, container_ref = self.behaviors.create_container(test_model) self.assertEqual(resp.status_code, 400) @testcase.attr('negative') def test_create_rsa_no_private_key(self): """Creating an rsa container without a private key should fail. RSA containers must have at least a public key and private key. """ test_model = container_models.ContainerModel(**self.default_data) test_model.secret_refs[1]['name'] = 'secret_1' resp, container_ref = self.behaviors.create_container(test_model) self.assertEqual(resp.status_code, 400) @testcase.attr('positive') def test_create_rsa_change_host_header(self, **kwargs): """Create a container with a (possibly) malicious host name header.""" test_model = container_models.ContainerModel(**self.default_data) malicious_hostname = 'some.bad.server.com' changed_host_header = {'Host': malicious_hostname} resp, container_ref = self.behaviors.create_container( test_model, extra_headers=changed_host_header) self.assertEqual(resp.status_code, 201) # get Location field from result and assert that it is NOT the # malicious one. 
regex = '.*{0}.*'.format(malicious_hostname) self.assertNotRegexpMatches(resp.headers['location'], regex) class ContainersPagingTestCase(base.PagingTestCase): def setUp(self): super(ContainersPagingTestCase, self).setUp() self.secret_behaviors = secret_behaviors.SecretBehaviors(self.client) self.behaviors = container_behaviors.ContainerBehaviors(self.client) # make a local mutable copy of the default data to prevent # possible data contamination secret = self._create_a_secret() self.create_default_data = get_default_container_create_data(secret) def _create_a_secret(self): secret_defaults_data = { "name": "AES key", "expiration": "2018-02-28T19:14:44.180394", "algorithm": "aes", "bit_length": 256, "mode": "cbc", "payload": "gF6+lLoF3ohA9aPRpt+6bQ==", "payload_content_type": "application/octet-stream", "payload_content_encoding": "base64", } secret_model = secret_models.SecretModel(**secret_defaults_data) resp, secret_ref = self.secret_behaviors.create_secret(secret_model) self.assertEqual(resp.status_code, 201) self.assertIsNotNone(secret_ref) return secret_ref def tearDown(self): self.behaviors.delete_all_created_containers() super(ContainersPagingTestCase, self).tearDown() def create_model(self): return container_models.ContainerModel(**self.create_default_data) def create_resources(self, count=0, model=None): for x in range(0, count): self.behaviors.create_container(model) def get_resources(self, limit=10, offset=0, filter=filter): return self.behaviors.get_containers(limit=limit, offset=offset, filter=filter) def set_filter_field(self, unique_str, model): '''Set the name field which we use in the get_resources ''' model.name = unique_str class ContainersUnauthedTestCase(BaseContainerTestCase): def setUp(self): super(ContainersUnauthedTestCase, self).setUp() self.dummy_project_id = 'dummy123' self.dummy_container_ref = ( 'containers/dummy123-3416-4b53-8875-e6af3e0af8c3' ) def tearDown(self): self.secret_behaviors.delete_all_created_secrets() 
super(ContainersUnauthedTestCase, self).tearDown() @testcase.attr('negative', 'security') def test_unauthed_create_huge_dummy_token_no_proj_id(self): """Attempt to create a container with a dummy token, and no project id Should return 401 """ model = container_models.ContainerModel( **create_container_data ) headers = {'X-Auth-Token': 'a' * 3500} resp = self.client.post( 'containers', request_model=model, use_auth=False, extra_headers=headers ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_unauthed_create_no_proj_id(self): """Attempt to create a container without a token or project id Should return 401 """ model = container_models.ContainerModel( **create_container_data ) resp = self.client.post( 'containers', request_model=model, use_auth=False ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_unauthed_get_no_proj_id(self): """Attempt to get a container without a token or project id Should return 401 """ headers = { 'Accept': '*/*', 'Accept-Encoding': '*/*' } resp = self.client.get( self.dummy_container_ref, extra_headers=headers, use_auth=False ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_unauthed_delete_no_proj_id(self): """Attempt to delete a container without a token or project id Should return 401 """ resp = self.client.delete(self.dummy_container_ref, use_auth=False) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_unauthed_huge_dummy_token_with_proj_id(self): """Attempt to create a container with a dummy token and project id Should return 401 """ model = container_models.ContainerModel( **create_container_data ) headers = { 'X-Auth-Token': 'a' * 3500, 'X-Project-Id': self.dummy_project_id } resp = self.client.post( 'containers', request_model=model, use_auth=False, extra_headers=headers ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def 
test_unauthed_create_with_proj_id(self): """Attempt to create a container with a project id, but not a token Should return 401 """ model = container_models.ContainerModel( **create_container_data ) headers = {'X-Project-Id': self.dummy_project_id} resp = self.client.post( 'containers', request_model=model, use_auth=False, extra_headers=headers ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_unauthed_get_with_proj_id(self): """Attempt to get a container with a project id, but not a token Should return 401 """ headers = { 'Accept': '*/*', 'Accept-Encoding': '*/*', 'X-Project-Id': self.dummy_project_id } resp = self.client.get( self.dummy_container_ref, extra_headers=headers, use_auth=False ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_unauthed_delete_with_proj_id(self): """Attempt to delete a container with a project id, but not a token Should return 401 """ headers = {'X-Project-Id': self.dummy_project_id} resp = self.client.delete( self.dummy_container_ref, use_auth=False, extra_headers=headers ) self.assertEqual(401, resp.status_code) barbican-2.0.0/functionaltests/api/v1/functional/__init__.py0000664000567000056710000000000012701405673025205 0ustar jenkinsjenkins00000000000000barbican-2.0.0/functionaltests/api/v1/functional/test_secrets.py0000664000567000056710000014451312701405674026200 0ustar jenkinsjenkins00000000000000# Copyright (c) 2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. 
# See the License for the specific language governing permissions and # limitations under the License. import base64 import binascii import json import sys import time from testtools import testcase from barbican.plugin.util import translations from barbican.tests import keys from barbican.tests import utils from functionaltests.api import base from functionaltests.api.v1.behaviors import secret_behaviors from functionaltests.api.v1.models import secret_models def get_pem_content(pem): b64_content = translations.get_pem_components(pem)[1] return base64.b64decode(b64_content) def get_private_key_req(): return {'name': 'myprivatekey', 'payload_content_type': 'application/octet-stream', 'payload_content_encoding': 'base64', 'algorithm': 'rsa', 'bit_length': 2048, 'secret_type': 'private', 'payload': base64.b64encode(keys.get_private_key_pem())} def get_public_key_req(): return {'name': 'mypublickey', 'payload_content_type': 'application/octet-stream', 'payload_content_encoding': 'base64', 'algorithm': 'rsa', 'bit_length': 2048, 'secret_type': 'public', 'payload': base64.b64encode(keys.get_public_key_pem())} def get_certificate_req(): return {'name': 'mycertificate', 'payload_content_type': 'application/octet-stream', 'payload_content_encoding': 'base64', 'algorithm': 'rsa', 'bit_length': 2048, 'secret_type': 'certificate', 'payload': base64.b64encode(keys.get_certificate_pem())} def get_passphrase_req(): return {'name': 'mypassphrase', 'payload_content_type': 'text/plain', 'secret_type': 'passphrase', 'payload': 'mysecretpassphrase'} def get_default_data(): return { "name": "AES key", "expiration": "2018-02-28T19:14:44.180394", "algorithm": "aes", "bit_length": 256, "mode": "cbc", "payload": get_default_payload(), "payload_content_type": "application/octet-stream", "payload_content_encoding": "base64", } def get_default_payload(): return "AQIDBAUGBwgBAgMEBQYHCAECAwQFBgcIAQIDBAUGBwg=" @utils.parameterized_test_case class SecretsTestCase(base.TestCase): def setUp(self): 
super(SecretsTestCase, self).setUp() self.behaviors = secret_behaviors.SecretBehaviors(self.client) # make a local mutable copies of the default data to prevent # possible data contamination if (when?) the data contains # any nested dicts. # TODO(tdink) Move to a config file self.default_secret_create_data = get_default_data() self.default_secret_create_all_none_data = { "name": None, "expiration": None, "algorithm": None, "bit_length": None, "mode": None, "payload": None, "payload_content_type": None, "payload_content_encoding": None, } self.default_secret_create_emptystrings_data = { "name": '', "expiration": '', "algorithm": '', "bit_length": '', "mode": '', "payload": '', "payload_content_type": '', "payload_content_encoding": '', } self.default_secret_create_two_phase_data = { "name": "AES key", "expiration": "2018-02-28T19:14:44.180394", "algorithm": "aes", "bit_length": 256, "mode": "cbc", } def tearDown(self): self.behaviors.delete_all_created_secrets() super(SecretsTestCase, self).tearDown() @testcase.attr('negative') def test_secret_create_with_only_content_type_no_payload(self): """Create secret with valid content type but no payload.""" test_model = secret_models.SecretModel( **self.default_secret_create_all_none_data) test_model.payload_content_type = 'application/octet-stream' resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 400) @testcase.attr('positive') def test_secret_create_then_check_content_types(self): """Check that set content-type attribute is retained in metadata.""" test_model = secret_models.SecretModel( **self.default_secret_create_data) resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 201) resp = self.behaviors.get_secret_metadata(secret_ref) self.assertEqual(resp.status_code, 200) content_types = resp.model.content_types self.assertIsNotNone(content_types) self.assertIn('default', content_types) self.assertEqual(content_types['default'], 
test_model.payload_content_type) @testcase.attr('positive') def test_secret_create_all_none(self): """Covers case of a POST request with no JSON data.""" test_model = secret_models.SecretModel( **self.default_secret_create_all_none_data) resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 201) @testcase.attr('negative') def test_secret_get_secret_doesnt_exist(self): """GET a non-existent secret. Should return a 404. """ resp = self.behaviors.get_secret_metadata('not_a_uuid') self.assertEqual(resp.status_code, 404) @testcase.attr('positive') def test_secret_get_payload_no_accept_header(self): """GET a secret payload, do not pass in accept header. Should return a 200. """ test_model = secret_models.SecretModel( **self.default_secret_create_data) resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 201) get_resp = self.behaviors.get_secret( secret_ref, payload_content_type='', omit_headers=['Accept']) self.assertEqual(get_resp.status_code, 200) self.assertIn(test_model.payload, binascii.b2a_base64(get_resp.content)) @testcase.attr('negative') def test_secret_delete_doesnt_exist(self): """DELETE a non-existent secret. Should return a 404. """ resp = self.behaviors.delete_secret('not_a_uuid', expected_fail=True) self.assertEqual(resp.status_code, 404) @testcase.attr('negative') def test_secret_get_invalid_mime_type(self): """Covers getting a secret with an invalid mime type.""" test_model = secret_models.SecretModel( **self.default_secret_create_data) resp, secret_ref = self.behaviors.create_secret(test_model) resp = self.behaviors.get_secret(secret_ref, payload_content_type="i/m") self.assertEqual(resp.status_code, 406) @testcase.attr('negative') def test_secret_create_with_expiration_passed(self): """Create a secret with an expiration that has already passed. Should return a 400. 
""" test_model = secret_models.SecretModel( **self.default_secret_create_data) test_model.expiration = '2000-01-10T14:58:52.546795' resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 400) @testcase.attr('negative') def test_secret_create_with_empty_strings(self): """Secret create with empty Strings for all attributes. Should return a 400. """ test_model = secret_models.SecretModel( **self.default_secret_create_emptystrings_data) resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 400) @testcase.attr('negative') def test_secret_create_with_invalid_content_type(self): """Create secret with an invalid content type in HTTP header. Should return a 415. """ test_model = secret_models.SecretModel( **self.default_secret_create_data) headers = {"Content-Type": "crypto/boom"} resp, secret_ref = self.behaviors.create_secret(test_model, headers) self.assertEqual(resp.status_code, 415) @testcase.attr('negative') def test_secret_create_with_oversized_payload(self): """Create a secret that is larger than the max payload size. Should return a 413 if the secret size is greater than the maximum allowed size. """ test_model = secret_models.SecretModel( **self.default_secret_create_data) test_model.payload = str(self.oversized_payload) resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 413) @testcase.attr('negative') def test_secret_put_when_payload_doesnt_exist(self): """PUT secret to a non-existent secret. Should return 404. """ resp = self.behaviors.update_secret_payload( secret_ref='not_a_uuid', payload_content_type='application/octet-stream', payload_content_encoding='base64', payload='testing putting to non-existent secret') self.assertEqual(resp.status_code, 404) @testcase.attr('negative') def test_secret_put_when_payload_already_exists(self): """PUT against a secret that already has encrypted data. Should return 409. 
""" test_model = secret_models.SecretModel( **self.default_secret_create_data) resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 201) resp = self.behaviors.update_secret_payload( secret_ref=secret_ref, payload_content_type='application/octet-stream', payload_content_encoding='base64', payload='testing putting data in secret that already has data') self.assertEqual(resp.status_code, 409) @testcase.attr('negative') def test_secret_put_two_phase_empty_payload(self): """Covers case of putting empty String to a secret. Should return 400. """ test_model = secret_models.SecretModel( **self.default_secret_create_two_phase_data) resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 201) put_resp = self.behaviors.update_secret_payload( secret_ref=secret_ref, payload_content_type='application/octet-stream', payload_content_encoding='base64', payload='') self.assertEqual(put_resp.status_code, 400) @testcase.attr('negative') def test_secret_put_two_phase_invalid_content_type(self): """PUT with an invalid content type. Should return 415. Launchpad bug #1208601 - Updated in Barbican blueprint barbican-enforce-content-type """ test_model = secret_models.SecretModel( **self.default_secret_create_two_phase_data) resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 201) put_resp = self.behaviors.update_secret_payload( secret_ref=secret_ref, payload_content_type='crypto/boom', payload_content_encoding='base64', payload='invalid content type') self.assertEqual(put_resp.status_code, 415) @testcase.attr('negative') def test_secret_put_two_phase_no_payload(self): """Covers case of putting null String to a secret. Should return 400. 
""" test_model = secret_models.SecretModel( **self.default_secret_create_two_phase_data) resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 201) put_resp = self.behaviors.update_secret_payload( secret_ref=secret_ref, payload_content_type='application/octet-stream', payload_content_encoding='base64', payload=None) self.assertEqual(put_resp.status_code, 400) @testcase.attr('negative') def test_secret_put_two_phase_w_oversized_binary_data_not_utf8(self): """PUT with an oversized binary string that isn't UTF-8. Launchpad bug #1315498. """ oversized_payload = bytearray().zfill(self.max_payload_size + 1) # put a value in the middle of the data that does not have a UTF-8 # code point. Using // to be python3-friendly. oversized_payload[self.max_payload_size // 2] = b'\xb0' test_model = secret_models.SecretModel( **self.default_secret_create_two_phase_data) resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 201) put_resp = self.behaviors.update_secret_payload( secret_ref=secret_ref, payload_content_type='application/octet-stream', payload_content_encoding='base64', payload=str(oversized_payload)) self.assertEqual(put_resp.status_code, 413) @testcase.attr('negative') def test_secret_put_two_phase_oversized_payload(self): """PUT with oversized payload should return 413. Covers the case of putting secret data that is larger than the maximum secret size allowed by Barbican. Beyond that it should return 413. 
""" oversized_payload = self.oversized_payload test_model = secret_models.SecretModel( **self.default_secret_create_two_phase_data) resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 201) put_resp = self.behaviors.update_secret_payload( secret_ref=secret_ref, payload_content_type='application/octet-stream', payload_content_encoding='base64', payload=oversized_payload) self.assertEqual(put_resp.status_code, 413) @testcase.attr('positive') def test_secret_put_two_phase_valid_binary_data_not_utf8(self): """A string with binary data that doesn't contain UTF-8 code points. Launchpad bug #1315498. """ # put a value in the data that does not have a UTF-8 code point. data = b'\xb0' test_model = secret_models.SecretModel( **self.default_secret_create_two_phase_data) resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 201) put_resp = self.behaviors.update_secret_payload( secret_ref=secret_ref, payload_content_type='application/octet-stream', payload_content_encoding='base64', payload=str(data)) self.assertEqual(put_resp.status_code, 204) @testcase.attr('positive') def test_secret_put_two_phase_high_range_unicode_character(self): """Tests a high-range unicode character on a two-step PUT. 
Launchpad bug #1315498 """ data = u'\U0001F37A' data = data.encode('utf-8') test_model = secret_models.SecretModel( **self.default_secret_create_two_phase_data) resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 201) put_resp = self.behaviors.update_secret_payload( secret_ref=secret_ref, payload_content_type='application/octet-stream', payload_content_encoding='base64', payload=data) self.assertEqual(put_resp.status_code, 204) @testcase.attr('positive') def test_secret_get_nones_payload_with_a_octet_stream(self): """Tests getting a secret with octet-stream.""" test_model = secret_models.SecretModel( **self.default_secret_create_two_phase_data) test_model.payload_content_encoding = 'base64' test_model.payload_content_type = 'application/octet-stream' test_model.payload = base64.b64encode('abcdef') resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 201) get_resp = self.behaviors.get_secret( secret_ref, payload_content_type=test_model.payload_content_type, payload_content_encoding=test_model.payload_content_encoding) self.assertEqual(get_resp.status_code, 200) self.assertIn(test_model.payload, binascii.b2a_base64(get_resp.content)) @testcase.attr('negative') def test_secret_create_defaults_bad_content_type_check_message(self): """Verifying the returned error message matches the expected form.""" test_model = secret_models.SecretModel( **self.default_secret_create_data) test_model.payload_content_type = 'plain-text' resp, secret_ref = self.behaviors.create_secret(test_model) # first, ensure that the return code is 400 self.assertEqual(resp.status_code, 400) resp_dict = json.loads(resp.content) self.assertIn( "Provided object does not match schema 'Secret': " "payload_content_type plain-text is not one of ['text/plain', " "'text/plain;charset=utf-8', 'text/plain; charset=utf-8', " "'application/octet-stream'", resp_dict['description']) self.assertIn("Bad Request", resp_dict['title']) 
@testcase.attr('negative') def test_secret_create_then_expire_then_check(self): """Covers case where you try to retrieve a secret that is expired. This test creates a secret that will soon expire. After it expires, check it and verify that it is no longer a valid secret. """ # create a secret that expires in 5 seconds timestamp = utils.create_timestamp_w_tz_and_offset(seconds=5) test_model = secret_models.SecretModel( **self.default_secret_create_data) test_model.expiration = timestamp resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 201) # now get the secret - will be still valid get_resp = self.behaviors.get_secret_metadata(secret_ref) self.assertEqual(get_resp.status_code, 200) # now wait 10 seconds time.sleep(10) # now get the secret - should be invalid (expired) resp = self.behaviors.get_secret_metadata(secret_ref) self.assertEqual(resp.status_code, 404) @utils.parameterized_dataset({ 'alphanumeric': ['1f34ds'], 'punctuation': ['~!@#$%^&*()_+`-={}[]|:;<>,.?'], 'uuid': ['54262d9d-4bc7-4821-8df0-dc2ca8e112bb'], 'len_255': [base.TestCase.max_sized_field], 'empty': [''], 'null': [None] }) @testcase.attr('positive') def test_secret_create_defaults_valid_name(self, name): """Covers cases of creating secrets with valid names.""" test_model = secret_models.SecretModel( **self.default_secret_create_data) test_model.name = name resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 201) @utils.parameterized_dataset({ 'int': [400] }) @testcase.attr('negative') def test_secret_create_defaults_invalid_name(self, name): """Create secrets with various invalid names. Should return 400. 
""" test_model = secret_models.SecretModel( **self.default_secret_create_data) test_model.name = name resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 400) @testcase.attr('positive', 'non-standard-algorithm') def test_secret_create_valid_algorithms(self): """Creates secrets with various valid algorithms.""" algorithm = 'invalid' test_model = secret_models.SecretModel( **self.default_secret_create_data) test_model.algorithm = algorithm resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 201) @utils.parameterized_dataset({ 'int': [400] }) @testcase.attr('negative') def test_secret_create_invalid_algorithms(self, algorithm): """Creates secrets with various invalid algorithms.""" test_model = secret_models.SecretModel( **self.default_secret_create_data) test_model.algorithm = algorithm resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 400) @utils.parameterized_dataset({ 'sixteen': [16], 'fifteen': [15], 'eight': [8], 'seven': [7], 'one': [1], 'none': [None] }) @testcase.attr('positive', 'non-standard-algorithm') def test_secret_create_with_non_standard_bit_length(self, bit_length): """Covers cases of creating secrets with valid bit lengths.""" test_model = secret_models.SecretModel( **self.default_secret_create_data) test_model.bit_length = bit_length resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 201) @utils.parameterized_dataset({ '128': [128], '192': [192], '256': [256], '512': [512] }) @testcase.attr('positive') def test_secret_create_with_valid_bit_length(self, bit_length): """Covers cases of creating secrets with valid bit lengths.""" byte_length = bit_length / 8 secret = bytearray(byte_length) for x in range(0, byte_length): secret[x] = x secret64 = base64.b64encode(secret) test_model = secret_models.SecretModel( **self.default_secret_create_data) test_model.bit_length = bit_length 
test_model.payload = secret64 resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 201) @utils.parameterized_dataset({ 'str_type': ['not-an-int'], 'empty': [''], 'blank': [' '], 'negative_maxint': [-sys.maxint], 'negative_one': [-1], 'zero': [0] }) @testcase.attr('negative') def test_secret_create_defaults_invalid_bit_length(self, bit_length): """Covers cases of creating secrets with invalid bit lengths.""" test_model = secret_models.SecretModel( **self.default_secret_create_data) test_model.bit_length = bit_length resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 400) @utils.parameterized_dataset({ 'cbc': ['cbc'], 'unknown_positive': ['unknown'] }) @testcase.attr('positive') def test_secret_create_defaults_valid_mode(self, mode): """Covers cases of creating secrets with valid modes.""" test_model = secret_models.SecretModel( **self.default_secret_create_data) test_model.mode = mode resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 201) @utils.parameterized_dataset({ 'zero': [0], 'oversized_string': [base.TestCase.oversized_field], 'int': [400] }) @testcase.attr('negative') def test_secret_create_defaults_invalid_mode(self, mode): """Covers cases of creating secrets with invalid modes.""" test_model = secret_models.SecretModel( **self.default_secret_create_data) test_model.mode = mode resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 400) @utils.parameterized_dataset({ 'text_content_type_none_encoding': { 'payload_content_type': 'text/plain', 'payload_content_encoding': None}, 'utf8_text_content_type_none_encoding': { 'payload_content_type': 'text/plain; charset=utf-8', 'payload_content_encoding': None}, 'no_space_utf8_text_content_type_none_encoding': { 'payload_content_type': 'text/plain;charset=utf-8', 'payload_content_encoding': None}, 'octet_content_type_base64_encoding': { 
'payload_content_type': 'application/octet-stream', 'payload_content_encoding': 'base64'} }) @testcase.attr('positive') def test_secret_create_defaults_valid_types_and_encoding( self, payload_content_type, payload_content_encoding): """Creates secrets with various content types and encodings.""" test_model = secret_models.SecretModel( **self.default_secret_create_data) test_model.payload_content_type = payload_content_type test_model.payload_content_encoding = payload_content_encoding resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 201) get_resp = self.behaviors.get_secret( secret_ref, payload_content_type=payload_content_type, payload_content_encoding=payload_content_encoding) self.assertEqual(get_resp.status_code, 200) if payload_content_encoding == 'base64': self.assertIn(test_model.payload, binascii.b2a_base64(get_resp.content)) else: self.assertIn(test_model.payload, get_resp.content) @utils.parameterized_dataset({ 'text_content_type_none_encoding': { 'payload_content_type': 'text/plain', 'payload_content_encoding': None}, 'utf8_text_content_type_none_encoding': { 'payload_content_type': 'text/plain; charset=utf-8', 'payload_content_encoding': None}, 'no_space_utf8_text_content_type_none_encoding': { 'payload_content_type': 'text/plain;charset=utf-8', 'payload_content_encoding': None}, 'octet_content_type_base64_encoding': { 'payload_content_type': 'application/octet-stream', 'payload_content_encoding': 'base64'} }) @testcase.attr('positive', 'deprecated') def test_secret_create_defaults_valid_types_and_encoding_old_way( self, payload_content_type, payload_content_encoding): """Creates secrets with various content types and encodings.""" test_model = secret_models.SecretModel( **self.default_secret_create_data) test_model.payload_content_type = payload_content_type test_model.payload_content_encoding = payload_content_encoding resp, secret_ref = self.behaviors.create_secret(test_model) 
self.assertEqual(resp.status_code, 201) get_resp = self.behaviors.get_secret_based_on_content_type( secret_ref, payload_content_type=payload_content_type, payload_content_encoding=payload_content_encoding) self.assertEqual(get_resp.status_code, 200) if payload_content_encoding == 'base64': self.assertIn(test_model.payload, binascii.b2a_base64(get_resp.content)) else: self.assertIn(test_model.payload, get_resp.content) @utils.parameterized_dataset({ 'empty_content_type_and_encoding': { 'payload_content_type': '', 'payload_content_encoding': ''}, 'none_content_type_and_encoding': { 'payload_content_type': None, 'payload_content_encoding': None}, 'large_string_content_type_and_encoding': { 'payload_content_type': base.TestCase.oversized_field, 'payload_content_encoding': base.TestCase.oversized_field}, 'int_content_type_and_encoding': { 'payload_content_type': 123, 'payload_content_encoding': 123}, 'none_content_type_base64_content_encoding': { 'payload_content_type': None, 'payload_content_encoding': 'base64'}, 'text_content_type_none_content_encoding': { 'payload_content_type': 'text/plain', 'payload_content_encoding': ''}, 'text_no_subtype_content_type_none_content_encoding': { 'payload_content_type': 'text', 'payload_content_encoding': None}, 'text_slash_no_subtype_content_type_none_content_encoding': { 'payload_content_type': 'text/', 'payload_content_encoding': None}, 'text_content_type_empty_content_encoding': { 'payload_content_type': 'text/plain', 'payload_content_encoding': ' '}, 'text_content_type_spaces_content_encoding': { 'payload_content_type': 'text/plain', 'payload_content_encoding': ' '}, 'text_content_type_base64_content_encoding': { 'payload_content_type': 'text/plain', 'payload_content_encoding': 'base64'}, 'text_and_utf88_content_type_none_content_encoding': { 'payload_content_type': 'text/plain; charset=utf-88', 'payload_content_encoding': None}, 'invalid_content_type_base64_content_encoding': { 'payload_content_type': 'invalid', 
'payload_content_encoding': 'base64'}, 'invalid_content_type_none_content_encoding': { 'payload_content_type': 'invalid', 'payload_content_encoding': None}, 'octet_content_type_invalid_content_encoding': { 'payload_content_type': 'application/octet-stream', 'payload_content_encoding': 'invalid'}, 'text_content_type_invalid_content_encoding': { 'payload_content_type': 'text/plain', 'payload_content_encoding': 'invalid'}, 'none_content_type_invalid_content_encoding': { 'payload_content_type': None, 'payload_content_encoding': 'invalid'}, }) @testcase.attr('negative') def test_secret_create_defaults_invalid_types_and_encoding( self, payload_content_type, payload_content_encoding): """Creating secrets with invalid payload types and encodings.""" test_model = secret_models.SecretModel( **self.default_secret_create_data) test_model.payload_content_type = payload_content_type test_model.payload_content_encoding = payload_content_encoding resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 400) @utils.parameterized_dataset({ 'max_payload_string': [base.TestCase.max_sized_payload] }) @testcase.attr('positive') def test_secret_create_defaults_valid_payload(self, payload): """Create secrets with a various valid payloads.""" test_model = secret_models.SecretModel( **self.default_secret_create_data) overrides = {"payload": payload} test_model.override_values(**overrides) resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 201) @utils.parameterized_dataset({ 'empty': [''], 'array': [['boom']], 'int': [123], 'none': [None], 'bad_character': [unichr(0x0080)], 'bad_characters': [unichr(0x1111) + unichr(0xffff)] }) @testcase.attr('negative') def test_secret_create_defaults_invalid_payload(self, payload): """Covers creating secrets with various invalid payloads.""" test_model = secret_models.SecretModel( **self.default_secret_create_data) overrides = {"payload_content_type": 
"application/octet-stream", "payload_content_encoding": "base64", "payload": payload} test_model.override_values(**overrides) resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 400) @utils.parameterized_dataset({ 'negative_five_long_expire': { 'timezone': '-05:00', 'days': 5}, 'positive_five_long_expire': { 'timezone': '+05:00', 'days': 5}, 'negative_one_short_expire': { 'timezone': '-01', 'days': 1}, 'positive_one_short_expire': { 'timezone': '+01', 'days': 1} }) @testcase.attr('positive') def test_secret_create_defaults_valid_expiration(self, timezone, days): """Create secrets with a various valid expiration data.""" timestamp = utils.create_timestamp_w_tz_and_offset(timezone=timezone, days=days) test_model = secret_models.SecretModel( **self.default_secret_create_data) test_model.expiration = timestamp resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 201) @utils.parameterized_dataset({ 'malformed_timezone': { 'timezone': '-5:00', 'days': 0} }) @testcase.attr('negative') def test_secret_create_defaults_invalid_expiration(self, timezone, days): """Create secrets with various invalid expiration data.""" timestamp = utils.create_timestamp_w_tz_and_offset(timezone=timezone, days=days) test_model = secret_models.SecretModel( **self.default_secret_create_data) test_model.expiration = timestamp resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 400) @testcase.attr('positive') def test_secret_create_change_host_header(self, **kwargs): """Create a secret with a (possibly) malicious host name in header.""" test_model = secret_models.SecretModel( **self.default_secret_create_data) malicious_hostname = 'some.bad.server.com' changed_host_header = {'Host': malicious_hostname} resp, secret_ref = self.behaviors.create_secret( test_model, extra_headers=changed_host_header) self.assertEqual(resp.status_code, 201) # get Location field from result and 
assert that it is NOT the # malicious one. regex = '.*{0}.*'.format(malicious_hostname) self.assertNotRegexpMatches(resp.headers['location'], regex) @utils.parameterized_dataset({ 'symmetric': ['symmetric', base64.b64decode( get_default_payload()), get_default_data()], 'private': ['private', keys.get_private_key_pem(), get_private_key_req()], 'public': ['public', keys.get_public_key_pem(), get_public_key_req()], 'certificate': ['certificate', keys.get_certificate_pem(), get_certificate_req()], 'passphrase': ['passphrase', 'mysecretpassphrase', get_passphrase_req()] }) @testcase.attr('positive') def test_secret_create_with_secret_type(self, secret_type, expected, spec): """Create secrets with various secret types.""" test_model = secret_models.SecretModel(**spec) test_model.secret_type = secret_type resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(201, resp.status_code) resp = self.behaviors.get_secret_metadata(secret_ref) secret_type_response = resp.model.secret_type self.assertIsNotNone(secret_type_response) self.assertEqual(secret_type, secret_type_response) content_type = spec['payload_content_type'] get_resp = self.behaviors.get_secret(secret_ref, content_type) self.assertEqual(expected, get_resp.content) @utils.parameterized_dataset({ 'invalid_http_content_type_characaters_latin': { 'http_content_type': u'\u00c4'.encode('utf-8')}, 'invalid_http_content_type_characaters_arabic': { 'http_content_type': u'\u060f'.encode('utf-8')}, 'invalid_http_content_type_characaters_cyrillic': { 'http_content_type': u'\u0416'.encode('utf-8')}, 'invalid_http_content_type_characaters_replacement_character': { 'http_content_type': u'\ufffd'.encode('utf-8')}, }) @testcase.attr('negative') def test_secret_create_with_invalid_http_content_type_characters( self, http_content_type): """Attempt to create secrets with invalid unicode characters in the HTTP request's Content-Type header. Should return a 415. 
""" test_model = secret_models.SecretModel( **self.default_secret_create_data) headers = {"Content-Type": http_content_type} resp, secret_ref = self.behaviors.create_secret(test_model, headers) self.assertEqual(resp.status_code, 415) @utils.parameterized_dataset({ 'invalid_http_content_type_characaters_latin': { 'payload_content_type': u'\u00c4'.encode('utf-8')}, 'invalid_http_content_type_characaters_arabic': { 'payload_content_type': u'\u060f'.encode('utf-8')}, 'invalid_http_content_type_characaters_cyrillic': { 'payload_content_type': u'\u0416'.encode('utf-8')}, 'invalid_http_content_type_characaters_replacement_character': { 'payload_content_type': u'\ufffd'.encode('utf-8')}, }) @testcase.attr('negative') def test_secret_create_with_invalid_payload_content_type_characters( self, payload_content_type): """Attempt to create secrets with non-ascii characters in the payload's content type attribute. Should return a 400. """ test_model = secret_models.SecretModel( **self.default_secret_create_data) test_model.payload_content_type = payload_content_type resp, secret_ref = self.behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 400) class SecretsPagingTestCase(base.PagingTestCase): def setUp(self): super(SecretsPagingTestCase, self).setUp() self.behaviors = secret_behaviors.SecretBehaviors(self.client) # make a local mutable copy of the default data to prevent # possible data contamination self.create_default_data = get_default_data() def tearDown(self): self.behaviors.delete_all_created_secrets() super(SecretsPagingTestCase, self).tearDown() def create_model(self): return secret_models.SecretModel(**self.create_default_data) def create_resources(self, count=0, model=None): for x in range(0, count): self.behaviors.create_secret(model) def get_resources(self, limit=10, offset=0, filter=None): return self.behaviors.get_secrets(limit=limit, offset=offset, filter=filter) def set_filter_field(self, unique_str, model): '''Set the name field which we use 
in the get_resources ''' model.name = unique_str class SecretsUnauthedTestCase(base.TestCase): def setUp(self): super(SecretsUnauthedTestCase, self).setUp() self.behaviors = secret_behaviors.SecretBehaviors(self.client) self.default_secret_create_data = get_default_data() self.dummy_secret_ref = 'orders/dummy-7b86-4071-935d-ef6b83729200' self.dummy_project_id = 'dummy' resp, self.real_secret_ref = self.behaviors.create_secret( secret_models.SecretModel(**self.default_secret_create_data) ) stored_auth = self.client._auth[ self.client._default_user_name].stored_auth project_id = stored_auth.values()[0]['project_id'] self.project_id_header = { 'X-Project-Id': project_id } self.dummy_project_id_header = { 'X-Project-Id': self.dummy_project_id } def tearDown(self): self.behaviors.delete_all_created_secrets() super(SecretsUnauthedTestCase, self).tearDown() @testcase.attr('negative', 'security') def test_secret_create_unauthed_no_proj_id(self): """Attempt to create a secret without a token or project id Should return 401 """ model = secret_models.SecretModel(self.default_secret_create_data) resp, secret_ref = self.behaviors.create_secret(model, use_auth=False) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_secret_create_unauthed_fake_proj_id(self): """Attempt to create a secret with a project id but no token Should return 401 """ model = secret_models.SecretModel(self.default_secret_create_data) resp, secret_ref = self.behaviors.create_secret( model, extra_headers=self.dummy_project_id_header, use_auth=False ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_secret_create_unauthed_real_proj_id(self): """Attempt to create a secret with a project id but no token Should return 401 """ model = secret_models.SecretModel(self.default_secret_create_data) resp, secret_ref = self.behaviors.create_secret( model, extra_headers=self.project_id_header, use_auth=False ) self.assertEqual(401, 
resp.status_code) @testcase.attr('negative', 'security') def test_secret_get_unauthed_no_proj_id_fake_secret(self): """Attempt to read a non-existant secret without a token or project id Should return 401 """ resp = self.behaviors.get_secret( self.dummy_secret_ref, payload_content_type='application/octet-stream', payload_content_encoding='base64', use_auth=False ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_secret_get_unauthed_no_proj_id_real_secret(self): """Attempt to read an existing secret without a token or project id Should return 401 """ resp = self.behaviors.get_secret( self.real_secret_ref, payload_content_type='application/octet-stream', payload_content_encoding='base64', use_auth=False ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_secret_get_unauthed_fake_proj_id_fake_secret(self): """Attempt to get a non-existant secret with a project id but no token Should return 401 """ resp = self.behaviors.get_secret( self.dummy_secret_ref, payload_content_type='application/octet-stream', payload_content_encoding='base64', extra_headers=self.dummy_project_id_header, use_auth=False ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_secret_get_unauthed_fake_proj_id_real_secret(self): """Attempt to get an existing secret with a project id but no token Should return 401 """ resp = self.behaviors.get_secret( self.real_secret_ref, payload_content_type='application/octet-stream', payload_content_encoding='base64', extra_headers=self.dummy_project_id_header, use_auth=False ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_secret_get_unauthed_real_proj_id_fake_secret(self): """Attempt to get a non-existant secret with a project id but no token Should return 401 """ resp = self.behaviors.get_secret( self.dummy_secret_ref, payload_content_type='application/octet-stream', payload_content_encoding='base64', 
extra_headers=self.project_id_header, use_auth=False ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_secret_get_unauthed_real_proj_id_real_secret(self): """Attempt to get an existing secret with a project id but no token Should return 401 """ resp = self.behaviors.get_secret( self.real_secret_ref, payload_content_type='application/octet-stream', payload_content_encoding='base64', extra_headers=self.project_id_header, use_auth=False ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_secret_put_unauthed_no_proj_id_fake_secret(self): """Attempt to update a non-existant secret without a token or project id Should return 401 """ resp = self.behaviors.update_secret_payload( self.dummy_secret_ref, payload=None, payload_content_type='application/octet-stream', payload_content_encoding='base64', use_auth=False ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_secret_put_unauthed_no_proj_id_real_secret(self): """Attempt to update an existing secret without a token or project id Should return 401 """ resp = self.behaviors.update_secret_payload( self.real_secret_ref, payload=None, payload_content_type='application/octet-stream', payload_content_encoding='base64', use_auth=False ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_secret_put_unauthed_fake_proj_id_fake_secret(self): """Attempt to update a non-existant secret with a project id, but no token Should return 401 """ resp = self.behaviors.update_secret_payload( self.dummy_secret_ref, payload=None, payload_content_type='application/octet-stream', payload_content_encoding='base64', extra_headers=self.dummy_project_id_header, use_auth=False ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_secret_put_unauthed_fake_proj_id_real_secret(self): """Attempt to update an existing secret with a project id, but no token Should return 
401 """ resp = self.behaviors.update_secret_payload( self.real_secret_ref, payload=None, payload_content_type='application/octet-stream', payload_content_encoding='base64', extra_headers=self.dummy_project_id_header, use_auth=False ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_secret_put_unauthed_real_proj_id_fake_secret(self): """Attempt to update a non-existant secret with a project id, but no token Should return 401 """ resp = self.behaviors.update_secret_payload( self.dummy_secret_ref, payload=None, payload_content_type='application/octet-stream', payload_content_encoding='base64', extra_headers=self.project_id_header, use_auth=False ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_secret_put_unauthed_real_proj_id_real_secret(self): """Attempt to update an existing secret with a project id, but no token Should return 401 """ resp = self.behaviors.update_secret_payload( self.real_secret_ref, payload=None, payload_content_type='application/octet-stream', payload_content_encoding='base64', extra_headers=self.project_id_header, use_auth=False ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_secret_delete_unauthed_no_proj_id_fake_secret(self): """Attempt to delete a non-existant secret without a token or project id Should return 401 """ resp = self.behaviors.delete_secret( self.dummy_secret_ref, expected_fail=True, use_auth=False ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_secret_delete_unauthed_no_proj_id_real_secret(self): """Attempt to delete an existing secret without a token or project id Should return 401 """ resp = self.behaviors.delete_secret( self.real_secret_ref, expected_fail=True, use_auth=False ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_secret_delete_unauthed_fake_proj_id_fake_secret(self): """Attempt to delete a non-existant secret with 
a project id, but no token Should return 401 """ resp = self.behaviors.delete_secret( self.dummy_secret_ref, extra_headers=self.dummy_project_id_header, expected_fail=True, use_auth=False ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_secret_delete_unauthed_fake_proj_id_real_secret(self): """Attempt to delete an existing secret with a project id, but no token Should return 401 """ resp = self.behaviors.delete_secret( self.real_secret_ref, extra_headers=self.dummy_project_id_header, expected_fail=True, use_auth=False ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_secret_delete_unauthed_real_proj_id_fake_secret(self): """Attempt to delete a non-existant secret with a project id, but no token Should return 401 """ resp = self.behaviors.delete_secret( self.dummy_secret_ref, extra_headers=self.project_id_header, expected_fail=True, use_auth=False ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_secret_delete_unauthed_real_proj_id_real_secret(self): """Attempt to delete an existing secret with a project id, but no token Should return 401 """ resp = self.behaviors.delete_secret( self.real_secret_ref, extra_headers=self.project_id_header, expected_fail=True, use_auth=False ) self.assertEqual(401, resp.status_code) barbican-2.0.0/functionaltests/api/v1/functional/test_containers_rbac.py0000664000567000056710000002304112701405673027653 0ustar jenkinsjenkins00000000000000# Copyright (c) 2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. 
# See the License for the specific language governing permissions and # limitations under the License. from barbican.tests import utils from functionaltests.api import base from functionaltests.api.v1.behaviors import container_behaviors from functionaltests.api.v1.behaviors import secret_behaviors from functionaltests.api.v1.models import container_models from functionaltests.api.v1.models import secret_models from functionaltests.common import config CONF = config.get_config() admin_a = CONF.rbac_users.admin_a creator_a = CONF.rbac_users.creator_a observer_a = CONF.rbac_users.observer_a auditor_a = CONF.rbac_users.auditor_a test_data_rbac_store_container = { 'with_admin_a': {'user': admin_a, 'admin': admin_a, 'expected_return': 201}, 'with_creator_a': {'user': creator_a, 'admin': admin_a, 'expected_return': 201}, 'with_observer_a': {'user': observer_a, 'admin': admin_a, 'expected_return': 403}, 'with_auditor_a': {'user': auditor_a, 'admin': admin_a, 'expected_return': 403}, } test_data_rbac_update_container = { 'with_admin_a': {'user': admin_a, 'admin': admin_a, 'expected_return': 405}, 'with_creator_a': {'user': creator_a, 'admin': admin_a, 'expected_return': 405}, 'with_observer_a': {'user': observer_a, 'admin': admin_a, 'expected_return': 405}, 'with_auditor_a': {'user': auditor_a, 'admin': admin_a, 'expected_return': 405}, } test_data_rbac_delete_container = { 'with_admin_a': {'user': admin_a, 'admin': admin_a, 'expected_return': 204}, 'with_creator_a': {'user': creator_a, 'admin': admin_a, 'expected_return': 403}, 'with_observer_a': {'user': observer_a, 'admin': admin_a, 'expected_return': 403}, 'with_auditor_a': {'user': auditor_a, 'admin': admin_a, 'expected_return': 403}, } test_data_rbac_get_container = { 'with_admin_a': {'user': admin_a, 'admin': admin_a, 'expected_return': 200}, 'with_creator_a': {'user': creator_a, 'admin': admin_a, 'expected_return': 200}, 'with_observer_a': {'user': observer_a, 'admin': admin_a, 'expected_return': 200}, 
'with_auditor_a': {'user': auditor_a, 'admin': admin_a, 'expected_return': 200}, } test_data_rbac_get_list_of_containers = { 'with_admin_a': {'user': admin_a, 'admin': admin_a, 'expected_return': 200}, 'with_creator_a': {'user': creator_a, 'admin': admin_a, 'expected_return': 200}, 'with_observer_a': {'user': observer_a, 'admin': admin_a, 'expected_return': 200}, 'with_auditor_a': {'user': auditor_a, 'admin': admin_a, 'expected_return': 403}, } def get_default_secret_data(): return { "name": "AES key", "expiration": "2050-02-28T19:14:44.180394", "algorithm": "aes", "bit_length": 256, "mode": "cbc", "payload": 'Z0Y2K2xMb0Yzb2hBOWFQUnB0KzZiUT09', "payload_content_type": "application/octet-stream", "payload_content_encoding": "base64", } def get_container_req(secret_ref): return {"name": "testcontainer", "type": "generic", "secret_refs": [{'name': 'secret1', 'secret_ref': secret_ref}]} @utils.parameterized_test_case class RBACContainersTestCase(base.TestCase): """Functional tests exercising RBAC Policies""" def setUp(self): super(RBACContainersTestCase, self).setUp() self.secret_behaviors = secret_behaviors.SecretBehaviors(self.client) self.container_behaviors = container_behaviors.ContainerBehaviors( self.client) def tearDown(self): self.secret_behaviors.delete_all_created_secrets() self.container_behaviors.delete_all_created_containers() super(RBACContainersTestCase, self).tearDown() @utils.parameterized_dataset(test_data_rbac_store_container) def test_rbac_store_container(self, user, admin, expected_return): """Test RBAC for container store Issue a container store and verify that that the correct http return code comes back for the specified user. 
:param user: the user who will attempt to do the store :param admin: the admin of the group containing the user :param expected_return: the expected http return code """ test_model = secret_models.SecretModel( **get_default_secret_data()) resp, secret_ref = self.secret_behaviors.create_secret( test_model, user_name=admin, admin=admin) self.assertEqual(201, resp.status_code) test_model = container_models.ContainerModel( **get_container_req(secret_ref)) resp, container_ref = self.container_behaviors.create_container( test_model, user_name=user, admin=admin) self.assertEqual(expected_return, resp.status_code) @utils.parameterized_dataset(test_data_rbac_update_container) def test_rbac_update_container(self, user, admin, expected_return): """Test RBAC for container update Issue a container update and verify that that the correct http return code comes back for the specified user. The initial container will be stored with the admin user to ensure that it gets created successfully. We don't want the container store to fail since we are only testing container update here. :param user: the user who will attempt to do the update :param admin: the admin of the group containing the user :param expected_return: the expected http return code """ container_ref = self._create_initial_container(admin=admin) resp = self.container_behaviors.update_container(container_ref, user_name=user) self.assertEqual(expected_return, resp.status_code) @utils.parameterized_dataset(test_data_rbac_get_container) def test_rbac_get_container(self, user, admin, expected_return): """Test RBAC for container get Issue a container get and verify that that the correct http return code comes back for the specified user. The initial container will be stored with the admin user to ensure that it gets created successfully. We don't want the container store to fail since we are only testing container get here. 
:param user: the user who will attempt to do the get metadata :param admin: the admin of the group containing the user :param expected_return: the expected http return code """ container_href = self._create_initial_container(admin=admin) resp = self.container_behaviors.get_container( container_href, user_name=user) self.assertEqual(expected_return, resp.status_code) self.assertEqual(expected_return == 200, resp.content is not None) @utils.parameterized_dataset(test_data_rbac_delete_container) def test_rbac_delete_container(self, user, admin, expected_return): """Test RBAC for container delete Issue a container delete and verify that that the correct http return code comes back for the specified user. The initial container will be stored with the admin user to ensure that it gets created successfully. We don't want the container store to fail since we are only testing container delete here. :param user: the user who will attempt to do the delete :param admin: the admin of the group containing the user :param expected_return: the expected http return code """ container_href = self._create_initial_container(admin=admin) resp = self.container_behaviors.delete_container( container_href, user_name=user) self.assertEqual(expected_return, resp.status_code) def _create_initial_container(self, admin=admin_a): """Utility function to create a container with a contained secret Some tests require a container to exist before they test certain things, so this function can be used to do that setup. First a secret will be created, then a container will be created which contains that secret. 
:param admin: the admin user who will create store the container :param secret_data: the data for the container :return: href to the newly stored container """ test_model = secret_models.SecretModel(**get_default_secret_data()) resp, secret_ref = self.secret_behaviors.create_secret( test_model, user_name=admin, admin=admin) self.assertEqual(201, resp.status_code) test_model = container_models.ContainerModel( **get_container_req(secret_ref)) resp, container_ref = self.container_behaviors.create_container( test_model, user_name=admin, admin=admin) self.assertEqual(201, resp.status_code) return container_ref barbican-2.0.0/functionaltests/api/v1/functional/test_acls.py0000664000567000056710000005254512701405673025454 0ustar jenkinsjenkins00000000000000# Copyright (c) 2015 Cisco Systems # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
from testtools import testcase from barbican.tests import utils from functionaltests.api import base from functionaltests.api.v1.behaviors import acl_behaviors from functionaltests.api.v1.behaviors import consumer_behaviors from functionaltests.api.v1.behaviors import container_behaviors from functionaltests.api.v1.behaviors import secret_behaviors from functionaltests.api.v1.models import acl_models from functionaltests.api.v1.models import consumer_model from functionaltests.api.v1.models import container_models from functionaltests.api.v1.models import secret_models from functionaltests.common import config CONF = config.get_config() admin_a = CONF.rbac_users.admin_a creator_a = CONF.rbac_users.creator_a observer_a = CONF.rbac_users.observer_a auditor_a = CONF.rbac_users.auditor_a admin_b = CONF.rbac_users.admin_b observer_b = CONF.rbac_users.observer_b def get_rbac_only(): return {'read': {'project-access': True}} # private secret can only be access by the creator or an admin def get_private(): return {'read': {'project-access': False}} def get_acl_only(reader_id): return {'read': {'users': [reader_id], 'project-access': False}} def get_rbac_plus_acl(reader_id): return {'read': {'users': [reader_id], 'project-access': True}} test_data_read_secret_rbac_only = { 'with_admin_a': {'user': admin_a, 'expected_return': 200}, 'with_creator_a': {'user': creator_a, 'expected_return': 200}, 'with_observer_a': {'user': observer_a, 'expected_return': 200}, 'with_auditor_a': {'user': auditor_a, 'expected_return': 403}, 'with_admin_b': {'user': admin_b, 'expected_return': 403}, 'with_observer_b': {'user': observer_b, 'expected_return': 403}, } test_data_read_secret_private = { 'with_admin_a': {'user': admin_a, 'expected_return': 200}, 'with_creator_a': {'user': creator_a, 'expected_return': 200}, 'with_observer_a': {'user': observer_a, 'expected_return': 403}, 'with_auditor_a': {'user': auditor_a, 'expected_return': 403}, 'with_admin_b': {'user': admin_b, 'expected_return': 
403}, 'with_observer_b': {'user': observer_b, 'expected_return': 403}, } test_data_read_secret_acl_only = { 'with_admin_a': {'user': admin_a, 'expected_return': 200}, 'with_creator_a': {'user': creator_a, 'expected_return': 200}, 'with_observer_a': {'user': observer_a, 'expected_return': 403}, 'with_auditor_a': {'user': auditor_a, 'expected_return': 403}, 'with_admin_b': {'user': admin_b, 'expected_return': 403}, 'with_observer_b': {'user': observer_b, 'expected_return': 200}, } test_data_read_secret_rbac_plus_acl = { 'with_admin_a': {'user': admin_a, 'expected_return': 200}, 'with_creator_a': {'user': creator_a, 'expected_return': 200}, 'with_observer_a': {'user': observer_a, 'expected_return': 200}, 'with_auditor_a': {'user': auditor_a, 'expected_return': 403}, 'with_admin_b': {'user': admin_b, 'expected_return': 403}, 'with_observer_b': {'user': observer_b, 'expected_return': 200}, } test_data_read_container_rbac_only = { 'with_admin_a': {'user': admin_a, 'expected_return': 200}, 'with_creator_a': {'user': creator_a, 'expected_return': 200}, 'with_observer_a': {'user': observer_a, 'expected_return': 200}, 'with_auditor_a': {'user': auditor_a, 'expected_return': 200}, 'with_admin_b': {'user': admin_b, 'expected_return': 403}, 'with_observer_b': {'user': observer_b, 'expected_return': 403}, } test_data_read_container_private = { 'with_admin_a': {'user': admin_a, 'expected_return': 200}, 'with_creator_a': {'user': creator_a, 'expected_return': 200}, 'with_observer_a': {'user': observer_a, 'expected_return': 403}, 'with_auditor_a': {'user': auditor_a, 'expected_return': 403}, 'with_admin_b': {'user': admin_b, 'expected_return': 403}, 'with_observer_b': {'user': observer_b, 'expected_return': 403}, } test_data_read_container_acl_only = { 'with_admin_a': {'user': admin_a, 'expected_return': 200}, 'with_creator_a': {'user': creator_a, 'expected_return': 200}, 'with_observer_a': {'user': observer_a, 'expected_return': 403}, 'with_auditor_a': {'user': auditor_a, 
'expected_return': 403}, 'with_admin_b': {'user': admin_b, 'expected_return': 403}, 'with_observer_b': {'user': observer_b, 'expected_return': 200}, } test_data_read_container_rbac_plus_acl = { 'with_admin_a': {'user': admin_a, 'expected_return': 200}, 'with_creator_a': {'user': creator_a, 'expected_return': 200}, 'with_observer_a': {'user': observer_a, 'expected_return': 200}, 'with_auditor_a': {'user': auditor_a, 'expected_return': 200}, 'with_admin_b': {'user': admin_b, 'expected_return': 403}, 'with_observer_b': {'user': observer_b, 'expected_return': 200}, } test_data_read_container_consumer_acl_only = { 'with_admin_a': {'user': admin_a, 'expected_return': 200}, 'with_creator_a': {'user': creator_a, 'expected_return': 200}, 'with_observer_a': {'user': observer_a, 'expected_return': 200}, 'with_auditor_a': {'user': auditor_a, 'expected_return': 200}, 'with_admin_b': {'user': admin_b, 'expected_return': 404}, 'with_observer_b': {'user': observer_b, 'expected_return': 404}, } test_data_delete_container_consumer_acl_only = { 'with_admin_a': {'user': admin_a, 'expected_return': 200}, 'with_creator_a': {'user': creator_a, 'expected_return': 403}, 'with_observer_a': {'user': observer_a, 'expected_return': 403}, 'with_auditor_a': {'user': auditor_a, 'expected_return': 403}, 'with_admin_b': {'user': admin_b, 'expected_return': 404}, 'with_observer_b': {'user': observer_b, 'expected_return': 403}, } test_data_create_container_consumer_acl_only = { 'with_admin_a': {'user': admin_a, 'expected_return': 200}, 'with_creator_a': {'user': creator_a, 'expected_return': 403}, 'with_observer_a': {'user': observer_a, 'expected_return': 403}, 'with_auditor_a': {'user': auditor_a, 'expected_return': 403}, 'with_admin_b': {'user': admin_b, 'expected_return': 404}, 'with_observer_b': {'user': observer_b, 'expected_return': 403}, } @utils.parameterized_test_case class AclTestCase(base.TestCase): """Functional tests exercising ACL Features""" def setUp(self): super(AclTestCase, 
self).setUp() self.secret_behaviors = secret_behaviors.SecretBehaviors(self.client) self.container_behaviors = container_behaviors.ContainerBehaviors( self.client) self.acl_behaviors = acl_behaviors.AclBehaviors(self.client) self.consumer_behaviors = consumer_behaviors.ConsumerBehaviors( self.client) def tearDown(self): self.acl_behaviors.delete_all_created_acls() self.secret_behaviors.delete_all_created_secrets() self.container_behaviors.delete_all_created_containers() super(AclTestCase, self).tearDown() @utils.parameterized_dataset(test_data_read_secret_rbac_only) def test_secret_read_default(self, user, expected_return): secret_ref = self.store_secret() status = self.get_secret(secret_ref, user_name=user) self.assertEqual(expected_return, status) @utils.parameterized_dataset(test_data_read_secret_rbac_only) def test_secret_read_rbac_only(self, user, expected_return): secret_ref = self.store_secret() self.set_secret_acl(secret_ref, get_rbac_only()) status = self.get_secret(secret_ref, user_name=user) self.assertEqual(expected_return, status) @utils.parameterized_dataset(test_data_read_secret_private) def test_secret_read_private(self, user, expected_return): secret_ref = self.store_secret() self.set_secret_acl(secret_ref, get_private()) status = self.get_secret(secret_ref, user_name=user) self.assertEqual(expected_return, status) @utils.parameterized_dataset(test_data_read_secret_acl_only) def test_secret_read_acl_only(self, user, expected_return): secret_ref = self.store_secret() user_id = self.secret_behaviors.get_user_id_from_name(observer_b) self.set_secret_acl(secret_ref, get_acl_only(user_id)) status = self.get_secret(secret_ref, user_name=user) self.assertEqual(expected_return, status) @utils.parameterized_dataset(test_data_read_secret_rbac_plus_acl) def test_secret_read_rbac_plus_acl(self, user, expected_return): secret_ref = self.store_secret() user_id = self.secret_behaviors.get_user_id_from_name(observer_b) self.set_secret_acl(secret_ref, 
get_rbac_plus_acl(user_id)) status = self.get_secret(secret_ref, user_name=user) self.assertEqual(expected_return, status) @utils.parameterized_dataset(test_data_read_container_rbac_only) def test_container_read_default(self, user, expected_return): container_ref = self.store_container() status = self.get_container(container_ref, user_name=user) self.assertEqual(expected_return, status) @utils.parameterized_dataset(test_data_read_container_rbac_only) def test_container_read_rbac_only(self, user, expected_return): container_ref = self.store_container() self.set_container_acl(container_ref, get_rbac_only()) status = self.get_container(container_ref, user_name=user) self.assertEqual(expected_return, status) @utils.parameterized_dataset(test_data_read_container_private) def test_container_read_private(self, user, expected_return): container_ref = self.store_container() self.set_container_acl(container_ref, get_private()) status = self.get_container(container_ref, user_name=user) self.assertEqual(expected_return, status) @utils.parameterized_dataset(test_data_read_container_acl_only) def test_container_read_acl_only(self, user, expected_return): container_ref = self.store_container() user_id = self.container_behaviors.get_user_id_from_name(observer_b) self.set_container_acl(container_ref, get_acl_only(user_id)) status = self.get_container(container_ref, user_name=user) self.assertEqual(expected_return, status) @utils.parameterized_dataset(test_data_read_container_rbac_plus_acl) def test_container_read_rbac_plus_acl(self, user, expected_return): container_ref = self.store_container() user_id = self.container_behaviors.get_user_id_from_name(observer_b) self.set_container_acl(container_ref, get_rbac_plus_acl(user_id)) status = self.get_container(container_ref, user_name=user) self.assertEqual(expected_return, status) @utils.parameterized_dataset(test_data_read_container_consumer_acl_only) def test_container_acl_read_consumers(self, user, expected_return): """Acl access 
will not allow you to see the list of consumers""" container_ref = self.store_container(user_name=creator_a, admin=admin_a) consumer_model = get_consumer_model() resp, consumer_data = self.consumer_behaviors.create_consumer( model=consumer_model, container_ref=container_ref, user_name=admin_a) self.assertEqual(200, resp.status_code) user_id = self.container_behaviors.get_user_id_from_name(user) self.set_container_acl(container_ref, get_acl_only(user_id)) # Verify all users granted acl access can read the container status_code = self.get_container(container_ref, user_name=user) self.assertEqual(200, status_code) resp, consumers, next_ref, prev_ref = \ self.consumer_behaviors.get_consumers(container_ref, user_name=user) self.assertEqual(expected_return, resp.status_code) @utils.parameterized_dataset(test_data_delete_container_consumer_acl_only) def test_container_acl_remove_consumer(self, user, expected_return): """Acl access will not allow you to delete a consumer""" container_ref = self.store_container(user_name=creator_a, admin=admin_a) consumer_model = get_consumer_model() resp, consumer_data = self.consumer_behaviors.create_consumer( model=consumer_model, container_ref=container_ref, user_name=admin_a) self.assertEqual(200, resp.status_code) user_id = self.container_behaviors.get_user_id_from_name(user) self.set_container_acl(container_ref, get_acl_only(user_id)) # Verify all users granted acl access can read the container status_code = self.get_container(container_ref, user_name=user) self.assertEqual(200, status_code) resp, consumer_data = self.consumer_behaviors.delete_consumer( model=consumer_model, container_ref=container_ref, user_name=user) self.assertEqual(expected_return, resp.status_code) @utils.parameterized_dataset(test_data_create_container_consumer_acl_only) def test_container_acl_create_consumer(self, user, expected_return): """Acl access will not allow you to add a consumer""" container_ref = self.store_container(user_name=creator_a, 
admin=admin_a) user_id = self.container_behaviors.get_user_id_from_name(user) self.set_container_acl(container_ref, get_acl_only(user_id)) # Verify all users granted acl access can read the container status_code = self.get_container(container_ref, user_name=user) self.assertEqual(200, status_code) consumer_model = get_consumer_model() resp, consumer_data = self.consumer_behaviors.create_consumer( model=consumer_model, container_ref=container_ref, user_name=user) self.assertEqual(expected_return, resp.status_code) @testcase.attr('negative') def test_secret_acl_auditor_with_acl_cannot_read(self): """Auditor granted access to a secret cannot read that secret""" secret_ref = self.store_secret() self.set_secret_acl(secret_ref, get_rbac_plus_acl(auditor_a)) status_code = self.get_secret(secret_ref=secret_ref, user_name=auditor_a) self.assertEqual(403, status_code) @testcase.attr('negative') def test_secret_acl_put_as_observer(self): """Observer can not put to a secret when granted access via acl""" secret_no_payload = { "name": "AES key", "expiration": "2018-02-28T19:14:44.180394", "algorithm": "aes", "bit_length": 256, "mode": "cbc", } secret_model = secret_models.SecretModel(**secret_no_payload) resp, secret_ref = self.secret_behaviors.create_secret( model=secret_model, user_name=creator_a) self.set_secret_acl(secret_ref, get_rbac_plus_acl(observer_a)) # Update payload = "gF6+lLoF3ohA9aPRpt+6bQ==" payload_content_type = "application/octet-stream" payload_content_encoding = "base64" update_resp = self.secret_behaviors.update_secret_payload( secret_ref, user_name=observer_a, payload=payload, payload_content_type=payload_content_type, payload_content_encoding=payload_content_encoding) self.assertEqual(403, update_resp.status_code) # ----------------------- Secret ACL Tests --------------------------- @testcase.attr('negative', 'security') def test_secret_read_acl_no_token(self): secret_ref = self.store_secret() acl_ref = '{0}/acl'.format(secret_ref) resp = 
self.acl_behaviors.get_acl(acl_ref, use_auth=False) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_secret_set_acl_no_token(self): secret_ref = self.store_secret() resp = self.set_secret_acl(secret_ref, get_rbac_only(), use_auth=False) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_secret_delete_acl_no_token(self): secret_ref = self.store_secret() acl_ref = '{0}/acl'.format(secret_ref) resp = self.acl_behaviors.delete_acl( acl_ref, expected_fail=True, use_auth=False ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_secret_update_acl_no_token(self): secret_ref = self.store_secret() acl_ref = '{0}/acl'.format(secret_ref) resp = self.set_secret_acl(secret_ref, get_rbac_only()) self.assertEqual(200, resp.status_code) resp = self.acl_behaviors.update_acl(acl_ref, {}, use_auth=False) self.assertEqual(401, resp.status_code) # ----------------------- Container ACL Tests --------------------------- @testcase.attr('negative', 'security') def test_container_read_acl_no_token(self): container_ref = self.store_container() acl_ref = '{0}/acl'.format(container_ref) resp = self.acl_behaviors.get_acl(acl_ref, use_auth=False) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_container_set_acl_no_token(self): container_ref = self.store_container() resp = self.set_container_acl( container_ref, get_rbac_only(), use_auth=False ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_container_delete_acl_no_token(self): container_ref = self.store_container() acl_ref = '{0}/acl'.format(container_ref) resp = self.acl_behaviors.delete_acl( acl_ref, expected_fail=True, use_auth=False ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_container_update_acl_no_token(self): container_ref = self.store_container() acl_ref = '{0}/acl'.format(container_ref) resp = 
self.set_container_acl(container_ref, get_rbac_only()) self.assertEqual(200, resp.status_code) resp = self.acl_behaviors.update_acl(acl_ref, {}, use_auth=False) self.assertEqual(401, resp.status_code) # ----------------------- Helper Functions --------------------------- def store_secret(self, user_name=creator_a, admin=admin_a): test_model = secret_models.SecretModel( **get_default_secret_data()) resp, secret_ref = self.secret_behaviors.create_secret( test_model, user_name=user_name, admin=admin) self.assertEqual(201, resp.status_code) return secret_ref def get_secret(self, secret_ref, user_name=creator_a): resp = self.secret_behaviors.get_secret( secret_ref, 'application/octet-stream', user_name=user_name) return resp.status_code def set_secret_acl(self, secret_ref, acl, use_auth=True, user_name=creator_a): test_model = acl_models.AclModel(**acl) resp = self.acl_behaviors.create_acl( secret_ref, test_model, use_auth=use_auth, user_name=user_name) if use_auth: self.assertEqual(200, resp.status_code) return resp def store_container(self, user_name=creator_a, admin=admin_a): secret_ref = self.store_secret(user_name=user_name, admin=admin) test_model = container_models.ContainerModel( **get_container_req(secret_ref)) resp, container_ref = self.container_behaviors.create_container( test_model, user_name=user_name, admin=admin) self.assertEqual(201, resp.status_code) return container_ref def get_container(self, container_ref, user_name=creator_a): resp = self.container_behaviors.get_container( container_ref, user_name=user_name) return resp.status_code def set_container_acl(self, container_ref, acl, use_auth=True, user_name=creator_a): test_model = acl_models.AclModel(**acl) resp = self.acl_behaviors.create_acl( container_ref, test_model, use_auth=use_auth, user_name=user_name) if use_auth: self.assertEqual(200, resp.status_code) return resp # ----------------------- Support Functions --------------------------- def get_default_secret_data(): return { "name": "AES 
key", "expiration": "2050-02-28T19:14:44.180394", "algorithm": "aes", "bit_length": 256, "mode": "cbc", "payload": get_default_payload(), "payload_content_type": "application/octet-stream", "payload_content_encoding": "base64", } def get_default_payload(): return 'Z0Y2K2xMb0Yzb2hBOWFQUnB0KzZiUT09' def get_container_req(secret_ref): return {"name": "testcontainer", "type": "generic", "secret_refs": [{'name': 'secret1', 'secret_ref': secret_ref}]} def get_consumer_model(): test_consumer_model = consumer_model.ConsumerModel( name="consumername", URL="consumerURL" ) return test_consumer_model barbican-2.0.0/functionaltests/api/v1/functional/test_quotas_enforce.py0000664000567000056710000003011612701405673027535 0ustar jenkinsjenkins00000000000000# Copyright (c) 2015 Cisco Systems # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
import testtools from barbican.plugin.interface import certificate_manager as cert_interface from functionaltests.api import base from functionaltests.api.v1.behaviors import ca_behaviors from functionaltests.api.v1.behaviors import consumer_behaviors from functionaltests.api.v1.behaviors import container_behaviors from functionaltests.api.v1.behaviors import order_behaviors from functionaltests.api.v1.behaviors import quota_behaviors from functionaltests.api.v1.behaviors import secret_behaviors from functionaltests.api.v1.models import ca_models from functionaltests.api.v1.models import consumer_model from functionaltests.api.v1.models import container_models from functionaltests.api.v1.models import order_models from functionaltests.api.v1.models import quota_models from functionaltests.api.v1.models import secret_models from functionaltests.common import config CONF = config.get_config() admin_b = CONF.rbac_users.admin_b service_admin = CONF.identity.service_admin def is_ca_backend_snakeoil(): return 'snakeoil_ca' in\ cert_interface.CONF.certificate.enabled_certificate_plugins @testtools.testcase.attr('no_parallel') class QuotaEnforcementTestCase(base.TestCase): def setUp(self): super(QuotaEnforcementTestCase, self).setUp() self.quota_behaviors = quota_behaviors.QuotaBehaviors(self.client) self.secret_behaviors = secret_behaviors.SecretBehaviors(self.client) self.container_behaviors = container_behaviors.ContainerBehaviors( self.client) self.order_behaviors = order_behaviors.OrderBehaviors(self.client) self.consumer_behaviors = consumer_behaviors.ConsumerBehaviors( self.client) self.ca_behaviors = ca_behaviors.CABehaviors(self.client) self.secret_data = self.get_default_secret_data() self.quota_data = self.get_default_quota_data() self.project_id = self.quota_behaviors.get_project_id_from_name( admin_b) self.order_secrets = [] self.root_ca_ref = None self.test_order_sent = False def tearDown(self): self.quota_behaviors.delete_all_created_quotas() 
self.consumer_behaviors.delete_all_created_consumers() self.container_behaviors.delete_all_created_containers() self.secret_behaviors.delete_all_created_secrets() self.ca_behaviors.delete_all_created_cas() for secret_ref in self.order_secrets: resp = self.secret_behaviors.delete_secret( secret_ref, user_name=admin_b) self.assertEqual(204, resp.status_code) self.order_behaviors.delete_all_created_orders() super(QuotaEnforcementTestCase, self).tearDown() def test_secrets_unlimited(self): self.set_quotas('secrets', -1) self.create_secrets(count=5) def test_secrets_disabled(self): self.set_quotas('secrets', 0) self.create_secrets(expected_return=403) def test_secrets_limited_one(self): self.set_quotas('secrets', 1) self.create_secrets(count=1) self.create_secrets(expected_return=403) def test_secrets_limited_five(self): self.set_quotas('secrets', 5) self.create_secrets(count=5) self.create_secrets(expected_return=403) def test_containers_unlimited(self): self.set_quotas('containers', -1) self.create_containers(count=5) def test_containers_disabled(self): self.set_quotas('containers', 0) self.create_containers(expected_return=403) def test_containers_limited_one(self): self.set_quotas('containers', 1) self.create_containers(count=1) self.create_containers(expected_return=403) def test_containers_limited_five(self): self.set_quotas('containers', 5) self.create_containers(count=5) self.create_containers(expected_return=403) def test_orders_unlimited(self): self.set_quotas('orders', -1) self.create_orders(count=5) def test_orders_disabled(self): self.set_quotas('orders', 0) self.create_orders(expected_return=403) def test_orders_limited_one(self): self.set_quotas('orders', 1) self.create_orders(count=1) self.create_orders(expected_return=403) def test_orders_limited_five(self): self.set_quotas('orders', 5) self.create_orders(count=5) self.create_orders(expected_return=403) def test_consumers_unlimited(self): self.set_quotas('consumers', -1) self.create_consumers(count=5) 
def test_consumers_disabled(self): self.set_quotas('consumers', 0) self.create_consumers(expected_return=403) def test_consumers_limited_one(self): self.set_quotas('consumers', 1) self.create_consumers(count=1) self.create_consumers(expected_return=403) def test_consumers_limited_five(self): self.set_quotas('consumers', 5) self.create_consumers(count=5) self.create_consumers(expected_return=403) @testtools.skipIf(not is_ca_backend_snakeoil(), "This test is only usable with snakeoil") def test_snakeoil_cas_unlimited(self): self.set_quotas('cas', -1) self.create_snakeoil_cas(count=5) @testtools.skipIf(not is_ca_backend_snakeoil(), "This test is only usable with snakeoil") def test_snakeoil_cas_disabled(self): self.set_quotas('cas', 0) self.create_snakeoil_cas(expected_return=403) @testtools.skipIf(not is_ca_backend_snakeoil(), "This test is only usable with snakeoil") def test_snakeoil_cas_limited_one(self): self.set_quotas('cas', 1) self.create_snakeoil_cas(count=1) self.create_snakeoil_cas(expected_return=403) @testtools.skipIf(not is_ca_backend_snakeoil(), "This test is only usable with snakeoil") def test_snakeoil_cas_limited_five(self): self.set_quotas('cas', 5) self.create_snakeoil_cas(count=5) self.create_snakeoil_cas(expected_return=403) # ----------------------- Helper Functions --------------------------- def get_default_quota_data(self): return {"project_quotas": {"secrets": -1, "orders": -1, "containers": -1, "consumers": -1}} def set_quotas(self, resource, quota): """Utility function to set resource quotas""" self.quota_data["project_quotas"][resource] = quota request_model = quota_models.ProjectQuotaRequestModel( **self.quota_data) resp = self.quota_behaviors.set_project_quotas(self.project_id, request_model, user_name=service_admin) self.assertEqual(204, resp.status_code) def get_default_secret_data(self): return { "name": "AES key", "expiration": "2050-02-28T19:14:44.180394", "algorithm": "aes", "bit_length": 256, "mode": "cbc", "payload": 
"Z0Y2K2xMb0Yzb2hBOWFQUnB0KzZiUT09", "payload_content_type": "application/octet-stream", "payload_content_encoding": "base64", } def create_secrets(self, count=1, expected_return=201): """Utility function to create secrets""" secret_ref = None for _ in range(count): test_model = secret_models.SecretModel(**self.secret_data) resp, secret_ref = self.secret_behaviors.create_secret( test_model, user_name=admin_b) self.assertEqual(expected_return, resp.status_code) return secret_ref def get_container_req(self, secret_ref): return {"name": "test_container", "type": "generic", "secret_refs": [{'name': 'secret1', 'secret_ref': secret_ref}]} def create_containers(self, count=1, expected_return=201): """Utility function to create containers""" container_ref = None for _ in range(count): secret_ref = self.create_secrets() test_model = container_models.ContainerModel( **self.get_container_req(secret_ref)) resp, container_ref = self.container_behaviors.create_container( test_model, user_name=admin_b) self.assertEqual(expected_return, resp.status_code) return container_ref def get_default_order_data(self): return {'type': 'key', "meta": { "name": "barbican functional test order name", "algorithm": "aes", "bit_length": 256, "mode": "cbc"}} def create_orders(self, count=1, expected_return=202): """Utility function to create orders""" for _ in range(count): order_data = self.get_default_order_data() test_model = order_models.OrderModel(**order_data) resp, order_ref = self.order_behaviors.create_order( test_model, user_name=admin_b) self.assertEqual(expected_return, resp.status_code) if resp.status_code == 202: order_resp = self.order_behaviors.get_order( order_ref, user_name=admin_b) self.assertEqual(order_resp.status_code, 200) self.order_secrets.append(order_resp.model.secret_ref) def get_default_consumer_data(self): return {"name": "consumer_name", "URL": "consumer_url"} def create_consumers(self, count=1, expected_return=200): """Utility function to create consumers""" for _ in 
range(count): container_ref = self.create_containers() model = consumer_model.ConsumerModel( **self.get_default_consumer_data()) resp, consumer_dat = self.consumer_behaviors.create_consumer( model, container_ref, user_name=admin_b) self.assertEqual(expected_return, resp.status_code) def get_order_simple_cmc_request_data(self): return { 'type': 'certificate', 'meta': { 'request_type': 'simple-cmc', 'requestor_name': 'Barbican User', 'requestor_email': 'user@example.com', 'requestor_phone': '555-1212' } } def get_root_ca_ref(self): if self.root_ca_ref is not None: return self.root_ca_ref (resp, cas, total, next_ref, prev_ref) = self.ca_behaviors.get_cas() snake_name = 'barbican.plugin.snakeoil_ca.SnakeoilCACertificatePlugin' snake_plugin_ca_id = "Snakeoil CA" for item in cas: ca = self.ca_behaviors.get_ca(item) if ca.model.plugin_name == snake_name: if ca.model.plugin_ca_id == snake_plugin_ca_id: return item return None def get_snakeoil_subca_model(self): parent_ca_ref = self.get_root_ca_ref() return ca_models.CAModel( parent_ca_ref=parent_ca_ref, description="Test Snake Oil Subordinate CA", name="Subordinate CA", subject_dn="CN=Subordinate CA, O=example.com" ) def create_snakeoil_cas(self, count=1, expected_return=201): """Utility function to create snakeoil cas""" for _ in range(count): ca_model = self.get_snakeoil_subca_model() resp, ca_ref = self.ca_behaviors.create_ca(ca_model, user_name=admin_b) self.assertEqual(expected_return, resp.status_code) barbican-2.0.0/functionaltests/api/v1/functional/test_orders.py0000664000567000056710000006305612701405673026027 0ustar jenkinsjenkins00000000000000# Copyright (c) 2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import json import sys import time from testtools import testcase from barbican.tests import utils from functionaltests.api import base from functionaltests.api.v1.behaviors import container_behaviors from functionaltests.api.v1.behaviors import order_behaviors from functionaltests.api.v1.behaviors import secret_behaviors from functionaltests.api.v1.models import order_models from cryptography.hazmat import backends from cryptography.hazmat.primitives.asymmetric import padding from cryptography.hazmat.primitives import hashes from cryptography.hazmat.primitives import serialization def get_default_order_create_data(): return {'type': 'key', "meta": { "name": "barbican functional test secret name", "algorithm": "aes", "bit_length": 256, "mode": "cbc", } } # Any field with None will be created in the model with None as the value # but will be omitted in the final request (via the requests package) # to the server. # # Given that fact, order_create_nones_data is effectively an empty json request # to the server. 
def get_default_order_create_all_none_data():
    """Return an order-create body with every field explicitly None.

    Fields set to None are omitted when the request is serialized, so this
    is effectively an empty JSON request to the server.
    """
    return {
        'type': None,
        "meta": {
            "name": None,
            "algorithm": None,
            "bit_length": None,
            "mode": None,
        }
    }


def get_default_order_create_asymmetric_data():
    """Return a request body for a default asymmetric (RSA 2048) order."""
    return {
        'type': 'asymmetric',
        "meta": {
            "name": "barbican functional test asymmetric secret name",
            "algorithm": "rsa",
            "bit_length": 2048,
            "mode": "cbc",
        }
    }


@utils.parameterized_test_case
class OrdersTestCase(base.TestCase):
    """Functional tests for the /v1/orders resource."""

    def setUp(self):
        super(OrdersTestCase, self).setUp()
        self.behaviors = order_behaviors.OrderBehaviors(self.client)
        self.container_behaviors = container_behaviors.ContainerBehaviors(
            self.client)
        self.secret_behaviors = secret_behaviors.SecretBehaviors(self.client)

        self.create_default_data = get_default_order_create_data()
        self.create_all_none_data = get_default_order_create_all_none_data()
        self.asymmetric_data = get_default_order_create_asymmetric_data()

    def tearDown(self):
        self.behaviors.delete_all_created_orders()
        super(OrdersTestCase, self).tearDown()

    def wait_for_order(self, order_resp, order_ref):
        """Poll the order until it is ACTIVE, waiting at most ~4 seconds.

        :param order_resp: the most recent GET response for the order
        :param order_ref: href of the order to poll
        :return: the latest GET response for the order.  NOTE: the previous
            implementation dropped the refreshed response on the floor, so
            callers continued to inspect a stale (possibly still PENDING)
            response; callers below now rebind to the return value.
        """
        time_count = 1
        while order_resp.model.status != "ACTIVE" and time_count <= 4:
            time.sleep(1)
            time_count += 1
            order_resp = self.behaviors.get_order(order_ref)
        return order_resp

    @testcase.attr('positive')
    def test_order_create_w_out_name(self):
        """Create an order without the name attribute."""
        test_model = order_models.OrderModel(**self.create_default_data)
        test_model.name = None

        create_resp, order_ref = self.behaviors.create_order(test_model)
        self.assertEqual(202, create_resp.status_code)
        self.assertIsNotNone(order_ref)

    @testcase.attr('positive')
    def test_order_create_w_empty_name(self):
        """Create an order with the name attribute an empty string."""
        test_model = order_models.OrderModel(**self.create_default_data)
        test_model.name = ""

        create_resp, order_ref = self.behaviors.create_order(test_model)
        self.assertEqual(202, create_resp.status_code)
        self.assertIsNotNone(order_ref)

    @testcase.attr('positive')
    def test_orders_create_check_empty_name(self):
        """Create order with empty meta name.

        The resulting secret name should be a UUID.
        """
        # first create an order with defaults
        test_model = order_models.OrderModel(**self.create_default_data)
        test_model.meta['name'] = ""

        create_resp, order_ref = self.behaviors.create_order(test_model)

        # verify that the order was created successfully
        self.assertEqual(202, create_resp.status_code)
        self.assertIsNotNone(order_ref)

        # given the order href, retrieve the order
        order_resp = self.behaviors.get_order(order_ref)

        # verify that the get was successful
        self.assertEqual(200, order_resp.status_code)
        self.assertTrue(order_resp.model.status == "ACTIVE" or
                        order_resp.model.status == "PENDING")

        # PENDING orders may take a moment to be processed by the workers
        # when running tests with queue enabled
        order_resp = self.wait_for_order(order_resp, order_ref)

        # verify the new secret's name matches the name in the secret ref
        # in the newly created order.
        secret_resp = self.secret_behaviors.get_secret_metadata(
            order_resp.model.secret_ref)
        self.assertEqual(200, secret_resp.status_code)
        self.assertEqual(test_model.meta['name'], secret_resp.model.name)

    @testcase.attr('positive')
    def test_order_and_secret_metadata_same(self):
        """Checks that metadata from secret GET and order GET are the same.

        Covers checking that secret metadata from a get on the order and
        secret metadata from a get on the secret are the same. Assumes
        that the order status will be active and not pending.
        """
        test_model = order_models.OrderModel(**self.create_default_data)
        resp, order_ref = self.behaviors.create_order(test_model)
        self.assertEqual(202, resp.status_code)

        order_resp = self.behaviors.get_order(order_ref)
        self.assertEqual(200, order_resp.status_code)

        # PENDING orders may take a moment to be processed by the workers
        # when running tests with queue enabled
        order_resp = self.wait_for_order(order_resp, order_ref)

        secret_ref = order_resp.model.secret_ref

        secret_resp = self.secret_behaviors.get_secret_metadata(secret_ref)

        self.assertEqual(order_resp.model.meta['name'],
                         secret_resp.model.name,
                         'Names were not the same')
        self.assertEqual(order_resp.model.meta['algorithm'],
                         secret_resp.model.algorithm,
                         'Algorithms were not the same')
        self.assertEqual(order_resp.model.meta['bit_length'],
                         secret_resp.model.bit_length,
                         'Bit lengths were not the same')
        self.assertEqual(order_resp.model.meta['expiration'],
                         secret_resp.model.expiration,
                         'Expirations were not the same')
        self.assertEqual(order_resp.model.meta['mode'],
                         secret_resp.model.mode,
                         'Modes were not the same')

    @testcase.attr('negative')
    def test_order_get_order_that_doesnt_exist(self):
        """Covers case of getting a non-existent order."""
        # try to get a non-existent order
        order_resp = self.behaviors.get_order("a ref that does not exist")

        # verify that the order get failed
        self.assertEqual(404, order_resp.status_code)

    @testcase.attr('negative')
    def test_order_create_w_invalid_content_type(self):
        """Covers creating order with invalid content-type header."""
        test_model = order_models.OrderModel(**self.create_default_data)
        extra_headers = {"Content-Type": "crypto/boom"}

        create_resp, order_ref = self.behaviors.create_order(
            test_model, extra_headers=extra_headers)
        self.assertEqual(415, create_resp.status_code)
        self.assertIsNone(order_ref)

    @testcase.attr('negative')
    def test_order_create_all_none(self):
        """Covers order creation with empty JSON."""
        test_model = order_models.OrderModel(**self.create_all_none_data)

        create_resp, order_ref = self.behaviors.create_order(test_model)
        self.assertEqual(400, create_resp.status_code)
        self.assertIsNone(order_ref)

    @testcase.attr('negative')
    def test_order_create_empty_entries(self):
        """Covers order creation with empty-string JSON fields."""
        test_model = order_models.OrderModel(**self.create_all_none_data)
        test_model.meta['name'] = ""
        test_model.meta['algorithm'] = ""
        test_model.meta['mode'] = ""
        test_model.meta['bit_length'] = ""
        test_model.meta['payload_content_type'] = ""

        create_resp, order_ref = self.behaviors.create_order(test_model)
        self.assertEqual(400, create_resp.status_code)
        self.assertIsNone(order_ref)

    @testcase.attr('negative')
    def test_order_create_oversized_strings(self):
        """Covers order creation with oversized string fields."""
        test_model = order_models.OrderModel(**self.create_default_data)
        test_model.meta['name'] = base.TestCase.oversized_field
        test_model.meta['algorithm'] = base.TestCase.oversized_field
        test_model.meta['mode'] = base.TestCase.oversized_field

        create_resp, order_ref = self.behaviors.create_order(test_model)
        self.assertEqual(400, create_resp.status_code)
        self.assertIsNone(order_ref)

    @testcase.attr('negative')
    def test_order_create_error_message_on_invalid_order_create(self):
        """Related Launchpad issue: 1269594."""
        test_model = order_models.OrderModel(**self.create_default_data)
        test_model.meta['payload'] = "blarg!"

        resp, order_ref = self.behaviors.create_order(test_model)

        # Make sure we actually get a message back
        error_msg = json.loads(resp.content).get('title')

        self.assertEqual(400, resp.status_code)
        self.assertIsNotNone(error_msg)
        self.assertNotEqual('None', error_msg)

    @utils.parameterized_dataset({
        '8': [8],
        '64': [64],
        '128': [128],
        '192': [192],
        '256': [256],
        '1024': [1024],
        '2048': [2048],
        '4096': [4096]
    })
    @testcase.attr('positive')
    def test_order_create_valid_bit_length(self, bit_length):
        """Covers creating orders with various valid bit lengths."""
        test_model = order_models.OrderModel(**self.create_default_data)
        test_model.meta['bit_length'] = bit_length

        create_resp, order_ref = self.behaviors.create_order(test_model)
        self.assertEqual(202, create_resp.status_code)
        self.assertIsNotNone(order_ref)

    @utils.parameterized_dataset({
        'negative_maxint': [-sys.maxint],
        'negative_7': [-7],
        'negative_1': [-1],
        '0': [0],
        '1': [1],
        '7': [7],
        '129': [129],
        'none': [None],
        'empty': [''],
        'space': [' '],
        'over_signed_small_int': [32768]
    })
    @testcase.attr('negative')
    def test_order_create_invalid_bit_length(self, bit_length):
        """Covers creating orders with various invalid bit lengths."""
        test_model = order_models.OrderModel(**self.create_default_data)
        test_model.meta['bit_length'] = bit_length

        create_resp, order_ref = self.behaviors.create_order(test_model)
        self.assertEqual(400, create_resp.status_code)

    @utils.parameterized_dataset({
        'array': [['array']],
        'int': [123],
        'oversized_payload': [str(base.TestCase.oversized_payload)],
        'standard_payload': ['standard payload'],
        'empty': ['']
    })
    @testcase.attr('negative')
    def test_order_create_invalid_payload(self, payload):
        """Covers creating orders with various invalid payloads."""
        test_model = order_models.OrderModel(**self.create_default_data)
        test_model.meta['payload'] = payload

        create_resp, order_ref = self.behaviors.create_order(test_model)
        self.assertEqual(400, create_resp.status_code)

    @utils.parameterized_dataset({
        'alphanumeric': ['1f34ds'],
        'len_255': [base.TestCase.max_sized_field],
        'uuid': ['54262d9d-4bc7-4821-8df0-dc2ca8e112bb'],
        'punctuation': ['~!@#$%^&*()_+`-={}[]|:;<>,.?'],
        'empty': [""]
    })
    @testcase.attr('positive')
    def test_order_create_valid_name(self, name):
        """Covers creating orders with various valid names."""
        test_model = order_models.OrderModel(**self.create_default_data)
        test_model.meta['name'] = name

        create_resp, order_ref = self.behaviors.create_order(test_model)
        self.assertEqual(202, create_resp.status_code)
        self.assertIsNotNone(order_ref)

    @utils.parameterized_dataset({
        'int': [123]
    })
    @testcase.attr('negative')
    def test_order_create_invalid_name(self, name):
        """Covers creating orders with various invalid names."""
        test_model = order_models.OrderModel(**self.create_default_data)
        test_model.meta['name'] = name

        create_resp, order_ref = self.behaviors.create_order(test_model)
        self.assertEqual(400, create_resp.status_code)

    @utils.parameterized_dataset({
        'cbc': ['cbc']
    })
    @testcase.attr('positive')
    def test_order_create_valid_mode(self, mode):
        """Covers creating orders with various valid modes."""
        test_model = order_models.OrderModel(**self.create_default_data)
        test_model.meta['mode'] = mode

        create_resp, order_ref = self.behaviors.create_order(test_model)
        self.assertEqual(202, create_resp.status_code)
        self.assertIsNotNone(order_ref)

    @utils.parameterized_dataset({
        'int': [123]
    })
    @testcase.attr('negative')
    def test_order_create_invalid_mode(self, mode):
        """Covers creating orders with various invalid modes."""
        test_model = order_models.OrderModel(**self.create_default_data)
        test_model.meta['mode'] = mode

        create_resp, order_ref = self.behaviors.create_order(test_model)
        self.assertEqual(400, create_resp.status_code)

    @utils.parameterized_dataset({
        'aes': ['aes']
    })
    @testcase.attr('positive')
    def test_order_create_valid_algorithm(self, algorithm):
        """Covers creating orders with various valid algorithms."""
        test_model = order_models.OrderModel(**self.create_default_data)
        test_model.meta['algorithm'] = algorithm

        create_resp, order_ref = self.behaviors.create_order(test_model)
        self.assertEqual(202, create_resp.status_code)
        self.assertIsNotNone(order_ref)

    @utils.parameterized_dataset({
        'int': [123]
    })
    @testcase.attr('negative')
    def test_order_create_invalid_algorithm(self, algorithm):
        """Covers creating orders with various invalid algorithms."""
        test_model = order_models.OrderModel(**self.create_default_data)
        test_model.meta['algorithm'] = algorithm

        create_resp, order_ref = self.behaviors.create_order(test_model)
        self.assertEqual(400, create_resp.status_code)

    @utils.parameterized_dataset({
        'empty': [''],
        'text/plain': ['text/plain'],
        'text_plain_space_charset_utf8': ['text/plain; charset=utf-8'],
    })
    @testcase.attr('positive')
    def test_order_create_valid_payload_content_type(self, pct):
        """Covers order creation with various valid payload content types."""
        test_model = order_models.OrderModel(**self.create_default_data)
        test_model.meta['payload_content_type'] = pct

        create_resp, order_ref = self.behaviors.create_order(test_model)
        self.assertEqual(202, create_resp.status_code)
        self.assertIsNotNone(order_ref)

    @utils.parameterized_dataset({
        'int': [123],
        'invalid': ['invalid'],
        'oversized_string': [base.TestCase.oversized_field],
        'text': ['text'],
        'text_slash_with_no_subtype': ['text/'],
    })
    @testcase.attr('negative')
    def test_order_create_invalid_payload_content_type(self, pct):
        """Covers order creation with various invalid payload content types."""
        test_model = order_models.OrderModel(**self.create_default_data)
        test_model.meta['payload_content_type'] = pct

        create_resp, order_ref = self.behaviors.create_order(test_model)
        self.assertEqual(400, create_resp.status_code)

    @utils.parameterized_dataset({
        'negative_five_long_expire': {
            'timezone': '-05:00',
            'days': 5},
        'positive_five_long_expire': {
            'timezone': '+05:00',
            'days': 5},
        'negative_one_short_expire': {
            'timezone': '-01',
            'days': 1},
        'positive_one_short_expire': {
            'timezone': '+01',
            'days': 1}
    })
    @testcase.attr('positive')
    def test_order_create_valid_expiration(self, **kwargs):
        """Covers creating orders with various valid expiration data."""
        timestamp = utils.create_timestamp_w_tz_and_offset(**kwargs)

        test_model = order_models.OrderModel(**self.create_default_data)
        test_model.meta['expiration'] = timestamp

        create_resp, order_ref = self.behaviors.create_order(test_model)
        self.assertEqual(202, create_resp.status_code)
        self.assertIsNotNone(order_ref)

    @utils.parameterized_dataset({
        'malformed_timezone': {
            'timezone': '-5:00',
            'days': 5},
    })
    @testcase.attr('negative')
    def test_order_create_invalid_expiration(self, **kwargs):
        """Covers creating orders with various invalid expiration data."""
        timestamp = utils.create_timestamp_w_tz_and_offset(**kwargs)

        test_model = order_models.OrderModel(**self.create_default_data)
        test_model.meta['expiration'] = timestamp

        create_resp, order_ref = self.behaviors.create_order(test_model)
        self.assertEqual(400, create_resp.status_code)

    @testcase.attr('positive')
    def test_order_create_change_host_header(self):
        """Create an order with a (possibly) malicious host name in header."""
        # NOTE: the unused **kwargs parameter was dropped; this test takes
        # no parameterized input.
        test_model = order_models.OrderModel(**self.create_default_data)

        malicious_hostname = 'some.bad.server.com'
        changed_host_header = {'Host': malicious_hostname}

        resp, order_ref = self.behaviors.create_order(
            test_model, extra_headers=changed_host_header)
        self.assertEqual(202, resp.status_code)

        # get Location field from result and assert that it is NOT the
        # malicious one.
        regex = '.*{0}.*'.format(malicious_hostname)
        self.assertNotRegexpMatches(resp.headers['location'], regex)

    @testcase.attr('positive')
    def test_encryption_using_generated_key(self):
        """Tests functionality of a generated asymmetric key pair."""
        test_model = order_models.OrderModel(**self.asymmetric_data)
        create_resp, order_ref = self.behaviors.create_order(test_model)
        self.assertEqual(202, create_resp.status_code)

        order_resp = self.behaviors.get_order(order_ref)
        self.assertEqual(200, order_resp.status_code)

        container_resp = self.container_behaviors.get_container(
            order_resp.model.container_ref)
        self.assertEqual(200, container_resp.status_code)

        secret_dict = {}
        for secret in container_resp.model.secret_refs:
            self.assertIsNotNone(secret.secret_ref)
            secret_resp = self.secret_behaviors.get_secret(
                secret.secret_ref, "application/octet-stream")
            self.assertIsNotNone(secret_resp)
            secret_dict[secret.name] = secret_resp.content

        private_key = serialization.load_pem_private_key(
            secret_dict['private_key'],
            password=None,
            backend=backends.default_backend()
        )
        public_key = serialization.load_pem_public_key(
            secret_dict['public_key'],
            backend=backends.default_backend()
        )

        self.assertIsNotNone(private_key)
        self.assertIsNotNone(public_key)

        message = b'plaintext message'
        ciphertext = public_key.encrypt(
            message,
            padding.OAEP(
                mgf=padding.MGF1(algorithm=hashes.SHA1()),
                algorithm=hashes.SHA1(),
                label=None
            )
        )
        plaintext = private_key.decrypt(
            ciphertext,
            padding.OAEP(
                mgf=padding.MGF1(algorithm=hashes.SHA1()),
                algorithm=hashes.SHA1(),
                label=None
            )
        )
        self.assertEqual(message, plaintext)


class OrdersPagingTestCase(base.PagingTestCase):
    """Paging tests for the orders resource."""

    def setUp(self):
        super(OrdersPagingTestCase, self).setUp()
        self.behaviors = order_behaviors.OrderBehaviors(self.client)

        # make a local mutable copy of the default data to prevent
        # possible data contamination
        self.create_default_data = get_default_order_create_data()

    def tearDown(self):
        self.behaviors.delete_all_created_orders()
        super(OrdersPagingTestCase, self).tearDown()

    def create_model(self):
        return order_models.OrderModel(**self.create_default_data)

    def create_resources(self, count=0, model=None):
        for _ in range(count):
            self.behaviors.create_order(model)

    def get_resources(self, limit=10, offset=0, filter=None):
        return self.behaviors.get_orders(limit=limit, offset=offset,
                                         filter=filter)

    def set_filter_field(self, unique_str, model):
        """Set the meta field which we use in the get_resources filter."""
        model.meta['name'] = unique_str


class OrdersUnauthedTestCase(base.TestCase):
    """Orders tests exercised without (or with incomplete) authentication."""

    def setUp(self):
        super(OrdersUnauthedTestCase, self).setUp()
        self.behaviors = order_behaviors.OrderBehaviors(self.client)
        self.container_behaviors = container_behaviors.ContainerBehaviors(
            self.client)
        self.secret_behaviors = secret_behaviors.SecretBehaviors(self.client)

        self.create_default_data = get_default_order_create_data()
        self.dummy_order_ref = 'orders/dummy-7b86-4071-935d-ef6b83729200'
        self.dummy_project_id = 'dummy'

    def tearDown(self):
        self.behaviors.delete_all_created_orders()
        super(OrdersUnauthedTestCase, self).tearDown()

    @testcase.attr('negative', 'security')
    def test_order_create_unauthed_no_proj_id(self):
        """Attempt to create an order without a token or project id

        Should return 401
        """
        # NOTE: unpack the dict (**) -- previously the whole dict was passed
        # as the first positional argument of OrderModel, unlike every other
        # call site in this module.
        model = order_models.OrderModel(**self.create_default_data)
        resp, order_ref = self.behaviors.create_order(model, use_auth=False)
        self.assertEqual(401, resp.status_code)

    @testcase.attr('negative', 'security')
    def test_order_get_unauthed_no_proj_id(self):
        """Attempt to get an order without a token or project id

        Should return 401
        """
        resp = self.behaviors.get_order(self.dummy_order_ref, use_auth=False)
        self.assertEqual(401, resp.status_code)

    @testcase.attr('negative', 'security')
    def test_order_get_order_list_unauthed_no_proj_id(self):
        """Attempt to get the list of orders without a token or project id

        Should return 401
        """
        resp, orders, next_ref, prev_ref = self.behaviors.get_orders(
            use_auth=False
        )
        self.assertEqual(401, resp.status_code)
@testcase.attr('negative', 'security') def test_order_delete_unauthed_no_proj_id(self): """Attempt to delete an order without a token or project id Should return 401 """ resp = self.behaviors.delete_order( self.dummy_order_ref, expected_fail=True, use_auth=False ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_order_create_unauthed_with_proj_id(self): """Attempt to create an order with a project id, but no token Should return 401 """ model = order_models.OrderModel(self.create_default_data) headers = {'X-Project-Id': self.dummy_project_id} resp, order_ref = self.behaviors.create_order( model, extra_headers=headers, use_auth=False ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_order_get_unauthed_with_proj_id(self): """Attempt to get an order with a project id, but no token Should return 401 """ headers = {'X-Project-Id': self.dummy_project_id} resp = self.behaviors.get_order( self.dummy_order_ref, extra_headers=headers, use_auth=False ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_order_get_order_list_unauthed_with_proj_id(self): """Attempt to get the list of orders with a project id, but no token Should return 401 """ headers = {'X-Project-Id': self.dummy_project_id} resp, orders, next_ref, prev_ref = self.behaviors.get_orders( extra_headers=headers, use_auth=False ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_order_delete_unauthed_with_proj_id(self): """Attempt to delete an order with a project id, but no token Should return 401 """ headers = {'X-Project-Id': self.dummy_project_id} resp = self.behaviors.delete_order( self.dummy_order_ref, extra_headers=headers, expected_fail=True, use_auth=False ) self.assertEqual(401, resp.status_code) barbican-2.0.0/functionaltests/api/v1/functional/test_quotas.py0000664000567000056710000001632612701405674026044 0ustar jenkinsjenkins00000000000000# Copyright 
(c) 2015 Cisco Systems # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from testtools import testcase from functionaltests.api import base from functionaltests.api.v1.behaviors import quota_behaviors from functionaltests.api.v1.models import quota_models from functionaltests.common import config CONF = config.get_config() service_admin = CONF.identity.service_admin def get_set_project_quotas_request(): return {"project_quotas": {"secrets": 50, "orders": 10, "containers": 20}} class QuotasTestCase(base.TestCase): def setUp(self): super(QuotasTestCase, self).setUp() self.behaviors = quota_behaviors.QuotaBehaviors(self.client) self.project_id = self.behaviors.get_project_id_from_name( CONF.identity.username) def tearDown(self): self.behaviors.delete_all_created_quotas() super(QuotasTestCase, self).tearDown() def test_get_quotas_with_defaults(self): """Get effective quota information for own project""" resp = self.behaviors.get_quotas() self.assertEqual(200, resp.status_code) self.assertEqual(CONF.quotas.quota_secrets, resp.model.quotas.secrets) self.assertEqual(CONF.quotas.quota_orders, resp.model.quotas.orders) self.assertEqual(CONF.quotas.quota_containers, resp.model.quotas.containers) self.assertEqual(CONF.quotas.quota_consumers, resp.model.quotas.consumers) self.assertEqual(CONF.quotas.quota_cas, resp.model.quotas.cas) def test_get_project_quotas_by_project_id(self): """Get project quota information for specific project""" request_model = quota_models.ProjectQuotaRequestModel( 
**get_set_project_quotas_request()) resp = self.behaviors.set_project_quotas('44444', request_model, user_name=service_admin) self.assertEqual(204, resp.status_code) resp = self.behaviors.get_project_quotas('44444', user_name=service_admin) self.assertEqual(200, resp.status_code) self.assertEqual(50, resp.model.project_quotas.secrets) self.assertEqual(10, resp.model.project_quotas.orders) self.assertEqual(20, resp.model.project_quotas.containers) self.assertIsNone(resp.model.project_quotas.consumers) self.assertIsNone(resp.model.project_quotas.cas) def test_get_project_quotas_by_project_id_not_found(self): """Get project quota information for specific project""" resp = self.behaviors.get_project_quotas('dummy', user_name=service_admin) self.assertEqual(404, resp.status_code) def test_delete_project_quotas(self): """Delete project quota information""" request_model = quota_models.ProjectQuotaRequestModel( **get_set_project_quotas_request()) resp = self.behaviors.set_project_quotas('55555', request_model, user_name=service_admin) self.assertEqual(204, resp.status_code) resp = self.behaviors.delete_project_quotas('55555', user_name=service_admin) self.assertEqual(204, resp.status_code) def test_delete_project_quotas_not_found(self): """Get project quota information""" resp = self.behaviors.delete_project_quotas('dummy', user_name=service_admin) self.assertEqual(404, resp.status_code) @testcase.attr('no_parallel') class ProjectQuotasPagingTestCase(base.PagingTestCase): def setUp(self): super(ProjectQuotasPagingTestCase, self).setUp() self.behaviors = quota_behaviors.QuotaBehaviors(self.client) def tearDown(self): self.behaviors.delete_all_created_quotas() super(ProjectQuotasPagingTestCase, self).tearDown() def create_model(self): request_model = quota_models.ProjectQuotaRequestModel( **get_set_project_quotas_request()) return request_model def create_resources(self, count=0, model=None): for x in range(0, count): self.behaviors.set_project_quotas(str(x), model, 
user_name=service_admin) def get_resources(self, limit=10, offset=0, filter=None): return self.behaviors.get_project_quotas_list( limit=limit, offset=offset, user_name=service_admin) def set_filter_field(self, unique_str, model): """ProjectQuotas API does not support filter """ pass def test_get_project_quota_list_none(self): """Get list of all project quotas, when there are none""" resp, project_quotas_list, _, _ =\ self.behaviors.get_project_quotas_list(user_name=service_admin) self.assertEqual(200, resp.status_code) self.assertEqual([], project_quotas_list) def test_get_project_quota_list_one(self): """Get list of all project quotas, when there is one""" request_model = quota_models.ProjectQuotaRequestModel( **get_set_project_quotas_request()) resp = self.behaviors.set_project_quotas('11111', request_model, user_name=service_admin) self.assertEqual(204, resp.status_code) resp, project_quotas_list, _, _ =\ self.behaviors.get_project_quotas_list(user_name=service_admin) self.assertEqual(200, resp.status_code) self.assertEqual(1, len(project_quotas_list)) def test_get_project_quota_list_two(self): """Get list of all project quotas, when there is one""" request_model = quota_models.ProjectQuotaRequestModel( **get_set_project_quotas_request()) resp = self.behaviors.set_project_quotas('22222', request_model, user_name=service_admin) self.assertEqual(204, resp.status_code) resp = self.behaviors.set_project_quotas('33333', request_model, user_name=service_admin) self.assertEqual(204, resp.status_code) resp, project_quotas_list, _, _ =\ self.behaviors.get_project_quotas_list(user_name=service_admin) self.assertEqual(200, resp.status_code) self.assertEqual(2, len(project_quotas_list)) barbican-2.0.0/functionaltests/api/v1/functional/test_orders_rbac.py0000664000567000056710000001773412701405673027020 0ustar jenkinsjenkins00000000000000# Copyright (c) 2015 Rackspace # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in 
compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from barbican.tests import utils from functionaltests.api import base from functionaltests.api.v1.behaviors import order_behaviors from functionaltests.api.v1.models import order_models from functionaltests.common import config CONF = config.get_config() admin_a = CONF.rbac_users.admin_a creator_a = CONF.rbac_users.creator_a observer_a = CONF.rbac_users.observer_a auditor_a = CONF.rbac_users.auditor_a test_data_rbac_create_order = { 'with_admin_a': {'user': admin_a, 'admin': admin_a, 'expected_return': 202}, 'with_creator_a': {'user': creator_a, 'admin': admin_a, 'expected_return': 202}, 'with_observer_a': {'user': observer_a, 'admin': admin_a, 'expected_return': 403}, 'with_auditor_a': {'user': auditor_a, 'admin': admin_a, 'expected_return': 403}, } test_data_rbac_get_order = { 'with_admin_a': {'user': admin_a, 'admin': admin_a, 'expected_return': 200}, 'with_creator_a': {'user': creator_a, 'admin': admin_a, 'expected_return': 200}, 'with_observer_a': {'user': observer_a, 'admin': admin_a, 'expected_return': 200}, 'with_auditor_a': {'user': auditor_a, 'admin': admin_a, 'expected_return': 200}, } test_data_rbac_get_list_of_orders = { 'with_admin_a': {'user': admin_a, 'admin': admin_a, 'expected_return': 200}, 'with_creator_a': {'user': creator_a, 'admin': admin_a, 'expected_return': 200}, 'with_observer_a': {'user': observer_a, 'admin': admin_a, 'expected_return': 200}, 'with_auditor_a': {'user': auditor_a, 'admin': admin_a, 'expected_return': 403}, } test_data_rbac_delete_order = { 'with_admin_a': {'user': admin_a, 'admin': admin_a, 
'expected_return': 204}, 'with_creator_a': {'user': creator_a, 'admin': admin_a, 'expected_return': 403}, 'with_observer_a': {'user': observer_a, 'admin': admin_a, 'expected_return': 403}, 'with_auditor_a': {'user': auditor_a, 'admin': admin_a, 'expected_return': 403}, } def get_default_order_data(): return {'type': 'key', "meta": { "name": "barbican functional test order name", "algorithm": "aes", "bit_length": 256, "mode": "cbc", } } @utils.parameterized_test_case class RBACOrdersTestCase(base.TestCase): """Functional tests exercising RBAC Policies""" def setUp(self): super(RBACOrdersTestCase, self).setUp() self.order_behaviors = order_behaviors.OrderBehaviors(self.client) def tearDown(self): self.order_behaviors.delete_all_created_orders() super(RBACOrdersTestCase, self).tearDown() @utils.parameterized_dataset(test_data_rbac_create_order) def test_rbac_create_order(self, user, admin, expected_return): """Test RBAC for order creation Issue an order creation and verify that that the correct http return code comes back for the specified user. :param user: the user who will attempt to do the create :param admin: the admin of the group containing the user :param expected_return: the expected http return code """ test_model = order_models.OrderModel(**get_default_order_data()) resp, order_ref = self.order_behaviors.create_order(test_model, user_name=user, admin=admin) self.assertEqual(expected_return, resp.status_code) self.assertEqual(expected_return == 202, order_ref is not None) @utils.parameterized_dataset(test_data_rbac_get_order) def test_rbac_get_order(self, user, admin, expected_return): """Test RBAC for order get metadata Issue an order get and verify that that the correct http return code comes back for the specified user. The initial order will be created with the admin user to ensure that it gets created successfully. We don't want the order create to fail since we are only testing order get here. 
:param user: the user who will attempt to do the get :param admin: the admin of the group containing the user :param expected_return: the expected http return code """ order_ref = self._create_initial_order(admin=admin) resp = self.order_behaviors.get_order(order_ref, user_name=user) self.assertEqual(expected_return, resp.status_code) self.assertEqual(expected_return == 200, resp.content is not None) @utils.parameterized_dataset(test_data_rbac_get_list_of_orders) def test_rbac_get_list_of_orders(self, user, admin, expected_return): """Test RBAC for get order list Issue an get order list and verify that that the correct http return code comes back for the specified user. Some initial orders will be stored with the admin user to ensure that they get created successfully. We don't want the order creates to fail since we are only testing get order list. :param user: the user who will attempt to get the list of orders :param admin: the admin of the group containing the user :param expected_return: the expected http return code """ for i in range(3): order_ref = self._create_initial_order(admin=admin) self.assertIsNotNone(order_ref) resp, orders, next, prev = self.order_behaviors.get_orders( limit=10, offset=0, user_name=user) self.assertEqual(expected_return, resp.status_code) self.assertIsNotNone(orders) @utils.parameterized_dataset(test_data_rbac_delete_order) def test_rbac_delete_order(self, user, admin, expected_return): """Test RBAC for order delete Issue an order delete and verify that that the correct http return code comes back for the specified user. The initial order will be stored with the admin user to ensure that it gets created successfully. We don't want the order create to fail since we are only testing order delete here. 
:param user: the user who will attempt to do the delete :param admin: the admin of the group containing the user :param expected_return: the expected http return code """ order_ref = self._create_initial_order(admin=admin) resp = self.order_behaviors.delete_order(order_ref, user_name=user) self.assertEqual(expected_return, resp.status_code) def _create_initial_order(self, admin=admin_a, order_data=get_default_order_data()): """Utility function to create an order Some tests require a order to exist before they test certain things, so this function can be used to do that setup. :param admin: the admin user who will create the order :param order_data: the data for the order :return: href to the newly created order """ test_model = order_models.OrderModel(**order_data) resp, order_ref = self.order_behaviors.create_order(test_model, user_name=admin) self.assertEqual(202, resp.status_code) return order_ref barbican-2.0.0/functionaltests/api/v1/functional/test_quotas_rbac.py0000664000567000056710000001602512701405673027026 0ustar jenkinsjenkins00000000000000# Copyright (c) 2015 Cisco Systems # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
import uuid from barbican.tests import utils from functionaltests.api import base from functionaltests.api.v1.behaviors import quota_behaviors from functionaltests.api.v1.models import quota_models from functionaltests.common import config CONF = config.get_config() admin_a = CONF.rbac_users.admin_a creator_a = CONF.rbac_users.creator_a observer_a = CONF.rbac_users.observer_a auditor_a = CONF.rbac_users.auditor_a service_admin = CONF.identity.service_admin test_data_rbac_get_quotas = { 'with_service_admin': {'user': service_admin, 'admin': service_admin, 'expected_return': 200}, 'with_admin_a': {'user': admin_a, 'admin': admin_a, 'expected_return': 200}, 'with_creator_a': {'user': creator_a, 'admin': admin_a, 'expected_return': 200}, 'with_observer_a': {'user': observer_a, 'admin': admin_a, 'expected_return': 200}, 'with_auditor_a': {'user': auditor_a, 'admin': admin_a, 'expected_return': 200}, } test_data_rbac_get_project_quotas = { 'with_service_admin': {'user': service_admin, 'admin': service_admin, 'expected_return': 200}, 'with_admin_a': {'user': admin_a, 'admin': admin_a, 'expected_return': 403}, 'with_creator_a': {'user': creator_a, 'admin': admin_a, 'expected_return': 403}, 'with_observer_a': {'user': observer_a, 'admin': admin_a, 'expected_return': 403}, 'with_auditor_a': {'user': auditor_a, 'admin': admin_a, 'expected_return': 403}, } test_data_rbac_set_project_quotas = { 'with_service_admin': {'user': service_admin, 'admin': service_admin, 'expected_return': 204}, 'with_admin_a': {'user': admin_a, 'admin': admin_a, 'expected_return': 403}, 'with_creator_a': {'user': creator_a, 'admin': admin_a, 'expected_return': 403}, 'with_observer_a': {'user': observer_a, 'admin': admin_a, 'expected_return': 403}, 'with_auditor_a': {'user': auditor_a, 'admin': admin_a, 'expected_return': 403}, } test_data_rbac_delete_project_quotas = { 'with_service_admin': {'user': service_admin, 'admin': service_admin, 'expected_return': 204}, 'with_admin_a': {'user': admin_a, 
'admin': admin_a, 'expected_return': 403}, 'with_creator_a': {'user': creator_a, 'admin': admin_a, 'expected_return': 403}, 'with_observer_a': {'user': observer_a, 'admin': admin_a, 'expected_return': 403}, 'with_auditor_a': {'user': auditor_a, 'admin': admin_a, 'expected_return': 403}, } def get_set_project_quotas_request(): return {"project_quotas": {"secrets": 50, "orders": 10, "containers": 20}} @utils.parameterized_test_case class RBACQuotasTestCase(base.TestCase): """Functional tests exercising RBAC Policies""" def setUp(self): super(RBACQuotasTestCase, self).setUp() self.behaviors = quota_behaviors.QuotaBehaviors(self.client) def tearDown(self): self.behaviors.delete_all_created_quotas() super(RBACQuotasTestCase, self).tearDown() @utils.parameterized_dataset(test_data_rbac_get_quotas) def test_rbac_get_quotas(self, user, admin, expected_return): """Test RBAC for get quotas Issue a get quotas and verify that that the correct http return code comes back for the specified user. :param user: the user who will attempt to do the get :param admin: the admin of the group owning quotas :param expected_return: the expected http return code """ resp = self.behaviors.get_quotas(user_name=user) self.assertEqual(expected_return, resp.status_code) @utils.parameterized_dataset(test_data_rbac_get_project_quotas) def test_rbac_get_project_quotas(self, user, admin, expected_return): """Test RBAC for get project quotas Issue a get quotas and verify that that the correct http return code comes back for the specified user. 
:param user: the user who will attempt to do the get :param admin: the admin of the group owning quotas :param expected_return: the expected http return code """ resp, _, _, _ = self.behaviors.get_project_quotas_list(user_name=user) self.assertEqual(expected_return, resp.status_code) @utils.parameterized_dataset(test_data_rbac_set_project_quotas) def test_rbac_set_project_quotas(self, user, admin, expected_return): """Test RBAC for set project quotas Issue a set project quotas and verify that that the correct http return code comes back for the specified user. :param user: the user who will attempt to do the set :param admin: the admin of the group owning quotas :param expected_return: the expected http return code """ request_model = quota_models.ProjectQuotaRequestModel( **get_set_project_quotas_request()) project_id = str(uuid.uuid4()) resp = self.behaviors.set_project_quotas(project_id, request_model, user_name=user) self.assertEqual(expected_return, resp.status_code) @utils.parameterized_dataset(test_data_rbac_delete_project_quotas) def test_rbac_delete_project_quotas(self, user, admin, expected_return): """Test RBAC for delete project quotas Issue a set project quotas and verify that that the correct http return code comes back for the specified user. 
:param user: the user who will attempt to do the delete :param admin: the admin of the group owning quotas :param expected_return: the expected http return code """ request_model = quota_models.ProjectQuotaRequestModel( **get_set_project_quotas_request()) project_id = str(uuid.uuid4()) resp = self.behaviors.set_project_quotas(project_id, request_model, user_name=service_admin) resp = self.behaviors.delete_project_quotas(project_id, user_name=user) self.assertEqual(expected_return, resp.status_code) barbican-2.0.0/functionaltests/api/v1/functional/test_secretmeta.py0000664000567000056710000001665412701405673026667 0ustar jenkinsjenkins00000000000000# Copyright (c) 2016 IBM # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
import json from testtools import testcase import uuid from barbican.tests import utils from functionaltests.api import base from functionaltests.api.v1.behaviors import secret_behaviors from functionaltests.api.v1.behaviors import secretmeta_behaviors from functionaltests.api.v1.models import secret_models @utils.parameterized_test_case class SecretMetadataTestCase(base.TestCase): def setUp(self): super(SecretMetadataTestCase, self).setUp() self.secret_behaviors = secret_behaviors.SecretBehaviors(self.client) self.behaviors = secretmeta_behaviors.SecretMetadataBehaviors( self.client) self.default_secret_create_all_none_data = { "name": None, "expiration": None, "algorithm": None, "bit_length": None, "mode": None, "payload": None, "payload_content_type": None, "payload_content_encoding": None, } self.valid_metadata = { "metadata": { "latitude": "30.393805", "longitude": "-97.724077" } } self.invalid_metadata = { "metadataaaaaaaa": { "latitude": "30.393805", "longitude": "-97.724077" } } self.valid_metadatum_key = 'access-limit' self.valid_metadatum = { 'key': self.valid_metadatum_key, 'value': '2' } def tearDown(self): self.secret_behaviors.delete_all_created_secrets() super(SecretMetadataTestCase, self).tearDown() @testcase.attr('positive') def test_secret_metadata_create(self): test_model = secret_models.SecretModel( **self.default_secret_create_all_none_data) resp, secret_ref = self.secret_behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 201) meta_resp, metadata_ref = self.behaviors.create_or_update_metadata( secret_ref, self.valid_metadata) self.assertEqual(meta_resp.status_code, 201) self.assertEqual(secret_ref + '/metadata', metadata_ref) @testcase.attr('negative') def test_secret_metadata_create_no_secret(self): secret_ref = 'http://localhost:9311/secrets/%s' % uuid.uuid4().hex meta_resp, metadata_ref = self.behaviors.create_or_update_metadata( secret_ref, self.invalid_metadata) self.assertEqual(meta_resp.status_code, 404) 
@testcase.attr('positive') def test_secret_metadata_get(self): test_model = secret_models.SecretModel( **self.default_secret_create_all_none_data) resp, secret_ref = self.secret_behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 201) meta_resp, metadata_ref = self.behaviors.create_or_update_metadata( secret_ref, self.valid_metadata) self.assertEqual(meta_resp.status_code, 201) self.assertEqual(secret_ref + '/metadata', metadata_ref) get_resp = self.behaviors.get_metadata(secret_ref) self.assertEqual(get_resp.status_code, 200) self.assertEqual(get_resp.content, json.dumps(self.valid_metadata)) @testcase.attr('negative') def test_secret_metadata_get_no_secret(self): secret_ref = 'http://localhost:9311/secrets/%s' % uuid.uuid4().hex get_resp = self.behaviors.get_metadata(secret_ref) self.assertEqual(get_resp.status_code, 404) @testcase.attr('positive') def test_secret_metadatum_create(self): test_model = secret_models.SecretModel( **self.default_secret_create_all_none_data) resp, secret_ref = self.secret_behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 201) meta_resp, metadata_ref = self.behaviors.create_metadatum( secret_ref, self.valid_metadatum) self.assertEqual(meta_resp.status_code, 201) @testcase.attr('positive') def test_secret_metadatum_update(self): test_model = secret_models.SecretModel( **self.default_secret_create_all_none_data) resp, secret_ref = self.secret_behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 201) meta_resp, metadata_ref = self.behaviors.create_metadatum( secret_ref, self.valid_metadatum) self.assertEqual(meta_resp.status_code, 201) updated_meta = { 'key': self.valid_metadatum_key, 'value': '10' } put_resp = self.behaviors.update_metadatum( secret_ref, self.valid_metadatum_key, updated_meta) self.assertEqual(put_resp.status_code, 200) @testcase.attr('positive') def test_secret_metadatum_get(self): test_model = secret_models.SecretModel( 
**self.default_secret_create_all_none_data) resp, secret_ref = self.secret_behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 201) meta_resp, metadata_ref = self.behaviors.create_metadatum( secret_ref, self.valid_metadatum) self.assertEqual(meta_resp.status_code, 201) get_resp = self.behaviors.get_metadatum(secret_ref, self.valid_metadatum_key) self.assertEqual(get_resp.status_code, 200) self.assertEqual(get_resp.content, json.dumps(self.valid_metadatum, sort_keys=True)) @testcase.attr('negative') def test_secret_metadatum_get_wrong_key(self): test_model = secret_models.SecretModel( **self.default_secret_create_all_none_data) resp, secret_ref = self.secret_behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 201) meta_resp, metadata_ref = self.behaviors.create_metadatum( secret_ref, self.valid_metadatum) self.assertEqual(meta_resp.status_code, 201) get_resp = self.behaviors.get_metadatum(secret_ref, 'other_key') self.assertEqual(get_resp.status_code, 404) @testcase.attr('positive') def test_secret_metadatum_delete(self): test_model = secret_models.SecretModel( **self.default_secret_create_all_none_data) resp, secret_ref = self.secret_behaviors.create_secret(test_model) self.assertEqual(resp.status_code, 201) meta_resp, metadata_ref = self.behaviors.create_metadatum( secret_ref, self.valid_metadatum) self.assertEqual(meta_resp.status_code, 201) get_resp = self.behaviors.get_metadatum(secret_ref, self.valid_metadatum_key) self.assertEqual(get_resp.status_code, 200) delete_resp = self.behaviors.delete_metadatum(secret_ref, self.valid_metadatum_key) self.assertEqual(delete_resp.status_code, 204) barbican-2.0.0/functionaltests/api/v1/functional/test_consumers.py0000664000567000056710000003045112701405673026540 0ustar jenkinsjenkins00000000000000# Copyright (c) 2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import copy from testtools import testcase from functionaltests.api import base from functionaltests.api.v1.behaviors import consumer_behaviors from functionaltests.api.v1.behaviors import container_behaviors from functionaltests.api.v1.behaviors import secret_behaviors from functionaltests.api.v1.models import consumer_model from functionaltests.api.v1.models import container_models from functionaltests.api.v1.models import secret_models create_secret_data = { "name": "AES key", "expiration": "2018-02-28T19:14:44.180394", "algorithm": "aes", "bit_length": 256, "mode": "cbc", "payload": "gF6+lLoF3ohA9aPRpt+6bQ==", "payload_content_type": "application/octet-stream", "payload_content_encoding": "base64", } default_consumer_data = { "name": "consumername", "URL": "consumerURL" } create_generic_container_data = { "name": "containername", "type": "generic", "secret_refs": [] } create_cert_container_data = { "name": "A Certificate Container", "type": "certificate", "secret_refs": [] } dummy_project_id = 'dummy123' class ConsumersBaseTestCase(base.TestCase): def setUp(self): super(ConsumersBaseTestCase, self).setUp() self.secret_behaviors = secret_behaviors.SecretBehaviors(self.client) self.container_behaviors = container_behaviors.ContainerBehaviors( self.client ) self.consumer_behaviors = consumer_behaviors.ConsumerBehaviors( self.client ) self.consumer_data = copy.deepcopy(default_consumer_data) self.generic_container_ref = self._create_populated_generic_container() def tearDown(self): self.secret_behaviors.delete_all_created_secrets() 
self.container_behaviors.delete_all_created_containers() super(ConsumersBaseTestCase, self).tearDown() def _create_a_secret(self): secret_model = secret_models.SecretModel(**create_secret_data) resp, secret_ref = self.secret_behaviors.create_secret(secret_model) self.assertEqual(resp.status_code, 201) self.assertIsNotNone(secret_ref) return secret_ref def _add_secret_ref_to_container(self, container, name, ref): container['secret_refs'].append({"name": name, "secret_ref": ref}) def _create_populated_generic_container(self): # Set up two secrets secret_ref_1 = self._create_a_secret() secret_ref_2 = self._create_a_secret() # Create a generic container with our secrets generic_container_data = copy.deepcopy(create_generic_container_data) self._add_secret_ref_to_container(generic_container_data, 'secret_ref_1', secret_ref_1) self._add_secret_ref_to_container(generic_container_data, 'secret_ref_2', secret_ref_2) container_model = container_models.ContainerModel( **generic_container_data ) resp, container_ref = self.container_behaviors.create_container( container_model ) self.assertEqual(resp.status_code, 201) self.assertIsNotNone(container_ref) return container_ref class ConsumersCertContainerTestCase(ConsumersBaseTestCase): def setUp(self): super(ConsumersCertContainerTestCase, self).setUp() self.container_default_data = copy.deepcopy(create_cert_container_data) self.consumer_default_data = copy.deepcopy(default_consumer_data) def _create_consumer(self, container_ref): self.consumer_test_model = consumer_model.ConsumerModel( **self.consumer_default_data) resp, consumer_data = self.consumer_behaviors.create_consumer( self.consumer_test_model, container_ref) self.assertEqual(resp.status_code, 200) self.assertIsNotNone(consumer_data) return consumer_data def _get_decrypted_secrets_from_container(self, container_href): get_resp = self.container_behaviors.get_container(container_href) self.assertEqual(get_resp.status_code, 200) private_key_ref = 
get_resp.model.secret_refs[0].secret_ref tls_cert_ref = get_resp.model.secret_refs[1].secret_ref passphrase_ref = get_resp.model.secret_refs[2].secret_ref intermediates_ref = get_resp.model.secret_refs[3].secret_ref private_key = self.secret_behaviors.get_secret( private_key_ref, 'application/octet-stream') tls_cert = self.secret_behaviors.get_secret( tls_cert_ref, 'application/octet-stream') passphrase = self.secret_behaviors.get_secret( passphrase_ref, 'application/octet-stream') intermediates = self.secret_behaviors.get_secret( intermediates_ref, 'application/octet-stream') return private_key, tls_cert, passphrase, intermediates def _create_populated_cert_container(self): dummy_private_key_ref = self._create_a_secret() dummy_tls_certificate_ref = self._create_a_secret() dummy_passphrase_ref = self._create_a_secret() dummy_intermediates_ref = self._create_a_secret() container_ref = self._create_cert_container(dummy_private_key_ref, dummy_tls_certificate_ref, dummy_passphrase_ref, dummy_intermediates_ref) return container_ref def _create_cert_container(self, private_key_ref, tls_certificate_ref, passphrase_ref=None, intermediates_ref=None): container_data = copy.deepcopy(self.container_default_data) self._add_secret_ref_to_container(container_data, "certificate", tls_certificate_ref) self._add_secret_ref_to_container(container_data, "private_key", private_key_ref) if passphrase_ref: self._add_secret_ref_to_container(container_data, "private_key_passphrase", passphrase_ref) if intermediates_ref: self._add_secret_ref_to_container(container_data, "intermediates", intermediates_ref) test_model = container_models.ContainerModel( **container_data) resp, container_ref = self.container_behaviors.create_container( test_model) self.assertEqual(resp.status_code, 201) self.assertIsNotNone(container_ref) return container_ref def _deregister_consumer(self, container_ref): resp, consumer_data = self.consumer_behaviors.delete_consumer( self.consumer_test_model, container_ref ) 
self.assertEqual(resp.status_code, 200) self.assertIsNotNone(consumer_data) self.assertNotIn(self.consumer_test_model.name, consumer_data) self.assertNotIn(self.consumer_test_model.URL, consumer_data) return consumer_data @testcase.attr('positive') def test_consumer_of_cert_container_full_flow(self): """Simulate the typical flow for a consumer of a cert container. First, create a container and load it up with a key, cert and passphrase. Second, register as a consumer for that container Third, fetch the data from that container Finally, deregister as a consumer for the container """ # create the populated container with a cert container_ref = self._create_populated_cert_container() self.assertIsNotNone(container_ref) # register as a consumer here consumer_data = self._create_consumer(container_ref) self.assertIsNotNone(consumer_data) self.assertEqual(1, len(consumer_data)) # fetch the cert info from the container pk_response, cert_response, passphrase_response, inters_response =\ self._get_decrypted_secrets_from_container(container_ref) self.assertIsNotNone(pk_response) self.assertIsNotNone(cert_response) self.assertIsNotNone(passphrase_response) self.assertIsNotNone(inters_response) # deregister as a consumer updated_consumer_data = self._deregister_consumer(container_ref) self.assertIsNotNone(updated_consumer_data) self.assertEqual(0, len(updated_consumer_data)) class ConsumersAuthedTestCase(ConsumersBaseTestCase): @testcase.attr('negative', 'security') def test_consumer_create_authed(self): """Create a consumer as an authenticated user Should return 200 """ model = consumer_model.ConsumerModel(**self.consumer_data) resp, consumer_dat = self.consumer_behaviors.create_consumer( model, self.generic_container_ref, use_auth=True ) self.assertEqual(200, resp.status_code) class ConsumersUnauthedTestCase(ConsumersBaseTestCase): @testcase.attr('negative', 'security') def test_consumer_create_unauthed_no_proj_id(self): """Attempt to create a consumer without a token or 
project id Should return 401 """ model = consumer_model.ConsumerModel(**self.consumer_data) resp, consumer_dat = self.consumer_behaviors.create_consumer( model, self.generic_container_ref, use_auth=False ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_consumer_get_list_unauthed_no_proj_id(self): """Attempt to get the list of consumers w/o a token or project id Should return 401 """ resp, consumers, next_ref, prev_ref = ( self.consumer_behaviors.get_consumers( self.generic_container_ref, use_auth=False ) ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_consumer_delete_unauthed_no_proj_id(self): """Attempt to delete a consumer without a token or project id Should return 401 """ resp, consumer_dat = self.consumer_behaviors.delete_consumer( None, self.generic_container_ref, use_auth=False ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_consumer_create_unauthed_with_proj_id(self): """Attempt to create a consumer with a project id, but no token Should return 401 """ model = consumer_model.ConsumerModel(**self.consumer_data) headers = {'X-Project-Id': dummy_project_id} resp, consumer_dat = self.consumer_behaviors.create_consumer( model, self.generic_container_ref, extra_headers=headers, use_auth=False ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_consumer_get_list_unauthed_with_proj_id(self): """Attempt to get the list of consumers with a project id but no token Should return 401 """ headers = {'X-Project-Id': dummy_project_id} resp, consumers, next_ref, prev_ref = ( self.consumer_behaviors.get_consumers( self.generic_container_ref, extra_headers=headers, use_auth=False ) ) self.assertEqual(401, resp.status_code) @testcase.attr('negative', 'security') def test_consumer_delete_unauthed_with_proj_id(self): """Attempt to delete a consumer with a project id, but no token Should return 401 """ headers = 
{'X-Project-Id': dummy_project_id} resp, consumer_dat = self.consumer_behaviors.delete_consumer( None, self.generic_container_ref, extra_headers=headers, use_auth=False ) self.assertEqual(401, resp.status_code) barbican-2.0.0/functionaltests/api/v1/functional/test_acls_rbac.py0000664000567000056710000003324712701405673026441 0ustar jenkinsjenkins00000000000000# Copyright (c) 2015 Cisco Systems # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from barbican.tests import utils from functionaltests.api import base from functionaltests.api.v1.behaviors import acl_behaviors from functionaltests.api.v1.behaviors import container_behaviors from functionaltests.api.v1.behaviors import secret_behaviors from functionaltests.api.v1.models import acl_models from functionaltests.api.v1.models import container_models from functionaltests.api.v1.models import secret_models from functionaltests.common import config CONF = config.get_config() admin_a = CONF.rbac_users.admin_a creator_a = CONF.rbac_users.creator_a observer_a = CONF.rbac_users.observer_a auditor_a = CONF.rbac_users.auditor_a admin_b = CONF.rbac_users.admin_b observer_b = CONF.rbac_users.observer_b def get_acl_default(): return {'read': {'project-access': True}} def get_acl_one(): return {'read': {'users': ['reader1'], 'project-access': False}} def get_acl_two(): return {'read': {'users': ['reader2'], 'project-access': False}} test_data_set_secret_acl = { 'with_admin_a': {'user': admin_a, 'expected_return': 200}, 'with_creator_a': 
{'user': creator_a, 'expected_return': 200}, 'with_observer_a': {'user': observer_a, 'expected_return': 403}, 'with_auditor_a': {'user': auditor_a, 'expected_return': 403}, 'with_admin_b': {'user': admin_b, 'expected_return': 403}, 'with_observer_b': {'user': observer_b, 'expected_return': 403}, } test_data_get_secret_acl = { 'with_admin_a': {'user': admin_a, 'expected_return': 200}, 'with_creator_a': {'user': creator_a, 'expected_return': 200}, 'with_observer_a': {'user': observer_a, 'expected_return': 200}, 'with_auditor_a': {'user': auditor_a, 'expected_return': 403}, 'with_admin_b': {'user': admin_b, 'expected_return': 403}, 'with_observer_b': {'user': observer_b, 'expected_return': 403}, } test_data_update_secret_acl = { 'with_admin_a': {'user': admin_a, 'expected_return': 200}, 'with_creator_a': {'user': creator_a, 'expected_return': 200}, 'with_observer_a': {'user': observer_a, 'expected_return': 403}, 'with_auditor_a': {'user': auditor_a, 'expected_return': 403}, 'with_admin_b': {'user': admin_b, 'expected_return': 403}, 'with_observer_b': {'user': observer_b, 'expected_return': 403}, } test_data_delete_secret_acl = { 'with_admin_a': {'user': admin_a, 'expected_return': 200}, 'with_creator_a': {'user': creator_a, 'expected_return': 200}, 'with_observer_a': {'user': observer_a, 'expected_return': 403}, 'with_auditor_a': {'user': auditor_a, 'expected_return': 403}, 'with_admin_b': {'user': admin_b, 'expected_return': 403}, 'with_observer_b': {'user': observer_b, 'expected_return': 403}, } test_data_set_container_acl = { 'with_admin_a': {'user': admin_a, 'expected_return': 200}, 'with_creator_a': {'user': creator_a, 'expected_return': 200}, 'with_observer_a': {'user': observer_a, 'expected_return': 403}, 'with_auditor_a': {'user': auditor_a, 'expected_return': 403}, 'with_admin_b': {'user': admin_b, 'expected_return': 403}, 'with_observer_b': {'user': observer_b, 'expected_return': 403}, } test_data_get_container_acl = { 'with_admin_a': {'user': admin_a, 
'expected_return': 200}, 'with_creator_a': {'user': creator_a, 'expected_return': 200}, 'with_observer_a': {'user': observer_a, 'expected_return': 200}, 'with_auditor_a': {'user': auditor_a, 'expected_return': 403}, 'with_admin_b': {'user': admin_b, 'expected_return': 403}, 'with_observer_b': {'user': observer_b, 'expected_return': 403}, } test_data_update_container_acl = { 'with_admin_a': {'user': admin_a, 'expected_return': 200}, 'with_creator_a': {'user': creator_a, 'expected_return': 200}, 'with_observer_a': {'user': observer_a, 'expected_return': 403}, 'with_auditor_a': {'user': auditor_a, 'expected_return': 403}, 'with_admin_b': {'user': admin_b, 'expected_return': 403}, 'with_observer_b': {'user': observer_b, 'expected_return': 403}, } test_data_delete_container_acl = { 'with_admin_a': {'user': admin_a, 'expected_return': 200}, 'with_creator_a': {'user': creator_a, 'expected_return': 200}, 'with_observer_a': {'user': observer_a, 'expected_return': 403}, 'with_auditor_a': {'user': auditor_a, 'expected_return': 403}, 'with_admin_b': {'user': admin_b, 'expected_return': 403}, 'with_observer_b': {'user': observer_b, 'expected_return': 403}, } @utils.parameterized_test_case class RBACAclsTestCase(base.TestCase): """Functional tests exercising RBAC Policies for ACL Operations""" def setUp(self): super(RBACAclsTestCase, self).setUp() self.secret_behaviors = secret_behaviors.SecretBehaviors(self.client) self.container_behaviors = container_behaviors.ContainerBehaviors( self.client) self.acl_behaviors = acl_behaviors.AclBehaviors(self.client) def tearDown(self): self.acl_behaviors.delete_all_created_acls() self.secret_behaviors.delete_all_created_secrets() self.container_behaviors.delete_all_created_containers() super(RBACAclsTestCase, self).tearDown() @utils.parameterized_dataset(test_data_set_secret_acl) def test_set_secret_acl(self, user, expected_return): secret_ref = self.store_secret() status = self.set_secret_acl(secret_ref, get_acl_one(), user_name=user) 
self.assertEqual(expected_return, status) @utils.parameterized_dataset(test_data_get_secret_acl) def test_get_secret_acl(self, user, expected_return): secret_ref = self.store_secret() status = self.set_secret_acl(secret_ref, get_acl_one()) self.assertEqual(200, status) resp = self.acl_behaviors.get_acl(secret_ref + '/acl', user_name=user) self.assertEqual(expected_return, resp.status_code) if expected_return == 200: self.assertIn('reader1', resp.model.read['users']) else: self.assertTrue(resp.model is None) @utils.parameterized_dataset(test_data_update_secret_acl) def test_update_secret_acl(self, user, expected_return): secret_ref = self.store_secret() status = self.set_secret_acl(secret_ref, get_acl_one()) self.assertEqual(200, status) status, model = self.update_secret_acl(secret_ref, get_acl_two(), user_name=user) self.assertEqual(expected_return, status) get_resp = self.acl_behaviors.get_acl(secret_ref + '/acl', user_name=admin_a) if expected_return == 200: self.assertTrue(model.acl_ref is not None) # verify update happened self.assertIn('reader2', get_resp.model.read['users']) else: self.assertTrue(model is None) # verify no update happened self.assertIn('reader1', get_resp.model.read['users']) @utils.parameterized_dataset(test_data_delete_secret_acl) def test_delete_secret_acl(self, user, expected_return): secret_ref = self.store_secret() status = self.set_secret_acl(secret_ref, get_acl_one()) self.assertEqual(200, status) resp = self.acl_behaviors.delete_acl(secret_ref + '/acl', user_name=user) self.assertEqual(expected_return, resp.status_code) get_resp = self.acl_behaviors.get_acl(secret_ref + '/acl', user_name=admin_a) if expected_return == 200: # verify delete happened (return to default ACL) self.assertTrue(get_resp.model.read['project-access']) else: # verify no delete happened self.assertIn('reader1', get_resp.model.read['users']) @utils.parameterized_dataset(test_data_set_container_acl) def test_set_container_acl(self, user, expected_return): 
container_ref = self.store_container() status = self.set_container_acl(container_ref, get_acl_one(), user_name=user) self.assertEqual(expected_return, status) @utils.parameterized_dataset(test_data_get_container_acl) def test_get_container_acl(self, user, expected_return): container_ref = self.store_container() status = self.set_container_acl(container_ref, get_acl_one()) self.assertEqual(200, status) resp = self.acl_behaviors.get_acl(container_ref + '/acl', user_name=user) self.assertEqual(expected_return, resp.status_code) if expected_return == 200: self.assertIn('reader1', resp.model.read['users']) else: self.assertTrue(resp.model is None) @utils.parameterized_dataset(test_data_update_container_acl) def test_update_container_acl(self, user, expected_return): container_ref = self.store_container() status = self.set_container_acl(container_ref, get_acl_one()) self.assertEqual(200, status) status, model = self.update_container_acl(container_ref, get_acl_two(), user_name=user) self.assertEqual(expected_return, status) get_resp = self.acl_behaviors.get_acl(container_ref + '/acl', user_name=admin_a) if expected_return == 200: self.assertTrue(model.acl_ref is not None) # verify update happened self.assertIn('reader2', get_resp.model.read['users']) else: self.assertTrue(model is None) # verify no update happened self.assertIn('reader1', get_resp.model.read['users']) @utils.parameterized_dataset(test_data_delete_container_acl) def test_delete_container_acl(self, user, expected_return): container_ref = self.store_container() status = self.set_container_acl(container_ref, get_acl_one()) self.assertEqual(200, status) resp = self.acl_behaviors.delete_acl(container_ref + '/acl', user_name=user) self.assertEqual(expected_return, resp.status_code) get_resp = self.acl_behaviors.get_acl(container_ref + '/acl', user_name=admin_a) if expected_return == 200: # verify delete happened (return to default ACL) self.assertTrue(get_resp.model.read['project-access']) else: # verify no 
delete happened self.assertIn('reader1', get_resp.model.read['users']) # ----------------------- Helper Functions --------------------------- def store_secret(self, user_name=creator_a, admin=admin_a): test_model = secret_models.SecretModel( **get_default_secret_data()) resp, secret_ref = self.secret_behaviors.create_secret( test_model, user_name=user_name, admin=admin) self.assertEqual(201, resp.status_code) return secret_ref def set_secret_acl(self, secret_ref, acl, user_name=creator_a): test_model = acl_models.AclModel(**acl) resp = self.acl_behaviors.create_acl( secret_ref, test_model, user_name=user_name) return resp.status_code def update_secret_acl(self, secret_ref, acl, user_name=creator_a): test_model = acl_models.AclModel(**acl) resp = self.acl_behaviors.update_acl( secret_ref + '/acl', test_model, user_name=user_name) return resp.status_code, resp.model def store_container(self, user_name=creator_a, admin=admin_a): secret_ref = self.store_secret(user_name=user_name, admin=admin) test_model = container_models.ContainerModel( **get_container_req(secret_ref)) resp, container_ref = self.container_behaviors.create_container( test_model, user_name=user_name, admin=admin) self.assertEqual(201, resp.status_code) return container_ref def set_container_acl(self, container_ref, acl, user_name=creator_a): test_model = acl_models.AclModel(**acl) resp = self.acl_behaviors.create_acl( container_ref, test_model, user_name=user_name) return resp.status_code def update_container_acl(self, container_ref, acl, user_name=creator_a): test_model = acl_models.AclModel(**acl) resp = self.acl_behaviors.update_acl( container_ref + '/acl', test_model, user_name=user_name) return resp.status_code, resp.model # ----------------------- Support Functions --------------------------- def get_default_secret_data(): return { "name": "AES key", "expiration": "2050-02-28T19:14:44.180394", "algorithm": "aes", "bit_length": 256, "mode": "cbc", "payload": get_default_payload(), 
"payload_content_type": "application/octet-stream", "payload_content_encoding": "base64", } def get_default_payload(): return 'Z0Y2K2xMb0Yzb2hBOWFQUnB0KzZiUT09' def get_container_req(secret_ref): return {"name": "testcontainer", "type": "generic", "secret_refs": [{'name': 'secret1', 'secret_ref': secret_ref}]} barbican-2.0.0/functionaltests/api/v1/functional/test_rsa.py0000664000567000056710000005714112701405673025314 0ustar jenkinsjenkins00000000000000# Copyright (c) 2015 Cisco Systems # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
import base64 from Crypto.PublicKey import RSA from OpenSSL import crypto from testtools import testcase from barbican.tests import keys from barbican.tests import utils from functionaltests.api import base from functionaltests.api.v1.behaviors import container_behaviors from functionaltests.api.v1.behaviors import order_behaviors from functionaltests.api.v1.behaviors import secret_behaviors from functionaltests.api.v1.models import container_models from functionaltests.api.v1.models import order_models from functionaltests.api.v1.models import secret_models def get_private_key_req(payload): return {'name': 'myprivatekey', 'payload_content_type': 'application/octet-stream', 'payload_content_encoding': 'base64', 'algorithm': 'rsa', 'bit_length': 2048, 'secret_type': 'private', 'payload': payload} def get_public_key_req(payload): return {'name': 'mypublickey', 'payload_content_type': 'application/octet-stream', 'payload_content_encoding': 'base64', 'algorithm': 'rsa', 'bit_length': 2048, 'secret_type': 'public', 'payload': payload} def get_passphrase_req(passphrase): return {'name': 'mypassphrase', 'payload_content_type': 'text/plain', 'secret_type': 'passphrase', 'payload': passphrase} def get_container_req(public_key_ref, private_key_ref, passphrase=None): request = {"name": "rsacontainer", "type": "rsa", "secret_refs": [ {'name': 'public_key', 'secret_ref': public_key_ref}, {'name': 'private_key', 'secret_ref': private_key_ref}]} if passphrase: request["secret_refs"].append( {'name': 'private_key_passphrase', 'secret_ref': passphrase}) return request def get_certificate_req(payload): return {'name': 'mycertificate', 'payload_content_type': 'application/octet-stream', 'payload_content_encoding': 'base64', 'secret_type': 'certificate', 'payload': payload} def get_order_rsa_container(): return {'type': 'asymmetric', "meta": {"name": "ordered rsacontainer", "algorithm": "rsa", "bit_length": 2048, "mode": "cbc"}} def get_order_rsa_container_with_passphrase(): return 
{'type': 'asymmetric', "meta": {"name": "ordered rsacontainer", "algorithm": "rsa", "bit_length": 2048, "passphrase": "password", "mode": "cbc"}} def get_order_certificate(container_ref): return {'type': 'certificate', 'meta': {'request_type': 'stored-key', 'container_ref': container_ref, 'subject_dn': 'cn=server.example.com,o=example.com', 'requestor_name': 'Barbican User', 'requestor_email': 'user@example.com', 'requestor_phone': '555-1212'}} def get_order_certificate_simple_cmc(csr): return {'type': 'certificate', 'meta': {'request_type': 'simple-cmc', 'requestor_name': 'Barbican User', 'requestor_email': 'user@example.com', 'requestor_phone': '555-1212', 'request_data': csr}} @utils.parameterized_test_case class RSATestCase(base.TestCase): """Positive test cases for all ways of working with RSA keys These tests are meant to be 'real'. All input is created using OpenSSL commands and all results verified by OpenSSL. """ def setUp(self): super(RSATestCase, self).setUp() self.secret_behaviors = secret_behaviors.SecretBehaviors(self.client) self.container_behaviors = container_behaviors.ContainerBehaviors( self.client) self.order_behaviors = order_behaviors.OrderBehaviors(self.client) def tearDown(self): self.secret_behaviors.delete_all_created_secrets() self.container_behaviors.delete_all_created_containers() self.order_behaviors.delete_all_created_orders() super(RSATestCase, self).tearDown() @testcase.attr('positive') def test_rsa_check_input_keys(self): """Verify the input keys for test cases""" # prove pyOpenSSL can parse the original private key pem = keys.get_private_key_pem() crypto.load_privatekey(crypto.FILETYPE_PEM, pem) # prove pyCrypto can parse the original public key pem = keys.get_public_key_pem() RSA.importKey(pem) # prove pyOpenSSL can parse the original encrypted private key pem = keys.get_encrypted_private_key_pem() passphrase = keys.get_passphrase_txt() crypto.load_privatekey(crypto.FILETYPE_PEM, pem, passphrase) # prove OpenSSL can parse the 
original certificate pem = keys.get_certificate_pem() crypto.load_certificate(crypto.FILETYPE_PEM, pem) @testcase.attr('positive') def test_rsa_store_and_get_private_key(self): """Post and Get for private key""" key_ref = self.store_private_key() key = self.get_private_key(key_ref) self.verify_private_key_equal(key) @testcase.attr('positive') def test_rsa_store_and_get_public_key(self): """Post and Get for public key""" key_ref = self.store_public_key() key = self.get_public_key(key_ref) self.verify_public_key_equal(key) @testcase.attr('positive') def test_rsa_two_step_store_and_get_private_key(self): """Post, Put, and Get for private key""" key_ref = self.create_private_key() self.update_private_key(key_ref) key = self.get_private_key(key_ref) self.verify_private_key_equal(key) @testcase.attr('positive') def test_rsa_two_step_store_and_get_public_key(self): """Post, Put, and Get for public key""" key_ref = self.create_public_key() self.update_public_key(key_ref) key = self.get_public_key(key_ref) self.verify_public_key_equal(key) @testcase.attr('positive') def test_rsa_store_and_get_passphrase(self): """Post and Get for passphrase""" phrase_ref = self.store_passphrase() phrase = self.get_passphrase(phrase_ref) self.verify_passphrase_equal(phrase) @testcase.attr('positive') def test_rsa_store_and_get_certificate_secret(self): """Post and Get for certificate""" cert_ref = self.store_certificate() cert = self.get_certificate(cert_ref) self.verify_certificate_equal(cert) @testcase.attr('positive') def test_rsa_two_step_store_and_get_certificate_secret(self): """Post, Put, and Get for certificate""" cert_ref = self.create_certificate() self.update_certificate(cert_ref) cert = self.get_certificate(cert_ref) self.verify_certificate_equal(cert) @testcase.attr('positive') def test_rsa_store_and_get_container(self): """Post and Get for container""" public_ref = self.store_public_key() private_ref = self.store_private_key() container_ref = self.store_container(public_ref, 
private_ref) secrets = self.get_container(container_ref) self.verify_container_keys_equal(secrets) @testcase.attr('positive') def test_rsa_store_and_get_container_with_passphrase(self): """Post and Get for container with passphrase""" public_ref = self.store_public_key() private_ref = self.store_encrypted_private_key() phrase_ref = self.store_passphrase() container_ref = self.store_container(public_ref, private_ref, phrase_ref) secrets = self.get_container(container_ref) self.verify_container_keys_equal(secrets, with_passphrase=True) @testcase.attr('positive') def test_rsa_order_container(self): """Post an order for a container""" order_ref = self.order_container() container_ref = self.get_container_order(order_ref) secrets = self.get_container(container_ref) self.verify_container_keys_valid(secrets) @testcase.attr('positive') def test_rsa_order_container_with_passphrase(self): """Post an order for a container with a passphrase""" order_ref = self.order_container(with_passphrase=True) container_ref = self.get_container_order(order_ref) secrets = self.get_container(container_ref) self.verify_container_keys_valid(secrets, with_passphrase=True) @testcase.attr('positive') def test_rsa_store_container_from_two_step_secrets(self): """Post an order for a certificate""" public_ref = self.create_public_key() self.update_public_key(public_ref) private_ref = self.create_private_key() self.update_private_key(private_ref) container_ref = self.store_container(public_ref, private_ref) secrets = self.get_container(container_ref) self.verify_container_keys_equal(secrets) @testcase.attr('positive') def test_rsa_order_certificate_from_ordered_container(self): """Post an order for a certificate""" order_ref = self.order_container() container_ref = self.get_container_order(order_ref) secrets = self.get_container(container_ref) self.verify_container_keys_valid(secrets) order_ref = self.order_certificate(container_ref) order_status = self.get_certificate_order(order_ref) 
self.verify_certificate_order_status(order_status) @testcase.attr('positive') def test_rsa_order_certificate_from_ordered_container_with_pass(self): """Post an order for a certificate""" order_ref = self.order_container(with_passphrase=True) container_ref = self.get_container_order(order_ref) secrets = self.get_container(container_ref) self.verify_container_keys_valid(secrets, with_passphrase=True) order_ref = self.order_certificate(container_ref) order_status = self.get_certificate_order(order_ref) self.verify_certificate_order_status(order_status) @testcase.attr('positive') def test_rsa_order_certificate_from_stored_container(self): """Post an order for a certificate""" public_ref = self.create_public_key() self.update_public_key(public_ref) private_ref = self.create_private_key() self.update_private_key(private_ref) container_ref = self.store_container(public_ref, private_ref) secrets = self.get_container(container_ref) self.verify_container_keys_equal(secrets) order_ref = self.order_certificate(container_ref) order_status = self.get_certificate_order(order_ref) self.verify_certificate_order_status(order_status) @testcase.attr('positive') def test_rsa_order_certificate_from_stored_container_with_pass(self): """Post an order for a certificate""" public_ref = self.store_public_key() private_ref = self.store_encrypted_private_key() phrase_ref = self.store_passphrase() container_ref = self.store_container( public_ref, private_ref, phrase_ref) secrets = self.get_container(container_ref) self.verify_container_keys_equal(secrets, with_passphrase=True) order_ref = self.order_certificate(container_ref) order_status = self.get_certificate_order(order_ref) self.verify_certificate_order_status(order_status) @testcase.attr('positive') def test_rsa_order_certificate_from_csr(self): """Post an order for a certificate""" order_ref = self.order_certificate_from_csr() order_status = self.get_certificate_order(order_ref) self.verify_certificate_order_status(order_status) # 
----------------------- Helper Functions --------------------------- def store_private_key(self): pem = keys.get_private_key_pem() test_model = secret_models.SecretModel( **get_private_key_req(base64.b64encode(pem))) resp, private_key_secret_ref = self.secret_behaviors.create_secret( test_model) self.assertEqual(201, resp.status_code) return private_key_secret_ref def get_private_key(self, private_key_secret_ref): resp = self.secret_behaviors.get_secret( private_key_secret_ref, 'application/octet-stream') self.assertEqual(200, resp.status_code) return resp.content def verify_private_key_equal(self, retrieved_private_key): pem = keys.get_private_key_pem() self.assertEqual(pem, retrieved_private_key) def create_private_key(self): create_req = get_private_key_req("") del create_req['payload'] del create_req['payload_content_type'] del create_req['payload_content_encoding'] test_model = secret_models.SecretModel(**create_req) resp, private_key_secret_ref = self.secret_behaviors.create_secret( test_model) self.assertEqual(201, resp.status_code) return private_key_secret_ref def update_private_key(self, private_key_secret_ref): pem = keys.get_private_key_pem() update_resp = self.secret_behaviors.update_secret_payload( private_key_secret_ref, pem, 'application/octet-stream') self.assertEqual(204, update_resp.status_code) def store_encrypted_private_key(self): pem = keys.get_encrypted_private_key_pem() test_model = secret_models.SecretModel( **get_private_key_req(base64.b64encode(pem))) resp, private_key_secret_ref = self.secret_behaviors.create_secret( test_model) self.assertEqual(201, resp.status_code) return private_key_secret_ref def get_encrypted_private_key(self, private_key_secret_ref): resp = self.secret_behaviors.get_secret( private_key_secret_ref, 'application/octet-stream') self.assertEqual(200, resp.status_code) return resp.content def verify_encrypted_private_key_equal(self, retrieved_private_key): pem = keys.get_encrypted_private_key_pem() 
self.assertEqual(pem, retrieved_private_key) def create_encrypted_private_key(self): create_req = get_private_key_req("") del create_req['payload'] del create_req['payload_content_type'] del create_req['payload_content_encoding'] test_model = secret_models.SecretModel(**create_req) resp, private_key_secret_ref = self.secret_behaviors.create_secret( test_model) self.assertEqual(201, resp.status_code) return private_key_secret_ref def update_encrypted_private_key(self, private_key_secret_ref): pem = keys.get_encrypted_private_key_pem() update_resp = self.secret_behaviors.update_secret_payload( private_key_secret_ref, pem, 'application/octet-stream') self.assertEqual(204, update_resp.status_code) def store_public_key(self): pem = keys.get_public_key_pem() test_model = secret_models.SecretModel( **get_public_key_req(base64.b64encode(pem))) resp, public_key_secret_ref = self.secret_behaviors.create_secret( test_model) self.assertEqual(201, resp.status_code) return public_key_secret_ref def get_public_key(self, public_key_secret_ref): resp = self.secret_behaviors.get_secret( public_key_secret_ref, 'application/octet-stream') self.assertEqual(200, resp.status_code) return resp.content def verify_public_key_equal(self, retrieved_public_key): pem = keys.get_public_key_pem() self.assertEqual(pem, retrieved_public_key) def create_public_key(self): create_req = get_public_key_req("") del create_req['payload'] del create_req['payload_content_type'] del create_req['payload_content_encoding'] test_model = secret_models.SecretModel(**create_req) resp, public_key_secret_ref = self.secret_behaviors.create_secret( test_model) self.assertEqual(201, resp.status_code) return public_key_secret_ref def update_public_key(self, public_key_secret_ref): pem = keys.get_public_key_pem() resp = self.secret_behaviors.update_secret_payload( public_key_secret_ref, pem, 'application/octet-stream') self.assertEqual(204, resp.status_code) def store_passphrase(self): passphrase = 
keys.get_passphrase_txt() test_model = secret_models.SecretModel( **get_passphrase_req(passphrase)) resp, passphrase_secret_ref = self.secret_behaviors.create_secret( test_model) self.assertEqual(201, resp.status_code) return passphrase_secret_ref def get_passphrase(self, passphrase_secret_ref): content_type = 'application/octet-stream' get_resp = self.secret_behaviors.get_secret( passphrase_secret_ref, content_type) self.assertEqual(200, get_resp.status_code) return get_resp.content def verify_passphrase_equal(self, retrieved_passphrase): passphrase = keys.get_passphrase_txt() self.assertEqual(passphrase, retrieved_passphrase) def store_certificate(self): pem = keys.get_certificate_pem() test_model = secret_models.SecretModel( **get_certificate_req(base64.b64encode(pem))) resp, certificate_secret_ref = self.secret_behaviors.create_secret( test_model) self.assertEqual(201, resp.status_code) return certificate_secret_ref def get_certificate(self, certificate_secret_ref): content_type = 'application/octet-stream' resp = self.secret_behaviors.get_secret( certificate_secret_ref, content_type) self.assertEqual(200, resp.status_code) return resp.content def verify_certificate_equal(self, retrieved_certificate): pem = keys.get_certificate_pem() self.assertEqual(pem, retrieved_certificate) def create_certificate(self): create_req = get_certificate_req("") del create_req['payload'] del create_req['payload_content_type'] del create_req['payload_content_encoding'] test_model = secret_models.SecretModel(**create_req) resp, certificate_secret_ref = self.secret_behaviors.create_secret( test_model) self.assertEqual(201, resp.status_code) return certificate_secret_ref def update_certificate(self, certificate_secret_ref): pem = keys.get_certificate_pem() resp = self.secret_behaviors.update_secret_payload( certificate_secret_ref, pem, 'application/octet-stream') self.assertEqual(204, resp.status_code) def store_container(self, public_key_secret_ref, private_key_secret_ref, 
passphrase_secret_ref=None): test_model = container_models.ContainerModel( **get_container_req(public_key_secret_ref, private_key_secret_ref, passphrase_secret_ref)) resp, container_ref = self.container_behaviors.create_container( test_model) self.assertEqual(201, resp.status_code) return container_ref def make_secret_dict(self, secret_refs): """Get the secrets from the container and store in a dict""" secret_dict = {} for secret in secret_refs: self.assertIsNotNone(secret.secret_ref) secret_resp = self.secret_behaviors.get_secret( secret.secret_ref, "application/octet-stream") self.assertIsNotNone(secret_resp) secret_dict[secret.name] = secret_resp.content return secret_dict def get_container(self, container_ref): resp = self.container_behaviors.get_container(container_ref) self.assertEqual(200, resp.status_code) return self.make_secret_dict(resp.model.secret_refs) def verify_container_keys_equal(self, secret_dict, with_passphrase=False): if with_passphrase: passphrase = keys.get_passphrase_txt() self.assertEqual(passphrase, secret_dict['private_key_passphrase']) private_pem = keys.get_encrypted_private_key_pem() else: self.assertFalse('private_key_passphrase' in secret_dict) private_pem = keys.get_private_key_pem() self.assertEqual(private_pem, secret_dict['private_key']) public_pem = keys.get_public_key_pem() self.assertEqual(public_pem, secret_dict['public_key']) def verify_container_keys_valid(self, secret_dict, with_passphrase=False): # verify generated keys can be parsed if with_passphrase: crypto.load_privatekey( crypto.FILETYPE_PEM, secret_dict['private_key'], secret_dict['private_key_passphrase']) else: self.assertFalse('private_key_passphrase' in secret_dict) crypto.load_privatekey( crypto.FILETYPE_PEM, secret_dict['private_key']) RSA.importKey(secret_dict['public_key']) def order_container(self, with_passphrase=False): if with_passphrase: test_model = order_models.OrderModel( **get_order_rsa_container_with_passphrase()) else: test_model = 
order_models.OrderModel( **get_order_rsa_container()) resp, order_ref = self.order_behaviors.create_order(test_model) self.assertEqual(202, resp.status_code) return order_ref def get_container_order(self, order_ref): resp = self.order_behaviors.get_order(order_ref) self.assertEqual(200, resp.status_code) return resp.model.container_ref def order_certificate(self, container_ref): test_model = order_models.OrderModel( **get_order_certificate(container_ref)) resp, order_ref = self.order_behaviors.create_order(test_model) self.assertEqual(202, resp.status_code) return order_ref def get_certificate_order(self, order_ref): resp = self.order_behaviors.get_order(order_ref) self.assertEqual(200, resp.status_code) order_status = (resp.model.status, resp.model.sub_status) return order_status def verify_certificate_order_status(self, order_status): self.assertEqual(("PENDING", "cert_request_pending"), order_status) def order_certificate_from_csr(self): csr = keys.get_csr_pem() test_model = order_models.OrderModel( **get_order_certificate_simple_cmc(base64.b64encode(csr))) resp, order_ref = self.order_behaviors.create_order(test_model) self.assertEqual(202, resp.status_code) return order_ref barbican-2.0.0/functionaltests/api/v1/functional/test_cas.py0000664000567000056710000006745412701405673025305 0ustar jenkinsjenkins00000000000000# Copyright (c) 2015 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
import base64 import copy import datetime import re import testtools import time from OpenSSL import crypto dogtag_subcas_enabled = True try: import pki.authority # noqa import pki.feature # noqa except ImportError: dogtag_subcas_enabled = False from barbican.common import hrefs from barbican.plugin.interface import certificate_manager as cert_interface from barbican.tests import certificate_utils as certutil from functionaltests.api import base from functionaltests.api.v1.behaviors import ca_behaviors from functionaltests.api.v1.behaviors import container_behaviors from functionaltests.api.v1.behaviors import order_behaviors from functionaltests.api.v1.behaviors import secret_behaviors from functionaltests.api.v1.models import ca_models from functionaltests.api.v1.models import order_models from functionaltests.common import config CONF = config.get_config() admin_a = CONF.rbac_users.admin_a admin_b = CONF.rbac_users.admin_b creator_a = CONF.rbac_users.creator_a service_admin = CONF.identity.service_admin order_simple_cmc_request_data = { 'type': 'certificate', 'meta': { 'request_type': 'simple-cmc', 'requestor_name': 'Barbican User', 'requestor_email': 'user@example.com', 'requestor_phone': '555-1212' } } BARBICAN_SRV_CONF = cert_interface.CONF def is_plugin_enabled(plugin): return plugin in BARBICAN_SRV_CONF.certificate.enabled_certificate_plugins def depends_on_ca_plugins(*plugins): def depends_on_ca_plugins_decorator(function): def wrapper(instance, *args, **kwargs): plugins_enabled = (is_plugin_enabled(p) for p in plugins) if not all(plugins_enabled): instance.skipTest("The following plugin(s) need to be " "enabled: ".format(plugins)) function(instance, *args, **kwargs) return wrapper return depends_on_ca_plugins_decorator def convert_to_X509Name(dn): target = crypto.X509().get_subject() fields = dn.split(',') for field in fields: m = re.search(r"(\w+)\s*=\s*(.+)", field.strip()) name = m.group(1) value = m.group(2) if name.lower() == 'ou': target.OU = value 
elif name.lower() == 'st': target.ST = value elif name.lower() == 'cn': target.CN = value elif name.lower() == 'l': target.L = value elif name.lower() == 'o': target.O = value return target class CATestCommon(base.TestCase): def setUp(self): super(CATestCommon, self).setUp() self.order_behaviors = order_behaviors.OrderBehaviors(self.client) self.ca_behaviors = ca_behaviors.CABehaviors(self.client) self.container_behaviors = container_behaviors.ContainerBehaviors( self.client) self.secret_behaviors = secret_behaviors.SecretBehaviors(self.client) self.simple_cmc_data = copy.deepcopy(order_simple_cmc_request_data) def tearDown(self): self.order_behaviors.delete_all_created_orders() self.ca_behaviors.delete_all_created_cas() super(CATestCommon, self).tearDown() def send_test_order(self, ca_ref=None, user_name=None, expected_return=202): test_model = order_models.OrderModel(**self.simple_cmc_data) test_model.meta['request_data'] = base64.b64encode( certutil.create_good_csr()) if ca_ref is not None: ca_id = hrefs.get_ca_id_from_ref(ca_ref) test_model.meta['ca_id'] = ca_id create_resp, order_ref = self.order_behaviors.create_order( test_model, user_name=user_name) self.assertEqual(expected_return, create_resp.status_code) if expected_return == 202: self.assertIsNotNone(order_ref) return order_ref def wait_for_order(self, order_resp, order_ref): # Make sure we have an active order time_count = 1 while order_resp.model.status != "ACTIVE" and time_count <= 4: time.sleep(1) time_count += 1 order_resp = self.behaviors.get_order(order_ref) def get_root_ca_ref(self, ca_plugin_name, ca_plugin_id): (resp, cas, total, next_ref, prev_ref) = self.ca_behaviors.get_cas( limit=100) for item in cas: ca = self.ca_behaviors.get_ca(item) if ca.model.plugin_name == ca_plugin_name: if ca.model.plugin_ca_id == ca_plugin_id: return item return None def get_snakeoil_root_ca_ref(self): return self.get_root_ca_ref( ca_plugin_name=('barbican.plugin.snakeoil_ca.' 
'SnakeoilCACertificatePlugin'), ca_plugin_id="Snakeoil CA") def get_dogtag_root_ca_ref(self): return self.get_root_ca_ref( ca_plugin_name='barbican.plugin.dogtag.DogtagCAPlugin', ca_plugin_id="Dogtag CA") class CertificateAuthoritiesTestCase(CATestCommon): def setUp(self): super(CertificateAuthoritiesTestCase, self).setUp() self.subca_name = "Subordinate CA" self.subca_description = "Test Snake Oil Subordinate CA" self.subca_subca_name = "Sub-Sub CA" self.subca_subca_description = "Test Snake Oil Sub-Sub CA" def get_signing_cert(self, ca_ref): resp = self.ca_behaviors.get_cacert(ca_ref) return crypto.load_certificate(crypto.FILETYPE_PEM, resp.text) def verify_signing_cert(self, ca_ref, subject_dn, issuer_dn): cacert = self.get_signing_cert(ca_ref) return ((cacert.get_subject() == subject_dn) and (cacert.get_issuer() == issuer_dn)) def get_subca_model(self, root_ref): now = datetime.datetime.utcnow().isoformat() subject = "CN=Subordinate CA " + now + ", O=example.com" return ca_models.CAModel( parent_ca_ref=root_ref, description=self.subca_description, name=self.subca_name, subject_dn=subject ) def get_sub_subca_model(self, parent_ca_ref): now = datetime.datetime.utcnow().isoformat() subject = "CN=sub sub CA " + now + ", O=example.com" return ca_models.CAModel( parent_ca_ref=parent_ca_ref, description=self.subca_subca_description, name=self.subca_subca_name, subject_dn=subject ) @depends_on_ca_plugins('snakeoil_ca') def test_create_snakeoil_subca(self): self._create_and_verify_subca(self.get_snakeoil_root_ca_ref()) @testtools.skipIf(not dogtag_subcas_enabled, "dogtag subcas not enabled") @depends_on_ca_plugins('dogtag') def test_create_dogtag_subca(self): self._create_and_verify_subca(self.get_dogtag_root_ca_ref()) def _create_and_verify_subca(self, root_ca_ref): ca_model = self.get_subca_model(root_ca_ref) resp, ca_ref = self.ca_behaviors.create_ca(ca_model) self.assertEqual(201, resp.status_code) root_subject = self.get_signing_cert(root_ca_ref).get_subject() 
self.verify_signing_cert( ca_ref=ca_ref, subject_dn=convert_to_X509Name(ca_model.subject_dn), issuer_dn=root_subject) resp = self.ca_behaviors.delete_ca(ca_ref=ca_ref) self.assertEqual(204, resp.status_code) @depends_on_ca_plugins('snakeoil_ca') def test_create_subca_of_snakeoil_subca(self): self._create_subca_of_subca(self.get_snakeoil_root_ca_ref()) @testtools.skipIf(not dogtag_subcas_enabled, "dogtag subcas not enabled") @depends_on_ca_plugins('dogtag') def test_create_subca_of_dogtag_subca(self): self._create_subca_of_subca(self.get_dogtag_root_ca_ref()) def _create_subca_of_subca(self, root_ca_ref): parent_model = self.get_subca_model(root_ca_ref) resp, parent_ref = self.ca_behaviors.create_ca(parent_model) self.assertEqual(201, resp.status_code) child_model = self.get_sub_subca_model(parent_ref) resp, child_ref = self.ca_behaviors.create_ca(child_model) self.assertEqual(201, resp.status_code) parent_subject = self.get_signing_cert(parent_ref).get_subject() self.verify_signing_cert( ca_ref=child_ref, subject_dn=convert_to_X509Name(child_model.subject_dn), issuer_dn=parent_subject) resp = self.ca_behaviors.delete_ca(ca_ref=child_ref) self.assertEqual(204, resp.status_code) resp = self.ca_behaviors.delete_ca(ca_ref=parent_ref) self.assertEqual(204, resp.status_code) @depends_on_ca_plugins('snakeoil_ca') def test_fail_to_create_subca_of_snakeoil_not_owned_subca(self): self._fail_to_create_subca_of_not_owned_subca( self.get_snakeoil_root_ca_ref()) @testtools.skipIf(not dogtag_subcas_enabled, "dogtag subcas not enabled") @depends_on_ca_plugins('dogtag') def test_fail_to_create_subca_of_dogtag_not_owned_subca(self): self._fail_to_create_subca_of_not_owned_subca( self.get_dogtag_root_ca_ref()) def _fail_to_create_subca_of_not_owned_subca(self, root_ca_ref): parent_model = self.get_subca_model(root_ca_ref) resp, parent_ref = self.ca_behaviors.create_ca(parent_model) self.assertEqual(201, resp.status_code) child_model = self.get_sub_subca_model(parent_ref) resp, 
child_ref = self.ca_behaviors.create_ca(child_model, user_name=admin_a) self.assertEqual(403, resp.status_code) resp = self.ca_behaviors.delete_ca(ca_ref=parent_ref) self.assertEqual(204, resp.status_code) def test_create_subca_with_invalid_parent_ca_id(self): ca_model = self.get_subca_model( 'http://localhost:9311/cas/invalid_ref' ) resp, ca_ref = self.ca_behaviors.create_ca(ca_model) self.assertEqual(400, resp.status_code) def test_create_subca_with_missing_parent_ca_id(self): ca_model = self.get_subca_model( 'http://localhost:9311/cas/missing_ref' ) del ca_model.parent_ca_ref resp, ca_ref = self.ca_behaviors.create_ca(ca_model) self.assertEqual(400, resp.status_code) @depends_on_ca_plugins('snakeoil_ca') def test_create_snakeoil_subca_with_missing_subjectdn(self): self._create_subca_with_missing_subjectdn( self.get_snakeoil_root_ca_ref()) @testtools.skipIf(not dogtag_subcas_enabled, "dogtag subcas not enabled") @depends_on_ca_plugins('dogtag') def test_create_dogtag_subca_with_missing_subjectdn(self): self._create_subca_with_missing_subjectdn( self.get_dogtag_root_ca_ref()) def _create_subca_with_missing_subjectdn(self, root_ca_ref): ca_model = self.get_subca_model(root_ca_ref) del ca_model.subject_dn resp, ca_ref = self.ca_behaviors.create_ca(ca_model) self.assertEqual(400, resp.status_code) @depends_on_ca_plugins('snakeoil_ca') def test_create_snakeoil_subca_and_send_cert_order(self): self._create_subca_and_send_cert_order( self.get_snakeoil_root_ca_ref()) @testtools.skipIf(not dogtag_subcas_enabled, "dogtag subcas not enabled") @depends_on_ca_plugins('dogtag') def test_create_dogtag_subca_and_send_cert_order(self): self._create_subca_and_send_cert_order( self.get_dogtag_root_ca_ref()) def _create_subca_and_send_cert_order(self, root_ca): ca_model = self.get_subca_model(root_ca) resp, ca_ref = self.ca_behaviors.create_ca(ca_model) self.assertEqual(201, resp.status_code) self.send_test_order(ca_ref) resp = self.ca_behaviors.delete_ca(ca_ref=ca_ref) 
self.assertEqual(204, resp.status_code) @depends_on_ca_plugins('snakeoil_ca') def test_add_snakeoil_ca__to_project_and_get_preferred(self): self._add_ca__to_project_and_get_preferred( self.get_snakeoil_root_ca_ref() ) @depends_on_ca_plugins('dogtag') def test_add_dogtag_ca__to_project_and_get_preferred(self): self._add_ca__to_project_and_get_preferred( self.get_dogtag_root_ca_ref() ) def _add_ca__to_project_and_get_preferred(self, ca_ref): resp = self.ca_behaviors.add_ca_to_project(ca_ref, user_name=admin_a) self.assertEqual(204, resp.status_code) resp = self.ca_behaviors.get_preferred(user_name=admin_a) self.assertEqual(200, resp.status_code) ca_id = hrefs.get_ca_id_from_ref(resp.model.ca_ref) self.assertEqual(hrefs.get_ca_id_from_ref(ca_ref), ca_id) resp = self.ca_behaviors.remove_ca_from_project( ca_ref, user_name=admin_a) self.assertEqual(204, resp.status_code) resp = self.ca_behaviors.get_preferred(user_name=admin_a) self.assertEqual(404, resp.status_code) @depends_on_ca_plugins('snakeoil_ca') def test_try_and_fail_to_add_to_proj_snakeoil_subca_that_is_not_mine(self): self._try_and_fail_to_add_to_proj_subca_that_is_not_mine( self.get_snakeoil_root_ca_ref() ) @testtools.skipIf(not dogtag_subcas_enabled, "dogtag subcas not enabled") @depends_on_ca_plugins('dogtag') def test_try_and_fail_to_add_to_proj_dogtag_subca_that_is_not_mine(self): self._try_and_fail_to_add_to_proj_subca_that_is_not_mine( self.get_dogtag_root_ca_ref() ) def _try_and_fail_to_add_to_proj_subca_that_is_not_mine(self, root_ca_ref): ca_model = self.get_subca_model(root_ca_ref) resp, ca_ref = self.ca_behaviors.create_ca(ca_model, user_name=admin_a) self.assertEqual(201, resp.status_code) resp = self.ca_behaviors.add_ca_to_project(ca_ref, user_name=admin_b) self.assertEqual(403, resp.status_code) resp = self.ca_behaviors.delete_ca(ca_ref=ca_ref, user_name=admin_a) self.assertEqual(204, resp.status_code) @depends_on_ca_plugins('snakeoil_ca') def test_create_and_delete_snakeoil_subca(self): 
self._create_and_delete_subca( self.get_snakeoil_root_ca_ref() ) @testtools.skipIf(not dogtag_subcas_enabled, "dogtag subcas not enabled") @depends_on_ca_plugins('dogtag') def test_create_and_delete_dogtag_subca(self): self._create_and_delete_subca( self.get_dogtag_root_ca_ref() ) def _create_and_delete_subca(self, root_ca_ref): ca_model = self.get_subca_model(root_ca_ref) resp, ca_ref = self.ca_behaviors.create_ca(ca_model) self.assertEqual(201, resp.status_code) self.ca_behaviors.delete_ca(ca_ref) resp = self.ca_behaviors.get_ca(ca_ref) self.assertEqual(404, resp.status_code) @depends_on_ca_plugins('snakeoil_ca') def test_create_and_delete_snakeoil_subca_and_artifacts(self): ca_model = self.get_subca_model(self.get_snakeoil_root_ca_ref()) resp, ca_ref = self.ca_behaviors.create_ca(ca_model, user_name=admin_a) self.assertEqual(201, resp.status_code) resp = self.ca_behaviors.add_ca_to_project(ca_ref, user_name=admin_a) self.assertEqual(204, resp.status_code) resp = self.ca_behaviors.get_preferred(user_name=admin_a) self.assertEqual(200, resp.status_code) self.ca_behaviors.delete_ca(ca_ref, user_name=admin_a) resp = self.ca_behaviors.get_preferred(user_name=admin_a) self.assertEqual(404, resp.status_code) resp = self.ca_behaviors.get_ca(ca_ref, user_name=admin_a) self.assertEqual(404, resp.status_code) @depends_on_ca_plugins('snakeoil_ca') def test_fail_to_delete_top_level_snakeoil_ca(self): self._fail_to_delete_top_level_ca( self.get_snakeoil_root_ca_ref() ) @depends_on_ca_plugins('dogtag') def test_fail_to_delete_top_level_dogtag_ca(self): self._fail_to_delete_top_level_ca( self.get_dogtag_root_ca_ref() ) def _fail_to_delete_top_level_ca(self, root_ca_ref): resp = self.ca_behaviors.delete_ca( root_ca_ref, expected_fail=True) self.assertEqual(403, resp.status_code) @depends_on_ca_plugins('snakeoil_ca') def test_create_snakeoil_subca_and_get_cacert(self): self._create_subca_and_get_cacert( self.get_snakeoil_root_ca_ref() ) @testtools.skipIf(not 
dogtag_subcas_enabled, "dogtag subcas not enabled") @depends_on_ca_plugins('dogtag') def test_create_dogtag_subca_and_get_cacert(self): self._create_subca_and_get_cacert( self.get_dogtag_root_ca_ref() ) def _create_subca_and_get_cacert(self, root_ca_ref): ca_model = self.get_subca_model(root_ca_ref) resp, ca_ref = self.ca_behaviors.create_ca(ca_model, user_name=admin_a) self.assertEqual(201, resp.status_code) resp = self.ca_behaviors.get_cacert(ca_ref, user_name=admin_a) self.assertEqual(200, resp.status_code) crypto.load_certificate(crypto.FILETYPE_PEM, resp.text) resp = self.ca_behaviors.delete_ca(ca_ref=ca_ref, user_name=admin_a) self.assertEqual(204, resp.status_code) @depends_on_ca_plugins('snakeoil_ca') def test_try_and_fail_to_use_snakeoil_subca_that_is_not_mine(self): self._try_and_fail_to_use_subca_that_is_not_mine( self.get_snakeoil_root_ca_ref() ) @testtools.skipIf(not dogtag_subcas_enabled, "dogtag subcas not enabled") @depends_on_ca_plugins('dogtag') def test_try_and_fail_to_use_dogtag_subca_that_is_not_mine(self): self._try_and_fail_to_use_subca_that_is_not_mine( self.get_dogtag_root_ca_ref() ) def _try_and_fail_to_use_subca_that_is_not_mine(self, root_ca_ref): ca_model = self.get_subca_model(root_ca_ref) resp, ca_ref = self.ca_behaviors.create_ca(ca_model, user_name=admin_a) self.assertEqual(201, resp.status_code) self.send_test_order(ca_ref=ca_ref, user_name=admin_a) self.send_test_order(ca_ref=ca_ref, user_name=admin_b, expected_return=403) resp = self.ca_behaviors.delete_ca(ca_ref=ca_ref, user_name=admin_a) self.assertEqual(204, resp.status_code) @depends_on_ca_plugins('snakeoil_ca') def test_create_snakeoil_subca_and_send_cert_order_and_verify_cert(self): ca_model = self.get_subca_model(self.get_snakeoil_root_ca_ref()) resp, ca_ref = self.ca_behaviors.create_ca(ca_model) self.assertEqual(201, resp.status_code) order_ref = self.send_test_order(ca_ref) order_resp = self.order_behaviors.get_order(order_ref=order_ref) self.assertEqual(200, 
class ListingCAsTestCase(CATestCommon):
    """Tests for listing CAs.

    Must be in a separate class so that we can deselect them
    in the parallel CA tests, until we can deselect specific tests
    using a decorator.
    """

    def test_list_and_get_cas(self):
        """Every listed CA can be fetched and exposes plugin metadata."""
        resp, cas, total, next_ref, prev_ref = self.ca_behaviors.get_cas()
        self.assertGreater(total, 0)
        for ca_ref in cas:
            ca_resp = self.ca_behaviors.get_ca(ca_ref)
            self.assertIsNotNone(ca_resp.model.plugin_name)
            self.assertIsNotNone(ca_resp.model.ca_id)
            self.assertIsNotNone(ca_resp.model.plugin_ca_id)

    @depends_on_ca_plugins('snakeoil_ca', 'simple_certificate')
    def test_list_snakeoil_and_simple_cert_cas(self):
        """Test if backend loads these specific CAs

        Since the standard gate works with the snakeoil CA and the
        simple_certificate CA. This test is just to make sure that
        these two are specifically loaded.
        """
        resp, cas, total, next_ref, prev_ref = self.ca_behaviors.get_cas()
        self.assertEqual(total, 2)

    @depends_on_ca_plugins('dogtag')
    def test_list_dogtag_cas(self):
        """Test if backend loads this specific CA"""
        resp, cas, total, next_ref, prev_ref = self.ca_behaviors.get_cas()
        self.assertGreater(total, 0)
""" (resp, cas, total, next_ref, prev_ref) = self.ca_behaviors.get_cas() self.assertEqual(total, 2) @depends_on_ca_plugins('dogtag') def test_list_dogtag_cas(self): """Test if backend loads this specific CA""" (resp, cas, total, next_ref, prev_ref) = self.ca_behaviors.get_cas() self.assertGreater(total, 0) class ProjectCATestCase(CATestCommon): def setUp(self): super(ProjectCATestCase, self).setUp() @depends_on_ca_plugins('snakeoil_ca', 'simple_certificate') def test_addition_of_project_ca_affects_getting_ca_list(self): # Getting list of CAs should get the total configured CAs (resp, cas, initial_total, _, __) = self.ca_behaviors.get_cas() self.assertEqual(initial_total, 2) # Set project CA ca_ref = self.get_snakeoil_root_ca_ref() resp = self.ca_behaviors.add_ca_to_project(ca_ref, user_name=admin_a) self.assertEqual(204, resp.status_code) # Getting list of CAs should get only the project CA for all users (resp, cas, project_ca_total, _, __) = self.ca_behaviors.get_cas( user_name=admin_a) self.assertEqual(1, project_ca_total) # Getting list of CAs should get only the project CA for all users (resp, cas, project_ca_total, _, __) = self.ca_behaviors.get_cas( user_name=creator_a) self.assertEqual(1, project_ca_total) # Remove project CA resp = self.ca_behaviors.remove_ca_from_project(ca_ref, user_name=admin_a) self.assertEqual(204, resp.status_code) # Getting list of CAs should get the total configured CAs (as seen # before) (resp, cas, final_total, _, __) = self.ca_behaviors.get_cas() self.assertEqual(initial_total, final_total) class GlobalPreferredCATestCase(CATestCommon): def setUp(self): super(GlobalPreferredCATestCase, self).setUp() (_, self.cas, self.num_cas, _, _) = self.ca_behaviors.get_cas() self.ca_ids = [hrefs.get_ca_id_from_ref(ref) for ref in self.cas] def tearDown(self): super(CATestCommon, self).tearDown() def test_global_preferred_no_project_admin_access(self): resp = self.ca_behaviors.get_global_preferred() self.assertEqual(403, resp.status_code) resp 
= self.ca_behaviors.set_global_preferred(ca_ref=self.cas[0]) self.assertEqual(403, resp.status_code) resp = self.ca_behaviors.unset_global_preferred() self.assertEqual(403, resp.status_code) def test_global_preferred_update(self): if self.num_cas < 2: self.skipTest("At least two CAs are required for this test") resp = self.ca_behaviors.set_global_preferred( ca_ref=self.cas[0], user_name=service_admin) self.assertEqual(204, resp.status_code) resp = self.ca_behaviors.get_global_preferred(user_name=service_admin) self.assertEqual(200, resp.status_code) ca_id = hrefs.get_ca_id_from_ref(resp.model.ca_ref) self.assertEqual(self.ca_ids[0], ca_id) resp = self.ca_behaviors.set_global_preferred( ca_ref=self.cas[1], user_name=service_admin) self.assertEqual(204, resp.status_code) resp = self.ca_behaviors.get_global_preferred(user_name=service_admin) self.assertEqual(200, resp.status_code) ca_id = hrefs.get_ca_id_from_ref(resp.model.ca_ref) self.assertEqual(self.ca_ids[1], ca_id) resp = self.ca_behaviors.unset_global_preferred( user_name=service_admin) self.assertEqual(204, resp.status_code) def test_global_preferred_set_and_unset(self): resp = self.ca_behaviors.set_global_preferred( ca_ref=self.cas[0], user_name=service_admin) self.assertEqual(204, resp.status_code) resp = self.ca_behaviors.get_global_preferred(user_name=service_admin) self.assertEqual(200, resp.status_code) ca_id = hrefs.get_ca_id_from_ref(resp.model.ca_ref) self.assertEqual(self.ca_ids[0], ca_id) resp = self.ca_behaviors.unset_global_preferred( user_name=service_admin) self.assertEqual(204, resp.status_code) resp = self.ca_behaviors.get_global_preferred(user_name=service_admin) self.assertEqual(404, resp.status_code) def test_global_preferred_affects_project_preferred(self): if self.num_cas < 2: self.skipTest("At least two CAs are required for this test") resp = self.ca_behaviors.get_preferred(user_name=admin_a) self.assertEqual(404, resp.status_code) resp = self.ca_behaviors.set_global_preferred( 
ca_ref=self.cas[1], user_name=service_admin) self.assertEqual(204, resp.status_code) resp = self.ca_behaviors.get_preferred(user_name=admin_a) self.assertEqual(200, resp.status_code) ca_id = hrefs.get_ca_id_from_ref(resp.model.ca_ref) self.assertEqual(self.ca_ids[1], ca_id) resp = self.ca_behaviors.unset_global_preferred( user_name=service_admin) self.assertEqual(204, resp.status_code) resp = self.ca_behaviors.get_preferred(user_name=admin_a) self.assertEqual(404, resp.status_code) def test_project_preferred_overrides_global_preferred(self): if self.num_cas < 2: self.skipTest("At least two CAs are required for this test") resp = self.ca_behaviors.get_preferred(user_name=admin_a) self.assertEqual(404, resp.status_code) resp = self.ca_behaviors.set_global_preferred( ca_ref=self.cas[1], user_name=service_admin) self.assertEqual(204, resp.status_code) resp = self.ca_behaviors.get_preferred(user_name=admin_a) self.assertEqual(200, resp.status_code) ca_id = hrefs.get_ca_id_from_ref(resp.model.ca_ref) self.assertEqual(self.ca_ids[1], ca_id) resp = self.ca_behaviors.add_ca_to_project( ca_ref=self.cas[0], user_name=admin_a) self.assertEqual(204, resp.status_code) resp = self.ca_behaviors.get_preferred(user_name=admin_a) self.assertEqual(200, resp.status_code) ca_id = hrefs.get_ca_id_from_ref(resp.model.ca_ref) self.assertEqual(self.ca_ids[0], ca_id) resp = self.ca_behaviors.remove_ca_from_project( ca_ref=self.cas[0], user_name=admin_a) self.assertEqual(204, resp.status_code) resp = self.ca_behaviors.get_preferred(user_name=admin_a) ca_id = hrefs.get_ca_id_from_ref(resp.model.ca_ref) self.assertEqual(self.ca_ids[1], ca_id) resp = self.ca_behaviors.unset_global_preferred( user_name=service_admin) self.assertEqual(204, resp.status_code) resp = self.ca_behaviors.get_preferred(user_name=admin_a) self.assertEqual(404, resp.status_code) barbican-2.0.0/functionaltests/api/v1/functional/test_certificate_orders.py0000664000567000056710000007542512701405673030374 0ustar 
# Copyright (c) 2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import copy
import json
import time

from OpenSSL import crypto
import testtools

from barbican.plugin.interface import secret_store as s
from barbican.tasks import certificate_resources as cert_res
from barbican.tests import certificate_utils as certutil
from barbican.tests import keys
from functionaltests.api import base
from functionaltests.api.v1.behaviors import ca_behaviors
from functionaltests.api.v1.behaviors import container_behaviors
from functionaltests.api.v1.behaviors import order_behaviors
from functionaltests.api.v1.behaviors import secret_behaviors
from functionaltests.api.v1.models import container_models
from functionaltests.api.v1.models import order_models
from functionaltests.api.v1.models import secret_models

try:
    import pki  # flake8: noqa
    dogtag_imports_ok = True
except ImportError:
    # dogtag libraries not available, assume dogtag not installed
    dogtag_imports_ok = False

NOT_FOUND_CONTAINER_REF = "http://localhost:9311/v1/containers/not_found"
INVALID_CONTAINER_REF = "invalid"

# Canonical request bodies; each test deep-copies one and tweaks 'meta'.
order_simple_cmc_request_data = {
    'type': 'certificate',
    'meta': {
        'request_type': 'simple-cmc',
        'requestor_name': 'Barbican User',
        'requestor_email': 'user@example.com',
        'requestor_phone': '555-1212'
    }
}

order_full_cmc_request_data = {
    'type': 'certificate',
    'meta': {
        'request_type': 'full-cmc',
        'requestor_name': 'Barbican User',
        'requestor_email': 'user@example.com',
        'requestor_phone': '555-1212'
    }
}

order_stored_key_request_data = {
    'type': 'certificate',
    'meta': {
        'request_type': 'stored-key',
        'subject_dn': 'cn=server.example.com,o=example.com',
        'requestor_name': 'Barbican User',
        'requestor_email': 'user@example.com',
        'requestor_phone': '555-1212'
    }
}

order_dogtag_custom_request_data = {
    'type': 'certificate',
    'meta': {
        'request_type': 'custom',
        'cert_request_type': 'pkcs10',
        'profile_id': 'caServerCert'
    }
}

create_container_rsa_data = {
    "name": "rsacontainer",
    "type": "rsa",
    "secret_refs": [
        {
            "name": "public_key",
        },
        {
            "name": "private_key",
        },
        {
            "name": "private_key_passphrase"
        }
    ]
}


def get_private_key_req():
    """Build a secret-create request for the fixture RSA private key."""
    return {'name': 'myprivatekey',
            'payload_content_type': 'application/octet-stream',
            'payload_content_encoding': 'base64',
            'algorithm': 'rsa',
            'bit_length': 2048,
            'secret_type': s.SecretType.PRIVATE,
            'payload': base64.b64encode(keys.get_private_key_pem())}


def get_public_key_req():
    """Build a secret-create request for the fixture RSA public key."""
    return {'name': 'mypublickey',
            'payload_content_type': 'application/octet-stream',
            'payload_content_encoding': 'base64',
            'algorithm': 'rsa',
            'bit_length': 2048,
            'secret_type': s.SecretType.PUBLIC,
            'payload': base64.b64encode(keys.get_public_key_pem())}


create_generic_container_data = {
    "name": "containername",
    "type": "generic",
    "secret_refs": [
        {
            "name": "secret1",
        },
        {
            "name": "secret2",
        },
        {
            "name": "secret3"
        }
    ]
}
class CertificatesTestCase(base.TestCase):

    def setUp(self):
        """Create behavior clients and per-test copies of the order bodies."""
        super(CertificatesTestCase, self).setUp()
        self.behaviors = order_behaviors.OrderBehaviors(self.client)
        self.ca_behaviors = ca_behaviors.CABehaviors(self.client)
        self.container_behaviors = container_behaviors.ContainerBehaviors(
            self.client)
        self.secret_behaviors = secret_behaviors.SecretBehaviors(self.client)
        # Deep copies so each test may mutate the 'meta' section freely.
        self.simple_cmc_data = copy.deepcopy(order_simple_cmc_request_data)
        self.full_cmc_data = copy.deepcopy(order_full_cmc_request_data)
        self.stored_key_data = copy.deepcopy(order_stored_key_request_data)
        self.dogtag_custom_data = copy.deepcopy(
            order_dogtag_custom_request_data)

    def tearDown(self):
        self.behaviors.delete_all_created_orders()
        super(CertificatesTestCase, self).tearDown()

    def wait_for_order(self, order_ref, delay_before_check_seconds=1,
                       max_wait_seconds=4):
        """Poll the order until it reaches a terminal state or we time out.

        Returns the last order response seen, which may still be PENDING if
        max_wait_seconds elapsed first.
        """
        time.sleep(delay_before_check_seconds)
        polls = 1
        order_resp = self.behaviors.get_order(order_ref)
        # ACTIVE and ERROR are the terminal states.
        while (order_resp.model.status not in ("ACTIVE", "ERROR") and
                polls <= max_wait_seconds):
            time.sleep(1)
            polls += 1
            order_resp = self.behaviors.get_order(order_ref)
        return order_resp

    def create_asymmetric_key_container(self):
        """Store the fixture RSA key pair and wrap it in an 'rsa' container."""
        priv_model = secret_models.SecretModel(**get_private_key_req())
        priv_model.secret_type = s.SecretType.PRIVATE
        resp, secret_ref_priv = self.secret_behaviors.create_secret(
            priv_model)
        self.assertEqual(201, resp.status_code)

        pub_model = secret_models.SecretModel(**get_public_key_req())
        pub_model.secret_type = s.SecretType.PUBLIC
        resp, secret_ref_pub = self.secret_behaviors.create_secret(
            pub_model)
        self.assertEqual(201, resp.status_code)

        container_model = container_models.ContainerModel(
            **create_container_rsa_data)
        container_model.secret_refs = [
            {'name': 'public_key', 'secret_ref': secret_ref_pub},
            {'name': 'private_key', 'secret_ref': secret_ref_priv},
        ]
        resp, container_ref = self.container_behaviors.create_container(
            container_model)
        self.assertEqual(resp.status_code, 201)

        return container_ref

    def create_generic_container(self):
        """Store one private key and wrap it in a 'generic' container."""
        secret_model = secret_models.SecretModel(**get_private_key_req())
        secret_model.secret_type = s.SecretType.PRIVATE
        resp, secret_ref = self.secret_behaviors.create_secret(secret_model)
        self.assertEqual(201, resp.status_code)

        container_model = container_models.ContainerModel(
            **create_generic_container_data)
        container_model.secret_refs = [{
            'name': 'my_secret',
            'secret_ref': secret_ref
        }]
        resp, container_ref = self.container_behaviors.create_container(
            container_model)
        self.assertEqual(resp.status_code, 201)

        return container_ref

    def get_dogtag_ca_id(self):
        """Return the ca_id of the Dogtag CA plugin, or None if absent."""
        (resp, cas, total, next_ref, prev_ref) = self.ca_behaviors.get_cas()
        for item in cas:
            ca = self.ca_behaviors.get_ca(item)
            if ca.model.plugin_name == (
                    'barbican.plugin.dogtag.DogtagCAPlugin'):
                return ca.model.ca_id
        return None

    def verify_cert_returned(self, order_resp, is_stored_key_type=False):
        """Assert the order produced a certificate container with valid data.

        When is_stored_key_type is True, the container must also carry a
        private_key reference.
        """
        container_ref = order_resp.model.container_ref
        self.assertIsNotNone(container_ref, "no cert container returned")
        container_resp = self.container_behaviors.get_container(container_ref)
        self.assertIsNotNone(container_resp, "Cert container returns None")
        self.assertEqual('certificate', container_resp.model.type)
        secret_refs = container_resp.model.secret_refs
        self.assertIsNotNone(secret_refs, "container has no secret refs")

        found_cert = False
        found_private_key_ref = False
        for secret in secret_refs:
            if secret.name == 'certificate':
                found_cert = True
                self.assertIsNotNone(secret.secret_ref)
                self.verify_valid_cert(secret.secret_ref)
            elif secret.name == 'intermediates':
                self.assertIsNotNone(secret.secret_ref)
                self.verify_valid_intermediates(secret.secret_ref)
            elif is_stored_key_type and secret.name == 'private_key':
                found_private_key_ref = True
                self.assertIsNotNone(secret.secret_ref)

        self.assertTrue(found_cert)
        if is_stored_key_type:
            self.assertTrue(found_private_key_ref)

    def verify_valid_cert(self, secret_ref):
        """Fetch the secret and confirm it parses as a PEM certificate."""
        secret_resp = self.secret_behaviors.get_secret(
            secret_ref, "application/pkix-cert")
        self.assertIsNotNone(secret_resp)
        self.assertIsNotNone(secret_resp.content)
        crypto.load_certificate(crypto.FILETYPE_PEM, secret_resp.content)

    def verify_valid_intermediates(self, secret_ref):
        """Fetch the secret and confirm it parses as a PEM PKCS7 chain."""
        secret_resp = self.secret_behaviors.get_secret(
            secret_ref, "application/pkix-cert")
        self.assertIsNotNone(secret_resp)
        self.assertIsNotNone(secret_resp.content)
        crypto.load_pkcs7_data(crypto.FILETYPE_PEM, secret_resp.content)

    def verify_pending_waiting_for_ca(self, order_resp):
        """Assert the order is PENDING with the request-pending sub-status."""
        self.assertEqual('PENDING', order_resp.model.status)
        self.assertEqual(cert_res.ORDER_STATUS_REQUEST_PENDING.id,
                         order_resp.model.sub_status)
        self.assertEqual(cert_res.ORDER_STATUS_REQUEST_PENDING.message,
                         order_resp.model.sub_status_message)

    def confirm_error_message(self, resp, message):
        """Assert the JSON error body carries the expected description."""
        resp_dict = json.loads(resp.content)
        self.assertEqual(message, resp_dict['description'])
order_resp = self.wait_for_order( order_ref, delay_before_check_seconds=20, max_wait_seconds=25) self.assertEqual('ACTIVE', order_resp.model.status) @testtools.testcase.attr('positive') def test_create_simple_cmc_order_without_requestor_info(self): self.simple_cmc_data.pop("requestor_name", None) self.simple_cmc_data.pop("requestor_email", None) self.simple_cmc_data.pop("requestor_phone", None) test_model = order_models.OrderModel(**self.simple_cmc_data) test_model.meta['request_data'] = base64.b64encode( certutil.create_good_csr()) create_resp, order_ref = self.behaviors.create_order(test_model) self.assertEqual(202, create_resp.status_code) self.assertIsNotNone(order_ref) order_resp = self.behaviors.get_order(order_ref) self.verify_pending_waiting_for_ca(order_resp) @testtools.testcase.attr('positive') @testtools.skipIf(not dogtag_imports_ok, "Dogtag imports not available") def test_create_simple_cmc_order_with_dogtag_profile(self): test_model = order_models.OrderModel(**self.simple_cmc_data) test_model.meta['request_data'] = base64.b64encode( certutil.create_good_csr()) test_model.meta['profile'] = 'caServerCert' test_model.meta['ca_id'] = self.get_dogtag_ca_id() create_resp, order_ref = self.behaviors.create_order(test_model) self.assertEqual(202, create_resp.status_code) self.assertIsNotNone(order_ref) order_resp = self.wait_for_order(order_ref) self.assertEqual('ACTIVE', order_resp.model.status) self.verify_cert_returned(order_resp) @testtools.testcase.attr('negative') def test_create_simple_cmc_with_profile_and_no_ca_id(self): test_model = order_models.OrderModel(**self.simple_cmc_data) test_model.meta['request_data'] = base64.b64encode( certutil.create_good_csr()) test_model.meta['profile'] = 'caServerCert' create_resp, order_ref = self.behaviors.create_order(test_model) self.assertEqual(400, create_resp.status_code) self.assertIsNone(order_ref) self.confirm_error_message( create_resp, "Missing required metadata field for ca_id" ) 
@testtools.testcase.attr('negative') def test_create_simple_cmc_with_profile_and_incorrect_ca_id(self): test_model = order_models.OrderModel(**self.simple_cmc_data) test_model.meta['request_data'] = base64.b64encode( certutil.create_good_csr()) test_model.meta['profile'] = 'caServerCert' test_model.meta['ca_id'] = 'incorrect_ca_id' create_resp, order_ref = self.behaviors.create_order(test_model) self.assertEqual(400, create_resp.status_code) self.assertIsNone(order_ref) self.confirm_error_message( create_resp, "Order creation issue seen - The ca_id provided " "in the request is invalid." ) @testtools.testcase.attr('negative') @testtools.skipIf(not dogtag_imports_ok, "Dogtag imports not available") def test_create_simple_cmc_with_dogtag_and_invalid_subject_dn(self): test_model = order_models.OrderModel(**self.simple_cmc_data) test_model.meta['request_data'] = base64.b64encode( certutil.create_csr_with_bad_subject_dn()) test_model.meta['profile'] = 'caServerCert' test_model.meta['ca_id'] = self.get_dogtag_ca_id() create_resp, order_ref = self.behaviors.create_order(test_model) self.assertEqual(202, create_resp.status_code) self.assertIsNotNone(order_ref) order_resp = self.wait_for_order(order_ref) self.assertEqual('ERROR', order_resp.model.status) self.assertEqual('400', order_resp.model.error_status_code) self.assertIn('Problem with data in certificate request', order_resp.model.error_reason) # TODO(alee) Dogtag does not currently return a error message # when it does, check for that specific error message @testtools.testcase.attr('negative') def test_create_simple_cmc_order_with_no_base64(self): test_model = order_models.OrderModel(**self.simple_cmc_data) # do not encode with base64 to force the error test_model.meta['request_data'] = certutil.create_bad_csr() create_resp, order_ref = self.behaviors.create_order(test_model) self.assertEqual(400, create_resp.status_code) self.assertIsNone(order_ref) self.confirm_error_message(create_resp, "Unable to decode request 
data.") @testtools.testcase.attr('negative') def test_create_simple_cmc_order_with_invalid_pkcs10(self): test_model = order_models.OrderModel(**self.simple_cmc_data) test_model.meta['request_data'] = base64.b64encode( certutil.create_bad_csr()) create_resp, order_ref = self.behaviors.create_order(test_model) self.assertEqual(400, create_resp.status_code) self.assertIsNone(order_ref) self.confirm_error_message(create_resp, "Invalid PKCS10 Data: Bad format") @testtools.testcase.attr('negative') def test_create_simple_csc_order_with_unsigned_pkcs10(self): test_model = order_models.OrderModel(**self.simple_cmc_data) test_model.meta['request_data'] = base64.b64encode( certutil.create_csr_that_has_not_been_signed()) create_resp, order_ref = self.behaviors.create_order(test_model) self.assertEqual(400, create_resp.status_code) self.assertIsNone(order_ref) error_description = json.loads(create_resp.content)['description'] self.assertIn("Invalid PKCS10 Data", error_description) @testtools.testcase.attr('negative') def test_create_simple_csc_order_with_pkcs10_signed_by_wrong_key(self): test_model = order_models.OrderModel(**self.simple_cmc_data) test_model.meta['request_data'] = base64.b64encode( certutil.create_csr_signed_with_wrong_key()) create_resp, order_ref = self.behaviors.create_order(test_model) self.assertEqual(400, create_resp.status_code) self.confirm_error_message( create_resp, "Invalid PKCS10 Data: Signing key incorrect" ) @testtools.testcase.attr('negative') @testtools.skipIf(not dogtag_imports_ok, "Dogtag imports not available") def test_create_simple_cmc_order_with_invalid_dogtag_profile(self): test_model = order_models.OrderModel(**self.simple_cmc_data) test_model.meta['request_data'] = base64.b64encode( certutil.create_good_csr()) test_model.meta['profile'] = 'invalidProfileID' test_model.meta['ca_id'] = self.get_dogtag_ca_id() create_resp, order_ref = self.behaviors.create_order(test_model) self.assertEqual(202, create_resp.status_code) 
self.assertIsNotNone(order_ref) order_resp = self.wait_for_order(order_ref) self.assertEqual('ERROR', order_resp.model.status) self.assertEqual('400', order_resp.model.error_status_code) self.assertIn('Problem with data in certificate request', order_resp.model.error_reason) self.assertIn('Profile not found', order_resp.model.error_reason) @testtools.testcase.attr('positive') @testtools.skipIf(not dogtag_imports_ok, "Dogtag imports not available") def test_create_simple_cmc_order_with_non_approved_dogtag_profile(self): test_model = order_models.OrderModel(**self.simple_cmc_data) test_model.meta['request_data'] = base64.b64encode( certutil.create_good_csr()) test_model.meta['profile'] = 'caTPSCert' test_model.meta['ca_id'] = self.get_dogtag_ca_id() create_resp, order_ref = self.behaviors.create_order(test_model) self.assertEqual(202, create_resp.status_code) self.assertIsNotNone(order_ref) order_resp = self.wait_for_order(order_ref) self.verify_pending_waiting_for_ca(order_resp) @testtools.testcase.attr('negative') def test_create_simple_cmc_order_with_missing_request(self): test_model = order_models.OrderModel(**self.simple_cmc_data) create_resp, order_ref = self.behaviors.create_order(test_model) self.assertEqual(create_resp.status_code, 400) self.assertIsNone(order_ref) self.confirm_error_message( create_resp, "Missing required metadata field for request_data" ) @testtools.testcase.attr('negative') def test_create_full_cmc_order(self): test_model = order_models.OrderModel(**self.full_cmc_data) test_model.meta['request_data'] = base64.b64encode( certutil.create_good_csr()) create_resp, order_ref = self.behaviors.create_order(test_model) self.assertEqual(create_resp.status_code, 400) self.assertIsNone(order_ref) self.confirm_error_message( create_resp, "Full CMC Requests are not yet supported." 
) @testtools.testcase.attr('negative') def test_create_cert_order_with_invalid_type(self): test_model = order_models.OrderModel(**self.simple_cmc_data) test_model.meta['request_data'] = base64.b64encode( certutil.create_good_csr()) test_model.meta['request_type'] = "invalid_type" create_resp, order_ref = self.behaviors.create_order(test_model) self.assertEqual(400, create_resp.status_code) self.confirm_error_message( create_resp, "Invalid Certificate Request Type" ) @testtools.testcase.attr('positive') def test_create_stored_key_order(self): test_model = order_models.OrderModel(**self.stored_key_data) test_model.meta['container_ref'] = ( self.create_asymmetric_key_container()) create_resp, order_ref = self.behaviors.create_order(test_model) self.assertEqual(202, create_resp.status_code) self.assertIsNotNone(order_ref) order_resp = self.behaviors.get_order(order_ref) self.verify_pending_waiting_for_ca(order_resp) @testtools.testcase.attr('positive') @testtools.skipIf(not dogtag_imports_ok, "Dogtag imports not available") def test_create_stored_key_order_with_dogtag_profile(self): test_model = order_models.OrderModel(**self.stored_key_data) test_model.meta['container_ref'] = ( self.create_asymmetric_key_container()) test_model.meta['profile'] = "caServerCert" test_model.meta['ca_id'] = self.get_dogtag_ca_id() create_resp, order_ref = self.behaviors.create_order(test_model) self.assertEqual(202, create_resp.status_code) self.assertIsNotNone(order_ref) order_resp = self.wait_for_order(order_ref) self.assertEqual('ACTIVE', order_resp.model.status) self.verify_cert_returned(order_resp, is_stored_key_type=True) @testtools.testcase.attr('negative') def test_create_stored_key_order_with_invalid_container_ref(self): test_model = order_models.OrderModel(**self.stored_key_data) test_model.meta['container_ref'] = INVALID_CONTAINER_REF create_resp, order_ref = self.behaviors.create_order(test_model) self.assertEqual(400, create_resp.status_code) self.confirm_error_message( 
create_resp, "Order creation issue seen - " "Invalid container: Bad Container Reference " + INVALID_CONTAINER_REF + "." ) @testtools.testcase.attr('negative') def test_create_stored_key_order_with_not_found_container_ref(self): test_model = order_models.OrderModel(**self.stored_key_data) test_model.meta['container_ref'] = NOT_FOUND_CONTAINER_REF create_resp, order_ref = self.behaviors.create_order(test_model) self.assertEqual(400, create_resp.status_code) self.confirm_error_message( create_resp, "Order creation issue seen - " "Invalid container: Container Not Found." ) @testtools.testcase.attr('negative') def test_create_stored_key_order_with_missing_container_ref(self): test_model = order_models.OrderModel(**self.stored_key_data) create_resp, order_ref = self.behaviors.create_order(test_model) self.assertEqual(400, create_resp.status_code) self.confirm_error_message( create_resp, "Missing required metadata field for container_ref" ) @testtools.testcase.attr('negative') def test_create_stored_key_order_with_unauthorized_container_ref(self): # TODO(alee) - Not sure how to do this pass @testtools.testcase.attr('negative') def test_create_stored_key_order_with_invalid_container_type(self): test_model = order_models.OrderModel(**self.stored_key_data) test_model.meta['container_ref'] = (self.create_generic_container()) create_resp, order_ref = self.behaviors.create_order(test_model) self.assertEqual(400, create_resp.status_code) self.confirm_error_message( create_resp, "Order creation issue seen - " "Invalid container: Container Wrong Type." 
) @testtools.testcase.attr('negative') def test_create_stored_key_order_with_container_secrets_inaccessible(self): # TODO(alee) Not sure how to do this pass @testtools.testcase.attr('negative') def test_create_stored_key_order_with_subject_dn_missing(self): test_model = order_models.OrderModel(**self.stored_key_data) test_model.meta['container_ref'] = ( self.create_asymmetric_key_container()) del test_model.meta['subject_dn'] create_resp, order_ref = self.behaviors.create_order(test_model) self.assertEqual(400, create_resp.status_code) self.confirm_error_message( create_resp, "Missing required metadata field for subject_dn" ) @testtools.testcase.attr('negative') def test_create_stored_key_order_with_subject_dn_invalid(self): test_model = order_models.OrderModel(**self.stored_key_data) test_model.meta['container_ref'] = ( self.create_asymmetric_key_container()) test_model.meta['subject_dn'] = "invalid_subject_dn" create_resp, order_ref = self.behaviors.create_order(test_model) self.assertEqual(400, create_resp.status_code) self.confirm_error_message( create_resp, "Invalid subject DN: invalid_subject_dn" ) @testtools.testcase.attr('negative') def test_create_stored_key_order_with_extensions(self): test_model = order_models.OrderModel(**self.stored_key_data) test_model.meta['container_ref'] = ( self.create_asymmetric_key_container()) test_model.meta['extensions'] = "any-extensions" create_resp, order_ref = self.behaviors.create_order(test_model) self.assertEqual(400, create_resp.status_code) self.confirm_error_message( create_resp, "Extensions are not yet supported. " "Specify a valid profile instead." 
) @testtools.testcase.attr('positive') @testtools.skipIf(not dogtag_imports_ok, "Dogtag imports not available") def test_create_stored_key_order_with_non_approved_dogtag_profile(self): test_model = order_models.OrderModel(**self.stored_key_data) test_model.meta['container_ref'] = ( self.create_asymmetric_key_container()) test_model.meta['profile'] = "caTPSCert" test_model.meta['ca_id'] = self.get_dogtag_ca_id() create_resp, order_ref = self.behaviors.create_order(test_model) self.assertEqual(202, create_resp.status_code) self.assertIsNotNone(order_ref) order_resp = self.wait_for_order(order_ref) self.verify_pending_waiting_for_ca(order_resp) @testtools.testcase.attr('negative') @testtools.skipIf(not dogtag_imports_ok, "Dogtag imports not available") def test_create_stored_key_order_with_invalid_dogtag_profile(self): test_model = order_models.OrderModel(**self.stored_key_data) test_model.meta['container_ref'] = ( self.create_asymmetric_key_container()) test_model.meta['profile'] = "invalidProfileID" test_model.meta['ca_id'] = self.get_dogtag_ca_id() create_resp, order_ref = self.behaviors.create_order(test_model) self.assertEqual(202, create_resp.status_code) self.assertIsNotNone(order_ref) order_resp = self.wait_for_order(order_ref) self.assertEqual('ERROR', order_resp.model.status) self.assertIn('Problem with data in certificate request', order_resp.model.error_reason) self.assertIn('Profile not found', order_resp.model.error_reason) @testtools.testcase.attr('positive') def test_create_cert_order_with_missing_request_type(self): # defaults to 'custom' type test_model = order_models.OrderModel(**self.dogtag_custom_data) test_model.meta['cert_request'] = base64.b64encode( certutil.create_good_csr()) test_model.meta['profile_id'] = 'caTPSCert' create_resp, order_ref = self.behaviors.create_order(test_model) self.assertEqual(202, create_resp.status_code) self.assertIsNotNone(order_ref) order_resp = self.behaviors.get_order(order_ref) 
self.verify_pending_waiting_for_ca(order_resp) @testtools.testcase.attr('positive') @testtools.skipIf(not dogtag_imports_ok, "Dogtag imports not available") def test_create_cert_order_with_missing_request_type_auto_enroll(self): # defaults to 'custom' type test_model = order_models.OrderModel(**self.dogtag_custom_data) test_model.meta['cert_request'] = base64.b64encode( certutil.create_good_csr()) create_resp, order_ref = self.behaviors.create_order(test_model) self.assertEqual(202, create_resp.status_code) self.assertIsNotNone(order_ref) order_resp = self.wait_for_order(order_ref) self.assertEqual('ACTIVE', order_resp.model.status) self.verify_cert_returned(order_resp) @testtools.testcase.attr('positive') @testtools.skipIf(not dogtag_imports_ok, "Dogtag imports not available") def test_create_custom_order_with_valid_dogtag_data(self): # defaults to 'custom' type test_model = order_models.OrderModel(**self.dogtag_custom_data) test_model.meta['cert_request'] = base64.b64encode( certutil.create_good_csr()) create_resp, order_ref = self.behaviors.create_order(test_model) self.assertEqual(create_resp.status_code, 202) self.assertIsNotNone(order_ref) order_resp = self.wait_for_order(order_ref) self.assertEqual('ACTIVE', order_resp.model.status) self.verify_cert_returned(order_resp) @testtools.testcase.attr('negative') @testtools.skipIf(not dogtag_imports_ok, "Dogtag imports not available") def test_create_custom_order_with_invalid_dogtag_data(self): # TODO(alee) this test is broken because Dogtag does not return the # correct type of exception, Fix this when Dogtag is fixed. 
test_model = order_models.OrderModel(**self.dogtag_custom_data) test_model.meta['cert_request'] = "invalid_data" create_resp, order_ref = self.behaviors.create_order(test_model) self.assertEqual(202, create_resp.status_code) self.assertIsNotNone(order_ref) order_resp = self.wait_for_order(order_ref) self.assertEqual('ERROR', order_resp.model.status) # TODO(alee) confirm substatus - data error seen @testtools.testcase.attr('positive') @testtools.skipIf(dogtag_imports_ok, "Non-Dogtag test only") def test_create_custom_order_for_generic_plugin(self): test_model = order_models.OrderModel(**self.dogtag_custom_data) test_model.meta['container_ref'] = ( self.create_asymmetric_key_container()) create_resp, order_ref = self.behaviors.create_order(test_model) self.assertEqual(202, create_resp.status_code) self.assertIsNotNone(order_ref) order_resp = self.behaviors.get_order(order_ref) self.assertEqual('PENDING', order_resp.model.status) barbican-2.0.0/devstack/0000775000567000056710000000000012701406024016213 5ustar jenkinsjenkins00000000000000barbican-2.0.0/devstack/gate_hook.sh0000775000567000056710000000136112701405673020524 0ustar jenkinsjenkins00000000000000#!/bin/bash # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
set -ex # Install barbican devstack integration export DEVSTACK_LOCAL_CONFIG="enable_plugin barbican https://git.openstack.org/openstack/barbican" $BASE/new/devstack-gate/devstack-vm-gate.sh barbican-2.0.0/devstack/local.conf.example0000664000567000056710000000062612701405673021623 0ustar jenkinsjenkins00000000000000[[local|localrc]] disable_all_services enable_plugin barbican https://git.openstack.org/openstack/barbican stable/liberty enable_service rabbit mysql key # This is to keep the token small for testing KEYSTONE_TOKEN_FORMAT=UUID # Modify passwords as needed DATABASE_PASSWORD=secretdatabase RABBIT_PASSWORD=secretrabbit ADMIN_PASSWORD=secretadmin SERVICE_PASSWORD=secretservice SERVICE_TOKEN=111222333444 barbican-2.0.0/devstack/barbican-vagrant/0000775000567000056710000000000012701406024021414 5ustar jenkinsjenkins00000000000000barbican-2.0.0/devstack/barbican-vagrant/install_devstack.sh0000664000567000056710000000067112701405673025317 0ustar jenkinsjenkins00000000000000#!/bin/bash export DEBIAN_FRONTEND=noninteractive sudo apt-get update sudo apt-get install -y python-pip python-dev libffi-dev libssl-dev git git clone https://github.com/openstack-dev/devstack.git git clone https://github.com/openstack/barbican.git cp barbican/devstack/local.conf.example devstack/local.conf sudo cp -R devstack/ /opt/stack/ sudo chown -R vagrant:vagrant /opt/stack/ echo "export SERVICE_HOST=\"localhost\"" >> .bashrcbarbican-2.0.0/devstack/barbican-vagrant/Vagrantfile0000664000567000056710000000122012701405673023605 0ustar jenkinsjenkins00000000000000# -*- mode: ruby -*- # vi: set ft=ruby : VAGRANTFILE_API_VERSION = "2" Vagrant.configure(VAGRANTFILE_API_VERSION) do |config| config.vm.box = "ubuntu/trusty64" # Barbican Ports config.vm.network "forwarded_port", guest: 9311, host: 9311 # Keystone Ports config.vm.network "forwarded_port", guest: 35357, host: 35357 config.vm.network "forwarded_port", guest: 5000, host: 5000 config.vm.provision "shell", path: "install_devstack.sh" # 
Create Synced Folder config.vm.synced_folder "./devstack", "/opt/stack", create: true config.vm.provider "virtualbox" do |v| v.name = "Devstack" v.memory = 2048 v.cpus = 2 end end barbican-2.0.0/devstack/settings0000664000567000056710000000173012701405673020010 0ustar jenkinsjenkins00000000000000# Defaults # -------- # Set up default directories BARBICAN_DIR=$DEST/barbican BARBICANCLIENT_DIR=$DEST/python-barbicanclient BARBICAN_CONF_DIR=${BARBICAN_CONF_DIR:-/etc/barbican} BARBICAN_CONF=$BARBICAN_CONF_DIR/barbican.conf BARBICAN_PASTE_CONF=$BARBICAN_CONF_DIR/barbican-api-paste.ini BARBICAN_API_LOG_DIR=$DEST/logs BARBICAN_AUTH_CACHE_DIR=${BARBICAN_AUTH_CACHE_DIR:-/var/cache/barbican} # Support potential entry-points console scripts BARBICAN_BIN_DIR=$(get_python_exec_prefix) # Set Barbican repository BARBICAN_REPO=${BARBICAN_REPO:-${GIT_BASE}/openstack/barbican.git} BARBICAN_BRANCH=${BARBICAN_BRANCH:-master} # Set client library repository BARBICANCLIENT_REPO=${BARBICANCLIENT_REPO:-${GIT_BASE}/openstack/python-barbicanclient.git} BARBICANCLIENT_BRANCH=${BARBICANCLIENT_BRANCH:-master} # Set host href BARBICAN_HOST_HREF=${BARBICAN_HOST_HREF:-http://${SERVICE_HOST}:9311} # Tell Tempest this project is present TEMPEST_SERVICES+=,barbican enable_service barbican barbican-2.0.0/devstack/lib/0000775000567000056710000000000012701406024016761 5ustar jenkinsjenkins00000000000000barbican-2.0.0/devstack/lib/barbican0000664000567000056710000003711412701405674020465 0ustar jenkinsjenkins00000000000000#!/usr/bin/env bash # Install and start **Barbican** service # To enable a minimal set of Barbican features, add the following to localrc: # enable_service barbican-svc barbican-retry # # Dependencies: # - functions # - OS_AUTH_URL for auth in api # - DEST set to the destination directory # - SERVICE_PASSWORD, SERVICE_PROJECT_NAME for auth in api # - STACK_USER service user # stack.sh # --------- # install_barbican # configure_barbican # init_barbican # start_barbican # stop_barbican # 
cleanup_barbican # Save trace setting XTRACE=$(set +o | grep xtrace) set +o xtrace # Functions # --------- # TODO(john-wood-w) These 'magic' functions are called by devstack to enable # a given service (so the name between 'is_' and '_enabled'). Currently the # Zuul infra gate configuration (at https://github.com/openstack-infra/project-config/blob/master/jenkins/jobs/barbican.yaml) # only enables the 'barbican' service. So the two functions below, for the two # services we wish to run, have to key off of that lone 'barbican' selection. # Once the Zuul config is updated to add these two services properly, then # these functions should be replaced by the single method below. # !!!! Special thanks to rm_work for figuring this out !!!! function is_barbican-retry_enabled { [[ ,${ENABLED_SERVICES} =~ ,"barbican" ]] && return 0 } function is_barbican-svc_enabled { [[ ,${ENABLED_SERVICES} =~ ,"barbican" ]] && return 0 } # TODO(john-wood-w) Replace the above two functions with the one below once # Zuul is update per above. 
## Test if any Barbican services are enabled ## is_barbican_enabled #function is_barbican_enabled { # [[ ,${ENABLED_SERVICES} =~ ,"barbican-" ]] && return 0 # return 1 #} # cleanup_barbican - Remove residual data files, anything left over from previous # runs that a clean run would need to clean up function cleanup_barbican { : } # configure_barbicanclient - Set config files, create data dirs, etc function configure_barbicanclient { setup_develop $BARBICANCLIENT_DIR } # configure_dogtag_plugin - Change config to use dogtag plugin function configure_dogtag_plugin { sudo openssl pkcs12 -in /root/.dogtag/pki-tomcat/ca_admin_cert.p12 -passin pass:PASSWORD -out $BARBICAN_CONF_DIR/kra_admin_cert.pem -nodes sudo chown $USER $BARBICAN_CONF_DIR/kra_admin_cert.pem iniset $BARBICAN_CONF dogtag_plugin dogtag_port 8373 iniset $BARBICAN_CONF secretstore enabled_secretstore_plugins dogtag_crypto iniset $BARBICAN_CONF certificate enabled_certificate_plugins dogtag } # configure_barbican - Set config files, create data dirs, etc function configure_barbican { setup_develop $BARBICAN_DIR [ ! -d $BARBICAN_CONF_DIR ] && sudo mkdir -m 755 -p $BARBICAN_CONF_DIR sudo chown $USER $BARBICAN_CONF_DIR [ ! -d $BARBICAN_API_LOG_DIR ] && sudo mkdir -m 755 -p $BARBICAN_API_LOG_DIR sudo chown $USER $BARBICAN_API_LOG_DIR [ ! 
-d $BARBICAN_CONF_DIR ] && sudo mkdir -m 755 -p $BARBICAN_CONF_DIR sudo chown $USER $BARBICAN_CONF_DIR # Copy the barbican config files to the config dir cp $BARBICAN_DIR/etc/barbican/barbican.conf $BARBICAN_CONF_DIR cp $BARBICAN_DIR/etc/barbican/barbican-api-paste.ini $BARBICAN_CONF_DIR cp -R $BARBICAN_DIR/etc/barbican/vassals $BARBICAN_CONF_DIR # Copy functional test config cp $BARBICAN_DIR/etc/barbican/barbican-functional.conf $BARBICAN_CONF_DIR # Set the logging to INFO iniset $BARBICAN_CONF DEFAULT verbose True # Do not set to DEBUG iniset $BARBICAN_CONF DEFAULT debug False # Set the host_href iniset $BARBICAN_CONF DEFAULT host_href "$BARBICAN_HOST_HREF" # Set the log file location iniset $BARBICAN_CONF DEFAULT log_file "$BARBICAN_API_LOG_DIR/barbican.log" # Format logging if [ "$LOG_COLOR" == "True" ] && [ "$SYSLOG" == "False" ]; then setup_colorized_logging $BARBICAN_CONF DEFAULT project user fi # Install the policy file for the API server cp $BARBICAN_DIR/etc/barbican/policy.json $BARBICAN_CONF_DIR iniset $BARBICAN_CONF DEFAULT policy_file $BARBICAN_CONF_DIR/policy.json # Set the database connection url iniset $BARBICAN_CONF DEFAULT sql_connection `database_connection_url barbican` # Increase default request buffer size, keystone auth PKI tokens can be very long iniset $BARBICAN_CONF_DIR/vassals/barbican-api.ini uwsgi buffer-size 65535 # Rabbit settings if is_service_enabled rabbit; then iniset $BARBICAN_CONF 'secrets' broker rabbit://guest:$RABBIT_PASSWORD@$RABBIT_HOST else echo_summary "Barbican requires that the RabbitMQ service is enabled" fi ## Set up keystone # Turn on the middleware iniset $BARBICAN_PASTE_CONF 'pipeline:barbican_api' pipeline 'barbican-api-keystone' # Set the keystone parameters iniset $BARBICAN_PASTE_CONF 'filter:keystone_authtoken' auth_protocol $KEYSTONE_AUTH_PROTOCOL iniset $BARBICAN_PASTE_CONF 'filter:keystone_authtoken' auth_host $KEYSTONE_AUTH_HOST iniset $BARBICAN_PASTE_CONF 'filter:keystone_authtoken' auth_port 
$KEYSTONE_AUTH_PORT iniset $BARBICAN_PASTE_CONF 'filter:keystone_authtoken' admin_user barbican iniset $BARBICAN_PASTE_CONF 'filter:keystone_authtoken' admin_password $SERVICE_PASSWORD iniset $BARBICAN_PASTE_CONF 'filter:keystone_authtoken' admin_tenant_name $SERVICE_PROJECT_NAME iniset $BARBICAN_PASTE_CONF 'filter:keystone_authtoken' signing_dir $BARBICAN_AUTH_CACHE_DIR } # init_barbican - Initialize etc. function init_barbican { # Create cache dir sudo mkdir -p $BARBICAN_AUTH_CACHE_DIR sudo chown $STACK_USER $BARBICAN_AUTH_CACHE_DIR rm -f $BARBICAN_AUTH_CACHE_DIR/* recreate_database barbican utf8 } # install_barbican - Collect source and prepare function install_barbican { # Install package requirements if is_fedora; then install_package sqlite-devel openldap-devel fi # TODO(ravips): We need this until barbican gets into devstack setup_develop $BARBICAN_DIR pip_install 'uwsgi' } # install_barbicanclient - Collect source and prepare function install_barbicanclient { git_clone $BARBICANCLIENT_REPO $BARBICANCLIENT_DIR $BARBICANCLIENT_BRANCH setup_develop $BARBICANCLIENT_DIR } # start_barbican - Start running processes, including screen function start_barbican { # Start the Barbican service up. run_process barbican-svc "uwsgi --master --emperor $BARBICAN_CONF_DIR/vassals" # Pause while the barbican-svc populates the database, otherwise the retry # service below might try to do this at the same time, leading to race # conditions. sleep 10 # Start the retry scheduler server up. 
run_process barbican-retry "$BARBICAN_BIN_DIR/barbican-retry --config-file=$BARBICAN_CONF_DIR/barbican-api.conf" } # stop_barbican - Stop running processes function stop_barbican { # This will eventually be refactored to work like # Solum and Manila (script to kick off a wsgiref server) # For now, this will stop uWSGI rather than have it hang killall -9 uwsgi # This cleans up the PID file, but uses pkill so Barbican # uWSGI emperor process doesn't actually stop stop_process barbican-svc stop_process barbican-retry } function get_id { echo `"$@" | awk '/ id / { print $4 }'` } function create_barbican_accounts { # # Setup Default Admin User # SERVICE_PROJECT=$(openstack project list | awk "/ $SERVICE_PROJECT_NAME / { print \$2 }") ADMIN_ROLE=$(openstack role list | awk "/ admin / { print \$2 }") BARBICAN_USER=$(openstack user create \ --password "$SERVICE_PASSWORD" \ --project $SERVICE_PROJECT \ --email "barbican@example.com" \ barbican \ | grep " id " | get_field 2) openstack role add --project $SERVICE_PROJECT \ --user $BARBICAN_USER \ $ADMIN_ROLE # # Setup Default service-admin User # SERVICE_ADMIN=$(get_id openstack user create \ --password "$SERVICE_PASSWORD" \ --email "service-admin@example.com" \ "service-admin") SERVICE_ADMIN_ROLE=$(get_id openstack role create \ "key-manager:service-admin") openstack role add \ --user "$SERVICE_ADMIN" \ --project "$SERVICE_PROJECT" \ "$SERVICE_ADMIN_ROLE" # # Setup RBAC User Projects and Roles # PASSWORD="barbican" PROJECT_A_ID=$(get_id openstack project create "project_a") PROJECT_B_ID=$(get_id openstack project create "project_b") ROLE_ADMIN_ID=$(get_id openstack role show admin) ROLE_CREATOR_ID=$(get_id openstack role create "creator") ROLE_OBSERVER_ID=$(get_id openstack role create "observer") ROLE_AUDIT_ID=$(get_id openstack role create "audit") # # Setup RBAC Admin of Project A # USER_ID=$(get_id openstack user create \ --password "$PASSWORD" \ --email "admin_a@example.net" \ "project_a_admin") openstack role add \ 
--user "$USER_ID" \ --project "$PROJECT_A_ID" \ "$ROLE_ADMIN_ID" # # Setup RBAC Creator of Project A # USER_ID=$(get_id openstack user create \ --password "$PASSWORD" \ --email "creator_a@example.net" \ "project_a_creator") openstack role add \ --user "$USER_ID" \ --project "$PROJECT_A_ID" \ "$ROLE_CREATOR_ID" # # Setup RBAC Observer of Project A # USER_ID=$(get_id openstack user create \ --password "$PASSWORD" \ --email "observer_a@example.net" \ "project_a_observer") openstack role add \ --user "$USER_ID" \ --project "$PROJECT_A_ID" \ "$ROLE_OBSERVER_ID" # # Setup RBAC Auditor of Project A # USER_ID=$(get_id openstack user create \ --password "$PASSWORD" \ --email "auditor_a@example.net" \ "project_a_auditor") openstack role add \ --user "$USER_ID" \ --project "$PROJECT_A_ID" \ "$ROLE_AUDIT_ID" # # Setup RBAC Admin of Project B # USER_ID=$(get_id openstack user create \ --password "$PASSWORD" \ --email "admin_b@example.net" \ "project_b_admin") openstack role add \ --user "$USER_ID" \ --project "$PROJECT_B_ID" \ "$ROLE_ADMIN_ID" # # Setup RBAC Creator of Project B # USER_ID=$(get_id openstack user create \ --password "$PASSWORD" \ --email "creator_b@example.net" \ "project_b_creator") openstack role add \ --user "$USER_ID" \ --project "$PROJECT_B_ID" \ "$ROLE_CREATOR_ID" # # Setup RBAC Observer of Project B # USER_ID=$(get_id openstack user create \ --password "$PASSWORD" \ --email "observer_b@example.net" \ "project_b_observer") openstack role add \ --user "$USER_ID" \ --project "$PROJECT_B_ID" \ "$ROLE_OBSERVER_ID" # # Setup RBAC auditor of Project B # USER_ID=$(get_id openstack user create \ --password "$PASSWORD" \ --email "auditor_b@example.net" \ "project_b_auditor") openstack role add \ --user "$USER_ID" \ --project "$PROJECT_B_ID" \ "$ROLE_AUDIT_ID" # # Setup Barbican Endpoint # if [[ "$KEYSTONE_CATALOG_BACKEND" = 'sql' ]]; then BARBICAN_SERVICE=$(openstack service create \ --name barbican \ --description "Barbican Service" \ 'key-manager' \ | grep " id " 
| get_field 2) openstack endpoint create \ --os-identity-api-version 3 \ --region RegionOne \ $BARBICAN_SERVICE \ public "http://$SERVICE_HOST:9311" openstack endpoint create \ --os-identity-api-version 3 \ --region RegionOne \ $BARBICAN_SERVICE \ internal "http://$SERVICE_HOST:9311" fi } # Dogtag functions # ---------------- function install_389_directory_server { # Make sure that 127.0.0.1 resolves to localhost.localdomain (fqdn) sudo sed -i 's/127.0.0.1[ \t]*localhost localhost.localdomain/127.0.0.1\tlocalhost.localdomain localhost/' /etc/hosts install_package 389-ds-base sudo mkdir -p /etc/389-ds # Instead of spawning a sub-shell to cat this whole chunk into the desired # file. I just cat it into a temporary file that this user will have access # to, and subsequently use elevated privileges to move the already made # file where we need it to be. cat > .tmp.setup.inf < .tmp.ca.cfg < .tmp.kra.cfg < [GITREF] where is the URL of a Barbican repository [GITREF] is an optional git ref (branch/ref/tag). The default is master. For example enable_plugin barbican https://git.openstack.org/openstack/barbican stable/liberty For more information, see the "Externally Hosted Plugins" section of http://docs.openstack.org/developer/devstack/plugins.html barbican-2.0.0/etc/0000775000567000056710000000000012701406024015162 5ustar jenkinsjenkins00000000000000barbican-2.0.0/etc/init/0000775000567000056710000000000012701406024016125 5ustar jenkinsjenkins00000000000000barbican-2.0.0/etc/init/barbican.conf0000664000567000056710000000060112701405673020543 0ustar jenkinsjenkins00000000000000# Barbican upstart script # Used in rpm build. Keep in sync with debian/barbican-api.upstart description "Barbican Key Management APIs" start on runlevel [2345] stop on runlevel [06] #TODO(jwood) Parameterize the stats port address. 
script uwsgi --master --die-on-term --emperor /etc/barbican/vassals --logto /var/log/barbican/barbican-api.log --stats localhost:9314 end script barbican-2.0.0/etc/init/barbican-keystone-listener.conf0000664000567000056710000000051412701405673024230 0ustar jenkinsjenkins00000000000000# Barbican Keystone Listener upstart script # Used in rpm build. Keep in sync with debian/barbican-keystone-listener.upstart description "Barbican Keystone Event Listeners" start on runlevel [2345] stop on runlevel [06] script barbican-keystone-listener.py >> /var/log/barbican/barbican_keystone_listener.log 2>&1 end script barbican-2.0.0/etc/init/barbican-worker.conf0000664000567000056710000000044312701405673022056 0ustar jenkinsjenkins00000000000000# Barbican Worker node upstart script # Used in rpm build. Keep in sync with debian/barbican-worker.upstart description "Barbican Key Management Workers" start on runlevel [2345] stop on runlevel [06] script barbican-worker.py >> /var/log/barbican/barbican_worker.log 2>&1 end script barbican-2.0.0/etc/logrotate.d/0000775000567000056710000000000012701406024017404 5ustar jenkinsjenkins00000000000000barbican-2.0.0/etc/logrotate.d/barbican-api0000664000567000056710000000032012701405673021643 0ustar jenkinsjenkins00000000000000# logrotate.d configuration # Used in rpm build. 
Keep in sync with debian/barbican-api.logrotate /var/log/barbican/barbican-api.log { daily missingok compress delaycompress notifempty } barbican-2.0.0/etc/barbican/0000775000567000056710000000000012701406024016723 5ustar jenkinsjenkins00000000000000barbican-2.0.0/etc/barbican/barbican-functional.conf0000664000567000056710000000415112701405674023506 0ustar jenkinsjenkins00000000000000[DEFAULT] [identity] # Replace these with values that represent your identity configuration uri=http://localhost:5000/v3 version=v3 username=admin project_name=admin password=secretadmin domain_name=Default service_admin=service-admin service_admin_project=service service_admin_password=secretservice [rbac_users] # Replace these values that represent additional users for RBAC testing project_a=project_a project_b=project_b # users for project_a admin_a=project_a_admin admin_a_password=barbican creator_a=project_a_creator creator_a_password=barbican observer_a=project_a_observer observer_a_password=barbican auditor_a=project_a_auditor auditor_a_password=barbican # users for project_b admin_b=project_b_admin admin_b_password=barbican creator_b=project_b_creator creator_b_password=barbican observer_b=project_b_observer observer_b_password=barbican auditor_b=project_b_auditor auditor_b_password=barbican [keymanager] # For selecting service endpoint from service catalog, # following attributes are used to find it. #service_type=key-manager #service_name=barbican #region_name=RegionOne #endpoint_type=public # use this to increase the timeout (in seconds) when debugging API calls #timeout=10 # use this to run the functional tests against a # different barbican server than the one that is # specified in the service catalog. To use what is # in the service catalog, just comment this out # or leave it blank. # override_url=http://localhost:9311 # override_url_version=v1 [quotas] # For each resource, the default maximum number that can be used for # a project is set below. 
This value can be overridden for each # project through the API. A negative value means no limit. A zero # value effectively disables the resource. # These should be set identically to the system under test. # default number of secrets allowed per project quota_secrets = -1 # default number of orders allowed per project quota_orders = -1 # default number of containers allowed per project quota_containers = -1 # default number of consumers allowed per project quota_consumers = -1 # default number of CAs allowed per project quota_cas = -1 barbican-2.0.0/etc/barbican/barbican-api-paste.ini0000664000567000056710000000452512701405673023065 0ustar jenkinsjenkins00000000000000[composite:main] use = egg:Paste#urlmap /: barbican_version /v1: barbican_api # Use this pipeline for Barbican API - versions no authentication [pipeline:barbican_version] pipeline = cors versionapp # Use this pipeline for Barbican API - DEFAULT no authentication [pipeline:barbican_api] pipeline = cors unauthenticated-context apiapp #Use this pipeline to activate a repoze.profile middleware and HTTP port, # to provide profiling information for the REST API processing. 
[pipeline:barbican-profile] pipeline = cors unauthenticated-context egg:Paste#cgitb egg:Paste#httpexceptions profile apiapp #Use this pipeline for keystone auth [pipeline:barbican-api-keystone] pipeline = cors keystone_authtoken context apiapp #Use this pipeline for keystone auth with audit feature [pipeline:barbican-api-keystone-audit] pipeline = keystone_authtoken context audit apiapp [app:apiapp] paste.app_factory = barbican.api.app:create_main_app [app:versionapp] paste.app_factory = barbican.api.app:create_version_app [filter:simple] paste.filter_factory = barbican.api.middleware.simple:SimpleFilter.factory [filter:unauthenticated-context] paste.filter_factory = barbican.api.middleware.context:UnauthenticatedContextMiddleware.factory [filter:context] paste.filter_factory = barbican.api.middleware.context:ContextMiddleware.factory [filter:audit] paste.filter_factory = keystonemiddleware.audit:filter_factory audit_map_file = /etc/barbican/api_audit_map.conf [filter:keystone_authtoken] paste.filter_factory = keystonemiddleware.auth_token:filter_factory #need ability to re-auth a token, thus admin url identity_uri = http://localhost:35357 admin_tenant_name = service admin_user = barbican admin_password = orange auth_version = v3.0 #delay failing perhaps to log the unauthorized request in barbican .. #delay_auth_decision = true # signing_dir is configurable, but the default behavior of the authtoken # middleware should be sufficient. It will create a temporary directory # for the user the barbican process is running as. 
#signing_dir = /var/barbican/keystone-signing [filter:profile] use = egg:repoze.profile log_filename = myapp.profile cachegrind_filename = cachegrind.out.myapp discard_first_request = true path = /__profile__ flush_at_shutdown = true unwind = false [filter:cors] paste.filter_factory = oslo_middleware.cors:filter_factory oslo_config_project = barbican barbican-2.0.0/etc/barbican/barbican.conf0000664000567000056710000003173312701405673021353 0ustar jenkinsjenkins00000000000000[DEFAULT] # Show more verbose log output (sets INFO log level output) verbose = True # Show debugging output in logs (sets DEBUG log level output) #debug = True # Address to bind the API server bind_host = 0.0.0.0 # Port to bind the API server to bind_port = 9311 # Host name, for use in HATEOAS-style references # Note: Typically this would be the load balanced endpoint that clients would use # communicate back with this service. host_href = http://localhost:9311 # Log to this file. Make sure you do not set the same log # file for both the API and registry servers! #log_file = /var/log/barbican/api.log # Backlog requests when creating socket backlog = 4096 # TCP_KEEPIDLE value in seconds when creating socket. # Not supported on OS X. #tcp_keepidle = 600 # Maximum allowed http request size against the barbican-api max_allowed_secret_in_bytes = 10000 max_allowed_request_size_in_bytes = 1000000 # SQLAlchemy connection string for the reference implementation # registry server. Any valid SQLAlchemy connection string is fine. 
# See: http://www.sqlalchemy.org/docs/05/reference/sqlalchemy/connections.html#sqlalchemy.create_engine # Uncomment this for local dev, putting db in project directory: #sql_connection = sqlite:///barbican.sqlite # Note: For absolute addresses, use '////' slashes after 'sqlite:' # Uncomment for a more global development environment sql_connection = sqlite:////var/lib/barbican/barbican.sqlite # Period in seconds after which SQLAlchemy should reestablish its connection # to the database. # # MySQL uses a default `wait_timeout` of 8 hours, after which it will drop # idle connections. This can result in 'MySQL Gone Away' exceptions. If you # notice this, you can lower this value to ensure that SQLAlchemy reconnects # before MySQL can drop the connection. sql_idle_timeout = 3600 # Accepts a class imported from the sqlalchemy.pool module, and handles the # details of building the pool for you. If commented out, SQLAlchemy # will select based on the database dialect. Other options are QueuePool # (for SQLAlchemy-managed connections) and NullPool (to disabled SQLAlchemy # management of connections). # See http://docs.sqlalchemy.org/en/latest/core/pooling.html for more details. #sql_pool_class = QueuePool # Show SQLAlchemy pool-related debugging output in logs (sets DEBUG log level # output) if specified. #sql_pool_logging = True # Size of pool used by SQLAlchemy. This is the largest number of connections # that will be kept persistently in the pool. Can be set to 0 to indicate no # size limit. To disable pooling, use a NullPool with sql_pool_class instead. # Comment out to allow SQLAlchemy to select the default. #sql_pool_size = 5 # The maximum overflow size of the pool used by SQLAlchemy. When the number of # checked-out connections reaches the size set in sql_pool_size, additional # connections will be returned up to this limit. It follows then that the # total number of simultaneous connections the pool will allow is # sql_pool_size + sql_pool_max_overflow. 
Can be set to -1 to indicate no # overflow limit, so no limit will be placed on the total number of concurrent # connections. Comment out to allow SQLAlchemy to select the default. #sql_pool_max_overflow = 10 # Default page size for the 'limit' paging URL parameter. default_limit_paging = 10 # Maximum page size for the 'limit' paging URL parameter. max_limit_paging = 100 # Role used to identify an authenticated user as administrator #admin_role = admin # Allow unauthenticated users to access the API with read-only # privileges. This only applies when using ContextMiddleware. #allow_anonymous_access = False # Allow access to version 1 of barbican api #enable_v1_api = True # Allow access to version 2 of barbican api #enable_v2_api = True # ================= SSL Options =============================== # Certificate file to use when starting API server securely #cert_file = /path/to/certfile # Private key file to use when starting API server securely #key_file = /path/to/keyfile # CA certificate file to use to verify connecting clients #ca_file = /path/to/cafile # ================= Security Options ========================== # AES key for encrypting store 'location' metadata, including # -- if used -- Swift or S3 credentials # Should be set to a random string of length 16, 24 or 32 bytes #metadata_encryption_key = <16, 24 or 32 char registry metadata key> # ================= Queue Options - oslo.messaging ========================== # Rabbit and HA configuration: ampq_durable_queues = True rabbit_userid=guest rabbit_password=guest rabbit_ha_queues = True rabbit_port=5672 # For HA, specify queue nodes in cluster, comma delimited: # For example: rabbit_hosts=192.168.50.8:5672, 192.168.50.9:5672 rabbit_hosts=localhost:5672 # For HA, specify queue nodes in cluster as 'user@host:5672', comma delimited, ending with '/offset': # For example: transport_url = rabbit://guest@192.168.50.8:5672,guest@192.168.50.9:5672/ # DO NOT USE THIS, due to '# FIXME(markmc): support multiple 
hosts' in oslo/messaging/_drivers/amqpdriver.py # transport_url = rabbit://guest@localhost:5672/ # oslo notification driver for sending audit events via audit middleware. # Meaningful only when middleware is enabled in barbican paste ini file. # This is oslo config MultiStrOpt so can be defined multiple times in case # there is need to route audit event to messaging as well as log. # notification_driver = messagingv2 # notification_driver = log # ======== OpenStack policy - oslo_policy =============== [oslo_policy] # ======== OpenStack policy integration # JSON file representing policy (string value) policy_file=/etc/barbican/policy.json # Rule checked when requested rule is not found (string value) policy_default_rule=default # ================= Queue Options - Application ========================== [queue] # Enable queuing asynchronous messaging. # Set false to invoke worker tasks synchronously (i.e. no-queue standalone mode) enable = False # Namespace for the queue namespace = 'barbican' # Topic for the queue topic = 'barbican.workers' # Version for the task API version = '1.1' # Server name for RPC service server_name = 'barbican.queue' # Number of asynchronous worker processes. # When greater than 1, then that many additional worker processes are # created for asynchronous worker functionality. asynchronous_workers = 1 # ================= Retry/Scheduler Options ========================== [retry_scheduler] # Seconds (float) to wait between starting retry scheduler initial_delay_seconds = 10.0 # Seconds (float) to wait between starting retry scheduler periodic_interval_max_seconds = 10.0 # ====================== Quota Options =============================== [quotas] # For each resource, the default maximum number that can be used for # a project is set below. This value can be overridden for each # project through the API. A negative value means no limit. A zero # value effectively disables the resource. 
# default number of secrets allowed per project quota_secrets = -1 # default number of orders allowed per project quota_orders = -1 # default number of containers allowed per project quota_containers = -1 # default number of consumers allowed per project quota_consumers = -1 # default number of CAs allowed per project quota_cas = -1 # ================= Keystone Notification Options - Application =============== [keystone_notifications] # Keystone notification functionality uses transport related configuration # from barbican common configuration as defined under # 'Queue Options - oslo.messaging' comments. # The HA related configuration is also shared with notification server. # True enables keystone notification listener functionality. enable = False # The default exchange under which topics are scoped. # May be overridden by an exchange name specified in the transport_url option. control_exchange = 'openstack' # Keystone notification queue topic name. # This name needs to match one of values mentioned in Keystone deployment's # 'notification_topics' configuration e.g. # notification_topics=notifications, barbican_notifications # Multiple servers may listen on a topic and messages will be dispatched to one # of the servers in a round-robin fashion. That's why Barbican service should # have its own dedicated notification queue so that it receives all of Keystone # notifications. topic = 'notifications' # True enables requeue feature in case of notification processing error. # Enable this only when underlying transport supports this feature. allow_requeue = False # Version of tasks invoked via notifications version = '1.0' # Define the number of max threads to be used for notification server # processing functionality. 
thread_pool_size = 10 # ================= Secret Store Plugin =================== [secretstore] namespace = barbican.secretstore.plugin enabled_secretstore_plugins = store_crypto # ================= Crypto plugin =================== [crypto] namespace = barbican.crypto.plugin enabled_crypto_plugins = simple_crypto [simple_crypto_plugin] # the kek should be a 32-byte value which is base64 encoded kek = 'YWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXoxMjM0NTY=' [dogtag_plugin] pem_path = '/etc/barbican/kra_admin_cert.pem' dogtag_host = localhost dogtag_port = 8443 nss_db_path = '/etc/barbican/alias' nss_db_path_ca = '/etc/barbican/alias-ca' nss_password = 'password123' simple_cmc_profile = 'caOtherCert' ca_expiration_time = 1 plugin_working_dir = '/etc/barbican/dogtag' [p11_crypto_plugin] # Path to vendor PKCS11 library library_path = '/usr/lib/libCryptoki2_64.so' # Password to login to PKCS11 session login = 'mypassword' # Label to identify master KEK in the HSM (must not be the same as HMAC label) mkek_label = 'an_mkek' # Length in bytes of master KEK mkek_length = 32 # Label to identify HMAC key in the HSM (must not be the same as MKEK label) hmac_label = 'my_hmac_label' # HSM Slot id (Should correspond to a configured PKCS11 slot). Default: 1 # slot_id = 1 # Enable Read/Write session with the HSM? 
# rw_session = True # Length of Project KEKs to create # pkek_length = 32 # How long to cache unwrapped Project KEKs # pkek_cache_ttl = 900 # Max number of items in pkek cache # pkek_cache_limit = 100 # ================== KMIP plugin ===================== [kmip_plugin] username = 'admin' password = 'password' host = localhost port = 5696 keyfile = '/path/to/certs/cert.key' certfile = '/path/to/certs/cert.crt' ca_certs = '/path/to/certs/LocalCA.crt' # ================= Certificate plugin =================== [certificate] namespace = barbican.certificate.plugin enabled_certificate_plugins = simple_certificate enabled_certificate_plugins = snakeoil_ca [certificate_event] namespace = barbican.certificate.event.plugin enabled_certificate_event_plugins = simple_certificate_event [snakeoil_ca_plugin] ca_cert_path = /etc/barbican/snakeoil-ca.crt ca_cert_key_path = /etc/barbican/snakeoil-ca.key ca_cert_chain_path = /etc/barbican/snakeoil-ca.chain ca_cert_pkcs7_path = /etc/barbican/snakeoil-ca.p7b subca_cert_key_directory=/etc/barbican/snakeoil-cas [cors] # # From oslo.middleware.cors # # Indicate whether this resource may be shared with the domain # received in the requests "origin" header. (list value) #allowed_origin = # Indicate that the actual request can include user credentials # (boolean value) #allow_credentials = true # Indicate which headers are safe to expose to the API. Defaults to # HTTP Simple Headers. (list value) #expose_headers = X-Auth-Token, X-Openstack-Request-Id, X-Project-Id, X-Identity-Status, X-User-Id, X-Storage-Token, X-Domain-Id, X-User-Domain-Id, X-Project-Domain-Id, X-Roles # Maximum cache age of CORS preflight requests. (integer value) #max_age = 3600 # Indicate which methods can be used during the actual request. (list # value) #allow_methods = GET,PUT,POST,DELETE,PATCH # Indicate which header field names may be used during the actual # request. 
(list value) #allow_headers = X-Auth-Token, X-Openstack-Request-Id, X-Project-Id, X-Identity-Status, X-User-Id, X-Storage-Token, X-Domain-Id, X-User-Domain-Id, X-Project-Domain-Id, X-Roles [cors.subdomain] # # From oslo.middleware.cors # # Indicate whether this resource may be shared with the domain # received in the requests "origin" header. (list value) #allowed_origin = # Indicate that the actual request can include user credentials # (boolean value) #allow_credentials = true # Indicate which headers are safe to expose to the API. Defaults to # HTTP Simple Headers. (list value) #expose_headers = X-Auth-Token, X-Openstack-Request-Id, X-Project-Id, X-Identity-Status, X-User-Id, X-Storage-Token, X-Domain-Id, X-User-Domain-Id, X-Project-Domain-Id, X-Roles # Maximum cache age of CORS preflight requests. (integer value) #max_age = 3600 # Indicate which methods can be used during the actual request. (list # value) #allow_methods = GET,PUT,POST,DELETE,PATCH # Indicate which header field names may be used during the actual # request. 
(list value) #allow_headers = X-Auth-Token, X-Openstack-Request-Id, X-Project-Id, X-Identity-Status, X-User-Id, X-Storage-Token, X-Domain-Id, X-User-Domain-Id, X-Project-Domain-Id, X-Roles barbican-2.0.0/etc/barbican/policy.json0000664000567000056710000001152612701405673021133 0ustar jenkinsjenkins00000000000000{ "admin": "role:admin", "observer": "role:observer", "creator": "role:creator", "audit": "role:audit", "service_admin": "role:key-manager:service-admin", "admin_or_user_does_not_work": "project_id:%(project_id)s", "admin_or_user": "rule:admin or project_id:%(project_id)s", "admin_or_creator": "rule:admin or rule:creator", "all_but_audit": "rule:admin or rule:observer or rule:creator", "all_users": "rule:admin or rule:observer or rule:creator or rule:audit or rule:service_admin", "secret_project_match": "project:%(target.secret.project_id)s", "secret_acl_read": "'read':%(target.secret.read)s", "secret_private_read": "'False':%(target.secret.read_project_access)s", "secret_creator_user": "user:%(target.secret.creator_id)s", "container_project_match": "project:%(target.container.project_id)s", "container_acl_read": "'read':%(target.container.read)s", "container_private_read": "'False':%(target.container.read_project_access)s", "container_creator_user": "user:%(target.container.creator_id)s", "secret_non_private_read": "rule:all_users and rule:secret_project_match and not rule:secret_private_read", "secret_decrypt_non_private_read": "rule:all_but_audit and rule:secret_project_match and not rule:secret_private_read", "container_non_private_read": "rule:all_users and rule:container_project_match and not rule:container_private_read", "secret_project_admin": "rule:admin and rule:secret_project_match", "secret_project_creator": "rule:creator and rule:secret_project_match and rule:secret_creator_user", "container_project_admin": "rule:admin and rule:container_project_match", "container_project_creator": "rule:creator and rule:container_project_match and 
rule:container_creator_user", "version:get": "@", "secret:decrypt": "rule:secret_decrypt_non_private_read or rule:secret_project_creator or rule:secret_project_admin or rule:secret_acl_read", "secret:get": "rule:secret_non_private_read or rule:secret_project_creator or rule:secret_project_admin or rule:secret_acl_read", "secret:put": "rule:admin_or_creator and rule:secret_project_match", "secret:delete": "rule:admin and rule:secret_project_match", "secrets:post": "rule:admin_or_creator", "secrets:get": "rule:all_but_audit", "orders:post": "rule:admin_or_creator", "orders:get": "rule:all_but_audit", "order:get": "rule:all_users", "order:put": "rule:admin_or_creator", "order:delete": "rule:admin", "consumer:get": "rule:all_users", "consumers:get": "rule:all_users", "consumers:post": "rule:admin", "consumers:delete": "rule:admin", "containers:post": "rule:admin_or_creator", "containers:get": "rule:all_but_audit", "container:get": "rule:container_non_private_read or rule:container_project_creator or rule:container_project_admin or rule:container_acl_read", "container:delete": "rule:admin", "transport_key:get": "rule:all_users", "transport_key:delete": "rule:admin", "transport_keys:get": "rule:all_users", "transport_keys:post": "rule:admin", "certificate_authorities:get_limited": "rule:all_users", "certificate_authorities:get_all": "rule:admin", "certificate_authorities:post": "rule:admin", "certificate_authorities:get_preferred_ca": "rule:all_users", "certificate_authorities:get_global_preferred_ca": "rule:service_admin", "certificate_authorities:unset_global_preferred": "rule:service_admin", "certificate_authority:delete": "rule:admin", "certificate_authority:get": "rule:all_users", "certificate_authority:get_cacert": "rule:all_users", "certificate_authority:get_ca_cert_chain": "rule:all_users", "certificate_authority:get_projects": "rule:service_admin", "certificate_authority:add_to_project": "rule:admin", "certificate_authority:remove_from_project": "rule:admin", 
"certificate_authority:set_preferred": "rule:admin", "certificate_authority:set_global_preferred": "rule:service_admin", "secret_acls:put_patch": "rule:secret_project_admin or rule:secret_project_creator", "secret_acls:delete": "rule:secret_project_admin or rule:secret_project_creator", "secret_acls:get": "rule:all_but_audit and rule:secret_project_match", "container_acls:put_patch": "rule:container_project_admin or rule:container_project_creator", "container_acls:delete": "rule:container_project_admin or rule:container_project_creator", "container_acls:get": "rule:all_but_audit and rule:container_project_match", "quotas:get": "rule:all_users", "project_quotas:get": "rule:service_admin", "project_quotas:put": "rule:service_admin", "project_quotas:delete": "rule:service_admin", "secret_meta:get": "rule:all_but_audit", "secret_meta:post": "rule:admin_or_creator", "secret_meta:put": "rule:admin_or_creator", "secret_meta:delete": "rule:admin_or_creator" } barbican-2.0.0/etc/barbican/vassals/0000775000567000056710000000000012701406024020377 5ustar jenkinsjenkins00000000000000barbican-2.0.0/etc/barbican/vassals/barbican-api.ini0000664000567000056710000000033612701405673023423 0ustar jenkinsjenkins00000000000000[uwsgi] socket = :9311 protocol = http processes = 1 lazy = true vacuum = true no-default-app = true memory-report = true plugins = python paste = config:/etc/barbican/barbican-api-paste.ini add-header = Connection: close barbican-2.0.0/etc/barbican/api_audit_map.conf0000664000567000056710000000124212701405673022376 0ustar jenkinsjenkins00000000000000[DEFAULT] # default target endpoint type # should match the endpoint type defined in service catalog target_endpoint_type = key-manager # map urls ending with specific text to a unique action # Don't need custom mapping for other resource operations # Note: action should match action names defined in CADF taxonomy [custom_actions] acl/get = read # path of api requests for CADF target typeURI # Just need to include top 
resource path to identify class of resources [path_keywords] secrets= containers= orders= cas=None quotas= project-quotas= # map endpoint type defined in service catalog to CADF typeURI [service_endpoints] key-manager = service/security/keymanagerbarbican-2.0.0/babel.cfg0000664000567000056710000000002012701405673016136 0ustar jenkinsjenkins00000000000000[python: **.py] barbican-2.0.0/.coveragerc0000664000567000056710000000053412701405673016543 0ustar jenkinsjenkins00000000000000[run] branch = True omit = etc/*,setup.py,*egg*,.tox/*,barbican/tests/*,*barbican/openstack/*, functionaltests/*,contrib/*, barbican/model/migration/alembic_migrations/versions/*, barbican/plugin/dogtag.py, barbican/plugin/symantec.py [report] ignore_errors = True exclude_lines = pragma: no cover @abc.abstractmethod barbican-2.0.0/setup.cfg0000664000567000056710000000501312701406024016227 0ustar jenkinsjenkins00000000000000[metadata] name = barbican description = Service for storing sensitive client information for OpenStack description-file = README.md author = OpenStack author-email = openstack-dev@lists.openstack.org home-page = http://www.openstack.org/ classifier = Environment :: OpenStack Intended Audience :: Information Technology Intended Audience :: System Administrators License :: OSI Approved :: Apache Software License Operating System :: POSIX :: Linux Programming Language :: Python Programming Language :: Python :: 2 Programming Language :: Python :: 2.7 Programming Language :: Python :: 3 Programming Language :: Python :: 3.4 [files] packages = barbican [entry_points] console_scripts = barbican-manage = barbican.cmd.barbican_manage:main barbican-db-manage = barbican.cmd.db_manage:main barbican-keystone-listener = barbican.cmd.keystone_listener:main barbican-worker = barbican.cmd.worker:main pkcs11-kek-rewrap = barbican.cmd.pkcs11_kek_rewrap:main pkcs11-key-generation = barbican.cmd.pkcs11_key_generation:main barbican-retry = barbican.cmd.retry_scheduler:main 
barbican.secretstore.plugin = store_crypto = barbican.plugin.store_crypto:StoreCryptoAdapterPlugin dogtag_crypto = barbican.plugin.dogtag:DogtagKRAPlugin kmip_plugin = barbican.plugin.kmip_secret_store:KMIPSecretStore barbican.crypto.plugin = p11_crypto = barbican.plugin.crypto.p11_crypto:P11CryptoPlugin simple_crypto = barbican.plugin.crypto.simple_crypto:SimpleCryptoPlugin barbican.certificate.plugin = simple_certificate = barbican.plugin.simple_certificate_manager:SimpleCertificatePlugin snakeoil_ca = barbican.plugin.snakeoil_ca:SnakeoilCACertificatePlugin symantec = barbican.plugin.symantec:SymantecCertificatePlugin dogtag = barbican.plugin.dogtag:DogtagCAPlugin barbican.certificate.event.plugin = simple_certificate_event = barbican.plugin.simple_certificate_manager:SimpleCertificateEventPlugin barbican.test.crypto.plugin = test_crypto = barbican.tests.crypto.test_plugin:TestCryptoPlugin [build_sphinx] all_files = 1 build-dir = doc/build source-dir = doc/source [build_apiguide] all_files = 1 build-dir = api-guide/build source-dir = api-guide/source [egg_info] tag_build = tag_date = 0 tag_svn_revision = 0 [compile_catalog] directory = barbican/locale domain = barbican [update_catalog] domain = barbican output_dir = barbican/locale input_file = barbican/locale/barbican.pot [extract_messages] keywords = _ gettext ngettext l_ lazy_gettext mapping_file = babel.cfg output_file = barbican/locale/barbican.pot [wheel] universal = 1 barbican-2.0.0/doc/0000775000567000056710000000000012701406024015154 5ustar jenkinsjenkins00000000000000barbican-2.0.0/doc/source/0000775000567000056710000000000012701406024016454 5ustar jenkinsjenkins00000000000000barbican-2.0.0/doc/source/index.rst0000664000567000056710000000336212701405673020332 0ustar jenkinsjenkins00000000000000Welcome to Barbican's developer documentation! ============================================== Barbican is the OpenStack Key Manager service. It provides secure storage, provisioning and management of secret data. 
This includes keying material such as Symmetric Keys, Asymmetric Keys, Certificates and raw binary data. Barbican for Users ================== If you're trying to learn how to use barbican, you can start by reading about `Secrets in the Barbican API Guide `__. Once you're comfortable working with secrets you can dig into the rest of the API. .. toctree:: :maxdepth: 1 api/index Barbican for Developers ======================= If you're new to OpenStack development you should start by reading the `OpenStack Developer's Guide`_. .. _`OpenStack Developer's Guide`: http://docs.openstack.org/infra/manual/developers.html Once you've read the OpenStack guide you'll be ready to set up a local barbican development environment. .. toctree:: :maxdepth: 1 setup/dev setup/devstack When you're ready to dive deeper in to barbican take a look at: .. toctree:: :maxdepth: 1 contribute/getting_involved contribute/architecture contribute/structure contribute/dataflow contribute/dependencies contribute/database_migrations plugin/index testing Barbican for Operators ====================== If you're looking for information for deploying and configuring barbican you will probably want to read: .. toctree:: :maxdepth: 1 setup/index admin-guide-cloud/index This documentation is generated by the Sphinx toolkit and lives in the source tree. Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` barbican-2.0.0/doc/source/plugin/0000775000567000056710000000000012701406024017752 5ustar jenkinsjenkins00000000000000barbican-2.0.0/doc/source/plugin/index.rst0000664000567000056710000000574012701405673021632 0ustar jenkinsjenkins00000000000000========================= Plugin Developers Guide ========================= This guide describes how to develop custom plugins for use by Barbican. 
While Barbican provides useful plugin implementations, some OpenStack operators may require customized implementations, perhaps to interact with an existing corporate database or service. This approach also gives flexibility to operators of OpenStack clouds by allowing them to choose the right implementation for their cloud. Plugin Status ============= A Barbican plugin may be considered ``stable``, ``experimental`` or ``out-of-tree``. * A *stable* status indicates that the plugin is fully supported by the OpenStack Barbican Team * An *experimental* status indicates that we intend to support the plugin, but it may be missing features or may not be fully tested at the gate. Plugins in this status may occasionally break. * An *out-of-tree* status indicates that no formal support will be provided, and the plugin may be removed in a future release. Graduation Process ------------------ By default, new plugins proposed to be in-tree will be in the *experimental* status. To be considered *stable* a plugin must meet the following requirements: * 100% unit test coverage, including branch coverage. * Gate job that executes the functional test suite against an instance of Barbican configured to use the plugin. The gate may be a devstack gate, or a third-party gate. * Implement new features within one cycle after the new blueprint feature is approved. Demotion Process ---------------- Plugins should not stay in the *experimental* status for a long time. Plugins that stay in *experimental* for more than **two** releases are expected to move into *stable*, as described by the Graduation Process, or move into *out-of-tree*. Plugins in the *stable* status may be deprecated by the team, and moved to *out-of-tree*. Plugins that stay in the *out-of-tree* status for more than **two** releases may be removed from the tree. 
Architecture ============ Barbican's plugin architecture enables developers to create their own implementations of features such as secret storage and generation, X.509 certificate generation, and event handling. The plugin pattern used defines an abstract class, whose methods are invoked by Barbican logic (referred to as Barbican 'core' in this guide) in a particular sequence. Typically plugins do not interact with Barbican's data model directly, so Barbican core also handles persisting any required information on the plugin's behalf. In general, Barbican core will invoke a variation of the plugin's ``supports()`` method to determine if a requested action can be implemented by the plugin. Once a supporting plugin is selected, Barbican core will invoke one or more methods on the plugin to complete the action. The links below provide further guidance on the various plugin types used by Barbican, as well as configuration and deployment options. .. toctree:: :maxdepth: 1 secret_store crypto certificate barbican-2.0.0/doc/source/plugin/secret_store.rst0000664000567000056710000001354712701405673023230 0ustar jenkinsjenkins00000000000000.. module:: barbican.plugin.interface.secret_store =============================== Secret Store Plugin Development =============================== This guide describes how to develop a custom secret store plugin for use by Barbican. Barbican supports two storage modes for secrets: a secret store mode (detailed on this page), and a :doc:`cryptographic mode `. The secret store mode offloads both encryption/decryption and encrypted secret storage to the plugin implementation. Barbican includes plugin interfaces to a Red Hat Dogtag service and to a Key Management Interoperability Protocol (KMIP) compliant security appliance. Since the secret store mode defers the storage of encrypted secrets to plugins, Barbican core does not need to store encrypted secrets into its data store, unlike the :doc:`cryptographic mode `. 
To accommodate the discrepancy between the two secret storage modes, a secret store to cryptographic plugin adapter has been included in Barbican core, as detailed in :ref:`plugin-secret-store-crypto-adapter-label` section below. ``secret_store`` Module ======================= The ``barbican.plugin.interface.secret_store`` module contains the classes needed to implement a custom plugin. These classes include the ``SecretStoreBase`` abstract base class which custom plugins should inherit from, as well as several Data Transfer Object (DTO) classes used to transfer data between Barbican and the plugin. Data Transfer Objects ===================== The DTO classes are used to wrap data that is passed from Barbican to the plugin as well as data that is returned from the plugin back to Barbican. They provide a level of isolation between the plugins and Barbican's internal data models. .. autoclass:: SecretDTO .. autoclass:: AsymmetricKeyMetadataDTO Secret Parameter Objects ======================== The secret parameter classes encapsulate information about secrets to be stored within Barbican and/or its plugins. .. autoclass:: SecretType .. autoclass:: KeyAlgorithm .. autoclass:: KeySpec Plugin Base Class ================= Barbican secret store plugins should implement the abstract base class ``SecretStoreBase``. Concrete implementations of this class should be exposed to Barbican using ``stevedore`` mechanisms explained in the configuration portion of this guide. .. autoclass:: SecretStoreBase :members: Barbican Core Plugin Sequence ============================= The sequence that Barbican invokes methods on ``SecretStoreBase`` depends on the requested action as detailed next. Note that these actions are invoked via the ``barbican.plugin.resources`` module, which in turn is invoked via Barbican's API and Worker processes. **For secret storage actions**, Barbican core calls the following methods: 1. 
``get_transport_key()`` - If a transport key is requested to upload secrets for storage, this method asks the plugin to provide the transport key. 2. ``store_secret_supports()`` - Asks the plugin if it can support storing a secret based on the ``KeySpec`` parameter information as described above. 3. ``store_secret()`` - Asks the plugin to perform encryption of an unencrypted secret payload as provided in the ``SecretDTO`` above, and then to store that secret. The plugin then returns a dictionary of information about that secret (typically a unique reference to that stored secret that only makes sense to the plugin). Barbican core will then persist this dictionary as a JSON attribute within its data store, and also hand it back to the plugin for secret retrievals later. The name of the plugin used to perform this storage is also persisted by Barbican core, to ensure we retrieve this secret only with this plugin. **For secret retrievals**, Barbican core will select the same plugin as was used to store the secret, and then invoke its ``get_secret()`` method to return the unencrypted secret. **For symmetric key generation**, Barbican core calls the following methods: 1. ``generate_supports()`` - Asks the plugin if it can support generating a symmetric key based on the ``KeySpec`` parameter information as described above. 2. ``generate_symmetric_key()`` - Asks the plugin to both generate and store a symmetric key based on the ``KeySpec`` parameter information. The plugin can then return a dictionary of information for the stored secret similar to the storage process above, which Barbican core will persist for later retrieval of this generated secret. **For asymmetric key generation**, Barbican core calls the following methods: 1. ``generate_supports()`` - Asks the plugin if it can support generating an asymmetric key based on the ``KeySpec`` parameter information as described above. 2. 
``generate_asymmetric_key()`` - Asks the plugin to both generate and store an asymmetric key based on the ``KeySpec`` parameter information. The plugin can then return an ``AsymmetricKeyMetadataDTO`` object as described above, which contains secret metadata for each of the three secrets generated and stored by this plugin: private key, public key and an optional passphrase. Barbican core will then persist information for these secrets, and also create a container to group them. .. _plugin-secret-store-crypto-adapter-label: The Cryptographic Plugin Adapter ================================ Barbican core includes a specialized secret store plugin used to adapt to cryptographic plugins, called ``StoreCryptoAdapterPlugin``. This plugin functions as a secret store plugin, but it directs secret related operations to :doc:`cryptographic plugins ` for encryption/decryption/generation operations. Because cryptographic plugins do not store encrypted secrets, this adapter plugin provides this storage capability via Barbican's data store. This adapter plugin also uses ``stevedore`` to access and utilize cryptographic plugins that can support secret operations. barbican-2.0.0/doc/source/plugin/crypto.rst0000664000567000056710000001113512701405673022036 0ustar jenkinsjenkins00000000000000.. module:: barbican.plugin.crypto.crypto ================================ Cryptographic Plugin Development ================================ This guide describes how to develop a custom cryptographic plugin for use by Barbican. Barbican supports two storage modes for secrets: a cryptographic mode (detailed on this page), and a :doc:`secret store mode `. The cryptographic mode stores encrypted secrets in Barbican's data store, utilizing a cryptographic process or appliance (such as a hardware security module (HSM)) to perform the encryption/decryption. Barbican includes a PKCS11-based interface to SafeNet HSMs. 
Note that cryptographic plugins are not invoked directly from Barbican core, but rather via a :doc:`secret store mode ` plugin adapter class, further described in :ref:`plugin-secret-store-crypto-adapter-label`. ``crypto`` Module ================= The ``barbican.plugin.crypto`` module contains the classes needed to implement a custom plugin. These classes include the ``CryptoPluginBase`` abstract base class which custom plugins should inherit from, as well as several Data Transfer Object (DTO) classes used to transfer data between Barbican and the plugin. Data Transfer Objects ===================== The DTO classes are used to wrap data that is passed from Barbican to the plugin as well as data that is returned from the plugin back to Barbican. They provide a level of isolation between the plugins and Barbican's internal data models. .. autoclass:: KEKMetaDTO .. autoclass:: EncryptDTO .. autoclass:: DecryptDTO .. autoclass:: GenerateDTO .. autoclass:: GenerateDTO Plugin Base Class ================= Barbican cryptographic plugins should implement the abstract base class ``CryptoPluginBase``. Concrete implementations of this class should be exposed to barbican using ``stevedore`` mechanisms explained in the configuration portion of this guide. .. autoclass:: CryptoPluginBase :members: Barbican Core Plugin Sequence ============================= Barbican invokes a different sequence of methods on the ``CryptoPluginBase`` plugin depending on the requested action. Note that these actions are invoked via the secret store adapter class ``StoreCryptoAdapterPlugin`` which is further described in :ref:`plugin-secret-store-crypto-adapter-label`. **For secret storage actions**, Barbican core calls the following methods: 1. ``supports()`` - Asks the plugin if it can support the ``barbican.plugin.crypto.crypto.PluginSupportTypes.ENCRYPT_DECRYPT`` operation type. 2. 
``bind_kek_metadata()`` - Allows a plugin to bind an internal key encryption key (KEK) to a project-ID, typically as a 'label' or reference to the actual KEK stored within the cryptographic appliance. This KEK information is stored into Barbican's data store on behalf of the plugin, and then provided back to the plugin for subsequent calls. 3. ``encrypt()`` - Asks the plugin to perform encryption of an unencrypted secret payload, utilizing the KEK bound to the project-ID above. Barbican core will then persist the encrypted data returned from this method for later retrieval. The name of the plugin used to perform this encryption is also persisted into Barbican core, to ensure we decrypt this secret only with this plugin. **For secret decryptions and retrievals**, Barbican core will select the same plugin as was used to store the secret, and then invoke its ``decrypt()`` method, providing it both the previously-persisted encrypted secret data as well as the project-ID KEK used to encrypt the secret. **For symmetric key generation**, Barbican core calls the following methods: 1. ``supports()`` - Asks the plugin if it can support the ``barbican.plugin.crypto.crypto.PluginSupportTypes.SYMMETRIC_KEY_GENERATION`` operation type. 2. ``bind_kek_metadata()`` - Same comments as for secret storage above. 3. ``generate_symmetric()`` - Asks the plugin to both generate a symmetric key, and then encrypted it with the project-ID KEK. Barbican core persists this newly generated and encrypted secret similar to secret storage above. **For asymmetric key generation**, Barbican core calls the following methods: 1. ``supports()`` - Asks the plugin if it can support the ``barbican.plugin.crypto.crypto.PluginSupportTypes.ASYMMETRIC_KEY_GENERATION`` operation type. 2. ``bind_kek_metadata()`` - Same comments as for secret storage above. 3. 
``generate_asymmetric()`` - Asks the plugin to generate and encrypt asymmetric public and private key (and optional passphrase) information, which Barbican core will persist as a container of separate encrypted secrets. barbican-2.0.0/doc/source/plugin/certificate.rst0000664000567000056710000002227612701405673023010 0ustar jenkinsjenkins00000000000000.. module:: barbican.plugin.interface.certificate_manager ============================== Certificate Plugin Development ============================== This guide describes how to develop a custom certificate plugin for use by Barbican. Barbican core orchestrates generating SSL certificates, delegating to certificate plugins any required actions. Certificate actions include initiating a certificate order, checking for order updates, and retrieving generated certificates. Barbican plans to include the following certificate plugins: 1. A Red Hat Dogtag certificate authority (CA) plugin capable of generating certificates once the order is initiated. 2. A Symantec plugin able to interact with the Symantec CA service, requiring periodic status updates to see if certificates are ready. 3. A DigiCert plugin able to interact with the DigiCert CA service, with a similar interactions as with Symantec. ``certificate_manager`` Module ============================== The ``barbican.plugin.interface.certificate_manager`` module contains the classes needed to implement a custom plugin. These classes include the ``CertificatePluginBase`` abstract base class which custom plugins should inherit from, as well as any Data Transfer Object (DTO) classes used to pass information into and from plugin methods. Data Transfer Objects ===================== The DTO classes are used to wrap data that is passed from Barbican to the plugin as well as data that is returned from the plugin back to Barbican. They provide a level of isolation between the plugins and Barbican's internal data models. .. autoclass:: ResultDTO .. 
_plugin-certificate-status-label: Certificate Status Class ======================== When certificate plugin methods are invoked, they return a ``ResultDTO`` that includes one of the status response constants defined by the ``CertificateStatus`` class. As detailed in the :ref:`plugin-certificate-sequence-label` section below, Barbican core directs follow on processing for a certificate order based on these returned status constants. .. autoclass:: CertificateStatus :members: Certificate Parameter Objects ============================= Two dictionaries are available to most certificate plugin methods: 1. ``order_meta`` - A dictionary of values provided by the client when they initiated the Barbican certificate order, including information needed to create a certificate, such as CSR. 2. ``plugin_meta`` - A dictionary of values determined by the plugin itself on behalf of a specific certificate order. Barbican core persists this dictionary into the Barbican data store for a given order, and then provides this data back to plugin method invocations thereafter. Plugins are free to update this data as required, or else ignore it if not required. For example, plugins that interact with remote CAs could store the CA's unique order ID, for use with future interactions with that CA. Plugin Base Class ================= Barbican certificate plugins should implement the abstract base class ``CertificatePluginBase``. Concrete plugin implementations of ``CertificatePluginBase`` should be exposed to Barbican using ``stevedore`` mechanisms explained in the configuration portion of this guide. .. autoclass:: CertificatePluginBase :members: Barbican Order's Status Versus ResultDTO's Status ================================================= When Barbican starts processing orders, it sets the order's ``status`` attribute to ``PENDING``. 
Barbican will invoke methods on the certificate plugin to process the order, and most of those methods return a ``ResultDTO`` result object, which also has a ``status`` field. Barbican core uses the result's ``status`` to determine follow on processing for the order as detailed in :ref:`plugin-certificate-sequence-label` below. The result's ``status`` field should be set to one of the constants defined in ``CertificateStatus``, per :ref:`plugin-certificate-status-label` above. If the result's ``status`` calls for terminating the order, Barbican core will set the order's status to either ``ACTIVE`` or ``ERROR``. Otherwise the order's ``status`` will stay ``PENDING``, and the order's ``sub_status`` and ``sub_status_message`` will be updated with the result's ``status`` and ``status_message`` respectively. Clients that wish to track the progress of potentially long running certificate orders can poll the order, using the ``sub_status`` and ``sub_status_message`` to track the results. Hence plugins should provide a meaningful message for ``sub_status_message``, especially on error conditions. .. _plugin-certificate-sequence-label: Barbican Core Plugin Sequence ============================= The sequence that Barbican invokes methods on ``CertificatePluginBase`` is detailed next. Note that these methods are invoked via the ``barbican.tasks.certificate_resources`` module, which in turn is invoked via Barbican's Worker processes. Barbican core calls the following methods: 1. ``supports()`` - Asks the plugin if it can support generating a certificate based on the Barbican order's ``order_meta``. 2. ``issue_certificate_request()`` - Asks the plugin to initiate a certificate order from the provided ``order_meta`` parameter information. An empty dictionary is passed in for the ``plugin_meta`` parameter, which the plugin can update as it sees fit. Barbican core will persist and then provide the ``plugin_meta`` for subsequent method calls for this order. 
The plugin method returns a ``ResultDTO`` instance which Barbican core uses to determine subsequent order processing based on its ``status`` field. This ``status`` field should be set to one of the constants defined in ``CertificateStatus`` per :ref:`plugin-certificate-status-label` above. If ``status`` is ``CertificateStatus.WAITING_FOR_CA`` then Barbican core will invoke the ``check_certificate_status`` method after the delay specified in the result's ``retry_msec`` field. If ``status`` is ``CertificateStatus.CERTIFICATE_GENERATED`` then Barbican core expects that this order is completed and sets its ``status`` to ``ACTIVE``. Barbican also expects that the result's ``certificate`` and (optionally) ``intermediates`` fields are filled out with PEM-formatted SSL certificate data. Barbican will then create a ``barbican.model.models.Container`` record with ``barbican.model.models.Secret`` records to hold the certificate data. If ``status`` is ``CertificateStatus.CA_UNAVAILABLE_FOR_REQUEST`` then Barbican core will invoke the same method after the delay specified in the result's ``retry_msec`` field. This condition typically means that a remote CA was not available, so should be retried in the future. If ``status`` is set to ``CertificateStatus.CLIENT_DATA_ISSUE_SEEN`` then Barbican considers the order to have problems with the client-provided data, but otherwise the order is viable. Barbican will keep the order in the ``PENDING`` state, and update the order's ``sub_status`` to ``CertificateStatus.CLIENT_DATA_ISSUE_SEEN`` and ``sub_status_message`` to the result's ``status_message``. Note that client data issues can include missing or incorrect information that the CA reports on. The CA still considers the order open, but clients must provide updates to correct the data. 
Since the client could either update this order via Barbican, or else work directly with a remote CA, Barbican will invoke the ``check_certificate_status`` method after the delay specified in the result's ``retry_msec`` field. If ``status`` is set to ``CertificateStatus.REQUEST_CANCELED`` then Barbican core expects that this order is completed and sets its ``status`` to ``ACTIVE``. It also updates the order's ``sub_status`` and ``sub_status_message`` to the result's status information. This condition could arise (for example) if a remote CA indicated that the certificate order is cancelled. If ``status`` is set to ``CertificateStatus.INVALID_OPERATION`` (or else the plugin raises an exception) then Barbican core considers this a failed order and sets the order's ``status`` to ``ERROR``. It also updates the order's ``sub_status`` and ``sub_status_message`` to the result's status information. 3. ``check_certificate_status()`` - This method is called as needed after the ``issue_certificate_request()`` method and is intended to allow plugins to check to see if a certificate has been issued yet. The result's ``status`` is processed similarly to the ``issue_certificate_request()`` method. 4. ``modify_certificate_request`` - This method is invoked if clients provide updates to the order metadata after the certificate order has been initiated. The result's ``status`` is processed similarly to the ``issue_certificate_request()`` method. 5. ``cancel_certificate_request`` - This method is invoked if clients delete or cancel a certificate order. Note that if a remote CA is involved the cancellation may not be processed immediately, in which case Barbican core will invoke the ``check_certificate_status`` method after the delay specified in the result's ``retry_msec`` field. Otherwise the result's ``status`` is processed similarly to the ``issue_certificate_request()`` method. 
barbican-2.0.0/doc/source/testing.rst0000664000567000056710000001167612701405673020707 0ustar jenkinsjenkins00000000000000Writing and Running Barbican Tests ================================== As a part of every code review that is submitted to the Barbican project there are a number of gating jobs which aid in the prevention of regression issues within Barbican. As a result, a Barbican developer should be familiar with running Barbican tests locally. For your convenience we provide the ability to run all tests through the ``tox`` utility. If you are unfamiliar with tox please see refer to the `tox documentation`_ for assistance. .. _`tox documentation`: https://tox.readthedocs.org/en/latest/ Unit Tests ---------- Currently, we provide tox environments for Python 2.7 and 3.4. By default all available test environments within the tox configuration will execute when calling ``tox``. If you want to run them independently, you can do so with the following command: .. code-block:: bash # Executes tests on Python 2.7 tox -e py27 .. note:: If you do not have the appropriate Python versions available, consider setting up PyEnv to install multiple versions of Python. See the documentation regarding :doc:`/setup/dev` for more information. .. note:: Individual unit tests can also be run, using the following commands: .. code-block:: bash # runs a single test with the function named # test_can_create_new_secret_one_step tox -e py27 -- test_can_create_new_secret_one_step # runs only tests in the WhenTestingSecretsResource class and # the WhenTestingCAsResource class tox -e py27 -- '(WhenTestingSecretsResource|WhenTestingCAsResource)' The function name or class specified must be one located in the `barbican/tests` directory. Groups of tests can also be run with a regex match after the ``--``. For more information on what can be done with ``testr``, please see: http://testrepository.readthedocs.org/en/latest/MANUAL.html You can also setup breakpoints in the unit tests. 
This can be done by adding ``import pdb; pdb.set_trace()`` to the line of the unit test you want to examine, then running the following command: .. code-block:: bash # Executes tests on Python 2.7 tox -e debug .. note:: For a list of pdb commands, please see: https://docs.python.org/2/library/pdb.html **Python 3.4** In order to run the unit tests within the Python 3.4 unit testing environment you need to make sure you have all necessary packages installed. - On Ubuntu/Debian:: sudo apt-get install python3-dev - On Fedora 21/RHEL7/CentOS7:: sudo yum install python3-devel - On Fedora 22 and higher:: sudo dnf install python3-devel You then specify to run the unit tests within the Python 3.4 environment when invoking tox .. code-block:: bash # Executes tests on Python 3.4 tox -e py34 Functional Tests ---------------- Unlike running unit tests, the functional tests require Barbican and Keystone services to be running in order to execute. For more information on :doc:`setting up a Barbican development environment ` and using :doc:`Keystone with Barbican `, see our accompanying project documentation. Once you have the appropriate services running and configured you can execute the functional tests through tox. .. code-block:: bash # Execute Barbican Functional Tests tox -e functional By default, the functional tox job will use ``testr`` to execute the functional tests as used in the gating job. .. note:: In order to run an individual functional test function, you must use the following command: .. code-block:: bash # runs a single test with the function named # test_secret_create_then_check_content_types tox -e functional -- test_secret_create_then_check_content_types # runs only tests in the SecretsTestCase class and # the OrdersTestCase class tox -e functional -- '(SecretsTestCase|OrdersTestCase)' The function name or class specified must be one located in the `functionaltests` directory. Groups of tests can also be run with a regex match after the ``--``. 
For more information on what can be done with ``testr``, please see: http://testrepository.readthedocs.org/en/latest/MANUAL.html Remote Debugging ---------------- In order to be able to hit break-points on API calls, you must use remote debugging. This can be done by adding ``import rpdb; rpdb.set_trace()`` to the line of the API call you wish to test. For example, adding the breakpoint in ``def on_post`` in ``barbican.api.controllers.secrets.py`` will allow you to hit the breakpoint when a ``POST`` is done on the secrets URL. .. note:: After performing the ``POST`` the application will freeze. In order to use ``rpdb``, you must open up another terminal and run the following: .. code-block:: bash # enter rpdb using telnet telnet localhost 4444 Once in rpdb, you can use the same commands as pdb, as seen here: https://docs.python.org/2/library/pdb.html barbican-2.0.0/doc/source/contribute/0000775000567000056710000000000012701406024020632 5ustar jenkinsjenkins00000000000000barbican-2.0.0/doc/source/contribute/database_migrations.rst0000664000567000056710000003255112701405673025403 0ustar jenkinsjenkins00000000000000Database Migrations ==================== Database migrations are managed using the Alembic_ library. The consensus for `OpenStack and SQLAlchemy`_ is that this library is preferred over sqlalchemy-migrate. Database migrations can be performed two ways: (1) via the API startup process, and (2) via a separate script. Database migrations can be optionally enabled during the API startup process. Corollaries for this are that a new deployment should begin with only one node to avoid migration race conditions. Alternatively, the automatic update startup behavior can be disabled, forcing the use of the migration script. This latter mode is probably safer to use in production environments. Policy ------- A Barbican deployment goal is to update application and schema versions with zero downtime. 
The challenge is that at all times the database schema must be able to support two deployed application versions, so that a single migration does not break existing nodes running the previous deployment. For example, when deleting a column we would first deploy a new version that ignores the column. Once all nodes are ignoring the column, a second deployment would be made to remove the column from the database. To achieve this goal, the following rules will be observed for schema changes: 1. Do not remove columns or tables directly, but rather: a. Create a version of the application not dependent on the removed column/table b. Replace all nodes with this new application version c. Create an Alembic version file to remove the column/table d. Apply this change in production manually, or automatically with a future version of the application 2. Changing column attributes (types, names or widths) should be handled as follows: a. TODO: This Stack Overflow `Need to alter column types in production database`_ page and many others summarize the grief involved in doing these sort of migrations b. TODO: What about old and new application versions happening simultaneously? i. Maybe have the new code perform migration to new column on each read ...similar to how a no-sql db migration would occur? 3. Transforming column attributes (ex: splitting one ``name`` column into a ``first`` and ``last`` name): a. TODO: An `Alembic example`_, but not robust for large datasets. Overview --------- *Prior to invoking any migration steps below, change to your* ``barbican`` *project's folder and activate your virtual environment per the* `Developer Guide`_. **If you are using PostgreSQL, please ensure you are using SQLAlchemy version 0.9.3 or higher, otherwise the generated version files will not be correct.** **You cannot use these migration tools and techniques with SQLite databases.** Consider taking a look at the `Alembic tutorial`_. 
As a brief summary: Alembic keeps track of a linked list of version files, each one applying a set of changes to the database schema that a previous version file in the linked list modified. Each version file has a unique Alembic-generated ID associated with it. Alembic generates a table in the project table space called ``alembic_version`` that keeps track of the unique ID of the last version file applied to the schema. During an update, Alembic uses this stored version ID to determine what if any follow on version files to process. Generating Change Versions --------------------------- To make schema changes, new version files need to be added to the ``barbican/model/migration/alembic_migrations/versions/`` folder. This section discusses two ways to add these files. Automatically '''''''''''''' Alembic autogenerates a new script by comparing a clean database (i.e., one without your recent changes) with any modifications you make to the Models.py or other files. This being said, automatic generation may miss changes... it is more of an 'automatic assist with expert review'. See `What does Autogenerate Detect`_ in the Alembic documentation for more details. First, you must start Barbican using a version of the code that does not include your changes, so that it creates a clean database. This example uses Barbican launched with DevStack (see `Barbican DevStack`_ wiki page for instructions). 1. Make changes to the 'barbican/model/models.py' SQLAlchemy models or checkout your branch that includes your changes using git. 2. Execute ``barbican-db-manage -d revision -m '' --autogenerate`` a. For example: ``barbican-db-manage -d mysql+pymysql://root:password@127.0.0.1/barbican?charset=utf8 revision -m 'Make unneeded verification columns nullable' --autogenerate`` 3. Examine the generated version file, found in ``barbican/model/migration/alembic_migrations/versions/``: a. **Verify generated update/rollback steps, especially for modifications to existing columns/tables** b. 
Remove autogenerated comments such as: ``### commands auto generated by Alembic - please adjust! ###`` c. **If you added new columns, follow this guidance**: 1. For non-nullable columns you will need to add default values for the records already in the table, per what you configured in the ``barbican.model.models.py`` module. You can add the ``server_default`` keyword argument for the SQLAlchemy ``Column`` call per `SQLAlchemy's server_default`_. For boolean attributes, use `server_default='0'` for False, or `server_default='1'` for True. For DateTime attributes, use `server_default=str(timeutils.utcnow())` to default to the current time. 2. If you add `any` constraint, please `always` name them in the barbican.model.models.py module, and also in the Alembic version modules when creating/dropping constraints, otherwise MySQL migrations might crash. d. **If you added new tables, follow this guidance**: 1. Make sure you added your new table to the ``MODELS`` element of the ``barbican/model/models.py`` module. 2. Note that when Barbican boots up, it will add the new table to the database. It will also try to apply the database version (that also tries to add this table) via alembic. Therefore, please edit the generated script file to add these lines: a. ``ctx = op.get_context()`` (to get the alembic migration context in current transaction) b. ``con = op.get_bind()`` (get the database connection) c. ``table_exists = ctx.dialect.has_table(con.engine, 'your-new-table-name-here')`` d. ``if not table_exists:`` e. ``...remaining create table logic here...`` *Note: For anything but trivial or brand new columns/tables, database backups and maintenance-window downtimes might be called for.* Manually ''''''''' 1. Execute: ``barbican-db-manage revision -m ""`` 2. This will generate a new file in the ``barbican/model/migration/alembic_migrations/versions/`` folder, with this sort of file format: ``_.py``. Note that only the first 20 characters of the description are used. 3. 
You can then edit this file per tutorial and the `Alembic Operation Reference`_ page for available operations you may make from the version files. **You must properly fill in the** ``upgrade()`` **methods.** Applying Changes ----------------- Barbican utilizes the Alembic version files as managing delta changes to the database. Therefore the first Alembic version file does **not** contain all time-zero database tables. To create the initial Barbican tables in the database, execute the Barbican application per the 'Via Application' section. Thereafter, it is suggested that only the ``barbican-db-manage`` command above be used to update the database schema per the 'Manually' section. Also, automatic database updates from the Barbican application should be disabled by adding/updating ``db_auto_create = False`` in the ``barbican.conf`` configuration file. **Note** : Before attempting any upgrade, you should make a full database backup of your production data. As of Kilo, database downgrades are not supported in OpenStack, and the only method available to get back to a prior database version will be to restore from backup. Via Application '''''''''''''''' The last section of the `Alembic tutorial`_ describes the process used by the Barbican application to create and update the database table space automatically. By default, when the Barbican API boots up it will try to create the Barbican database tables (using SQLAlchemy), and then try to apply the latest version files (using Alembic). In this mode, the latest version of the Barbican application can create a new database table space updated to the latest schema version, or else it can update an existing database table space to the latest schema revision (called ``head`` in the docs). *To bypass this automatic behavior, add* ``db_auto_create = False`` *to the* ``barbican.conf`` *file*. 
Manually ''''''''' Run ``barbican-db-manage -d upgrade -v head``, which will cause Alembic to apply the changes found in all version files after the version currently written in the target database, up until the latest version file in the linked chain of files. To upgrade to a specific version, run this command: ``barbican-db-manage -d upgrade -v ``. The ``Alembic-ID-of-version`` is a unique ID assigned to the change such as ``1a0c2cdafb38``. Downgrade ''''''''' Upgrades involve complex operations and can fail. Before attempting any upgrade, you should make a full database backup of your production data. As of Kilo, database downgrades are not supported, and the only method available to get back to a prior database version will be to restore from backup. You must complete these steps to successfully roll back your environment: 1. Roll back configuration files. 2. Restore databases from backup. 3. Roll back packages. Rolling back upgrades is a tricky process because distributions tend to put much more effort into testing upgrades than downgrades. Broken downgrades often take significantly more effort to troubleshoot and resolve than broken upgrades. Only you can weigh the risks of trying to push a failed upgrade forward versus rolling it back. Generally, consider rolling back as the very last option. The backup instructions provided in `Backup tutorial`_ ensure that you have proper backups of your databases and configuration files. Read through this section carefully and verify that you have the requisite backups to restore. **Note** : The backup tutorial reference file is only updated to Juno; the DB backup operation will be similar for Kilo. The link will be updated when the reference has been updated. For more information and examples about downgrade operation please see `Downgrade tutorial`_ as reference. TODO Items ----------- 1. *[Done - It works!]* Verify alembic works with the current SQLAlchemy model configuration in Barbican (which was borrowed from Glance). 2. 
*[Done - It works, I was able to add/remove columns while app was running]* Verify that SQLAlchemy is tolerant of schema miss-matches. For example, if a column is added to a table schema, will this break existing deployments that aren't expecting this column? 3. *[Done - It works]* Add auto-migrate code to the boot up of models (see the ``barbican\model\repositories.py`` file). 4. *[Done - It works]* Add guard in Barbican model logic to guard against running migrations with SQLite databases. 5. Add detailed deployment steps for production, so how new nodes are rolled in and old ones rolled out to complete move to new versions. 6. *[In Progress]* Add a best-practices checklist section to this page. a. This would provide guidance on safely migrating schemas, do's and don'ts, etc. b. This could also provide code guidance, such as ensuring that new schema changes (eg. that new column) aren't required for proper functionality of the previous version of the code. c. If a server bounce is needed, notification guidelines to the devop team would be spelled out here. .. _Alembic: https://alembic.readthedocs.org/en/latest/ .. _Alembic Example: https://julo.ch/blog/migrating-content-with-alembic/ .. _Alembic Operation Reference: https://alembic.readthedocs.org/en/latest/ops.html .. _Alembic tutorial: https://alembic.readthedocs.org/en/latest/tutorial.html .. _Barbican DevStack: http://docs.openstack.org/developer/barbican/setup/devstack.html .. _Developer Guide: https://github.com/cloudkeep/barbican/wiki/Developer-Guide .. _Need to alter column types in production database: http://stackoverflow.com/questions/5329255/need-to-alter-column-types-in-production-database-sql-server-2005 .. _OpenStack and SQLAlchemy: https://wiki.openstack.org/wiki/OpenStack_and_SQLAlchemy#Migrations .. _What does Autogenerate Detect: http://alembic.readthedocs.org/en/latest/autogenerate.html#what-does-autogenerate-detect-and-what-does-it-not-detect .. 
_SQLAlchemy's server_default: http://docs.sqlalchemy.org/en/latest/core/metadata.html?highlight=column#sqlalchemy.schema.Column.params.server_default .. _Backup tutorial: http://docs.openstack.org/openstack-ops/content/upgrade-icehouse-juno.html#upgrade-icehouse-juno-backup .. _Downgrade tutorial: http://docs.openstack.org/openstack-ops/content/ops_upgrades-roll-back.html barbican-2.0.0/doc/source/contribute/dataflow.rst0000664000567000056710000000733212701405673023203 0ustar jenkinsjenkins00000000000000Dataflow ======== Bootup flow when the Barbican API service begins ------------------------------------------------ This is the sequence of calls for booting up the Barbican API server: #. ``bin/barbican.sh start``: Launches a WSGI service that performs a PasteDeploy process, invoking the middleware components found in ``barbican/api/middleware`` as configured in ``etc/barbican/barbican-api-paste``. The middleware components invoke and then execute the Pecan application created via ``barbican/api/app.py:create_main_app()``, which also defines the controllers (defined in ``barbican/api/controllers/``) used to process requested URI routes. Typical flow when the Barbican API executes ------------------------------------------- For **synchronous** calls, the following sequence is generally followed: #. A client sends an HTTP REST request to the Barbican API server. #. The WSGI server and routing invokes a method on one of the ``XxxxController`` classes in ``barbican/api/controllers/xxxx.py``, keyed to an HTTP verb (so one of POST, GET, DELETE, or PUT). #. Example - GET /secrets: #. In ``barbican/api/controllers/secrets.py``, the ``SecretController``'s ``on_get()`` is invoked. #. A ``SecretRepo`` repository class (found in ``barbican/model/respositories.py``) is then used to retrieve the entity of interest, in this case as a ``Secret`` entity defined in ``barbican/model/models.py``. #. 
The payload is decrypted as needed, via ``barbican/plugin/resources.py``'s ``get_secret()`` function. #. A response JSON is formed and returned to the client. For **asynchronous** calls, the following sequence is generally followed: #. A client sends an HTTP REST request to the Barbican API server. #. The WSGI server and routing again invokes a method on one of the ``XxxxController`` classes in ``barbican/api/controllers/``. #. A remote procedure call (RPC) task is enqueued for later processing by a worker node. #. Example - POST /orders: #. In ``barbican/api/controllers/orders.py``, the ``OrdersController``'s ``on_post()`` is invoked. #. The ``OrderRepo`` repository class (found in ``barbican/model/repositories.py``) is then used to create the ``barbican/model/models.py``'s ``Order`` entity in a 'PENDING' state. #. The Queue API's ``process_type_order()`` method on the ``TaskClient`` class (found in ``barbican/queue/client.py``) is invoked to send a message to the queue for asynchronous processing. #. A response JSON is formed and returned to the client. #. The Queue service receives the message sent above, invoking a corresponding method on ``barbican/queue/server.py``'s ``Tasks`` class. This method then invokes the ``process_and_suppress_exceptions()`` method on one of the ``barbican/tasks/resources.py``'s ``BaseTask`` implementors. This method can then utilize repository classes as needed to retrieve and update entities. It may also interface with third party systems via plugins. The ``barbican/queue/client.py``'s ``TaskClient`` class above may also be invoked from a worker node for follow on asynchronous processing steps. #. Example - POST /orders (continued): #. Continuing the example above, the queue would invoke the ``process_type_order()`` method on ``barbican/queue/server.py``'s ``Tasks`` class. Note the method is named the same as the ``TaskClient`` method above by convention. #. 
This method then invokes ``process_and_suppress_exceptions()`` on the ``barbican/tasks/resources.py``'s ``BeginTypeOrder`` class. This class is responsible for processing all newly-POST-ed orders. barbican-2.0.0/doc/source/contribute/structure.rst0000664000567000056710000000301712701405673023436 0ustar jenkinsjenkins00000000000000Project Structure ================= #. ``barbican/`` (Barbican-specific Python source files) #. ``api/`` (REST API related source files) #. ``controllers/`` (Pecan-based controllers handling REST-based requests) #. ``middleware/`` (Middleware business logic to process REST requests) #. ``cmd/`` (Barbican admin command source files) #. ``common/`` (Modules shared across other Barbican folders) #. ``locale/`` (Translation templates) #. ``model/`` (SQLAlchemy-based model classes) #. ``plugin/`` (Plugin related logic, interfaces and look-up management) #. ``resources.py`` (Supports interactions with plugins) #. ``crypto/`` (Hardware security module (HSM) logic and plugins) #. ``interface/`` (Certificate manager and secret store interface classes) #. (The remaining modules here are implementations of above interfaces) #. ``queue/`` (Client and server interfaces to the queue) #. ``client.py`` (Allows clients to publish tasks to queue) #. ``server.py`` (Runs the worker service, responds to enqueued tasks) #. ``tasks/`` (Worker-related controllers and implementations) #. ``tests/`` (Unit tests) #. ``bin/`` (Start-up scripts for the Barbican nodes) #. ``devstack/`` (Barbican DevStack plugin, DevStack gate configuration and Vagrantfile for installing DevStack VM) #. ``etc/barbican/`` (Configuration files) #. ``functionaltests`` (Functional Barbican tests) #. ``doc/source`` (Sphinx documentation) #. 
``releasenotes`` (Barbican Release Notes) barbican-2.0.0/doc/source/contribute/dependencies.rst0000664000567000056710000000215112701405673024022 0ustar jenkinsjenkins00000000000000Adding/Updating Dependencies ============================ Adding new Dependency --------------------- If you need to add a new dependency to Barbican, you must edit a few things: #. Add the package name (and minimum version if applicable) to the requirements.txt file in the root directory. .. note:: All dependencies and their version specifiers must come from the OpenStack `global requirements`_ repository. #. We support deployment on CentOS 6.4, so you should check CentOS + EPEL 6 yum repos to figure out the name of the rpm package that provides the package you're adding. Add this package name as a dependency in ``rpmbuild/SPECS/barbican.spec``. #. If there is no package available in CentOS or EPEL, or if the latest available package's version is lower than the minimum required version we must build an rpm for it ourselves. Add a line to ``rpmbuild/package_dependencies.sh`` so that jenkins will build an rpm using fpm and upload it to the cloudkeep yum repo. .. _`global requirements`: https://git.openstack.org/cgit/openstack/requirements/tree/global-requirements.txt barbican-2.0.0/doc/source/contribute/architecture.rst0000664000567000056710000000722512701405673024065 0ustar jenkinsjenkins00000000000000Architecture ============ This document describes the architecture and technology selections for Barbican. In general, a goal is to utilize the OpenStack architecture and technology selections as much as possible. An overall architecture is presented first, followed by technology selection details to implement the system. Overall Architecture -------------------- The next figure presents an overall logical diagram for Barbican. .. image:: ./../images/barbican-overall-architecture.gif The API node(s) handle incoming REST requests to Barbican. 
These nodes can interact with the database directly if the request can be completed synchronously (such as for GET requests), otherwise the queue supports asynchronous processing by worker nodes. The latter could include interactions with third parties such as certificate authorities. As implied in the diagram, the architecture supports multiple API and worker nodes being added/removed to/from the network, to support advanced features such as auto scaling. Eventually, the database could be replicated across data centers supporting region-agnostic storage and retrieval of secured information, albeit with lags possible during data synchronization. Technology Selection -------------------- In general, components from the `Oslo `_ commons project are used within Barbican, such as config, messaging and logging. The next figure examines the components within Barbican. .. image:: ./../images/barbican-components.gif Several potential clients of the Barbican REST interface are noted, including `Castellan `_ which presents a generic key management interface for other OpenStack projects with Barbican as an available plugin. The API node noted in the previous section is a WSGI server. Similar to OpenStack projects such as `Glance `_ it utilizes paste to support configurable middleware such as to interface with `Keystone `_ for authentication and authorization services. `Pecan `_ (a lean Python web framework inspired by CherryPy, TurboGears, and Pylons) is utilized to map resources to REST routes. These resources contain the controller business logic for Barbican and can interface with encryption/decryption processes (via crypto components), datastore (via repository components) and asynchronous tasks (via queue components). 
The crypto components provide a means to encrypt and decrypt information that accommodates a variety of encryption mechanisms and cryptographic backends (such as key management interoperability protocol (KMIP) or hardware security module (HSM)) via a plugin interface. The repository components provide an interface and database session context for the datastore, with model components representing entities such as Secrets (used to store encrypted information such as data encryption keys). `SQLAlchemy `_ is used as the object relational model (ORM) layer to the database, including `MySQL `_ and `PostgreSQL `_. For asynchronous processing, `Oslo Messaging `_ is used to interact with the queue, including `RabbitMQ `_. The worker node processes tasks from the queue. Task components are similar to API resources in that they implement business logic and also interface with the datastore and follow on asynchronous tasks as needed. These asynchronous tasks can interface with external systems, such as certificate authorities for SSL/TLS certificate processing. barbican-2.0.0/doc/source/contribute/getting_involved.rst0000664000567000056710000000455712701405673024757 0ustar jenkinsjenkins00000000000000Getting Involved =================== The best way to join the community and get involved is to talk with others online or at a meetup and offer contributions. Here are some of the many ways you can contribute to the Barbican project\: * Development and Code Reviews * Bug reporting/Bug fixes * Wiki and Documentation * Blueprints/Specifications * Testing * Deployment scripts Freenode IRC (Chat) -------------------- You can find Barbicaneers in our publicly accessible channel on `freenode`_ ``#openstack-barbican``. All conversations are logged and stored for your convenience at `eavesdrop.openstack.org`_. For more information regarding OpenStack IRC channels please visit the `OpenStack IRC Wiki`_. .. _`freenode`: https://freenode.net .. 
_`OpenStack IRC Wiki`: https://wiki.openstack.org/wiki/IRC .. _`eavesdrop.openstack.org`: http://eavesdrop.openstack.org/irclogs/ %23openstack-barbican/ Mailing List -------------- The mailing list email is openstack@lists.openstack.org. This is a common mailing list across the OpenStack projects. If you wish to ask questions or have a discussion related to Barbican include ``[barbican]`` in your email subject line. To participate on the mailing list\: * `Subscribe`_ to the mailing list * Browse the `mailing list archives`_ .. _`Subscribe`: http://lists.openstack.org/cgi-bin/mailman/listinfo/openstack .. _`mailing list archives`: http://lists.openstack.org/pipermail/openstack Launchpad ----------- Like other OpenStack related projects, we utilize Launchpad for our bug and release tracking. * `Barbican Launchpad Project`_ .. _`Barbican Launchpad Project`: https://launchpad.net/barbican Source Repository ------------------- Like other OpenStack related projects, the official Git repository is available on `git.openstack.org`_; however, the repository is also mirrored to GitHub for easier browsing. * `Barbican on GitHub`_ .. _`git.openstack.org`: http://git.openstack.org/cgit/openstack/barbican .. _`Barbican on GitHub`: https://github.com/openstack/barbican Gerrit -------- Like other OpenStack related projects, we utilize the OpenStack Gerrit review system for all code reviews. If you're unfamiliar with using the OpenStack Gerrit review system, please review the `Gerrit Workflow`_ wiki documentation. .. 
_`Gerrit Workflow`: http://docs.openstack.org/infra/manual/developers.html#development-workflow barbican-2.0.0/doc/source/admin-guide-cloud/0000775000567000056710000000000012701406024021743 5ustar jenkinsjenkins00000000000000barbican-2.0.0/doc/source/admin-guide-cloud/database_cleaning.rst0000664000567000056710000000561512701405673026115 0ustar jenkinsjenkins00000000000000Database Cleaning ================= Entries in the Barbican database are soft deleted and can build up over time. These entries can be cleaned up with the clean up command. The command can be used with a cron job to clean the database automatically on intervals. Commands -------- The command ```barbican-manage db clean``` can be used to clean up the database. By default, it will remove soft deletions that are at least 90 days old since deletion. ```barbican-manage db clean --min-days 180``` (```-m```) will go through the database and remove soft deleted entries that are at least 180 days old since deletion. The default value is 90 days. Passing a value of ```--min-days 0``` will delete all soft-deleted entries up to today. ```barbican-manage db clean --clean-unassociated-projects``` (```-p```) will go through the database and remove projects that have no associated resources. The default value is False. ```barbican-manage db clean --soft-delete-expired-secrets``` (```-e```) will go through the database and soft delete any secrets that are past their expiration date. The default value is False. If ```-e``` is used along with ```--min-days 0``` then all the expired secrets will be hard deleted. ```barbican-manage db clean --verbose``` (```-V```) will print more information out into the terminal. ```barbican-manage db clean --log-file``` (```-L```) will set the log file location. The creation of the log may fail if the user running the command does not have access to the log file location or if the target directory does not exist.
The default value for log_file can be found in ```/etc/barbican/barbican.conf``` The log will contain the verbose output from the command. Cron Job -------- A cron job can be created on linux systems to run at a given interval to clean the barbican database. Crontab ''''''' 1. Start the crontab editor ```crontab -e``` with the user that runs the clean up command 2. Edit the crontab section to run the command at a given interval. ``` clean up command``` Crontab Examples '''''''''''''''' ```00 00 * * * barbican-manage db clean -p -e``` -Runs a job every day at midnight which will remove soft deleted entries that are at least 90 days old since soft deletion, will clean unassociated projects, and will soft delete secrets that are expired. ```00 03 01 * * barbican-manage db clean -m 30``` -Runs a job every month at 3AM which will remove soft deleted entries that are at least 30 days old since deletion. ```05 01 07 * 6 barbican-manage db clean -m 180 -p -e -L /tmp/barbican-clean-command.log``` -Runs a job every month at 1:05AM on the 7th day of the month and every Saturday. Entries that are at least 180 days old since soft deletion will be removed from the database. Unassociated projects will be removed. Expired secrets will be soft deleted. The log file will be saved to ```/tmp/barbican-clean-command.log``` barbican-2.0.0/doc/source/admin-guide-cloud/index.rst0000664000567000056710000000055712701405673023614 0ustar jenkinsjenkins00000000000000=================== Cloud Administrator Guide - Key Manager service =================== The Key Manager service, code-named Barbican, is the default secret storage service for OpenStack. The service provides secure storage, provisioning and management of secrets. ..
toctree:: :maxdepth: 1 access_control.rst barbican_manage.rst database_cleaning.rst barbican-2.0.0/doc/source/admin-guide-cloud/barbican_manage.rst0000664000567000056710000000564012701405673025564 0ustar jenkinsjenkins00000000000000=================================== Barbican Service Management Utility =================================== Description =========== ``barbican-manage`` is a utility that is used to control the barbican key manager service database and Hardware Secure Module (HSM) plugin device. Use cases include migrating the secret database or generating a Master Key Encryption Key (MKEK) in the HSM. This command set should only be executed by a user with admin privileges. Options ======= The standard pattern for executing a barbican-manage command is: ``barbican-manage []`` Running ``barbican-manage`` without arguments shows a list of available command categories. Currently, there are 2 supported categories: *db* and *hsm*. Running with a category argument shows a list of commands in that category: * ``barbican-manage db --help`` * ``barbican-manage hsm --help`` * ``barbican-manage --version`` shows the version number of barbican service. The following sections describe the available categories and arguments for barbican-manage. Barbican Database ~~~~~~~~~~~~~~~~~ .. Warning:: Before executing **barbican-manage db** commands, make sure you are familiar with `Database Migration`_ first. ``barbican-manage db revision [--db-url] [--message] [--autogenerate]`` Create a new database version file. ``barbican-manage db upgrade [--db-url] [--version]`` Upgrade to a future version database. ``barbican-manage db history [--db-url] [--verbose]`` Show database changeset history. ``barbican-manage db current [--db-url] [--verbose]`` Show current revision of database. ``barbican-manage db clean [--db-url] [--verbose] [--min-days] [--clean-unassociated-projects] [--soft-delete-expired-secrets] [--log-file]`` Clean up soft deletions in the database. 
More documentation can be found here: :doc:`Database Cleaning ` Barbican PKCS11/HSM ~~~~~~~~~~~~~~~~~~~ ``barbican-manage hsm gen_mkek [--library-path] [--passphrase] [--slot-id] [--label] [--length]`` Create a new Master key encryption key in HSM. This MKEK will be used to encrypt all project key encryption keys. Its label must be unique. ``barbican-manage hsm gen_hmac [--library-path] [--passphrase] [--slot-id] [--label] [--length]`` Create a new Master HMAC key in HSM. This HMAC key will be used to generate an authentication tag of encrypted project key encryption keys. Its label must be unique. ``barbican-manage hsm rewrap_pkek [--dry-run]`` Rewrap project key encryption keys after rotating to new MKEK and/or HMAC key(s) in HSM. The new MKEK and HMAC key should have already been generated using the above commands. The user will have to configure new MKEK and HMAC key labels in /etc/barbican.conf and restart barbican server before executing this command. .. _Database Migration: http://docs.openstack.org/developer/barbican/contribute/database_migrations.html barbican-2.0.0/doc/source/admin-guide-cloud/access_control.rst0000664000567000056710000000611512701405673025512 0ustar jenkinsjenkins00000000000000============== Access Control ============== Role Based Access Control (RBAC) -------------------------------- Like many other services, the Key Manager service supports the protection of its APIs by enforcing policy rules defined in a policy file. The Key Manager service stores a reference to a policy JSON file in its configuration file, :file:`/etc/barbican/barbican.conf`. Typically this file is named ``policy.json`` and it is stored in :file:`/etc/barbican/policy.json`. Each Key Manager API call has a line in the policy file that dictates which level of access applies: .. code-block:: ini API_NAME: RULE_STATEMENT or MATCH_STATEMENT where ``RULE_STATEMENT`` can be another ``RULE_STATEMENT`` or a ``MATCH_STATEMENT``: .. 
code-block:: ini RULE_STATEMENT: RULE_STATEMENT or MATCH_STATEMENT ``MATCH_STATEMENT`` is a set of identifiers that must match between the token provided by the caller of the API and the parameters or target entities of the API in question. For example: .. code-block:: ini "secrets:post": "role:admin or role:creator" indicates that to create a new secret via a POST request, you must have either the admin or creator role in your token. .. warning:: The Key Manager service scopes the ownership of a secret at the project level. This means that many calls in the API will perform an additional check to ensure that the project_id of the token matches the project_id stored as the secret owner. Default Policy ~~~~~~~~~~~~~~ The policy engine in OpenStack is very flexible and allows for customized policies that make sense for your particular cloud. The Key Manager service comes with a sample ``policy.json`` file which can be used as the starting point for a customized policy. The sample policy defines 5 distinct roles: key-manager:service-admin The cloud administrator in charge of the Key Manager service. This user has access to all management APIs like the project-quotas. admin Project administrator. This user has full access to all resources owned by the project for which the admin role is scoped. creator Users with this role are allowed to create new resources but are not allowed to delete any existing resources. They are also allowed full access to existing secrets owned by the project in scope. observer Users with this role are allowed to access to existing resources but are not allowed to upload new secrets or delete existing secrets. audit Users with this role are only allowed access to the resource metadata. So users with this role are unable to decrypt secrets. Access Control List API ----------------------- There are some limitations that result from scoping ownership of a secret at the project level. 
For example, there is no easy way for a user to upload a secret for which only they have access. There is also no easy way to grant a user access to only a single secret. To address this limitations the Key Manager service includes an Access Control List (ACL) API. For full details see the `ACL API User Guide `__ barbican-2.0.0/doc/source/conf.py0000775000567000056710000000466212701405673017777 0ustar jenkinsjenkins00000000000000# -*- coding: utf-8 -*- # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import os import sys sys.path.insert(0, os.path.abspath('../..')) # -- General configuration ---------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = [ 'sphinx.ext.autodoc', # 'sphinx.ext.intersphinx', 'oslosphinx' ] # autodoc generation is a bit aggressive and a nuisance when doing heavy # text edit cycles. # execute "export SPHINX_DEBUG=1" in your terminal to disable # The suffix of source filenames. source_suffix = '.rst' # The master toctree document. master_doc = 'index' # General information about the project. project = u'Barbican' copyright = u'2014, OpenStack Foundation' # If true, '()' will be appended to :func: etc. cross-reference text. add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). 
add_module_names = True # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # -- Options for HTML output -------------------------------------------------- # The theme to use for HTML and HTML Help pages. Major themes that come with # Sphinx are currently 'default' and 'sphinxdoc'. # html_theme_path = ["."] # html_theme = '_theme' # html_static_path = ['static'] html_theme_options = {} # Output file base name for HTML help builder. htmlhelp_basename = '%sdoc' % project # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass # [howto/manual]). latex_documents = [ ('index', '%s.tex' % project, u'%s Documentation' % project, u'OpenStack Foundation', 'manual'), ] # Example configuration for intersphinx: refer to the Python standard library. # intersphinx_mapping = {'http://docs.python.org/': None} barbican-2.0.0/doc/source/setup/0000775000567000056710000000000012701406024017614 5ustar jenkinsjenkins00000000000000barbican-2.0.0/doc/source/setup/index.rst0000664000567000056710000000023312701405673021464 0ustar jenkinsjenkins00000000000000Setting up Barbican =================== .. toctree:: :maxdepth: 1 dev keystone devstack certificate troubleshooting noauth audit barbican-2.0.0/doc/source/setup/devstack.rst0000664000567000056710000000671412701405673022173 0ustar jenkinsjenkins00000000000000Running Barbican on DevStack ============================ Barbican is currently available via the plugin interface within devstack We provide two ways of deploying a DevStack environment with a running Barbican. The easy mode uses vagrant and automatically creates the VM with all necessary dependencies to run DevStack. It is recommended to use this process if it is your first time. If you are familiar with DevStack you can use the steps in the manual setup section to install Barbican onto your already running DevStack installation. .. 
warning:: This process takes anywhere from 10-30 minutes depending on your internet connection. Easy Mode --------- To simplify the setup process of running Barbican on DevStack, there is a Vagrantfile that will automatically setup up a VM containing Barbican running on Devstack. .. warning:: Upon following these steps, you will not be able to use tox tools if you setup a shared folder. This is because making hard-links is required, but not permitted if the project is in a shared folder. If you wish to use tox, comment out the `Create Synced Folder` section in `barbican/devstack/barbican-vagrant/Vagrantfile`. 1. Obtain Barbican vagrant file If you don't already have the file then clone the repo below .. code-block:: bash git clone https://github.com/openstack/barbican.git 2. Move the ``barbican-vagrant`` directory outside of the Barbican directory and into your current directory for vagrant files. If you do not have one, then just copy it into your home directory. .. code-block:: bash cp -r barbican/devstack/barbican-vagrant 3. Get into the ``barbican-vagrant`` directory .. code-block:: bash cd barbican-vagrant 4. Start create a new VM based on the cloned configuration .. code-block:: bash vagrant up 5. Once the VM has been successfully started and provisioned, ssh into the VM. .. code-block:: bash vagrant ssh 6. Once inside the VM, change your directory to the ``devstack`` folder. .. code-block:: bash cd /opt/stack/devstack/ 7. Start DevStack .. code-block:: bash ./stack.sh Manual Setup ------------ These steps assume you are running within a clean Ubuntu 14.04 virtual machine (local or cloud instance). If you are running locally, do not forget to expose the following ports #. Barbican - ``9311`` #. Keystone API - ``5000`` #. Keystone Admin API - ``35357`` Installation ^^^^^^^^^^^^ 1. Make sure you are logged in as a non-root user with sudo privileges 2. Install git .. code-block:: bash sudo apt-get install git 3. Clone DevStack .. 
code-block:: bash git clone https://github.com/openstack-dev/devstack.git 4. Add the Barbican plugin to the local.conf file and verify the minimum services required are included. You can pull down a specific branch by appending the name to the end of the git url. If you leave the space empty like below, then origin/master will be pulled. .. code-block:: ini enable_plugin barbican https://git.openstack.org/openstack/barbican enable_service rabbit mysql key If this is your first time and you do not have a local.conf file, there is an example in the `Barbican github `_. Copy the file and place it in the devstack/ directory. 5. Start DevStack .. code-block:: bash cd devstack/ ./stack.sh barbican-2.0.0/doc/source/setup/dev.rst0000664000567000056710000000717512701405673021147 0ustar jenkinsjenkins00000000000000Setting up a Barbican Development Environment ============================================== These instructions are designed to help you setup a standalone version of Barbican which uses SQLite as a database backend. This is not suitable for production due to the lack of authentication and an interface to a secure encryption system such as an HSM (Hardware Security Module). In addition, the SQLite backend has known issues with thread-safety. This setup is purely to aid in development workflows. .. warning:: The default key store implementation in Barbican **is not secure** in any way. **Do not use this development standalone mode to store sensitive information!** Installing system dependencies ------------------------------ **Ubuntu 15.10:** .. code-block:: bash # Install development tools sudo apt-get install -y git python-tox # Install dependency build requirements sudo apt-get install -y libffi-dev libssl-dev python-dev **Fedora 23:** .. 
code-block:: bash # Install development tools sudo dnf install -y git python-tox # Install dependency build requirements sudo dnf install -y gcc libffi-devel openssl-devel redhat-rpm-config Setting up a virtual environment -------------------------------- We highly recommend using virtual environments for development. You can learn more about `Virtual Environments`_ in the Python Guide. If you installed tox in the previous step you should already have virtualenv installed as well. .. _Virtual Environments: http://docs.python-guide.org/en/latest/dev/virtualenvs/ .. code-block:: bash # Clone barbican source git clone https://git.openstack.org/openstack/barbican cd barbican # Create and activate a virtual environment virtualenv .barbicanenv source .barbicanenv/bin/activate # Install barbican in development mode pip install -e $PWD Configuring Barbican -------------------- Barbican uses oslo.config for configuration. By default the api process will look for the configuration file in ``$HOME/barbican.conf`` or ``/etc/barbican/barbican.conf``. The sample configuration files included in the source code assume that you'll be using ``/etc/barbican/`` for configuration and ``/var/lib/barbican`` for the database file location. .. code-block:: bash # Create the directories and copy the config files sudo mkdir /etc/barbican sudo mkdir /var/lib/barbican sudo chown $(whoami) /etc/barbican sudo chown $(whoami) /var/lib/barbican cp -r etc/barbican /etc All the locations are configurable, so you don't have to use ``/etc`` and ``/var/lib`` in your development machine if you don't want to. Running Barbican ---------------- If you made it this far you should be able to run the barbican development server using this command: .. code-block:: bash bin/barbican-api An instance of barbican will be listening on ``http://localhost:9311``. Note that the default configuration uses the unauthenticated context. 
This means that requests should include the ``X-Project-Id`` header instead of including a keystone token in the ``X-Auth-Token`` header. For example: .. code-block:: bash curl -v -H 'X-Project-Id: 12345' \ -H 'Accept: application/json' \ http://localhost:9311/v1/secrets For more information on configuring Barbican with Keystone auth see the :doc:`Keystone Configuration ` page. Building the Documentation -------------------------- You can build the html developer documentation using tox: .. code-block:: bash tox -e docs Running the Unit Tests ---------------------- You can run the unit test suite using tox: .. code-block:: bash tox -e py27 barbican-2.0.0/doc/source/setup/noauth.rst0000664000567000056710000000250712701405673021655 0ustar jenkinsjenkins00000000000000No Auth Barbican ================ Generally barbican can be configured to use keystone like every other OpenStack service for identity and access control. Sometimes it may be useful to run barbican without any authentication service for development purposes. By default, configuration in :file:`barbican-api-paste.ini` sets barbican without any authentication (no auth mode), for example: .. code-block:: ini # Use this pipeline for Barbican API - DEFAULT no authentication [pipeline:barbican_api] pipeline = unauthenticated-context apiapp With every OpenStack service integrated with keystone, its API requires an access token to retrieve certain information and to validate the user's identity and privileges. If you are running barbican in no auth mode, you have to specify a project_id instead of an access token. For the API, replace ``'X-Auth-Token: $TOKEN'`` with ``'X-Project-Id: {project_id}'`` for every API request in :doc:`../api/index`. You can also find a detailed explanation of how to run the barbican client with an unauthenticated context `here `_ and how to run the barbican CLI in no auth mode `here `_.
barbican-2.0.0/doc/source/setup/audit.rst0000664000567000056710000001204012701405673021462 0ustar jenkinsjenkins00000000000000Using Audit Middleware with Barbican ==================================== Background ---------- `Audit middleware`_ is a python middleware logic which is added in service request processing pipeline via paste deploy filters. Audit middleware constructs audit event data in `CADF format`_. Audit middleware supports delivery of CADF audit events via Oslo messaging notifier capability. Based on `notification_driver` configuration, audit events can be routed to messaging infrastructure (notification_driver = messagingv2) or can be routed to a log file (notification_driver = log). Audit middleware creates two events per REST API interaction. First event has information extracted from request data and the second one has request outcome (response). .. _Audit middleware: http://docs.openstack.org/developer/keystonemiddleware/audit.html .. _CADF format: http://www.dmtf.org/sites/default/files/standards/documents/DSP2038_1.0.0.pdf Enabling Audit for API Requests ------------------------------- Audit middleware is available as part of `keystonemiddleware`_ (>= 1.6) library. Assuming a barbican deployment is already using keystone for token validation, auditing support requires only configuration changes. It has Oslo messaging library dependency as it uses this for audit event delivery. pyCADF library is used for creating events in CADF format. * Enable Middleware : `Enabling Middleware Link`_ . Change is primarily in service paste deploy configuration. * Configure Middleware : `Configuring Middleware Link`_ . Can use provided audit mapping file. If there are no custom mapping for actions or path, then related mapping values are derived from taxonomy defined in pyCADF library. .. _keystonemiddleware: https://github.com/openstack/keystonemiddleware/blob/master/keystonemiddleware/audit.py .. 
_Enabling Middleware Link: http://docs.openstack.org/developer/keystonemiddleware/audit.html#enabling-audit-middleware .. _Configuring Middleware Link: http://docs.openstack.org/developer/keystonemiddleware/audit.html#configure-audit-middleware .. note:: Audit middleware filter should be included after Keystone middleware’s keystone_authtoken middleware in request pipeline. This is needed so that audit middleware can utilize environment variables set by keystone_authtoken middleware. Steps ##### 1. Turn off any active instances of Barbican. #. Copy *api_audit_map.conf* to ``/etc/barbican`` directory. #. Edit ``/etc/barbican/barbican-api-paste.ini`` Replace the /v1 app pipeline from ``barbican_api`` to ``barbican-api-keystone-audit`` pipeline [pipeline:barbican-api-keystone-audit] pipeline = keystone_authtoken context audit apiapp #. Edit ``barbican.conf`` to update *notification_driver* value. #. Start Barbican ``{barbican_home}/bin/barbican.sh start`` Sample Audit Event ------------------ Following is the sample of audit event for symmetric key create request .. 
code-block:: json { "priority":"INFO", "event_type":"audit.http.request", "timestamp":"2015-12-11 00:44:26.412076", "publisher_id":"uwsgi", "payload":{ "typeURI":"http://schemas.dmtf.org/cloud/audit/1.0/event", "eventTime":"2015-12-11T00:44:26.410768+0000", "target":{ "typeURI":"service/security/keymanager/secrets", "addresses":[ { "url":"http://{barbican_admin_host}:9311", "name":"admin" }, { "url":"http://{barbican_internal_host}:9311", "name":"private" }, { "url":"https://{barbican_public_host}:9311", "name":"public" } ], "name":"barbican_service_user", "id":"barbican" }, "observer":{ "id":"target" }, "tags":[ "correlation_id?value=openstack:7e0fe4a6-e258-477e-a1c9-0fd0921a8435" ], "eventType":"activity", "initiator":{ "typeURI":"service/security/account/user", "name":"cinder_user", "credential":{ "token":"***", "identity_status":"Confirmed" }, "host":{ "agent":"curl/7.38.0", "address":"192.168.245.2" }, "project_id":"8eabee0a4c4e40f882df8efbce695526", "id":"513e8682f23446ceb598b6b0f5c4482b" }, "action":"create", "outcome":"pending", "id":"openstack:3a6a961c-9ada-4b81-9095-90968d896c41", "requestPath":"/v1/secrets" }, "message_id":"afc3fd93-51e9-4c80-b330-983e66962265" } `Ceilometer audit wiki`_ can be referred to identify meaning of different fields in audit event to **7 "W"s of Audit and Compliance**. .. _Ceilometer audit wiki: https://wiki.openstack.org/wiki/Ceilometer/blueprints/ support-standard-audit-formats#CADF_Model_is_designed_to_answer_all_Audit_and_Compliance_Questionsbarbican-2.0.0/doc/source/setup/certificate.rst0000664000567000056710000000110312701405673022634 0ustar jenkinsjenkins00000000000000Setting up Certificate Plugins ============================== Using the SnakeOil CA plugin ---------------------------- To evaluate Barbican certificate management, you can enable the snakeoil_ca certificate plugin. This is not suitable for production environment, but it can be useful as a development tool. 
To do so, you simply need to set ``enabled_certificate_plugins`` in ``barbican.conf``. .. code-block:: text enabled_certificate_plugins = snakeoil_ca And then restart your Barbican server. It will automatically generate an in-memory CA to create certificates. barbican-2.0.0/doc/source/setup/troubleshooting.rst0000664000567000056710000003175412701405673023620 0ustar jenkinsjenkins00000000000000===================================== Troubleshooting your Barbican Setup ===================================== If you cannot find the answers you're looking for within this document, you can ask questions on the Freenode IRC channel ``#openstack-barbican`` Getting a Barbican HTTP 401 error after a successful authentication to Keystone ------------------------------------------------------------------------------- What you might see ^^^^^^^^^^^^^^^^^^^ You get a HTTP 401 Unauthorized response even with a valid token .. code-block:: bash curl -X POST -H "X-Auth-Token: $TOKEN" -H "Content-type: application/json" \ -d '{"payload": "my-secret-here", "payload_content_type": "text/plain"}' \ http://localhost:9311/v1/secrets Caused by ^^^^^^^^^^ Expired signing cert on the Barbican server. How to avoid ^^^^^^^^^^^^^ Check for an expired Keystone signing certificate on your Barbican server. Look at the expiration date in ``/tmp/barbican/cache/signing_cert.pem``. If it is expired then follow these steps. #. On your Keystone server, verify that signing_cert.pem has the same expiration date as the one on your Barbican machine. You can normally find ``signing_cert.pem`` on your Keystone server in ``/etc/keystone/ssl/certs``. #. If the cert matches then follow these steps to create a new one #. Delete it from both your Barbican and Keystone servers. #. Edit ``/etc/keystone/ssl/certs/index.txt.attr`` and set unique_subject to no. #. Run ``keystone-manage pki_setup`` to create a new ``signing_cert.pem`` #. 
The updated cert will be downloaded to your Barbican server the next time you hit the Barbican API. #. If the cert **doesn't match** then delete the ``signing_cert.pem`` from your Barbican server. Do not delete from Keystone. The cert from Keystone will be downloaded to your machine the next time you hit the Barbican API. Returned refs use localhost instead of the correct hostname ------------------------------------------------------------- What you might see ^^^^^^^^^^^^^^^^^^^ .. code-block:: bash curl -X POST \ -H "Content-type: application/json" -H "X-Auth-Token: $TOKEN" -d \ '{"payload": "my-secret-here", "payload_content_type": "text/plain"}' \ http://myhostname.com/v1/secrets # Response: { "secret_ref": "http://localhost:9311/v1/secrets/UUID_HERE" } Caused by ^^^^^^^^^^ The default configuration on the response host name is not modified to the endpoint's host name (typically the load balancer's DNS name and port). How to avoid ^^^^^^^^^^^^^ Change your ``barbican.conf`` file's ``host_href`` setting from ``localhost:9311`` to the correct host name (myhostname.com in the example above). Barbican's tox tests fail to run on my Mac -------------------------------------------- What you might see ^^^^^^^^^^^^^^^^^^^ ``clang: error: unknown argument: '-mno-fused-madd'`` How to avoid ^^^^^^^^^^^^^ There is a `great blog article`__ that provides more details on the error and how to work around it. This link provides more details on the error and how to work around it. __ https://langui.sh/2014/03/10/wunused-command-line-argument-hard-error-in -future-is-a-harsh-mistress/ Barbican's tox tests fail to find ffi.h on my Mac ------------------------------------------------- What you might see ^^^^^^^^^^^^^^^^^^ .. code-block:: text c/_cffi_backend.c:13:10: fatal error: 'ffi.h' file not found ... ERROR: could not install deps [...]; v = InvocationError('...', 1) How to avoid ^^^^^^^^^^^^ Be sure that xcode and cmd line tools are up to date. 
Easiest way is to run ``xcode-select --install`` from an OS X command line. Be sure to say yes when asked if you want to install the command line tools. Now ``ls /usr/include/ffi/ffi.h`` should show that missing file exists, and the tox tests should run. Barbican's tox tests fail with "ImportError: No module named _bsddb" ------------------------------------------------- What you might see ^^^^^^^^^^^^^^^^^^ .. code-block:: text ImportError: No module named _bsddb How to avoid ^^^^^^^^^^^^ Running tests via tox (which uses testr) will create a .testrepository directory containing, among other things, data files. Those datafiles may be created with bsddb, if it is available in the environment. This can cause problems if you run in an environment that does not have bsddb. To resolve this, delete your .testrepository directory and run tox again. uWSGI logs 'OOPS ! failed loading app' -------------------------------------- What you might see ^^^^^^^^^^^^^^^^^^ .. code-block:: text ... spawned uWSGI master process (pid: 59190) spawned uWSGI worker 1 (pid: 59191, cores: 1) spawned uWSGI worker 1 (pid: 59192, cores: 1) Loading paste environment: config:/etc/barbican/barbican-api-paste.ini WSGI app 0 (mountpoint='') ready in 0 seconds on interpreter \ 0x7fd098c08520 pid: 59191 (default app) OOPS ! failed loading app in worker 1 (pid 59192) :( trying again... Respawned uWSGI worker 1 (new pid: 59193) Loading paste environment: config:/etc/barbican/barbican-api-paste.ini OOPS ! failed loading app in worker 1 (pid 59193) :( trying again... worker respawning too fast !!! i have to sleep a bit (2 seconds)... ... .. note:: You will not see any useful logs or stack traces with this error! Caused by ^^^^^^^^^ The vassal (worker) processes are not able to access the datastore. How to avoid ^^^^^^^^^^^^ Check the ``sql_connection`` in your ``barbican.conf`` file, to make sure that it references a valid reachable database. 
"Cannot register CLI option" error when importing logging --------------------------------------------------------- What you might see ^^^^^^^^^^^^^^^^^^ .. code-block:: text ... File ".../oslo_config/cfg.py", line 1275, in register_cli_opt raise ArgsAlreadyParsedError("cannot register CLI option") ArgsAlreadyParsedError: arguments already parsed: cannot register CLI option Caused by ^^^^^^^^^ An attempt to call the olso.config's ``register_cli_opt()`` function after the configuration arguments were 'parsed' (see the comments and method in `the oslo.config project's cfg.py file`__ for details. __ https://github.com/openstack/oslo.config/blob/master/oslo_config/cfg.py How to avoid ^^^^^^^^^^^^ Instead of calling ``import barbican.openstack.common.log as logging`` to get a logger, call ``from barbican.common import config`` with this to get a logger to use in your source file: ``LOG = config.getLogger(__name__)``. Responder raised TypeError: 'NoneType' object has no attribute '__getitem__' ---------------------------------------------------------------------------- What you might see ^^^^^^^^^^^^^^^^^^ .. code-block:: text ... 2013-04-14 14:17:56 [FALCON] [ERROR] POST \ /da71dfbc-a959-4ad3-bdab-5ee190ce7515/csrs? => Responder raised \ TypeError: 'NoneType' object has no attribute '__getitem__' Caused by ^^^^^^^^^ Forgetting to set your non-nullable FKs in entities you create via ``XxxxResource`` classes. How to avoid ^^^^^^^^^^^^ Don't forget to set any FKs defined on an entity prior to using the repository to create it. uWSGI config issue: ``ImportError: No module named site`` --------------------------------------------------------- What you might see ^^^^^^^^^^^^^^^^^^ .. code-block:: text ... uwsgi socket 0 bound to TCP address :9311 fd 3 Python version: 2.7.3 (...) [...] Set PythonHome to ./.venv ImportError: No module named site Caused by ^^^^^^^^^ * Can't locate the Python virtualenv for the Barbican project. 
* Either the 'broker' setting above is incorrect, or else you haven't started a queue process yet (such as RabbitMQ) How to avoid ^^^^^^^^^^^^ Make sure the uWSGI config file at ``etc/barbican/barbican-api-paste.ini`` is configured correctly (see installation steps above), esp. if the virtualenv folder is named differently than the ``.ini`` file has. REST Request Fails with JSON error ---------------------------------- What you might see ^^^^^^^^^^^^^^^^^^ .. code-block:: json { title: "Malformed JSON" } Caused by ^^^^^^^^^ Barbican REST server cannot parse the incoming JSON message from your REST client. How to avoid ^^^^^^^^^^^^ Make sure you are submitting properly formed JSON. For example, are there commas after all but the last name/value pair in a list? Are there quotes around all name/values that are text-based? Are the types of values matching what is expected (i.e. integer and boolean types instead of quoted text)? If you are using the Advanced REST Client with Chrome, and you tried to upload a file to the secrets PUT call, not only will this fail due to the multi-part format it uses, but it will also try to submit this file for every REST request you make thereafter, causing this error. Close the tab/window with the client, and restart it again. Crypto Mime Type Not Supported when I try to run tests or hit the API --------------------------------------------------------------------- What you might see ^^^^^^^^^^^^^^^^^^ A stack trace that has this in it (for example): .. code-block:: text CryptoMimeTypeNotSupportedException: Crypto Mime Type of 'text/plain' not \ supported Caused by ^^^^^^^^^ The Barbican plugins are not installed into a place where the Python plugin manager can find them. How to avoid ^^^^^^^^^^^^ Make sure you run the ``pip install -e .``. Python "can't find module errors" with the uWSGI scripts -------------------------------------------------------- What you might see ^^^^^^^^^^^^^^^^^^ .. 
code-block:: text *** has_emperor mode detected (fd: 6) *** ... !!! UNABLE to load uWSGI plugin: dlopen(./python_plugin.so, 10): image not \ found !!! ... File "./site-packages/paste/deploy/loadwsgi.py", line 22, in import_string return pkg_resources.EntryPoint.parse("x=" + s).load(False) File "./site-packages/distribute-0.6.35-py2.7.egg/pkg_resources.py", line \ 2015, in load entry = __import__(self.module_name, globals(),globals(), ['__name__']) ImportError: No module named barbican.api.app ... *** Starting uWSGI 1.9.13 (64bit) on [Fri Jul 5 09:59:29 2013] *** Caused by ^^^^^^^^^ The Barbican source modules are not found in the Python path of applications such as uwsgi. How to avoid ^^^^^^^^^^^^ Make sure you are running from your virtual env, and that pip was executed **after** you activated your virtual environment. This especially includes the ``pip install -e`` command. Also, it is possible that your virtual env gets corrupted, so you might need to rebuild it. 'unable to open database file None None' errors running scripts --------------------------------------------------------------- What you might see ^^^^^^^^^^^^^^^^^^ .. code-block:: text ... File "./site-packages/sqlalchemy/engine/strategies.py", line 80, in connect return dialect.connect(*cargs, **cparams) File "./site-packages/sqlalchemy/engine/default.py", line 283, in connect return self.dbapi.connect(*cargs, **cparams) OperationalError: (OperationalError) unable to open database file None None [emperor] removed uwsgi instance barbican-api.ini ... Caused by ^^^^^^^^^ Destination folder for the sqlite database is not found, or is not writable. How to avoid ^^^^^^^^^^^^ Make sure the ``/var/lib/barbican/`` folder exists and is writable by the user that is running the Barbican API process. 'ValueError: No JSON object could be decoded' with Keystoneclient middleware ---------------------------------------------------------------------------- What you might see ^^^^^^^^^^^^^^^^^^ .. code-block:: text ... 
2013-08-15 16:55:15.759 2445 DEBUG keystoneclient.middleware.auth_token \ [-] Token validation failure. _validate_user_token \ ./site-packages/keystoneclient/middleware/auth_token.py:711 ... 2013-08-15 16:55:15.759 2445 TRACE keystoneclient.middleware.auth_token \ raise ValueError("No JSON object could be decoded") 2013-08-15 16:55:15.759 24458 TRACE keystoneclient.middleware.auth_token \ ValueError: No JSON object could be decoded ... 2013-08-15 16:55:15.766 2445 WARNING keystoneclient.middleware.auth_token \ [-] Authorization failed for token ... 2013-08-15 16:55:15.766 2445 INFO keystoneclient.middleware.auth_token \ [-] Invalid user token - rejecting request... Caused by ^^^^^^^^^ The ``keystoneclient`` middleware component is looking for a ``cms`` command in ``openssl`` that wasn't available before version ``1.0.1``. How to avoid ^^^^^^^^^^^^ Update openssl. "accept-encoding of 'gzip,deflate,sdch' not supported" ------------------------------------------------------ What you might see ^^^^^^^^^^^^^^^^^^ .. code-block:: text Secret retrieval issue seen - accept-encoding of 'gzip,deflate,sdch' not \ supported Caused by ^^^^^^^^^ This might be an issue with the browser you are using, as performing the request via curl doesn't seem to be affected. How to avoid ^^^^^^^^^^^^ Other than using an command such as curl to make the REST request you may not have many other options. barbican-2.0.0/doc/source/setup/keystone.rst0000664000567000056710000000333512701405673022224 0ustar jenkinsjenkins00000000000000Using Keystone Middleware with Barbican ======================================== Prerequisites -------------- To enable Keystone integration with Barbican you'll need a relatively current version of Keystone. If you don't have an instance of Keystone available, you can use one of the following ways to setup your own. #. `Simple Dockerized Keystone`_ #. `Installing Keystone`_ #. Devstack .. 
_Simple Dockerized Keystone: https://registry.hub.docker.com/u/ jmvrbanac/simple-keystone/ .. _Installing Keystone: http://docs.openstack.org/developer/keystone/ installing.html Hooking up Barbican to Keystone -------------------------------- Assuming that you've already setup your Keystone instance, connecting Barbican to Keystone is quite simple. When completed, Barbican should require a valid X-Auth-Token to be provided with all API calls except the get version call. 1. Turn off any active instances of Barbican 2. Edit ``/etc/barbican/barbican-api-paste.ini`` 1. Replace the ``barbican_api`` pipeline with an authenticated pipeline .. code-block:: ini [pipeline:barbican_api] pipeline = keystone_authtoken context apiapp 2. Replace ``keystone_authtoken`` filter values to match your Keystone setup .. code-block:: ini [filter:keystone_authtoken] paste.filter_factory = keystonemiddleware.auth_token:filter_factory signing_dir = /tmp/barbican/cache identity_uri = http://{YOUR_KEYSTONE_ENDPOINT}:35357 admin_tenant_name = service admin_user = {YOUR_KEYSTONE_USERNAME} admin_password = {YOUR_KEYSTONE_PASSWORD} auth_version = v2.0 3. 
Start Barbican ``{barbican_home}/bin/barbican.sh start`` barbican-2.0.0/doc/source/images/0000775000567000056710000000000012701406024017721 5ustar jenkinsjenkins00000000000000barbican-2.0.0/doc/source/images/barbican-components.gif0000664000567000056710000015452112701405673024355 0ustar jenkinsjenkins00000000000000GIF87aÐ÷} þ§ ý Ú   C .[ïü5"{ü÷+ C"""$1ö$ÿ$Ÿ&Ž)³)[*+;Ë+7ô++,,ÿ,$-Ê./Iq0ò3ÿ3%4H455,6ß6{:ð;ÿ;+Vaeobeÿcný RGB profilGeneri ki RGB profilPerfil RGB genèricPerfil RGB Genérico030;L=89 ?@>D09; RGBProfil générique RVBu( RGB ‚r_icÏðProfiÿlo RGB genericoGenerisk RGB-profilÇ|¼ RGB Õ¸\Ó Ç|Obecný RGB profiläèÕäÙÜ RGB ÛÜÜÙAllgemeines RGB-ProfilÁltalános RGB profilfn RGB cÏÿðe‡NöN‚, RGB 0×0í0Õ0¡0¤0ëProfil RGB generic“µ½¹ºÌ ÀÁ¿Æ¯» RGBPerfil RGB genéricoAlgemeen RGB-profielB#D%L RGB 1H'DGenel RGB ProfiliYleÿinen RGB-profiiliUniwersalny profil RGB1I89 ?@>D8;L RGBEDA *91JA RGB 'D9'EGeneric RGB ProfileGenerel RGB-beskrivelsetextCopyright 200¯7 Apple Inc., all rights reserved.XYZ óRÏXYZ tM=îÐXYZ Zu¬s4XYZ (Ÿ¸6curvÍsf32 BÞÿÿó&’ý‘ÿÿû¢ÿÿý£ÜÀl,Ðÿý H° Áƒ*\Ȱ¡Ã‡#JœH±¢Å‹3jÜȱ£Ç CŠI²¤É“(Sª\ɲ¥Ë—0cÊœI³¦Í›8sêÜɳ§ÏŸ@ƒ J´¨Ñ£H“*]Ê´©Ó§P£JJµªÕ«X³jÝʵ«×¯`ÊK¶¬Ù³hÓª]˶­Û·pãÊK·®Ý»xóêÝË·¯ß¿€ L¸°áÈ+^̸±ãÇ#KžL¹²å˘3kÞ̹³çÏ C‹Mº´éÓ¨S«^ͺµë×°cËžM»¶íÛ¸sëÞÍ»·ïßÀƒ N¼¸ñãÈ“+_μ¹óçУKŸÚ^¶gسkßν»÷ïàËÿOž;¸|ÔÓÏfà Ž÷ðãËŸO¿¾ýûøóëß??pêØZ$sÍ&¨à‚ 6èàƒF(á„ b@6¹áφñܳDüÜÃa<øxš=SM4+Fãâ‹/®XŒ.Ê8ã,ÂX#‹4æx#Ž-ÂhcŽ: É#‘6þH¤‘3"ÉãŒmäÐÏløsÌ&’l¢Ê–ªlR‰$®\ƒ?UÍ(’T¢%—XVr 9ñiâgàhPäxæ©çž|öéçŸ{â2z™í:ä$H9ìÜ#çAü#K%¶£M8˜fš©5ÁÄRÉ/ñ ©+”Zªé©ßH³‹*•,ꜛõÿ“€Öjë­¸æ #.JY•WV¢Š-»øbl0¾ì (’Œr ;‘³I,Ò|޵¨^ûͶÛŠ+ì\CiµØjj-·Ü^ëí(ÐÂ:>÷ˆˆÏ£ ɪÁ2Ç s˾à ³Ì¿,ðÀlðÁ'¬ðÂójeäZ©6èVŒ®6¾°ºÌ=þüŠ5‡,2,’ØB±È(£+Í&¿Ðë.^UÞ‚åš]~)˳ õÃÎ+|¤Y ( lâeš£È2Ì1 #}ŒÒ ¬4ÓM+ü4¾QìÊGFN%°›²ÈW2 ,_ K% —­6(Œ|ørXüÀ»á=ó6é(›Ø"Í¥§^ÿË)«Ñ”HP<Ë» È}_«M0$obôÑ÷+ùäù.yå•OÞ/æ‘k>Ì)úºâ9åGg>úæ¥_žz¾šç{uÖy݃è1·àí¥š²Ü² £tÏ(ª¤­6ʶT2¼ÈÁsüð°l¢ÏÛ\ÅCŽÌYrYs%²lÃŽËm|âà{³K%Ñ|ÈÞ¾” ¾¹Ê€‚ýéðO~J7|ÿý®L0ÌMÜïÿÿþ{Ý]î!©,ÁbÁ`†5¬¡jCÌƪ¼ô&vTByË,N–ÁfpÇ€VØq ÃíMdÞP«~Ñ.ìãªà`‹·ŒMx­ƒÖ  '÷´áDèÃýÿ& :ÏåoýÛ¡•(À¹èCZyc† 
QÆopƒ‹+^%W±*^ÑŠó¢=ø51‚‘ŠVcÅ´ÁˆgqO„LAð0¸¼`t‰ýØÄ.ȸ-iH‚l|ä–/°g¬Bò…ô@ éŠ&ÀÀ ¼ðÅ#n@W|Á7X¤'ˆpƒGø"¾àŸ/æ÷ÈH~ˆD¸"WÉÊV"²‰pÁG õvÅZÚò–¸´åmñGmäò—À ¦0‡IÌ`ª¬Å6zG¥ÜCª¸!Ùf‹@Zƒ d·! ^xó›à '/à pꡞ(ÁxÁK<–`€ïyÏ}ðG¶f²zÔ£=´.±ÒæÚ‚Ñ=4HAúQH;PÒŒÆt¢}yËIkº£œö´/}ѵ]´ dö2ªÛ»êÃuÆ­n^ªaÌYÛúÖ¸nï&泸âp~~³6ÆñŽwŒ#Øs.¶›9ÆcßÃÈŽ¶´ƒ-JôUÐÁ‡+¢©àZªÛà¶+b( Z¤)©Èµº×ÍîvϺN ;61íq‰d²½<[#n ô†?à±ÀsøãØûî÷´Nç]ÜÛ‰ÇÃMñŠ#—dš@·Æ7ÎñŽ{üã ¹ÈGîñ;h ×eÿ9°‘­ |C[ãз› + šËÙàО9bÇqp7û;Ø8§¹Íá,tÄö¹è…Õ³Ÿq‹Ü£¶¸Ô§>Rfĸ$ϺַÎõŽk~@ùXòXŒš›ýìhG;<ü‘´×üïÀ³ÙÇ1mÄ}kû9ö>˜ç}ï{Gl͉mì¹ûsÿ{Û­w¹»ýñ¼Û­ÝôŠÄƒߦºæ7oÑkÞA ½èGOúÒ›þô¨O½êA¿t!¾:K>8 š×·Oûí¡7Ýïm^Ìî»4è†o~ÀãÞÅ÷Ç=¤q|ŸLÞ¨ý;ü«>õkÞ||#^Àwþÿ8p/Ú£}÷¸'¿ùÑŸþö—ßöå§|å%¢MƒÏ‡m ˜ÿüó_®ûgý€˜gØg¸úg€øsÿ·€è (nV m˜¸Øø ‚"]ð å S’ö`&Õ‚.ø‚Ì 1( þ00hRŽb 78XƒµÇ <ç -/1„&e„÷G8@¨ƒÌ€sEÈv1È 9¸ƒøpƒ-(ƒ¸7…/¨…Zˆ….Ø\b7 q ¶Àph˜†j¸†|VmÎp‡r8‡tX‡vx‡x˜‡zX适gA@Ç€&Œ `†ÚàJxƒˆØ‚Ó‡ƒüÿà‚<‡R&E„@(…•(„Ó'‰7xˆLX‰š¸ˆ&Õˆ…8Фƒªp cH†ñtIdž®øŠ°m»à òP‹¶x‹¸˜‹º¸‹¼Ø‹¾Ø‹öŒù‚cqÑ & Æ0>¥ƒ5xƒØpD7èØ0„÷p„–h„ŽƒÒðóB&‰&A͘ŽX Œ`±§Š q q‹ôXög•@ý°üØþø9Ifq ô‚»P êhRøvƒÖ€ˆØP‘Øà„ÔØ‚Ê~Eº{IÊ£Gš{FÚ¤O*¤Ò § Ô@!Zº¥\Ú¥‚  J”¢fŠ¢äŸ5—¢ÌPšš™Ìð™iŠ™Èà¦j§—‰Xgº§|jj5*ù0 Pó×íÀf\¹¨ŒÿÚ¨ŽÊ••p¥Y꥔Z©–ú É€ÄäP|ú© ª¢:ª¤ú©›ÐBþ x`„ÚŸð˜pÄìp&’] ù¨¸š«ºÊ¨ãó É —¬Âj©…*ýp °PªÌÚ¬Îú¬Î Û0%é@ „&è°ú6÷ð Â2 Ù^»:®äZ®XX Œð¸ð«ºÒ®îú®:©ØÉš ½p¯÷Z ½p™ûНþú¯«¯üª¯[°+°k°ûа Û°Û¯ ë¯K°›¯ ±› É þA¨A  |èöp¬oƒ5 é‚Å0 æÚ².›«èÊ6¸° ðZ³6ÿ{+G‘Û û³@´B;´D[´F;´ „º´(&8Œ~µ5„ø²T[µ¸Z âê>Çp³z‚ \˵9»³ëP ³p´f{¶h›¶ » » ¶þjj¹`K;·€Ÿ0¯rSÌ|Û·–ù™ÏY™ÎÙœ—§&… Ì0™«™œÙ·Ž+ŒË·«œk¸‰›¸K§Û·½ ©@™{õ/Ñp0£ë"4+º4[ºSº£+(U0¬{º»£{º¦[»Sº9K²?Ñòà d˶Â;¼Ä[¼Æ{¼È›¼Ê»¼ÌÛ¼Í+ š0 Î˼Œ àð @¨9Ð9Àª„jÏÿ=ì€yÐú¬Ö€¡å›¾ÍJ2(k®ðº 33ëºð[¿UóåÀ»?‘éà ŒP ÓÀ<À\ÀÅ; _) ¥P ̶ `éàåð r À à0 9°´  îB¦-ª¾{Ê Þ°wÞ€™¬Y p'Â,̧© ý9>¿€4¯û˜€Mà ¿Ðÿ‚ MpI8ÐùSAü/ûsM ;P_€`¿ö‹¿úë¾[Îà › ½ÂËÀµÀÅ ì¼^Œ ½ÐÅ»ÀÀÈËÅ^üÅÌ‹Æ]< šà¹É‹Æe|ÀŒ€èŒîp½Ù ]å x°Ç°©& ËÚÂfŠ ÈÿG} ê Åà(£z ŠÈå[ ¢¿PÐ%° ðÄþt P§à @0OŒÄóÓÉ_àCmÀ4 Å÷›¿Há»é ¹` •  ¥0 ]ÌÂ<ÌÄ,ÌÁ0ØPÌÊ<ÌÊÀvËüÌÁ ï Ŭ ï МÍÊ< 1¦ÍÅ< Œà|p ØòËQ@¨ éPà {Œ†ªý°E”Œ¢Ì€oè‹ ÷ÐÈÍŠópÏé –ð K$9d5 ¿ûCàÉÃ@DÃpDÃðà -9p üÓ/ à ÐB|Ð"Ý/Rœý`íì¿ ðÆŒ&šðÒ©àÍÁlpüÿ Ó%ÌÍì 2]ÌÑ8]|Ó?m;=Ôµ Ì;Ý Œ@–  Œ@('-ÁºÐajòàé~°Çl°­Ìñt}¢Ó‡¾êÈ&™™wÆ„˜é '|g'œ¡j Ohl_-ªãs 
#ÐÃÑ®À§ ?-:ýÃ{-9½Ñý_ðE4ÒKTÒ&æð¿ÂÜ ©P @Ó;"B­Íø8½ÙC­ÌÚ¤ÌolÔÚ¬ I½Ôã£qÒŸÐÙÀ (Øù`V ÖºÎÔ!q— ÃÉÜÁÐ Ãݯg ÜÀÝŸÈ] Á7™‰Ü š< Z¸À§ÏÃ=ÄG& * Èç ÿÿК<Ì­ÜÌÜŸ‰ÜÂ=Þã-ÜÉCÜ{¢½À‹½D`?Pý%D:m? ðÃÊHÔ/ÍØŒíØIñt˜]ÚÃ<‘ÊÀó ÌÍ>½ öv7 ÖxÌã0Æ.ÉÊÌ Ô ’ÌÀã ANw,.ÌÊpgÖ Ì½ÀÖ¥ýÒÚœ µJŽ  m É0%ºÇº ²)hÛ¸]º€„ÚSŒõ nk¶ Û ÷jå½0  /WŽˆÃí(Å Ðøúüp¯0åhî¯òÍ "É0…Ð/zžÐ/©•Ã~ýÃÃÐàŸ\ààŒÍ¶¬¿½³ÿ芞èµ`ÔÀ¼è¾èà8 · ŠŽÝŠ.pÉS"qsà:pÔç|–^¶%B7V™èØ]|µÀáØ œ^}u£ ‰Îs¦þ‰ÞÜ÷p|ã èÞÆé‹î¹À®è¥ã†ÀÚm› ¸à~à½0 òµaÛç xp­òö0 e›æCË ˆXåøpåãÞ V9æþ@îøÊsÌp¯b~¯8çíò¾ `I¾0 Æ‚ïžsïùî d¥J’ó09Iø~ïýb×ýRHÃ`ðÏïü¾ð¯ïšñ?„Þä?¡•Ð ÿñ ò"?ò³  ­ÐÌãðñØýñšîñýñÎÁÿpòVéñÇ7ó­`pÒÐ y—è­Àê.ÊðóçñŒèËâEW'úÊxÑ"EŠ7Vôx¤CAJ,éq%I_ ÊõóWÓæMœ9uîäÙÓçÎx•Z %ZÔèQ¤óð)cúÎß®¡óîõæ¯×P|ðˆó‡hÖV\½å§¾¢\½¶;›¿`­œò£ËÏß¹V³æù÷é_£Œ &DH!CŽ*‚5‹à ‘<È白|4ö³ç®\—ÈŠ~Ž&]ÚôiÔ©U¯fºŸù¹ã°Â«dÆf¹NºM`ìÄ&±‡´~òñL‘ÈühoFk´ñF{rO@©…¶k ò  ‡Ä®»ÞÁ§‚ÙÅ8ãr'˜`°ÙíÉà~¬J™çákv9›]”;‚ŠÜåÇ‚j¡®6kS3ÎJaÄYlÑsO>yéÎO?ùtP=tPÿC»#tQB ”ÑAÕËqRžìI¦R2Í´R0ªSMÇÙï+´äâgÓÜvéŸy8 +AN³êÔŸw4õE7RÜ VV]•.RDUõ(PëÔÕM- ÔL5a„G1D…¨–Y@aD"‹Áˆ$3"Òò‘'I"ÃRvÛu÷Ýö8K'›Sàò^|óÕw_~ùí»øGH¥ÆÇZ§jâçƒï}‹ËZ¬±‹ŸZr«éž`úÕxcl+©dŽû…C<É“OO 4e•[”e—c–YRx)Í'O$Ñtgž{öÙØEõ…”ªÆñå»T%ÅK¦H¹ÕMe%EÊÿ2ñá‡Ó³v~:S¦ƒ!åW¯gáãVvÙmjiZñ…v|v›gIY¬”ÆDyßlïcŒ"Ãà“Ir—ÜÍÎM6jV|qÆáåÌpœ9E6MR¹;dÌ9¦—|×´6_*;Ç×ó{´VÎ3Wý^;/W}dB%”'ÙCø¼Á™ÅÝ–Ý{žPš¯ñ5qråmæ5­ožë;§Ó„ïùu–Lƒ™˜”]ü‘FÓ{jíÔb˜Ëô{vîþ{R´÷çÔ_‡n›Ìõbì›÷Y ­«vcØ!†BÀ@d¢gtÁ\‘Oäõ‰O‚¤ k\T\|¬•„ÿ$<¦‰Ê­N„#$a EغÕÍ"n%S… 6a»˜ýnO}háðøô;ºŒV°a¤fRÁí(•ÐÄòôç6N‘by¢0âÎ>Å,ç)Q­h¢ÏòǼ$F1Yšš"T”¨)/ºm‰KäY&žÅ‡…¬ü3Ä&T1 =¼" ~#F9²¡ÀÉc3öHÀ R‚lÑ6Òx¯Y´"¥(E+LøHHF’„ÔqÝÆÖXK¨.œÀ`ÀC[ôá7Ѓ-Ta…1ÜÀHå6a +¤LD€ÁlAˆQ–ò”z¸-MIK[þ.–¶°Än»]öò–ð€zȧâ Ò‚žyÆ)<ÿØ &fS›cüâ6½ùMpjó‹Ý g9™8Î1šSÛD')ÌÈGh N›Î³±‰X ¤¯+:?¸#åxF "c„òD^~ˆL6¤ùPˆÖ Œ¨$"±²»a£’Ä—F±…9fô¢å¨BŠ/ò+¤Î*̇`ñRZÄ”›À ¡1À †06ASRÂÂ` h;Z°à˜B ÅN{º KÔ” DR—zTCµTÕ,ÕL•%²éKÍJ XÄ4­f=kZÑŠÖhF´4-:8²‘‹zy¬™àk&:È¿L¬S°ƒ%laEÁWÃn³ü»ƒlR‘ ÿIØM²"j…³ì¹ X좽.ZV¸#ö8W9ü°@ ”Ãç'WØÆÖ=›°Üdm»Ñ|iô¶“%©nwk·Þ^ô·À­¨o‡ÜáJ6¸kÌËp!«!¹a–­f}áKǃ—V‚0„%`V¢ÂBÐêxcVír×xé¬`^ðfwcðÀîÀÊê½°‚¦û_8®²ÝãgíêŒ\`¢±ƒ‘D'ü`GX¦p…-|a /6Ãæ[‰F®1>FE%cˆÅdv³œM†=H+{Ðäqéx "£‹Õ–#À@B üc ÷› l"óÒÜ#{ÿ*‘E–Ï’—ì©#39ÊOV2’ü''CK¡‘³LEÆæCçýïz_z‡À0`UÍüÒð  ê&¾kÖ5·™oìÌÖðÕ 
°B¡+Ág&ð׿f´€äÑôƒ®~ìk_>H"æt§=½a¿~ÚÁ~e„‡ù ›WVCuS˜{ªâ:+Å62Ó¢õÐ5à0‚º|b®rØ8ët±Üd¬ÊËfv³ýlh²GI…«íÛRPL3À˜;§dY£;è½i©ki“Z¨gð¿_;]X È…â#Ðè±ÿ«Á ÊCÂ>ÜA, FÐ<0€1ð>Ù "FÒ?$LB%\B&l®û6±Aг‡ØÐ„Liµ£½C9JÈJøB0 Ã0ôB¾êÂØóB1LÃ4$Cÿ2TC0$CÆ „F`¹–Û¾ ¼´í3¹“À*â" áƒl«›ŠÂP8…t€ÁHã#b—€<x`#BJTœ|X‡ÛëAMÜD¨OÈb 4ð Ê&'0¹@ؾK{NL#LÁ˜Ã¾j„ë¾é”„Ä ÃØãƒî3@ó¦hL¬Å*µxj®.³­VØ \è†àT yQJp…’]ˆŒ×"Mý<É [dM PežË,ls1yXPmP}PP P ­P µÐÍŒýü9ã+»"†\x…SQKȼ•“gLQ=@Ò;=ë´@K{½ÃœCÄüÅêËÁ ½ï̼ ó8QËÂZ1Eª:£øÉSpppÍtsÙ±pD™£FÜP*´I"ÂIn:§-½Â‘̦vâÒžéRr'-•¢0%£ÿ2SÇDÓ3SRp–lÊœ4;½S<ÍS=ÝS>íS?ýS@ T;­Rɳ¸òPÙŽú<¼0¾lNÕÀåÔC¿ÔÑ“¤ÉÃL¹ý4\t½ Œ¡(Ë«„#‡ÔÐÒ”p—IˆApX r$TY-T¯b°&" ,mZF^„L_ýUu’`mµJȆ÷œUde I­s‡ÏrKXÔÒ;=¾l9—³ÔÀÔ@×39Äì+39^lQF]Ìk«Å™ Á¨Ox`ÈSUR¦dñ£üÄ QJÎdÕO‹óPPÑŒ=V‚-ØÅúÕÅ2Hø] †gxXˆX‰XÿŠ­X‹½XŒ½Xpˆ×}­™;ÍãËLàѽÓK=j%5/DQÁ L´ÃI•ÎíBŒ0“å TIٻäœ): Fpw5Õt€1Tyéµ|ýL{(‡pÑ£ŽMV50»Êzb½iÝZ®…0ý8þ!‚@ˆ8[´M[µ][¶m[·}[¸…[ƒr¾¨• yy†D0ÀÜ[þéÂ4TÑõŽýCçÄVÀÄCp@~$Їtp£•4yU5Êúd¨Ð׺uÊe%-gÈrðPbÖ®-]‘ë QcÆG(Lx…cHj¸Ù]Ú­]Û½]ÜÍ]ÝÝ]ÞÅÿp¨Ímy!†6À‚-dC¿…I¾âMÛÎ5¤Ùä}Ã?<¹•äÙ ÜÅLP\·qMÌ\›qVˆ ÕÚ >* Ï ^dµSMP¯|†4]Óƒß 3#C89M°ååG(ÖÅ\xÝhàjˆ†jà&`.`Fàn`6àŽ`N`~`†` ~à Nànƒ)EßÅ^Ád°^tCfÌQ409çMLÆ:5¡¿BÑgäE>DM.ÚÊ%e>𣥕KµÀÆÒÎöÅCƒSÓÕÂònªÏRãsôÊ:ÊœhÚpÞ5âÎ(É W¼ƒ½e@^4½JfÀ“=YK6„-ÞâÕbQåþÑÿ=†eX3^eVneW>ã äF>G>ʆW°Ã8ÎVÁÜÖ–uÉÃÌc1ÜcOÝ4ÁJSp"UÕÜ™×É𱟓Ð0ežåÑ$âHÞ«2œ& Ù2DC6´dÍä¾J^äõfn&L8\ÀWÑO~xŽç-Qu¾…c@åTÎg}Þg~îgþg€hög\xg¦fvypx†W0> „­cÿÄÕCêìTK›ÎÆQïÔÅF,›ô¦ƒ-¬N¸Ì$•eN“ò͆È„vdÏXè\È™JØÈ¨Qå¥Ú'fç•Cx„S¸…a(ê_¸WHj¥>¥v…[ø…¢>†a@e|è~¦jª¶j­^¬æê­ÖgW0è—vã;°Åx^ÎMQµÙÕQL FQ¦Eáî3¹ûU¹ë´L Õ’^y_8èšèw?˜æ±îȩݵlp†WŒè =ö ò„\ «ußJði˜d#¡v…¢.mÓ>mÔ6m©¾ç©Žê©^í¢>…Ò^mØNmÛFmÚníÛÎmÿ©nmÖîmÕ†°6ìÆÎ‘I ÝØ´ÚÑ ç&µL°N–”T=|ÙH­=Ű¯Å°¿öb(iƶ1(•®S{ßøîâÎÑ’ÏèµÈ0€Ö=Oð„¯²„ùÑ\¡MàÈ"îiÐvCÁèƒÑ¾m?íS "hÙ6pW˜€ÓÆO8ð-„ahƒG(m §p/íáNo²VPööŒl@NN«Nk­îlÎLÝë ó«hÅfüýkI8`x×£eœ×¸Oe) FñôÍ–~oˆ‚ä¶Z&ZSNÈÅŒ~¥Ëp0¬G€Bm^øp׃x„/`¢¾íÿ?íB(s/7í&Ѐ>èƒðhƒÒžó5§ð'rÇQßsé†zé¼Ð³Ù”ECglN—uΘ¥è/ôÖ «„\Àڙ栖tO8…GW̨ ±±OøLhFôÖóJìÁù'Á€$ýÜ|\È…1)ϵ²fhÁã<î ÍV„ °„¡†‰–èõ^×°ˆ ð^ø‚ ‚Sð…Gè"hƒç¥/°ˆ/HvKÀÝ)öcOö[øQ8p˜ó‹ w_?wtÏsQW…ÆRqFÀ/ »\Z_ôw—Oxòä~g «%Zx õÉ•‡tÉ\ò½O øu¯Abÿ6œ(Y‹¿øAeZ²+ãÔ«zwÃ9ä 
ÍÞïOÀWô8=ð=[^ð„/€ù(v!|O"xœâ؃>ˆy¡ŸùQÀùG˜C@‰•ÿŽ9G‰¦Oyó±nxv6p†œiÞE‡ÞCI·ò`f„$UP:ÔÐíï(ßqMO‡p‰Õœp‘p¡[ªßÐ~ø'60(E¸O¨×¡$­2kù6±Iç Ù¨tvøwubЀBzòЃ h‚˜€GðKЃ à+؇…Ìç…6€ž„6˜y” &ÌçÏw"ø”¸.¦o§—}Ç'©'î¹·™ÿGŽœW°Ì.FÌy{|Ê[«¸ÍÐ[ƒ¨×`Iº×˜#<`øÜŸ¸tÁ€l( :ê5i.MCEn&çw'gø¡…r£Í‡g`üg"`×"¸O¨·+ =ñ|=óU2„Í[=U Bd‚­Qm±"ÐVŸ…aŒ™Xñ!ÆŒ7 õÀž¿"G’,iò$Ê”*W²léò%L—ýòÙsW\6g¹Nyª$‰‘¤ Œ*y:õ X6pJ³õ¤äô)Ô¨R¡J:ν~1·rui MÚc ƒÖ®jײmëö-ܘò ø@9wÙ‚kÛo¦=yòÜ¥KWÿî0âr…Ýɳg/_Ú~Ï4p¬lY o4¼aËB+DnàÈÖŽÑ õÑË–#+žAÛÚ´°áCX.êEÑâåà7}Œkü8òä2iÊ3|3ôgÒŸAO N1ãæà€Uª4õ»ÔJ–ˆ]ͪüüÈ~îPÐu2_º°òÐÓ¯oÿ~[]r¤{àAcîti¹õW> :¶ c Bf`H’i ,´Th¡…^¨!…zØ¡T0 ›X2A 0T‹BP¸Ih Á&´¬æÛ0Q Š*Âb[… eH‹!L@",70p¤!}¨!->é!†7~Yj¹%‚ ÿVXbŠ¥ÃØcÖTN6¹XâxRIBT.ÙäÙ–Æõc¡HýÈC{u :(¡pÝi—;Ÿì—•=åàŸuJ8%¥•ZÚ!(–n2%(™Rè饟†:ꨛ0€e¡©ªº‚ 2˜à_ÎÔšÀœbÉ›AAÕH#¹zò 1rb•Ϫmå³—¨ŽdFZ,´Ñ š`̘•ŸX=«å¤¤zû-¸áŠûm%ÅI{.º~QkNÎóÊ);íµÛþzŸ”ã6è½û~n?,Ï¿hƒ…šãél?ì)òöïÑ'—Oòº-µ7­¼=÷Ý{ÿ=øá‹?>ù僯ïÒ«¿~–ýüë‡ü!ƒ(Ä!Qˆ4<"abºÌåÀ,1°‡ýØ`µlÐeaÑþ –Ä-–Day†¹(Æ1’ñdw¢¢Óþó¿:ÆyzÀÀ6ä°ŒbìG³rp@;ò±~Äž;HGª ò€Œ=Òñ/º °aüc÷0ÒA²’–¼drî”ö0’’GÃBGèaòwé`]º°ÇR²²•®|‰Ÿÿþ¨Ò@ð‘_ùJÇAq–9p$)w)ÌaþÑt¸@6(y'pÐÅî&1y–ö< ~Ø4£©Ímq™t Â*EbºI°1›Ü,Y³0ð ô!ðœî|ç ljLJF(‘¬'<¥Ÿö¤£1æÌ'@Ú¯lLB=(BªÐ…2´¡ õØ6øá¡xÀCEªÑ"tÄh§@Y%?#¤[!=)JÕR¹hA .})Lc*Ó™Ò´¦6µ©ršÓ›ò´§bÐBð‰”¶Å‹xF‰ªÔ¥–Ä,˜¸T£*Õ©RµªV½*V³ªÕ­r•ª¸˜SSÒþèâ5¨ì‡ÿŠf”£n}+\ã*×¹Æu”{F:þÖ.ÎA~ý+`+ØÀú]=,b«ØÅZ5Ø^CâVDá_0B(ŠV?$/m+]C+ÚÑ’V®~`CŒ`Ù(Ü3²HÌÇ3ð„äýǶ½-ns«ÛÝòV·_`,pƒ+Üála $òºÀŠÂ vŸÙ8ä`£+ÝéR·ºÖ½.v³ë Svy~p‡k9H # g–vÓ› y<Ñx/|ã+ßùÒ·¾ö½/~ó«ßýâW,K%F € I×VŒAIÕ«à3¸ÁN£,ÙðFOhb™ŒÝš€¿þ0ˆC,âüÿ> Dµp ža]̽~1ŒcìàgÄ î¡0èÒƒdCÆvG‡G,ä!¹ÈÑ(1J˱c_WÕƒ™£,å)ö9ˆA9pŒ¸O<€T®.‡—!æ1“™ÌÑXÆ{Ñ\æ2Ÿ9ÍgV³›×Üæ6¯¹Îv†3ï‚# #-Îÿ£8.^4¡â†1]'þ#ƒ±W–"·Dð¢*ªbƒœ¢B¶bC&dÀ½¢`9F|/ØbäFpF|ìÝx‚Bhd0b¤ÚÂI6„/bDZÁ<JŽ‚-x$Hz‚FêA ÈdG„ÆhØŒ‚%@ T€!Ø‚hØ;ò‚;^äT€C‡OÚã0õC [ƒ}Ÿ"ÐÒå$W2+Aâ:Ò«Ü+2Z¦¥Cž¥Z¶%ƒT$TX‚-ÈB FŒ¤@AÄI¦¤CÌ¥-XÂ(@]>#JÒ%^jb„7ÚÂhÆ#À(XÁ‚N%;Ú‚;Öÿå`VTF¥ö+Œ"u’[VtF•–k¾&lÆ&B-ƒ5‘ÌÄ;ƒnî&oö¦oþ&p§p'qîf49 C<¤ ÷¹¥s>'tF§ZÂ%T&߈TT@ ¢^~†PÚBØ$`B&h@ ØÂvngQÄhdçvÚ¤@4æCŒä(T€¨È(€Æ l§*h&Lv& lçEG$ “û0Ù`}E0€蔃>¨NEA4„V¨…^(†f¨†n(‡v¨‡~¨NðCJ×3 Âă+TÂ#B¼(ŒÆÈèŒÎ¨ŒÒèÆè^TŽê¨îh¾hé )Ž^ÿT#B! 
Äkh˜–éC¨é—‡˜ „šÞÀEåCК¾)Tè.éB 80ÀPCWQC2ª ¢&ª¢.*U}ÁQ•htÅÀ¥Ë=œÂ#\¦n*§vª§~*¨†ª¨Ž*©^!´Áxª\Á#œÂ=pÉ$ŒN–Î*­¦es@h檮މ”À Ø)¯r„žºU~T‚îE(Ö¡2ª³>+´"V!‰¿ZH±¶Rs^Z ü–b½W´^,ÆflTµ 4ˆ¡¥Ÿ´¹‚+É–¬ÉvªBt«Â1´Fµ«Ìbé­Ž‹ÍÞ,Îæì”gÍê,Þæ­ÞZ ÏbRð´Û*Úƒ"4‘®á.ân ïê.ï~Hðï‡0@^ S:`À¥ÉnâF¯ôN/õÂפ ÐÚªlÚZ.øRn!¸mæ¦k}äEѪÏM,&¼‚û¾/üƯüÎ/ýÖ¯ýÞ/þæ¯þÎ/&4À(,MÒ@Rùí$˜P‚:/ôV/7°ØàPöVlΪÄC%”+§&A$sp¨~ðw«wpÚ–0oj´Á¦¢0·:A%L)z ïÓd)+0ÀSi¬ëp2J%”‚ƒ t,äÎÿP?Ã$p,ð7±?q“Ýu±Âž$Ž%\g±o1w±1‡±g±©vðñXÂ>Ð g©~äðÇqÆæÝqÇTÂýè$Q஢€4A¦ÍY|ùYíÙ›ýYœ±šåÙ¦ 2|²œ-2$÷Ù!™!wš#'²]2™Eñ–ãa–$ œ±)Ÿ2*§r§2B2°qú^)ÏÁ±ßr´Þˆ‚(رÆ0!À'±ù CÖÄŸ›2/33733{rzsªÜC%p+_36_s´A6k1T«žGK§<ø\C4 s:§óT­ó:Cÿ•;£3TUÃ5díÑ&m=ó:ËóTÑsÑæ³<[,;KU<³sAÏs=W->Ós@¿s?_ÃWíàLôÜ”€mQ tÁ.C¬â+þ±3‡´H4IwrH1).$*ÚCôE¡àBtó ´€Ëô)C!X³N'Aà ÏrtþI!`mÕF¯×î×RnSÓ— @UWµU_5VwL5VGT14Æ~³õƒ.ÁJ¯%2rI·µ[¿µ¯AsJ¯¥D.õV`ðOs±44€_7 ìõ§v8‚ #úâ9óœ{=±dWoØ51\6fgÿ¶foö­é1Lh'E |Â!AQ÷ì'DAZBäZ›k¿6lǶl»¶¸‰¹Í6nç¶nï¶l×vmó6p·pûöm»¶';'D¦M$^ÇL#¶û(€D·@uC·ó´OïuP‹3QÛm|L6yOïÁu„‰z¯÷a°z‹IaÄ÷˜D"@—=˜+õƒ"°tþ1âý7€¸€xÁw­²Áó Ê=8h7`·K7x0 vt“@„Û´…s0 ät d··ÀMçô”2bC#„3c³cõx—7Œn 9DA?E³ÄP™8Ú\R?@T/ÿ1¹‘ù#äÞ‘œÓ*F5÷Klƒ‰kqƒCA•_s8;€48(@4x @xp9LxH÷†+4¸(@P€@vB+cs•_y‚ù®x‹Cg×YŒ:‘EqcЪ¨’sžÖ&fèñå纥¸%ôÁë’¸+°`ÌôA§C“Ï*D‰2zôƒ%øôž[9¬Çº¬Ïz¬{pš¿:d&Qz ;ç¥x/09hzÕµ©—z¾3õ-Àœ'Œz©Ÿz–ž–ªŸ;<‚µO<ÅËzÀ°Ëº®Cô:±c7€„yƒcü±»•/;˜¼•›yÄAT¼ÌSü#°Ãô°xQ:ºï¼[«{­.:V¸;öú{—%³½¹Á9_ü]_Ù^¾7a@@ÓÃßõiÀ 4Ÿì5}Á+œõ¼/ ˆÂ#ÜÕk}ì%|À_=ê9=Ã…½Á1<–:¼¤$CÌÏ<Þ×ú­Ë:[yÇÃú¯ç:Ôyß:›Ãú²K÷à>$AOç=ä[y$”{Q—ÿ;Ïg¾Hû¼¢·{[¾»¶ET¸Ò[úC!ê§>êE½'@ÆŸ%ìÊBœüÝB¸‚/|zè/àþX£ÀŸ'´èÁ¢Þ¡X üÅ> BádžÈë+?úÂ)Ü"ð ôº'ðøÏ Ü_)ÝsI«G>ä7@—׺xü—>Ès€û€ýW÷É#þ›_< ܦĕBP4xaB„\,õó÷bD‰)FÌÇ=9n”'¯€e#I–4yeJ•+Y¶tù¦É/QÒÉëx§F?]ÜåÔÙÅf>¡BÝ=ÈWiR¥K™6uúÔ"?>7ºëÒÄÿWV­[¹võºµ Y4ÜÚú@ nø’€ˆÖ6,y2 áÆ„6ž*0PÁCÖ1&x ì)+Œôµ¢­Z¶_ohðED¬V³ÎõÄ×/`_–ØX;ÁÕWÕ«¹ÎìI¶6G¡ÖFï‘BÝ»’ààï94(è¸ï"‹¥ˆ¥L“†wöìâÙfz1cG›í)2fúcËŽ­_ïÒ½ûôóIÆgÿÒþú™5aûÜù:§ÿìj@{rÐÅ»\Á‡.š 6«°ò… +¬« /ÌJC ÁFá…D^H£3 áå0Të² J”%+ÿ¼H ®¬zjbI`˜ª­¾묵ޚ뮽þì°ÅÆzXó¢~úØc?ƒ'eÙdv£|LÇ(í¾Û©k#ì‚[üþpÀy xðÁÜ–°F™ Yþ¾Á/$‡Ëï `àe°Ql‘¥Óôø¯¿a˜ðÁº„Aà ¶ÄÈ%Ÿ¼oÃýÞ÷‘Ú!÷dv˜ôÌïÐý&íóÞa¼ðÂGœIÙýÞ¸ãÿgÃ;©|*¢Šê« ž ë­ßžûî½·ÞRê[H™†ê¡HÂ|ë©Wôåí¡Ã{ó‹XîŠ6¾¿Ÿ{øµ÷ŠJÜœÖiO{†ˆq¶© 
Tà¶²‚Pc›;܆,¸Ýäé€Þ9½Q%[ÇáE[‚-cÀgXø7{• €3Ĭл<ŽtC‹æP÷·Õµn…,Ä]àF14#ÖЛùïlá;[tÃ#áIȼæ!ëy”H<,?/~Ñ‹IÈÉè½7Ä¡Œiìž%ºS­®Me9ÈFÔÜ’Þ¸ùH6šDkk›.Ø‘ƒiZT$ÿ¥‚-¾Q’ÇÃL,l¯1,ÏJÇ#%·áý-,¬›f§Ò¥’™a¼Dé· TÀoyq¡*5 Å'J,Š‘Ôå߬xEŽ|l‘!G"ÔXLcÓ{€82Ó˜r€ìudƒT…r<­ŽTëãØúÐo~³ [‘ú°Ím>0œ`=VHŽta‰ æ<ôAŸ„p—ºœ¤ß:ǺQÀ–P…%¬ÐJÃ@~#BZ±Q¶(¢ á‰x!±‡~ûá?:ЂÎ¦\¼a‹Á3¨L" zËà5Ñ–ù¤b/}©“,.²Éx3yÚÓ/>‚ >ýâ’ÿ!ÏE³ÙÐ…Tf6Ô$›æ[ Àj ªD'»¤ÊGt¦3jë$$Ç6"£Ò­ykäÞú&SH2””(Djˆµ¨LC4i :´hî„ÔÌ¡!ú 1‰"µÅOxWÓ´Ò›øRàŠÔ7Xü(-¢!ëÒ\ºu¦iªé/oªH‘-S¨©M-#T‹?¥uAo”G³ñŒOä@ePDT»*6Ï(œ½½ãWÁú4±.‹¬:ÁlÓÚ\¤Ø3'ø- i‘пÁ€¨uÿ¶®> \x¹k Ulb¼Ùý[yÍ+Þñ×oªP¯y[Þðª×¾ó®iZSÿ`.2–jk |L.<¢ÀÞã‚'þg­.tºø„"d#ä  &0 °Ä!^g¶âŠ64á ˆÑŠ+¾Ð„‰ù‚œYyD²Âb‹Ç8&nqåqÜ·%×ÿuVò´€…’—¼d%3»Q–ò”±ûä'SÙÊMnrˆ´,å,3ùËTó”ÃLf'k¹Ì^>3˜×²¡#DúÑC3@bƒ ! ]`¢€¦BXÒ¨Vw•T cNSÈrY!‚Œ(D#Òô!J‹Ñ…h´nT£©&<ë ‘ëoxýìi t0÷!ÙØ0bÐEèåþðÓ‘Žr„ÉWþ4ãø bdÙ@ƨ_}Œ™n[†¨ÌLs®Îi`bëË#fôý6Xýèxá OÁ±&~ñŠWxGÜÁ!¤"°È¿›óÿ÷»úùOÿPÊPÿ°ÿÿ÷oÌ"Ê(0PÅLÿ0§ŒÜÉ—0zúÿöRPôšÃ<Îfú$R C¯õTP™T=È÷žÁÜbà‚OøŠ‚ÈÃ#Æ-ÜA }PeP`žŠ¦/ýªú°DDÍ,A-¶åh(bÚ &à^Jä ¿Ä Ó°[¬ÀŽýz¬ øƒ~Âý°èàâ/­æÏ#ÜÁ`ÀÿQ‘ ÑQ1oêp´Hoúa!/±ãH@N. ú$ :. "åA¯Sè/Q¶Ažú¡ …Ѳ¡’PY„¢r"ú!šð@ý€ž!ø”0›öË÷ª•d©”:É\Ê ¦ÿø©Q¦®ÙmäP#Ìf‚à®ñ¢ë ÑÏÓQ×ÑKˆ¡ØÖDÄ:  )‚ý sPJ¯ãødAQR\Z`K±ã¥nîó ÷Qý À> ‚ÀÜŒº z"t±Èð›^1ƒÏg+$¨qºLg–*^Dj¡dÁ Šæ¢†br&oH%w‰ OãXm#æñƒI=¢ràþØ‘)›Ò)Ÿ’o ® ガ´@E6,M$"§ô"i¯ ðÄãXÆóÀõXO:(à,K àP"…’ RÑ|Χ|/i ÿH€8€Ì2e²ÍQøÜá#›©’¯¯%`'ÝŠ%ýfʈ&ýJs Èv`ÓR*-ë1!©'Sà²1(´î°(2qBÒ€  ’6kÓ6o“3u?.ï*­¥º`h€buê+iÏœÃãIJóP&äs TõÖ²F9<¯æRÃ2Û¶Óˆ¶-¢œOs@pOÖ0›ÜϦC3ŸàkK®l!Ü+¾´Ëoê¾Üs•Ú4yÌ4#H(3n5‡ì(«t!-ÁLAT"TB'”B+tB´A4BSAð¯C-TB1C#TDÿ5DODKB9ÔC9ôDU”DtDQ”F/ôAá€À#±7 $hhØÀü!ˆ³8_ï8g ”ç|£ã¤ãäþñ,4,ãå¶à:qP;‡F E€ôü’/[€|À0‰O ƒâ0§ =¡æMÙ3%ùOóô1Gó {Œð h@­R5 T‹tÓ!Aà&@<ÀQR#UR'•R+ÕR/S3US7uRƒS3uÀë²a7ysP½ƒ_³ˆëT(’ah ¶ fuVCÏVm5!ãV”V£“Vy¦h ³;‘p®·àf€®óVÿA/WINQð’Z«ÕZ«µÒàl±Y>$kC¶©=iÁ’Ì\Ï•\ɵ\Ë5]']Ñu]×5ÉâpÞõ\ãÕ^Ñu^Õõ]éõoòu_ý¦_Õµ^ÙÕ\ùô PQs(”P¬5sâ5Ááˆaüc3Vc3Ö6dCVdG–dKÖdOeS8¥"eOv¦. 
çп¶A\q¢vÏ ÁjÕghƒVh‡Ög“|vLv €|¶ Z L« Z šôh}–eªv~nÎ!‰VhC:¶v~’€lË–l¢ â ÊTÂ5‚²I_ãVnç–nëÖnïoÿóVoÏ• Ëá?WíOÕ‰a ôaÑJTF%£ë—q+Ve‚±q#Wr'—r+×r/s3ëˆÁÜPr5—r¡Ïo°*‰Ò6ò¢ - pA¼vch;±|9~P|h—tj`V%Ågi7hOQv¶èî'ôVñT¡Âm!Ȧò¢Wz+ÏF7a÷lÕt o·c>B=‹O|ÇW|Ëa À$ÉW}×—}Û×}ß~ãW~ß·›‘B›¶*Ö2Œù@XÛ½o“ P€Tc›¾Ñz¶·¶ë¶qû¨î“É¥¯ m”›À¿—T†²!Ò>¡Úü‚* Ü—Ìm‹‡Z²ññ[%Âp¡—}¶ À  ÿB\ÄA<Œ?<ÄIhMÜÄKÄEüÄ? 8®xUÅ–ÆaÜgßÄ;·ÓQ³XÁ6L·ž!üð½“Ü)! ëÊï;{C:{÷›¿cË¿%YP (ˆ’¯i‰Q ¹!\¹á¢ú¨‰’ŠéhšÌÙä¶9<·•«ZÖâÄÅW<Å[<Ïû\ÏUÜÏ]ÏmœÏGÇ=¼ÅOQÐÿ¼ÐÝÅIÜSzx¡}œ ¸ÑP@JAÉ;=™Ü XÊ£œ&°™ÊÃÊʯ¼ž²œg+Òü`ˆýV§»|Ìß|¯g+ð€•Ï7 WÀtm]P™»UÙ fãôt_ÙÑÿ—Ù›ÝÙŸý áÙ§ÝÅ$ݪ‰ñ¢`÷:Ù/82ú´ºРFËÝÜÏÝÓ]ÝQ”„À ìÉG¾¥|àò{mR]ÕO—Õ«r¶>Á/‚Oe„š>°…ý·g Qkq à¬Í® ~ØI9Ò àê|AŒº¨]ã7žãEü©;ÞÙ+ªíú?a«eц¼ù„Qø”Š 4L`HægžækÞæoçs^çwžç{þæ+ÀbÀ¸ã]Þe»ÔÔ­çÛó]ßu»ÕŸ7­)àx´Ö!ž%ÚÐ|ëëç±­žAŸí¼"¦ûºA^í×^ÐaÙ¡€:¼/ÜÿƒÛ &a°­~OËÈuábÇð ßðñ_ñŸñßñ #_*Xf¹èÞ'³7é{é™~˜žßëÈß@Ó5½â ëù:jžÃò;õ%¨)¢îÄ’¦} ðÒÏO¯ØþÙå úÜ!½‡ãamAbñº Ó[õÝuáù«Í·¶´.Ó·Ÿû»ßû¿üÃ_üÇŸüËûYšð üµŽê¬ÿò1›7ß=ߪÿ{b…;Ž;f¯>õ³^â€éäɳ‡0¡Â… :|¡6ùüY¼h1›AÀ¹³×£È‘$Gæ#÷¨ŒÿÊ•,[ªl€— (p‰³¥š2jvîÌ ´ˆL—ƒ]©€ÄQ<}>"W±¤Ô©þúበ ‹.bÏüÄ ÂŠX6péÒ¹;‹¶l¹ràÚº} 7®Ü¹tëÚ½‹×-›˜ÆÞ][­àÁ„ »ÃŬÁÅŒd¼Eª”+[¾Œ93å|lðDü¼Pž»rº°bÕUÎÝAЬ[»~ Û¡Ab0<ŽöĨ"50’:_HÍSû¹«´¥§òåÌ{Ât@¹P š[¿Ž=»ò¦ÌéÒ®†Dîà—o©än8q’å:Äì`„kÛg]&É/gÖðÙ²üûûÿ`€H`öÎF1)ÿr`~ú=X˜cª-¦ÚYò8Yd“­Ça‡zÈ™g”¡=«‘ˆ¢hàDaZd"B(Æš‰$B”âb4θšl6îøc ñˆ£–fc4úã‰A2DäˆF$o•³pòXùáEýÈƒË å)“T±PrUÐÐBæÁ9SO[ÐÙ>´@šnÒéƒj虜ž`TÁgÉõôf ªÈ9ærƒà"z~òÞ'F¸ˆ_:àdóL6øAˆjªª®Êj«†¥Ã"V¤æj­‚!–šhU¨†^¸a—ÂK¬E!Ê8e“¯T1 @L9::©lÕæÿX­kQnû¤µ7ÆX¢¢•ÖÁmÞym· qîB»‰äÎIË%±ölóH¥jÀT†$(·…Ll*G8À¿jT!€Ô ʰà  … P7iOf €,,È$à€¼¡±Á{ÜР¯rlc°ýÄê"8]t˜;éæ ­BM´«°‘¼x]tª¸¦«hþZe±V_½^ˆénýë¯î€³l¼Ø.¸]C†î·Øžv×mŸÍ5ºîûXÙk+I-ÜÓ"»uÕÙã^ ôbm¬;–,ZLáדxáµYEÈj˜)çÀ;ñ¸r˜o‘DOe\ÜS ;ÿ2Ä4€¦¤8„>zËù³[XâN½ÄÙÒ“Ðʃ1ÝtðŸNİQH{4<ªOë7u†~N}õ&u¦wög§ó ð¤!ª¡)O‰ªt¸Ç¥€.4¢-7ŒP¥·Ãê²X?_6rÀ#ha˜Ä,f1›Ðc*s™Ìl¦3Ÿ ÍhJsšQ@æ4§iäàìÃeTŽÆŠÚiŠpùpÇ)ò0&Å©¡`U ÀÇ(ž-ÈSžË‚aÖ{:P($HB¨ ÔÄ&~zä# ÉC”A 1Q€ŸàøÎJåáiì<@‰€ÄX(œ¥Gÿg™X>`+±$†ñ‚¼Ö’0уÌôv Saõ2{ù5®ÓœêT§ÔHF2nºÓ  u¨D-ªQŠÔ¤"Õ§?UªS¿bt³1“±‡¼ØŽò°÷²DOäðU5Èa¬d•LÀê2;a¬lkÁ0Ö²’•+]37ÖƒÉ!r‰„k#×¹µŽÕaƒHÙÀ¹bPaq•«cÕ` š33€@F?¢Îøî£œ­¡*ÅöŒÒ Ù಑R®t0-¥êýb ÛÌÌnÙ@œz 
j·¼í­o›TÝר…ˆêT©D‘~Á²åÐjËyNÇJ÷¬yµ‰XåÐÿV9€A@Ó¶`V9m!)á•yGV„2Ìr(C &j¯“¢¯`åà°2â½ñ¯v_&ÝËa¢]O?r×lˆ#ší¬ƒ›÷3?D¡T¥yÀXžÁ#Å£«½Õ-]šËØŠ3³=[ ¾ÀÛhDc¸,n±‹_ Ü6 à¸d-7'[½/m£e0Y©+‡$¬Ù•$&1áÀ’±âdí"™$`/%9';’@[k—Éê09¼Xn–Çj¦*øX®e¨Ä6@È!YxÁ îèƒïL´Ÿ‘j,.Ä8ØRªPͲôü°kGŒhÙb¦P±££Q HÿG:Ò†´¤-]éGOzÒ*Þ4¦3 jP{šÒš¾4©EmêN§:Ô¬.u58]éQ·ZÖàæT%#yù¡À»,ç+±æ±^WØŠUlrÊ*»“5Ùrƒ²ÅJƒΕ½­vc F‡içUÁ.+"^ÁkÍpʺ s7‹çt×J-kbýØÝ3Tú,­ÕÐkï:ã!{îðCZ ð€ |à/¸ÁŽð„?º x˜û$³dÇÖãª8,‘ínk|ã/Ø|« ºŽ  =ù–Õ €lœ–à$Å+SŽNœ$ V·Í[ÕŸrÀ2déÏGç}CÛ;ßD—J‰3ä6ü[ÿáLoºÓŸõ¨! ?bb¢ 8öœÎð*Ç¿vÅÚ±czÔ¸Æ.€²AÊ6dn¯ræ4·sZ|f–Ô-ïyß}F˜¿ë]h‚ßZ®’ƒ x…oÐ[Ê«—}òþ8:dÜÑ…¥G}óœï¼ç™>õð™î BÜwÉÕt¼"ao}Øew쯗¡ gö×ñ ºÇ+~ÖúõÐ]¾ûœñxþ~ðøß!??Uñù“|ÁúÌ_~à}V޽h¸ùÁþâ¯ÿŸä³! §Â!­v%šÃ„˜òù¶xƒ@ˆƒAȃG؃•öƒH„FH„BȃM¸„HHƒMøhS…Eˆ…÷Ç>îPz§Så–Àr`gˆ†f8Vv`†lȆjx†j‡nèXnˆ†qø†ee‡rh‡wȇm(uÀƒ°‡oXˆdu¯`?ÒÆcépo•×;H‰•h‰—X å@?cƒ‰”h‚zGh,Ø‚‰æ~‹ƒTˆŠ©¨Š«ÈŠ­èÿН‹±ØŠ\x>àRÕ€½ö5§jp‡¿ŒÁ(ŒÃHŒÅhŒÁ¨mXŒj0Q‹È!aàÀu‡UˆÙ¨¹ã¡²%H~ H~6´~£8b¥h§(‹íèŽïñ(GH‹å“tµQ‘ˆzbø wŒÿ)w¨‰ePŒ,§ˆæ¦ý€Ðpú8áH‘y‰àà`d‘ò‰ÐSŽrŽè[ê(ì8'‰’)©’îXä“ò’éQt¤4Ä` ˜m©“;É“y`ˆPÀ(t€ –- ©´1+Õhé”OY à /¨•Ò‘ægÿŽú(’ºD’1x _ –a)–cI–_¹ Ç0ƒhY–kÉ–mé–ey–iy–oI—uY—q—bÙ’âã.dQà{øFJòPÙ žP ‰€EÁ˜é˜ ™‘)™“I™•9€`ŽP‰0ˆ•à À`*[’‹Tш˜%1‘UÉšUY[úŽ­™W‰‚Y¹•.¸ozƒÃÀ›½é›¿ œÁ)œÃIœÅiœÇ‰œÉ©œË©œ{©=î°\(ÐDyù`£Ï ¹ð Ýéß žá)žãIžåižçÙ§p žp ¹ Ïðg¡•TsÐ\–±šPI•²) ÍR‰Í‚4¦ÂÿŸeA›,õ‘,’·Ùk/¸=™‡œÐÌÉ›ê)¡zœ§à Á‰¡z ê¡¿ÙzœÎ™=é%Ó9Šƒ‰}l‘-ê¢/ £oq*¤ù!ý }©©š“øÏ ±ù ŸàŠ> "¬Ð‰à`Ò£C¨ÅîÖ±ÙšÊZÊZ ª ÏÅ ]³›ÇY7à›·ð_ð}p Äyð¡¾Y!œ}Ðg: mÐÂY` œwÊ›iº¦k xjœ$ 7}Y;3Ÿ"FJ2Bc‹Ê¨H7!!25•L™£×蜤j4àH™z á©–x£Åœâ¤Ò¤”X^ÿ: Têa)x¥‡š¥XÓ•jœ–}Л00¿Ê¼:¡M §ÃÀ§}: 1QÀÙ1¡ÃPÍœzš§xš¬ÊZ¡Ïš¡šæƒy¿Ñ;´:bý`®çŠ®éª®ëÊ®íê®ï ¯îZ,µ…`Ø”ÿs‰Q.äJüq!h  Ú‰x`€ReaªLzZªº!¥¬ «…&«·‚¥µZ=·Z§¿é Ãà Û±M`¼À›® ¢§  ¢zž°±¾jšœ/ë±˱ËZð7Û±³ê±vЧ4; Ö³ÇI³5{³˱5û²Ãy ð¼I´ëÿ›‚º=©ãj±]‹hV€Yš‘ŸýåR]ÐÀ3e1ÀåÀ ]D qÛ£ŸÀÏ °éðJqû x0 =§sr›·þA°WQ·é°eÁ£x ‘ü¤x@ ©Ê¸JTB»µ¡**Ë‚J ãè‘'H±äêµ2µ¥¿ƒN«º{§K*[³§7à ðº_Ð.³¼ VðNk dú»{ qÚMà²5;r@»««,Ûû}P³`qÚºÒ‹»·½1[³ÁûÃë m` Ûâ ¡ÚÛzÀ ûà V0Ë»×ë 5 °ºª‹µ]sQÅÿµ¥+À¨÷¬@•Q¶üáN*9€¶1·4é (`1J×(r†ëBúš ðA`P·<Ìl`Ÿ·Š»°!UÈó=Œû8/̪©¯Ï`(`À jÑùgžû‰Æç3¸’R¢8Àätº——ºûë±QÛ;% ºP¾@ ¬½00ÀVà±PU)Û¼ºúº €ÆÊ‹Lp¿{@²Ñ ³àº%ËÀÍûºðÇܱÞÛ±jÌÆ;0@¿%ë±%€Å7’\É 
%0 À¾à € ÀâÛÅR\³_ðˆUÇÿoJ LÜį<,öHb:º£À ¶È ^A.”¸FpFÚ¯ÒX·åªºð—üñÃd‘pKÌ?ã* Ã@š°ѶÙð9PA`aé@ ò‚ZİTB]`?‚lJë¹³9Žb‚¶ ËÖ£Ž>“yôë ¼Ð¾íë¾0¿ú D€ÏúLÉ˲.Û¾0ÐúÊíë þ¬ÏÏj¾ «ðú¬Ïùì #÷Ð_ϲ µyœÐ|Ì ºZżðº´kÒùœÒ ­í««cоÒ!ë }žÀ íÑ} ùü¬å ³³›Ñw, ;]ú«ÏPõÓ5»ÏÍ VÊäãÿ;W®<ÏY½µÕ8jt¶ü€RœbÌ0 ðG?#6q›Ü~ÀA°úªÖ(¼e ×Vi?ÓP©z×) kÑÖeØ{­©XuFê…y‹Eüªï ‹ÕZý!õ DÕ›Ñ8Ýú< #ÇÙ;¶À»-‹Ñ0Pý¬: ² ÒMPÒRËÙ-ÛÓºÔ ¡-Ó¼°»¼ÐºÑÌ Ü­ÏÄíÛ/ Ü!«Ï±Í 8m(]ð§­ÓP­«ŸíÛ`Ú¼` gŒÑ¯ÛÝ,k µ½ÙR­ãsÀQÙ–íÞ¦É)A@©_m©ÿÑ—ðˆ(°ÿÃfÝbíÖž* ”¸n4Ù š›ÂˆíB~°ApÎeK¥‚Ëü‘ªËU° Î3å`<(pÖqKð  ±.2 ”]±ïM˜ÝD` 3Nã5>ã,k¦ýÝ;`ã¶ Æ7ŽÚ¦³4NÞ¶@ɾšý¼=>ã#g 8n wj ¼íÝCÎ 7ÀÝ4.åXÞÝQ~3>äFžà .ã¶`š­VÉF^3. V0­^¬`®3Î =æ1! 3îÙ:®Ï:^ãè­Êº¹\ŠÝâ.Žè^²lpÀ Ö—Up÷m‹™*Îýñ‰±·n= }Ýsµ…¤EZd±éÑŒØÅóÿžÖe±Àµ•×éª÷-ꤎº@?y½0”`(^•ã°ÿÁ⤛èSãeÎä4ŽÓm@ãPç6Î%ä–@ãaþä í7Àž íÚ>ãKÎäNn ŒÓ0åþLç3Ží5¾å  î_îãÍŽíÛÎíè^¡ýc ÛPæ0mð,;çïNã¡mò> 4Žï9~ì4.è.i< ÒÞÞèù°\Š@® ¹bŒÍ){!¸¿×¾é ÊÃÚ\Éì¥ÞÒ\´ñ¨uCšÜü3ðˆÂ ó/ oËnˆg*¼þ”¾¾â ‹ Â.ñu—›gcÏÆ¾ðºj4þº ?ÿãPð,Kîçí¶`pð6îí=î¯ëÍÎÛX¿õž0ã£`_~öi¿öfÏõLþ¬VÀî9þÐcÐíðÔnæîÞ㯋ö ?ã Ï—éð@ñGÿÞù`<¬pñ:Š€*È3Øka‹¬ð÷Ö¡Âkª€!°0„ø±ŠÀg ú”¯*<ùé°\§U˜=¬ Ïpß+¸Œaqâ Ä~>lÀ ÆÌßíË?ßn¯ͬÿüÍïü“?ý?#ýBÏD?«ŒÙOÌKOø4<žö^l†Ð¯ 4não ÒNíE®öðª` V í_oã½ÿ÷æ^î1Æ–-zPÂ’¡ nØB¨¡ÃŽ2àá‘*KV, TÀŒ L Æ(Kè©hp -"ÆŒRÕGàÀ fþjËŠ‘tîäEšT©»r°J‡‡M>U­^ÅšUëV®]½~VìX²e³æ‹@W?²ùذ)Wî\?bÌMëgn¹.üÆx÷€Or.±¸] ø5€§Üá½rñ˜› pqu9ð@o\p9þ²Í3h(T³ñkW5é´xfWÆ[÷î¹l¢€K\øpß劺Cž\*U³Í?‡~Õ-¥ÕÝ¥ëB$èö2/ÿÂx `‚ 0ü ´8P€¶ ¹ô«Á“-K/·Û@~ ª.ÚŒi&=€+`xÈ– <0Áô܃ùf*@šØa¦1 ³Â˜òÎUjâp¦ Hºï§¡Šª®Å£˜rŠw–‹®FoÄ1DzÐZŒ­±Ü‚k·Îö²²ÀˆDR®"»­œ%yË œ&•|RÉgž¼r¯l ¬J(¿³߀®ÌâŽK9ud³ÍÙ ÎűÓ.Å™¬`/¨MüÛn”M‚–Ÿøœ‰#BELQZf ô'A}t&OmTRJ­´PKiÐE54RZ"ýÉ’ùfªÄB;…"Ê(ÿ9—*g3]fœÊM[oÅ5תìA€g| È0‡%¶XcÝm°Å¼D¶Ø1+S¸3ÓT®V]¯msºWå¹.;ZFpÁ…e˜4]u×WÜuÝýÖ]xÉeW\zÛ ÷]}ÓÅ÷Ý~ç•WÝÿÝ7]"ú ”à@™huÛ¤˜â’Z™Ã¶b‹/æÊžÍ²6,a›9d‘ý ²ÙØg£•6 ã¨UÓZŒg6KÛW»%¢àu…ð@gŸ:h¡‡&ºhŸ5Èh†YtøÅr{fbš§¦W{K§c°ò‘*e¯¿<ì:ì0Wf9ia^³j·¹²YNœ¦»n»ïÆ;oÿ¡—vµiwÀñ ©ß&¼ðæÜñK­¿ÊÇ.̆<òbÁ1€(¦”<·.ºè,mµ]î:äÞ¢ØðÂãvqn½Wg½u×Yg¸œ¾vG“oÛtÝwÏj°ìÙQ—3'¾x"{õsã劂ÎÓ>“eÒy'õ»"•T`Ù¾àíµÿû@¿ß{îËW—|îÇ?}óÃ×Þ}õ»ßwþwí_?~úõ%?|ùÛßߺˆà²ÙmËа‹ìrG=®“€€×]Üey”\6à f-/Šxžô@'=™5fÖ³NT`?¶Ð…/„a e8CÖІ7Ä¡ oÀ¦Ñ;–ÿS` M8Ä©õ£rŠ(]Xú `Љa³Pöĸ<€ƒÐ!™¢5="Î …KI1 
Š–ÑŒgDcÕ8ÃR0@=4`:òÄ$vÑŽØÊ—žQǯôC(y,pÔS|â3¤Â-þzB¼£®¾ø°t”&{kÄd&5¹É5Þ Ù€ãÍž¶˜¢,0’§´à ¸²ôÃAPÄ ei,p(ƈt¢"Ïé‘|D¥›&‰ä”C `'•¹Lf6sj‘]SçÔ<8ƒ f6uÔ±EÁ‹‹=‘ƒY–3LÙè•m:X¼tá6é¨Ìç´h&Hj“MÃ$fÿ:²š0¦@E1P‚Ô -(@ÿЦBÚ{(B ªP…”¢ •hF%zQ:¢Í(G-ЊjÔ¤ (#nð€O€RšÓìK‡M{Ö:ùÕwô4å™Ó§¤Ñ azj< êbbr$ iSáóELy+ŒÀ€ hÀWÅjVµºU®vÕ«_kXÅ:V²nU («X50ANÓ¸•ŽÍÄÒ(¦dªYð± \°ã”ƒÁ€;–ê?!H?E¬’Pcb5slÈ.+ãK¥æ5[pò<³A ](ŸmhC‹ˆÖ´§EmjU»ZÖ¶Öµ¯ÅC"ÿÛÖ*‚ÄP ‹^jbˆÄ¥òÀ«eÂŽSXb &À„%¶.öC¶l`esì!FÇ&ö‰bkd—Lb¨rA=+ÜæL'®®2ÊyŸê¤l<ƒï…o|ák;µÈ×¾÷Åo~õ»_þö׿þEHtñßþ>#‰ëø•ÞÍU12=NpÉ»•{$É0Á4<,Á§@Ç˸ñz%îÂã¬ûS)Éö–ਮٺƒÆB)¼Ä©ç„c³ô2Ø:×'o`¬¬¯Ø§µŒ b ¦à$8u ^ð('!»£HXÇUá9Š»„lØË6A"*‘Œ{Pψ–‹ns\¹%ÿƹœRÚLò–ü51NB»{¹±´r|åa–[PFï‚a6hjU2ph"t¢½hF7ÚÑ^´>cškBºÑ‚NΟÿ|Þ .Y‰… “!Ã_Fµ†#°„Gœ‚üÐ݈SóÍ›ºÃø®Ô™9;‘×¼8z«–"gî×N*‡fºPc(mŽ‘z5Ÿçç3zÓ‹g,½mnwÛÛ®—¸Ä†oCZÓÕ¾Îf”Œ&+[6Ë–x—S=o/›€ ŽHF<hš(6:®LGdŠldÃØàGøÁ þk…éØßuÃI³ë…G<á‡qžpp ÍøÆ!ò‰cÿÜäÀB tQlÞ4ïÙÁᢴŸcÞDŸÓÎFàÊíh›ïÜç5G¯;†Ù¦æçÞ† £É­vÛ4¸p„©é=õ/¯ú– Ç>ܶXpÐúßò dzr²—ÝìgG{ÚÍž_IíoǸ§?qçÊ|ðåjÛ³Ì=§£Ã,ç˜iß?xKËA0MÒbÂ?:NᢛžM|ØXÂ4¿yÎwÞóŸ×¼½ÑשýŠðwSÝ¡AÀÃÝõ¯‡ýë³á<¸=öo‚>Û0Yñî1×{ÍøÞøÁ¾ñÇGþ K¦ûÄCp1ÉW4•N­ÉŸ’P@¿}îƒ~ ÿÉ%s/V¹¤C§ªÏF rpàÛ·ßýîW+Øÿþ²g#(Ø=Ëy£È,U6øå¾ÁK‡Á°&è3ÀÆ£+X6Ó`b­8@˜)‡qµj‰¶.Âg°„W0îóÀä¼=¿Â–å{†4³‘~8±Z€O ¿„Á³ó8ÜŠA“û„Èü[2]:ÿû¿>Ãì­ŒÀ"ô9é#†g¨lЅǘ#t‡¾è¸¢¾¼³#~ .ãÒ>äB. ³1+3\‘5Ô»À±PÁ¿Q «Á6ŒÁl¨œ °=7„1|6¸-ýƒ’vš ë +üÁ­ 9´&‚ÂCä¶t¨ 9ÿK ʆ¾ø(¶I¨´4©>ª°¨ëÀ.äÄ.Ìs5XkyØ ¢ðº{’‡ÉˆÈA:tÅö›=¨½Wü„Ü ‚IÈÃ9+ª¾@Ü» $¼t@ˆ&D4ÆF«$ƒ±^™2px†+A4è{¼Ã£Â*,CÓɲSˆ· èÄnìDÑ#3ɇ^Ar@A6¸}€xX2ªW„Ç´¶·jÃÎ ‚À#ÀE]PFb,ºëE_œ6`@*ÇcDÈHC6xÚ ¨€'+¾x€ÆŠÆP;¥§ÓD 8 àÈÜHäHôÈIìÈ’ÉüH‘4É”ìH øÄ«{µ4 \ÿ“‡kT³T„È0Ø ÀG Ê¡$Ê¢4Ê£DʤTÊ¥dJ¥ä“iʨ$Êx @#ÀƒO`~d?bá.ïڀȮDÁS §„TKBŽ T‹¢Ž" 0E¨@kŒ¤SÀ¼ɽä˾ôË¿ÌÀLŽ´7FÃçp®ÐˆœDL°Ë `…Oø„Ið? 
­ËÄÌÌt-6p è8ÑLÔªÌIL­äÊ®$–c¶Ü˱Äô¹g˜äH‡ÊyœÕ‹ÄµÔÍ4%b¨H.‰‚jd@4}`„ÁTÎådÎæ\‚m¸)Ó˜Hw8E]ñ#1Ú=]ÐVàÎîôÎïÿÏðÏñ$Ïò4Oóü„Çè\<ÏöüNíLBe4¸b©3º<`Ì,Ë3I€§ç2ŽÍˆšÝÜMîò•ßL¾r GŠL´K$œ{p„æ”Ð ¥Ð%Hp {H‹é¬Îk9±[3°özÆ%Ñ5ÑEÑUÑeQ… ݋ϕQ͆E8]#¹dÀe˵.ðƒüü¿ýü6E ¸0 ½C$PÝ4Ðg@Pä“­X¹'¥©!ª‡¥Ð,ÕRÀ´P ýŠü8óóÒ[q¥r¸xDÓúë‹ Á4=;‚K¹•;ª(@"×üÅ£Ó§Ípž¼*»Zҵ̶º4À¾è®ÜR4}zÿÀÒ-eTFµPÆlÂÖÈêS\ñ£°;7ÕTƒC'`ƒ9ÜÔ²“;ûŒ •ÒàRo«$’¢F¬œC*Æ?EÈ@¥ÒÁðƒÀ¨UDuE½€ èK`ÝK`ýÕ`åKbÖa=V (ÖeUÖfåÈGm ň ÁéЋ91Ö ÕMÇùÛ֓˽üû ‡JµÓªHÕnƒ§g蕞ôT¿ ÄZ•U4´Ô×£« Y„FFãÕªñÕ_Ø€Ø%Ø‚5؃EØ€•V±x†Ÿ´«|0WIZ=õóÖouźè€6µØ‹³?ü£A(y†UŠX;MWu‡’ñ´”¬XWD¬×JL¾[ýT˜ÿmPã¼y``úׄåÙžõÙž]X¬`W6¸ ó ðÍ›Ù\9CtÁ¥Ãlà7P…Z»ÁüX(Q„ ¸Ös= Øô¹É0ðx šmY(|Ù{Ý9ØPÀ}m´~ÍØx€IXœýÙ»ÅÛŸ Úª‡Ê‰ ¶¶ ã¸IÃiÚ4\ê­AE;S\ƒ³C<ÌÚ=T„óóÚ×$ÈŸ«¤ó #P³EÈ´M¾Úô‹™UÛ*u›~(Ó0]À »ÍÛØ•Ý Ú~ØI‘ðƒ¿é‹Î…«®u›|HÅgXÅV|Ü÷“>9UÜZ4€[ÌE0Ñ r8UTÛ°}¿5£:]Ð Æl3Û¾ÿ‡´ÀU·u4¸%S{H‡O ×° ØÝöÝ…ÅÃÓ<è¿XCÂ…X33_udGw,^س¿ÃÓØmµG|ÔÇ­ìG0a#8ÇËýÚÌÕ\p ¯ËÉ^í@î­`H#qs\–åW›µà]Õ§ ‚¬a_÷EaŸ}Ô¾õ‹vd Nü]Ú"ÒIžôI‘J©Ôáæa£lá⤤J“¹Ê¬D`Ôüî"ËuàVW‹C¸ÍŠ  Çÿ}ÜgÐNª5¹—y´|ízÌ`ò½•|Ð'V˜<`ÝÃGHá6¾Û%À…|4¿È"\ßåK}ÌÈœÌÊ Í?äЪøÌ@ÍÑÿ”LîäÊù “.‚ÔkbÌ-¼IÈ0ÀäLÖdK¶dMöäOåPåQ&åR6åS&eN¾äS¶äªÕô³_~ìÞŽÞ± V„ë²ãØÀ€;`æa&æb6æcFædVæa^‚&˜4@(KÉßHºNÕN÷ÔæmæfñüÓˆÒìæq&Oø|Fo– Î<Žä’´gèhƒd¸†z¶ç{Æç|Öç}æç~öçè€èæçd(„À5¶ôƒÇˆ ÞË`‹¬˜|8YÅãå9¶Ê8Öð\BXæŽöèé%x…Ï€ wºãjF¥=™Ñ–vé—FQ1ÿ’E~„éšVÑ­X!¡¸ÎЌܭeé…`E˹6 è¢6ê£Fê¤Vjþ‚ç£]¨ßÈC.æ61‹óÖˆ h6Pq% ?xÊÐ…¸àhFë´Në%È'ù„ú5?PœÎc{0ÓLµâ´Ë†¤“å¼~ÓÎ81d²œ^B¢^êÄVìÅfì¤nƒ˜TäàÐÀƒ$ìœo³j¯H‡IȽ9Žf?¿x:¨ö‹ˆWwPFPëÕfíd^‚cà·ß¹¥ë<;±ókµ“­ÄÍm´k phàÁ–ŽÂ†™tˆ‚&hìäVîåfî& Šê #°{h»Ì‘~x†.xJ¿ÿÈG?0*¹h2W±{ȇó[Ñ@0ò–Ghíövo°Pò¶‡ë(V¨_ `âSÊVKîm³c\EÐâþN8#(ÀŸÒ·h4î:†kˆoðÇçðz–p¯çj¸†j¨†hØpçp Çp † Çgq wp ð‡ð ‡p÷p×ð GñWñ{6qq¿g¯ñO_´Ü½þó¹Ì¶ ì^ÅØx#pðŽ«ñ.ï~ ò$¯ pÙù&*WÔ÷örµ–V*Ÿoxzk›ÊVŠp“;ÞŠ]󃳿O‚dá~Mr‹´Iˆßp ñ÷ó?ô@t@ßó=ÿôC—qCGôEgôBïsF÷sGïpHô 9¼"Ï­ê.Οž‘íF6(k^2 )§r/íEì]”îò`õ`NWõVõVW¯õY¯õ[·õ]ÇõYßuX¿õ\gu[¯Ý|ó¢0ðÔ]Á)~Ú7çTÓ0‚vv«Å€Ë™s:× a´¨hJ÷öo÷p÷q'wpo‚ø„EêÉö´Úuy oˆYWŠ+”® Eö}ç÷~÷÷ø€xß[PA{÷Ú63TC6|óI pw@ !ÁRøsÕöEã‹n/÷Ž÷øù?ô/ ±"ÓôšRXîÜPgò®òVÿšr­Ð÷§ùš·y›/ø$¿xÞdsŒ 
hv›@ÞiW^“.\l71W4x:n‘‡ú¨—ú©ô/‚L'¼~5ÚÐ`C½I|ב™¿ù±'û±Ïy>³÷ÉàÌ&W„wôëXäÑ7ï,ï€OÕò÷ÚŒO4§çxª÷û¿|pÿ#˜å#a«HO‘Èòtw°o±/ûÈ—|‚O†do.N6V€¬ŸÄ€âÏÊýÑJϯÊÀÃÜRZ¥ß¦ßûÄh‚eýÙ§ýÚ·ýÛÇýÜ×ýÝçýÞ÷ýßþà§}«w;ª>:Í¿½z*s|[üɇ~É?{™;øë€±öÚÎÒÿÔþíçþî÷þïÿí7 PÒ ÿóGÿôÿ#†±¢pyÖß;;ýãþú·ÿûÇÿü×ÝÿNf€0` †¢rî"L¨páB.™¨¢!õãAÈc㢓R*`“Û=Ù•›rÚ©§Ÿ‚ª¨£’Zª©§¢šj§sžØj«ÉÀ¢ž ñ9R?ÄC¤K Û X°‚¡ÃêS¥Ç*™W¬Ø#)²ÏBÛÖ¥™q§ªµ×b›­¶ÛŠÊj4ß‚û-4æ9kBµŠÔ(FØ“h¯­­àÀÄ¢ÂÃ"­¿êš;Gþ[°Á=ˆi^Ü2ܰà>r •? ª·á‚ÛD™«º!Ù#ã'i’ /زÊ*» €+¤¼Â.¬\óÊ(Û¼²¤ŒsÎ6ûlC¿W?ù±kÑK<­]ÕšÿêJS÷!qÅ¥òòÈ_Ä.Ê„gd,D,âòêXÇtÐ…dý˜ÿ„@£‹@AQoÙ³öE•‘ï‹HÀ22ŠQœ5ØÖ©¹%A òœ'=ëiÏ{â3ŸúÜ'? ð€DA££”£rðÄ8"TAs\&C ˜ap´Æ`@õ”@paD ¶Á7ÀK`@°=P£ äK©Â1ØÎ¶³ /`P0 vV¨aí PÔ–dÀN]¡QLM8iØnÙTâN£ÛPIÜ €è0‹˜/È“!òjÖh‚D2zÔeÞ¨(“iNŠÝ|œû¼gî‘Óœ*7mN@@+{Ÿßô¶„4”A rX,cÿëØÇB6²’,e+KY5lÁhÀ$jºe¹5¡¢ ÐBÍJDp<¢õ…%-6‹ú¢ 0D•šÈ>øR”­)Ê úq§ äÅF?HÕ“2À¤5*/xð±…Ä!#xôÒ’„¤(n¹TØÊ–¶5@ueQQ²T˜V¸.Däó@F2”Á}LËP´F¤¬è‘:𠃄6›@°Là+@¯pöW`Ô€¯+è˹`í xg†lƒ÷ÕÀÀNÙ€ ¼„7¨Á*^1‹[ìâÃ8Æ2ž1k¼b0p òx–|ìæÙÑ9.¥µo¦škÿä##™$ 4Ú„æ6ºÍ5ï“è‰$™»¼ (#›; ™ªX¶¤,ŒLfçVÀȲ½2/Z˜å?ÙÌ2sµ¬å(™Ê[†³‘ ƒ2c9мXo{¿ê˜ø¦g=ö]&ZÝ®€FC숳¨WàKcºÀ"@0= 48Óµ€.Íáa‚péMºÀÎ4­pb5˜ÁƺÞ5¯{­ëÀÇêá¿ #Û6C6퓈 è$?7hÂ'›;Òy2 H²,¬P‚ ¼Ž‘\6²`€d9ƒ9É0˜À‘aj çjÀÈz¨`sÝÜÜqßm8òÌ=îqÿ_[žuÛ‘õ ŒâÙH^¯+†ah:ÕIEõ]4º0‰‹ ’ÆL€e’ZcºêsÜàPƒPczë©à€ÄËä²¶â €á¬`›f9È/çÚ×5εˆîb¢½ÅH·CÐ…>c(`ǔ ~œì«eÙfE­³näÚ"É×õÙÉö°ÉHmÃ#æ-nt¹ÍæFò*v$£ýÈêI8 âÝÜy¿»ÍéÞsÜ_{d€ùßpçÅØËã;Ï_`Àíu†—)J·øB Jt¢:]ãN‹&Æ£‰„#è‚ci©4Þz~é@@Dr@§QŽiõÅÿåäüj^àȾé–×Ê]ïs =ùÊ_>ó›ïüçC?úÒŸ~óiÖéh8ö±î}¡h½¡\·ùËoþó[òüä·F¡~K>¾ü¯%?—mQò—[ýlƒùoåòkùõ]ùþÁ@ùÝùmV”ýÁê-°Ÿû_šŸ!„úi Þ@ÃuJ¼xÞ5¸ ÂÊe,S6(‚@‘äÀ$(ÓrƳ|œñ½Þ`ÒÚ¸š©9€Íåž§eÚ¡ñýP_*á2a*@6LJ6`D9´Ë÷Y![„Cß¾hà(€X‚,X‚X‚-€Mÿ <Â#ܨÀÀ(X‚FE`æßù©BRAè%Â(x”!xû}jV2€!šŸ%Y'‚Tž#ÖzÀ#¨Bža>x‚,ÀžæmÊ#hTÀ °Eø8è‚A þIÄ€dƒA Å BK Ú`0Ap¦Y˜0"ãñ!a6_H3Bc46_\Ÿ¢tÁ±]á6’D.Ó’bù±ßN¢éi@üéAjÀÂFXÔᠪ T“*ŽùBíQà!’_Ú‚!TÓ8bùŘ^ Ä_ýÝ_9É9$ÿò£äc8ŠcRÉ)À‘0µ5A+†Ë2œB!4AÜ£é DL‚.èBDÁht¥c2ößè ¬ý M"ã.!ø@PêÀ3€4¡ÀHcS]HÝ’€8T!7Ze7ÒÅ¢98b¤-À)n¢*¨Ÿ'l‚ùÁÂÚ‚X’ŸZ’ßW’â(°eù©BYžŸ[®¥ùÅ‚%¸e\šŸ^â¡\â¡%Ä%_ÚÂ&X-teFº‚+<‘Ü@<‚%A Là€V™^fb@XœKê1 StAÇù‹I Ái¢fjªæj²æ Ì€jº&kÊælΦOzܦô›¨ÿø@Qe&ü&ôݦ§S6èB¢Ì ÿ¸ÃL^%7zãiE$fvjçvrgwzçwnçz©‡GV@L@Jf¦é1@ • 
|‚g‚&2Xà×Á˜&mâg~êç~Öæ(n"'râfÑg€z°%€xQê@r¨€æ¦òAhnè€F¨V(€(†h‡Z¨€"Hg‚ø diNçŠVçV^'ù!&-ÈèŒÒ¨- ¦ÚhŒª_ŒÒ¨Žâ¨ŒâèùñhÞèö(iŽ©ù)’¨’Þ(“æ(‘>é‘Réú¨8òcz¦gt^HR!¬!&4Aèÿ2%X”U}Ì}òç›ÂiœžæÏå‡j¨„š‚"§xSƒ§øƒîi¨ˆ@ ¥*ƒªj ¨è@ ˆÀÞ)ˆ(‡¨‡jj àA‰ ÈsFÁA­h©FD‹¦×),Ð,°*«¶j«ºê¬Æj¬ºj­¾ê¬Þ*¬æj­Òh¯Âê¯Ê*®Êª®ë®Êh®+¯k°Î(°®j³&k±Ò*³ÂÀÀ䉟I’ôÁdŠé#`¸bÂ)˜k“šž•óا#Èé»Â+mÒ©¦^ª†Û §QŠ€Ÿ>èp>(r* §È‹p¿úi €¸Œ ’€ÿ(@½ZìÅÞi °kœhŠ‚ª©bªr®’lÉšìÉ¢lʪìʲl˺ìËÂlÊbë#|kV×|" ”«¹âBÏÞÂ/ƒ•UC±iÁ¸i¼"m¼Ò)¾b¬Þ+r’À‚*,Ÿþ«,À¶ÔV¬Âe0èpfíôiƒ6mÙÖ«ÆZ]¨Òͨ¦-ÈzŸÈ^gÌÊíÜÒmÝÚíÝÊ, X‚%Ь=I BHžÂÏfÞ^Åm¬ÑºkÒ2î».í@näJîäN°‘ ÈK @®ƒNÕFnÀNØŠänÚLçe|. „.ÂJåÂnìÊîä¢m’t¬Šº-uf¥ÿV¢–àíïoð oÝ2Á x‚+xÂ#‚ŒTDjäWÁÙWÑþËÑ6îõêçãÎ.í,´€D.çz.ä‚î8€(Ïæåé¶/ä:@®€Ä*ÁöâïìÖnö}Ûæ.ȶh9Œìðpðð¯+È‚, ¯òêAGÂaÃA¯ôšõ–¦#Pœfg0pp{°‡p°ƒðð ›0§ð o0hoþV.˜îä#øŽï„ä.€ú¸\@î OÃ/D®ˆ@(ÀëÊpCîþ‰U~ìÿ"›È²Á ¤Âsq{ñƒq‹ñ“qÿ›ñ£q«q •Ÿ,ŒBòB°bɉÐNoâV/—pûñr ò ÃðHA Ëð½R®ëëæ €c(ÁýF®±ù±äæ°ç/߈öÕEÛZq²‰¬”À£r*«ò*³r+1 X¾±J¦FZLôí}éñr/ûò/2+² ·ïð¦è€<òH3/@ÁXòûbòLj<°1''ò.+®L1Š,1@%¸r;»ó;Ãs*kÂl¦-È‚*0™~ W°8ÿ"s@ t/ s5ôA#t5[B[óQV3ÿý®@Àãr‹²Çô@uQðÏÍ€I“ô,5B÷@53õˆ€DõTtUSôA÷ÀS+õW+5J/r`¶t©¾t6°ÂÁM»õ[Ã5Ôx‚]ªŸ)ú´2õsP+Š=T‚u`õ\ARƒõa+õ¦…S÷L4bCvd´X'ˆ²ÌKŸ5:ï.ï>Ã$@[±(¤‚(vi—‚(”ÂiŸ¶h‹¶³6koñkvlvi“öl»vÿmÛ¶j«öm·vËvk7mÛ¶oñkï6j÷¶(`AE9Â(Øu)ö´§ðóºòu¢ØÃ+ÐA„As·wƒwwswwSÁw—wx—·y«7z·w“÷{Ÿw{—·!è€aKö}Ï€‚®Ù÷}ûwXûs€8JÔGf³èf/Z9d1LhHBqKø„Sx…[ø…cx†kø†sx‡“v%`RY!l‚*Ìè’ò´+ðU7Ñ8²ØÃ6<ÂxÓxÛøãxŽëøŽ×xBô÷Cö tu ù‘#4e ˆ¸Ã) ëùžó9×}ùú z¡ú¡#z¢+ú¢3z£'º’H€AÑ9”Ǫ‚ƒ.°Â'L§+B °Àz©›ú©£zª«úª³z«»ú«Ãz¬Ëz©ß‚#l‚Ê&0œ7œ3Ú‹›<ˆž÷ù±#»ž×#\A ;ú³C{´Kû´ÿ¤ß8´Ñ“[úÕa: p6<1l:§çÀ ¹£{º«ûº³{»»û»Ã{¼Ëû¼Ó{½Ÿ;#V‚%lÁ¯¿¹ YÁOïõÒØ98ð­${Â+|Ðÿ!\Á8;µKüÄS|£[»uh_ê¶_a·{»Ïg6€»ìÀ&˜üÉ£|Ê«üʳ|Ë»üËÃ|ÌËüÌÓ<ʃ(‚é‚̾y ¼^[÷Òôƒ=¸8¼B%l÷Â+={Á!B@ü TüÔS}Õ_|u ÇP1Ç[¥Ç/ÓTdƒ€v\“}Ù¯2(P‘x(¤ÂÉ&ðÝ%ÙÏçr°Wú’ }Ñ;ƒ%`B,}ß׸ ‚!¤ÁÀÜ Ô@ ˆÀ Týâ3þ³_=nØW[Ýsýÿx½Y pÌ´Ùs~ç›ñ¤'ˆq»ŸÏ |Ð3 Rȃß&½ß/¼ ‚!ľ,Žÿá׀ʭÀá'þá¾Îá~ ¬€X€à¾Î€ï+ÿð'¿ò;¿ð‹€òëËá@ôÀó×@ÿá¿ï€ˆ@ì>ö“¿ï[€°ã xåë.heü °){~ýÛ¿ƒ‚F©”íhBÃ2@Œ²5 A+FÀ•K·!C6lòù“8‘bE‹1fÔ¸Ñb>{îÊevª¢:aP¦T¹’eK—.Íä)ô(Mˆ#V̘QƒçJœlŠ;Åäò¨Ã ç º<y¤L0©àºršÁ±ŽêŽ(ÿðjØN¾¸ÈÊ(ô hϽ¨\XA€ÿº#€MžV˜*¿­øSßPG>àpìÙkÏA -ÔPÁ&´PQ ³ùä!DŒTÒI)í¬¸Ã´ 
î¨ÀÄ×*3MO`™‘FÜnœëÐy”¤Ç²q˜W„dä‘DÉ£È:ê°Ã/€õ ;äà•Ž<AG*9å”\ˆÉ&]Ž¢R'°­ Mž\êÊ+ÁÖÂs†Â6ÎÊÄ3ªpàÊîÞ ‚¶ÈºÍ+…ªáJîÌêÛo‰Ò3Gy `WmØá‡ M47…0¬˜â…(‡E€3QREä‘K¥”ÿ’KäÍTV™²–C~9ä‘EŽ™å™iFåšWÖ æ•–™fžY–„5!ø¸ã´;°Š‘›ÑQJ5Õ–Ûl¤Øb!~°Uw{ìÇgˆ&Z¡´DíµÙvö”WröiÁA5®ÅVï½]`oo¶ úVÀv„µ ([4ÖÛ\(“ïÉÏÛo¡â´ÛsEØN„ÅÐÛÛ4לÜô½ ŽÐbH'Юa]öÀ$ÖÚbÇ ‰˜I0Ð I†^øá‰/Þøã‘O^ùåƒGq>aÄi>J0‘,<˜À C6Q…–ïm¡Å6#²©÷†¸žÁ~x´ÇÕ°1¬ûGÿi鯟~ùÍwÇyìé?NÇ·xý)èñÉQô¶HEs.@Óº°…®ÈA.€@S':Í-@‹Ã›—@':X`°R)§*=ã4ÏЋú`CúcB:ŸÖn?‹UìGºøDP0© ‘ˆE4⑘D%.‘‰Mt⡘Dx`w0DŒ`‘EXÐb‹\ä"Èg¾¬9&}3ô ûòá‘þɃú«þöÇ¿þ¥‘}é‡;ˆB8`k á¸·\eÄ] +I®‚ï"€¼ü˜Èú¸ÀM.¡!±µ”@àzD d  3ã)Qy¨î•­ÿ#Æt§ V|b“P„bÀ‚1ì’—½ôå/La“˜Å4æ1‘ùK+0 VƒX³+,Ó™w „#6 -n“›`,Ÿ+ËQÆTˆ}hLã9щÎr ê"xô¤ß4¸·pñ „ûŠÊ Ѓ-FMF@$]°¦p]2*'Èd'=É·Ô9¨ @ëØ9N‰N”@«çE[Y7 CeEn@‘Ž”¤%5éIQšR•®”¥-uéKMÚ=èA¤wЀ‰fJÓkZ¢{Üôi½yQqR†xU©L](ƒ²—•¯}ŒE1ØjTZÏ0Â6‘XÅ.–±uìc!YÉN–²•µlc+‘ÙÌ:¸f P¤â§? *8‡êWØõ«­E+fÀÔ·ž€ª!p@V0 ¾–© $@o{T@ µm=Án¡j£Æi*Y=«qgËÔẶ¸ ¬è(Ù`JÕn—»‡yˆ`ÁëÊÁc•2ïyÑ›Ѧ•XQ ²y™Òš6Œ¨µnwÆZê2Õ(#­rù{”Ì€>`ñ/Z`w5¸'XSq±šÕGõ­M±jsyKáWõÁûµë}{ÿÓДãuø51w^?¦ Jï‹aŒÞM˜¦›‘¢ðMW¦öĆү‡Oà& ªÈÉW‰ dÚÖǵIn­“•\d×ö7Êwí —ˆö˜ËMñŠÁ› ?” Æe6³ˆ@A©TT‚bßuÙÇ ®² e=uÞóŸ=Üg=[¹0¢@vçLgE›ñË`,d$sfIOú3 pDÂÑfÑhºˆóŽA¼èáñ¥6õ©O}® šÕ­võ«?`XÏšÖµ–u­cêÁDÁ¡$Þ²¨=»F;ú¢ÄÙ¥‘lËh¢7Å‚&<´ƒ<ãÓ­äq°Djÿ\o›ÛÝöö·¹MèÁPË.ü5¶Ñý°aœz”²á=éRT¢³;€öhLó‰lT›•×N·o´ nœà?µ¸㎄ùA»ÿvxÃÖÍîŒn¬c™iYÎL¶3—ÝŒh1ëøÅGf3ŸáŒd7YÇGþñ™Ù ä(…{àMðlh:;¹(š¦ˆhaÔß'LÀ t¡½Õÿ‹ˆúœé†Š¸Ä;-Eôîw̳úÕ±žu­¯h€‡¼J`¯~Оy®ë¦ è¥ÎÀ2ðö·Ãîrw{ÝéŽj¹Û½Ö{Ï»«ý>÷Àß=î7õàgmt¿øÁOÙØëÚ!¡§CÿÝb=üD‚EÍožó÷üç‹hDẠv`{ù¾àñÈ€ëã{Ù»¾õµ·=íãN{Ü¿þõ·ç½ìy{ßÃø±×½ð[ÿ{Ý+¿÷¶ÿ}Ü ö¾ä¬h¸ê±ï›ÉSž‡Ù Æ,i©ñŸüå7ÿùÑŸ~õ¯Ÿýíwÿû͇Q¡ýµü+¢ÅoP':ûa=ò0ù`où°ùù0Ðù PsÏ…k/úøï/ ­ –®ÿ4°0¶ûâg£8ª£DpI°MðQ0UpY°]°g)’ŽXˆanôoÿ6í`}ð0-‚°ÿ £‚ ×Jlð/:Ðãg~ì§ ­ð ±0 µp ¹° ½ð ‰æ† ëFÅRï -âÿŒ° ÝP7é åð08¦Üä¡Õpº@ Á,!ôO±ØÄæ1âçãŒå§ñ QY‰œ(1!11Ñ5ŠZ²¬Ú.‘ݺô×°çpm €°Y±ëP0¨ï®y±"òÁ¢…q‰±%Ž8:ü )/ üà{Q"ðH¨±­ñ±1µq³Q€¯1 À±Í1o0äá›0áÿ‘†&ÁŒ±íñë‘8V'˜êŒ` 1§ñ ò#  ¾Q\r!2!# Ñ£ -’ÓÑ/.Êáñâóá‚I²$MÒѲcþ„ƒà²ò"eÒ€˜Bà€–‚ €!Ç¥)`&’3²/Ø ñòð#²žaÄNr*©²*㇆ú‘Ý:àÎÍ)åŽÒ(Ëd"#€)¨Ñ«‘=¾1È`ÇR&Àƒ¤BvÑ)yñŽÀ*30í‘8¤ ¶’ØÀ¾ò#í¡`./Ò!«1.S-%àÀÿÑ2!“ ! 
¤B2/á±Öñs5Y“ûˆc)Qà0ÁìÀ#MÓì àx³7}ó738…ó7@*pr-€7½‘7‹s7‡ódÑ9Ÿs:©38€^²"ÖAJó6£Ñr`Zs<ÉsŦ/dSÅ&!ìÁ;)ÂØ ªs>ç³8ðzs9 9§“?é3@…@1ý"P€u:r1Ýó 킱<%tB'.Aá £Ú³AiènôCƒsM‚@3s:ãR?A4E ¢À69bu0Àñ²“C³/ˆ(4Gs”8*6Ó¼:;iôŽŒÿ@>UDãR!¡Ó9í?`²€7 ÀHŸTŽD PüB €ö’FÕðŽÊMGÍ”</CÑŸÁ ”CûÁ²Avsàî4K§s9DZ7ê4PÿtÒNÀMô4@@ÒàMëBMuñÅ”Q3 ØàLuôYóGˆ!AÙàGQ¯Eô#óÁð²ìtUYµU]õUaõUñSV[`µ9c5Wu5W `$)U#°Ìº³RQÑtG5u‡>Át C>aütÁbˆÁZéY«­ôV è±SŸa);í(taC‡ÔÒ!Uw5]Õu]uµOÙÿõ]]€]4#Ê!aܱT‰uOU\“µb¨vè4 aP@5à ôO*"C:`[ÃK[[³‡²RTÁéXÃô6ã4tA@Uá5dE¶U«tdÙÕÀ0›².ÜG²^÷uLårà_1ÄDöËa`$vaG‰*æa³bÓSe!,¶•r`Ö;7VÌd@ °6kµvk¹¶k½ökÁ6lÅvl»ö^ÀˆveéÂ’‘#Õ6f{1Êá¢õ_c h+¦+& C–raÙÀjÖa!> Ìn‡X$6~& ¡YM’8îÿ\‰M fô#ã$¦Åº L÷tQ7uUwuY·u]7uÀ^wvg>ÚÒám7A À!cáö Q“ €VSYhòŠg1„o1$:7hY‰”Äð Cž!ˆP`””·¡0À:à¶5*  €n©ÒSGi›Öb‚€ ¢¶AylÄfé·~í÷~ñ7õwù7Îö'ôu"úAcxƒ×AçöñGQ°bpë&0àÓ˜+6p£Î0ÀwËÁhËÁy‰!!J+©%SÁ×¶5b4$: 6+‘ÅæDì -Q+ÆQHì€í¨}Öˆ|ø‡8ˆ…ÿxˆ‰¸ˆøˆ‘˜úgŽX€WçžAAšX‡³5y‡0q3ʆM=˜cXao; ‚ÀÈðöI©g@:˜EÓ0a…o—RÇ$V4áL„V¼æW‹5‘‹'Q‰ƒF™ &á}cv 9’%y’)¹’-ù’1Ù’£xíKÝáW©˜Í5Bµ~+±+Ï‹yH•¹O™ZðÀ£€iErÀñƒW¹ÁþÄh‰á FÒV²n´Uc¤Â˜Y*öÍ”·ø…WY‘§¹=u sš[¹Ù uÈ5”ù²¡:mXÁÙRå6Ǹµ¹Õÿù”ÿø y-ôä§ë7p]Y/ñàa•Y}ë|ÁÁ˜+1™Yˆ žá ç†Õ:ŸÁg×4‹9‘YйœÕp)«œ1zOu ÝY¤·x¡í÷øQ~|vꆞ=Ñž7‘—?™v–å'˜—q)C˜8 €i{Ú¤Gú~ƒú~½ï@h™isa>·£³O£?£™ZxÍ5 bÀ¤­úª¸ ·¥ “~aºI馥‚i ¾ï{놅b@–º7¥ÊæfY±º®ñ¯ÜÔ9¢ª£Ú Ú¯ÿzQ³r ›í:±ïטéwu¼ºž×8¦÷¸n–ò«Oÿxˆb€ÚêfÆü h¹ „ÈJY±¯zZ’1 ú^vŠò”_û ƒb±M;·ñ÷™u»wš~5»~o°·±ú¼4¸ñ÷nÁAwg{J«»¹ùÕ ÷Oˆûº±;»µû~ŸA’Ѳï7ªÚ€¥{í8Y*žZ¶ËÛ OÄ›··;¾å{¾­ÚûŽ›¨Ù ²¼×Ûçúau €¢»¿5°½´’¾|Á|»;ºY!™ù›Àÿ-dpœÕÛÂw°½qÖŸ<ÄE»½ñÀÓ6ñÃw©9\Ôì!A€¼Åû/8ÊAPP|Äy¼Çíº»ÿD¿åÇu¼#YœÆéÌ’‘ gÉ«X8² ÛÇ­üÊw{wЛ8¢àð r×µœÎæ¶wÁ|ÃÇ|C·ˆÁzºÊ±<Îã|Z–P[{šh?YÌÓ¼ÇÚ”u~Í÷\C84& *WÎýʽï¹ur€&æ:àÒ1=Ó5}Ó9½Ó=ýÓA=ÔE}Ô=þ1° ¢é1Ï=ÝGŒÛoŸvǽÖé[hVôÄdTÏ Ä¶7Àü`ˆ½ØýØ‘=Ù•}Ù™½ÙýÙ¡½ÙýÀ6 ª­4:S¥¸ÕŸ28cZð@…½7JÑ¡–Ð=ÝÕÿ}ÝÙ½ÝÝýÝá=Þå}Þé½Þ×ý–ü0:ƒ –9*AàÈÏ( €Öàà>á~á¾áþá!>â%~â)þày ¢À×Q æe~æi¾æmþæW>@;C'!ÿ²ŸÁaúT âþè‘>镞â™Yãg(F‰n#ýãÝÜGäm=ë³[!öÇÒè#–¾™Pú6`éÑ>íÕ~í^rÀéa+êšœê{8Ô(‰õ~ïù¾ïýþçHÒíÈ\´ VΞí¿ñŸâU º«ÄÿЦYÝaøðî!âØÐH¿ôMÿôQŸDô=ÿ6?óíÞP2yöi¿ömÿöq?÷)9#P3˜³ÌP& ø‰¿ø³Àäa»äA•±.¿Aø¡óÙô·!Žáda6¡A*aFAdaüyaÌßü¡ÔýÙŸýáüÍŸ°_TAû+AáûGAnᢠ¶‘#Ç.Þ=~þ*\Ȱ¡Ã‡#JœH±¢Å‹3jÜȱ£Ç 
CúËç€òDRŒ"cË—0cÊœI³¦Í›8sê܉SF•@)f{p’U:yùú}Ĺh·\mbTÿi”,^ÚMÃ6nܹsïÞÁK¶¬Ù³hÓª…ökWlÓš ã%kT%F›\ÝŠF]<|A L¸°áÈ÷³—.€Jç3…§å˘3kv©bƒå,ò%VÙÏ1²‘_r’ðŒg e`Òg–3&'? Å+áÐlmd’D3š2@œ˜eéÏ”–<À¥w¹‡‡rR¡Y”¢>÷Gï$ %Ð)‡„P  Aò"{l§Jµ‘MªÂ8ªà…0¶Ò•Š“äDÑ4„Á‹—ÒF)\ÿçJÛùNyõ¨6«N:êYC‘hr£lä'7pLŠ}Ç<¦@àu4¦nñªå“¾C©+ÒÒm—Ñš$QžRHízWDÇjÀXRªQUA€Ö b<ÇQ8Íðˆ V$¥CM,K™ Æâ¥•D Uq•a£ØÀ†7ºâ•Üò+9‡7°Qfc.²€¬d7¡ŠX•ìTìP‹ŠÔÚÚöPJ­§"œšO þaøío}+$þö_<Òúxܬ)WiÂÍB‘†Q^N·–ª¤jV)ö‡­­Ï3ÀÅèp;“F垆¹k¸.sX]¼±òéŽ ü`Ø ÈcQ§é8ÿÒqA V¶n§5¤ÁØÆrÐUÅK7Q‰È2B² FšF¡à [XÁ…„ì`£©Â®Má.‚ ØZ#À(vç oËâ×(òH‡ÙPßÃZ.¸8αŽul\÷8¸¼L"޳ÖÝ5ì¸È;v p¼äàz'ǼœƒŽ¥ å&ÿVÉ9Æò8eä#{yÇðu—qҠأž0Â3LZãÃV3Åp޳5,:3ãÎx.0‰óŒç:×yÎq4ŠiëâBú9JáÎÊŒÒùÑ>2‘#MéJ[ºÒ¢ºô‘Ã<)QmÀQƒ‘´3b(¯°>K†-¬yâ9»ºÕ+m5¬µ!ÿkZ³šÖ¯žu­O¬Ò]ß:׳0®mmk@óZÖ¯&¶¯UJèC;ûÙ„Q*+ˆÒtxD“šÎö ín{ûËûöàKžà ÉÚAqGÖ—ƒ’ú-“Ɉ°çMï\ C}èƒ%„Qï~ûû߸ÀÝlhüàâcÌ3tf]tÛâ®´K"NñŠk›Ü®Ì¥’la}Ap·<¦ oX ¼ß_ÀpšÜâßzxÄÉ]ó™Û\à´X1ÂwÎsŽ,Æàø8ºÀh‰@ÜâHOºÒ³ñcÆñ>m6ú'tƨ(˜ú($·Q2L~ó\§\ÖÆ#&^ø[7¸9Ú»ÎözÿÃBç=»Ü'òórØtÌÈÑ—Î÷¾û}Üz}#2_²‡s] õØSg]0äú¿Áþg°˜Î_w5/W{b _xؕч ” 2·Æ½¿Ð³Sþ¬7;éMzÍëá †PÆŸ-±ï1èÁΰï¿Ûîê·}îÈûøbL ªw¡x6–Áß§OýŠ“Û“qTW¢¾y(‚ꨙ8ÏýkoÝÏèO¿ú×/”ÃÖÏDloÀyp^¼¨£*Pi¸úg uFp( ± Àu¦X ( u†€À » À®À~•7gÿ”'‚Æ—|(˜‚ céøõŸ€*Ùp*±wÕwƒ8Hi×çt•/kÀq!Ÿ !'ÑW稖‘'‚L¸~î‡~møë$ ‚Ò€v~Æ H`pÙ¿' ¾àgZXg²MPg¼ÐDpgÒ°°NÁ ýׄzXg'¨‚~8wuG Ò†—(Ð-Â6˜ƒ÷6qŒ¨c<ð4M†L–"7pOQŠpˆ§aFð ãÇ_ÌS~!±„Ò © ¬ØŠ­˜Šu¦Š¬‹í'®£àŠÐ†¢—…7pgÐÀ ³ 7àf0P…°ˆv­hÿ ³èŒÐx[Ác°ŠÎ(ŒºøŠªH`&ŒÜH‹}ø‡æÈsË—å0 ÏÔ-Ÿ M µ‚–³‹ð÷8} 3öˆ8¦0V–c÷Øùø¾iÑm9•¶þø“¨C8&asz%ôåò  Q@Ÿâ§ß©ŽÄÐ)X¤вžþ¹¤îI1ùÙ–©ÃEmùæ‘iY0˜SZ¥{Жûiæ.L:¦‘9‰º>DÂ4×uDPž ¡IQ’àr’:º•ÎhÐôç¢ÁÀy»0 `›w À{ÌÐð¾0ð“£`‹äŒ1Ê „j¨»`` ±rú”6ŠgΨ»° yZ§[©>šª-¤àYåÀ ]   JJ¦¶J1V’˜*°Kö¹J$6E–ö($Gs4*`{À6C²ÿöø¥BÒ `«ÒÊZFeôQàº0~®Ê_¥iš/F§¦Š’QxØËù‰c`·i ,W»¨PD`£ x à¡wæ®*·¯ú 02ú©ÁéŒøúãÊ’¨ªª k[ý°òЪàð ¸>QÀÒz«èB1ýF½Ê5ü¸¤³»3¬fÓ¬ à °ÍʲЫ™±þI­J$¾Ã.—©Ѥ<Þj¥(§£1¡ «£Á ÅÀXyV ²`œÌBÌP Lk FËgJ˴̰ “ŠgP‹’È ±€ C‹’ Û°d‹T@*àX¬²­] ¬2»¤›5öÈ)ÁÕ«ÿÛ±l™«mù¤w+²Ð²ö(61û¶ýi¦¼Áðy·B"*ÛÀ'k ¼VÒ¥÷È·në–_ ¥ó©Ãÿ©Kæñ/§±20Â᪂(ˆŠ˜ 'ÜÂb<Æd\ÆÅðÂ$,Ãjl3ùð "GrêéÄïy·³½š9°LÒ±º6ÇJÄD\ ¸À„E,Çí9‰MD`å/OsÅ'å«@ý@uA€n aÂfÜÉžüÉî‹Æk<Êác#‰*qŒÈìÙ·{ܱÑk²ì“˜Ò•¸*ðÇ€L¾JºŸP‘ ÐÀª˜Š ˜€G d…Å !¼Œ¢ÿ•ì_%AjÏ·É` ÊÒ<Íe,ʤ|Í3>àºê ‘°ß 
ÎàüÍáì–äLÎöxÎâ˜è<Îå¼Îí¼ê øÎí,—ëœÎtéÍùÜ–çìÍülÏãÜÏúÐ’I˜‰9‰ì!ÂÃ#¤MîÆƒ¡TÙ ¶£œü¾ÁµÁÐ íÑMb" µð»Ñ$FÒ$}ÒÒýÑÅÐ %ÍÑ Ò"=Ò+í¾&mÒ“›Ó(ÓÖŒÍ@ÝBÝ —@ÔE]ÔDmÔI ÐLíÍG}ÔN­ÔHÍÔO½ÔO} X ÕIÕV-ÕMÝÔUÔaýÕc½ÕYÍÕbmÔ“™“¹dœÄÐŽ1PRÿýéXÄ@AÀ T ½ð׀؂=Ø„-Øc° …ØŠ½ØŒÝØŽ­Ø¥ÃA=ÙŒ£žg}Ù˜Ùš½ÙœÝÙžýÙ Ú¢}Ökí–™c 툭xàŠ0 ŸÛ²Û“PÛ“ ¸­~°Ûx€lÀ]`9\ÜÆÈýÊ­ÜBÙÜÖúÜÝÒ ðØÔÝ †ˆÝ „€ª@ØÛ= ‹­ c@c ‚½ wÀh` †ÍÞÕýÞ€ÙMÙô]ÂÆ;Úøßú½ßü=ڥݖ§=‘l€fÐ]à~àžàëÓÒ½ ðÍØ¿ Øæ:Øh Ø8`Øÿ»z;Þ ‡ßò]ß*^3–Ýß.þâ0ãšýßmeWìvF`ûã?ÞB>ä]ðÛFÞÛ­íÚŠð ¬  º@ Pþ Rþ ÙPåUXžåZŽåº ×x¯  £:æîà‚]ædžæi> `½ààój›iî%€jNæ ÀcŽc.vþæŒ*e‡w^èoþ×h^è…žâ+Þè‘{ß2é’>é¡}l Å9ÆlžÏ0É窢>ê¤^ꦞ¨žêª¾ê¬®êik±k–¸ Š^ë¶¾ ¸ cL07@æ• †vn¢:掠ÿlN”i °iN´Ê~ëÔ~ëŒîèØnß<@éÜÞíÞné¦é.1].XŠ€„ä¾îìÞî>ûîðï;ïô^ïönüË*Äð¿ò Ñ^íµ°ëLÀ c¾Àìc° ÿï¸ +W iÎ¨Žæ„w0ª„ðçמí"O#-îí&ò.î.îÁ¥éIÁ‚àÀ žØÏð°6ó8oóAÑž8×§öòþ^ ?ði>ôCð,0dÎëÌn£ ØÈðµ° v^ Ž †»ñ; C³0æOõV0ðÿñ·nô»@ô  Ù#ßö§ é(÷ÿr_éÞ»ò,™Æ ±áÙŽËü<ߟðó‘¡A  ô†øˆŸøŠøà‡ÏëµÀ ôŒ R_  B/ 0 B¿ ô׿B¿ø¥‡Bú‹¿ú¬ßúk?ßnû”÷¢¥r\œ=qO¥ù îH½Ö mÌ –ààÚ°ŸI1FŠ]Bø­ßü‹‡Ïµp@µP;P —Ϩ•P  ˆ_ jý_ á/úµ úοþ¬ÿú²ÿþÐQò¡:À X]¥ºÖT0{p ö—.íÉ"РÁEYÖ¼¤bÄÈé'‹!̘q£À=þ|$y‰ÿ‡Œ?‹ òP‘rÑKlòù£I³_>{îÒÉ›YÓçO A…­ÙÏž<{=}æK¦©ÖS¨Q¥N¥ ÕC…¨˜ÔšÅ!Žj0æi*­`M%+¤Öw¤Â%ô€ÛªyõîÕ”L)QÀ&\Øðaĉ/fÜØñcÈ‘æcÃãE1^Öèðò@g¹Ì@èÓ2.b ókN¦¬Æè™ók‰k6cÆ|»÷fßšwkl`øqà¼O¦\Ùò%L™AoæË×Oòá~Ùƒ2uº×;U²n­%€OÉš} c‚ OgE%»µ”óÏN˜ ÿîw½ëë¯ë 4ð@ÿTpALŒ2ã“ð2†À( æÀè c«³Ödج5Ú€K4Ý6³MÂÜ&tñEc›-ÆãNz©9— #ÐÁƸ ðG&H*­žªä4öÖƒ ¬"kACƒ1!Ä*y*ÈÐ`dŒ @ƒHî³.4 XëGäqM6ÛtóM8ã BÃðŒL+‘“E8X„5×›C¤TnÅK²¢ NrûãÑAws4‹J9™cŽF‡Ð°ÓÍTÔHH±ÎØdPi%”ž[$¦åÌG®¸šåV\sŵÖZoåu°îÀU+ý<` •[•œo– ªL–…ÓЀÿegA£ÙÓJ˜€a»Š€ø‚tD¿jsýÕVsɵUMYÛu÷]xã}ÂPêµ÷^NB©Ñµz14N_N0Ü#ß|CYc"B/Ñ·Þ|eØ = ö,”E 81B‘HÔ¨t‘  =ôu(ä& ¹ C¹ÄÏÍîtá…ƨæ{‡Põ –r|5:y㮕Vf)Úh£‹>Zi¢›v餸i¢“vúê¦ÑÓµ•JJ©zéYR©„hP -k¢KÈì¯oeí«¡f7hºë¶ûn¼‰Ê!îõûïzUHÀ^N8n€´zõô»Ï¾Ci­`{/I o‰²°×¡À=v™ò‘ÿиåÿPdh@áKD Ë.aY…‹B®W†Ù¸Þxüözu^U å\…5o ††{xâ€âáf¤¬ä‰¿C´ 1`ìæ«wznà³×~{îÜÛvÜ7ýÞ,RÞ`‘Ðøûeð÷CôCi`öP0çÄO¿+·×ôbä s‰ `¿øu¨5ÌÌrõÚ€ Â8ñ¬UÏù÷„g=ëi".$šy4ÁÁê‰ „ôK÷LxB¦pVŸèßý6>|aèa$ÊèÜw/4à; Tü0§¿{åÆ^ ¤Ø·¢¥°BlpTX( ø ‘~. 
Üvg’ ê¨{a‹W êyQÿŒbÄž ÍxF4æ­àÈ‡ØÆ¿ªQ¦z_†rWÀÈñ4Ã\Ÿh/!2ÐŒsU“ȃ%B‘_,íõD7Òn WìÝJ€¶=.ŽQ“›äd˘FP†R”oê‡<0P²HrìvšÙU—ÈÖÔ1~ œÝñw¯>%Põº¥ß™H 5ѹW/©¸‡ü!™IsxwIíe²“Ó¤f5›öÉQfS›Û”ŒQðÐH:n‘àÁþ€¡¶FY8çhìÕš5Äs!0¼£ç(L•¯ˆº ÅI‘9,ð—ö æO¹H3•(áH3wWAL6Åš¥('±ÉMŒfT£sÿ*ÇæÉè²|Ѳ AYvÁ%@¤FÄÐëŸÐÍîÚã>‡ØBžfr¯£ß€¹HEâÔ`"‰ä0•ÇF…fö¸C ¨FUªS¥*)ZaÕ«^µªPÍjV¹ŠU«nU¬c­jWÃ*V³j5­T½èFÝúV–rX*êeWTàõ®¡°kd×¼^}vå+_í¿†„°z]ØbóºWÃv°|Íaï XŠ)ì±ö²la9›×C}”°9tli1˸‘@b;³d¬ðöT²ÆV¶³¥mmm{Û©¶®»åm(ó‘6xÌ´¦õid‡;Ü9ðà¸Ëens»\ÁE6znÿŸx°‡¤æŒµÏtíÝ`‹[ð†W¼¶Å#p›‰öV½ëe)ËìÁ¹/›Ãsí{_üæ×±ì[ìÀ ç"SHmâ½"ØÚ-&#·ĖñFXªŒ`‚°°à cÁ˜*8,U°à¼éeo‰MlB÷*â-Ño‹]übüR̸ÏýƒàÊ™Ú8ò€’MS0,UQBC&²¡ LÀç‘„‘‡lä©ùÉ¢À¤,e¨^Ê[•2Àå0 ÈB6´Nƒù.@&¦œe!cÙÉ¢@owO|g<Ç˽ÄÈAŃ!ZЃ&t¡ }hD'ZÑ‹ft£ÿÍ?Ë` ؽˆø0Å3çÍÇDæt§=}—!{™ž&µ§50êR§:Õ (Á;"ï@+DÒˆ jNgbÖª&uóük`ËË(å ††À$[ÙËfv³ýlhG[ÚÓ¦vµ£Í…˜S%·[D2—‰` *˜×¤Æµ(ÀLäN  Là§Qa4t"X˜€ªìj4 f„(Ä$‰}ã{ß’²®…àéåÙšÓÐê·(ÊMdx`Üžöu°-~q8 ;ÞTÇ=þq‡\ä#'yÉM~r‘ÇåßsTë·ž136™~­‚;qsœç\ç8¿ ÎYÀ€›7Bçi–nÿœzåÅV½å­ °`ɈwÄ·t¡3Z àÃÍ·c]Ϲ—ã݉žë\@ßyÛs^qŒÇ]î *å3âë*¼ç]ï{ç{ßýþwÀ^ðÏÉLøº=ÉA4š6w{ãÓ¾<Üü猸¹yîÐ øà<7?5ÎÅÞˆN, ’@ä%?Íg¢¸¹—KOùÊ·YçËcÂÍÓžó m¾ñm‡ûÜ}ÿ{ÉäÃlxH8|ä'_ù¸#,^ ëWe&žæÞeüîw.­H}”7¸æ{¤³ã¼¦Ç<Û;ñžÓëíµ½î-­~çHûí=?vëï¼÷Àçÿ£ñ)âÿ,Ãò¬åûç#@ä;ÀÎ*@|,Ëb®~RS­!˜>»a Iȿ닺`÷€Ñù¨“·–ì„ñÃ9/ó@³“< À¹@Ð#È@ºx3ÐË9>ˆ?´3:kA Ô<ó?",ÂÀ(%pȺ‚1&lB'tBÙ€5À±œÁ;« ÂÔë>y“·LˆÁì­Ã¹;€öKÁÖC²ÓðÁN` Ȅͣ=F˜CFÈÁœK¸yó’›K=ð³=6¼¿Ë ›“„!4BC<ÄšÈy‡.0¶KxBHŒDI,-“ð«)¬4˜óƒ+¤ „COüDPüÄ»E]cOŒAüDôxÿ:S”·£„@ „OtCQ4ERôDú‚L ?øÄFàOEP4FH½=ôÂ[$ÄMDDfô=E$6#0€ @s4k¼FlÌFm<´“™u2'Lô›`…e”—N¼EtFY¤Y4&„;€ºvDXŒ:8ü91É„ 95‚X¤ZôDwl„FP·yÓ,¸4 ’@€Ã ±·; 7”ÅRD‹¤G€dtÅu¤Ip…rlF‘¼8ELp <k[I–lI—|Ii“!Xm Ç{‘œt° B‡bdGŸüI ôÉ»Jv”µÓð€,³óhv¼ƒ©£±!Ø%cÿ€Ô TJJ`ø€ ÀHv”ȣ܌d€o™vŒ:¢lKvÌF@‡Iºüµ›‡rx(§•ëK¿üKÀ L’k¹UÁ,ÈyÐÉí)¥l°„J·ŒÌ¶üG  ¦¬Lvd®lʉtË@ LŸ´LÏdÇF(A˜ÌΔLv”„J°gpŬKÙü=£HVH¦ÁËMÝÜMÞìÍB²Éõ1b°³“†S#ÈTMælNŸ4¤,JMçtN4ض<žËŒLÖ¬„SpH{ˆÍÙ,ϸ»‰tØ8\OölOãs,ÏR p‡¹Ô³œ(pÈbÈ…ädMIXÿÎê бÄpK=P ¬€ÍÊÿ|LïÌbÈp(wOó¼Ð¹3 pèðYOtO-,|Oæ2 p°Ðº‰œHüÌÏgþô„J`åüO•Lx¤Î½Q¦lPõ„SÈ`x Ðt¨ÐêÀP%;áË•°IŒR)mBN‚Èú4£ìÀ yЉrhÑlȆg }…SðÇd„î|ÌÿÐ}S·üÏFðÑǤQKRLRb Ò"ýÒ#EŠê Ï%Ô_kÒ 0Ë™RE]Tç‚",­O¼ÙRœ8 wðÒ/M S1u2Í…\0Ó3õ­S:#TmÿSUmS‚$ÈU]UTU6US0"U =…WU` g Òg RMýÒr8Rw@ФTBMÖ‹» 
wVˆàÐmœVj­ÖjåH€`p OAM£ì˜{WyèR`QamQMÍONÓgðTb S`€QP ÕW¸W|ÍW}½×zí×yW^ýÕv-ÒuM×aM‡?5Öc WemX¹+É“T„.ˆŠ­X‹½XŒÍXÝXŽíXýXÝØ.PbOžøÖm ×é¨Ôq=Šr½ÔsEXƒYu]W›½Yœ¥Ù™EX„…ÙrUØq¥Ž@Õ‡-ZÚ< ÅY¥]Z¦mZ§}Z¨Z©uÿÚaE ”U/•]Y¡mY®åÚŸýZ° Û°íZ²Z¡ÍZ£M[DTÑ–[·}[¸[¹[º­Û¸åÚ$=ĬÝ[¾í[•U[ÀmX¿\Â-\Ã=\ÄMÜà \Æm\Ç}\È\É\Ê­\˽\ÌÍ\ÍÝ\Îí\Ïý\Ð ]Ñ]Ò-]Ó=]ÔM]Õ]]Öm]×}]Ø]Ù]Ú­]Û½]ÜÍ]ÝÝ]Þí]ßý]à ^á^â-^ã=^äM^å]^æm^ç}^è^é^ê­^ë½^ìÍ^íÝ^îí^ïý^ð _ñ_ò-_ó=_ôM_õ]_öm_÷}_ø_ù_ú­_û½_üÍ_ýÝ_þío_ÿý_``.`>`N`^`n`~`Ž` ž` ®` ¾` Î` Þ`î`þ`aa.a>aNa^ana~aŽaža®a¾aÎaÞaîaþa b!b"vÜ€;barbican-2.0.0/doc/source/images/barbican-overall-architecture.gif0000664000567000056710000006132112701405673026307 0ustar jenkinsjenkins00000000000000GIF87aÐÕ $$$,,,444;;;CCCLLLSSS\\\ccckkksss{{{„„„ŒŒŒ”””œœœ¤¤¤¬¬¬µµµ¾¾¾ÄÄÄÊÊÊÓÓÓÜÜÜãããíííôôôÿÿÿ!ù !ÿ ICCRGBG1012ÿ¨appl mntrRGB XYZ Ù  acspAPPLapplöÖÓ-appl descodscmxlcprtä8wtptrXYZ0gXYZDbXYZXrTRClchad|,bTRClgTRÿCldescGeneric RGB ProfileGeneric RGB Profilemluc skSK(xhrHR( caES$ÈptBR&ìukUA*frFU(Vaeobeÿcný RGB profilGeneri ki RGB profilPerfil RGB genèricPerfil RGB Genérico030;L=89 ?@>D09; RGBProfil générique RVBu( RGB ‚r_icÏðProfiÿlo RGB genericoGenerisk RGB-profilÇ|¼ RGB Õ¸\Ó Ç|Obecný RGB profiläèÕäÙÜ RGB ÛÜÜÙAllgemeines RGB-ProfilÁltalános RGB profilfn RGB cÏÿðe‡NöN‚, RGB 0×0í0Õ0¡0¤0ëProfil RGB generic“µ½¹ºÌ ÀÁ¿Æ¯» RGBPerfil RGB genéricoAlgemeen RGB-profielB#D%L RGB 1H'DGenel RGB ProfiliYleÿinen RGB-profiiliUniwersalny profil RGB1I89 ?@>D8;L RGBEDA *91JA RGB 'D9'EGeneric RGB ProfileGenerel RGB-beskrivelsetextCopyright 200¯7 Apple Inc., all rights reserved.XYZ óRÏXYZ tM=îÐXYZ Zu¬s4XYZ (Ÿ¸6curvÍsf32 BÞÿÿó&’ý‘ÿÿû¢ÿÿý£ÜÀl,ÐÿÀpH,ȤrÉl:ŸÐ¨tJ­Z¯Ø¬vËíz¿à°xL.›Ïè´zÍn»ßð¸|N¯Ûïø¼~Ïïûÿ€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ¡¢£¤¥¦§¨©ª«¬­®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖרÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ H° Áƒ*\Ȱ¡Ã‡#JœH±¢Å‹3jÜȱ£Ç CŠI²¤É“(Sª\ɲ¥Ë—0cÊœI³¦Í›8sêÜɳ§ÏÿŸ@ƒ J´¨Ñ£H“*]Ê´©Ó§P£JJµªÕ«X³jÝʵ«×¯`ÊK¶¬Ù³hÓªÍÙ! 
pãÊK·®Ý»xóêÝËw/ œxЀáBßÈ+^¬÷BO6fL¹²åËpoXK%ÃB‹Mº´éÓ¨S«^ͺ5kDؼdC®sëÞÍ»·k00ñPlؾ“+_¾ss~r ²kßν»÷ïàËO¾|ù $žDƒÌËŸO¿¾}ó° ¤ÃgÜ'à€¸Ý à€tÓ-a dÐVhá…f¨á†vèÿᇠ†ÈAu d€Ä ¬ÇAˆ,¶èâ‹0†Ø 쇄 AÆèã@Ya8P€ Öö^pàä“PF)å”TViå•Xf©å–Nz€A ñÙMriæ™h¦©¦•ƒ1ÀmETpÀ<®içxæ¹b¬—d$`A™zjè¡T~A„Fh  *餅¶…FpÐ@x°¥ †Êe§,ðdѧžŸ^Ùª¨g¾Z¥¬Vv`‘„®I딿še°Q+%‘ @Z„ HçÆ>­°ÀN˵NnÁ¤j„ ‰-•Ж ­”æ~zÿ.µ³¦û«»ëVyÊáЙæ¹ü¾ë.»íþ[¬Àä^kjEdp¯ãVKp¶êš 0¹_K+¼Æª+A‰Þá¸c,òÈ$—lòÉ(§œò¼0ø@²Ê4×lóÍ8K¼©p ¡0hóÐDmô~vür ÏlôÓPGý/Ë.ó)³ÔXgý´ÁôüÁÏh-öØ7o|ÒK7 €Ód·íö¿·¶ìñ¶°ýöÝdsí5L4Þ€¿½1{{ÐÚ®xÖÄí²tÛ½øäCëpßS®¹Ñƒ£m8⛯‹±Ã:OÞ8½GzÄCù/å–‘æBožÿxº¤‹NyçJn¹µk ´ð_¼ðÈ'O¼ñË3¯üðN*_üÅÐ.ϺõÇÿ<òÓOO|÷Ã?ßýŽÏ½|~öÞoÏ}óÙÃ/~ñ!Ã/+ðÕ›«~þî¿¿¾üáÛž¹еËM }û öܾÿU/€ïS—ï=}`ð&ؼäaw…cšúGš„pŽ8âaÀÈ9€sà€ p˜Í wÈCå•Ï^Û¢À{HÄJ 6Ø@ …v«ÊP¸Àñ怿ñŠ$\@e×7bñ‹ œ@ç9@†£¡¡ ™˜€)ŒpÔ½å;ÿo‰q# /‰H€ p²@ÀZ¸³4yÄâ…¹ó 1’E$Ÿ0 üq0€ IJ@k„À`ELö°…ÐâÞºˆGWf’À›”—€€”(0 p€(ÞhËÎ1Uu¼d3Mø ¨Ç¨À%£¨ À ,s € 9ÍNòeA”f;û÷  P€)§LÀऀ ni X@+çiBYЋ Ý¡œÔs€ (¯›(8Ç¹Ê “ˬeD·÷Ì?Es¤;Œ€7àÍô%ñKÿ@_õ:p@ˆ`€:Ù‰Òç½³’Bì© p¤@ˆHb( €u`8À_TªP¡öÏ¡\t–H­Š¼"yÞ¼(œU®àH À0CÊUåE i!—ð¶*T@â.ùüËK0€ (1©ä³©_sºÓ¶úuæ ªa=('d`p,AyV‹&õ1•êA PUº «C˜V›¼ $À @J¤Xv3­Mªª££´€©Ì\ì[ GGʳ®U@ 0VJ†¯@ihét¤ÞOã]áq¬œâj•JC6x¸D€ðÀZÕê‚Ögÿmu'ÏÙⳌs}mý0L ©Uª¹5ìn='¹.v‰,¶tùF˜:@ ˜€  Ë$v²]ìt-éY”ºö¨éÓ)¸ÛÏL@f€)0^Îê5ºçýZzYXáˆ.QĸXË (` Hð‡$€4U­¸m±‹÷Û;¦•È-—M²’—Ìä&;9©‘j[ÊT ©¸ñLÉeÎZ@¶:åÔ“ÇLf2S±@<_lËÌæ6CYŽ«…²òé ¤p[0Tò:†Ïnt’]ãXn1´é•Œ Ýd>c ðUí”ù¼É.syWÖd `QB3šÑ±%r\ÿìéO/z™H&°j“8YNä°JÀÑ- 3aJmjAWÀT©[@1yÝë6gGé<«YmJOf€ @@k.ò²~Fr±íè‹v×ÛævRUj€U³Ú¦ô&0k0àø³¦oÛép3ZÔ½u@ZKl{y^œ´´ôéÞDÖÈÎeùìÿÕ߯ÎÀ¯ð¸Eâ‚¶€ -=€·Ú¨@ah@gãlv´^OöåJv4_ólJ< ªµ¹§ÿ²ézÃüÉ€Æ74û«íŸ7ZÓÁ±–³Â6|8”‘Üp1Ìöůþÿd €U‘K_ú)D[vÐ&ç쟹Îd•³\Å.g{Û%þK|ÄæÅ_»æ©óœÞ„‘{“…nR#O]ð ×À°w5`àw¹vžkƒñnásÖ=fñÀc^Û À©ÌeZt ä_ÞÎá³ýr£»ÝkßV8æí€W…aæs¼™yÙ»eÞœöüìû ×|°è˜¤Žzÿ ÓY«ð•\u©^âÀæ¼°ûþy)/Rõµ_”Só>«ýú~_9ìÓ;}Áÿåèä®Ù3+¬m·3!}Žþ>ñvèúe2 JûÑ×cÕ!± 0(ÿÏ–}öbqÌ7ä‘ 0pÞJ XÂteà¡J‚’(n¡~—#y)X©f$8yð×G@P€‚ÇUz/hþÇ_ú–P„Fx„H˜„J¸„Kˆo•ƒIˆ€pàL(…É2rLØ…^ø…EˆùAqÚ‡…`x†h†ú&w„/„p+  L8PD‡i˜‡^X,ÈE§‡€È„·QxEV¨·Ö„ànxˆx„AXdCÈ…‘ˆŽÖ„n‘p›Ø…™x‰—(†HIg‰ 
¨‡Ÿ˜„ˆežØ‰§ˆ‰Yöv³ó‡¯ˆ©ˆ„™x‹QÈŠµˆÿ“8jDØ‹i¨YhŠž¨sÂ8ŒvH†(lƘŒ{è‰aøŒ¸yȇ²h´h`Œ…¨sÔh„ÞÈ_¨…ÿWx•HŽê¸Ž¶h‡r“f‹ÈŽò8Y‹ë·ô˜úø‹ù–Ex†ñ‰y„ÐW™ ù|¡ÉIJØE8NïHвau±‰„ЗLh‘F’ Y’¹I‘)’Kh‘'i “Coa ù‘I(’1‰’™8 ’é“= ”¹“-¹“ðç˜$ò’˜•R9•!²Q5VIˆT¹•\¹•†€0²PÙ•fy–ÿ‰aಔ e‰–r9—vñB¨T5Ô@—|Ù—roI!DP*З~y˜\yùÑÝâ9"`˜ˆ9™˜˜(–”„NoY“”Ù™Qi (.)MW…žyš•¹`„6/#Gc„,© h”ô˜•uó8ƒn±©´)›X `E0#5R…o!œ>ˆÁ9oÑÊ&!¬Ù$ž¡Þùàžâ9žäYžßÙãÉæ蹞îùžé ïñ–€™””>Gd ðí ŸþéŸýžÜ à©žÿy ëÉúfÁ±"É© ÿ¦‚žŠ ê* Z Ú¡âù¸±R4šÞ6†˜0mÔ¶¢,Ú¢.ú¢0£2Jmïö  ¢3š£:º£, –D$ê)v¶)Ê£Fz¤0ÚF𠀤Nú¤(º…`Ÿ÷)q P¤Oš¥FÚÐ8ª¥`£)šQÖÉšB4oàlÚ¦nú¦p§r:§tʦ·&… P§zº§| §ðöeõy#Jd°¦}z¨ˆ§ Ù¡`yš¨š¨ rzžk‘º©ˆZF𨜪t‘ù$Ú;J$™¨‰¨I gLTê1DBªê™¬ÚªsñšÿMB«µÚ™ ÷«”ɉÆtªž“-3,³eQɺ%±ŠúÒ¬X²¬I$­VÒ*Æ*«ÖŠ%©úÛZ+Ùj¦zà)Eø¬¿0#®»0Иâê #ræê 3²LÇ¡îú®¼¯óÚ õê5ÁÀ®ùª¯ºÀ¯Æð¯÷Ú®Û [ k øº°ûZ®› +±K±k± ;°[ K [ ²·0²ÃP²Äp²(+²K² ± û²¶ ²ÂÀ²Ãà²6+ 8 :+ <Û³°ð³À´[³Dë³1»²3k²J»´EÛ´9û´-µRë F{®V»³X›µ¬°µôÚµBûµÿ`« bë¯d›´ {¶©¶¼€´À0´n‹ p» rë [·­p·º·¾@·|[ ~› €Ë {;¸jÀSÕUB#'d¹‘„p %¹–ÛL…Q¹—»=ýÊè꜠º¢;º¤[º¦‹Šêª Ž™ûƒ®ûº°»²;»´[»¶û‚Áj„«ºEè­ßú»À¼Â;¼Ä+*·»i±ºöª¸„°ºd‚6Ê{±Ì˃Aµ½Ó+äú¼Jƒ½Ù Û›ºÉ›pÒû½x¾ÐK¾æûè۽껾}оã½ð»òë-ô[¿yp¿©’¿ú{ü{½ïû¿çë¼â‹þKÀtÀI’À ,ÿ Ü üÀpÁÓ1ÁìÌœÁl°ÁkÑÁ¬Õ+¯hÓ8Ë;Âr`À©I‡GÈ«Â^Q—<1ü3b”rÃ2¼9œ“LÒ m‘»aˆ*=œ`ŒAœ$IŒ„å{Äb°½âÈÃbÑÄ gÄP¬ÎëTœ†Ã#`Æb<Æd\Æf|Ʋ¬€ÆlÜÆnLÆ,_üÆt\ÇfÇE0Çv¼Ç{ŒÇ²ÊÇ€\Ça Ç\ÈcÜ$$7¥†¼Èpü#"Éî9© É–\žÐ\\°º|ÉžžñÁÃäÓÉŸ\ʰÄM0@Ê¥ìÉ6äÇJà)ÀÊ­É™¼Êµ|ÉÿñÅ÷ài¢Ìsú†*ÌÆÜ¦^š¼‚Ä¡À|ÌÐìÆAc©Œ4 ªÑ,ÌD¼Œà” €ÍÙªj–Ý<°1î6ÎÂLÌìÌÆ¡Púúéèwχc—‘T jÏø\fÄi#±ü9Ð=féq&¢`Ø‘Ðd†öÄç þqнџ¡2ˆ¡´{ÂÈ$]Ò&}Ç×EL°<!=Ò']Ò‚7Ê–B0…ÎBÈ/mÈmt’‡ƒƒ‚Ó9È’=]Mç7B=Ô}ÌÔ‹6æöÐîÖ) S¼ßZŸþ1BTÀ ÐxÑŠÕOâÿ¦–Hð%'Ö®”+H(Ð+bý) FPYFqa­Öj=+·ÒâH ã*Éj-†M-3’MT@œ˜-W](ÄÒ1s6F9 ¯„=1‘M@çŒ#]í…]%PT‡hL×r­š Úª +0sÙôp/ °Be50­ƒ(£CÛƒÛC)õicq-Ç]*ÛþRVñ2)ý2Û§Ý8 àÖÉ™§µ3ܸ*· %àcGÀ`â)Å};»ÍÛ¬/­’§O¬ “ÛÒ/¢‚ÞºÜã4é]ÌV#Ü«Sß)cL±dÖQà àQö]2Y†$FЦ"9ÿm.øÂ™’ÝÏ}àÿðâz9Ð 6~á#s+@’ÛŽáÎB cRÀßþ âéà×­~(î.€i×Ý->ÛÞÐQ0á"=ã3®áôp]®ã .â$¾ßÐß~à*ž)²äЂ/wÇÚ=ãNRãr|8Îä~:>>NßZnßB~ÑýQä'®ãI^^Ý5ZþâOî1QNq@^å~ãåaâÚåòðåGžç3æ%nænßiNkæ‹òæö² ®ãNÂj6þMÂ9èbþk|~Žéõ-èDnäLþ˜®æKÎä‹^Ù åL®)Ôlç–náž:³›ÿ_î=üc<íÓ?O¢/ѪëäÂëìú’?ëâ<¯"ì­%Aöó?ÐîAÓ“Ä#Næ>]äÍ•@¼žAâ£@tI? 
-ÄYꈮEÛî½Î@ÚÞ@à>âî.©žÇŽN'óëÈÎíÒÓî$íÊnAáýêPPéwïî¾íý“ïÙs-¿w-Ý®ðã<tAÏ@ð¾?ëAänë=.ŸÚ„Iãš"?mU4=Ì¡Ik5ô@^—")¯Eƒ•Dù&âc‡ àIÄóâ°ÑFpG_TK’QíƒÞÙþ<@îDÄòpTî×î$¤ô­õEPÿEóç`¸þ…E»®>þ¹Ò.ér|çÿòäõÔ´ºŸq¸â_žEö¯DDlŸ<šÞñ±_`T{Q% ¶Jíºª5HðKÂÄ Vdcœô÷[ÀHôSæA¸dy­5v„ěɔøQG/%ô¡Þ®%ôl‹ôG᤹ÂS$<àôöýÃôJîG$”Z˜t‘™GUßèWúºFÂû!µÊ Æùè‘iÈC§|E`ë?O³{àˆdœ$R[•dÊýÛsY(÷^îñ¿UDô5‡q‘©`{¡t$âôa ¢NIKêT &FrTÐ’wÀ >´q¬ï|Ú`$Ò—G™·X4‹GòÑ<¡ÑÍÅÑÿ¸@1Çõ¹qj6–ɼ©T ^²:ÚnW䇳p¸ÛžîwÉ.(¸ÊÚ¼Àð¢"*<682+7ô 2("®6%$"",4*FŸ0¶È*Ý2æf‹<8Z]w++ &..& (Á8Ú8Z78¢Ÿ™¥t¡¥y³w+"h¿ÁÃÅÇÉËÍÏÑÓ‘8 š´³ÖÌ (8* 0œÕ&(0&ƒ:tÑà€À0îHð„Ò† pà„ÁINœ@8à@¼D6`X äœ&ºÜ\€‚² D=¹pƒ† XÈpƒ…a\2X¨ÀáÙÿàÈYggׄ=1+<Õ™•(¥¢0\È0t,Q¥=Se [Ï¢FGEšd (…›ÄF˜-Äš"8˜@±ÂLB&aÉ2g Lº'}&°ñ 4 H˜T¡& ”M±Íb„2pfk”„wMró¦ŽvmÛ·qçÇŽA… la+“G¯2 'tØgàJÉ æ9Q0 B/“:FxH Á‚¨À(è> Áè ®¨é ¶”*YšSâÎqš ô µ* à™ÂÒb B¡i 2À`‚ àD f‚`àðˆjŽ: 0 L¢HA )¹¯A1Ð?†‘` #xÿ@ D,°' 5pËH$¡Ñ•ü0èqÁ¼8Ë'R$p‘’Ÿ@¹ >W;‡±\ÚËÆ‚hB™ŠÀ0`à4ž à€Ђ3*!8ò$€â‚@jÀ Á¢„ªÝøìÓÏ?ûd‡‘%.Á‹Z`ôôáGÉL8`€¤›¬( 3 N/   8ŽyÊ{ð ‰(Dòì0¥•†ïú\™BG!æ œÀ 8°Ê§™@;ðÁÄ’`‚ " €”ÐrÅ©J­„C( × ð+ 'MN!ÙûŒr1¬jÊO&0XÒF¸pœ‹. >ù¤Áž ƒÿàV.*ød‚&î£@EBir±[ lµ’ˆ¥-HP°5 v+8`VÉèß à*1Bå2ˆ—¨ÓY…{ÙЗaŽYf#õMGm„#À€x^@êà G‡ôéxb:*Èh|ºèÔ@I `<($Ðx¤ƒ¹™®÷`miÖ×™¢Ù[5¡`L«ÄÁ„Í‚Ò,Q;1Ũh¥ãÂ2Ü1Í(~Lé“ NqÖÚÔÀŒE&ThlŒÙ-Úwˆ+ÇÇ€šÀô9ùM)–Ӊċ`< .çÉÙ;²£À€* ¢8L‹1ÈÿºX¼˜€J±ù‚’“ 'ØøH–Ýmæí¹ïž6AKüñÉ/ß|†¾&€€žîMHß j| $Ðy¬¥hz}¬ ¤Ó2€ ¡ `yÆ79²b)žù A•M>èO$˜3pâV€QŠ) ðœ(h—Ë®”E¾Â<ànG-þ$M  ~Ä·‰ÂÈï@¬©I‚HãÁÉɯY¬MRPäBÐÕyà çÇ£½M|=Ñ<ˆþ0kP,_íÈÁ`Ћe¬¢¢öðDñ-g@›ÐXR5 m0ÅÃ@ã‰L|À* 34ôÄÿwßÑ€ÍE ¸Ì{tä#i8 ‘‰ËÏó: ( j z?„Ò#OF`c÷óHS²2<2à €˜E’”q:_Xúµõ±Ò†–$_ñ.@ÁrtÀ‚ ©ä%“b9eE7¦Ä"," r—äÕPó€#Vò0”Ê0„Ì[ž°†ìV‚׺Ÿp"oHd&¦(wCa¤05ID¤9q£81‘=Å ›é!Yàì×&Š"Š^Bñ`‡-0Æ…úÒ|˜äß¡Äé»/aÀâS^Õ|ÇŒt$iÞKö“:‘Nê{ã1)ŠQÎ’5µ©ÌÀwȘVq,µÿ4 ¸ÊOÆï–@Ä Bj€uò+À PM&È &1ð™âQ„@NÕéN=Ìø ³˜Pô $ÔQLL~áÜP≤±K·úWhÓÆœ„›!&^H9¼H/ šIV“X¤¾Á“ˆúç':>I¾%tML‰%éWý  „°Hȃu¨8ÄHF°Bð2è‰,T‡ÑQ8jª/vBÀmq›¦iÖ à €ÛÛJ䇫… !pSå.7P’¤äj1é©CINc¼j(EÌɘ2a‘ÆöU¼É·?ËG-%‡ŒQ‡DÀ(pY°S˜ä ¦ÿû`Z<¶jÂÎbXFS$ÿ:À5-\gBñÓ¬ è l/½98[Þó„ øÐÝ<"ÕS_#¼O(ZÔ·¬Ä‡Ì*=9–Éꓟfì ƒª¸-5ˆA`ù¯Šš>‚ Í“8QŸè•¢¢œR(Y”•¤<íO‚›UðÇmŸY v›Ã¦ý– ²ÛÔbÜò‰r¦Ì5ó™Õ‘S"{1º±èåe¨C øæjO£ät„š `&PýÍø·3 ÷ÀUƆ 
ð«;í‰XcEV÷Íp'Œ–²YÓ]3«ÂÈtR®Éše=úuÿòb„©Ed1(KYBÑ•0Æÿ¢”Ï„—U¾[S†ñiµìä3Vq W ¯}Ê7‘J_¥Tñ˜™¶š-_ž6©ó˜Ç¢(2¥ä#Å‚YÏZdRE¡“È&ì1(©ß—T' L‚K pÀšÁZf4ï›ß»qî¡pœàÇx‚:‚ßDÜ¥JÈÌé4@Ùn‹€dÖ«@Ktª˜9\-h  S˜ÎHp•¯<)õÇ}¹rQ"êà4¯âÌí(J€ã\2GªûÊJ™W™Vd6`Ô¯aôð’9 ‚„!ê¿©<Ø•Ýç!}Nð¦ëÜæOÇùÖwžõ€7àÇ‹hØÅžöÿU“,Œ¡Õ×Ý,­WA8*Q ƒ€šÈDÜÚ´á €/ %ú€ª”«=íšPMûyÉ j@G±üå1Ÿy¢d €–oÊ@¦Ë€Á ÿŒR”Âë¬ô¿óP• =É’Ð-²§ÝÖõÂÐü±Ú·˜aþñ‘Ÿ|å{è4[+—|Õ ,“ûiŽØzøUgbš}ñ?ù=b@Ùm䈑ùY•Lp¦¾yP@(pVïÞ‹¡Ìi>ãGš>Oðà¶T&,æ¤Ë@€ôÎøØø²êñ$/#ò†¢¢G €ÿæ"a‘òÿ'»ƒÂ",BÿÀÚz$ C„cŒÂn` i4Å8ÐÍB†ïå,Ú|°Çï¿Éù ¯•ìÊŽâ –ïٖ³ Ÿ„íŒ0 •ÏüÐ/ŒÔ/A´0ó4ÁKøá`b ò\`úŠ¢mL,gé€(@nDʤµð!O q¹(OQ2§nà.Oæ.Њ†Á§Îóp‰W(1 ‹#ø&IÃp‰B•’°ê` p+1ú,±û¬ÐêÂi±»¤ ê kw‚³Kp'4q­èÊ8pí†ñÃÿ"õD‘M®1!‰‘'1ë"Qå0‘-ñ n;1ŘBqwÑGÍÑ“Ïûˆ .fq«ñü#ýÎNQà¶ÑŠXQ³Ñ¯ñI±¡Q ²{r!Ð\n ÒMî‰ 'R ÝñàF2Ò¹°èîñ#mÑ$w‘ r%IPTS`2&er&i²&mò&q2'ur'yR&w"U>ñå"2êz²(R',Óñ(™²)g‹"ÒnäAÍ)­Ò(‡Á¯œÄàb®ò+Á2,­rÁ %Írf‚% E,Ù²-Ýò)ÅD< %"ÿàñ°è-ñò 'àB`’î/Ù‹Þðæ2t8@P0ßòM¶¤0ÃÈNP1%s2±ÚÐ1Ï23ýÄ0àÿ’2A34cR&Î0sý¨.E4›‹ºNó:@R¥*Yó(w‚c8 6‰© Wó6±R LÓIh“€39S%È$65ó9mãä]>S9­ó*ïã2Ä9ÁÁ:ÀÒä:wr5 I‡éArÈôMáôMûècFa" Î/NùtL½4K#ª•Ä9m!ºNÿ´ONNôHMË„QuQ-UKqâ´R/5Nç´76ÀNŸtT,5 S0UUuUYµU]µUŸïUeuViµU94 Õ SÃPµV}õW[õwVbrõƒºÿòw€uY™ÿÀADÕi#šõZ}•N…!TÕᲪP[Å5X_ðgÆõ\WUN!MIµ]K‹Jítäu^éµ^íõ^ñÕt˜m'òµ_ýõ_ëõ3Ä¢XÃè7š`6aû•þ®€`ßqxMa%vb•E-4 ZïÔ'žmb9Va?ƒW¸•6¬c(8£cM¶_åF`NveÖ(ÒôGÝ5f Ó XImöfÕ.Ñä3:­ëpÖEQci!!jög´"„Öz¶h6g_ö6:€˜¶i©ögÁ ie6kgÁ¢6¼ökÁ6lÅvlɶl™áÓÔlÕvmÙ6laÖvº¶måÿvnÍvgªâ–nõvo9k·où6péÖoÓp÷pÇÖ#%·qÁÖnµ6r Ò*ÐóÞVr17s5·%Q.wsA7t@q/ÏsE÷tQ7u9²r//(U÷u7—Kˆ‚]vm÷vt¼2Ww}7A»6!†!möw÷x#Ïœ¡¯¡x‘÷yÏÒ8Úz©·z#p öÓzµ÷9§`z·÷{Áב<À#{7|ÍͺwÎw}Ù·Oa>·}å·{Òw~í÷~ ~ñw!©~ù÷ÿ÷}³€ 8fü·€˜}8~¸Çá€8‚­w%¸‚i‚-8ƒ}—‚5ÿ¸ƒÅƒ=8„O—ƒE¸„„M8…#—„UX„Q¸…a˜TÇWc8„_¸†q:±·|s˜€Ã 'L3ˆ…xˆ‰¸ˆøˆ‘8‰•x‰™¸‰ø‰¡8Š¥xŠ©¸Š­øŠ±8‹±Ø#¸‡ï·{Å{X&ŒÅ¸ŒÍøŒyŒÑxÙ¸ŒÕ¸á¡yØ‹ùWwQòŽñ8õxù¸ÉoŽé;`(ªD²9‘µñ¹‘™‘YìL7+X‹-¹ˆ±w7/y“9¹“‹˜KÚÅ“Ey”/Yr<—”³˜’UÙ{t¿7=ý€My•iv[ùaY}ùw|G¡–}uo™sY–{ù—ÿ9v‡Á•·w˜ÿw–ù™%7˜÷—™ù×™¡ùšcVš¹Gt¹š‹›ÁyTµù~©y­9œÑ:ÇÙ~ËÏ9áÙ,×y~Ûù~ß9žñYçY~ëÙ~ï9Ÿ:÷¹}ûy~ÿ9 zßš} Z~:¡!“—¹™˜5 
¢1Íz}º}:£Aš•'Z˜+º™¿9¤QÚ‘6ú|;š}?:¥aPVÚ|[ú|yù¢c:§Ñr¤÷—3cÙ¤qZ§±™kñ¶¨ú¨‘:©•z©™oS‚I›:ª¥zª“º‹‰šª±:«“”CU«½Ú«­ú«Å:«aäbÇú¬£º‹…:uãÂ"ä­áÿ:®åz®éº®gBJ„…ìz¯ùº¯áZEæ¸2̯ »°çZEˆÁ°{±I£vç€r­b±%›°kbÆ&û²íz0Ôz­7×hSLA„D{´I»´Mû´Q;µE¨Aô ´Uûµa;¶G;¢È$œ üd;·uµi,„vû·ÛP.ÁAºÒM;¹s[_ZD¹[µ-(U”™³7WFîš®-»o ß´Û—šÂK(àĄ⸻»û©¼Í»ŒTįÓ;»Ñ;¾¡è>ºÃ±©;u'`Hå­ýû¿<«çRU*ç 3)E6|Á¼Áû[$ˆ{*ÐÃ4¼Â-ü«M“Àÿ¿ó;t ¢œÚqE|ÄI\l£öù„–+«&oK¼Å]œoCÕpA8+@y_<Çu\n=€®Ãm—63`lj¼ÈÍÖ³ÀÀ@%äÀÈüÉûV”ò6›ʱüÅb8È3÷T¥ÆÕ@ Âö:ÌüʳüÆÍÿ¡Í¯£lݰX\nϼÍÁvÍß<Í¥î<Ïõ¼lCõ0!à45@ï¬CÌÝœÍý¼ÑùÜkï|Ï!}ÑÍö"ÆËS2£JÏ9½Ó=ýÓA=ÔE}ÔI½Ô×ñ’+t"€âÔÜÔ]ýÕa=Öe}Ìa$á2àâ³ f½×}ý×}]•hÓGÿØ6Ø‘=Ù•ÔQ3; €eÙ¥}Ú‘½ÖoÝr}ש}Û¹Öis؉t¹ÖØû¼ÛÍýÜ;}¼ ÐàÙ¡ÕÑ=ÞϽÖ7â0À’CÞõ=Þ¿=ÜCwÜ£ Þ÷}à•Ý8Ö3eüÝ ~á—Þ»™î=9xá)Þ×ióü]ÜIòØ+¾ãeÝàÙ½&Kà=¾äEÝáqß­ÃäY¾Ôû=ã5—k#ªÜ½oáÝæ×çM]ç=ä~ä >js^èièKç;å±]åƒþæwÍzç¾ä_æ1à!m&>ÔÓÔP“$¦ "÷ÀÒ´"ǼAÏ¯Æ jeëAÿUªÁí ÔÀ>tœ«>Ò~Ô ZÛdÌi3Ò¾ÖXeï×\‚AïÁ^Ôç^ ÜìÑ^ ìàá? ð}©Ü 2ø>yV¢2è‡pæM nÑjÒ^!8ð#'mŸë£õÙôÙ ê­~…{“"Ä8% bd‚… –jàc2÷ùàŽOħø=ŽÛÀîCþóÞŒ*b° É*à ŽDG6@ø·d~Úà͸òí=óÍŸ6€”`Fœ @ !ž`6æQpÿf¤æûÙ2ph<"“Ê¥R’À|¢Ò)µj½b³Ú-·ëý‚Ãâ1¹l>c=ÿ‡bÃ|Ã5ÂCä@e#´ \qº¹mb(Pr r¸m$RÆi&Y0@xLy@,ômnfpPdømH 8)DpMF"j(6<ð*"M]úfšj e,8lPeLä6k^ < aì1ø9@ý2'»Î5X®/gùÎgÇ9A¡õûÿ (p Á‚\Ô°A†oI+ «,TZpBB^H`ä„*´rB /4zD¤Â„A";J°pÄH ë-\ÒéS¨Q¥r.Ù°­D_ 08 q†Ý*QðHò† .H ìÁ!–.7`ÿ˜ !æÌ L‚ú&Ú´)Õ$`S›dÂs‡$xà€L° øTª ! 
‡2P¸¶å»"+$Ë ˜pH  c-,wÉ>ƒž?ƒ-z4é Á)·U|(U8 à‚9ÐÀ …FV=` ÁÀLXàÈÍðÜLHPàw L“R°`j¹;AIEJáæÔÁÐ Ô»€Ë ¾Ð ýŠƒ8L@À@Á€¶R2Á¨dA]ôÁE Ѐq‚”—[ÔWwrqðÀãø1ß·\bA8ppÁ ÇiH4AðlPA 'ß«8ÿ`‡6°€FˆDO”–¤’K2Ù¤g§‘a "2×H‘Ô…‹  ì!Müæ(÷ÈLpÉ{$ðÀ|À…•p€.Ù¢'àE!ÞOE¶"­T`@æD0Dnp€Á ð °x!È—9t0g jpAøIæèpŒ,Jv¡ZºUa\Enà^­`€€mïȹèÁÌ@«$Á;©®J#4Ð@qN) R r 餹碛nº§¥6®0ÐÊ|h€&|ÉW lèãœ@–œD äÈ¿ôZÿv)-Ì @zb:¶ªõ]Oã‹J €A†y èCH'AḐx0Œ<¬Áy_žz¿ \´1_kpH»«®R¼µk‘Û\cñ6Š@˜ƒá¨dé´TÀ))fu!O71ª°P£ W)ñfE5À=ìË^¢V#n¤„S똩·;l¹æŽˆÙ(Ô§Hƒƒ,e§ø+¶2x?rÔ—˜5-.^ø :Èx8BD̪^p£$Uœ2f108ÁEónÔÛãëiÏ{dnPlå<+ÂÈÊ9 ØËsT!n€‚Ù N%mFfP”5(0VÀ*–"b<ñ†ÂÊ4X+;³F)€wnœèÒX§Ô±„îqZ/÷F¥¤îjOk”@8‹ZEf™[^G ¨iPÈ:%ŒÿçÄq¼0€z®Ó9žæP°Øáô©€87É5$À7ËêªÀR€«ó[Ú>Mоrá3®rëAXH¦6µ6¢*n„3 ÄÆ–•,‚¦à¼‘{µÃDþõœÔ<àOÀ]£Y†T<ôj+3[¤P*UH7œCœ~¹YÀ+VÀë ƒ¢$ÙF•"Áœk¨Ë$¤Q‘š+¥r6Ô›Ï_`fÔŒŒÁhü&@¾®Ö-Ì]¬F‹dP –>Œên…Ë5¸Ò5¼â«>ÝjÕ9‘nßüͽ–Ób€"iD%3P0Êðj “ì›Ûl””æ•Ìÿ@Š‘–Ô­h§úûÆùèf“ïBçÓ¦v‹O½§n° ¿à#3 À,xb¥[?ì¨m!‘‚ËÛ ÌA‚92b‡aÖ ° NlÆù´1Т>4#|ËbàŽzÔÓ<Î&É8¸“nhêƒè0U ’(Ó¤}…4ÿ=}kænÂÔêT§z l‹2³›ý¶@’ð*Òž6µ«mmj. ˜¦½)˜å*ga€¡åé«l Òk£˜dK1l2ã ň-¹6¿ûm6ñ¹Pî¶¿ ~ín߈²Ò–Œ°@"6°ÀÄ`ÑÅ(†A xê ’Ð•[LŠÁ@q@pƒ£\ÚÝVqRí›ò—W…™Â¸¼€€{«<±¨ N»Ý›øí*°xÀU­ÌGu 7,þôò§_@ÙΞ:ÕÏí“C½àVÆv´§m®v}äXûÈUÞmžœìY÷÷e œÙ¯åU®¶Ú1^>²ÿã}ì]ÿ:ÅÓ^÷¸Ó%¦Ž^…\ø~ÏëXÇxFóÎu³¼Ê×ûá .õªc>ó£¹zåùx¯×ýÊ §<ä!Ÿ÷Åw^åP@ÀEJRÔ§^ܰ½èÍ>zЛ>쟽Ê¿bÂ+ˆ÷t§=êk/ûÛ—^ìŸß½ðsyÍC?úá|ó«oý懅õ™]%ì¯ïýï§|僧¢‚þó£ßàñu²ôÛï~1P?ýòŸ¿¸WßzÍvŸþú¯¾øOÅÞþ àõ­ß²½ß" x€0ÈUÝB`ê•GˆÉÛ-@\ÀFàžß¤Î0PC‚œ…r` ¾œê…&à ² ÿi|À¦pUùÈà Ò` Úà â`êàò`ú FH@Hu@o˜Ü"a*á2!f °LHe@4aZába þδ`vat]Šá’aZÊldV(@”aº¡F¾-àÈL¸Œàâaþ`XWxá" 艾u›â!ÖàUœÈ0dV†ôa!"b$âUD€ÄKH}@*è¾Ib'âáY|Ö–`" ’"æy@*ìÅC)²±"+æ^+¢¨Yœˆ§PÁ)zŸ$F,ò⨽b/câUÀ¦È`b@”ßc3bY®¢3ã°„Dÿ”b6B_€ô 8À€c8Šã8’c9šã9¢#8BÀX@@:ºã;Âc9:€D¬ˆt@x`‘[4Äc?úã9®£´ã?$A:|eq&z‡AË7dDöct„DZ¤;:@ì…É-¤6zdÕ5$b°Á'SIšäI¢dJª¤IÖ ƒðÉJÂdLʤI¦H`#>æÂ0~ÖLòdO¢$Ïé…O åP*…YÆt@ªHBeSÎ$D¹9åTª¤~H€aŒâGjå“u@.P£EýTXŠåX’eYšåY¢eX–OZ²e[ºåYZ*dW²Ä[Úå]¢åZâå^òeZmÒÜ#ÿR‚Å,òeaÞ¥UÔ¥a*fZR@æVBf³b%HceŽ^eãÞ(BV†GTŸaf/^fhòâEp@`¦/€&i²fkŠZtddÊf”u@W^ÂmâfnêænòfoúæmºœPÂogq'nžfp&>ÖæpçsBgnFBÑEguZgrzÁdZçv>§}¡w‚'qÆæl’gy‚AC^jš'tB-' 
…W>ÊÄzÒg}þQ‰\€zÚç´§Çìç´ˆüç€hºà§~hú߀ʧ€èƒBhih„*h„~@€Zh†jè@L(„Vh„b膊舖A‡>è‡Bhˆ’èÿвè˜h¢èƒªh‹ÒhFÁ‹hŒèŒÚh’(Ž&(BY(úh‘f(þ§Ž(‘i“(’î§’(“:i•Ú'”Ú§”þ'•Zi—–'–Ö§–î'—zi™B&˜Ò§˜Ú'™ši›j#š®§šÖ'›ºiþ!œš§œÒ'ÚiŸ® ž–§ž®'Ÿúi¡º ’§ š'¡j£j¢Î¦¢–'£:j¥N¤Ê¦¤’'¥Zj§B¦F¦¦Î&§zj©Ò¨B¦¨Ê&©šj«Þªn¥ªF&«ºj­ö¬j¥¬B&­Új¯î®~¤®jeƒúj±†°zdÞdŠÎ§±:k,`´Jë´RkÿµZëµb«µ ‰˜F¶zë·‚k¸Šë¸’k¸f¢{–kºªëº²k»N+†ºk¼fë³&i"„ç½ú&Á ¾òk¿úkx¦Š„ýëÀlÁò¦wl¿úÂ{Ò«VVŠ BlÄJìÄRlÅ0lÃjã¶ZlÇzìÇ‚lÈ¢‚jìV …(§rÊëÊJ«Ê²ìËÂlÌÊìÌÒlÍ~«ËÚlº¦lÀ’lÉ~$¾¥Ï -lkÆ­žì‚íÒ¦Ï2­V&íÓJmíÔn¬b(­Õ>-z^€Ñj­ûEí×j-×z­ØF_Øš­Ô’mÚ"-Ö²­ÚVíÛ² ÚÊíЮmÝ& ÝâmÉÞíÞ¾ŸÿÞú-½ömàJà®±îájžá*n¯&nãVãB®«>îä:›äZn©VnæFær®¥nî玗çŠn£†néÒ颮ŸžîêÆ•êºn¶nìÚìÒ®™Îîíú‘íên—ænïúﯓþîðîðo‘æcz&o\qÅemó–iÜJ/ue!,*BÙV/‰zf!¸œ:po÷*I¥ ´-+ù)~VÅõ¬ú¢ ÇR›Œ/üZ(×b[úÚ¯Ûì´Ñïþ6éòR›þpÛœìUüo)2N+ðºdoý>0…Š[O0´z¥$NFcFâ&ç?Xd`$Z5vðÄ(çÿáö $€°™Ù Ãp Ëð Ó0 »p ãpë°°)€4€u€0ºxÀüìÇêð#1 7@F&qï°X‰Õ­ˆUJ9PdZeÔsq{ñƒq‹ñ“±#@f䬉±?”Š£iË:jqÓqßã±wĘìÇŸ°íÌ£'@(äl!›kCZ@T¦T°q“PcàMr%Ïk>VÀ8šE|-D@JI€1^ï.r)›ò)£r*«ò*³²*ß#~ì‡:ògœIJQ@ʶr.ëò.ó2/ƒ‚¬¡ÉLí¾$À½œb/#s2+s2«¤” Î2@Hÿ¸4ç2[ó5c3)磳—¬ÎÆ(gó8“s2#£ïX@4„0×1—ó;Ã3+w€páÑ2ÂȈs<ëó>ï‚gúΨ³4gÊiòsAôlŒF í"ýôCï³gj$@4ÄÆ0ä3Dk46{€ª°–ÏÆŠÃdôF“ô2KtT´´†4ŒtI»´/£¼o¯T×¾ôM/³ .@ÌtJgAâÄKKãôP«2#‡ÆfZ5Q3õ)·L»ùt„Œm,uS[õ.Г±v€™]uW»r† tT{#à‚W›u)'%ëõt©öƸóYõQ»µXÄ‘Ø#\ãu!ŒV8«ÿ0Ànäu^o€²8(]g¥@A¶Y{f4rªadâbŸuG3‹akÁ>P6\ŸLT«(hRUoöK#£'¬uT¯Áíˆöh»4JH@±v[°¶W{æHef_|”õl{õQ+ó[ó6Sϳmà¶$‡ wW¯Ánøªª¸õr[µ ÌqW²h¯t7 Qt­BòŠ|¯:œ²"ì&%Ây›w*“·x“ò/¸7z¿·z;'rÆw|£r}Ÿæ¾¶uOA(À1¨²{K§€gzß7|›òz¸!w€38}3x³w)ã÷i&ôjHÐyæz÷fzx‚Ï÷% 8‰/8ÿ|¸‚ÿ&ˆ«¸ˆëf‰¿wZË4KO房8‰ëxƒS8Ž#¸„S¸‡ß&Œ8븋ç8Ч÷<Û[†G¥tøqâxŽ'ìCç”§øuBCbÓ¸i‰mJ¹xV¹•?'–Ó7w>/Tö*0€rù›Ãyo ç–{3]ÏíÄyžëùmv‚dÛ*›»ùž z•çB ÕyT«Á—ú¢lšûy­º™3ú¤k¹¡Óø):Z”Sú¦ƒ§£¯9 :§‹zu:—ó7¦/€¦úª§§ÿ9¨K:«ËúÓù¥s@¦Ïz®û¦«C:¬ëú¯Kg­Ÿú­§:°;‰÷ù§·y¬û¦—ú¡û4ÿª«z³Ë:¯»j¤S»®?»­ãz¶çºµ·*¶{{µ »uKû¸Ï:¸›ª¸£û¨oû°w{»¯z²¿ú²Ë»»—ûqŸû½‹úó^ ²‡:¿Oú»›;±O»À: Ð£_»¯#üÀç;nïûÃ3º¿3|¸;<Å'|ÄcöÄk¼ 
[<À3ûÇ,Áë»Á“|ÂÈů{Ƨ|œ›¼Ä£üËëyÈ×{ÀÓü˜Ç|ÇÏ|ÎùÍ÷º½ûü›ï¼a{üÐ',Ð7¼Ð#}Òs¼Ñ÷|Ó7úÊ‹¼Ôìœ[:¼ã¹ÕOý¿ßüȧ/Ø«Øs§Ø›}Áš½=úÓÓõÑ—½2¨=À‚½¬=Ä}t*¼×½ÿroÝ+Ãß#ÃEœýØ>~»àßæààþã“GÑ·=±Ëìf2¤ýÜ3þÝÃ=",¾Ý«Åå“ýÜ;þØs~”(=ƃ[”ÀAàÇ=è—ëË…ëþ\äCãûAÖ¼£É¾å7¾Ýk~6ØJï'þTFð‡þÂ<2ë7?ì»KçãCï#?‹îŸ<ï÷B6TP`~Ð<¿ï+A=„ÿ(ÿÍÓN>àMаÿï·ÿõ›úſñ·ÿ¸œÿÞ—ÿèã&h„CbÑxÜlŽÄäÒYl>¥Ò̳€p>[n×û‡ÅcrÙ|F§ÕkvÛýf{8Œ…rz×$õø©]ØŒÿâ\ªXˆ€KT\dldä€`°ð›ÊÈÀÀİ$|ÚìÌàò ¥²ÂÒrLU]emu}%“££$º´¸­µͰ°°ÔÝ.<„=FN^„”¤]25(68X˜ ÖÈ`HÀ8ªºXH¸Øæ«8pÀö6ÍRv‡—«ÃŽP8Fˆ(Ç˨@&“p€@¸U±pMÝCˆæ]ÄèŠÙ¤JV H@'weh   tÆù»SAº‰Dª\i—QçNž=éͶáA ,ˆ&AÐM >H@ÃT` ÀáC`N¬èSìØ2±Lp€‚†L(¨¢ÿ‡ƒ ìÒ¥ÕäÒºw‡`bâ—¯ÁÙçIRøÏÝ@¥p¢"YòäU²ìí‚@ÀlˆV GX±ž 2`S Ôé°š¯âè‚Bã  €,8` ÝT ”e*Á?„¦§Y'M’{‡ÀR¦Ì²Æ`ܰ‹¢¼¨R£èŒÄ‚¤àjš†Ø#‡vl T¥fþÉ[ÿâØÂ6vRGzÄÂ×êœ vÁE•K$ß+E‡"Œeü @) ¼‹>î¨w]GAÔTP ÉœG3_H/R-½ÉT9Ai®®$ˆ€Ò*°+§™ìsº`9øUK么éÊiT¼J ûR§ÆºY€äÀDB³ƒ¢¢Ñ& jÀe#™€¨éQmj"U 0' HÏT¯êŽçêÀʤ¦ üf¹r,Š(Š#]7VZî>H¯‚ÐŽôšCÉ!!)jŽS aGf°¬­X:`€±æ«­y›<5Ž¡`)ŠmŠ”[°Åa©^Ý­Eº­Ó.²8ÒäÿSÈÎØÇ¨Âäƒa[L ÙØ0#„Œ'>‘ÀG&ü‡ó¨?û1É„-툫h\þ0º`G滃ÑQFœŸ;Rà(+]‰øC„ <@@{†´ç w„pe DAþyÀJ«e£ I îñ’œÚ"Øå ¿™M_ŒçÚÄÙÍ¢‰s]ü@çßô™·iÏ<ð 4ʹLs¢}òcðºYÏmžËkC 9?:|JÐ)nç»ÌEÏ:õƒž-`E«¹=‡&Q+ ¨ #cš³ÈÛØÝ¡®º,u®?V6ÿ²&®¶(6TÒêš2k.Â;mQ‹ÿQAöõ²A% <àQÔpÀŸ0(ímÑØßVŸ}Ç“¬zÚ¤÷¹}MÔgÁœàËš] z?$€iö²/ }C›ôÎwGòIg†ßÛ…wÂ]±©w\Ýÿ¸7¿“=c‰·ûáÞ8¹íMñ€G|ÚïV8Oä­ì€"¿„&2~r”³Üá“z¹¿{ñzç›Þ*ß8ìržOŽ×3Çù 2>t˜Ð|Ü¿8Ð%Î *¸œèÎFùAÞsŒŒœß› zÖoŒ@Ì[ëX:ÉkÞð¯=è[ÿz7¼Mu¶¿á»Ó³Îrœ“½ì1ÏøÙÇÞï»Ýîw¯N±Û^õöüÿ¨ï‡G|â¿xÆß[?;|äÓ )6Þò—Ç|扮ò¸¢Yò9­á5óE5‡´-JØ9^÷Í\å@ÚèïÞ ãúó·ƒRwÅ¿ÐÒDÝY Ll@ÀxRîú¬¯fWÈÒO€.ဨ°EöZï<î Ï<ëË>øÝš`¦­e{ßÃÏ{Ùq^U’9ý×§=äµ?.P^îˆW9€cõ_B ˆ€R½c½à³„‚¸äb1ˆîk½÷K¼ì£¿x=„¿ (Ë! $'½Ø·ÞÀ4<š9ŠÛƒJIÑ„=¸>×{¼›@íÓ½Êc<•KˆFÙž}»#ÿ•WˆŠÄp:Ä ŠÁ¶Á…¨˜kðnú@<¸´< |A&)< \A²:œM°' `€ Ã#›%›,P£X €@‘@oq8\ÁøsÁ)ü¼¼¿¾S¹8€À⛽€2šb—@¹€¡€A2œ€8t€3¬y!‰ ‰…›P0Šû޼;)ÄCd¨@Ä<•s‹òÑ ÐhPš•ñ”8)¹6à-å“€ªhƒð–†©E! 
6É‘+Ü‹ÚKÅùÓÃÆÓ„J↙AK¸™` 0i—Ì€®ÉŒ89”Ú >ÿ@½äû“ßË›v¹1³D¼±‚©Ós ¶à•æx•Ø[E€‘™€XˆùÃ&c©x€ñSÃ0€X›Êº‚’Ø€Q) œÍð‘¡ÃTøÈcèGeÉÏñ£à žñØ%0&%È,ÆÑq;È ¥|)y”­€R¼B;ôÈ©ä¹ì=ÿËÓx—3LÉldI`ØI¸Èݨ™|2± ¨”ÚIõÿA‰Ÿô€Óa ÖÑnðzù5ÊÆ£ð<ºœ–*ÜH˻ʳT¹ñ0£„,§Ì†£99Ë©™Å1Î š3ù–:À€Š9€óÈ„¿ŽÍȳËÄÓ*1ˆ_ø¨@I§ÉÆÈª 80‚¸œ`ðÉ~hœËÍ ¨˜¡Ê†ë„¦ÉÌ•›I>δ.t˜€ŽßôLÐÎV¨Ê¸´€Ô„ A:€·hMóë&Ò„óÑ€‘"/A"¯¸€ÜÌm¢?4EÓÎ÷ÉÅ’A㼄̨•côÊnèË•¬NéL€(ñAëÔ¡JBƒ»{ QKOÉJ È PY¡²Å!/­hPìÿsOMî+MhœO¬”‰×€ àÊ” >o ÙÜ€ªÒ €½‰I×89+€tÊ5ÎÕѺ”Ð=L»{y Å#¬¿ñËÎE‰5 h%æTQ4 L8m—µ‰¨` €ÓŸ Ï=Õ¸8¥™ì  XœþÀR€TQmB…?CU‘‹ "#EI¢iÈÈ*‰a”•ë¦ÿyÔ¤AMPeFeÕ.½ËÝ$€äº+)€½Q‹ ¤è¤†ƒÕ,°€s $lWëÔՠɶ¡Ä°†in²œfQ˜[‘Öªpˆc¥·dEWE`Vﳄl#quh爡 ¾j›S›éÛ‡6q€\€>‘`Ô´ÊP­X]#ÕÙsH–h4£l 6Á®Øq¯Z™ÉˆÎ¤y€)É'Gi“€Z,‡ †y–e©™ G‘Úæ(àD<Š­Ù7¸Ø´%´’Øš„€•õ ›±ÿ±¶%KP ‡! † p ³‘P‰-Uš[E»Yõ#“MeÏ`ÿ°ŽÙÂM‚€™D#SØÃ޼KHÄMšŽzÄØ›I‡óøî°ëظô…öH,xJ¨ ÔÂEƒ²]ÁHZ:SÒ–K*ðGX‹s<–à [ÜÁ=WÙ-µÃí?(¸¤"<¹!Ür1 ˬ7ê6ƒ“6°…]ä­ÒôG´ó;÷£·Xûަið…@œ+^ü#\ñ]0å½÷ƒ_Šs>æ3_à>ák½ô3ßøØ¥ß4 Ýò=¼ ø={4PðUàRmù;àîzFŽà ö¾X4à 6ƒÖಠ ðÂî½ùõ໲_fáÛ&ná=žÿ½CÖ(ûƒ`æaÍ[Æá0XEL¨å3áL¨L @¿#ÀlIb žA"žàb^Òá$~bFâ,¾_"Vâ#ŽÆ\â*ö‚ÓÂ0 tâ¶)@$΄6vã6~â‘}ã8†c6v?"Fc4Þß&Ζ7öã?N9'¶ãÕ`*&c/ŠÁK(=¾Aþã9VâÕèc/öã'.bIÆãEæâ›£äG†dLîd:¦±>ä.0c'æÀIvcMæÀVvåWänæßÜâ\–e\žæUfækf`Fã¶™1},æÿ0ã¶Áæq&çe¦dm.çtVçköb_ àoî¢D¦æu¦g_¾åy®ç|VgUãbg}è€èÎåa†çs’g‚Vè…fhfåØ=än艦h‚Æ€â2äƒ&ž„®èŽöèu~hÞ‰þè’6é^¾è¨$f‘–Ž>é—~éi’†éšöè”Îh–î% Ud›ö銖i¦éŸ&j‹Æè•Öiªqé¢fê€êƒꦖj>ê¤6fžÆç©Öjl~jxŽê­ks®j«îŸ¥ë³öeM‚h2þj´vkœFj²æ³vëºfURþ涮밆k¹æºÞ볾뵮b½l­îk¿ÿÞh¬>ì½l–6ìÆnêÄVì–flÉFëÇžéö8cÌì±®ìÈlÏžlµ†lÎÎjÒ.jÊm¥æiÕækÓÞlkgØžjÖníºlÛ–jÍjÔæíÛ¶ÏœÎm%9æàîmÙþmk§qÂ…WÆ…d’îé¦nçveê¾îæ®îíŽn^~nîoïžn Ä+QéâVjÓá×ñîåïïíÎnìnò&ï÷®îevoûfïïÖnþÆ@ ¼R>­yÑï7ðGðWpû>Ð nôö®È ×§ð ·ð /ðÿÀkpÇðñGð净ðú€ØgñwñdÚ£déÿÅyñÇñ §±’´÷¨ ò!×ïÍ¥€?h©¡"ò&wréþ\á]ï@L¥'Çr"×Drxö€ H€èï,'s‡  òŽñMâŽ2wóÏ,Ø€.‡ç¢dž1óvÖèwU—”R?&‡/óéŠkw«–ÅT‘†Çxg`Dœó‰Wuÿ`7ù‘qk”~0we÷ßÀ–wy ÇÀRÀxGys"?ÄœÏy o‘¼ ¨“WìoWЄ%ú¢qŠñ•îèøŸ¿9p¢|àŽ§‡z?P¤˜Çª÷kå ®ïzî¨ è ˜¬™·zto¸êM@û´¿o%k••õùÖÆzùÈ’ ûîŠ$üÂÇÄ_ÿò¯HL ré¦Å?üÃWüÅOüÉïÇ·üÄG|Ågüq:P¨øðAø¸w/·’‹Jti‘É'üȧ|Öoüdâ|ÍgýÎýÈçnØÇ|Û‡}ψ‚H€#ûÐVŽðXyõؿﻇrçòù¾÷€ ‘w9J`€{ÒŸŒq3‰Üoç×yæßsç¿ö€xË´`€ €áoío¿è"1•ûpÒ;¥ÿú·ÿûÇÿü×ÿýçÿþ÷  P&’qXD*MçJ§ÔªõŠÍj·Ü®÷ ‹Çä2×ÃÑ`*…!Ñ€H&ºýŽÏë÷ü¾ÿ?Dô°ppÀ 
Q±ñdÖèø)iÿÕ±‘qQ!á €`p ° :JZjzŠšªºÊªªÀi€ Ð°„¡±8™«»ËÛëûë…fyAÑ+Ûª¼Ìܬ¬Pˆ°à€hËá¬½Í \™a1ñàÀàŒž®¾ÊÀÐð1aa‹ÛmŸ¯O&|Y1þ A»u ¦càž:µêÕ¬[»~ ;¶ìÙ´kÛ¾;·îݼ{ûþ <¸ðáÄ‹?Ž<¹òåÌ›;=ºôéÔ«[¿Ž=»öíÜ»{ÿ>¼øñäË›?>½úõìÛ»?¾üùôëÛ¿?¿þýüûûÿ`€H`ˆ`‚ .È`ƒ>a„NHa…^ˆa†nÈa‡~bˆ"ŽHb‰&žˆbŠ*®Èb‹.¾cŒ2ÎHc6ÞˆcŽ:îÈc>þdBId‘F‰d’J.Éd“N> e”RNIe•V^‰e–Z/nÉe—^~ f˜bŽIf™fž‰fšj®Éf›n¾ gœrÎIgvÞ‰gžzîÉgŸö;barbican-2.0.0/doc/source/api/0000775000567000056710000000000012701406024017225 5ustar jenkinsjenkins00000000000000barbican-2.0.0/doc/source/api/index.rst0000664000567000056710000000076712701405673021111 0ustar jenkinsjenkins00000000000000************************** Barbican API Documentation ************************** User Guide ########## API guide docs are built to: http://developer.openstack.org/api-guide/key-manager/ API Reference ############# .. toctree:: :maxdepth: 1 ./reference/secrets ./reference/secret_types ./reference/secret_metadata ./reference/containers ./reference/acls ./reference/certificates ./reference/cas ./reference/quotas ./reference/consumers ./reference/orders barbican-2.0.0/doc/source/api/reference/0000775000567000056710000000000012701406024021163 5ustar jenkinsjenkins00000000000000barbican-2.0.0/doc/source/api/reference/cas.rst0000664000567000056710000013033312701405673022477 0ustar jenkinsjenkins00000000000000**************************************** Certificates Authorities API - Reference **************************************** Barbican provides an API to interact with certificate authorities (CAs). For an introduction to CAs and how Barbican manages them, see the `Certificate Authorities User's Guide `__. Understanding the following concepts, explained in the user's guide, is important to understanding how to use this API. - Certificate Authorities - Subordinate Certificate Authorities - Project CAs - Preferred CAs - Global Preferred CAs This document will focus on the details of the Barbican /v1/cas REST API. GET /v1/cas ########### Any user can request a list of CAs that may be used. 
Depending on the settings for the user's project, the returned list may be filtered. If a project has project CAs configured, the list will only contain only the project CAs and the subordinate CAs for that project. If not, it will contain all of the configured CAs and none of the subordinate CAs owned by other projects. .. _get_cas_request_response: Request/Response: ***************** .. code-block:: javascript Request: GET /v1/cas Headers: X-Auth-Token: Accept: application/json Response: HTTP/1.1 200 OK Content-Type: application/json {"cas": ["http://localhost:9311/v1/cas/9277c4b4-2c7a-4612-a693-1e738a83eb54", "http://localhost:9311/v1/cas/d9e853eb-aea4-4002-9be7-78665062f393"], "total": 2} .. _get_cas_parameters: Parameters ********** +--------------+---------+----------------------------------------------------------------+ | Name | Type | Description | +==============+=========+================================================================+ | offset | integer | The starting index within the total list of the project | | | | CAs that you would like to receive. | +--------------+---------+----------------------------------------------------------------+ | limit | integer | The maximum number of records to return. | +--------------+---------+----------------------------------------------------------------+ | plugin_name | string | Filter the returned list of CAs based on plugin name | +--------------+---------+----------------------------------------------------------------+ | plugin_id | string | Filter the returned list of CAs based on plugin id | +--------------+---------+----------------------------------------------------------------+ .. 
_get_cas_response_attributes: Response Attributes ******************* +----------------+---------+--------------------------------------------------------------+ | Name | Type | Description | +================+=========+==============================================================+ | cas | list | A list of CA references | +----------------+---------+--------------------------------------------------------------+ | total | integer | The total number of configured project CAs records. | +----------------+---------+--------------------------------------------------------------+ | next | string | A HATEOAS url to retrieve the next set of CAs based on | | | | the offset and limit parameters. This attribute is only | | | | available when the total number of secrets is greater than | | | | offset and limit parameter combined. | +----------------+---------+--------------------------------------------------------------+ | previous | string | A HATEOAS url to retrieve the previous set of CAs based | | | | on the offset and limit parameters. This attribute is only | | | | available when the request offset is greater than 0. | +----------------+---------+--------------------------------------------------------------+ .. _get_cas_status_codes: HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 200 | Successful Request | +------+-----------------------------------------------------------------------------+ | 401 | Authentication error. Missing or invalid X-Auth-Token. 
| +------+-----------------------------------------------------------------------------+ | 403 | The user was authenticated, but is not authorized to perform this action | +------+-----------------------------------------------------------------------------+ GET /v1/cas/all ############### A project admin can request a list of CAs that may be used. This returned list will include root certificates, as well as CAs assigned to the project and subCAs created for this project. This will allow a project admin to find all CAs that his project could have access to, so he can manage his project CA list. .. _get_cas_all_request_response: Request/Response: ***************** .. code-block:: javascript Request: GET /v1/cas/all Headers: X-Auth-Token: Accept: application/json Response: HTTP/1.1 200 OK Content-Type: application/json {"cas": ["http://localhost:9311/v1/cas/9277c4b4-2c7a-4612-a693-1e738a83eb54", "http://localhost:9311/v1/cas/d9e853eb-aea4-4002-9be7-78665062f393"], "total": 2} .. _get_cas_all_parameters: Parameters ********** +--------------+---------+----------------------------------------------------------------+ | Name | Type | Description | +==============+=========+================================================================+ | offset | integer | The starting index within the total list of the project | | | | CAs that you would like to receive. | +--------------+---------+----------------------------------------------------------------+ | limit | integer | The maximum number of records to return. | +--------------+---------+----------------------------------------------------------------+ | plugin_name | string | Filter the returned list of CAs based on plugin name | +--------------+---------+----------------------------------------------------------------+ | plugin_id | string | Filter the returned list of CAs based on plugin id | +--------------+---------+----------------------------------------------------------------+ .. 
_get_cas_all_response_attributes: Response Attributes ******************* +----------------+---------+--------------------------------------------------------------+ | Name | Type | Description | +================+=========+==============================================================+ | cas | list | A list of CA references | +----------------+---------+--------------------------------------------------------------+ | total | integer | The total number of configured project CAs records. | +----------------+---------+--------------------------------------------------------------+ | next | string | A HATEOAS url to retrieve the next set of CAs based on | | | | the offset and limit parameters. This attribute is only | | | | available when the total number of secrets is greater than | | | | offset and limit parameter combined. | +----------------+---------+--------------------------------------------------------------+ | previous | string | A HATEOAS url to retrieve the previous set of CAs based | | | | on the offset and limit parameters. This attribute is only | | | | available when the request offset is greater than 0. | +----------------+---------+--------------------------------------------------------------+ .. _get_cas_all_status_codes: HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 200 | Successful Request | +------+-----------------------------------------------------------------------------+ | 401 | Authentication error. Missing or invalid X-Auth-Token. 
| +------+-----------------------------------------------------------------------------+ | 403 | The user was authenticated, but is not authorized to perform this action | +------+-----------------------------------------------------------------------------+ GET /v1/cas/{CA_ID} ################### Any user can request details about a CA to which he has permissions. .. _get_cas_caid_request_response: Request/Response: ***************** .. code-block:: javascript Request: GET /v1/cas/9277c4b4-2c7a-4612-a693-1e738a83eb54 Headers: X-Auth-Token: Accept: application/json Response: HTTP/1.1 200 OK Content-Type: application/json {"status": "ACTIVE", "updated": "2015-09-22T05:25:35.305647", "created": "2015-09-22T05:25:35.305647", "plugin_name": "barbican.plugin.snakeoil_ca.SnakeoilCACertificatePlugin", "meta": [{"ca_signing_certificate": "-----BEGIN CERTIFICATE----- MIIC+zCCAeOgAwIBAgIBATANBgkqhkiG9w0BAQsFADA1MR0wGwYDVQQDDBRTbmFr ZW9pbCBDZXJ0aWZpY2F0ZTEUMBIGA1UECgwLZXhhbXBsZS5jb20wHhcNMTUwOTI0 MDM0MTI4WhcNMTUwOTI0MDQ0MjE4WjA1MR0wGwYDVQQDDBRTbmFrZW9pbCBDZXJ0 aWZpY2F0ZTEUMBIGA1UECgwLZXhhbXBsZS5jb20wggEiMA0GCSqGSIb3DQEBAQUA A4IBDwAwggEKAoIBAQC2OonnytCeizC+2FJlS7rUOjrIukKndwltXex46YUem09T y2+5ZNvl1QypUN1JXZSjUT27oG9jUTsNUzLHuJe8dW6p3z37WNpBCJY5BOjoDFG9 ce5ZrzucVs6QDnsuqD9NqtiECVFNg1qQjVvg9n5I0pl81c0mEfjWwqgOJ303W0IY KnisMByXewyPN57cZuTJQFhUT3fvxF5W1MM03fqILKELL0WE9ALeTThHR9fJRras QgrJYNnb20RwUZv5hqP21iwsaq3CV2+KODR4IlgglFXRN4gfIzZ9cfst95yy0nhV pcf6+IOycYZP7enTEU4e1jtfNn40yQPLlKei9/jrAgMBAAGjFjAUMBIGA1UdEwEB /wQIMAYBAf8CAQUwDQYJKoZIhvcNAQELBQADggEBAEn0wkHsMN7vvDShFLKlpE+1 twrIqSekgqb5wdAId9sKblXQTojI6caiImCleFVzhKxQvuoS31dpg7hh2zw+I8P1 U0zvYrJlM8HVunHkWIdFuEuP7hrDnTA2NZbEN7EBSDksNtC+T+hcZcYcIs3hpV7p PdjhjU9D4IcFd7ooVra7Lt2q3zl2XZ7TCzkIWV9jqCBNrlf7Q6QkLWe41k6kIJUT bl0HHqk9cRxr9hkwMKTjIO6G6gbPepqOuyEym8qjyVckRCQN8W+HUI3FV/XBcDk5 FkhWnqzJ6aTjBQD3WxOtnhm421dERi60RHdTInK6l6BKRUstmPyc3nfMouBarH8= -----END CERTIFICATE----- "}}, {"intermediates": "-----BEGIN PKCS7----- 
MIIDLAYJKoZIhvcNAQcCoIIDHTCCAxkCAQExADALBgkqhkiG9w0BBwGgggL/MIIC +zCCAeOgAwIBAgIBATANBgkqhkiG9w0BAQsFADA1MR0wGwYDVQQDDBRTbmFrZW9p bCBDZXJ0aWZpY2F0ZTEUMBIGA1UECgwLZXhhbXBsZS5jb20wHhcNMTUwOTI0MDM0 MTI4WhcNMTUwOTI0MDQ0MjE4WjA1MR0wGwYDVQQDDBRTbmFrZW9pbCBDZXJ0aWZp Y2F0ZTEUMBIGA1UECgwLZXhhbXBsZS5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IB DwAwggEKAoIBAQC2OonnytCeizC+2FJlS7rUOjrIukKndwltXex46YUem09Ty2+5 ZNvl1QypUN1JXZSjUT27oG9jUTsNUzLHuJe8dW6p3z37WNpBCJY5BOjoDFG9ce5Z rzucVs6QDnsuqD9NqtiECVFNg1qQjVvg9n5I0pl81c0mEfjWwqgOJ303W0IYKnis MByXewyPN57cZuTJQFhUT3fvxF5W1MM03fqILKELL0WE9ALeTThHR9fJRrasQgrJ YNnb20RwUZv5hqP21iwsaq3CV2+KODR4IlgglFXRN4gfIzZ9cfst95yy0nhVpcf6 +IOycYZP7enTEU4e1jtfNn40yQPLlKei9/jrAgMBAAGjFjAUMBIGA1UdEwEB/wQI MAYBAf8CAQUwDQYJKoZIhvcNAQELBQADggEBAEn0wkHsMN7vvDShFLKlpE+1twrI qSekgqb5wdAId9sKblXQTojI6caiImCleFVzhKxQvuoS31dpg7hh2zw+I8P1U0zv YrJlM8HVunHkWIdFuEuP7hrDnTA2NZbEN7EBSDksNtC+T+hcZcYcIs3hpV7pPdjh jU9D4IcFd7ooVra7Lt2q3zl2XZ7TCzkIWV9jqCBNrlf7Q6QkLWe41k6kIJUTbl0H Hqk9cRxr9hkwMKTjIO6G6gbPepqOuyEym8qjyVckRCQN8W+HUI3FV/XBcDk5FkhW nqzJ6aTjBQD3WxOtnhm421dERi60RHdTInK6l6BKRUstmPyc3nfMouBarH+hADEA -----END PKCS7----- "}, {"description": "Certificate Authority - Snakeoil CA"}, {"name": "Snakeoil CA"}], "ca_id": "9277c4b4-2c7a-4612-a693-1e738a83eb54", "plugin_ca_id": "Snakeoil CA", "expiration": "2015-09-23T05:25:35.300633"} .. _get_cas_caid_response_attributes: Response Attributes ******************* +------------------------+---------+--------------------------------------------------------------+ | Name | Type | Description | +========================+=========+==============================================================+ | status | list | Status of the CA | +------------------------+---------+--------------------------------------------------------------+ | updated | time | Date and time CA was last updated . 
| +------------------------+---------+--------------------------------------------------------------+ | created | time | Date and time CA was created | +------------------------+---------+--------------------------------------------------------------+ | plugin_name | string | Name of certificate plugin associated with this CA | +------------------------+---------+--------------------------------------------------------------+ | meta | list | List of additional information for this CA | +------------------------+---------+--------------------------------------------------------------+ | ca_signing_certificate | PEM | Part of meta, the CA signing certificate for this CA | +------------------------+---------+--------------------------------------------------------------+ | intermediates | pkcs7 | Part of meta, the intermediate certificate chain for this CA | +------------------------+---------+--------------------------------------------------------------+ | description | string | Part of meta, a description given to the CA | +------------------------+---------+--------------------------------------------------------------+ | name | string | Part of meta, a given name for a CA | +------------------------+---------+--------------------------------------------------------------+ | ca_id | string | ID of this CA | +------------------------+---------+--------------------------------------------------------------+ | plugin_ca_id | string | ID of the plugin | +------------------------+---------+--------------------------------------------------------------+ | expiration | time | Expiration date of the CA | +------------------------+---------+--------------------------------------------------------------+ .. 
_get_cas_caid_status_codes: HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 200 | Successful Request | +------+-----------------------------------------------------------------------------+ | 401 | Authentication error. Missing or invalid X-Auth-Token. | +------+-----------------------------------------------------------------------------+ | 403 | The user was authenticated, but is not authorized to perform this action | +------+-----------------------------------------------------------------------------+ GET /v1/cas/{CA_ID}/cacert ########################## Any user can request the CA signing certificate of a CA to which he has permissions. The format of the returned certificate will be PEM. .. _get_cas_caid_cacert_request_response: Request/Response: ***************** .. code-block:: javascript Request: GET /v1/cas/9277c4b4-2c7a-4612-a693-1e738a83eb54/cacert Headers: X-Auth-Token: Accept: */* Response: HTTP/1.1 200 OK Content-Type: text/html -----BEGIN CERTIFICATE----- MIIC+zCCAeOgAwIBAgIBATANBgkqhkiG9w0BAQsFADA1MR0wGwYDVQQDDBRTbmFr ZW9pbCBDZXJ0aWZpY2F0ZTEUMBIGA1UECgwLZXhhbXBsZS5jb20wHhcNMTUwOTI0 MDM0MTI4WhcNMTUwOTI0MDQ0MjE4WjA1MR0wGwYDVQQDDBRTbmFrZW9pbCBDZXJ0 aWZpY2F0ZTEUMBIGA1UECgwLZXhhbXBsZS5jb20wggEiMA0GCSqGSIb3DQEBAQUA A4IBDwAwggEKAoIBAQC2OonnytCeizC+2FJlS7rUOjrIukKndwltXex46YUem09T y2+5ZNvl1QypUN1JXZSjUT27oG9jUTsNUzLHuJe8dW6p3z37WNpBCJY5BOjoDFG9 ce5ZrzucVs6QDnsuqD9NqtiECVFNg1qQjVvg9n5I0pl81c0mEfjWwqgOJ303W0IY KnisMByXewyPN57cZuTJQFhUT3fvxF5W1MM03fqILKELL0WE9ALeTThHR9fJRras QgrJYNnb20RwUZv5hqP21iwsaq3CV2+KODR4IlgglFXRN4gfIzZ9cfst95yy0nhV pcf6+IOycYZP7enTEU4e1jtfNn40yQPLlKei9/jrAgMBAAGjFjAUMBIGA1UdEwEB /wQIMAYBAf8CAQUwDQYJKoZIhvcNAQELBQADggEBAEn0wkHsMN7vvDShFLKlpE+1 twrIqSekgqb5wdAId9sKblXQTojI6caiImCleFVzhKxQvuoS31dpg7hh2zw+I8P1 
U0zvYrJlM8HVunHkWIdFuEuP7hrDnTA2NZbEN7EBSDksNtC+T+hcZcYcIs3hpV7p PdjhjU9D4IcFd7ooVra7Lt2q3zl2XZ7TCzkIWV9jqCBNrlf7Q6QkLWe41k6kIJUT bl0HHqk9cRxr9hkwMKTjIO6G6gbPepqOuyEym8qjyVckRCQN8W+HUI3FV/XBcDk5 FkhWnqzJ6aTjBQD3WxOtnhm421dERi60RHdTInK6l6BKRUstmPyc3nfMouBarH8= -----END CERTIFICATE----- .. _get_cas_caid_cacert_status_codes: HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 200 | Successful Request | +------+-----------------------------------------------------------------------------+ | 401 | Authentication error. Missing or invalid X-Auth-Token. | +------+-----------------------------------------------------------------------------+ | 403 | The user was authenticated, but is not authorized to perform this action | +------+-----------------------------------------------------------------------------+ GET /v1/cas/{CA_ID}/intermediates ################################# Any user can request the certificate chain of a CA to which he has permissions. The format of the returned chain will be PKCS#7. .. _get_cas_caid_intermediates_request_response: Request/Response: ***************** .. 
code-block:: javascript Request: GET /v1/cas/9277c4b4-2c7a-4612-a693-1e738a83eb54/intermediates Headers: X-Auth-Token: Accept: */* Response: HTTP/1.1 200 OK Content-Type: text/html -----BEGIN PKCS7----- MIIDLAYJKoZIhvcNAQcCoIIDHTCCAxkCAQExADALBgkqhkiG9w0BBwGgggL/MIIC +zCCAeOgAwIBAgIBATANBgkqhkiG9w0BAQsFADA1MR0wGwYDVQQDDBRTbmFrZW9p bCBDZXJ0aWZpY2F0ZTEUMBIGA1UECgwLZXhhbXBsZS5jb20wHhcNMTUwOTI0MDM0 MTI4WhcNMTUwOTI0MDQ0MjE4WjA1MR0wGwYDVQQDDBRTbmFrZW9pbCBDZXJ0aWZp Y2F0ZTEUMBIGA1UECgwLZXhhbXBsZS5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IB DwAwggEKAoIBAQC2OonnytCeizC+2FJlS7rUOjrIukKndwltXex46YUem09Ty2+5 ZNvl1QypUN1JXZSjUT27oG9jUTsNUzLHuJe8dW6p3z37WNpBCJY5BOjoDFG9ce5Z rzucVs6QDnsuqD9NqtiECVFNg1qQjVvg9n5I0pl81c0mEfjWwqgOJ303W0IYKnis MByXewyPN57cZuTJQFhUT3fvxF5W1MM03fqILKELL0WE9ALeTThHR9fJRrasQgrJ YNnb20RwUZv5hqP21iwsaq3CV2+KODR4IlgglFXRN4gfIzZ9cfst95yy0nhVpcf6 +IOycYZP7enTEU4e1jtfNn40yQPLlKei9/jrAgMBAAGjFjAUMBIGA1UdEwEB/wQI MAYBAf8CAQUwDQYJKoZIhvcNAQELBQADggEBAEn0wkHsMN7vvDShFLKlpE+1twrI qSekgqb5wdAId9sKblXQTojI6caiImCleFVzhKxQvuoS31dpg7hh2zw+I8P1U0zv YrJlM8HVunHkWIdFuEuP7hrDnTA2NZbEN7EBSDksNtC+T+hcZcYcIs3hpV7pPdjh jU9D4IcFd7ooVra7Lt2q3zl2XZ7TCzkIWV9jqCBNrlf7Q6QkLWe41k6kIJUTbl0H Hqk9cRxr9hkwMKTjIO6G6gbPepqOuyEym8qjyVckRCQN8W+HUI3FV/XBcDk5FkhW nqzJ6aTjBQD3WxOtnhm421dERi60RHdTInK6l6BKRUstmPyc3nfMouBarH+hADEA -----END PKCS7----- .. _get_cas_caid_intermediates_status_codes: HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 200 | Successful Request | +------+-----------------------------------------------------------------------------+ | 401 | Authentication error. Missing or invalid X-Auth-Token. 
| +------+-----------------------------------------------------------------------------+ | 403 | The user was authenticated, but is not authorized to perform this action | +------+-----------------------------------------------------------------------------+ POST /v1/cas ############ A project admin can request to create a new subordinate CA for his project. .. _post_cas_request_response: Request/Response: ***************** .. code-block:: javascript Request: POST /v1/cas Headers: X-Auth-Token: Content-type: application/json Accept: application/json {"name": "Subordinate CA", "description": "Test Snake Oil Subordinate CA", "parent_ca_ref": "http://localhost:9311/v1/cas/d9e853eb-aea4-4002-9be7-78665062f393", "subject_dn": "CN=Subordinate CA, O=example.com"} Response: HTTP/1.1 201 OK Content-Type: application/json {"ca_ref": "http://localhost:9311/v1/cas/a031dcf4-2e2a-4df1-8651-3b424eb6174e"} .. _post_cas_request_attributes: Request Attributes ****************** +----------------+---------+--------------------------------------------------------------+ | Name | Type | Description | +================+=========+==============================================================+ | name | string | A name that can be used to reference this subCA | +----------------+---------+--------------------------------------------------------------+ | description | string | A description to be stored with this subCA . | +----------------+---------+--------------------------------------------------------------+ | parent_ca_ref | string | A URI referencing the parent CA to be used to issue the | | | | subordinate CA's signing certificate | +----------------+---------+--------------------------------------------------------------+ | subject_dn | string | The subject distinguished name corresponding to this subCA | +----------------+---------+--------------------------------------------------------------+ .. 
_post_cas_response_attributes: Response Attributes ******************* +----------------+---------+--------------------------------------------------------------+ | Name | Type | Description | +================+=========+==============================================================+ | ca_ref | string | A URL that references the created subCA | +----------------+---------+--------------------------------------------------------------+ .. _post_cas_status_codes: HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 201 | Successful Request | +------+-----------------------------------------------------------------------------+ | 400 | Bad request. The content or format of the request is wrong. | +------+-----------------------------------------------------------------------------+ | 401 | Authentication error. Missing or invalid X-Auth-Token. | +------+-----------------------------------------------------------------------------+ | 403 | The user was authenticated, but is not authorized to perform this action | +------+-----------------------------------------------------------------------------+ | 404 | The requested entity was not found | +------+-----------------------------------------------------------------------------+ DELETE /v1/cas/{CA_ID} ###################### A project administrator can delete a subCA that has been created for his project. Root CAs that are defined in the barbican.conf configuration file can not be deleted. If there is more than one project CA, the preferred CA can not be deleted until another project CA has been selected as preferred. .. _delete_cas_caid_request_response: Request/Response: ***************** .. code-block:: javascript Request: DELETE /v1/cas/9277c4b4-2c7a-4612-a693-1e738a83eb54 Headers: X-Auth-Token: Accept: */* Response: HTTP/1.1 204 OK .. 
_delete_cas_caid_status_codes: HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 204 | Successful Request | +------+-----------------------------------------------------------------------------+ | 401 | Authentication error. Missing or invalid X-Auth-Token. | +------+-----------------------------------------------------------------------------+ | 403 | The user was authenticated, but is not authorized to perform this action. | | | This error can occur if a request is made to delete a root CA. | +------+-----------------------------------------------------------------------------+ | 404 | The requested entity was not found | +------+-----------------------------------------------------------------------------+ | 409 | The requested CA can not be delete because it is currently set as the | | | project preferred CA. | +------+-----------------------------------------------------------------------------+ GET /v1/cas/preferred ##################### Any user can request a reference to the preferred CA assigned to his project. When a preferred CA is set for a project, that is the CA that will be used when a user of that project requests a certificate and does not specify a CA. For more information, consult the `Certificate Authorities User's Guide `__ and the `Certificates API User's Guide `__. .. _get_cas_preferred_request_response: Request/Response: ***************** .. code-block:: javascript Request: GET /v1/cas/preferred Headers: X-Auth-Token: Accept: application/json Response: HTTP/1.1 200 OK Content-Type: application/json {"ca_ref": "http://localhost:9311/v1/cas/9277c4b4-2c7a-4612-a693-1e738a83eb54"} .. 
_get_cas_preferred_response_attributes: Response Attributes ******************* +----------------+---------+--------------------------------------------------------------+ | Name | Type | Description | +================+=========+==============================================================+ | ca_ref | string | A URL that references the preferred CA | +----------------+---------+--------------------------------------------------------------+ .. _get_cas_preferred_status_codes: HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 200 | Successful Request | +------+-----------------------------------------------------------------------------+ | 401 | Authentication error. Missing or invalid X-Auth-Token. | +------+-----------------------------------------------------------------------------+ | 403 | The user was authenticated, but is not authorized to perform this action | +------+-----------------------------------------------------------------------------+ | 404 | Not found. No preferred CA has been defined. | +------+-----------------------------------------------------------------------------+ POST /v1/cas/{CA_ID}/add-to-project ################################### A project administrator can add a CA to his project list. The CA must be a root CA or a subCA created by that project. When a project administrator adds a CA to the project list, he limits the number of CA that project users can use; they will only be able to use CAs that are project CAs or subCAs of the project. The first created project CA becomes the project's preferred CA by default. For more information, consult the `Certificate Authorities User's Guide `__ and the `Certificates API User's Guide `__. .. _post_cas_caid_add_request_response: Request/Response: ***************** .. 
code-block:: javascript Request: POST /v1/cas/9277c4b4-2c7a-4612-a693-1e738a83eb54/add-to-project Headers: X-Auth-Token: Accept: */* Response: HTTP/1.1 204 OK .. _post_cas_caid_add_status_codes: HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 204 | Successful Request | +------+-----------------------------------------------------------------------------+ | 401 | Authentication error. Missing or invalid X-Auth-Token. | +------+-----------------------------------------------------------------------------+ | 403 | The user was authenticated, but is not authorized to perform this action | +------+-----------------------------------------------------------------------------+ | 404 | The requested entity was not found | +------+-----------------------------------------------------------------------------+ POST /v1/cas/{CA_ID}/remove-from-project ######################################## A project administrator can remove a CA from his project list. If a project CA requested for removal is also the preferred CA for the project, and there are other project CAs, then this command will fail. The project administrator must first set a new preferred CA before deleting this CA. .. _post_cas_caid_remove_request_response: Request/Response: ***************** .. code-block:: javascript Request: POST /v1/cas/9277c4b4-2c7a-4612-a693-1e738a83eb54/remove-from-project Headers: X-Auth-Token: Accept: */* Response: HTTP/1.1 204 OK .. 
_post_cas_caid_remove_status_codes: HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 204 | Successful Request | +------+-----------------------------------------------------------------------------+ | 401 | Authentication error. Missing or invalid X-Auth-Token. | +------+-----------------------------------------------------------------------------+ | 403 | The user was authenticated, but is not authorized to perform this action. | +------+-----------------------------------------------------------------------------+ | 404 | The requested entity was not found or not part of the project's CA | | | list | +------+-----------------------------------------------------------------------------+ | 409 | Conflict. The remove action was blocked because the requested | | | CA is set as the project preferred CA. The user must set another CA | | | to be the preferred CA to remedy this error. | +------+-----------------------------------------------------------------------------+ GET /v1/cas/{CA_ID}/projects ############################ A service administrator can request a list of project who have the specified CA as part of their project CA list. .. _get_cas_caid_projects_request_response: Request/Response: ***************** .. code-block:: javascript Request: GET /v1/cas/9277c4b4-2c7a-4612-a693-1e738a83eb54/projects Headers: X-Auth-Token: Accept: application/json Response: HTTP/1.1 200 OK Content-Type: application/json {"projects": ["4d2f8335-2af8-4a88-851f-2e745bd4860c"]} .. 
_get_cas_caid_projects_response_attributes: Response Attributes ******************* +----------------+---------+--------------------------------------------------------------+ | Name | Type | Description | +================+=========+==============================================================+ | projects | list | A list of project IDs associated with the CA | +----------------+---------+--------------------------------------------------------------+ .. _get_cas_caid_projects_status_codes: HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 200 | Successful Request | +------+-----------------------------------------------------------------------------+ | 401 | Authentication error. Missing or invalid X-Auth-Token. | +------+-----------------------------------------------------------------------------+ | 403 | The user was authenticated, but is not authorized to perform this action | +------+-----------------------------------------------------------------------------+ POST /v1/cas/{CA_ID}/set-preferred ################################## A project administrator can set a CA to be the preferred CA for their project. A preferred CA must first be assigned as a project CA. There can only be one preferred CA for a project. Setting a CA as preferred also removes the preferred setting from any other project CA. .. _post_cas_caid_set_pref_request_response: Request/Response: ***************** .. code-block:: javascript Request: POST /v1/cas/9277c4b4-2c7a-4612-a693-1e738a83eb54/set-preferred Headers: X-Auth-Token: Response: HTTP/1.1 204 No Content ..
_post_cas_caid_set_pref_status_codes: HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 204 | Successful Request | +------+-----------------------------------------------------------------------------+ | 400 | Bad request. The requested CA is not valid to be a preferred CA for this | | | project | +------+-----------------------------------------------------------------------------+ | 401 | Authentication error. Missing or invalid X-Auth-Token. | +------+-----------------------------------------------------------------------------+ | 403 | The user was authenticated, but is not authorized to perform this action | +------+-----------------------------------------------------------------------------+ | 404 | The requested entity was not found | +------+-----------------------------------------------------------------------------+ GET /v1/cas/global-preferred ############################ A service administrator can request a reference to the CA that has been assigned to be the global preferred CA. .. _get_cas_global_preferred_request_response: Request/Response: ***************** .. code-block:: javascript Request: GET /v1/cas/global-preferred Headers: X-Auth-Token: Accept: application/json Response: HTTP/1.1 200 OK Content-Type: application/json {"ca_ref": "http://localhost:9311/v1/cas/9277c4b4-2c7a-4612-a693-1e738a83eb54"} .. _get_cas_global_preferred_response_attributes: Response Attributes ******************* +----------------+---------+--------------------------------------------------------------+ | Name | Type | Description | +================+=========+==============================================================+ | ca_ref | string | A URL that references the global preferred CA | +----------------+---------+--------------------------------------------------------------+ ..
_get_cas_global_preferred_status_codes: HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 200 | Successful Request | +------+-----------------------------------------------------------------------------+ | 401 | Authentication error. Missing or invalid X-Auth-Token. | +------+-----------------------------------------------------------------------------+ | 403 | The user was authenticated, but is not authorized to perform this action | +------+-----------------------------------------------------------------------------+ | 404 | Not found. No global preferred CA has been defined. | +------+-----------------------------------------------------------------------------+ POST /v1/cas/{CA_ID}/set-global-preferred ######################################### A service administrator can set the global preferred CA value. When a global preferred CA is set, that is the CA that will be used when a user requests a certificate and does not specify a CA and his project does not have a project preferred CA. For more information, consult the `Certificate Authorities User's Guide `__ and the `Certificates API User's Guide `__. .. _post_cas_caid_set_global_pref_request_response: Request/Response: ***************** .. code-block:: javascript Request: POST /v1/cas/9277c4b4-2c7a-4612-a693-1e738a83eb54/set-global-preferred Headers: X-Auth-Token: Accept: */* Response: HTTP/1.1 204 OK .. _post_cas_caid_set_global_pref_status_codes: HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 204 | Successful Request | +------+-----------------------------------------------------------------------------+ | 400 | Bad request. 
The requested CA is not valid to be a global preferred CA | +------+-----------------------------------------------------------------------------+ | 401 | Authentication error. Missing or invalid X-Auth-Token. | +------+-----------------------------------------------------------------------------+ | 403 | The user was authenticated, but is not authorized to perform this action | +------+-----------------------------------------------------------------------------+ | 404 | The requested entity was not found | +------+-----------------------------------------------------------------------------+ POST /v1/cas/unset-global-preferred ################################### A service administrator can remove the setting of global preferred CA. .. _post_cas_caid_unset_global_pref_request_response: Request/Response: ***************** .. code-block:: javascript Request: POST /v1/cas/9277c4b4-2c7a-4612-a693-1e738a83eb54/unset-global-preferred Headers: X-Auth-Token: Accept: */* Response: HTTP/1.1 204 OK .. _post_cas_caid_unset_global_pref_status_codes: HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 204 | Successful Request | +------+-----------------------------------------------------------------------------+ | 401 | Authentication error. Missing or invalid X-Auth-Token. 
| +------+-----------------------------------------------------------------------------+ | 403 | The user was authenticated, but is not authorized to perform this action | +------+-----------------------------------------------------------------------------+ | 404 | The requested entity was not found | +------+-----------------------------------------------------------------------------+ barbican-2.0.0/doc/source/api/reference/containers.rst0000664000567000056710000002466412701405674024110 0ustar jenkinsjenkins00000000000000************************** Containers API - Reference ************************** GET /v1/containers ################## Lists a project's containers. Returned containers will be ordered by creation date; oldest to newest. Parameters ********** +--------+---------+------------------------------------------------------------+ | Name | Type | Description | +========+=========+============================================================+ | offset | integer | The starting index within the total list of the containers | | | | that you would like to retrieve. | +--------+---------+------------------------------------------------------------+ | limit | integer | The maximum number of containers to return (up to 100). | | | | The default limit is 10. 
| +--------+---------+------------------------------------------------------------+ Response Attributes ******************* +------------+---------+--------------------------------------------------------+ | Name | Type | Description | +============+=========+========================================================+ | containers | list | Contains a list of dictionaries filled with container | | | | data | +------------+---------+--------------------------------------------------------+ | total | integer | The total number of containers available to the user | +------------+---------+--------------------------------------------------------+ | next | string | A HATEOAS url to retrieve the next set of containers | | | | based on the offset and limit parameters. This | | | | attribute is only available when the total number of | | | | containers is greater than offset and limit parameter | | | | combined. | +------------+---------+--------------------------------------------------------+ | previous | string | A HATEOAS url to retrieve the previous set of | | | | containers based on the offset and limit parameters. | | | | This attribute is only available when the request | | | | offset is greater than 0. | +------------+---------+--------------------------------------------------------+ Request: ******** .. code-block:: javascript GET /v1/containers Headers: X-Auth-Token: Response: ******** .. 
code-block:: javascript { "containers": [ { "consumers": [], "container_ref": "https://{barbican_host}/v1/containers/{uuid}", "created": "2015-03-26T21:10:45.417835", "name": "container name", "secret_refs": [ { "name": "private_key", "secret_ref": "https://{barbican_host}/v1/secrets/{uuid}" } ], "status": "ACTIVE", "type": "generic", "updated": "2015-03-26T21:10:45.417835" } ], "total": 1 } HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 200 | Successful Request | +------+-----------------------------------------------------------------------------+ | 401 | Invalid X-Auth-Token or the token doesn't have permissions to this resource | +------+-----------------------------------------------------------------------------+ GET /v1/containers/{uuid} ######################### Retrieves a single container. Response Attributes ******************* +-------------+--------+---------------------------------------------------------+ | Name | Type | Description | +=============+========+=========================================================+ | name | string | (optional) Human readable name for the container | +-------------+--------+---------------------------------------------------------+ | type | string | Type of container. Options: generic, rsa, certificate | +-------------+--------+---------------------------------------------------------+ | secret_refs | list | A list of dictionaries containing references to secrets | +-------------+--------+---------------------------------------------------------+ Request: ******** .. code-block:: javascript GET /v1/containers/{uuid} Headers: X-Auth-Token: Response: ********* .. 
code-block:: javascript { "type": "generic", "status": "ACTIVE", "name": "container name", "consumers": [], "container_ref": "https://{barbican_host}/v1/containers/{uuid}", "secret_refs": [ { "name": "private_key", "secret_ref": "https://{barbican_host}/v1/secrets/{uuid}" } ], "created": "2015-03-26T21:10:45.417835", "updated": "2015-03-26T21:10:45.417835" } HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 200 | Successful Request | +------+-----------------------------------------------------------------------------+ | 401 | Invalid X-Auth-Token or the token doesn't have permissions to this resource | +------+-----------------------------------------------------------------------------+ | 404 | Container not found or unavailable | +------+-----------------------------------------------------------------------------+ POST /v1/containers ################### Create a container There are three different types of containers that can be created: generic, rsa, and certificate. **Generic** This type of container holds any number of references to secrets. Each secret reference is accompanied by a name. Unlike other container types, no specific restrictions are enforced on the contents name attribute. **RSA** This type of container is designed to hold references to only three different secrets. These secrets are enforced by their accompanying names: public_key, private_key, and private_key_passphrase. **Certificate** This type of container is designed to hold a reference to a certificate and optionally private_key, private_key_passphrase, and intermediates.
Request Attributes ****************** +-------------+--------+-----------------------------------------------------------+ | Name | Type | Description | +=============+========+===========================================================+ | name | string | (optional) Human readable name for identifying your | | | | container | +-------------+--------+-----------------------------------------------------------+ | type | string | Type of container. Options: generic, rsa, certificate | +-------------+--------+-----------------------------------------------------------+ | secret_refs | list | A list of dictionaries containing references to secrets | +-------------+--------+-----------------------------------------------------------+ Request: ******** .. code-block:: javascript POST /v1/containers Headers: X-Auth-Token: Content: { "type": "generic", "name": "container name", "secret_refs": [ { "name": "private_key", "secret_ref": "https://{barbican_host}/v1/secrets/{secret_uuid}" } ] } Response: ********* .. code-block:: javascript { "container_ref": "https://{barbican_host}/v1/containers/{container_uuid}" } HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 201 | Successful creation of the container | +------+-----------------------------------------------------------------------------+ | 401 | Invalid X-Auth-Token or the token doesn't have permissions to this resource | +------+-----------------------------------------------------------------------------+ | 403 | Forbidden. The user has been authenticated, but is not authorized to | | | create a container. This can be based on the user's role or the | | | project's quota.
| +------+-----------------------------------------------------------------------------+ DELETE /v1/containers/{uuid} ############################ Deletes a container Request: ******** .. code-block:: javascript DELETE /v1/containers/{container_uuid} Headers: X-Auth-Token: Response: ********* .. code-block:: javascript 204 No Content HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 204 | Successful deletion of a container | +------+-----------------------------------------------------------------------------+ | 401 | Invalid X-Auth-Token or the token doesn't have permissions to this resource | +------+-----------------------------------------------------------------------------+ | 404 | Container not found or unavailable | +------+-----------------------------------------------------------------------------+ barbican-2.0.0/doc/source/api/reference/acls.rst0000664000567000056710000005737012701405673022664 0ustar jenkinsjenkins00000000000000******************* ACL API - Reference ******************* .. note:: This feature is applicable only when Barbican is used in an authenticated pipeline i.e. integrated with Keystone. .. note:: Currently the access control list (ACL) settings defined for a container are not propagated down to associated secrets. .. warning:: This ACL documentation is work in progress and may change in near future. Secret ACL API =============== .. _get_secret_acl: GET /v1/secrets/{uuid}/acl ########################## Retrieve the ACL settings for a given secret. If no ACL is defined for that secret, then `Default ACL `__ is returned. Request/Response (With ACL defined): ************************************ .. 
code-block:: javascript Request: GET /v1/secrets/{uuid}/acl Headers: X-Auth-Token: {token_id} Response: HTTP/1.1 200 OK { "read":{ "updated":"2015-05-12T20:08:47.644264", "created":"2015-05-12T19:23:44.019168", "users":[ {user_id1}, {user_id2}, ..... ], "project-access":{project-access-flag} } } Request/Response (With no ACL defined): *************************************** .. code-block:: javascript Request: GET /v1/secrets/{uuid}/acl Headers: X-Auth-Token: {token_id} Response: HTTP/1.1 200 OK { "read":{ "project-access": true } } HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 200 | Successful request. | +------+-----------------------------------------------------------------------------+ | 401 | Missing or Invalid X-Auth-Token. Authentication required. | +------+-----------------------------------------------------------------------------+ | 403 | User does not have permission to access this resource. | +------+-----------------------------------------------------------------------------+ | 404 | Secret not found for the given UUID. | +------+-----------------------------------------------------------------------------+ .. _put_secret_acl: PUT /v1/secrets/{uuid}/acl ########################## Create new or replaces existing ACL for a given secret. This call is used to add new ACL for a secret. If the ACL is already set on a secret, this method will replace it with the requested ACL settings. In case of create (first new explicit ACL) or replace existing ACL, 200 is returned in both cases. To delete existing users from an ACL definition, pass empty list [] for `users`. Returns an ACL reference in success case. Attributes ********** The ACL resource detailed in this page allows access to individual secrets to be controlled. 
This access is configured via operations on those secrets. Currently only the 'read' operation (which includes GET REST actions) is supported. +----------------------------+----------+-----------------------------------------------+----------+ | Attribute Name | Type | Description | Default | +============================+==========+===============================================+==========+ | read | parent | ACL data for read operation. | None | | | element | | | +----------------------------+----------+-----------------------------------------------+----------+ | users | [string] | (optional) List of user ids. This needs to be | [] | | | | a user id as returned by Keystone. | | +----------------------------+----------+-----------------------------------------------+----------+ | project-access | boolean | (optional) Flag to mark a secret private so | `true` | | | | that the user who created the secret and | | | | | ``users`` specified in above list can only | | | | | access the secret. Pass `false` to mark the | | | | | secret private. | | +----------------------------+----------+-----------------------------------------------+----------+ Request/Response (Set or Replace ACL): ************************************** .. code-block:: javascript Request: PUT /v1/secrets/{uuid}/acl Headers: Content-Type: application/json X-Auth-Token: {token_id} Body: { "read":{ "users":[ {user_id1}, {user_id2}, ..... ], "project-access":{project-access-flag} } } Response: HTTP/1.1 200 OK {"acl_ref": "https://{barbican_host}/v1/secrets/{uuid}/acl"} HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 200 | Successfully set/replaced secret ACL. | +------+-----------------------------------------------------------------------------+ | 400 | Bad Request. 
| +------+-----------------------------------------------------------------------------+ | 401 | Missing or Invalid X-Auth-Token. Authentication required. | +------+-----------------------------------------------------------------------------+ | 403 | User does not have permission to access this resource. | +------+-----------------------------------------------------------------------------+ | 404 | Secret not found for the given UUID. | +------+-----------------------------------------------------------------------------+ | 415 | Unsupported Media Type. | +------+-----------------------------------------------------------------------------+ .. _patch_secret_acl: PATCH /v1/secrets/{uuid}/acl ############################ Updates existing ACL for a given secret. This method can be used to apply partial changes on existing ACL settings. Client can update the `users` list and enable or disable `project-access` flag for existing ACL. List of provided users replaces existing users if any. For an existing list of provided users from an ACL definition, pass empty list [] for `users`. Returns an ACL reference in success case. .. note:: PATCH API support will be changing in near future. Attributes ********** +----------------------------+----------+-----------------------------------------------+----------+ | Attribute Name | Type | Description | Default | +============================+==========+===============================================+==========+ | read | parent | ACL data for read operation. | None | | | element | | | +----------------------------+----------+-----------------------------------------------+----------+ | users | [string] | (optional) List of user ids. This needs to be | None | | | | a user id as returned by Keystone. 
| | +----------------------------+----------+-----------------------------------------------+----------+ | project-access | boolean | (optional) Flag to mark a secret private so | None | | | | that the user who created the secret and | | | | | ``users`` specified in above list can only | | | | | access the secret. Pass `false` to mark the | | | | | secret private. | | +----------------------------+----------+-----------------------------------------------+----------+ Request/Response (Updating project-access flag): ************************************************ .. code-block:: javascript PATCH /v1/secrets/{uuid}/acl Headers: Content-Type: application/json X-Auth-Token: {token_id} Body: { "read": { "project-access":false } } Response: HTTP/1.1 200 OK {"acl_ref": "https://{barbican_host}/v1/secrets/{uuid}/acl"} Request/Response (Removing all users from ACL): *********************************************** .. code-block:: javascript PATCH /v1/secrets/{uuid}/acl Headers: Content-Type: application/json X-Auth-Token: {token_id} Body: { "read": { "users":[] } } Response: HTTP/1.1 200 OK {"acl_ref": "https://{barbican_host}/v1/secrets/{uuid}/acl"} HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 200 | Successfully updated secret ACL. | +------+-----------------------------------------------------------------------------+ | 400 | Bad Request. | +------+-----------------------------------------------------------------------------+ | 401 | Missing or Invalid X-Auth-Token. Authentication required. | +------+-----------------------------------------------------------------------------+ | 403 | User does not have permission to access this resource. | +------+-----------------------------------------------------------------------------+ | 404 | Secret not found for the given UUID. 
| +------+-----------------------------------------------------------------------------+ | 415 | Unsupported Media Type. | +------+-----------------------------------------------------------------------------+ .. _delete_secret_acl: DELETE /v1/secrets/{uuid}/acl ############################# Delete ACL for a given secret. No content is returned in the case of successful deletion. Request/Response: ***************** .. code-block:: javascript DELETE /v1/secrets/{uuid}/acl Headers: X-Auth-Token: {token_id} Response: HTTP/1.1 200 OK HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 200 | Successfully deleted secret ACL. | +------+-----------------------------------------------------------------------------+ | 401 | Missing or Invalid X-Auth-Token. Authentication required. | +------+-----------------------------------------------------------------------------+ | 403 | User does not have permission to access this resource. | +------+-----------------------------------------------------------------------------+ | 404 | Secret not found for the given UUID. | +------+-----------------------------------------------------------------------------+ Container ACL API ================= .. _get_container_acl: GET /v1/containers/{uuid}/acl ############################# Retrieve the ACL settings for a given container. If no ACL is defined for that container, then `Default ACL `__ is returned. Request/Response (With ACL defined): ************************************ .. code-block:: javascript Request: GET /v1/containers/{uuid}/acl Headers: X-Auth-Token: {token_id} Response: HTTP/1.1 200 OK { "read":{ "updated":"2015-05-12T20:08:47.644264", "created":"2015-05-12T19:23:44.019168", "users":[ {user_id1}, {user_id2}, ..... 
], "project-access":{project-access-flag} } } Request/Response (With no ACL defined): *************************************** .. code-block:: javascript Request: GET /v1/containers/{uuid}/acl Headers: X-Auth-Token: {token_id} Response: HTTP/1.1 200 OK { "read":{ "project-access": true } } HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 200 | Successful request. | +------+-----------------------------------------------------------------------------+ | 401 | Missing or Invalid X-Auth-Token. Authentication required. | +------+-----------------------------------------------------------------------------+ | 403 | User does not have permission to access this resource. | +------+-----------------------------------------------------------------------------+ | 404 | Container not found for the given UUID. | +------+-----------------------------------------------------------------------------+ .. _put_container_acl: PUT /v1/containers/{uuid}/acl ############################# Create new or replaces existing ACL for a given container. This call is used to add new ACL for an container. If the ACL is already set on a container, this method will replace it with the requested ACL settings. In case of create (first new explicit ACL) or replace existing ACL, 200 is returned in both cases. To delete existing users from an ACL definition, pass empty list [] for `users`. Returns an ACL reference in success case. Attributes ********** The ACL resource detailed in this page allows access to individual containers to be controlled. This access is configured via operations on those containers. Currently only the 'read' operation (which includes GET REST actions) is supported. 
+----------------------------+----------+-----------------------------------------------+----------+ | Attribute Name | Type | Description | Default | +============================+==========+===============================================+==========+ | read | parent | ACL data for read operation. | None | | | element | | | +----------------------------+----------+-----------------------------------------------+----------+ | users | [string] | (optional) List of user ids. This needs to be | [] | | | | a user id as returned by Keystone. | | +----------------------------+----------+-----------------------------------------------+----------+ | project-access | boolean | (optional) Flag to mark a container private | `true` | | | | so that the user who created the container and| | | | | ``users`` specified in above list can only | | | | | access the container. Pass `false` to mark the| | | | | container private. | | +----------------------------+----------+-----------------------------------------------+----------+ Request/Response (Set or Replace ACL): ************************************** .. code-block:: javascript PUT /v1/containers/{uuid}/acl Headers: Content-Type: application/json X-Auth-Token: {token_id} Body: { "read":{ "users":[ {user_id1}, {user_id2}, ..... ], "project-access":{project-access-flag} } } Response: HTTP/1.1 200 OK {"acl_ref": "https://{barbican_host}/v1/containers/{uuid}/acl"} HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 200 | Successfully set/replaced container ACL. | +------+-----------------------------------------------------------------------------+ | 400 | Bad Request. | +------+-----------------------------------------------------------------------------+ | 401 | Missing or Invalid X-Auth-Token. Authentication required. 
| +------+-----------------------------------------------------------------------------+ | 403 | User does not have permission to access this resource. | +------+-----------------------------------------------------------------------------+ | 404 | Container not found for the given UUID. | +------+-----------------------------------------------------------------------------+ | 415 | Unsupported Media Type. | +------+-----------------------------------------------------------------------------+ .. _patch_container_acl: PATCH /v1/containers/{uuid}/acl ############################### Update existing ACL for a given container. This method can be used to apply partial changes on existing ACL settings. Client can update `users` list and enable or disable `project-access` flag for existing ACL. List of provided users replaces existing users if any. For an existing list of provided users from an ACL definition, pass empty list [] for `users`. Returns an ACL reference in success case. .. note:: PATCH API support will be changing in near future. Attributes ********** +----------------------------+----------+-----------------------------------------------+----------+ | Attribute Name | Type | Description | Default | +============================+==========+===============================================+==========+ | read | parent | ACL data for read operation. | None | | | element | | | +----------------------------+----------+-----------------------------------------------+----------+ | users | [string] | (optional) List of user ids. This needs to be | None | | | | a user id as returned by Keystone. | | +----------------------------+----------+-----------------------------------------------+----------+ | project-access | boolean | (optional) Flag to mark a container private | None | | | | so that the user who created the container and| | | | | ``users`` specified in above list can only | | | | | access the container. Pass `false` to mark the| | | | | container private. 
| | +----------------------------+----------+-----------------------------------------------+----------+ Request/Response (Updating project-access flag): ************************************************ .. code-block:: javascript PATCH /v1/containers/{uuid}/acl Headers: Content-Type: application/json X-Auth-Token: {token_id} Body: { "read": { "project-access":false } } Response: HTTP/1.1 200 OK {"acl_ref": "https://{barbican_host}/v1/containers/{uuid}/acl"} Request/Response (Removing all users from ACL): *********************************************** .. code-block:: javascript PATCH /v1/containers/{uuid}/acl Headers: Content-Type: application/json X-Auth-Token: {token_id} Body: { "read": { "users":[] } } Response: HTTP/1.1 200 OK {"acl_ref": "https://{barbican_host}/v1/containers/{uuid}/acl"} HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 200 | Successfully updated container ACL. | +------+-----------------------------------------------------------------------------+ | 400 | Bad Request. | +------+-----------------------------------------------------------------------------+ | 401 | Missing or Invalid X-Auth-Token. Authentication required. | +------+-----------------------------------------------------------------------------+ | 403 | User does not have permission to access this resource. | +------+-----------------------------------------------------------------------------+ | 404 | Container not found for the given UUID. | +------+-----------------------------------------------------------------------------+ | 415 | Unsupported Media Type. | +------+-----------------------------------------------------------------------------+ .. _delete_container_acl: DELETE /v1/containers/{uuid}/acl ################################ Delete ACL for a given container. 
No content is returned in the case of successful deletion. Request/Response: ***************** .. code-block:: javascript DELETE /v1/containers/{uuid}/acl Headers: X-Auth-Token: {token_id} Response: HTTP/1.1 200 OK HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 200 | Successfully deleted container ACL. | +------+-----------------------------------------------------------------------------+ | 401 | Missing or Invalid X-Auth-Token. Authentication required. | +------+-----------------------------------------------------------------------------+ | 403 | User does not have permission to access this resource. | +------+-----------------------------------------------------------------------------+ | 404 | Container not found for the given UUID. | +------+-----------------------------------------------------------------------------+ barbican-2.0.0/doc/source/api/reference/secret_types.rst0000664000567000056710000002070712701405673024445 0ustar jenkinsjenkins00000000000000************************ Secret Types - Reference ************************ Every secret in Barbican has a type. Secret types are used to describe different kinds of secret data that are stored in Barbican. The type for a particular secret is listed in the secret's metadata as the ``secret_type`` attribute. The possible secret types are: * ``symmetric`` - Used for storing byte arrays such as keys suitable for symmetric encryption. * ``public`` - Used for storing the public key of an asymmetric keypair. * ``private`` - Used for storing the private key of an asymmetric keypair. * ``passphrase`` - Used for storing plain text passphrases. * ``certificate`` - Used for storing cryptographic certificates such as X.509 certificates. 
* ``opaque`` - Used for backwards compatibility with previous versions of the API without typed secrets. New applications are encouraged to specify one of the other secret types. Symmetric ######### The ``symmetric`` secret type is used to store byte arrays of sensitive data, such as keys that are used for symmetric encryption. The content-type used with symmetric secrets is ``application/octet-stream``. When storing a symmetric secret with a single POST request, the data must be encoded so that it may be included inside the JSON body of the request. In this case, the content encoding of ``base64`` can be used. Example 1.1 *********** Create an encryption key for use in AES-256-CBC encryption and store it in Barbican. First, we'll see how this can be done in a single POST request from the command line using curl. .. code-block:: bash # Create an encryption_key file with 256 bits of random data dd bs=32 count=1 if=/dev/urandom of=encryption_key # Encode the contents of the encryption key using base64 encoding KEY_BASE64=$(base64 < encryption_key) # Send a request to store the key in Barbican curl -vv -H "X-Auth-Token: $TOKEN" -H 'Accept: application/json' \ -H 'Content-Type: application/json' \ -d '{"name": "AES encryption key", "secret_type": "symmetric", "payload": "'"$KEY_BASE64"'", "payload_content_type": "application/octet-stream", "payload_content_encoding": "base64", "algorithm": "AES", "bit_length": 256, "mode": "CBC"}' \ http://localhost:9311/v1/secrets | python -m json.tool This should return a reference (URI) for the secret that was created: .. code-block:: json { "secret_ref": "http://localhost:9311/v1/secrets/48d24158-b4b4-45b8-9669-d9f0ef793c23" } We can use this reference to retrieve the secret metadata: .. 
code-block:: bash curl -vv -H "X-Auth-Token: $TOKEN" -H 'Accept: application/json' \ http://localhost:9311/v1/secrets/48d24158-b4b4-45b8-9669-d9f0ef793c23 | python -m json.tool The metadata will list the available content-types for the symmetric secret: .. code-block:: json { "algorithm": "AES", "bit_length": 256, "content_types": { "default": "application/octet-stream" }, "created": "2015-04-08T06:24:16.600393", "creator_id": "3a7e3d2421384f56a8fb6cf082a8efab", "expiration": null, "mode": "CBC", "name": "AES encryption key", "secret_ref": "http://localhost:9311/v1/secrets/48d24158-b4b4-45b8-9669-d9f0ef793c23", "secret_type": "symmetric", "status": "ACTIVE", "updated": "2015-04-08T06:24:16.614204" } The ``content_types`` attribute describes the content-types that can be used to retrieve the payload. In this example, there is only the default content type of ``application/octet-stream``. We can use it to retrieve the payload: .. code-block:: bash # Retrieve the payload and save it to a file curl -vv -H "X-Auth-Token: $TOKEN" \ -H 'Accept: application/octet-stream' \ -o retrieved_key \ http://localhost:9311/v1/secrets/48d24158-b4b4-45b8-9669-d9f0ef793c23/payload The ``retrieved_key`` file now contains the byte array we started with. Note that barbican returned the byte array in binary format, not base64. This is because the ``payload_content_encoding`` is only used when submitting the secret to barbican. Public ###### The ``public`` secret type is used to store the public key of an asymmetric keypair. For example, a public secret can be used to store the public key of an RSA keypair. Currently, there is only one file format accepted for public secrets: A DER-encoded ``SubjectPublicKeyInfo`` structure as defined by X.509 RFC 5280 that has been Base64 encoded with a PEM header and footer. This is the type of public key that is generated by the ``openssl`` tool by default. The content-type used with public secrets is ``application/octet-stream``. 
When storing a public secret with a single POST request, the contents of the file must be encoded since JSON does not accept newline characters. In this case, the contents of the file must be Base64 encoded and the content encoding of ``base64`` can be used. Example 2.1 *********** Create an RSA keypair and store the public key in Barbican. For this example, we will be using a metadata-only POST followed by a PUT. .. code-block:: bash # Create the RSA keypair openssl genrsa -out private.pem 2048 # Extract the public key openssl rsa -in private.pem -out public.pem -pubout # Submit a metadata-only POST curl -vv -H "X-Auth-Token: $TOKEN" \ -H 'Accept: application/json' \ -H 'Content-Type: application/json' \ -d '{"name": "RSA Public Key", "secret_type": "public", "algorithm": "RSA"}' \ http://localhost:9311/v1/secrets | python -m json.tool This should return a reference (URI) for the secret that was created: .. code-block:: json 200 OK { "secret_ref": "http://localhost:9311/v1/secrets/cd20d134-c229-417a-a753-86432ad13bad" } We can use this reference to add the payload with a PUT request: .. code-block:: bash curl -vv -X PUT -H "X-Auth-Token: $TOKEN" \ -H 'Accept: application/json' \ -H 'Content-Type: application/octet-stream' \ --data-binary @public.pem \ http://localhost:9311/v1/secrets/cd20d134-c229-417a-a753-86432ad13bad The server should respond with a 2xx response to indicate that the PUT request was processed successfully: .. code-block:: json 204 - No Content Now we should be able to request the metadata and see the new content-type listed there: .. code-block:: bash curl -vv -H "X-Auth-Token: $TOKEN" \ -H 'Accept: application/json' \ http://localhost:9311/v1/secrets/cd20d134-c229-417a-a753-86432ad13bad | python -m json.tool .. 
code-block:: json { "algorithm": "RSA", "bit_length": null, "content_types": { "default": "application/octet-stream" }, "created": "2015-04-08T21:45:59.239976", "creator_id": "3a7e3d2421384f56a8fb6cf082a8efab", "expiration": null, "mode": null, "name": "RSA Public Key", "secret_ref": "http://localhost:9311/v1/secrets/cd20d134-c229-417a-a753-86432ad13bad", "secret_type": "public", "status": "ACTIVE", "updated": "2015-04-08T21:52:57.523969" } Finally, we can use the default content-type listed in ``content_types`` to retrieve the public key: .. code-block:: bash curl -vv -H "X-Auth-Token: $TOKEN" \ -H 'Accept: application/octet-stream' \ -o retrieved_public.pem \ http://localhost:9311/v1/secrets/cd20d134-c229-417a-a753-86432ad13bad/payload The ``retrieved_public.pem`` file now has the same contents as the public.pem file we started with. Example 2.2 *********** Create an RSA keypair and store the public key in Barbican. For this example we will be using a single POST request. .. code-block:: bash # Create the RSA keypair openssl genrsa -out private.pem 2048 # Extract the public key openssl rsa -in private.pem -out public.pem -pubout # Base64 encode the contents of the public key PUB_BASE64=$(base64 < public.pem) curl -vv -H "X-Auth-Token: $TOKEN" \ -H 'Accept: application/json' \ -H 'Content-Type: application/json' \ -d '{"name": "RSA Public Key", "secret_type": "public", "payload": "'"$PUB_BASE64"'", "payload_content_type": "application/octet-stream", "payload_content_encoding": "base64", "algorithm": "RSA"}' \ http://localhost:9311/v1/secrets | python -m json.tool This should return a reference (URI) for the secret that was created. .. 
code-block:: json 200 OK { "secret_ref": "http://localhost:9311/v1/secrets/d553f0ac-c79d-43b4-b165-32594b612ad4" } barbican-2.0.0/doc/source/api/reference/consumers.rst0000664000567000056710000002452612701405674023756 0ustar jenkinsjenkins00000000000000************************* Consumers API - Reference ************************* GET {container_ref}/consumers ############################# Lists a container's consumers. The list of consumers can be filtered by the parameters passed in via the URL. .. _consumer_parameters: Parameters ********** +----------+---------+----------------------------------------------------------------+ | Name | Type | Description | +==========+=========+================================================================+ | offset | integer | The starting index within the total list of the consumers that | | | | you would like to retrieve. | +----------+---------+----------------------------------------------------------------+ | limit | integer | The maximum number of records to return (up to 100). The | | | | default limit is 10. | +----------+---------+----------------------------------------------------------------+ Request: ******** .. code-block:: javascript GET {container_ref}/consumers Headers: X-Auth-Token: Response: ********* .. code-block:: javascript 200 OK { "total": 3, "consumers": [ { "status": "ACTIVE", "URL": "consumerurl", "updated": "2015-10-15T21:06:33.123878", "name": "consumername", "created": "2015-10-15T21:06:33.123872" }, { "status": "ACTIVE", "URL": "consumerURL2", "updated": "2015-10-15T21:17:08.092416", "name": "consumername2", "created": "2015-10-15T21:17:08.092408" }, { "status": "ACTIVE", "URL": "consumerURL3", "updated": "2015-10-15T21:21:29.970370", "name": "consumername3", "created": "2015-10-15T21:21:29.970365" } ] } Request: ******** .. code-block:: javascript GET {container_ref}/consumers?limit=1&offset=1 Headers: X-Auth-Token: .. 
code-block:: javascript { "total": 3, "next": "http://localhost:9311/v1/consumers?limit=1&offset=2", "consumers": [ { "status": "ACTIVE", "URL": "consumerURL2", "updated": "2015-10-15T21:17:08.092416", "name": "consumername2", "created": "2015-10-15T21:17:08.092408" } ], "previous": "http://localhost:9311/v1/consumers?limit=1&offset=0" } .. _consumer_response_attributes: Response Attributes ******************* +----------+---------+---------------------------------------------------------------+ | Name | Type | Description | +==========+=========+============================================================== + | consumers| list | Contains a list of dictionaries filled with consumer metadata.| +----------+---------+---------------------------------------------------------------+ | total | integer | The total number of consumers available to the user. | +----------+---------+---------------------------------------------------------------+ | next | string | A HATEOAS url to retrieve the next set of consumers based on | | | | the offset and limit parameters. This attribute is only | | | | available when the total number of consumers is greater than | | | | offset and limit parameter combined. | +----------+---------+---------------------------------------------------------------+ | previous | string | A HATEOAS url to retrieve the previous set of consumers based | | | | on the offset and limit parameters. This attribute is only | | | | available when the request offset is greater than 0. | +----------+---------+---------------------------------------------------------------+ .. 
_consumer_status_codes: HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 200 | Successful Request | +------+-----------------------------------------------------------------------------+ | 401 | Invalid X-Auth-Token or the token doesn't have permissions to this resource | +------+-----------------------------------------------------------------------------+ .. _post_consumers: POST {container_ref}/consumers ############################## Creates a consumer Attributes ********** +----------------------------+---------+----------------------------------------------+------------+ | Attribute Name | Type | Description | Default | +============================+=========+==============================================+============+ | name | string | The name of the consumer set by the user. | None | +----------------------------+---------+----------------------------------------------+------------+ | url | string | The url for the user or service using the | None | | | | container. | | +----------------------------+---------+----------------------------------------------+------------+ Request: ******** .. code-block:: javascript POST {container_ref}/consumers Headers: X-Auth-Token: Content: { "name": "ConsumerName", "url": "ConsumerURL" } Response: ********* .. 
code-block:: javascript 200 OK { "status": "ACTIVE", "updated": "2015-10-15T17:56:18.626724", "name": "container name", "consumers": [ { "URL": "consumerURL", "name": "consumername" } ], "created": "2015-10-15T17:55:44.380002", "container_ref": "http://localhost:9311/v1/containers/74bbd3fd-9ba8-42ee-b87e-2eecf10e47b9", "creator_id": "b17c815d80f946ea8505c34347a2aeba", "secret_refs": [ { "secret_ref": "http://localhost:9311/v1/secrets/b61613fc-be53-4696-ac01-c3a789e87973", "name": "private_key" } ], "type": "generic" } HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 200 | OK | +------+-----------------------------------------------------------------------------+ | 400 | Bad Request | +------+-----------------------------------------------------------------------------+ | 401 | Invalid X-Auth-Token or the token doesn't have permissions to this resource | +------+-----------------------------------------------------------------------------+ | 403 | Forbidden. The user has been authenticated, but is not authorized to | | | create a consumer. This can be based on the the user's role or the | | | project's quota. | +------+-----------------------------------------------------------------------------+ .. _delete_consumer: DELETE {container_ref}/consumers ################################ Delete a consumer. Attributes ********** +----------------------------+---------+----------------------------------------------+------------+ | Attribute Name | Type | Description | Default | +============================+=========+==============================================+============+ | name | string | The name of the consumer set by the user. 
| None | +----------------------------+---------+----------------------------------------------+------------+ | URL | string | The url for the user or service using the | None | | | | container. | | +----------------------------+---------+----------------------------------------------+------------+ Request: ******** .. code-block:: javascript DELETE {container_ref}/consumers Headers: X-Auth-Token: Content: { "name": "ConsumerName", "URL": "ConsumerURL" } Response: ********* .. code-block:: javascript 200 OK { "status": "ACTIVE", "updated": "2015-10-15T17:56:18.626724", "name": "container name", "consumers": [], "created": "2015-10-15T17:55:44.380002", "container_ref": "http://localhost:9311/v1/containers/74bbd3fd-9ba8-42ee-b87e-2eecf10e47b9", "creator_id": "b17c815d80f946ea8505c34347a2aeba", "secret_refs": [ { "secret_ref": "http://localhost:9311/v1/secrets/b61613fc-be53-4696-ac01-c3a789e87973", "name": "private_key" } ], "type": "generic" } HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 200 | OK | +------+-----------------------------------------------------------------------------+ | 401 | Invalid X-Auth-Token or the token doesn't have permissions to this resource | +------+-----------------------------------------------------------------------------+ | 404 | Not Found | +------+-----------------------------------------------------------------------------+ barbican-2.0.0/doc/source/api/reference/secret_metadata.rst0000664000567000056710000003003012701405674025050 0ustar jenkinsjenkins00000000000000******************************* Secret Metadata API - Reference ******************************* .. _get_secret_metadata: GET /v1/secrets/{uuid}/metadata ############################### Lists a secret's user-defined metadata. 
If a secret does not contain any user metadata, an empty list will be returned. Request: ******** .. code-block:: javascript GET /v1/secrets/{uuid}/metadata Headers: Accept: application/json X-Auth-Token: Response: ********* .. code-block:: javascript { 'metadata': { 'description': 'contains the AES key', 'geolocation': '12.3456, -98.7654' } } .. _secret_metadata_response_attributes: Response Attributes ******************* +----------+---------+--------------------------------------------------------------+ | Name | Type | Description | +==========+=========+==============================================================+ | metadata | list | Contains a list of the secret metadata's key/value pairs. | | | | The provided keys must be lowercase. If not they will be | | | | converted to lowercase. | +----------+---------+--------------------------------------------------------------+ .. _secret_metadata_status_codes: HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 200 | Successful Request | +------+-----------------------------------------------------------------------------+ | 401 | Invalid X-Auth-Token or the token doesn't have permissions to access this | | | resource. | +------+-----------------------------------------------------------------------------+ | 403 | Forbidden. The user has been authenticated, but is not authorized to | | | retrieve secret metadata. This can be based on the the user's role. | +------+-----------------------------------------------------------------------------+ | 404 | Not Found | +------+-----------------------------------------------------------------------------+ .. _put_secret_metadata: PUT /v1/secrets/{uuid}/metadata ################################ Sets the metadata for a secret. 
Any metadata that was previously set will be deleted and replaced with this metadata. Parameters ********** +----------+---------+--------------------------------------------------------------+ | Name | Type | Description | +==========+=========+==============================================================+ | metadata | list | Contains a list of the secret metadata's key/value pairs. | | | | The provided keys must be lowercase. If not they will be | | | | converted to lowercase. | +----------+---------+--------------------------------------------------------------+ Request: ******** .. code-block:: javascript PUT /v1/secrets/{uuid}/metadata Headers: Content-Type: application/json X-Auth-Token: Content: { 'metadata': { 'description': 'contains the AES key', 'geolocation': '12.3456, -98.7654' } } Response: ********* .. code-block:: javascript 201 OK { "metadata_ref": "https://{barbican_host}/v1/secrets/{secret_uuid}/metadata" } HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 201 | Successfully created/updated Secret Metadata | +------+-----------------------------------------------------------------------------+ | 400 | Bad Request | +------+-----------------------------------------------------------------------------+ | 401 | Invalid X-Auth-Token or the token doesn't have permissions to access this | | | resource. | +------+-----------------------------------------------------------------------------+ | 403 | Forbidden. The user has been authenticated, but is not authorized to | | | create secret metadata. This can be based on the the user's role. | +------+-----------------------------------------------------------------------------+ .. _get_secret_metadatum: GET /v1/secrets/{uuid}/metadata/{key} ##################################### Retrieves a secret's user-added metadata. 
Request: ***************** .. code-block:: javascript GET /v1/secrets/{uuid}/metadata/{key} Headers: Accept: application/json X-Auth-Token: Response: ****************** .. code-block:: javascript 200 OK { "key": "access-limit", "value": "0" } HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 200 | Successful request | +------+-----------------------------------------------------------------------------+ | 401 | Invalid X-Auth-Token or the token doesn't have permissions to access this | | | resource | +------+-----------------------------------------------------------------------------+ | 403 | Forbidden. The user has been authenticated, but is not authorized to | | | retrieve secret metadata. This can be based on the the user's role. | +------+-----------------------------------------------------------------------------+ | 404 | Not Found | +------+-----------------------------------------------------------------------------+ .. _post_secret_metadatum: POST /v1/secrets/{uuid}/metadata/ ################################# Adds a new key/value pair to the secret's user metadata. The key sent in the request must not already exist in the metadata. The key must also be in lowercase, otherwise it will automatically be changed to lowercase. Request: ******** .. code-block:: javascript POST /v1/secrets/{uuid}/metadata/ Headers: X-Auth-Token: Content-Type: application/json Content: { "key": "access-limit", "value": "11" } Response: ********* .. 
code-block:: javascript 201 Created Secret Metadata Location: http://example.com:9311/v1/secrets/{uuid}/metadata/access-limit { "key": "access-limit", "value": "11" } HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 201 | Successful request | +------+-----------------------------------------------------------------------------+ | 400 | Bad Request | +------+-----------------------------------------------------------------------------+ | 401 | Invalid X-Auth-Token or the token doesn't have permissions to access this | | | resource. | +------+-----------------------------------------------------------------------------+ | 403 | Forbidden. The user has been authenticated, but is not authorized to | | | create secret metadata. This can be based on the the user's role. | +------+-----------------------------------------------------------------------------+ | 409 | Conflict. The provided metadata key already exists. | +------+-----------------------------------------------------------------------------+ .. _put_secret_metadatum: PUT /v1/secrets/{uuid}/metadata/{key} ##################################### Updates an existing key/value pair in the secret's user metadata. The key sent in the request must already exist in the metadata. The key must also be in lowercase, otherwise it will automatically be changed to lowercase. Request: ******** .. code-block:: javascript PUT /v1/secrets/{uuid}/metadata/{key} Headers: X-Auth-Token: Content-Type: application/json Content: { "key": "access-limit", "value": "11" } Response: ********* .. 
code-block:: javascript 200 OK { "key": "access-limit", "value": "11" } HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 200 | Successful request | +------+-----------------------------------------------------------------------------+ | 400 | Bad Request | +------+-----------------------------------------------------------------------------+ | 401 | Invalid X-Auth-Token or the token doesn't have permissions to access this | | | resource. | +------+-----------------------------------------------------------------------------+ | 403 | Forbidden. The user has been authenticated, but is not authorized to | | | update secret metadata. This can be based on the the user's role. | +------+-----------------------------------------------------------------------------+ | 404 | Not Found | +------+-----------------------------------------------------------------------------+ .. _delete_secret_metadatum: DELETE /v1/secrets/{uuid}/metadata/{key} ######################################## Delete secret metadata by key. Request: ******** .. code-block:: javascript DELETE /v1/secrets/{uuid}/metadata/{key} Headers: X-Auth-Token: Response: ********* .. code-block:: javascript 204 No Content HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 204 | Successful request | +------+-----------------------------------------------------------------------------+ | 401 | Invalid X-Auth-Token or the token doesn't have permissions to access this | | | resource. | +------+-----------------------------------------------------------------------------+ | 403 | Forbidden. 
The user has been authenticated, but is not authorized to | | | delete secret metadata. This can be based on the user's role. | +------+-----------------------------------------------------------------------------+ | 404 | Not Found | +------+-----------------------------------------------------------------------------+ barbican-2.0.0/doc/source/api/reference/certificates.rst0000664000567000056710000002156412701405674024374 0ustar jenkinsjenkins00000000000000**************************** Certificates API - Reference **************************** .. _reference_post_certificate_orders: POST /v1/orders ############### Certificates are requested using the Orders interface. Detailed description of this interface is deferred to the Orders API reference. This reference identifies the parameters that are specific to each of the certificate order types i.e. those orders for which the parameter *type* is "certificate". All orders contain a required parameter *meta*, which is a dictionary containing key-value parameters which specify the details of an order request. All the parameters below are passed in the *meta* dictionary. The result of this operation is an order for a certificate, returned to the client as an order reference. Upon completion, the order will contain a reference to a Certificate Container, see `Certificate Containers `__. Common Attributes ***************** Certificate orders have the same attributes that are common to all orders. The table below lists those parameters that are specific to certificate orders in particular. 
+----------------------------+---------+----------------------------------------------+------------+ | Attribute Name | Type | Description | Default | +============================+=========+==============================================+============+ | request_type | string | (optional) The type of certificate order | custom | | | | Possible values are stored-key, simple-cmc, | | | | | full-cmc and custom | | +----------------------------+---------+----------------------------------------------+------------+ | ca_id | string | (optional) The UUID of the CA to which this | None | | | | certificate order should be sent. This | | | | | UUID can be obtained from the cas interface. | | +----------------------------+---------+----------------------------------------------+------------+ | profile | string | (optional) Identifier indicating the | None | | | | certificate product being requested. | | | | | eg. a 3 year server certificate with certain | | | | | extensions. This identifier is CA specific. | | | | | Therefore, ca_id is required if the profile | | | | | is provided. 
| | +----------------------------+---------+----------------------------------------------+------------+ | requestor_name | string | (optional) Requestor name | None | +----------------------------+---------+----------------------------------------------+------------+ | requestor_email | string | (optional) Requestor email | None | +----------------------------+---------+----------------------------------------------+------------+ | requestor_phone | string | (optional) Requestor phone | None | +----------------------------+---------+----------------------------------------------+------------+ Attributes for Simple CMC Orders ******************************** +----------------------------+---------+----------------------------------------------+------------+ | Attribute Name | Type | Description | Default | +============================+=========+==============================================+============+ | request_data | string | The base64 encoded simple CMC request with | None | | | | no line breaks. Simple CMC is the same as | | | | | a PKCS10 CSR. (RFC 5272) | | +----------------------------+---------+----------------------------------------------+------------+ Attributes for Stored Key Requests ********************************** +----------------------------+---------+----------------------------------------------+------------+ | Attribute Name | Type | Description | Default | +============================+=========+==============================================+============+ | source_container_ref | string | Reference to the RSA container already | None | | | | stored in Barbican containing the private | | | | | and public keys. | | +----------------------------+---------+----------------------------------------------+------------+ | subject_dn | string | Subject DN for the certificate. This | None | | | | value must comply with RFC 1485. 
| | +----------------------------+---------+----------------------------------------------+------------+ | extensions | string | (optional) Base 64 DER encoded ASN.1 values | None | | | | for requested certificate extensions, | | | | | Currently, this value is not parsed. | | +----------------------------+---------+----------------------------------------------+------------+ Attributes for Custom Orders **************************** +----------------------------+---------+----------------------------------------------+------------+ | Attribute Name | Type | Description | Default | +============================+=========+==============================================+============+ | (Varies - depends on CA) | (Varies)| Custom certificate orders pass arbitrary | None | | | | parameters through the CA unchanged. It is | | | | | up to the CA to interpret the parameters. | | | | | Note that as the request parameters are CA | | | | | specific, *ca_id* is required for this | | | | | request type. | | +----------------------------+---------+----------------------------------------------+------------+ Request: ******** The request below shows a simple CMC request. For examples of each type, see the `Certificate User's Guide `. .. code-block:: javascript POST /v1/orders Headers: Content-Type: application/json X-Auth-Token: Content: { "type": "certificate", "meta": { "request_data": "... base 64 encoded simple CMC ...", "request_type": "simple-cmc", "ca_id": "422e6ad3-24ae-45e3-b165-4e9487cd0ded", "profile": "caServerCert" } } Response: ********* .. 
code-block:: javascript 201 Created { "order_ref": "https://{barbican_host}/v1/orders/{order_uuid}" } HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 201 | Successfully created an Order | +------+-----------------------------------------------------------------------------+ | 400 | Bad Request | +------+-----------------------------------------------------------------------------+ | 401 | Invalid X-Auth-Token or the token doesn't have permissions to this resource | +------+-----------------------------------------------------------------------------+ | 403 | Forbidden. The user has been authenticated, but is not authorized to | | | create an order. This can be based on the user's role or the project's | | | quota. | +------+-----------------------------------------------------------------------------+ | 415 | Unsupported media type | +------+-----------------------------------------------------------------------------+ barbican-2.0.0/doc/source/api/reference/quotas.rst0000664000567000056710000004771012701405673023243 0ustar jenkinsjenkins00000000000000********************** Quotas API - Reference ********************** GET /v1/quotas ############## Get the effective quotas for the project of the requester. The project id of the requester is derived from the authentication token provided in the X-Auth-Token header. .. _get_quotas_request: Request/Response: ***************** .. code-block:: javascript Request: GET /v1/quotas Headers: X-Auth-Token: Accept: application/json Response: HTTP/1.1 200 OK Content-Type: application/json { "quotas": { "secrets": 10, "orders": 20, "containers": 10, "consumers": -1, "cas": 5 } } ..
_get_quotas_response_attributes: Response Attributes ******************* +------------+---------+--------------------------------------------------------------+ | Name | Type | Description | +============+=========+==============================================================+ | quotas | dict | Contains a dictionary with quota information | +------------+---------+--------------------------------------------------------------+ | secrets | integer | Contains the effective quota value of the current project | | | | for the secret resource. | +------------+---------+--------------------------------------------------------------+ | orders | integer | Contains the effective quota value of the current project | | | | for the orders resource. | +------------+---------+--------------------------------------------------------------+ | containers | integer | Contains the effective quota value of the current project | | | | for the containers resource. | +------------+---------+--------------------------------------------------------------+ | consumers | integer | Contains the effective quota value of the current project | | | | for the consumers resource. | +------------+---------+--------------------------------------------------------------+ | cas | integer | Contains the effective quota value of the current project | | | | for the CAs resource. | +------------+---------+--------------------------------------------------------------+ Effective quota values are interpreted as follows: +-------+-----------------------------------------------------------------------------+ | Value | Description | +=======+=============================================================================+ | -1 | A negative value indicates the resource is unconstrained by a quota. | +-------+-----------------------------------------------------------------------------+ | 0 | A zero value indicates that the resource is disabled. 
| +-------+-----------------------------------------------------------------------------+ | int | A positive value indicates the maximum number of that resource that can be | | | created for the current project. | +-------+-----------------------------------------------------------------------------+ .. _get_quotas_status_codes: HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 200 | Successful Request | +------+-----------------------------------------------------------------------------+ | 401 | Invalid X-Auth-Token or the token doesn't have permissions to this resource | +------+-----------------------------------------------------------------------------+ .. _get_project_quotas: GET /v1/project-quotas ###################### Gets a list of configured project quota records. Paging is supported using the optional parameters offset and limit. .. _get_project_quotas_request: Request/Response: ***************** .. code-block:: javascript Request: GET /v1/project-quotas Headers: X-Auth-Token: Accept: application/json Response: 200 OK Content-Type: application/json { "project_quotas": [ { "project_id": "1234", "project_quotas": { "secrets": 2000, "orders": 0, "containers": -1, "consumers": null, "cas": null } }, { "project_id": "5678", "project_quotas": { "secrets": 200, "orders": 100, "containers": -1, "consumers": null, "cas": null } }, ], "total" : 30, } .. _get_project_quotas_parameters: Parameters ********** +--------+---------+----------------------------------------------------------------+ | Name | Type | Description | +========+=========+================================================================+ | offset | integer | The starting index within the total list of the project | | | | quotas that you would like to receive. 
| +--------+---------+----------------------------------------------------------------+ | limit | integer | The maximum number of records to return. | +--------+---------+----------------------------------------------------------------+ .. _get_project_quotas_response_attributes: Response Attributes ******************* +----------------+---------+--------------------------------------------------------------+ | Name | Type | Description | +================+=========+==============================================================+ | project-id | string | The UUID of a project with configured quota information. | +----------------+---------+--------------------------------------------------------------+ | project-quotas | dict | Contains a dictionary with project quota information. | +----------------+---------+--------------------------------------------------------------+ | secrets | integer | Contains the effective quota value of the current project | | | | for the secret resource. | +----------------+---------+--------------------------------------------------------------+ | orders | integer | Contains the effective quota value of the current project | | | | for the orders resource. | +----------------+---------+--------------------------------------------------------------+ | containers | integer | Contains the effective quota value of the current project | | | | for the containers resource. | +----------------+---------+--------------------------------------------------------------+ | consumers | integer | Contains the effective quota value of the current project | | | | for the consumers resource. | +----------------+---------+--------------------------------------------------------------+ | cas | integer | Contains the effective quota value of the current project | | | | for the CAs resource. | +----------------+---------+--------------------------------------------------------------+ | total | integer | The total number of configured project quotas records. 
| +----------------+---------+--------------------------------------------------------------+ | next | string | A HATEOAS url to retrieve the next set of quotas based on | | | | the offset and limit parameters. This attribute is only | | | | available when the total number of secrets is greater than | | | | offset and limit parameter combined. | +----------------+---------+--------------------------------------------------------------+ | previous | string | A HATEOAS url to retrieve the previous set of quotas based | | | | on the offset and limit parameters. This attribute is only | | | | available when the request offset is greater than 0. | +----------------+---------+--------------------------------------------------------------+ Configured project quota values are interpreted as follows: +-------+-----------------------------------------------------------------------------+ | Value | Description | +=======+=============================================================================+ | -1 | A negative value indicates the resource is unconstrained by a quota. | +-------+-----------------------------------------------------------------------------+ | 0 | A zero value indicates that the resource is disabled. | +-------+-----------------------------------------------------------------------------+ | int | A positive value indicates the maximum number of that resource that can be | | | created for the current project. | +-------+-----------------------------------------------------------------------------+ | null | A null value indicates that the default quota value for the resource | | | will be used as the quota for this resource in the current project. | +-------+-----------------------------------------------------------------------------+ .. 
_get_project_quotas_status_codes: HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 200 | Successful Request | +------+-----------------------------------------------------------------------------+ | 401 | Invalid X-Auth-Token or the token doesn't have permissions to this resource | +------+-----------------------------------------------------------------------------+ .. _get_project_quotas_uuid: GET /v1/project-quotas/{uuid} ############################# Retrieves a project's configured project quota information. .. _get_project_quotas_uuid_request: Request/Response: ***************** .. code-block:: javascript Request: GET /v1/project-quotas/{uuid} Headers: X-Auth-Token: Accept: application/json Response: 200 OK Content-Type: application/json { "project_quotas": { "secrets": 10, "orders": 20, "containers": -1, "consumers": 10, "cas": 5 } } .. _get_project_quotas_uuid_response_attributes: Response Attributes ******************* +----------------+---------+--------------------------------------------------------------+ | Name | Type | Description | +================+=========+==============================================================+ | project-quotas | dict | Contains a dictionary with project quota information. | +----------------+---------+--------------------------------------------------------------+ | secrets | integer | Contains the configured quota value of the requested project | | | | for the secret resource. | +----------------+---------+--------------------------------------------------------------+ | orders | integer | Contains the configured quota value of the requested project | | | | for the orders resource. 
| +----------------+---------+--------------------------------------------------------------+ | containers | integer | Contains the configured quota value of the requested project | | | | for the containers resource. | +----------------+---------+--------------------------------------------------------------+ | consumers | integer | Contains the configured quota value of the requested project | | | | for the consumers resource. | +----------------+---------+--------------------------------------------------------------+ | cas | integer | Contains the configured quota value of the requested project | | | | for the CAs resource. | +----------------+---------+--------------------------------------------------------------+ .. _get_project_quotas_uuid_status_codes: HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 200 | Successful request | +------+-----------------------------------------------------------------------------+ | 401 | Invalid X-Auth-Token or the token doesn't have permissions to this resource | +------+-----------------------------------------------------------------------------+ | 404 | Not Found. The requested project does not have any configured quotas. | +------+-----------------------------------------------------------------------------+ .. _put_project_quotas: PUT /v1/project-quotas/{uuid} ############################# Create or update the configured project quotas for the project with the specified UUID. .. _put_project_quotas_request: Request/Response: ***************** .. code-block:: javascript Request: PUT /v1/project-quotas/{uuid} Headers: X-Auth-Token: Content-Type: application/json Body:: { "project_quotas": { "secrets": 50, "orders": 10, "containers": 20 } } Response: 204 No Content ..
_put_project_quotas_request_attributes: Request Attributes ****************** +----------------+---------+----------------------------------------------+ | Attribute Name | Type | Description | +================+=========+==============================================+ | project-quotas | dict | A dictionary with project quota information. | +----------------+---------+----------------------------------------------+ | secrets | integer | The value to set for this project's secret | | | | quota. | +----------------+---------+----------------------------------------------+ | orders | integer | The value to set for this project's order | | | | quota. | +----------------+---------+----------------------------------------------+ | containers | integer | The value to set for this project's | | | | container quota. | +----------------+---------+----------------------------------------------+ | consumers | integer | The value to set for this project's | | | | consumer quota. | +----------------+---------+----------------------------------------------+ | cas | integer | The value to set for this project's | | | | CA quota. | +----------------+---------+----------------------------------------------+ Configured project quota values are specified as follows: +-------+-----------------------------------------------------------------------------+ | Value | Description | +=======+=============================================================================+ | -1 | A negative value indicates the resource is unconstrained by a quota. | +-------+-----------------------------------------------------------------------------+ | 0 | A zero value indicates that the resource is disabled. | +-------+-----------------------------------------------------------------------------+ | int | A positive value indicates the maximum number of that resource that can be | | | created for the specified project. 
| +-------+-----------------------------------------------------------------------------+ | | If a value is not given for a resource, this indicates that the default | | | quota should be used for that resource for the specified project. | +-------+-----------------------------------------------------------------------------+ .. _put_project_quotas_status_codes: HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 204 | Successful request | +------+-----------------------------------------------------------------------------+ | 400 | Bad Request | +------+-----------------------------------------------------------------------------+ | 401 | Invalid X-Auth-Token or the token doesn't have permissions to this resource | +------+-----------------------------------------------------------------------------+ .. _delete_project_quotas: DELETE /v1/project-quotas/{uuid} ################################ Delete the project quotas configuration for the project with the requested UUID. When the project quota configuration is deleted, then the default quotas will be used for the specified project. .. _delete_project_request: Request/Response: ***************** .. code-block:: javascript Request: DELETE v1/project-quotas/{uuid} Headers: X-Auth-Token: Response: 204 No Content .. 
_delete_project_quotas_status_codes: HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 204 | Successful request | +------+-----------------------------------------------------------------------------+ | 401 | Invalid X-Auth-Token or the token doesn't have permissions to this resource | +------+-----------------------------------------------------------------------------+ | 404 | Not Found | +------+-----------------------------------------------------------------------------+ barbican-2.0.0/doc/source/api/reference/secrets.rst0000664000567000056710000004721612701405674023411 0ustar jenkinsjenkins00000000000000*********************** Secrets API - Reference *********************** GET /v1/secrets ############### Lists a project's secrets. The list of secrets can be filtered by the parameters passed in via the URL. The actual secret payload data will not be listed here. Clients must instead make a separate call to retrieve the secret payload data for each individual secret. .. _secret_parameters: Parameters ********** +----------+---------+----------------------------------------------------------------+ | Name | Type | Description | +==========+=========+================================================================+ | offset | integer | The starting index within the total list of the secrets that | | | | you would like to retrieve. | +----------+---------+----------------------------------------------------------------+ | limit | integer | The maximum number of records to return (up to 100). The | | | | default limit is 10. | +----------+---------+----------------------------------------------------------------+ | name | string | Selects all secrets with name equal to this value. 
| +----------+---------+----------------------------------------------------------------+ | bits | integer | Selects all secrets with bit_length equal to this value. | +----------+---------+----------------------------------------------------------------+ | alg | string | Selects all secrets with algorithm equal to this value. | +----------+---------+----------------------------------------------------------------+ | mode | string | Selects all secrets with mode equal to this value. | +----------+---------+----------------------------------------------------------------+ | acl_only | boolean | Selects all secrets with an ACL that contains the user. | | | | Project scope is ignored. | +----------+---------+----------------------------------------------------------------+ Request: ******** .. code-block:: javascript GET /v1/secrets?offset=1&limit=2 Headers: Accept: application/json X-Auth-Token: {keystone_token} (or X-Project-Id: {project id}) Response: ********* .. code-block:: javascript { "next": "http://{barbican_host}:9311/v1/secrets?limit=2&offset=3", "previous": "http://{barbican_host}:9311/v1/secrets?limit=2&offset=0", "secrets": [ { "algorithm": null, "bit_length": null, "content_types": { "default": "application/octet-stream" }, "created": "2015-04-07T03:37:19.805835", "creator_id": "3a7e3d2421384f56a8fb6cf082a8efab", "expiration": null, "mode": null, "name": "opaque octet-stream base64", "secret_ref": "http://{barbican_host}:9311/v1/secrets/{uuid}", "secret_type": "opaque", "status": "ACTIVE", "updated": "2015-04-07T03:37:19.808337" }, { "algorithm": null, "bit_length": null, "content_types": { "default": "application/octet-stream" }, "created": "2015-04-07T03:41:02.184159", "creator_id": "3a7e3d2421384f56a8fb6cf082a8efab", "expiration": null, "mode": null, "name": "opaque random octet-stream base64", "secret_ref": "http://{barbican_host}:9311/v1/secrets/{uuid}", "secret_type": "opaque", "status": "ACTIVE", "updated": "2015-04-07T03:41:02.187823" } ], 
"total": 5 } .. _secret_response_attributes: Response Attributes ******************* +----------+---------+--------------------------------------------------------------+ | Name | Type | Description | +==========+=========+==============================================================+ | secrets | list | Contains a list of secrets. The attributes in the secret | | | | objects are the same as for an individual secret. | +----------+---------+--------------------------------------------------------------+ | total | integer | The total number of secrets available to the user. | +----------+---------+--------------------------------------------------------------+ | next | string | A HATEOAS url to retrieve the next set of secrets based on | | | | the offset and limit parameters. This attribute is only | | | | available when the total number of secrets is greater than | | | | offset and limit parameter combined. | +----------+---------+--------------------------------------------------------------+ | previous | string | A HATEOAS url to retrieve the previous set of secrets based | | | | on the offset and limit parameters. This attribute is only | | | | available when the request offset is greater than 0. | +----------+---------+--------------------------------------------------------------+ .. _secret_status_codes: HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 200 | Successful Request | +------+-----------------------------------------------------------------------------+ | 401 | Invalid X-Auth-Token or the token doesn't have permissions to this resource | +------+-----------------------------------------------------------------------------+ .. _post_secrets: POST /v1/secrets ################ Creates a Secret entity. 
If the ``payload`` attribute is not included in the request, then only the metadata for the secret is created, and a subsequent PUT request is required. Attributes ********** +----------------------------+---------+-----------------------------------------------------+------------+ | Attribute Name | Type | Description | Default | +============================+=========+=====================================================+============+ | name | string | (optional) The name of the secret set by the | None | | | | user. | | +----------------------------+---------+-----------------------------------------------------+------------+ | expiration | string | (optional) This is a UTC timestamp in ISO | None | | | | 8601 format ``YYYY-MM-DDTHH:MM:SSZ``. If | | | | | set, the secret will not be available after | | | | | this time. | | +----------------------------+---------+-----------------------------------------------------+------------+ | algorithm | string | (optional) Metadata provided by a user or | None | | | | system for informational purposes. | | +----------------------------+---------+-----------------------------------------------------+------------+ | bit_length | integer | (optional) Metadata provided by a user or | None | | | | system for informational purposes. Value | | | | | must be greater than zero. | | +----------------------------+---------+-----------------------------------------------------+------------+ | mode | string | (optional) Metadata provided by a user or | None | | | | system for informational purposes. | | +----------------------------+---------+-----------------------------------------------------+------------+ | payload | string | (optional) The secret's data to be stored. | None | | | | ``payload_content_type`` must also be | | | | | supplied if payload is included. 
| | +----------------------------+---------+-----------------------------------------------------+------------+ | payload_content_type | string | (optional) (required if payload is included) | None | | | | The media type for the content of the | | | | | payload. For more information see | | | | | :doc:`Secret Types <../reference/secret_types>` | | +----------------------------+---------+-----------------------------------------------------+------------+ | payload_content_encoding | string | (optional) (required if payload is encoded) | None | | | | The encoding used for the payload to be able | | | | | to include it in the JSON request. | | | | | Currently only ``base64`` is supported. | | +----------------------------+---------+-----------------------------------------------------+------------+ | secret_type | string | (optional) Used to indicate the type of | ``opaque`` | | | | secret being stored. For more information | | | | | see :doc:`Secret Types <../reference/secret_types>` | | +----------------------------+---------+-----------------------------------------------------+------------+ Request: ******** .. code-block:: javascript POST /v1/secrets Headers: Content-Type: application/json X-Auth-Token: Content: { "name": "AES key", "expiration": "2015-12-28T19:14:44.180394", "algorithm": "aes", "bit_length": 256, "mode": "cbc", "payload": "YmVlcg==", "payload_content_type": "application/octet-stream", "payload_content_encoding": "base64" } Response: ********* .. 
code-block:: javascript 201 Created { "secret_ref": "https://{barbican_host}/v1/secrets/{secret_uuid}" } HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 201 | Successfully created a Secret | +------+-----------------------------------------------------------------------------+ | 400 | Bad Request | +------+-----------------------------------------------------------------------------+ | 401 | Invalid X-Auth-Token or the token doesn't have permissions to this resource | +------+-----------------------------------------------------------------------------+ | 403 | Forbidden. The user has been authenticated, but is not authorized to | | | create a secret. This can be based on the the user's role or the | | | project's quota. | +------+-----------------------------------------------------------------------------+ | 415 | Unsupported media-type | +------+-----------------------------------------------------------------------------+ GET /v1/secrets/{uuid} ###################### Retrieves a secret's metadata. Request: ***************** .. code-block:: javascript GET /v1/secrets/{uuid} Headers: Accept: application/json X-Auth-Token: {token} (or X-Project-Id: {project_id}) Response: ****************** .. code-block:: javascript 200 OK { "status": "ACTIVE", "created": "2015-03-23T20:46:51.650515", "updated": "2015-03-23T20:46:51.654116", "expiration": "2015-12-28T19:14:44.180394", "algorithm": "aes", "bit_length": 256, "mode": "cbc", "name": "AES key", "secret_ref": "https://{barbican_host}/v1/secrets/{secret_uuid}", "secret_type": "opaque", "content_types": { "default": "application/octet-stream" } } Payload Request: **************** .. 
warning:: DEPRECATION WARNING: Previous releases of the API allowed the payload to be retrieved from this same endpoint by changing the Accept header to be one of the values listed in the ``content_types`` attribute of the Secret metadata. This was found to be problematic in some situations, so new applications should make use of the :ref:`/v1/secrets/{uuid}/payload ` endpoint instead. .. code-block:: javascript GET /v1/secrets/{uuid} Headers: Accept: application/octet-stream X-Auth-Token: Payload Response: ***************** .. code-block:: javascript 200 OK beer HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 200 | Successful request | +------+-----------------------------------------------------------------------------+ | 401 | Invalid X-Auth-Token or the token doesn't have permissions to this resource | +------+-----------------------------------------------------------------------------+ | 404 | Not Found | +------+-----------------------------------------------------------------------------+ | 406 | Not Acceptable | +------+-----------------------------------------------------------------------------+ .. _put_secrets: PUT /v1/secrets/{uuid} ###################### Add the payload to an existing metadata-only secret, such as one made by sending a POST /v1/secrets request that does not include the ``payload`` attribute. .. note:: This action can only be done for a secret that doesn't have a payload. Headers ******* +------------------+-----------------------------------------------------------+------------+ | Name | Description | Default | +==================+===========================================================+============+ | Content-Type | Corresponds with the payload_content_type | text/plain | | | attribute of a normal secret creation request. 
| | +------------------+-----------------------------------------------------------+------------+ | Content-Encoding | (optional) Corresponds with the payload_content_encoding | None | | | attribute of a normal secret creation request. | | +------------------+-----------------------------------------------------------+------------+ Request: ******** .. code-block:: javascript PUT /v1/secrets/{uuid} Headers: X-Auth-Token: Content-Type: application/octet-stream Content-Encoding: base64 Content: YmxhaA== Response: ********* .. code-block:: javascript 204 No Content HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 204 | Successful request | +------+-----------------------------------------------------------------------------+ | 401 | Invalid X-Auth-Token or the token doesn't have permissions to this resource | +------+-----------------------------------------------------------------------------+ | 404 | Not Found | +------+-----------------------------------------------------------------------------+ .. _delete_secrets: DELETE /v1/secrets/{uuid} ######################### Delete a secret by uuid Request: ******** .. code-block:: javascript DELETE /v1/secrets/{uuid} Headers: X-Auth-Token: Response: ********* .. 
code-block:: javascript 204 No Content HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 204 | Successful request | +------+-----------------------------------------------------------------------------+ | 401 | Invalid X-Auth-Token or the token doesn't have permissions to this resource | +------+-----------------------------------------------------------------------------+ | 404 | Not Found | +------+-----------------------------------------------------------------------------+ .. _secret_payload: GET /v1/secrets/{uuid}/payload ############################## Retrieve a secret's payload Accept Header Options: ********************** When making a request for a secret's payload, you must set the accept header to one of the values listed in the ``content_types`` attribute of a secret's metadata. Request: ******** .. code-block:: javascript GET /v1/secrets/{uuid}/payload Headers: Accept: text/plain X-Auth-Token: Response: ********* .. 
code-block:: javascript 200 OK beer HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 200 | Successful request | +------+-----------------------------------------------------------------------------+ | 401 | Invalid X-Auth-Token or the token doesn't have permissions to this resource | +------+-----------------------------------------------------------------------------+ | 404 | Not Found | +------+-----------------------------------------------------------------------------+ | 406 | Not Acceptable | +------+-----------------------------------------------------------------------------+ barbican-2.0.0/doc/source/api/reference/orders.rst0000664000567000056710000003352712701405673023236 0ustar jenkinsjenkins00000000000000********************** Orders API - Reference ********************** .. _get_orders: GET /v1/orders ############## Lists a project's orders. The list of orders can be filtered by the parameters passed in via the URL. .. _get_order_parameters: Parameters ********** +----------+---------+----------------------------------------------------------------+ | Name | Type | Description | +==========+=========+================================================================+ | offset | integer | The starting index within the total list of the orders that | | | | you would like to retrieve. (Default is 0) | +----------+---------+----------------------------------------------------------------+ | limit | integer | The maximum number of records to return (up to 100). | | | | (Default is 10) | +----------+---------+----------------------------------------------------------------+ .. _get_orders_request: Request: ******** .. code-block:: javascript GET /v1/orders Headers: Content-Type: application/json X-Auth-Token: {token} .. _get_orders_response: Response: ********* .. 
code-block:: none 200 Success { "orders": [ { "created": "2015-10-20T18:38:44", "creator_id": "40540f978fbd45c1af18910e3e02b63f", "meta": { "algorithm": "AES", "bit_length": 256, "expiration": null, "mode": "cbc", "name": "secretname", "payload_content_type": "application/octet-stream" }, "order_ref": "http://localhost:9311/v1/orders/2284ba6f-f964-4de7-b61e-c413df5d1e47", "secret_ref": "http://localhost:9311/v1/secrets/15dcf8e4-3138-4360-be9f-fc4bc2e64a19", "status": "ACTIVE", "sub_status": "Unknown", "sub_status_message": "Unknown", "type": "key", "updated": "2015-10-20T18:38:44" }, { "created": "2015-10-20T18:38:47", "creator_id": "40540f978fbd45c1af18910e3e02b63f", "meta": { "algorithm": "AES", "bit_length": 256, "expiration": null, "mode": "cbc", "name": "secretname", "payload_content_type": "application/octet-stream" }, "order_ref": "http://localhost:9311/v1/orders/87b7169e-3aa2-4cb1-8800-b5aadf6babd1", "secret_ref": "http://localhost:9311/v1/secrets/80183f4b-c0de-4a94-91ad-6d55251acee2", "status": "ACTIVE", "sub_status": "Unknown", "sub_status_message": "Unknown", "type": "key", "updated": "2015-10-20T18:38:47" } ], "total": 2 } .. _get_order_response_attributes: Response Attributes ******************* +----------+---------+--------------------------------------------------------------+ | Name | Type | Description | +==========+=========+==============================================================+ | orders | list | Contains a list of dictionaries filled with order metadata. | +----------+---------+--------------------------------------------------------------+ | total | integer | The total number of orders available to the user. | +----------+---------+--------------------------------------------------------------+ | next | string | A HATEOS url to retrieve the next set of objects based on | | | | the offset and limit parameters. 
This attribute is only | | | | available when the total number of objects is greater than | | | | offset and limit parameter combined. | +----------+---------+--------------------------------------------------------------+ | previous | string | A HATEOS url to retrieve the previous set of objects based | | | | on the offset and limit parameters. This attribute is only | | | | available when the request offset is greater than 0. | +----------+---------+--------------------------------------------------------------+ .. _get_order_status_codes: HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 200 | Successful Request | +------+-----------------------------------------------------------------------------+ | 401 | Invalid X-Auth-Token or the token doesn't have permissions to this resource | +------+-----------------------------------------------------------------------------+ .. _post_orders: POST /v1/orders ############### Creates an order Parameters ********** +----------------------------+---------+----------------------------------------------+------------+ | Attribute Name | Type | Description | Default | +============================+=========+==============================================+============+ | type | string | The type of key to be generated. Valid types | None | | | | are key, asymmetric, and certificate | | +----------------------------+---------+----------------------------------------------+------------+ | meta | | Dictionary containing the secret metadata | None | | | dict | used to generate the secret. | | | | | | | +----------------------------+---------+----------------------------------------------+------------+ .. _post_orders_request: Request: ******** .. 
code-block:: javascript POST /v1/orders Headers: Content-Type: application/json X-Auth-Token: {token} Content: { "type":"key", "meta": { "name":"secretname", "algorithm": "AES", "bit_length": 256, "mode": "cbc", "payload_content_type":"application/octet-stream" } } .. _post_orders_response: Response: ********* .. code-block:: none 202 Created { "order_ref": "http://{barbican_host}/v1/orders/{order_uuid}" } .. _post_orders_response_attributes: Response Attributes ******************* +----------+---------+--------------------------------------------------------------+ | Name | Type | Description | +==========+=========+==============================================================+ | order_ref| string | Order reference | +----------+---------+--------------------------------------------------------------+ .. _post_orders_status_codes: HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 202 | Successfully created an order | +------+-----------------------------------------------------------------------------+ | 400 | Bad Request | +------+-----------------------------------------------------------------------------+ | 401 | Invalid X-Auth-Token or the token doesn't have permissions to this resource | +------+-----------------------------------------------------------------------------+ | 415 | Unsupported media-type | +------+-----------------------------------------------------------------------------+ .. _get_unique_order_metadata: GET /v1/orders/{uuid} ##################### Retrieves an order's metadata .. _get_unique_order_request: Request: ******** .. code-block:: javascript GET /v1/orders/{order_uuid} Headers: Accept: application/json X-Auth-Token: {token} Parameters ********** None .. _get_unique_order_response: Response: ********* .. 
code-block:: javascript 200 Success { "created": "2015-10-20T18:49:02", "creator_id": "40540f978fbd45c1af18910e3e02b63f", "meta": { "algorithm": "AES", "bit_length": 256, "expiration": null, "mode": "cbc", "name": "secretname", "payload_content_type": "application/octet-stream" }, "order_ref": "http://localhost:9311/v1/orders/5443d349-fe0c-4bfd-bd9d-99c4a9770638", "secret_ref": "http://localhost:9311/v1/secrets/16f8d4f3-d3dd-4160-a5bd-8e5095a42613", "status": "ACTIVE", "sub_status": "Unknown", "sub_status_message": "Unknown", "type": "key", "updated": "2015-10-20T18:49:02" } .. _get_unique_order_response_attributes: Response Attributes ******************* +--------------------+---------+----------------------------------------------------+ | Name | Type | Description | +====================+=========+====================================================+ | created | string | Timestamp in ISO8601 format of when the order was | | | | created | +--------------------+---------+----------------------------------------------------+ | creator_id | string | Keystone Id of the user who created the order | +--------------------+---------+----------------------------------------------------+ | meta | dict | Secret metadata used for informational purposes | +--------------------+---------+----------------------------------------------------+ | order_ref | string | Order href associated with the order | +--------------------+---------+----------------------------------------------------+ | secret_ref | string | Secret href associated with the order | +--------------------+---------+----------------------------------------------------+ | status | string | Current status of the order | +--------------------+---------+----------------------------------------------------+ | sub_status | string | Metadata associated with the order | +--------------------+---------+----------------------------------------------------+ | sub_status_message | string | Metadata associated with the order 
| +--------------------+---------+----------------------------------------------------+ | type | string | Indicates the type of order | +--------------------+---------+----------------------------------------------------+ | updated | string | Timestamp in ISO8601 format of the last time the | | | | order was updated. | +--------------------+---------+----------------------------------------------------+ .. _get_unique_orders_status_codes: HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 200 | Successfully retrieved the order | +------+-----------------------------------------------------------------------------+ | 400 | Bad Request | +------+-----------------------------------------------------------------------------+ | 401 | Invalid X-Auth-Token or the token doesn't have permissions to this resource | +------+-----------------------------------------------------------------------------+ | 404 | Not Found | +------+-----------------------------------------------------------------------------+ .. _delete_unique_order: DELETE /v1/orders/{uuid} ######################## Delete an order .. _delete_order_request: Request: ******** .. code-block:: javascript DELETE /v1/orders/{order_uuid} Headers: X-Auth-Token: {token} Parameters ********** None .. _delete_order_response: Response: ********* .. code-block:: javascript 204 Success .. 
_delete_order_status_codes: HTTP Status Codes ***************** +------+-----------------------------------------------------------------------------+ | Code | Description | +======+=============================================================================+ | 204 | Successfully deleted the order | +------+-----------------------------------------------------------------------------+ | 400 | Bad Request | +------+-----------------------------------------------------------------------------+ | 401 | Invalid X-Auth-Token or the token doesn't have permissions to this resource | +------+-----------------------------------------------------------------------------+ | 404 | Not Found | +------+-----------------------------------------------------------------------------+barbican-2.0.0/LICENSE0000664000567000056710000002614712701405673015437 0ustar jenkinsjenkins00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. 
"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." 
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. 
Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. 
Copyright 2013, Rackspace (http://www.rackspace.com) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. barbican-2.0.0/ChangeLog0000664000567000056710000017346112701406023016174 0ustar jenkinsjenkins00000000000000CHANGES ======= 2.0.0 ----- * Change Table name to correct name * Update .gitreview for stable/mitaka 2.0.0.0rc1 ---------- * Add release notes for metadata api * Fix publishing of api-guide * Add a configurable setting in barbican-functional.conf for timeouts * Ensure that smoke tests cleanup their containers * Add PKCS#11 upgrade release note * Cleanup containers after functional tests run * Add barbican-manage release notes * Updated from global requirements * Return 404 Not Found when UUID is invalid * Publishing API Guide to OpenStack site * Moved CORS middleware configuration into set_defaults * Changes max string length for URL to 255 * Correct cert event plugin name in config * Fix 500 server error invalid transport key during secret creation * Throw 405 when specified method not allowed in Secret Metadatum * Improve error code for invalid secret creation with KMIP * Add missing unit test for clean_command and fix error handling * Remove use of old bandit.yaml * Update Python classifier for 3.4 * Nit: occurrences of barbican in small letter * Fix index for API secrets user-guide * Make clean up of soft deletions configurable 2.0.0.0b3 --------- * Change the type of 'bit_length' from string to integer * User Metadata API and tests * Introducing barbican-manage utility command * Introduce 
User-Meta table, model, and repo * Fixing project title * User Metadata Documentation * Simple soft deletion clean up for barbican-db-manage * Use assertEqual/Greater/LessEqual/IsNone * Fix typo in word "initialization" * Simplify the development environment setup * Updated from global requirements * Removing orphan and deprecated doc page: api.rst * Make bandit voting as part of pep8 * Fix roles attribute for barbican request context * Avoid using `len(x)` to check if x is empty * Fix typos in repositories.py * Fix gate that broke due to recent devstack renames * Fix http 500 when getting secret payload with no Acccept header * Updated from global requirements * Delete deprecated barbican-all script * Update and reorganize the doc landing page * Updated from global requirements * Fixing pkcs11_kek_rewrap script * Document public secret type * Cleanup barbican-api-paste pipeline * Document Symmetric Secret Type * Updated from global requirements * Update .gitignore for pyenv * Remove erroneous installing of python-nss * Use local images instead of references * Add Name restrictions in ContainerValidator * 's' to uppercase in "Openstack" * Updating the project name to barbican * Typo change Barbican to barbican Closes-Bug: 1542508 * Updated from global requirements * Fixed invalid conf file name * Fix spell typo * Add missing database constraints and defaults * Create Orders Documentation * Use host href for version discovery * Remove padding from legacy stored secrets * Added CORS support to Barbican * Updated from global requirements * Updates python3.4 dependencies in docs * Trivial Fix: Replace exit() with sys.exit() * Casts algorithm to lower case in KMIP plugin * Add missing parameter to the PKCS11 object * Add secret=True so passwords do not get logged * Updated from global requirements * Adding auditing support for API request using audit middleware * Using dict.items() is better than six.iteritems(dict) 2.0.0.0b2 --------- * Adding check on Offset * Fix 
buffer size for decrypt operations * Correct a typo * Imported Translations from Zanata * Updated from global requirements * Adding support for configuring number of async worker processes * Reworded sentence fragment in the README * Replace None comparisons in SQLAlchemy code * Remove pep8 error in symantec plugin * Fix containers api doc typo * Replaced outdated Barbican devstack link * Add wsgi script to deploy Barbican behind Apache * Fix argument reversal error in pkek cache * Reimplement p11_crypto and pkcs11 modules * Remove openstack-common.conf * Updated from global requirements * Add lock for crypto plugin manager instantiation * Warning about tox not working in Vagrant setup * Updated from global requirements * Update ContainerValidator to Check for Name Max Length * test: make enforce_type=True in CONF.set_override * Use assertTrue/False instead of assertEqual(T/F) * Addressing error by clearing sqlalchemy session leak * Updated from global requirements * Trival: Remove unused logging import * Python 3 deprecated the logger.warn method in favor of warning * Change assertTrue(isinstance()) by optimal assert * Updated from global requirements * Adding cffi to requirements * Updated from global requirements * Removes MANIFEST.in as it is not needed explicitely by PBR * Fix parameter's type error in test case * Updated from global requirements * Updated from global requirements * Added ACL tests * Fix db error when running python34 Unit tests * Remove low-level PyKMIP test asserts * Correct docs for barbican project structure * Remove obsolete shell command files * Remove version from setup.cfg 2.0.0.0b1 --------- * Authorized API Requests * Assigning oslo config CONF once to parsed barbican CONF instance * Added container consumer end to end flow testcase * Add reno for release notes management * Make API Document Code-Blocks Consistent * Updated from global requirements * Allow length to be passed in MKEK Creation * Updated from global requirements * Add 
information on running individual Tests * Updated from global requirements * Fix troubleshooting.rst broken link * Remove useless requirements * Remove kombu useless requirement * Updated from global requirements * Enable pkcs11-key-generation command * Updated from global requirements * Remove unused scrub variables in barbican.conf * remove default=None for config options * Move Key gen script to cmd folder * Fix Database Migrations Documentation * fix typo * Make variable defined before refer * Removing no longer used class TestCryptoPlugin implementation from test code, to avoid confusing * Change unit tests in test_utils.py and test_contaiers.py to use CONF.set_override * Correct the returned message when bit_length is absent * Allow null content type for orders * Allow null values for secret names * Added secret flag to oslo config params * Update Devstack deployment and docs * Add more unit tests in test_validators.py for schema validation * Add some unit tests regarding validation of secrets * Updated from global requirements * Imported Translations from Zanata * Replace assertFalse(a in b) with assertNotIn(a, b) * Remove old gate code * Updated from global requirements * Adds documentation for consumer resource * Fix spelling of HATEOAS * Updated from global requirements * Fix Intermittant Unit Test Failure in Repositories * Replace assertEqual(False, result) with assertFalse(result) * Change assertEqual(None, result) to assertIsNone(result) * Updated from global requirements * Replace assertTrue(a in b) with assertIn(a, b) * Updated from global requirements * Updated from global requirements * Add more information to debug log message for consumer delete * Updated from global requirements * Fix Intermittant Unit Test Failure * Remove duplicate words in documentation * Add RBAC docs for Cloud Administrator Guide * Updated from global requirements 1.0.0 ----- * Fix db_manage to initialize mysql from base * Enforce project ownership of subCAs * Check a CA's 
status as project and preferred CA before deleting * Add subca functionality to the dogtag plugin * Update .gitreview to match stable/liberty * Exit with error code when db_manage.py fails * Updated from global requirements * py3: Enable more tests to Python 3.4 * Enforce project ownership of subCAs * Check a CA's status as project and preferred CA before deleting * Python 3 Refactoring: Replace six.iteritems() with the preferred items() * Make tests.api.controllers.test_containers py3 compatible * Add subca functionality to the dogtag plugin * Making barbican endpoint selection values to be configurable attributes * Fix order of arguments in assertEqual * Fix order of arguments in assertEqual * Fix order of arguments in assertEqual * Exit with error code when db_manage.py fails * Fix order of arguments in assertEqual * Fix order of arguments in assertEqual * Fix db_manage to initialize mysql from base * Use environmental variables for NewRelic * Fix comment and remove unneeded code 1.0.0.0rc1 ---------- * Use subCA when specified to sign CSRs * Catch exceptions raised by bad certificate plugin info * Add missing changes for Alembic time-zero * Add reference guide documenting Certificate Authorities API * Fix Snakeoil to return expiration timestamp in string format * Ensure Alembic version modules bootstrap new db * Cleanup of Translations * Open Mitaka development * Add database commit for database writes when async operations are possible * Change definition of Certificate Authority Model for no soft deletes * Add check to validators that SubCA's project id matches order's project id * CAs should return the external project ID * Remove ERROR_ON_CLONE references * Remove quotes from subshell call in bash script * Document dependencies installation for Fedora * Remove oslo-incubator documentation that's no longer valid * Add support in snakeoil plugin for intermediates * Change behavior of GET cas/preferred * Changes to Preferred CA Features * Add default quota 
limit config to functional tests * Combine exit codes of the two functional test runs * Adding Functional Tests and Supporting Fixes for Global Preferred CAs * Add missing X-xxxx HTTP headers to the unauth context * Change roles to rules in policy.json file * Initialize Database Before Running Quota Enforcer Unit Tests * Fix ca related controllers * Rename Quick Start Guide * Imported Translations from Zanata * Clean up CAs Policy Rules * Updated from global requirements * Remove .pyc files before performing functional tests * Cleanup Secrets created after Order functional tests * Updates quota values to be read from conf file * Finish Initialization of CA Table when Barbican Starts * Add function to catch unknown attributes in URI * Handle case of no logging environment variables * Add barbicanclient clone back (was overzealous in pruning) * Add functional test for project CA * Remove content related to transport keys and quotas * Add filter to secret list for acl secrets * Use testr for running functional tests and documentation * Add DELETE functionality for subCAs * Remove bad clones (new devstack method doesn't need this) * Set host_href parameter in devstack * Update Bandit Tox Environment * Fix Tempest Installation * Add Project Quota Support for Sub CAs * Remove Vestigial Transport Key Quota Code * Added functional tests for creating CAs * Add subca functionality to snakeoil plugin * Use barbican.conf instead of barbican-api.conf * API documentation for CAs interface * Add validators for new CA creations * Make tests in barbican.tests.model py3 compatible * Make alembic used in barbican similar to other openstack projects * Support for creation of subordinate CAs * Force Identity API v3 for endpoint create in devstack * PyKMIP 0.4.0 test requirement * Fix devstack gate (and new gate_hook.sh) * Completes localization of the api directory structure 1.0.0.0b3 --------- * Add Project Quota Information to the Barbican Quick Start Guide * Stanardized Functional 
Tests * Removes pyenv from barbican.sh * Update API Reference Docs with Project Quota Information * Add Functional Tests for Quota Enforcement * Implement Enforce Method for QuotaEnforcer * Change test_containers unit test to work around webtest issue * Add Request ID to worker-side logs in barbican * Replace dict.iteritems() with dict.items() * Updated from global requirements * Use the new Devstack external plugin method * Add get_count() method to project resource repositories * Ensure Project and ProjectQuotas Tables are in sync * Allow debug flag in Barbican tox and debugging documentation * Removing unused dependency: discover * Fixed Testing Attributes Bug * Updated from global requirements * Migrate to using gate_hook.sh pre-devstack-update * Cleanup database after functional test runs * Updated from global requirements * Updated from global requirements * Add Quota Enforcement API * Integrated with PyKMIP Pie API * Use config rather than hardcoded admin id from Quotas test * Make tests in test_acls.py and test_cas.py py3 compatible * Implement Models and Repositories for Resource Quotas * Updated from global requirements * Replace itertools.ifilter with six.moves.filter for py3 compatibility * Introduce the key-manager:service-admin role * Updated from global requirements * Make tests in barbican.tests.api.middleware py3 compatible * Use "key-manager" instead of "keymanagement" * Ensure a http 405 is returned on container(s) PUT * Updated from global requirements * Drop downgrade field in alembic script.py.mako and version * Made Functional Test Key 256 Bits * Make tests in barbican.tests.tasks py3 compatible * Make tests in test_hrefs, test_quota py3 compatible * Remove obsolete term "incubated" from docs * Remove rpmbuild directory * Introduce the concept of plugin status * Replace python-ldap with ldap3 library * Catch any exception from base64.b64decode during validation * Updated from global requirements * Skip Bandit Checks on Functional Test Code * 
Removing Cloudkeep from comments * Updated from global requirements * Imported Translations from Transifex * Replace dict.iteritems() with six.iteritems(dict) for py3 support * Replace urllib with six.moves.urllib for py3 support * Add py34 support in tox * Add invalid property info to validation error message * Imported Translations from Transifex * Updated from global requirements * Revert backwards incompatible paste change * Flatten exceptions used in policy tests * Fix colorized logging in Devstack 1.0.0.0b2 --------- * Imported Translations from Transifex * Updated from global requirements * Configure dogtag installation's domain name * Unauthed tests for Secret & Container ACLs * Remove docbook documentation * Updated from global requirements * Add RBAC Functional Test for ACL Opeations * Updated from global requirements * Add Private Key to the resulting container if stored-key order * Added opaque data support to KMIP secret store * Updated from global requirements * Implement Configuration, Controllers, and Validators for Resource Quotas * Fix unit test errors caused by new mock version * Add retry server and functional tests to DevStack * Fix policy.json certificate_authority inconsistency * Imported Translations from Transifex * Adding script for rewrapping p11 KEKs * Remove invalid skipping of tests * Completed localization tagging for plugin directory * Fix JSON structure example for stored-key orders * Fix config file name to barbican.conf * Drop file extensions for /usr/bin/* * Add Functional Tests for ACLs Using Multiple Users * Updated from global requirements * Fix gate-barbican-docs after change to infra * Update unwrap key to accept specific variables * Add troubleshooting for _bsdbb import error * Fix for admin and creator user access for secret/container read calls * Update queries to use proper offset and limit 1.0.0.0b1 --------- * Remove left over reference to admin endpoint * Remove ProjectSecret table-related code * Make db-manage script 
use same config file as barbican * Replace oslo incubator code with oslo_service * Updated from global requirements * Fill project_id in secrets where needed * Added unit test around bug related to who can modify ACL * Updated from global requirements * Refactor Barbican model registration * Added Certificate API Docs and Quick Start Guides * Display all versions info in versions controller * Changed Test Key Size to 2048 * Update version for Liberty 1.0.0a0 ------- * Replace oslo incubator code with i18n * Replace oslo incubator code with oslo_utils * Added passphrase support to KMIP secret store * Added certificate support to KMIP secret store * Add project_id to Secret model * Updated from global requirements * Replace oslo incubator jsonutils with oslo_serialization * Updated from global requirements * Fixed Inconsistent Request Id in Log Messages * Sync with latest oslo-incubator * Changes to fix dogtag nss db handling * service_enabled instead of environment variable for dogtag * Updated from global requirements * Change naming convention for Barbican config files * Remove duplicate 'have' in doc/source/api/reference/acls.rst * Fix NewRelic error reporting * Updated from global requirements * Add RBAC tests for orders * Fix http 500 when no accept header passed to secret GET /payload * Added pkcs1_only Configuration to KMIP * Documentation for PKCS11 Key Generation script * Updated from global requirements * Complete RBAC tests for containers * Renaming ACL creator-only to project-access flag * Adding a new script to generate mkek and hmac * Updated from global requirements * Centralize Barbican API Config * Complete RBAC tests for secrets * Removed per ACL operations and added support for PUT method * Adding documentation for ACLs operations * Fix cert tasks not being scheduled for retry * Imported Translations from Transifex * Drop incubating theme from docs * Add more users/roles to secret/container RBAC tests * Fix Dogtag setup script * Remove unused 
incubated cryptoutils * Split out generate mkek and hmac from get command * Updating setup docs to use Python 2.7.9 * Switch from MySQL-python to PyMySQL * Move policy options to the oslo_policy group in the config * Splitting out PKCS11 plugin * Fix the sql_pool_logging config attribute * Fixing unauthenticated middleware role bug * Updated from global requirements * Adding config option for specifying HSM slot * Base64 encode the cert returned from the Dogtag plugin * Imported Translations from Transifex * Port the Architecture, Dataflow, and Project Strucure docs * Add Multi-user support for Functional Tests * Fix snakeoil_ca plugin * Updated from global requirements * Removed extraneous config.py * Remove deprecated references to admin endpoint * Updated from global requirements * Add Barbican configs for SQLAlchemy pool settings * Imported Translations from Transifex * Updated from global requirements * Adding a info log for each processed request * Migrate to oslo_context * Adding more logging around containers & consumers * Updated from global requirements 2015.1.0 -------- * Fixed Bug for KMIP Secret Storage * Fixed Bug for KMIP Secret Storage * Drop use of 'oslo' namespace package 2015.1.0rc2 ----------- * Fix for missing id check in ACL count query * Improved error code handling for pkcs11 errors * Remove Future Parameters (write, list, delete) from ACL Validation Schema * Adding ACL check when new stored key order is submitted * Fix for missing id check in ACL count query * Removing signing_dir directive from config * Fix failure with get on dict that was None * Fix call to load_privatekey() when passphrase is None * Fix call to load_privatekey() when passphrase is None * Updated from global requirements * Removing signing_dir directive from config * Updated from global requirements * Fix failure with get on dict that was None * Security tests for Secret resources * Updated from global requirements * Update .gitreview to match stable/kilo * Refactor RSA 
Functional Smoke Tests 2015.1.0rc1 ----------- * Refactor and Fix Translation Code for PER and DER Formats * Add order_retry_tasks migration per latest model * Readability-related changes to secret store functions * Adding MySQL fixes to migrations * Refactor dogtag gate scripts * Fix Dogtag plugin to make latest functional tests pass * Fix KMIP Secret Store input/output * Potential resource exhaustion when registering consumers to containers * Fix handling of payload_content_encoding for orders * Add utility functions to convert between and PEM and DER * Fix base64 decoding of payloads in one-step POST * Fix errors in functional tests * Fix generating a CSR with an encrypted private key * Enable alternate error message for OpenSSL 1.0.2 * Imported Translations from Transifex * Fix expectations of order certificate test cases * Sign CSRs issued in SnakeOilCA tests * Imported Translations from Transifex * Add new smoke tests for RSA type containers and secrets * Expose root cause plugin exceptions * Create Barbican python scripts for development * Open Liberty development * Delete openstack.common.context * Changes to get remaining cert functional tests working * Switch to oslo_policy * Add Bandit security static analysis checking via tox * Security tests for Order resources * Return container not found before ACL checks * Remove str() casting for the client_message variable * Imported Translations from Transifex * Imported Translations from Transifex * Fixing python 3 imcompatiblity in common.utils * Updating hacking version for py3pep8 tox job * Adding simple log.info's to the SecretController * Security tests for Consumer resources * Imported Translations from Transifex * Implement validators and tests for stored key certificate orders * Adding policy layer changes for ACL support (Part 5) * Adding Container ACL controller layer changes (Part 4) * Adding Secret ACL controller layer changes (Part 3) * Adding ACL db repository changes (Part 2) * Adding ACL db model 
changes (Part 1) * Updating Orders functional tests to new naming convention * Adding docs to index and minor fixes * Imported Translations from Transifex * Restore worker tasks processing catching exceptions * Adding GET and DELETE for containers quickstart guide * Fix error in setting and updating ca and preferred ca tables * Create snakeoil certificate plugin * Imported Translations from Transifex * Introducing container types and examples to quickstart guide * Initial connect up retry task submit and re-enqueue * Write task retry info to database from server.py * Creating initial commit for containers quickstart guide * Removing a forgotten TODO * Adding reference doc page for containers * Allow business logic and plugins to retry tasks * Turning on info logging level by default * Fix string formatting for a secret store exception message * Add ability to run secrets tests in parallel * Security tests for Container resources * Completing secret reference documentation * Add utf-8 decoding for Content-Type * Imported Translations from Transifex * Adding more detail to the secrets quickstart guide * Switching how we handle sessions in p11_crypto * Changes to get Dogtag related functional tests working * Fix some ca_id related bugs, add more functional test code * Fix CA related exceptions, and unskip relevant tests * Removing unused TimeKeeper class * Splitting out remaining order tests from test_resources * Fix pep8 gate errors * Adding more content to the api reference for secrets * Fix flake8 issue * Updated from global requirements * Removing the debian folder * Add functional tests for certificate orders * Fix common misspellings * Imported Translations from Transifex * Updated from global requirements * Add retry periodic task and worker-client logic * Add sub-status logic to worker/task processing * BarbicanHTTPException can take arguments for client_message * Starting to rework docs around the secret resource * Fix string substitution in exception 
messages missing the s 2015.1.0b3 ---------- * Fix string substitution in exception messages missing the s * Change certificate unit tests to use strong algorithms * Make the default devstack config use the right password * Add asymmetric key support to KMIP plugin * Fixing errors and warnings on the sphinx docs * Removed get_secret_type * Moving containers tests to separate module * Add code to populate CA tables and select plugin based on ca_id * Remove unused etc/dev_tempest.conf file * Refactor secrets functional tests for readability * Test functionality of generated asymmetric keys * Update README file * Imported Translations from Transifex * Implement validate_pkcs10_data * Deprecate old secret decrypt path both in code and docs * Use unique refs for RSA container example * Added new repository classes and controller classes for CAs * Standardized Secret Encoding * Updating secret store plugin to support PyKMIP 0.3.0 * Making sure we allow all content-types for delete calls * Third round of refactoring secrets tests * Porting more tests to test_secrets * add another missing status code check in functional tests * Fix functionaltest keystone URL fetch bug for v2 * Add missing alembic migration script for CA tables * Upping process-timeout and fixing posargs in tox.ini * Starting to split out orders from test_resources.py * check some responses in functional tests before using the resp * Fix create orders functional tests for asynchronous order processing * Making the tox -e functional tests run faster * Cleanup and renaming within secrets smoke tests * Remove extra v1 from override uri * Split out and refactored more secret tests * hide the eggs * Replacing functional test authentication hookup * Fix for order obj expiration issue with SQLAlchemy * Starting refactor of test_resources * Fixing test dependence on execution order * Enforce X-Project-Id coming from the request headers * Ensure that external secret refs cannot be added to containers * Ported API 
documentation to the repo * Updated from global requirements * Creating indexes for foreign keys * Fixing race-condition for order processing in workers * Enable secret decrypt through 'payload' resource * Imported Translations from Transifex * Let functional tests run with older tempest * Fixed Binary Encoding to Secret Stores * Updated from global requirements * Use oslo_log in db-manage script * Get rid of Repositories class * Use urljoin instead of os.path.join * Fix functional tests to use new auth provider interface * Validate character set used for payload when encoding is base64 * Adding NewRelic monitoring to worker tasks * Fix bug in tests assuming order is active * Update devstack to run tests both sequentially and in parallel * Remove version from endpoints in catalog * get_or_create_project now calls repo factory * Clean up test inheritance * Containers and Consumers controllers use repo factories * Refactor Secrets resource to use repository factories * Refactor Orders resource to use repository factories * Modified plugin contract to include barbican-meta-dto * Making RootController load child controller at runtime * Split override-url in functional test config file * Imported Translations from Transifex * Cleaning up application initialization * Using a central secret store manager to remove lock * Attempting to clean up some of the db session code * Fixing logging import for barbican-worker.py * Fixing unable to retrieve req id bug * Use dictionary comprehensions and literals * Add missing localisation markers * Imported Translations from Transifex * Remove pysqlite requirement * Add Barbican order metadata table and repository * Add support for dogtag in devstack testing * Updated from global requirements * Port to graduated oslo.log package * Sync with latest oslo-incubator * Fix security bug with host HTTP header field * Add container consumer repository factory * Removing uuid verification mock * Add clarification regarding _lookup in secrets 
and orders * Enforce secret and order IDs are valid UUIDs * Port the Database Migrations doc * Add validation for profile for Orders API * Adding ldap dependencies to documentation * Added mixin class to allow soft deletes * Added secret_type to Secret model * Imported Translations from Transifex * Add missing repository factory functions * Updating copyright on barbican/api files * Updated copyright dates for functional tests/models * Add subject_dn validator * Changing basic copyright for a section of functional tests * Refactor _lookup for orders controller * Imported Translations from Transifex * Configure colored logging in devstack * Refactor _lookup for secrets * Remove useless requirements wsgiref * Add code to generate a CSR in the stored key case * fix a use after free bug in the pkcs11 plugin * Fix "invalid credentials" error running functional tests * Fix symmetric/asymmetric key order meta validation * Replaced calls to iteritems with calls to six.iteritems * Imported Translations from Transifex * Cleaning up code duplication in hrefs.py * Cleaning up method identation in transportkeys.py * Cleaning up literal dict in validators.py * Refactor exception handling in the app side * Run functional tests against any barbican server 2015.1.0b2 ---------- * Imported Translations from Transifex * Add the ability to use either identity v2 or v3 API * Drop old namespace for some oslo libraries * Updated from global requirements * Change exception when store plugin is misconfigured * Trivial refactors to secret controller * Imported Translations from Transifex * Completely refactor PKCS11 plugin * Delete comments that are no longer valid * Renamed outputted keys from base model * Fix downgrade for revision 254495565185 * Fix error in "tenants to projects" migration script * Remove unnecessary checks from migration commands * Added new model classes for CAs * Add 'current' option to the migration script * Add 'history' option to the migration script * Handle 
SystemExit properly in migration script * Add support for simple cmc requests to Dogtag plugin * Updated from global requirements * Switch Python's json to the OpenStack's json wrapper * Updated from global requirements * Remove commented test cases * Inherit tests instead of explictly calling them * Updated from global requirements * Resolve intermittent HTTP 404 in devstack gate * Include logging for barbican functional tests * Fix content type validation if missing payload * Drop Python 2.6 support * Refactor order validation * Updated from global requirements * Add data model entities for OrderRetryTask * Fix 500 error when PUTing an order * Adding exceptions on bad key file permissions * Checking the certificate file has reasonable permissions * Return the actual name value for entities * Fix UnicodeDecodeError's in the functional tests * Updated from global requirements * Adding client certificates to connection credentials * Fix usage of keystone v3 * Only de-serialize objects when possible * Enable functional tests to take a regex from tox * Configure keystomemiddleware using identity_uri * Make default action return 405 in the controllers * Updated from global requirements * Moving exception logging in the base behaviors * Adding error handling to help debug devstack issue * Replace and remove native asserts * Add I18n-related unit tests (Part 3) * Plugin contract changes for the certificate-order-api * Add validation for certificate-order-api * Enable passing test regex to testr from tox * Remove invalid TODOs related to bug 1331815 * Replace instances of keystone_id from the code * Change keystone_id for external_id in model * Fixes crypto enabled plugins configuration 2015.1.0b1 ---------- * Fix content_type loading to be consistent * Updated from global requirements * Delete secret from plugin only if there's metadata * Setting the max secret bit_length size to be 32767 * Add I18n-related unit tests (Part 2) * Use keystone v3 credentials for functional 
tests * Support containers without a name * Removing conditional logic around KMIP tests * Actually run type order creation test * Notify user if no database is specified * Add I18n-related unit tests (Part 1) * Changing ModelBase.save to correct updated time * Replace 'tenants' for 'projects' in documentation * Replace model related instances of tenant * Make flake8 ignore *.egg * Imported Translations from Transifex * Dont set debug and verbose as our example * Updated from global requirements * Update log messages to oslo.i18n * Workflow documentation is now in infra-manual * Updated from global requirements * Remove py26 from tox.ini * Container deletion will now clean up Consumers * Add functional tests for order * Added smoke tests for consumers * Updated from global requirements * Add PyKMIP to requirements * Updated from global requirements * Moved secret functional tests to data driven tests * Updated from global requirements * Use canonical name for coverage job * Updated from global requirements * Updated from global requirements * Added test to check that an expired secret cannot be retrieved * Fix communication of secret_type info * Use "key-manager" for service type * Fixing remaining hacking violations * Added smoke tests for containers * Excluding alembic migrations from coverage * Cleaning up a couple nits in the Symantec plugin * The last round of secrets functional tests * Remove redundant else, none returned by default * First set of functional tests for orders * Adding basic functional tests for containers * Added smoke tests for orders * Replace trivial instances of tenant for project * Updated from global requirements * Port the Dependency Adding/Updating docs to Sphinx * Added second round of secrets functional tests * Port the troubleshooting documentation into Sphinx * Added first round of functional tests for secrets * Added support classes for secret functional tests * Adding simple getting involved doc * Adding docs around running tests 
and devstack * Taking a first stab at putting together setup docs * Attempting to fix the devstack gate job * add new diffcover target for a future non-voting gate job * Smoke tests for secrets in Barbican Functional Tests * sync global requirements now that pecan 0.8 is out * Add certificate plugin page * Add secret store plugin page * Updated version tests to include auth and unauth flavors * Add plugin sub-folder and augment crypto plugin doc * Move functionaltests into smoke or functional subdirectories * Bumping default ssl_version to TLSv1, in light of POODLE * Removing new_name argument from test_wrapper * Add missing \n at the end of file * Remove extraneous vim editor configuration comments * Delete usage-indications from the model docstrings * Adding keystone notification listener support 2014.2 ------ * Update to the latest global requirements versions * Imported Translations from Transifex * Adding tox job for local functional test dev * Adding a sample tempest config * Open Kilo development * Sync SQLAlchemy lifecycle to request cycle * Cleaning up secret functional tests * Code clean-up due to type order enhancement * Use canonical cover name for coverage * Adding parameterized decorators for unit tests * Fix error in two-step secret PUT with base64 * Use OpenStack Incubated branding for docs * Allowing all content-types for secret delete * Adding size limits for create secret json fields * Refactor secret functional tests using models and behaviors * Add asymmtric order validator * Changes to get certificate issuance flow working * Deduplicate HACKING.rst with docs.openstack.org/developer/hacking/ * Add asymmetric key generation in dogtag plugin * Pin Pecan to 0.6.0 to fix binary secret decrypts * Test the secret model using an in memory database * Adds to KMIP secret store test coverage * Switch barbican.sh to use testr * Typos 'asychronous' and 'formated' * removed tenant id from code samples * Bringing translation.py coverage up to 100% * Remove 
restrictive hard-coded orders validation * removed whitespace from pom.xml * Update Getting Started Guide to include tech review feedback * Adding sub-status and message to orders * Imported Translations from Transifex * Remove config parameter from secret_store.py interface * Modifying testr conf to suppress output * Fixing the PYTHONHASHSEED bug with our unittests * Updated dev guide to include feedback from previous tech review * removed image files as they referred to internal architecure * Switch to running tests in parallel with testr * Install tempest instead of just adding it to PYTHONPATH * PKCS11 refactor to use a master KEK and per project KEK * Adding support for allowing binary data as-is as part of PUT * Adding missing unit test on queue server tasks * Imported Translations from Transifex * Making a few MORE modules hacking 0.9.2 compliant * Adding initial update logic for orders * Move to oslotest 2014.2.b3 --------- * Add initial files for certificate event handling * Reorganize code to use store crypto plug-in * Remove some inline if/else statements * Replace explicit assertion for function * Make a whole host of modules hacking 0.9.2 compliant * Updating API unit and functional tests to new hacking standards * Imported Translations from Transifex * Making a few modules hacking 0.9.2 compliant * Additional work on certificate processing * Updated Create Secret request and response * Updated Get Secrets request and response * Initial connect orders resource to certificate processing * Updated Get Orders request and response * Add a py3pep8 tox job. 
This will verify py3 compliant syntax * Imported Translations from Transifex * Fix Container list to properly format secret_refs * fix for bug #1359197 * fix all the log statments to use %s fomatting * Add order plugin metadata entity and logic * Add certificate_manager functionality to dogtag plugin * Allow devstack to do git clone of barbican * Remove second setUp() call in tests * change CryptoPluginManager to be instantiated in the module scope * Imported Translations from Transifex * Edited docs to improve context * Minor cleanup and moving around code for clarity * Add more type in order post * Update versionbuild script to handle setup.cfg version * Replacing backslashes * Adds KMIPSecretStore and unit tests * Install of a Barbican RPM fails due to missing keystonemiddleware * Imported Translations from Transifex * Removing symantec as a default plugin * Updating symantec plugin docstrings * First attempt at adding the symantecssl library * Revert remove version from setup.cfg * Install sqlite-devel package on fedora * Refactor secret_store for consistency * remove project-id from resource URIs * Replace hard-coded setup version setting * Fixed misspelling in error message * Make transport_key an optional arg in SecretDTO * Add Certificate Interface & Symantec Plugin * Clean old comments (already implemented) * Add support to Barbican for consumer registration * Force uWSGI to set "Connection: close" header * Adds store_secret_supports to secret_store * Remove remaining skipTest * Add code to retrieve secrets metadata and data with transport key * autodoc import error for plugin.rst * Replace skipTest in favor of decorator * Eager load KEKDatum record when EncryptedDatum is retrieved * Code to pass through transport_key_id when storing secret * Correct container create response code to be 201 * Adding doc sections to tox.ini * code to retrieve transport key on metadata request * Set python hash seed to 0 in tox.ini * Imported Translations from Transifex * 
Add tempest tests to run in devstack gate * Use auth_token from keystonemiddleware * Cleaning up index.rst file * Don't log message for keystone ID query when supressing exceptions 2014.2.b2 --------- * Added duplicate secret_id validation in a container request * Fix accepting longer PKI keystone auth requests to Barbican * Imported Translations from Transifex * Bug fix 1336995 DateTime type only accepts Python * removing dead code from common.resources * Remove dead code from config * Replace should_fail for should_raise in tests * Fix 500 error for secret PUT * Fix Dogtag plugin and tests to match current secret_store API * Implement content-type HTTP header enforcement * Change function and import names for readability * Correctly ordering requirements.txt * Move crypto package contents to the new plugin structure * Refactor json validator * Correct default bit_length to match schema constraint * Replaced some UTF-8 characters with ASCII characters * Update from global requirements * Update devstack endpoint creation to include v1 * Increase test coverage to 100% in validators.py * Increased test coverage for app and updated .gitignore * Removing an unnecessary catch * Excluding the docs target folder from flake8 * Pin barbican above the most recent version of pecan * Disable empty or null names for crypto plugins * editing for gramatical and english errors * Ensure that secrets within orders have expiration date isoformatted * Add dogtag plugin to new plugin structure * Increased common/utils.py test coverage to 100% * Refactor secret validation for redability * Adding test cases for more coverage * Refactor allow_all_content_types for readability * Don't package dependencies available in RDO * Use oslo-config and oslo-messaging from RDO * Restructure project to accommodate plugin types * Pass tests in newer versions of pecan * Addes intermediates to certificate container * Add **kwargs to on_ controllers * fix for - JSONErrorHook is not setting content type 
to JSON * Fix for Unicode-objects must be encoded before decryption exception * remove default=None for config options * Ensure that datetimes are isoformatted * Add certificate to the container type option * Fixed several typos * replace iterator.next() with next(iterator) * fix to include data migration script * Adding 3 new columns Type, Meta and container_id to Orders * Add SecretStore interface * Fix data migration script error * Adding SecretStoreMetadatum to the list of MODELS * Crypto dev plugin now uses per-tenant KEKs * Prepare barbican for localization * Replace nonexistent function in test * Usage of __metaclass__ not necesary with six.add_metaclass * Enable tox to upgrade packages if needed * Add a local.conf for easier DevStack spinning up * Adding docbook-based docs * enable all hacking checks * Update the links to python-barbicanclient * Prefer os.urandom to PyCrypto's PRNG * Beginning transition from pycrypto to cryptography * More Hacking Violation Fixes * Update to oslo-incubator (caed79d) * Additional Hacking violation corrections * enable hacking check H234 * unpin iso8601 dependency * Fixed some Hacking violations * major hacking fixes * Fixing A Few Hacking Violations * Add TransportKey as a resource * Update Plugin Development documentation * Changing from stackforge to openstack project * Change name of SecretMetadatum table to SecretStoreMetadatum * Update .gitreview file for new repo name * Fix flake8 errors in migration version file * Lock Pecan version to 0.5.0 for now, https://review.openstack.org/89849 * Fix dogtag unit tests * Remove extra parameter for generate_symmetric & generate_asymmetric in test plugin * Fixed http 500 due to mismatch between ResponseDTO and tuple from plugin encrypt * Fixed a typo in a comment * Pass secret metadata to crypto extension manager for plugin search * Remove redundant code on offset, limit parameters * Implement the REST API with pecan * Increase DevStack uWSGI buffer for Tempest support (Keystone 
Headers) * change assertEquals to assertEqual * Adds SecretMetadatum table * Extend crypto plugin to support more key types * Metaclass Python 3.x Compatibility * Ensure uWSGI process in DevStack is actually killed * Fix a bug with handling of binary data * Add configuration items to limit secret and request sizes * Add Sphinx developer documentation skeleton * Add Dogtag crypto plugin * correcting spelling error in README file 2014.1 ------ * Support for debug mode start in barbican * Update crypto plugin interface to support Dogtag * Clean up Verifications resource * Revive Alembic-based database migration logic * Remove v1 from the falcon app route * Housekeeping, re-alphabetize requirements * Make max api request size configurable * Barbican should not do rbac on version api call * Include discover in test-requirements * Switch to testtools and make barbican compatible with Python2.6 * Fix string format used to report content validation errors * Correct RBAC response codes * Create secret - case insensitive Content-Type check 2014.1.b3 --------- * adding placeholder for endpoints * Barbican uWSGI stats server listen on localhost * Minor changes to make the bash8 check happy * Adding more msgs, checking admin/non-admin endpoints * Modify python-dateutil RPM build process * Fixing bug 1287993 - logging issue * Correcting bash script pre-test-hook * Remove oslo.uuidutils * Locking the version of iso8601 * Adjust contrib location, minor adjustments * Adjust location of the functionaltests folder to root of project * use of more readable policy language in policy file * Adding DevStack Support * Adds container endpoints to Barbican API * Tell install to create files as 644 instead of 755 * Removes dead and unneeded code * Add script to ensure semantic versions work with continuous build * Specified content type is dropped on single step create secret * add fpm packaging to satisfy oslo.messaging dependency for pyYaml * Add queue configuration changes to support HA 
Rabbit queues * Initial checkin of doc source * fix for - create a secret - doesn't trim content type * Support building wheels (PEP-427) * exclude abstractmethod from coverage * fix for testr * Switch to testr * Set rpm %_topdir macro during build * Removed dead code and updated tests to cover 100% for crypto * Remove custom install_command for argparse * Enable tag-based full version reporting via pbr 2014.1.b2 --------- * Add RPM package dependencies needed for Barbican nodes post oslo.msg/pbr * Alphabetizing requirements as per OpenStack best practices * Switching to use VIRTUAL_ENV env variable * Fixing issue around error messages not being populated * Update crypto unit test coverage * Add RPM package dependencies needed for Barbican API/Worker nodes * Removed scripts that are not being used * Remove PostgreSQL dependencies from Barbican * removing run_tests.sh * Updating copyright year * Adding invalid_property attr to support test code * Sync the kombu and amqp versions * Remove lingering celery dependencies out of RPM dependencies * Enabled branch coverage for barbican. 
Took out inclusive coverage * Fix file copy bug with the barbican installer * Removing bin/versionrelease * Updating RPM build version * Attempting to fix the tox environments * Fix RBAC unit test failure due to uninitialized mock * Sync with global requirements * Fixed to un-quote the name parameter that gets used when querying for secrets * Removes unneeded details from the error message * Migrate to pbr * Replace Celery with Oslo messaging for asynchronous queuing * Expand secrets list API to allow for filters on name, mode, algorithm, and bit length * host_href should include scheme * Fixed response object in the Verifications GET list resource method * Add .mailmap file * Add verification REST service to Barbican * Line wrap README, revise IRC channel to #openstack-barbican * Fix automatic signing * Sign RPM builds * Update Launchpad URL * add link to bug tracker * Removed README.rst in favor of README.md * Remove Celery and Kombu version pinning * Gracefully handle request for binary data as plain * Update the keystone configuration script for Barbican per Johns Hopkins team * Add a better installer for local Barbican application instances * Fix spelling error, and add uWSGI stats server support * Pin billiard version to 2.7.3.34 * Fix for bug/1238895: normalize plain text mime types * Change the queue 'broker' config to allow a list of brokers * Add back a explicit barbican-worker.py install line * Remove unneeded Barbican worker script from generated RPM * Add files to create worker RPM and get api/db/queue/worker nodes deployed * Modify version template for the final OpenStack Havana release * Fix KEK generation in the P11 Plugin + fix unit tests * Change from using 'cypher_type' to 'mode' in API and code base * Remove duplicate TenantSecret on secret PUT call * Remove base64 encoding feature from decrypted secrets GET API * Refine the Secret metadata response by *not* returning the 'content-encodings' * Add the error reason to the Order entity if async 
processing fails * Refactor plugin API methods * Update 404 message * Freeze amqp version for rpm packaging * Update dependency packaging * moar dependencies thanks to oslo * Add missing content type and encoding business logic after MIME-type revamp * Remove uwsgi requirement * Added total number of secrets/orders available to response * Add new dependencies to rpm * adds lazy app loading to the uwsgi inis * Add role based access control (RBAC) to Barbican * Handle missing cyper_type and algorithm fields * Remove padding code and tests in the p11 plugin * Fix order creation bug involving content types * Add mock testing for the p11_crypto plugin * Fix rpm build * P11CryptoPlugin is now functional * Update P11 plugin to function & match new plugin contract * First attempt to integrate Paul's HSM plugin into Barbican * p11 plugin work * Added hacking target to tox * Change the location of the New Relic config file * Mime Type Revamp * Add New Relic application monitoring * Fix PyCrypto rpm package name built by fpm * Package PyCrypto 2.6 into an rpm using fpm * Stop barbican-api during rpm removal * Fixing NoneType issue with limit and offset * Restrict use of 'plain_text' to 'text/plain' MIME * Package dependencies in RPMs using fpm * Automate rpm build * Clean list paging offset and limit values properly * Fix rpm dependencies * Validate MIME type and cypher type during order creation * Added logic to skip migrations for sqlite databases, as sqlite does not support altering schema data * Added barbican-api rpm packaging * Adding files to support database migration via alembic * Adjust debian upstart script to allow for updating running app when updating package version * Added missing -1 in debian changelog file * Revamped version release script to use time stamps; * Clean up debian packaging. 
Install dependencies from distro repos * Add .gitreviw file; Fixed pyflakes violations; Fixed pep7 violations; Fixed tox.ini config; * Prepare for next release 0.1.65 ------ * Release for v0.1.65 * Fixed script rename in setup.py * Restore comments about policy in barbican run script * Renamed barbican-api script to barbican-all * Use upstart instead of init scripts for uwsgi emperor * Added admin api as a separate falcon app * Removed unused uwsgi.ini file * Prepare for next release 0.1.64 ------ * Release for v0.1.64 * Updated the perf URI; * Prepare for next release 0.1.63 ------ * Release for v0.1.63 * Changing the performance URI; * Prepare for next release 0.1.62 ------ * Release for v0.1.62 * Adding blitz.io API contract; * Modified readme to refer to the wiki home page; * Clean up unit test suite; Remove print statements; * Prepare for next release 0.1.61 ------ * Release for v0.1.61 * Require latest SQLAlchemy (8.1) * Prepare for next release 0.1.60 ------ * Release for v0.1.60 * Prepare for next release 0.1.59 ------ * Release for v0.1.59 * support faux key creation in crypto plugins + tests * PEP8 clean up; * #131 Fix defect involving expiration dates with time zones; Add graceful handling of 500 errors in REST resources; * Prepare for next release 0.1.58 ------ * Release for v0.1.58 * Prepare for next release * Release for v0.1.57 0.1.57 ------ * Updating API Doc and Technology links * Prepare for next release * Release for v0.1.56 0.1.56 ------ * Address Douglass review items; Officially deleting api validators.py; * Put text/plain in front of octet-stream in secret mime-type enum, as is more common type probably; * Added enum validation of mime-types * Finished order validation; Fixed secret from order bug; Added logic to display secret UUID for name if not specified; * Added orders validation; Fixed bug in the max size of plain text secrets; * Prepare for next release * Release for v0.1.55 0.1.55 ------ * Added more testing around nulls/empties in 
JSON input; * Prepare for next release * Fixed parser failure output back to REST client * Release for v0.1.54 0.1.54 ------ * FIxed PEP8 violations; Added jsonschema to installer dependencies; * Added validation logic for new secrets; * put back the copyright range to that of the original copied file .. instead of the incremented 2013 * do not need wsgi, using Middleware class from barbican.api.middleware.__init__.py * using Middleware from api.middleware.__init__, just what we need. Added debug middleware to the __init__ file. Also extended copyright range from 2011-2012 * Prepare for next release * Release for v0.1.53 0.1.53 ------ * Adding err.log to git ignore list; Remove spurious log; * Fixed mismatch between tenantsecret table's FKs and the ids of the models; * Add tox to test requires; * Add tox to test requires; Better handle mal-formed JSON errors; * creating a request context after token authorization that contains user particulars such as tenant-id, user-id etc * support for keystone context in request * fixes the problem of 'Command tools/with_venv.sh pip install --upgrade pip failed.' In the process adopted install_venv_common.py. 
Note setup.sh is currently commented out * Prepare for next release * Release for v0.1.52 0.1.52 ------ * Added more debug logging in the paging repo methods, to troubleshoot defect; * Oops, put back the 'see glance setup.py' for later version flow incorporation ala OpenStack lines * Grab kek from config file for simple plugin * need to make executable to not get permissions error when no virtual environment exists * Prepare for next release * Release for v0.1.51 0.1.51 ------ * removed references to glance and substituted with barbican * Prepare for next release * Release for v0.1.50 0.1.50 ------ * Adjusted limits logic; Added logging around limits to troubleshoot bug; * Prepare for next release * Release for v0.1.49 0.1.49 ------ * Added missing is-deleted filter on order query; Changed secret POST response code to 201; * Prepare for next release * Release for v0.1.48 0.1.48 ------ * Restore docstring param accidently removed; * Addressed Douglas issues with imports; Fixed PEP8 violations * Bug fix for order/tenant creation; * Merged in Douglas' changes; * Added better error feedback to clients; Added missing unit tests; * Remove EncryptedDatum from plugin * Added missing tenant-filtering logic/SQL; Added improved error reporting to clients; * Only use byte type in plugin * Added tenant-id filter to db queries of secrets; * Secret can have more than one encrypted datum * Save encryption details to kek_metadata, not datum.mime_type * Prepare for next release * Release for v0.1.47 * Added aes-128-cbc encryption in plugin using pycrypto 0.1.47 ------ * Added size limits to the secret data; Fixed the next nav link to hide if no more data to display; * Added methods to the crypto plugin manager to handle key gen and is-supports API flows; Added more code to deal with corner cases in API flow; * Added soft-delete orders/secrets support; Updated unit tests; * Fixed PEP8 violations; Tweaked fake encrypt/decrypt to show original passed in data; * Prepare for next release 
* Release for v0.1.46 0.1.46 ------ * Don't inject conf into extension manager * Added support for retrieving secret info stored in datum records, more work to follow * Prepare for next release * Release for v0.1.45 0.1.45 ------ * Prepare for next release * Release for v0.1.44 0.1.44 ------ * Prepare for next release * Release for v0.1.43 * Use oslo.config to load crypto plugins 0.1.43 ------ * Prepare for next release * Fixed the one-step POST call flow, verified with text/plain mime type; Modified exception handling to provide better feedback to client on errors; * Release for v0.1.42 0.1.42 ------ * Comment out identity policy setup in bin that is clobbering auto deployment process * Added warning about is-null test * Fix SQL bug in model code; * Prepare for next release * Release for v0.1.41 * Fixed some PEP8 violations; * Merged Douglas' changes; Updated debian installer to install stevedore package; * Added support for restricting GETs of expired secrets; Set default page size to 10 records; * Prepare for next release * Release for v0.1.40 0.1.40 ------ * Prepare for next release * Release for v0.1.39 0.1.39 ------ * pep8 fixes * Fixed tox tests broken by upstream commits * Add support/tests for paged orders list via GETs; * Fixed unit tests to mock policy enforcer; * Fixed order test * Finished secrets GET unit tests; * Removed barbican.crypto.fields module. 
Use plugin system instead * Added decrypt via plugin * Added paging logic, preliminary unit tests * manual test aid * merged with latest secrets resources, and load-balance related changes * keystone middleware integration * integrating keystone policy infrastructure * Add paged list of secrets via secrets GET; * Added crypto plugin encryption to Secrets post * Prepare for next release * Release for v0.1.38 0.1.38 ------ * Converted ids to HATEOS-style refs per API docs; Added true host name for this href via conf file; * Transferring blueprint from apiary.io * Added missing secret fields; Changed order request to have an embedded secret dict that mirrors the secret request type; * Prepare for next release * Release for v0.1.37 * Prepare for next release * Release for v0.1.37 0.1.37 ------ * Fix secret post error * Prepare for next release * Release for v0.1.36 0.1.36 ------ * Prepare for next release * Add unit testing; Add text/plain mime handling; * Release for v0.1.35 * Add PUT secret logic; add generate key (simple only); Add more unit tests; Still need to add missing secret paramters * Added content-types output for secret GET * changed port to 9311 instead of 8080 * Prepare for next release * Release for v0.1.34 * Added creation of secret from order processing * changed port to 9311, avoiding 9292 (taken by glance) and 8080 (taken by S3) * Prepare for next release * Release for v0.1.33 0.1.33 ------ * Prepare for next release * Release for v0.1.32 * Resolve diff in versions * Prepare for next release * Release for v0.1.30 * Revamped Secret and Order stuff per discussion, just baseline functionality, not complete * Modified files per revamped secrets/orders API * require python-keystoneclient to support keystone authentication * Prepare for next release * Release for v0.1.31 0.1.31 ------ * Remove duplicate debian source folder * Ignore IntelliJ module files * Prepare for next release * Release for v0.1.30 0.1.30 ------ * Prepare for next release * Release 
for v0.1.29 * Added v1 prefix to URI routes * Prepare for next release * Release for v0.1.28 * Changed working and links for new info * Changed the mailing list in the README to the openstack-dev one; Added info about the Freenode IRC for barbican as well * Prepare for next release * Release for v0.1.27 * Add links from main git page to wiki pages * Prepare for next release * Release for v0.1.26 * Added unit test for the sample middleware component; Tweaks to the Barbican worker node boot script; * Prepare for next release * Release for v0.1.25 * Added unit test for tasks; Replaced TBDs with more IDE-friendly TODOs * Prepare for next release * Release for v0.1.24 * Add final config file changes for queuing/Celery * Prepare for next release * Release for v0.1.23 * Prepare for next release * Release for v0.1.22 * Added files needed to run Celery- and RabbitMQ-based workers * Revamped to look more like the Celery tasking approach * Prepare for next release * Release for v0.1.21 * Prepare for next release * Release for v0.1.20 * Prepare for next release * Release for v0.1.19 * Prepare for next release * Release for v0.1.18 * Modified the test/default/standalone db connection to use a sqlite file in a folder writable by the barbican user * Prepare for next release * Release for v0.1.17 * Added missing pip dependencies for debian package * Prepare for next release * Release for v0.1.16 * Prepare for next release * Release for v0.1.15 * Modify boot script to put copy of conf file into home dir for local-only deployments * Address issue with oslo config rejecting cli options for logging * Fixed mismatch between wiki and api script file venv setting * Moved config files back to etc/barbican * Added missing requires * Added middleware sample; Modified boot script accordingly; * Utilizing openstack jsonutils, per Chad feedback * Fixed mismatch in bin script with wiki docs * Fixed bug in local launch of barbican via bin script * PEP8 cleanup * Added secret entity type; Updated 
the banner of source files to match Rackspace open source * Fixed/added unit tests...more to come * Added initial worker and queue modules, more work to go.. * Added CSR and SSL Certificate resource and models...still need to plumb components * Added tenent resource test; * Cleaned up repo/model/config files to be more like Glance; * Add initial revamped data related files * Add paste config file * Prepare for next release * Release for v0.1.14 * Add oslo/openstack files; Modified barbican-api script and config to support local spinup of uwsgi API server * Prepare for next release * Release for v0.1.13 * Fixed PEP8 violations * Prepare for next release * Release for v0.1.12 * adding images for the wiki page * Prepare for next release * Release for v0.1.11 * Prepare for next release * Release for v0.1.10 * Add change to test Jenkins for sprint review * Prepare for next release * Release for v0.1.9 * Test config to trigger Jenkins * Prepare for next release * Release for v0.1.8 * Prepare for next release * Release for v0.1.7 * Test change to repo * Prepare for next release * Release for v0.1.6 * Prepare for next release * Release for v0.1.5 * Prepare for next release * Release for v0.1.4 * Prepare for next release * Release for v0.1.3 * Prepare for next release * Release for v0.1.2 * Added pythonpath to where config.py is located * Added missing version utility modules * Bump to the next version * Fixed syntax error in script * Added bin folder, to contain start scripts for the Barbican services * Adjust the inital version to 0.1.1 * Moving POC code * Update README.md * Update README.md * Update LICENSE * Basic Readme * Adding PANIC log type * removing tag code * workaround for bad header parsing * Wrap policies in plural container * Update README.md * Added missing dependency * PEP8 Fixes * \Merge branch 'tenantAPI' of github.com:cloudkeep/barbican * fix a bug for mutiple page agent pairing * a bug fix for updating pairings * Added Agent API and Agent WebUI * Use 
Markup for escape * Update Web UI for event display * Updated tenant API and API testing * add tenant api * add test script for API * added sample files to give coverage tool something to chew on * added missing args from tox config * Removed conflicting argument for nosetests * added missing nosetests parameters in setup.cfg * added missing package py files * added pip and test requirements * Add test and config files to test Jenkins * adding tox.ini file * API Support For Policies * Added Pairing Call * API Example * WIP: First API Call * Added basic policies * Added Key model * Added Tenant model * Authenciation & Admin * Improved style for Users * Added Twitter Bootstrap for style * SQLAlchemy Part 2 * Adding SQLAlchemy persistence * Simple formatting fixes * API Blueprint * Flask Structure * Update README.md * Initial commit barbican-2.0.0/PKG-INFO0000664000567000056710000000141412701406024015504 0ustar jenkinsjenkins00000000000000Metadata-Version: 1.1 Name: barbican Version: 2.0.0 Summary: UNKNOWN Home-page: http://www.openstack.org/ Author: OpenStack Author-email: openstack-dev@lists.openstack.org License: UNKNOWN Description: Service for storing sensitive client information for OpenStack Platform: UNKNOWN Classifier: Environment :: OpenStack Classifier: Intended Audience :: Information Technology Classifier: Intended Audience :: System Administrators Classifier: License :: OSI Approved :: Apache Software License Classifier: Operating System :: POSIX :: Linux Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2 Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.4 barbican-2.0.0/tox.ini0000664000567000056710000001001212701405673015725 0ustar jenkinsjenkins00000000000000[tox] envlist = pep8,py34,py27,docs [testenv] install_command = pip install -U {opts} {packages} deps = -r{toxinidir}/requirements.txt 
-r{toxinidir}/test-requirements.txt commands = /usr/bin/find . -type f -name "*.pyc" -delete python setup.py testr --coverage --testr-args='{posargs}' coverage combine coverage report -m [testenv:cover] deps = {[testenv]deps} diff_cover commands = python setup.py testr --coverage --testr-args='{posargs}' coverage combine coverage xml diff-cover --fail-under 100 coverage.xml [testenv:releasenotes] commands = sphinx-build -a -E -W -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html [testenv:py34] commands = /usr/bin/find . -type f -name "*.pyc" -delete python -m testtools.run \ barbican.tests.api.controllers.test_versions \ barbican.tests.api.middleware.test_context \ barbican.tests.api.middleware.test_simple \ barbican.tests.cmd.test_cmd \ barbican.tests.common.test_hrefs \ barbican.tests.common.test_quota \ barbican.tests.common.test_utils \ barbican.tests.model.repositories.test_repositories \ barbican.tests.model.repositories.test_repositories_acls \ barbican.tests.model.repositories.test_repositories_certificate_authorities \ barbican.tests.model.repositories.test_repositories_consumers \ barbican.tests.model.repositories.test_repositories_containers \ barbican.tests.model.repositories.test_repositories_order_retry_tasks \ barbican.tests.model.repositories.test_repositories_orders \ barbican.tests.model.repositories.test_repositories_projects \ barbican.tests.model.repositories.test_repositories_quotas \ barbican.tests.model.repositories.test_repositories_secrets \ barbican.tests.model.repositories.test_repositories_transport_keys \ barbican.tests.model.test_models \ barbican.tests.plugin.crypto.test_manager \ barbican.tests.plugin.interface.test_secret_store \ barbican.tests.plugin.test_simple_certificate_manager \ barbican.tests.plugin.util.test_mime_types \ barbican.tests.plugin.util.test_utils \ barbican.tests.queue.test_client \ barbican.tests.queue.test_keystone_listener \ barbican.tests.queue.test_retry_scheduler \ 
barbican.tests.queue.test_server \ barbican.tests.tasks.test_certificate_resources \ barbican.tests.tasks.test_common \ barbican.tests.tasks.test_resources [testenv:pep8] sitepackages = False commands = flake8 {posargs} # Run security linter bandit -r barbican -x tests -n5 [testenv:venv] commands = {posargs} [testenv:debug] commands = oslo_debug_helper {posargs} [testenv:py3pep8] # This hack is in place to allow us to run py3 based flake8 # without installing barbican. basepython = python3 install_command = /bin/echo {packages} commands = pip install "hacking>=0.10.0,<0.11" flake8 barbican setup.py [testenv:docs] commands= rm -rf api-guide/build python setup.py build_sphinx sphinx-build -W -b html api-guide/source api-guide/build/html [testenv:api-guide] # This environment is called from CI scripts to test and publish # the API Guide to developer.openstack.org. commands = sphinx-build -W -b html -d api-guide/build/doctrees api-guide/source api-guide/build/html [testenv:functional] # This tox env is purely to make local test development easier # Note: This requires local running instances of Barbican and Keystone deps = -r{toxinidir}/test-requirements.txt setenv = OS_TEST_PATH={toxinidir}/functionaltests commands = /usr/bin/find . 
-type f -name "*.pyc" -delete /bin/bash {toxinidir}/functionaltests/pretty_tox.sh '{posargs}' [flake8] exclude = .git,.idea,.tox,bin,dist,debian,rpmbuild,tools,*.egg-info,*.eggs,*openstack/common,contrib, functionaltests,*alembic_migrations/versions,*docs/target,*.egg [testenv:bandit] deps = -r{toxinidir}/test-requirements.txt commands = bandit -r barbican -x tests -n5 barbican-2.0.0/barbican.egg-info/0000775000567000056710000000000012701406024017642 5ustar jenkinsjenkins00000000000000barbican-2.0.0/barbican.egg-info/requires.txt0000664000567000056710000000104512701406023022241 0ustar jenkinsjenkins00000000000000alembic>=0.8.0 Babel>=1.3 cffi cryptography>=1.0 eventlet!=0.18.3,>=0.18.2 jsonschema!=2.5.0,<3.0.0,>=2.0.0 oslo.concurrency>=3.5.0 oslo.config>=3.7.0 oslo.context>=0.2.0 oslo.i18n>=2.1.0 oslo.messaging>=4.0.0 oslo.middleware>=3.0.0 oslo.log>=1.14.0 oslo.policy>=0.5.0 oslo.serialization>=1.10.0 oslo.service>=1.0.0 oslo.utils>=3.5.0 Paste PasteDeploy>=1.5.0 pbr>=1.6 pecan>=1.0.0 pycadf!=2.0.0,>=1.1.0 pycrypto>=2.6 pyOpenSSL>=0.14 ldap3>=0.9.8.2 keystonemiddleware!=4.1.0,>=4.0.0 six>=1.9.0 SQLAlchemy<1.1.0,>=1.0.10 stevedore>=1.5.0 WebOb>=1.2.3 barbican-2.0.0/barbican.egg-info/dependency_links.txt0000664000567000056710000000000112701406023023707 0ustar jenkinsjenkins00000000000000 barbican-2.0.0/barbican.egg-info/not-zip-safe0000664000567000056710000000000112701405777022106 0ustar jenkinsjenkins00000000000000 barbican-2.0.0/barbican.egg-info/SOURCES.txt0000664000567000056710000003643512701406024021541 0ustar jenkinsjenkins00000000000000.coveragerc .mailmap .testr.conf AUTHORS ChangeLog HACKING.rst LICENSE README.md apiary.apib babel.cfg pylintrc requirements.txt setup.cfg setup.py test-requirements.txt tox.ini api-guide/source/acls.rst api-guide/source/cas.rst api-guide/source/certificates.rst api-guide/source/conf.py api-guide/source/consumers.rst api-guide/source/containers.rst api-guide/source/dogtag_setup.rst api-guide/source/index.rst 
api-guide/source/orders.rst api-guide/source/pkcs11keygeneration.rst api-guide/source/quotas.rst api-guide/source/secret_metadata.rst api-guide/source/secrets.rst barbican/__init__.py barbican/context.py barbican/i18n.py barbican/version.py barbican.egg-info/PKG-INFO barbican.egg-info/SOURCES.txt barbican.egg-info/dependency_links.txt barbican.egg-info/entry_points.txt barbican.egg-info/not-zip-safe barbican.egg-info/pbr.json barbican.egg-info/requires.txt barbican.egg-info/top_level.txt barbican/api/__init__.py barbican/api/app.py barbican/api/app.wsgi barbican/api/hooks.py barbican/api/controllers/__init__.py barbican/api/controllers/acls.py barbican/api/controllers/cas.py barbican/api/controllers/consumers.py barbican/api/controllers/containers.py barbican/api/controllers/orders.py barbican/api/controllers/quotas.py barbican/api/controllers/secretmeta.py barbican/api/controllers/secrets.py barbican/api/controllers/transportkeys.py barbican/api/controllers/versions.py barbican/api/middleware/__init__.py barbican/api/middleware/context.py barbican/api/middleware/simple.py barbican/cmd/__init__.py barbican/cmd/barbican_manage.py barbican/cmd/db_manage.py barbican/cmd/keystone_listener.py barbican/cmd/pkcs11_kek_rewrap.py barbican/cmd/pkcs11_key_generation.py barbican/cmd/pkcs11_migrate_kek_signatures.py barbican/cmd/retry_scheduler.py barbican/cmd/worker.py barbican/common/__init__.py barbican/common/config.py barbican/common/exception.py barbican/common/hrefs.py barbican/common/quota.py barbican/common/resources.py barbican/common/utils.py barbican/common/validators.py barbican/locale/barbican-log-error.pot barbican/locale/barbican-log-info.pot barbican/locale/barbican-log-warning.pot barbican/locale/barbican.pot barbican/locale/zh_CN/LC_MESSAGES/barbican-log-error.po barbican/model/__init__.py barbican/model/clean.py barbican/model/models.py barbican/model/repositories.py barbican/model/migration/__init__.py barbican/model/migration/alembic.ini 
barbican/model/migration/commands.py barbican/model/migration/alembic_migrations/README barbican/model/migration/alembic_migrations/__init__.py barbican/model/migration/alembic_migrations/container_init_ops.py barbican/model/migration/alembic_migrations/encrypted_init_ops.py barbican/model/migration/alembic_migrations/env.py barbican/model/migration/alembic_migrations/kek_init_ops.py barbican/model/migration/alembic_migrations/order_ops.py barbican/model/migration/alembic_migrations/projects_init_ops.py barbican/model/migration/alembic_migrations/script.py.mako barbican/model/migration/alembic_migrations/secrets_init_ops.py barbican/model/migration/alembic_migrations/transport_keys_init_ops.py barbican/model/migration/alembic_migrations/versions/10220ccbe7fa_remove_transport_keys_column_from_.py barbican/model/migration/alembic_migrations/versions/13d127569afa_create_secret_store_metadata_table.py barbican/model/migration/alembic_migrations/versions/156cd9933643_add_project_column_to_consumer_table.py barbican/model/migration/alembic_migrations/versions/161f8aceb687_fill_project_id_to_secrets_where_missing.py barbican/model/migration/alembic_migrations/versions/1a0c2cdafb38_initial_version.py barbican/model/migration/alembic_migrations/versions/1a7cf79559e3_new_secret_and_container_acl_tables.py barbican/model/migration/alembic_migrations/versions/1bc885808c76_add_project_id_to_secrets.py barbican/model/migration/alembic_migrations/versions/1bece815014f_remove_projectsecret_table.py barbican/model/migration/alembic_migrations/versions/1c0f328bfce0_fixing_composite_primary_keys_and_.py barbican/model/migration/alembic_migrations/versions/1e86c18af2dd_add_new_columns_type_meta_containerid.py barbican/model/migration/alembic_migrations/versions/254495565185_removing_redundant_fields_from_order.py barbican/model/migration/alembic_migrations/versions/256da65e0c5f_change_keystone_id_for_external_id_in_.py 
barbican/model/migration/alembic_migrations/versions/2843d6469f25_add_sub_status_info_for_orders.py barbican/model/migration/alembic_migrations/versions/2ab3f5371bde_dsa_in_container_type_modelbase_to.py barbican/model/migration/alembic_migrations/versions/2d21598e7e70_added_ca_related_tables.py barbican/model/migration/alembic_migrations/versions/3041b53b95d7_remove_size_limits_on_meta_table_values.py barbican/model/migration/alembic_migrations/versions/30dba269cc64_update_order_retry_tasks_table.py barbican/model/migration/alembic_migrations/versions/39a96e67e990_add_missing_constraints.py barbican/model/migration/alembic_migrations/versions/3c3b04040bfe_add_owning_project_and_creator_to_cas.py barbican/model/migration/alembic_migrations/versions/3d36a26b88af_add_order_barbican_metadata_table.py barbican/model/migration/alembic_migrations/versions/4070806f6972_add_orders_plugin_metadata_table_and_.py barbican/model/migration/alembic_migrations/versions/443d6f4a69ac_added_secret_type_column_to_secrets_.py barbican/model/migration/alembic_migrations/versions/46b98cde536_add_project_quotas_table.py barbican/model/migration/alembic_migrations/versions/47b69e523451_made_plugin_names_in_kek_datum_non_.py barbican/model/migration/alembic_migrations/versions/4ecde3a3a72a_add_cas_column_to_project_quotas_table.py barbican/model/migration/alembic_migrations/versions/6a4457517a3_rename_acl_creator_only_to_project_.py barbican/model/migration/alembic_migrations/versions/795737bb3c3_change_tenants_to_projects.py barbican/model/migration/alembic_migrations/versions/aa2cf96a1d5_add_orderretrytask.py barbican/model/migration/alembic_migrations/versions/cd4106a1a0_add_cert_to_container_type.py barbican/model/migration/alembic_migrations/versions/d2780d5aa510_change_url_length.py barbican/model/migration/alembic_migrations/versions/dce488646127_add_secret_user_metadata.py barbican/model/migration/alembic_migrations/versions/juno_initial.py 
barbican/model/migration/alembic_migrations/versions/kilo_release.py barbican/plugin/__init__.py barbican/plugin/dogtag.py barbican/plugin/kmip_secret_store.py barbican/plugin/resources.py barbican/plugin/simple_certificate_manager.py barbican/plugin/snakeoil_ca.py barbican/plugin/store_crypto.py barbican/plugin/symantec.py barbican/plugin/crypto/__init__.py barbican/plugin/crypto/crypto.py barbican/plugin/crypto/manager.py barbican/plugin/crypto/p11_crypto.py barbican/plugin/crypto/pkcs11.py barbican/plugin/crypto/simple_crypto.py barbican/plugin/interface/__init__.py barbican/plugin/interface/certificate_manager.py barbican/plugin/interface/secret_store.py barbican/plugin/util/__init__.py barbican/plugin/util/mime_types.py barbican/plugin/util/translations.py barbican/plugin/util/utils.py barbican/queue/__init__.py barbican/queue/client.py barbican/queue/keystone_listener.py barbican/queue/retry_scheduler.py barbican/queue/server.py barbican/tasks/__init__.py barbican/tasks/certificate_resources.py barbican/tasks/common.py barbican/tasks/keystone_consumer.py barbican/tasks/resources.py barbican/tests/__init__.py barbican/tests/certificate_utils.py barbican/tests/database_utils.py barbican/tests/keys.py barbican/tests/test_middleware_auth.py barbican/tests/utils.py barbican/tests/api/__init__.py barbican/tests/api/test_init.py barbican/tests/api/test_resources.py barbican/tests/api/test_resources_policy.py barbican/tests/api/test_transport_keys_resource.py barbican/tests/api/controllers/__init__.py barbican/tests/api/controllers/test_acls.py barbican/tests/api/controllers/test_cas.py barbican/tests/api/controllers/test_containers.py barbican/tests/api/controllers/test_orders.py barbican/tests/api/controllers/test_quotas.py barbican/tests/api/controllers/test_secretmeta.py barbican/tests/api/controllers/test_secrets.py barbican/tests/api/controllers/test_versions.py barbican/tests/api/middleware/__init__.py barbican/tests/api/middleware/test_context.py 
barbican/tests/api/middleware/test_simple.py barbican/tests/cmd/__init__.py barbican/tests/cmd/test_barbican_manage.py barbican/tests/cmd/test_cmd.py barbican/tests/cmd/test_db_cleanup.py barbican/tests/common/__init__.py barbican/tests/common/test_hrefs.py barbican/tests/common/test_quota.py barbican/tests/common/test_utils.py barbican/tests/common/test_validators.py barbican/tests/model/__init__.py barbican/tests/model/test_models.py barbican/tests/model/repositories/__init__.py barbican/tests/model/repositories/test_repositories.py barbican/tests/model/repositories/test_repositories_acls.py barbican/tests/model/repositories/test_repositories_certificate_authorities.py barbican/tests/model/repositories/test_repositories_consumers.py barbican/tests/model/repositories/test_repositories_containers.py barbican/tests/model/repositories/test_repositories_order_retry_tasks.py barbican/tests/model/repositories/test_repositories_orders.py barbican/tests/model/repositories/test_repositories_projects.py barbican/tests/model/repositories/test_repositories_quotas.py barbican/tests/model/repositories/test_repositories_secret_metadata.py barbican/tests/model/repositories/test_repositories_secrets.py barbican/tests/model/repositories/test_repositories_transport_keys.py barbican/tests/plugin/__init__.py barbican/tests/plugin/test_dogtag.py barbican/tests/plugin/test_kmip.py barbican/tests/plugin/test_resource.py barbican/tests/plugin/test_simple_certificate_manager.py barbican/tests/plugin/test_snakeoil_ca.py barbican/tests/plugin/test_store_crypto.py barbican/tests/plugin/test_symantec.py barbican/tests/plugin/crypto/__init__.py barbican/tests/plugin/crypto/test_crypto.py barbican/tests/plugin/crypto/test_manager.py barbican/tests/plugin/crypto/test_p11_crypto.py barbican/tests/plugin/crypto/test_pkcs11.py barbican/tests/plugin/interface/__init__.py barbican/tests/plugin/interface/test_certificate_manager.py barbican/tests/plugin/interface/test_secret_store.py 
barbican/tests/plugin/util/__init__.py barbican/tests/plugin/util/test_mime_types.py barbican/tests/plugin/util/test_translations.py barbican/tests/plugin/util/test_utils.py barbican/tests/queue/__init__.py barbican/tests/queue/test_client.py barbican/tests/queue/test_keystone_listener.py barbican/tests/queue/test_retry_scheduler.py barbican/tests/queue/test_server.py barbican/tests/tasks/__init__.py barbican/tests/tasks/test_certificate_resources.py barbican/tests/tasks/test_common.py barbican/tests/tasks/test_keystone_consumer.py barbican/tests/tasks/test_resources.py bin/barbican-api bin/barbican.sh bin/demo_requests.py bin/keystone_data.sh bin/versionbuild.py devstack/README.md devstack/gate_hook.sh devstack/local.conf.example devstack/plugin.sh devstack/settings devstack/barbican-vagrant/Vagrantfile devstack/barbican-vagrant/install_devstack.sh devstack/lib/barbican doc/source/conf.py doc/source/index.rst doc/source/testing.rst doc/source/admin-guide-cloud/access_control.rst doc/source/admin-guide-cloud/barbican_manage.rst doc/source/admin-guide-cloud/database_cleaning.rst doc/source/admin-guide-cloud/index.rst doc/source/api/index.rst doc/source/api/reference/acls.rst doc/source/api/reference/cas.rst doc/source/api/reference/certificates.rst doc/source/api/reference/consumers.rst doc/source/api/reference/containers.rst doc/source/api/reference/orders.rst doc/source/api/reference/quotas.rst doc/source/api/reference/secret_metadata.rst doc/source/api/reference/secret_types.rst doc/source/api/reference/secrets.rst doc/source/contribute/architecture.rst doc/source/contribute/database_migrations.rst doc/source/contribute/dataflow.rst doc/source/contribute/dependencies.rst doc/source/contribute/getting_involved.rst doc/source/contribute/structure.rst doc/source/images/barbican-components.gif doc/source/images/barbican-overall-architecture.gif doc/source/plugin/certificate.rst doc/source/plugin/crypto.rst doc/source/plugin/index.rst 
doc/source/plugin/secret_store.rst doc/source/setup/audit.rst doc/source/setup/certificate.rst doc/source/setup/dev.rst doc/source/setup/devstack.rst doc/source/setup/index.rst doc/source/setup/keystone.rst doc/source/setup/noauth.rst doc/source/setup/troubleshooting.rst etc/barbican/api_audit_map.conf etc/barbican/barbican-api-paste.ini etc/barbican/barbican-functional.conf etc/barbican/barbican.conf etc/barbican/policy.json etc/barbican/vassals/barbican-api.ini etc/init/barbican-keystone-listener.conf etc/init/barbican-worker.conf etc/init/barbican.conf etc/logrotate.d/barbican-api functionaltests/.testr.conf functionaltests/__init__.py functionaltests/post_test_hook.sh functionaltests/pretty_tox.sh functionaltests/run_tests.sh functionaltests/api/__init__.py functionaltests/api/base.py functionaltests/api/v1/__init__.py functionaltests/api/v1/behaviors/__init__.py functionaltests/api/v1/behaviors/acl_behaviors.py functionaltests/api/v1/behaviors/base_behaviors.py functionaltests/api/v1/behaviors/ca_behaviors.py functionaltests/api/v1/behaviors/consumer_behaviors.py functionaltests/api/v1/behaviors/container_behaviors.py functionaltests/api/v1/behaviors/order_behaviors.py functionaltests/api/v1/behaviors/quota_behaviors.py functionaltests/api/v1/behaviors/secret_behaviors.py functionaltests/api/v1/behaviors/secretmeta_behaviors.py functionaltests/api/v1/functional/__init__.py functionaltests/api/v1/functional/test_acls.py functionaltests/api/v1/functional/test_acls_rbac.py functionaltests/api/v1/functional/test_cas.py functionaltests/api/v1/functional/test_certificate_orders.py functionaltests/api/v1/functional/test_consumers.py functionaltests/api/v1/functional/test_containers.py functionaltests/api/v1/functional/test_containers_rbac.py functionaltests/api/v1/functional/test_orders.py functionaltests/api/v1/functional/test_orders_rbac.py functionaltests/api/v1/functional/test_quotas.py functionaltests/api/v1/functional/test_quotas_enforce.py 
functionaltests/api/v1/functional/test_quotas_rbac.py functionaltests/api/v1/functional/test_rsa.py functionaltests/api/v1/functional/test_secretmeta.py functionaltests/api/v1/functional/test_secrets.py functionaltests/api/v1/functional/test_secrets_rbac.py functionaltests/api/v1/models/__init__.py functionaltests/api/v1/models/acl_models.py functionaltests/api/v1/models/base_models.py functionaltests/api/v1/models/ca_models.py functionaltests/api/v1/models/consumer_model.py functionaltests/api/v1/models/container_models.py functionaltests/api/v1/models/order_models.py functionaltests/api/v1/models/quota_models.py functionaltests/api/v1/models/secret_models.py functionaltests/api/v1/smoke/__init__.py functionaltests/api/v1/smoke/test_consumers.py functionaltests/api/v1/smoke/test_containers.py functionaltests/api/v1/smoke/test_orders.py functionaltests/api/v1/smoke/test_secrets.py functionaltests/api/v1/smoke/test_versions.py functionaltests/common/__init__.py functionaltests/common/auth.py functionaltests/common/client.py functionaltests/common/config.py releasenotes/notes/.placeholder releasenotes/notes/barbican-manage-d469b4d15454f981.yaml releasenotes/notes/metadata-api-e95d4559e7bf9ca9.yaml releasenotes/notes/pkcs11-backend-performance-f3caacbe9e1ab535.yaml releasenotes/source/conf.py releasenotes/source/index.rst releasenotes/source/liberty.rst releasenotes/source/unreleased.rst releasenotes/source/_static/.placeholderbarbican-2.0.0/barbican.egg-info/PKG-INFO0000664000567000056710000000141412701406023020736 0ustar jenkinsjenkins00000000000000Metadata-Version: 1.1 Name: barbican Version: 2.0.0 Summary: UNKNOWN Home-page: http://www.openstack.org/ Author: OpenStack Author-email: openstack-dev@lists.openstack.org License: UNKNOWN Description: Service for storing sensitive client information for OpenStack Platform: UNKNOWN Classifier: Environment :: OpenStack Classifier: Intended Audience :: Information Technology Classifier: Intended Audience :: System 
Administrators Classifier: License :: OSI Approved :: Apache Software License Classifier: Operating System :: POSIX :: Linux Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2 Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.4 barbican-2.0.0/barbican.egg-info/top_level.txt0000664000567000056710000000001112701406023022363 0ustar jenkinsjenkins00000000000000barbican barbican-2.0.0/barbican.egg-info/pbr.json0000664000567000056710000000005612701406023021320 0ustar jenkinsjenkins00000000000000{"is_release": true, "git_version": "acbdb03"}barbican-2.0.0/barbican.egg-info/entry_points.txt0000664000567000056710000000243512701406023023143 0ustar jenkinsjenkins00000000000000[barbican.certificate.event.plugin] simple_certificate_event = barbican.plugin.simple_certificate_manager:SimpleCertificateEventPlugin [barbican.certificate.plugin] dogtag = barbican.plugin.dogtag:DogtagCAPlugin simple_certificate = barbican.plugin.simple_certificate_manager:SimpleCertificatePlugin snakeoil_ca = barbican.plugin.snakeoil_ca:SnakeoilCACertificatePlugin symantec = barbican.plugin.symantec:SymantecCertificatePlugin [barbican.crypto.plugin] p11_crypto = barbican.plugin.crypto.p11_crypto:P11CryptoPlugin simple_crypto = barbican.plugin.crypto.simple_crypto:SimpleCryptoPlugin [barbican.secretstore.plugin] dogtag_crypto = barbican.plugin.dogtag:DogtagKRAPlugin kmip_plugin = barbican.plugin.kmip_secret_store:KMIPSecretStore store_crypto = barbican.plugin.store_crypto:StoreCryptoAdapterPlugin [barbican.test.crypto.plugin] test_crypto = barbican.tests.crypto.test_plugin:TestCryptoPlugin [console_scripts] barbican-db-manage = barbican.cmd.db_manage:main barbican-keystone-listener = barbican.cmd.keystone_listener:main barbican-manage = barbican.cmd.barbican_manage:main barbican-retry = barbican.cmd.retry_scheduler:main barbican-worker = barbican.cmd.worker:main 
pkcs11-kek-rewrap = barbican.cmd.pkcs11_kek_rewrap:main pkcs11-key-generation = barbican.cmd.pkcs11_key_generation:main barbican-2.0.0/pylintrc0000664000567000056710000000145412701405673016213 0ustar jenkinsjenkins00000000000000[Messages Control] # W0511: TODOs in code comments are fine. # W0142: *args and **kwargs are fine. # W0622: Redefining id is fine. disable-msg=W0511,W0142,W0622 [Basic] # Variable names can be 1 to 31 characters long, with lowercase and underscores variable-rgx=[a-z_][a-z0-9_]{0,30}$ # Argument names can be 2 to 31 characters long, with lowercase and underscores argument-rgx=[a-z_][a-z0-9_]{1,30}$ # Method names should be at least 3 characters long # and be lowecased with underscores method-rgx=[a-z_][a-z0-9_]{2,50}$ # Module names matching nova-* are ok (files in bin/) module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+)|(nova-[a-z0-9_-]+))$ # Don't require docstrings on tests. no-docstring-rgx=((__.*__)|([tT]est.*)|setUp|tearDown)$ [Design] max-public-methods=100 min-public-methods=0 max-args=6 barbican-2.0.0/.testr.conf0000664000567000056710000000056112701405673016510 0ustar jenkinsjenkins00000000000000[DEFAULT] test_command=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} \ OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} \ OS_LOG_CAPTURE=${OS_LOG_CAPTURE:-1} \ ${PYTHON:-python} -m subunit.run discover -s ${OS_TEST_PATH:-./barbican} -t . $LISTOPT $IDOPTION test_id_option=--load-list $IDFILE test_list_option=--list group_regex=([^\.]+\.)+ barbican-2.0.0/AUTHORS0000664000567000056710000001327712701406023015470 0ustar jenkinsjenkins00000000000000Abhishek Koneru Adam Harwell Ade Lee Ajay Kalambur Alex Gaynor Alex Schultz Alex Schultz Amy Marrich Andreas Jaeger Andreas Jaeger Andrew Hartnett Anusha Unnam Aradhana Singh Arash Ghoreyshi Arun Kant Arun Kant Arvind Tiwari Atsushi SAKAI Bertrand Lallau Bryan D. Payne Béla Vancsics CHARDON Gerome Carlos D. 
Garza Carlos Marin Chad Lung Chandan Kumar Chaozhe.Chen Charles Neill Chellygel Chelsea Winfree Chelsea Winfree Chris Solis Christian Berendt Christopher Solis Constanze Kratel Craig Tracey Daniel Gonzalez Davanum Srinivas Dave McCowan Dave Walker (Daviey) Dirk Mueller Dmitry Ratushnyy Dolph Mathews Donald Stufft Doug Hellmann Dougal Matthews Douglas Mendizabal Douglas Mendizábal Douglas Mendizábal Elvin Tubillara Eric Brown Eric-Xie Everardo Padilla Saca Fernando Diaz Freddy Pedraza Gregory Haynes Hironori Shiina Huseyin Gedikli Ian Cordasco Igor Gueths James E. Blair Jarret Raim Jason Fritcher Jeff Feng Jeremy Stanley Joe Gordon John McKenzie John Vrbanac John Vrbanac John Wood Jorge Munoz Juan Antonio Osorio Juan Antonio Osorio Juan Antonio Osorio Robles Juan Antonio Osorio Robles Julien Danjou Kai Qiang Wu Kaitlin Farr Kaitlin Farr Karen Siles Karthik Prabhu Vinod Kenji Yasui Kevin Bishop Kevin_Zheng Lisa Clark Longgeek Louis Taylor Luz Malini K. Bhandaru Martin Kletzander Matthew Treinish Max Abidi Meera Belur Michael Krotscheck Michael McCune Michael Perng Mohit Motiani Monty Taylor Nathan Reller Ning Sun Nithya Renganathan OTSUKA, Yuanying Ollie Leahy Paul Glass Paul Kehrer Peter Hamilton Peter Kazmir Pradeep Kumar Singh Priti Desai Rafael Rivero Ravi Sankar Penta Robert Clark Russell Bryant Ryan Petrello Sascha Peilicke Sheena Gregson Shuquan Huang Sig Sigler Stanislaw Pitucha Steve Heyman Steve Martinelli Steven Gonzales Swapnil Kulkarni (coolsvap) Thierry Carrez Thomas Bechtold Thomas Dinkjian Thomas Goirand Thomas Herve Tim Kelsey Tin Lam Victor Stinner Wanlong Gao Wyllys Ingersoll Yuuichi Fujioka Zhao Lei Zhenguo Niu ZhiQiang Fan Zhongyue Luo binean chadlung chioleong edtubill jfwood jqxin2006 priti_desai sonu.kumar ting.wang tsv venkatamahesh werner mendizabal “Fernando barbican-2.0.0/requirements.txt0000664000567000056710000000206312701405674017706 0ustar jenkinsjenkins00000000000000# The order of packages is significant, because pip processes them in 
the order # of appearance. Changing the order has an impact on the overall integration # process, which may cause wedges in the gate later. alembic>=0.8.0 # MIT Babel>=1.3 # BSD cffi # MIT cryptography>=1.0 # BSD/Apache-2.0 eventlet!=0.18.3,>=0.18.2 # MIT jsonschema!=2.5.0,<3.0.0,>=2.0.0 # MIT oslo.concurrency>=3.5.0 # Apache-2.0 oslo.config>=3.7.0 # Apache-2.0 oslo.context>=0.2.0 # Apache-2.0 oslo.i18n>=2.1.0 # Apache-2.0 oslo.messaging>=4.0.0 # Apache-2.0 oslo.middleware>=3.0.0 # Apache-2.0 oslo.log>=1.14.0 # Apache-2.0 oslo.policy>=0.5.0 # Apache-2.0 oslo.serialization>=1.10.0 # Apache-2.0 oslo.service>=1.0.0 # Apache-2.0 oslo.utils>=3.5.0 # Apache-2.0 Paste # MIT PasteDeploy>=1.5.0 # MIT pbr>=1.6 # Apache-2.0 pecan>=1.0.0 # BSD pycadf!=2.0.0,>=1.1.0 # Apache-2.0 pycrypto>=2.6 # Public Domain pyOpenSSL>=0.14 # Apache-2.0 ldap3>=0.9.8.2 # LGPLv3 keystonemiddleware!=4.1.0,>=4.0.0 # Apache-2.0 six>=1.9.0 # MIT SQLAlchemy<1.1.0,>=1.0.10 # MIT stevedore>=1.5.0 # Apache-2.0 WebOb>=1.2.3 # MIT barbican-2.0.0/HACKING.rst0000664000567000056710000000035512701405673016221 0ustar jenkinsjenkins00000000000000Barbican Style Commandments ============================ - Step 1: Read the OpenStack Style Commandments http://docs.openstack.org/developer/hacking/ - Step 2: Read on Barbican Specific Commandments ------------------------------- barbican-2.0.0/README.md0000664000567000056710000001057312701405673015705 0ustar jenkinsjenkins00000000000000# Barbican Barbican is a REST API designed for the secure storage, provisioning and management of secrets. It is aimed at being useful for all environments, including large ephemeral Clouds. Barbican is an OpenStack project developed by the [Barbican Project Team ](https://wiki.openstack.org/wiki/Barbican) with support from [Rackspace Hosting](http://www.rackspace.com/), EMC, Ericsson, Johns Hopkins University, HP, Red Hat, Cisco Systems, and many more. 
The full documentation can be found on the [Barbican Developer Documentation Site](http://docs.openstack.org/developer/barbican/). If you have a technical question, you can ask it at [Ask OpenStack]( https://ask.openstack.org/en/questions/) with the `barbican` tag, or you can send an email to the [OpenStack General mailing list]( http://lists.openstack.org/pipermail/openstack/) at `openstack@lists.openstack.org` with the prefix `[barbican]` in the subject. To file a bug, use our bug tracker on [Launchpad]( https://bugs.launchpad.net/barbican/). For development questions or discussion, hop on the [OpenStack-dev mailing list ](http://lists.openstack.org/pipermail/openstack-dev/) at `openstack-dev@lists.openstack.org` and let us know what you think, just add `[barbican]` to the subject. You can also join our IRC channel `#openstack-barbican` on Freenode. Barbican began as part of a set of applications that make up the CloudKeep ecosystem. The other systems are: * [Postern](https://github.com/cloudkeep/postern) - Go based agent that provides access to secrets from the Barbican API. * [Palisade](https://github.com/cloudkeep/palisade) - AngularJS based web ui for the Barbican API. * [Python-barbicanclient](https://github.com/openstack/python-barbicanclient) - A convenient Python-based library to interact with the Barbican API. ## Getting Started Please visit our [Getting Started wiki page ](https://github.com/cloudkeep/barbican/wiki/Barbican-Getting-Started-Guide) for details. ## Why Should You Use Barbican? The current state of key management is atrocious. While Windows does have some decent options through the use of the Data Protection API (DPAPI) and Active Directory, Linux lacks a cohesive story around how to manage keys for application use. Barbican was designed to solve this problem. The system was motivated by internal Rackspace needs, requirements from [OpenStack](http://www.openstack.org/) and a realization that the current state of the art could use some help. 
Barbican will handle many types of secrets, including: * **Symmetric Keys** - Used to perform reversible encryption of data at rest, typically using the AES algorithm set. This type of key is required to enable features like [encrypted Swift containers and Cinder volumes](http://www.openstack.org/software/openstack-storage/), [encrypted Cloud Backups](http://www.rackspace.com/cloud/backup/), etc. * **Asymmetric Keys** - Asymmetric key pairs (sometimes referred to as [public / private keys](http://en.wikipedia.org/wiki/Public-key_cryptography)) are used in many scenarios where communication between untrusted parties is desired. The most common case is with SSL/TLS certificates, but also is used in solutions like SSH keys, S/MIME (mail) encryption and digital signatures. * **Raw Secrets** - Barbican stores secrets as a base64 encoded block of data (encrypted, naturally). Clients can use the API to store any secrets in any format they desire. The [Postern](https://github.com/cloudkeep/postern) agent is capable of presenting these secrets in various formats to ease integration. For the symmetric and asymmetric key types, Barbican supports full life cycle management including provisioning, expiration, reporting, etc. A plugin system allows for multiple certificate authority support (including public and private CAs). ## Design Goals 1. Provide a central secret-store capable of distributing secret / keying material to all types of deployments including ephemeral Cloud instances. 2. Support reasonable compliance regimes through reporting and auditability. 3. Application adoption costs should be minimal or non-existent. 4. Build a community and ecosystem by being open-source and extensible. 5. Improve security through sane defaults and centralized management of [policies for all secrets](https://github.com/cloudkeep/barbican/wiki/Policies). 6. Provide an out of band communication mechanism to notify and protect sensitive assets. 
barbican-2.0.0/releasenotes/0000775000567000056710000000000012701406024017100 5ustar jenkinsjenkins00000000000000barbican-2.0.0/releasenotes/notes/0000775000567000056710000000000012701406024020230 5ustar jenkinsjenkins00000000000000barbican-2.0.0/releasenotes/notes/metadata-api-e95d4559e7bf9ca9.yaml0000664000567000056710000000046712701405673026010 0ustar jenkinsjenkins00000000000000--- prelude: > The Mitaka release includes a new API to add arbitrary user-defined metadata to Secrets. upgrade: - The Metadata API requires an update to the Database Schema. Existing deployments that are being upgraded to Mitaka should use the 'barbican-manage' utility to update the schema. barbican-2.0.0/releasenotes/notes/.placeholder0000664000567000056710000000000012701405673022512 0ustar jenkinsjenkins00000000000000barbican-2.0.0/releasenotes/notes/barbican-manage-d469b4d15454f981.yaml0000664000567000056710000000121212701405673026201 0ustar jenkinsjenkins00000000000000--- prelude: > This release includes a new command line utility 'barbican-manage' that consolidates and supersedes the separate HSM and database management scripts. features: - The 'barbican-manage' tool can be used to manage database schema changes as well as provision and rotate keys in the HSM backend. deprecations: - The 'barbican-db-manage' script is deprecated. Use the new 'barbican-manage' utility instead. - The 'pkcs11-kek-rewrap' script is deprecated. Use the new 'barbican-manage' utility instead. - The 'pkcs11-key-generation' script is deprecated. Use the new 'barbican-manage' utility instead. barbican-2.0.0/releasenotes/notes/pkcs11-backend-performance-f3caacbe9e1ab535.yaml0000664000567000056710000000211712701405673030620 0ustar jenkinsjenkins00000000000000--- prelude: > This release includes significant improvements to the performance of the PKCS#11 Cryptographic Plugin driver. These changes will require a data migration of any existing data stored by previous versions of the PKCS#11 backend. 
issues: - > The service will encounter errors if you attempt to run this new release using data stored by a previous version of the PKCS#11 Cryptographic Plugin that has not yet been migrated for this release. The logged errors will look like ``'P11CryptoPluginException: HSM returned response code: 0xc0L CKR_SIGNATURE_INVALID'`` upgrade: - > If you are upgrading from previous version of barbican that uses the PKCS#11 Cryptographic Plugin driver, you will need to run the migration script ``python barbican/cmd/pkcs11_migrate_kek_signatures.py`` critical: - > If you are upgrading from previous version of barbican that uses the PKCS#11 Cryptographic Plugin driver, you will need to run the migration script ``python barbican/cmd/pkcs11_migrate_kek_signatures.py`` barbican-2.0.0/releasenotes/source/0000775000567000056710000000000012701406024020400 5ustar jenkinsjenkins00000000000000barbican-2.0.0/releasenotes/source/index.rst0000664000567000056710000000021012701405674022244 0ustar jenkinsjenkins00000000000000====================== Barbican Release Notes ====================== Contents: .. toctree:: :maxdepth: 1 liberty unreleased barbican-2.0.0/releasenotes/source/unreleased.rst0000664000567000056710000000016012701405673023267 0ustar jenkinsjenkins00000000000000============================== Current Series Release Notes ============================== .. release-notes:: barbican-2.0.0/releasenotes/source/liberty.rst0000664000567000056710000000022212701405673022611 0ustar jenkinsjenkins00000000000000============================== Liberty Series Release Notes ============================== .. release-notes:: :branch: origin/stable/liberty barbican-2.0.0/releasenotes/source/conf.py0000664000567000056710000002160012701405673021707 0ustar jenkinsjenkins00000000000000# -*- coding: utf-8 -*- # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. # Barbican Release Notes documentation build configuration file, created by # sphinx-quickstart on Mon Nov 30 10:43:57 2015. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # sys.path.insert(0, os.path.abspath('.')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'oslosphinx', 'reno.sphinxext', ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. # source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. 
project = u'Barbican Release Notes' copyright = u'2015, Barbican Developers' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # from barbican.version import version_info as barbican_version # The short X.Y version. version = barbican_version.canonical_version_string() # The full version, including alpha/beta/rc tags. release = barbican_version.version_string_with_vcs() # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # today = '' # Else, today_fmt is used as the format for a strftime call. # today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = [] # The reST default role (used for this markup: `text`) to use for all # documents. # default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. # add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). # add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. # show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. # keep_warnings = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. 
html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". # html_title = None # A shorter title for the navigation bar. Default is the same as html_title. # html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. # html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. # html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. # html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. # html_use_smartypants = True # Custom sidebar templates, maps document names to template names. # html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. # html_additional_pages = {} # If false, no module index is generated. # html_domain_indices = True # If false, no index is generated. # html_use_index = True # If true, the index is split into individual pages for each letter. 
# html_split_index = False # If true, links to the reST sources are added to the pages. # html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. # html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. # html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. # html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). # html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'BarbicanReleaseNotesdoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # 'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ ('index', 'BarbicanReleaseNotes.tex', u'Barbican Release Notes Documentation', u'Barbican Developers', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. # latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. # latex_use_parts = False # If true, show page references after internal links. # latex_show_pagerefs = False # If true, show URL addresses after external links. # latex_show_urls = False # Documents to append as an appendix to all manuals. # latex_appendices = [] # If false, no module index is generated. # latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. 
List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'barbicanreleasenotes', u'Barbican Release Notes Documentation', [u'Barbican Developers'], 1) ] # If true, show URL addresses after external links. # man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'BarbicanReleaseNotes', u'Barbican Release Notes Documentation', u'Barbican Developers', 'BarbicanReleaseNotes', 'Barbican Release Notes Documentation.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. # texinfo_appendices = [] # If false, no module index is generated. # texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. # texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. # texinfo_no_detailmenu = False barbican-2.0.0/releasenotes/source/_static/0000775000567000056710000000000012701406024022026 5ustar jenkinsjenkins00000000000000barbican-2.0.0/releasenotes/source/_static/.placeholder0000664000567000056710000000000012701405673024310 0ustar jenkinsjenkins00000000000000barbican-2.0.0/barbican/0000775000567000056710000000000012701406024016150 5ustar jenkinsjenkins00000000000000barbican-2.0.0/barbican/tests/0000775000567000056710000000000012701406024017312 5ustar jenkinsjenkins00000000000000barbican-2.0.0/barbican/tests/keys.py0000664000567000056710000005077512701405673020666 0ustar jenkinsjenkins00000000000000# Copyright (c) 2015 Cisco Systems # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. def get_private_key_pem(): """Returns a private key in PCKS#8 format This key was created by issuing the following openssl commands: openssl genrsa -out private.pem 2048 openssl pkcs8 -topk8 -nocrypt -in private.pem -out private.pk8 The byte string returned by this function is the contents of the private.pk8 file. """ return """-----BEGIN PRIVATE KEY----- MIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQCza2VoDXmBUMmw jFu9F6MM5q/AZ1WjnWA2YNdNy237TrGN/nobDDv8FBBpUPmHNZ04H1LyxFcP8ReF rcIXpifsReu2lAWaqRPxovu5CuAhfecKv+RhjLVLJ0I+MZIb72ROKpfZTmb7dhlF gGD3vkC51BCfhGVW35w52OY/23x5MeO4yvx5myPccnxMVQ42KuDrzKqjBlSjmBnc pGYx0JgCT+syFmHsl8rOkqCPPFLo24YQn+4/pr1AYwaZAbMTl9zoLtEQj6sxScuH cS9e8niptDxlsbLQgqGVaGdE117stC95QH7UvITbuYzdjZwBFc1Sgz8GZ/2hLSsH ujJiIQcvAgMBAAECggEAMOlUKbuSpigp85Ev6Sqqbnfs7Zy+Ae6DLg/UYgbVIq9f RABdtUXujFfD6ZIDlFKPW59ec4QG3/evm+e0g9HuDEE7cviDVphFMZhm2xkV5Mt3 0rxhPB6pxaUcL+w/kpH+XDjMUJdJB8A4P3Qx+xfIeWBQb8wd/ELVSgfRLRNeqYL0 0KXVs04/FOBEhqSiqi/oHYJ4gxNrSoINX71PHVbaEikIygzi4HZVyMut3LE6ceHz fSj71ftn+Ui0TzkLOb+NoBP31haHC/sfCrpKg7QtUP9q9dRq6dZcI17q5d7oEdET eDRKhT2vm7bx2bLGeF1w2H9B/V81upjiAah2RVnecQKBgQDsfHSjR1gd+SHw/2A9 SaXS1k9LeXLt+UbDQdbjYOsh5LoT+EN/utO70RyDYqjlhzqJzciKTuAW5SVPC6gQ uCppA29Kntq7x1+Lw/4wG947poXb60tLdg3BK5mBFTORk5ATqAwVq7t+2NtS5S/J unzs5xrRolDFnSX4KnvVl6Jj3QKBgQDCOXZTVXRPEFhnqnqLErZe6EJkySwG8wgt OdCmr660bocY1i9vV+RaM1iARHX6u/suMhkz+3KRinzxIG5gQsyiWmTpFV298W9v kRtmsCQDn2my90yv4e6sLI0ng7l/N3r7CwLLNIV/CqeyaN40suzE8AjgEga5jTua 6bP5m+x8ewKBgQCeuW3DxXfkLjnUumMK36qX11XDb5FvHjebiE5FsOBAkHdAPgp3 
6ZqBXfoISSjZXakxotft1MDdPRGMe2NjTWjRsQd6iyJ+lHORqIusGJhRaxQ/Ji8U R/k1ZSETnXpORD+YodrylKA0pDKY8dDgUfXVP8wlVg9mg3JfnYweMTdCVQKBgQCx 133iNmgmkTfxzGci+wJkitVohdA7mMOO7daBGnKlImOvuUd784XTlhpecNF6wi/w D82GDKLOY3meLO0EVYYczxqBVqAccXtxM/RcJcMEUi6twcXFcuJhYvXpDbOHqlyA jIeFW9U1C6OcOGvm40Lr3UKzMa5Yrtq6MW4ri7uSCwKBgQDfdqVjT4uXmGwOh1z4 Pzv6GCoc+6GobXg4DvvCUjP9MR+2+5sX0AY/f+aVCD05/Nj0RqpAwUc03zZU5ZtL 2uNe6XDjEugfFtlzea6+rbD6KpFS+nxPJA8YyWYRpNhpRWGWQakHedr3BtMtGs0h pKNAQG72HKWtSfJQMXvn2RlicA== -----END PRIVATE KEY-----""" def get_private_key_der(): """Returns a private key in DER format This key was created by issuing the following openssl commands: openssl genrsa -out private.pem 2048 openssl pkcs8 -in private.pem -topk8 -nocrypt \ -outform DER -out private_pk8.der The byte string returned by this function is the contents of the private_pk8.der file. """ key_der = ( '\x30\x82\x04\xbf\x02\x01\x00\x30\x0d\x06\x09\x2a\x86\x48\x86' '\xf7\x0d\x01\x01\x01\x05\x00\x04\x82\x04\xa9\x30\x82\x04\xa5' '\x02\x01\x00\x02\x82\x01\x01\x00\xb3\x6b\x65\x68\x0d\x79\x81' '\x50\xc9\xb0\x8c\x5b\xbd\x17\xa3\x0c\xe6\xaf\xc0\x67\x55\xa3' '\x9d\x60\x36\x60\xd7\x4d\xcb\x6d\xfb\x4e\xb1\x8d\xfe\x7a\x1b' '\x0c\x3b\xfc\x14\x10\x69\x50\xf9\x87\x35\x9d\x38\x1f\x52\xf2' '\xc4\x57\x0f\xf1\x17\x85\xad\xc2\x17\xa6\x27\xec\x45\xeb\xb6' '\x94\x05\x9a\xa9\x13\xf1\xa2\xfb\xb9\x0a\xe0\x21\x7d\xe7\x0a' '\xbf\xe4\x61\x8c\xb5\x4b\x27\x42\x3e\x31\x92\x1b\xef\x64\x4e' '\x2a\x97\xd9\x4e\x66\xfb\x76\x19\x45\x80\x60\xf7\xbe\x40\xb9' '\xd4\x10\x9f\x84\x65\x56\xdf\x9c\x39\xd8\xe6\x3f\xdb\x7c\x79' '\x31\xe3\xb8\xca\xfc\x79\x9b\x23\xdc\x72\x7c\x4c\x55\x0e\x36' '\x2a\xe0\xeb\xcc\xaa\xa3\x06\x54\xa3\x98\x19\xdc\xa4\x66\x31' '\xd0\x98\x02\x4f\xeb\x32\x16\x61\xec\x97\xca\xce\x92\xa0\x8f' '\x3c\x52\xe8\xdb\x86\x10\x9f\xee\x3f\xa6\xbd\x40\x63\x06\x99' '\x01\xb3\x13\x97\xdc\xe8\x2e\xd1\x10\x8f\xab\x31\x49\xcb\x87' '\x71\x2f\x5e\xf2\x78\xa9\xb4\x3c\x65\xb1\xb2\xd0\x82\xa1\x95' '\x68\x67\x44\xd7\x5e\xec\xb4\x2f\x79\x40\x7e\xd4\xbc\x84\xdb' 
'\xb9\x8c\xdd\x8d\x9c\x01\x15\xcd\x52\x83\x3f\x06\x67\xfd\xa1' '\x2d\x2b\x07\xba\x32\x62\x21\x07\x2f\x02\x03\x01\x00\x01\x02' '\x82\x01\x00\x30\xe9\x54\x29\xbb\x92\xa6\x28\x29\xf3\x91\x2f' '\xe9\x2a\xaa\x6e\x77\xec\xed\x9c\xbe\x01\xee\x83\x2e\x0f\xd4' '\x62\x06\xd5\x22\xaf\x5f\x44\x00\x5d\xb5\x45\xee\x8c\x57\xc3' '\xe9\x92\x03\x94\x52\x8f\x5b\x9f\x5e\x73\x84\x06\xdf\xf7\xaf' '\x9b\xe7\xb4\x83\xd1\xee\x0c\x41\x3b\x72\xf8\x83\x56\x98\x45' '\x31\x98\x66\xdb\x19\x15\xe4\xcb\x77\xd2\xbc\x61\x3c\x1e\xa9' '\xc5\xa5\x1c\x2f\xec\x3f\x92\x91\xfe\x5c\x38\xcc\x50\x97\x49' '\x07\xc0\x38\x3f\x74\x31\xfb\x17\xc8\x79\x60\x50\x6f\xcc\x1d' '\xfc\x42\xd5\x4a\x07\xd1\x2d\x13\x5e\xa9\x82\xf4\xd0\xa5\xd5' '\xb3\x4e\x3f\x14\xe0\x44\x86\xa4\xa2\xaa\x2f\xe8\x1d\x82\x78' '\x83\x13\x6b\x4a\x82\x0d\x5f\xbd\x4f\x1d\x56\xda\x12\x29\x08' '\xca\x0c\xe2\xe0\x76\x55\xc8\xcb\xad\xdc\xb1\x3a\x71\xe1\xf3' '\x7d\x28\xfb\xd5\xfb\x67\xf9\x48\xb4\x4f\x39\x0b\x39\xbf\x8d' '\xa0\x13\xf7\xd6\x16\x87\x0b\xfb\x1f\x0a\xba\x4a\x83\xb4\x2d' '\x50\xff\x6a\xf5\xd4\x6a\xe9\xd6\x5c\x23\x5e\xea\xe5\xde\xe8' '\x11\xd1\x13\x78\x34\x4a\x85\x3d\xaf\x9b\xb6\xf1\xd9\xb2\xc6' '\x78\x5d\x70\xd8\x7f\x41\xfd\x5f\x35\xba\x98\xe2\x01\xa8\x76' '\x45\x59\xde\x71\x02\x81\x81\x00\xec\x7c\x74\xa3\x47\x58\x1d' '\xf9\x21\xf0\xff\x60\x3d\x49\xa5\xd2\xd6\x4f\x4b\x79\x72\xed' '\xf9\x46\xc3\x41\xd6\xe3\x60\xeb\x21\xe4\xba\x13\xf8\x43\x7f' '\xba\xd3\xbb\xd1\x1c\x83\x62\xa8\xe5\x87\x3a\x89\xcd\xc8\x8a' '\x4e\xe0\x16\xe5\x25\x4f\x0b\xa8\x10\xb8\x2a\x69\x03\x6f\x4a' '\x9e\xda\xbb\xc7\x5f\x8b\xc3\xfe\x30\x1b\xde\x3b\xa6\x85\xdb' '\xeb\x4b\x4b\x76\x0d\xc1\x2b\x99\x81\x15\x33\x91\x93\x90\x13' '\xa8\x0c\x15\xab\xbb\x7e\xd8\xdb\x52\xe5\x2f\xc9\xba\x7c\xec' '\xe7\x1a\xd1\xa2\x50\xc5\x9d\x25\xf8\x2a\x7b\xd5\x97\xa2\x63' '\xdd\x02\x81\x81\x00\xc2\x39\x76\x53\x55\x74\x4f\x10\x58\x67' '\xaa\x7a\x8b\x12\xb6\x5e\xe8\x42\x64\xc9\x2c\x06\xf3\x08\x2d' '\x39\xd0\xa6\xaf\xae\xb4\x6e\x87\x18\xd6\x2f\x6f\x57\xe4\x5a' 
'\x33\x58\x80\x44\x75\xfa\xbb\xfb\x2e\x32\x19\x33\xfb\x72\x91' '\x8a\x7c\xf1\x20\x6e\x60\x42\xcc\xa2\x5a\x64\xe9\x15\x5d\xbd' '\xf1\x6f\x6f\x91\x1b\x66\xb0\x24\x03\x9f\x69\xb2\xf7\x4c\xaf' '\xe1\xee\xac\x2c\x8d\x27\x83\xb9\x7f\x37\x7a\xfb\x0b\x02\xcb' '\x34\x85\x7f\x0a\xa7\xb2\x68\xde\x34\xb2\xec\xc4\xf0\x08\xe0' '\x12\x06\xb9\x8d\x3b\x9a\xe9\xb3\xf9\x9b\xec\x7c\x7b\x02\x81' '\x81\x00\x9e\xb9\x6d\xc3\xc5\x77\xe4\x2e\x39\xd4\xba\x63\x0a' '\xdf\xaa\x97\xd7\x55\xc3\x6f\x91\x6f\x1e\x37\x9b\x88\x4e\x45' '\xb0\xe0\x40\x90\x77\x40\x3e\x0a\x77\xe9\x9a\x81\x5d\xfa\x08' '\x49\x28\xd9\x5d\xa9\x31\xa2\xd7\xed\xd4\xc0\xdd\x3d\x11\x8c' '\x7b\x63\x63\x4d\x68\xd1\xb1\x07\x7a\x8b\x22\x7e\x94\x73\x91' '\xa8\x8b\xac\x18\x98\x51\x6b\x14\x3f\x26\x2f\x14\x47\xf9\x35' '\x65\x21\x13\x9d\x7a\x4e\x44\x3f\x98\xa1\xda\xf2\x94\xa0\x34' '\xa4\x32\x98\xf1\xd0\xe0\x51\xf5\xd5\x3f\xcc\x25\x56\x0f\x66' '\x83\x72\x5f\x9d\x8c\x1e\x31\x37\x42\x55\x02\x81\x81\x00\xb1' '\xd7\x7d\xe2\x36\x68\x26\x91\x37\xf1\xcc\x67\x22\xfb\x02\x64' '\x8a\xd5\x68\x85\xd0\x3b\x98\xc3\x8e\xed\xd6\x81\x1a\x72\xa5' '\x22\x63\xaf\xb9\x47\x7b\xf3\x85\xd3\x96\x1a\x5e\x70\xd1\x7a' '\xc2\x2f\xf0\x0f\xcd\x86\x0c\xa2\xce\x63\x79\x9e\x2c\xed\x04' '\x55\x86\x1c\xcf\x1a\x81\x56\xa0\x1c\x71\x7b\x71\x33\xf4\x5c' '\x25\xc3\x04\x52\x2e\xad\xc1\xc5\xc5\x72\xe2\x61\x62\xf5\xe9' '\x0d\xb3\x87\xaa\x5c\x80\x8c\x87\x85\x5b\xd5\x35\x0b\xa3\x9c' '\x38\x6b\xe6\xe3\x42\xeb\xdd\x42\xb3\x31\xae\x58\xae\xda\xba' '\x31\x6e\x2b\x8b\xbb\x92\x0b\x02\x81\x81\x00\xdf\x76\xa5\x63' '\x4f\x8b\x97\x98\x6c\x0e\x87\x5c\xf8\x3f\x3b\xfa\x18\x2a\x1c' '\xfb\xa1\xa8\x6d\x78\x38\x0e\xfb\xc2\x52\x33\xfd\x31\x1f\xb6' '\xfb\x9b\x17\xd0\x06\x3f\x7f\xe6\x95\x08\x3d\x39\xfc\xd8\xf4' '\x46\xaa\x40\xc1\x47\x34\xdf\x36\x54\xe5\x9b\x4b\xda\xe3\x5e' '\xe9\x70\xe3\x12\xe8\x1f\x16\xd9\x73\x79\xae\xbe\xad\xb0\xfa' '\x2a\x91\x52\xfa\x7c\x4f\x24\x0f\x18\xc9\x66\x11\xa4\xd8\x69' '\x45\x61\x96\x41\xa9\x07\x79\xda\xf7\x06\xd3\x2d\x1a\xcd\x21' 
def get_public_key_pem():
    """Returns a public key in PEM format (as a text string)

    This key was created by issuing the following openssl commands:

        openssl genrsa -out private.pem 2048
        openssl rsa -in private.pem -pubout > public.pem

    The string returned by this function is the contents of the
    public.pem file; it is the public half of the 2048-bit RSA key
    returned by get_private_key_pem().
    """
    return """-----BEGIN PUBLIC KEY-----
MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAs2tlaA15gVDJsIxbvRej
DOavwGdVo51gNmDXTctt+06xjf56Gww7/BQQaVD5hzWdOB9S8sRXD/EXha3CF6Yn
7EXrtpQFmqkT8aL7uQrgIX3nCr/kYYy1SydCPjGSG+9kTiqX2U5m+3YZRYBg975A
udQQn4RlVt+cOdjmP9t8eTHjuMr8eZsj3HJ8TFUONirg68yqowZUo5gZ3KRmMdCY
Ak/rMhZh7JfKzpKgjzxS6NuGEJ/uP6a9QGMGmQGzE5fc6C7REI+rMUnLh3EvXvJ4
qbQ8ZbGy0IKhlWhnRNde7LQveUB+1LyE27mM3Y2cARXNUoM/Bmf9oS0rB7oyYiEH
LwIDAQAB
-----END PUBLIC KEY-----"""
""" key_der = ( '\x30\x82\x01\x22\x30\x0d\x06\x09\x2a\x86\x48\x86\xf7\x0d\x01' '\x01\x01\x05\x00\x03\x82\x01\x0f\x00\x30\x82\x01\x0a\x02\x82' '\x01\x01\x00\xb3\x6b\x65\x68\x0d\x79\x81\x50\xc9\xb0\x8c\x5b' '\xbd\x17\xa3\x0c\xe6\xaf\xc0\x67\x55\xa3\x9d\x60\x36\x60\xd7' '\x4d\xcb\x6d\xfb\x4e\xb1\x8d\xfe\x7a\x1b\x0c\x3b\xfc\x14\x10' '\x69\x50\xf9\x87\x35\x9d\x38\x1f\x52\xf2\xc4\x57\x0f\xf1\x17' '\x85\xad\xc2\x17\xa6\x27\xec\x45\xeb\xb6\x94\x05\x9a\xa9\x13' '\xf1\xa2\xfb\xb9\x0a\xe0\x21\x7d\xe7\x0a\xbf\xe4\x61\x8c\xb5' '\x4b\x27\x42\x3e\x31\x92\x1b\xef\x64\x4e\x2a\x97\xd9\x4e\x66' '\xfb\x76\x19\x45\x80\x60\xf7\xbe\x40\xb9\xd4\x10\x9f\x84\x65' '\x56\xdf\x9c\x39\xd8\xe6\x3f\xdb\x7c\x79\x31\xe3\xb8\xca\xfc' '\x79\x9b\x23\xdc\x72\x7c\x4c\x55\x0e\x36\x2a\xe0\xeb\xcc\xaa' '\xa3\x06\x54\xa3\x98\x19\xdc\xa4\x66\x31\xd0\x98\x02\x4f\xeb' '\x32\x16\x61\xec\x97\xca\xce\x92\xa0\x8f\x3c\x52\xe8\xdb\x86' '\x10\x9f\xee\x3f\xa6\xbd\x40\x63\x06\x99\x01\xb3\x13\x97\xdc' '\xe8\x2e\xd1\x10\x8f\xab\x31\x49\xcb\x87\x71\x2f\x5e\xf2\x78' '\xa9\xb4\x3c\x65\xb1\xb2\xd0\x82\xa1\x95\x68\x67\x44\xd7\x5e' '\xec\xb4\x2f\x79\x40\x7e\xd4\xbc\x84\xdb\xb9\x8c\xdd\x8d\x9c' '\x01\x15\xcd\x52\x83\x3f\x06\x67\xfd\xa1\x2d\x2b\x07\xba\x32' '\x62\x21\x07\x2f\x02\x03\x01\x00\x01') return key_der def get_encrypted_private_key_pem(): """Returns an encrypted private key in PKCS#8 format This key was created by issuing the following openssl commands: openssl genrsa -out private.pem 2048 echo password > passphrase.txt openssl pkcs8 -topk8 -passout file:passphrase.txt \ -in private.pem -out private_encrypted.pk8 The byte string returned by this function is the contents of the private_encrypted.pk8 file. 
""" return """-----BEGIN ENCRYPTED PRIVATE KEY----- MIIE6TAbBgkqhkiG9w0BBQMwDgQIssadeQrYhhACAggABIIEyDNw3SV2b19yy4Q/ kTbtJ/p2X2zKDqr7GgLeAowqqhcMfvprI7G8C0XtwxkR4SjMZUXNcmOwQB2kNKtK ZilCz6pSx81iUj4s1fU460XkhkIeV+F7aB2PsTG1oDfPCuzKFjT6EuSE6lFUH89r TRuHWMPseW7lrvEB5kNMFag5QxeKjsSCNkZWOT74o4fh3cEplgCEaA+nCclXU79m 5rhaa9e1SUpPuPlhnAIDkBtHcC38B+SOYKQxLdZT1f72oZ1ozWJ4bEhKxvnNu5J+ tCvgWOXMIEJVGgf8Cu58PoR18LyyAIk7zza+1LkCiyuLNgiz8a1sVw8uBcrVgD5R 8f4XgI/Yjb16Bmpp/0iEjNcURaby9GnCCEc+W/ivSJTnG3o1Xn00FO98l2aggNpt S8gxK05NeCtdWoFFjTeIXxnb1ct0Iep8RwuO+FnupAf6aw12Uqj4qYNvNiY/kBhS P/Yd3KznasrolUZ9+PVTMUI45UTMN/XhNvXrozMq9nItWTV7wHyEL3mrYipvcxrm SnLlAp2zkmSu923cHN1teLE99/rV2jaBM03ROqvYWaxjfOjxfwz6PhdE8G//kWd0 tf2Om+fyCkBRxo1sUcuiE79hJXgP5KJCMbPsDyG/aQk4oeS1nbn15AhthwiU7A13 h9X6asgV2H+4Ljf+tr1b8p3qj3CSljfzoVErLqoHagjVB45WktHhrWbUSRpXSvPo Hh0LY62qxTa67gKjwarH5hYr5IaH39iR9bcyuvzE+u9TJWvWmeLJ7UmesfVPZtSf /JTpvr0zu4C95lXKt4FdxOhGcWwDN1Zp+lCsF5ruBGc+/pEggiXi1qvW9xUny1Of 8NqdPxGPb4/zPHGaysypPsc6LiY3esI8wa7FnDsS4e79dWinD/BPWEa5N2jLm0Rr njkHTy0xtnw/a8Ofrtyy9V1tBBOCaswzGIZZj6oHyFCtAvjZuYa8TWVmSi6EqJKi lY5wSdQQXg3H0HnQYivtOY1YbfjtRkUB9e4xkSVhvYJpY1QWBtApdUGBsxsELkDC 6cv/Kxnd9U7dz9+VhD0hAdrhFqbWqOEGTWt7xE44yzWokdKQWu5FsTs6gyXsGPen ZgZlR5pjPNGbMdftW0M473YyvtzjrCuSVgJspCzpA9uo6wfejaFb4RF/tcWtXglE Q5FzfsO1OZr6nONraShj9N1kxGBXUUOtAjZI/zoTWk3yndxw3IpvPtDTg9ByCp7F RFUtDyrki+YAIAiTgPq7qwc1upjU7R1Zlg4jIe0RI9A73NyLwa4QhgO+HmRBt7At LLuUeCFKuXMBHzlDaMYwq5ZPOb8VcMkhUoug2YJIc4YOOHh5O0mYnat0vaYO+A58 DiuYgxKmO5+6+OMk2ovZgk1sFawR4rk9HUt8goUUptZ+hoHUVGtte5YcQniIOcds qY3ni/zwswHWQRaAu8Ej4qJKt1XwZo2K04xHhL90TMaY8NpLSMCfVqDDL409TqIj zHUfYl6N2Me4eKc8vl6Sm63g57NzLqTttD6KSn8v+OmUF5mOQwcLnr3nK7S+BQfI DLPY1Oh7Kec/M/d1080/Qv9YBAJhz50TLKoxXwVeH4OOvuaHVaotElMkr5QEkEXl gRgwkbMrQjg0II0O9g== -----END ENCRYPTED PRIVATE KEY-----""" def get_passphrase_txt(): """Returns the plain text string used to encrypt the private key This key was created by issuing the following commands: echo password > passphrase.txt The byte string 
returned by this function is the contents of the passphrase.txt file. """ return """password""" def get_csr_pem(): """Returns a Certificate Signing Request (CSR) in PEM format This key was created by issuing the following openssl commands: openssl genrsa -out private.pem 2048 openssl req -new -key private.pem -out csr.pem -subj '/CN=example.com' The byte string returned by this function is the contents of the csr.pem file. """ return """-----BEGIN CERTIFICATE REQUEST----- MIICWzCCAUMCAQAwFjEUMBIGA1UEAwwLZXhhbXBsZS5jb20wggEiMA0GCSqGSIb3 DQEBAQUAA4IBDwAwggEKAoIBAQCza2VoDXmBUMmwjFu9F6MM5q/AZ1WjnWA2YNdN y237TrGN/nobDDv8FBBpUPmHNZ04H1LyxFcP8ReFrcIXpifsReu2lAWaqRPxovu5 CuAhfecKv+RhjLVLJ0I+MZIb72ROKpfZTmb7dhlFgGD3vkC51BCfhGVW35w52OY/ 23x5MeO4yvx5myPccnxMVQ42KuDrzKqjBlSjmBncpGYx0JgCT+syFmHsl8rOkqCP PFLo24YQn+4/pr1AYwaZAbMTl9zoLtEQj6sxScuHcS9e8niptDxlsbLQgqGVaGdE 117stC95QH7UvITbuYzdjZwBFc1Sgz8GZ/2hLSsHujJiIQcvAgMBAAGgADANBgkq hkiG9w0BAQsFAAOCAQEAPJDIxzgtUDRgpfTbTOPDJYap+Lm4jYxsCuAFbYiQ43B+ c7RyzEFOB2anrldTm3XzNytHZAkRTnN4dH09p1K1Pyepv+weSv8rvN9OohfYgpcj wQqw8ksdGb3Q6oPnTgGxmWvV4PbzHmDnOvOiQ+wuBHWXYks6tdgU7iCZ1djYibmL 1j+XEvtstou8gu1lWhzH6tStwmA9udncg5rEvfDUDyvMN3T06QFqrlK9K1TXIlbM RvUDrBjINIOuEeZ/5czjBl1CX1Z1YqdunrPiCQM4+oUAtjyD6ZAsyAEXLKdSYtKZ hSZgIl7v+UAIM+9bhpVg15aTjRzfH2OsZodFIbsMDw== -----END CERTIFICATE REQUEST-----""" def get_certificate_pem(): """Returns an X509 certificate in PEM format This certificate was created by issuing the following openssl commands: openssl genrsa -out private.pem 2048 openssl req -new -x509 -key private.pem -out cert.pem \ -days 1000 -subj '/CN=example.com' The byte string returned by this function is the contents of the cert.pem file. 
""" return """-----BEGIN CERTIFICATE----- MIIC/zCCAeegAwIBAgIJAOLqXKJ9q9/nMA0GCSqGSIb3DQEBCwUAMBYxFDASBgNV BAMMC2V4YW1wbGUuY29tMB4XDTE1MDQxMTAyMTUyOVoXDTE4MDEwNTAyMTUyOVow FjEUMBIGA1UEAwwLZXhhbXBsZS5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw ggEKAoIBAQCza2VoDXmBUMmwjFu9F6MM5q/AZ1WjnWA2YNdNy237TrGN/nobDDv8 FBBpUPmHNZ04H1LyxFcP8ReFrcIXpifsReu2lAWaqRPxovu5CuAhfecKv+RhjLVL J0I+MZIb72ROKpfZTmb7dhlFgGD3vkC51BCfhGVW35w52OY/23x5MeO4yvx5myPc cnxMVQ42KuDrzKqjBlSjmBncpGYx0JgCT+syFmHsl8rOkqCPPFLo24YQn+4/pr1A YwaZAbMTl9zoLtEQj6sxScuHcS9e8niptDxlsbLQgqGVaGdE117stC95QH7UvITb uYzdjZwBFc1Sgz8GZ/2hLSsHujJiIQcvAgMBAAGjUDBOMB0GA1UdDgQWBBSUq2A0 b2Xo+sKvmKgN8Wq8l6j82jAfBgNVHSMEGDAWgBSUq2A0b2Xo+sKvmKgN8Wq8l6j8 2jAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQBjiuqhlzNVOVLrHDQy Gr0fTACFJdDREnuhZp4d91++DmMCT+bcTG0+GCp3rfFOuEWpJLLLPdSOnIsnibsO syKPXuBBX5kmdYIojbdjUTSwnhcx9JTAfKSmxXWSC0rnKCefAf44Mm6fqvoTyTbe GSQP6nHzc7eLaK/efcrMvYdct+TeTkHjqR8Lu4pjZvRdUQadQHhDyN+ONKdKD9Tr jvfPim0b7Aq885PjSN6Qo4Z9HXR6+nK+bTz9HyUATMfDGNQt0L3vyfVxbNOxkCBc YI4hFtGfkOzd6B7r2sY1wGKdTLHkuT4m4/9A/SOzvnH+epnJqIS9jw+1iRj8xcDA 6PNT -----END CERTIFICATE----- """ def get_certificate_der(): """Returns an X509 certificate in DER format This certificate was created by issuing the following openssl commands: openssl genrsa -out private.pem 2048 openssl req -new -x509 -key private.pem -out cert.pem \ -days 1000 -subj '/CN=example.com' openssl x509 -outform der -in cert.pem -out cert.der The byte string returned by this function is the contents of the cert.der file. 
""" cert_der = ( '\x30\x82\x02\xff\x30\x82\x01\xe7\xa0\x03\x02\x01\x02\x02\x09' '\x00\xe2\xea\x5c\xa2\x7d\xab\xdf\xe7\x30\x0d\x06\x09\x2a\x86' '\x48\x86\xf7\x0d\x01\x01\x0b\x05\x00\x30\x16\x31\x14\x30\x12' '\x06\x03\x55\x04\x03\x0c\x0b\x65\x78\x61\x6d\x70\x6c\x65\x2e' '\x63\x6f\x6d\x30\x1e\x17\x0d\x31\x35\x30\x34\x31\x31\x30\x32' '\x31\x35\x32\x39\x5a\x17\x0d\x31\x38\x30\x31\x30\x35\x30\x32' '\x31\x35\x32\x39\x5a\x30\x16\x31\x14\x30\x12\x06\x03\x55\x04' '\x03\x0c\x0b\x65\x78\x61\x6d\x70\x6c\x65\x2e\x63\x6f\x6d\x30' '\x82\x01\x22\x30\x0d\x06\x09\x2a\x86\x48\x86\xf7\x0d\x01\x01' '\x01\x05\x00\x03\x82\x01\x0f\x00\x30\x82\x01\x0a\x02\x82\x01' '\x01\x00\xb3\x6b\x65\x68\x0d\x79\x81\x50\xc9\xb0\x8c\x5b\xbd' '\x17\xa3\x0c\xe6\xaf\xc0\x67\x55\xa3\x9d\x60\x36\x60\xd7\x4d' '\xcb\x6d\xfb\x4e\xb1\x8d\xfe\x7a\x1b\x0c\x3b\xfc\x14\x10\x69' '\x50\xf9\x87\x35\x9d\x38\x1f\x52\xf2\xc4\x57\x0f\xf1\x17\x85' '\xad\xc2\x17\xa6\x27\xec\x45\xeb\xb6\x94\x05\x9a\xa9\x13\xf1' '\xa2\xfb\xb9\x0a\xe0\x21\x7d\xe7\x0a\xbf\xe4\x61\x8c\xb5\x4b' '\x27\x42\x3e\x31\x92\x1b\xef\x64\x4e\x2a\x97\xd9\x4e\x66\xfb' '\x76\x19\x45\x80\x60\xf7\xbe\x40\xb9\xd4\x10\x9f\x84\x65\x56' '\xdf\x9c\x39\xd8\xe6\x3f\xdb\x7c\x79\x31\xe3\xb8\xca\xfc\x79' '\x9b\x23\xdc\x72\x7c\x4c\x55\x0e\x36\x2a\xe0\xeb\xcc\xaa\xa3' '\x06\x54\xa3\x98\x19\xdc\xa4\x66\x31\xd0\x98\x02\x4f\xeb\x32' '\x16\x61\xec\x97\xca\xce\x92\xa0\x8f\x3c\x52\xe8\xdb\x86\x10' '\x9f\xee\x3f\xa6\xbd\x40\x63\x06\x99\x01\xb3\x13\x97\xdc\xe8' '\x2e\xd1\x10\x8f\xab\x31\x49\xcb\x87\x71\x2f\x5e\xf2\x78\xa9' '\xb4\x3c\x65\xb1\xb2\xd0\x82\xa1\x95\x68\x67\x44\xd7\x5e\xec' '\xb4\x2f\x79\x40\x7e\xd4\xbc\x84\xdb\xb9\x8c\xdd\x8d\x9c\x01' '\x15\xcd\x52\x83\x3f\x06\x67\xfd\xa1\x2d\x2b\x07\xba\x32\x62' '\x21\x07\x2f\x02\x03\x01\x00\x01\xa3\x50\x30\x4e\x30\x1d\x06' '\x03\x55\x1d\x0e\x04\x16\x04\x14\x94\xab\x60\x34\x6f\x65\xe8' '\xfa\xc2\xaf\x98\xa8\x0d\xf1\x6a\xbc\x97\xa8\xfc\xda\x30\x1f' '\x06\x03\x55\x1d\x23\x04\x18\x30\x16\x80\x14\x94\xab\x60\x34' 
'\x6f\x65\xe8\xfa\xc2\xaf\x98\xa8\x0d\xf1\x6a\xbc\x97\xa8\xfc' '\xda\x30\x0c\x06\x03\x55\x1d\x13\x04\x05\x30\x03\x01\x01\xff' '\x30\x0d\x06\x09\x2a\x86\x48\x86\xf7\x0d\x01\x01\x0b\x05\x00' '\x03\x82\x01\x01\x00\x63\x8a\xea\xa1\x97\x33\x55\x39\x52\xeb' '\x1c\x34\x32\x1a\xbd\x1f\x4c\x00\x85\x25\xd0\xd1\x12\x7b\xa1' '\x66\x9e\x1d\xf7\x5f\xbe\x0e\x63\x02\x4f\xe6\xdc\x4c\x6d\x3e' '\x18\x2a\x77\xad\xf1\x4e\xb8\x45\xa9\x24\xb2\xcb\x3d\xd4\x8e' '\x9c\x8b\x27\x89\xbb\x0e\xb3\x22\x8f\x5e\xe0\x41\x5f\x99\x26' '\x75\x82\x28\x8d\xb7\x63\x51\x34\xb0\x9e\x17\x31\xf4\x94\xc0' '\x7c\xa4\xa6\xc5\x75\x92\x0b\x4a\xe7\x28\x27\x9f\x01\xfe\x38' '\x32\x6e\x9f\xaa\xfa\x13\xc9\x36\xde\x19\x24\x0f\xea\x71\xf3' '\x73\xb7\x8b\x68\xaf\xde\x7d\xca\xcc\xbd\x87\x5c\xb7\xe4\xde' '\x4e\x41\xe3\xa9\x1f\x0b\xbb\x8a\x63\x66\xf4\x5d\x51\x06\x9d' '\x40\x78\x43\xc8\xdf\x8e\x34\xa7\x4a\x0f\xd4\xeb\x8e\xf7\xcf' '\x8a\x6d\x1b\xec\x0a\xbc\xf3\x93\xe3\x48\xde\x90\xa3\x86\x7d' '\x1d\x74\x7a\xfa\x72\xbe\x6d\x3c\xfd\x1f\x25\x00\x4c\xc7\xc3' '\x18\xd4\x2d\xd0\xbd\xef\xc9\xf5\x71\x6c\xd3\xb1\x90\x20\x5c' '\x60\x8e\x21\x16\xd1\x9f\x90\xec\xdd\xe8\x1e\xeb\xda\xc6\x35' '\xc0\x62\x9d\x4c\xb1\xe4\xb9\x3e\x26\xe3\xff\x40\xfd\x23\xb3' '\xbe\x71\xfe\x7a\x99\xc9\xa8\x84\xbd\x8f\x0f\xb5\x89\x18\xfc' '\xc5\xc0\xc0\xe8\xf3\x53') return cert_der barbican-2.0.0/barbican/tests/certificate_utils.py0000664000567000056710000000526612701405673023410 0ustar jenkinsjenkins00000000000000# Copyright (c) 2015 Cisco Systems # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. 
# See the License for the specific language governing permissions and # limitations under the License. """ The following functions were created for testing purposes. """ from OpenSSL import crypto def create_key_pair(type, bits): key_pair = crypto.PKey() key_pair.generate_key(type, bits) return key_pair def get_valid_csr_object(): """Create a valid X509Req object""" key_pair = create_key_pair(crypto.TYPE_RSA, 2048) csr = crypto.X509Req() subject = csr.get_subject() setattr(subject, "CN", "host.example.net") csr.set_pubkey(key_pair) csr.sign(key_pair, "sha256") return csr def create_good_csr(): """Generate a CSR that will pass validation.""" csr = get_valid_csr_object() pem = crypto.dump_certificate_request(crypto.FILETYPE_PEM, csr) return pem def create_csr_that_has_not_been_signed(): """Generate a CSR that has not been signed.""" key_pair = create_key_pair(crypto.TYPE_RSA, 2048) csr = crypto.X509Req() subject = csr.get_subject() setattr(subject, "CN", "host.example.net") csr.set_pubkey(key_pair) pem = crypto.dump_certificate_request(crypto.FILETYPE_PEM, csr) return pem def create_csr_signed_with_wrong_key(): """Generate a CSR that has been signed by the wrong key.""" key_pair1 = create_key_pair(crypto.TYPE_RSA, 2048) key_pair2 = create_key_pair(crypto.TYPE_RSA, 2048) csr = crypto.X509Req() subject = csr.get_subject() setattr(subject, "CN", "host.example.net") # set public key from key pair 1 csr.set_pubkey(key_pair1) # sign with public key from key pair 2 csr.sign(key_pair2, "sha256") pem = crypto.dump_certificate_request(crypto.FILETYPE_PEM, csr) return pem def create_bad_csr(): """Generate a CSR that will not parse.""" return "Bad PKCS10 Data" def create_csr_with_bad_subject_dn(): """Generate a CSR that has a bad subject dn.""" key_pair = create_key_pair(crypto.TYPE_RSA, 2048) csr = crypto.X509Req() subject = csr.get_subject() # server certs require attribute 'CN' setattr(subject, "UID", "bar") csr.set_pubkey(key_pair) csr.sign(key_pair, "sha256") pem = 
from six.moves import http_client

# Connection parameters for a locally running Barbican API instance.
host = "localhost"
port = 9311
method = "GET"
timeout = 1000
body = None
path = "/"
headers = ""

# Response expected from the Barbican version endpoint.
expected_response = {"v1": "current", "build": "0.1.34dev"}

# Typically an authenticated user session will make a request for a key to
# barbican
# The restful request in all likelihood contain an auth token
# this test mimics such a request provided a token
# if pki tokens are used, the token is rather large
# uuid tokens are smaller and easier to test with
# assume there is a "demo" user with only member role
# curl -XPOST -d '{"auth":{"passwordCredentials":{"username": "demo",
# "password": "secret"}, "tenantName": "demo"}}'
#  -H "Content-type: application/json" http://localhost:35357/v2.0/tokens
#
# pull out the token_id from above and use in ping_barbican
#


# TODO(malini) flesh this out
def get_demo_token(password):
    # Placeholder: intended to fetch a Keystone token for the "demo"
    # user (see the curl example above); not implemented yet.
    pass


def ping_barbican(token_id):
    # Issue a GET / against the local Barbican API carrying the given
    # auth token, and return the raw response body (bytes).
    headers = {'X_AUTH_TOKEN': token_id, 'X_IDENTITY_STATUS': 'Confirmed'}
    connection = http_client.HTTPConnection(host, port, timeout=timeout)
    connection.request(method, path, None, headers)
    response = connection.getresponse().read()
    connection.close()
    return response
class BarbicanAPIBaseTestCase(oslotest.BaseTestCase):
    """Base TestCase for all tests needing to interact with a Barbican app."""

    # Subclasses set this to the pecan root controller they want the
    # test WSGI app built around; None builds the default app.
    root_controller = None

    def _build_context(self, project_id, roles=None, user=None, is_admin=True,
                       policy_enforcer=None):
        # Build a barbican RequestContext for the given project and
        # attach the (possibly None) policy enforcer to it.
        context = barbican.context.RequestContext(
            roles=roles,
            user=user,
            project=project_id,
            is_admin=is_admin
        )
        context.policy_enforcer = policy_enforcer
        return context

    def setUp(self):
        super(BarbicanAPIBaseTestCase, self).setUp()
        # Make sure we have a test db and session to work with
        database_utils.setup_in_memory_db()

        # Generic project id to perform actions under
        self.project_id = str(uuid.uuid4())

        # Build the test app wrapped in webtest so requests can be made
        # without a running server; every request carries a pre-built
        # admin context for self.project_id via extra_environ.
        wsgi_app = app.build_wsgi_app(
            controller=self.root_controller,
            transactional=True
        )

        self.app = webtest.TestApp(wsgi_app)
        self.app.extra_environ = {
            'barbican.context': self._build_context(self.project_id)
        }

    def tearDown(self):
        # Drop the in-memory DB before the base-class teardown runs.
        database_utils.in_memory_cleanup()
        super(BarbicanAPIBaseTestCase, self).tearDown()
class BaseTestCase(oslotest.BaseTestCase):
    """DEPRECATED - Will remove in future refactoring."""

    def setUp(self):
        super(BaseTestCase, self).setUp()
        # Canned identifiers shared by the legacy tests.
        self.order_id = 'order1234'
        self.external_project_id = 'keystone1234'
        self.request_id = 'request1234'

    def tearDown(self):
        super(BaseTestCase, self).tearDown()


class MockModelRepositoryMixin(object):
    """Class for setting up the repo factory mocks

    This class has the purpose of setting up the mocks for the model
    repository factory functions. This is because they are intended to be
    singletons, and thus called inside the code-base, and not really passed
    around as arguments. Thus, this kind of approach is needed.

    The functions assume that the class that inherits from this is a test
    case fixture class. This is because as a side-effect patcher objects
    will be added to the class, and also the cleanup of these patcher
    objects will be added to the tear-down of the respective classes.

    NOTE: the ``setup_*`` methods default their mock argument to ``None``
    and create a fresh ``mock.MagicMock()`` per call.  Using
    ``mock.MagicMock()`` directly as the default argument (as earlier
    revisions did) would evaluate it once at class-definition time, so
    every test relying on the default would share a single mock instance,
    leaking recorded calls and configured behavior between tests.
    """

    @staticmethod
    def _fresh_mock_if_none(mock_repo):
        """Return mock_repo, or a brand-new MagicMock when None was given."""
        return mock.MagicMock() if mock_repo is None else mock_repo

    def setup_container_consumer_repository_mock(
            self, mock_container_consumer_repo=None):
        """Mocks the container consumer repository factory function

        :param mock_container_consumer_repo: The pre-configured mock
            container consumer repo to be returned (fresh mock if omitted).
        """
        mock_container_consumer_repo = self._fresh_mock_if_none(
            mock_container_consumer_repo)
        self.mock_container_consumer_repo_patcher = None
        self._setup_repository_mock(
            repo_factory='get_container_consumer_repository',
            mock_repo_obj=mock_container_consumer_repo,
            patcher_obj=self.mock_container_consumer_repo_patcher)

    def setup_container_repository_mock(self, mock_container_repo=None):
        """Mocks the container repository factory function

        :param mock_container_repo: The pre-configured mock container repo
            to be returned (fresh mock if omitted).
        """
        mock_container_repo = self._fresh_mock_if_none(mock_container_repo)
        self.mock_container_repo_patcher = None
        self._setup_repository_mock(
            repo_factory='get_container_repository',
            mock_repo_obj=mock_container_repo,
            patcher_obj=self.mock_container_repo_patcher)

    def setup_container_secret_repository_mock(
            self, mock_container_secret_repo=None):
        """Mocks the container-secret repository factory function

        :param mock_container_secret_repo: The pre-configured mock
            container-secret repo to be returned (fresh mock if omitted).
        """
        mock_container_secret_repo = self._fresh_mock_if_none(
            mock_container_secret_repo)
        self.mock_container_secret_repo_patcher = None
        self._setup_repository_mock(
            repo_factory='get_container_secret_repository',
            mock_repo_obj=mock_container_secret_repo,
            patcher_obj=self.mock_container_secret_repo_patcher)

    def setup_encrypted_datum_repository_mock(
            self, mock_encrypted_datum_repo=None):
        """Mocks the encrypted datum repository factory function

        :param mock_encrypted_datum_repo: The pre-configured mock
            encrypted datum repo to be returned (fresh mock if omitted).
        """
        mock_encrypted_datum_repo = self._fresh_mock_if_none(
            mock_encrypted_datum_repo)
        self.mock_encrypted_datum_repo_patcher = None
        self._setup_repository_mock(
            repo_factory='get_encrypted_datum_repository',
            mock_repo_obj=mock_encrypted_datum_repo,
            patcher_obj=self.mock_encrypted_datum_repo_patcher)

    def setup_kek_datum_repository_mock(self, mock_kek_datum_repo=None):
        """Mocks the kek datum repository factory function

        :param mock_kek_datum_repo: The pre-configured mock kek-datum repo
            to be returned (fresh mock if omitted).
        """
        mock_kek_datum_repo = self._fresh_mock_if_none(mock_kek_datum_repo)
        self.mock_kek_datum_repo_patcher = None
        self._setup_repository_mock(
            repo_factory='get_kek_datum_repository',
            mock_repo_obj=mock_kek_datum_repo,
            patcher_obj=self.mock_kek_datum_repo_patcher)

    def setup_order_barbican_meta_repository_mock(
            self, mock_order_barbican_meta_repo=None):
        """Mocks the order-barbican-meta repository factory function

        :param mock_order_barbican_meta_repo: The pre-configured mock order
            barbican-meta repo to be returned (fresh mock if omitted).
        """
        mock_order_barbican_meta_repo = self._fresh_mock_if_none(
            mock_order_barbican_meta_repo)
        self.mock_order_barbican_meta_repo_patcher = None
        self._setup_repository_mock(
            repo_factory='get_order_barbican_meta_repository',
            mock_repo_obj=mock_order_barbican_meta_repo,
            patcher_obj=self.mock_order_barbican_meta_repo_patcher)

    def setup_order_plugin_meta_repository_mock(
            self, mock_order_plugin_meta_repo=None):
        """Mocks the order-plugin-meta repository factory function

        :param mock_order_plugin_meta_repo: The pre-configured mock order
            plugin-meta repo to be returned (fresh mock if omitted).
        """
        mock_order_plugin_meta_repo = self._fresh_mock_if_none(
            mock_order_plugin_meta_repo)
        self.mock_order_plugin_meta_repo_patcher = None
        self._setup_repository_mock(
            repo_factory='get_order_plugin_meta_repository',
            mock_repo_obj=mock_order_plugin_meta_repo,
            patcher_obj=self.mock_order_plugin_meta_repo_patcher)

    def setup_order_repository_mock(self, mock_order_repo=None):
        """Mocks the order repository factory function

        :param mock_order_repo: The pre-configured mock order repo to be
            returned (fresh mock if omitted).
        """
        mock_order_repo = self._fresh_mock_if_none(mock_order_repo)
        self.mock_order_repo_patcher = None
        self._setup_repository_mock(
            repo_factory='get_order_repository',
            mock_repo_obj=mock_order_repo,
            patcher_obj=self.mock_order_repo_patcher)

    def setup_project_repository_mock(self, mock_project_repo=None):
        """Mocks the project repository factory function

        :param mock_project_repo: The pre-configured mock project repo to be
            returned (fresh mock if omitted).
        """
        mock_project_repo = self._fresh_mock_if_none(mock_project_repo)
        self.mock_project_repo_patcher = None
        self._setup_repository_mock(
            repo_factory='get_project_repository',
            mock_repo_obj=mock_project_repo,
            patcher_obj=self.mock_project_repo_patcher)

    def setup_secret_meta_repository_mock(self, mock_secret_meta_repo=None):
        """Mocks the secret-meta repository factory function

        :param mock_secret_meta_repo: The pre-configured mock secret-meta
            repo to be returned (fresh mock if omitted).
        """
        mock_secret_meta_repo = self._fresh_mock_if_none(
            mock_secret_meta_repo)
        self.mock_secret_meta_repo_patcher = None
        self._setup_repository_mock(
            repo_factory='get_secret_meta_repository',
            mock_repo_obj=mock_secret_meta_repo,
            patcher_obj=self.mock_secret_meta_repo_patcher)

    def setup_secret_repository_mock(self, mock_secret_repo=None):
        """Mocks the secret repository factory function

        :param mock_secret_repo: The pre-configured mock secret repo to be
            returned (fresh mock if omitted).
        """
        mock_secret_repo = self._fresh_mock_if_none(mock_secret_repo)
        self.mock_secret_repo_patcher = None
        self._setup_repository_mock(
            repo_factory='get_secret_repository',
            mock_repo_obj=mock_secret_repo,
            patcher_obj=self.mock_secret_repo_patcher)

    def setup_transport_key_repository_mock(
            self, mock_transport_key_repo=None):
        """Mocks the transport-key repository factory function

        :param mock_transport_key_repo: The pre-configured mock
            transport_key repo to be returned (fresh mock if omitted).
        """
        mock_transport_key_repo = self._fresh_mock_if_none(
            mock_transport_key_repo)
        self.mock_transport_key_repo_patcher = None
        self._setup_repository_mock(
            repo_factory='get_transport_key_repository',
            mock_repo_obj=mock_transport_key_repo,
            patcher_obj=self.mock_transport_key_repo_patcher)

    def setup_ca_repository_mock(self, mock_ca_repo=None):
        """Mocks the CA repository factory function

        :param mock_ca_repo: The pre-configured mock ca repo to be
            returned (fresh mock if omitted).
        """
        mock_ca_repo = self._fresh_mock_if_none(mock_ca_repo)
        self.mock_ca_repo_patcher = None
        self._setup_repository_mock(
            repo_factory='get_ca_repository',
            mock_repo_obj=mock_ca_repo,
            patcher_obj=self.mock_ca_repo_patcher)

    def setup_preferred_ca_repository_mock(self, mock_preferred_ca_repo=None):
        """Mocks the preferred-CA repository factory function

        :param mock_preferred_ca_repo: The pre-configured mock preferred-ca
            repo to be returned (fresh mock if omitted).
        """
        mock_preferred_ca_repo = self._fresh_mock_if_none(
            mock_preferred_ca_repo)
        self.mock_preferred_ca_repo_patcher = None
        self._setup_repository_mock(
            repo_factory='get_preferred_ca_repository',
            mock_repo_obj=mock_preferred_ca_repo,
            patcher_obj=self.mock_preferred_ca_repo_patcher)

    def setup_project_ca_repository_mock(self, mock_project_ca_repo=None):
        """Mocks the project-CA repository factory function

        :param mock_project_ca_repo: The pre-configured mock project ca
            repo to be returned (fresh mock if omitted).
        """
        mock_project_ca_repo = self._fresh_mock_if_none(mock_project_ca_repo)
        self.mock_project_ca_repo_patcher = None
        self._setup_repository_mock(
            repo_factory='get_project_ca_repository',
            mock_repo_obj=mock_project_ca_repo,
            patcher_obj=self.mock_project_ca_repo_patcher)

    def _setup_repository_mock(self, repo_factory, mock_repo_obj, patcher_obj):
        # Patch the named factory in barbican.model.repositories so it
        # returns mock_repo_obj, start the patcher immediately, and register
        # its stop with the test fixture's cleanup.  NOTE: patcher_obj is
        # kept for signature compatibility but is rebound locally, exactly
        # as in the original implementation; the self.mock_*_patcher
        # attributes therefore remain None after setup.
        patcher_obj = mock.patch(
            'barbican.model.repositories.' + repo_factory,
            return_value=mock_repo_obj
        )
        patcher_obj.start()
        self.addCleanup(patcher_obj.stop)


def construct_new_test_function(original_func, name, build_params):
    """Builds a new test function based on parameterized data.

    :param original_func: The original test function that is used as a
        template
    :param name: The fullname of the new test function
    :param build_params: A dictionary or list containing args or kwargs
        for the new test
    :return: A new function object
    """
    # Clone the template function's code/globals/defaults under the new name.
    new_func = types.FunctionType(
        six.get_function_code(original_func),
        six.get_function_globals(original_func),
        name=name,
        argdefs=six.get_function_defaults(original_func)
    )

    # Carry over function attributes, except the parameterization payload.
    for key, val in original_func.__dict__.items():
        if key != 'build_data':
            new_func.__dict__[key] = val

    # Support either an arg list or kwarg dict for our data
    build_args = build_params if isinstance(build_params, list) else []
    build_kwargs = build_params if isinstance(build_params, dict) else {}

    # Build a test wrapper to execute with our kwargs
    def test_wrapper(func, test_args, test_kwargs):
        @functools.wraps(func)
        def wrapper(self):
            return func(self, *test_args, **test_kwargs)
        return wrapper

    return test_wrapper(new_func, build_args, build_kwargs)
                                   func_obj, build_data):
    """Build lists of functions to add and remove to a test case."""
    to_remove = []
    to_add = []

    for subtest_name, params in build_data.items():
        # Build new test function
        func_name = '{0}_{1}'.format(name, subtest_name)
        new_func = construct_new_test_function(func_obj, func_name, params)

        # Mark the new function as needed to be added to the class
        to_add.append((func_name, new_func))

        # Mark key for removal
        # NOTE(review): appended once per subtest, so ``to_remove`` contains
        # duplicates of ``name``; harmless because the delattr in
        # parameterized_test_case is guarded by hasattr.
        to_remove.append(name)

    return to_remove, to_add


def parameterized_test_case(cls):
    """Class decorator to process parameterized tests

    This allows for parameterization to be used for potentially any
    unittest compatible runner; including testr and py.test.
    """
    tests_to_remove = []
    tests_to_add = []
    for key, val in vars(cls).items():
        # Only process tests with build data on them
        if key.startswith('test_') and val.__dict__.get('build_data'):
            to_remove, to_add = process_parameterized_function(
                name=key,
                func_obj=val,
                build_data=val.__dict__.get('build_data')
            )
            tests_to_remove.extend(to_remove)
            tests_to_add.extend(to_add)

    # Add all new test functions
    [setattr(cls, name, func) for name, func in tests_to_add]

    # Remove all old test function templates (if they still exist)
    [delattr(cls, key) for key in tests_to_remove if hasattr(cls, key)]
    return cls


def parameterized_dataset(build_data):
    """Simple decorator to mark a test method for processing."""
    def decorator(func):
        func.__dict__['build_data'] = build_data
        return func
    return decorator


def create_timestamp_w_tz_and_offset(timezone=None, days=0, hours=0,
                                     minutes=0, seconds=0):
    """Creates a timestamp with a timezone and offset in days

    :param timezone: Timezone used in creation of timestamp; defaults to
        the local timezone's UTC offset
    :param days: The offset in days
    :param hours: The offset in hours
    :param minutes: The offset in minutes
    :param seconds: The offset in seconds
    :return: a timestamp string
    """
    if timezone is None:
        timezone = time.strftime("%z")
    timestamp = '{time}{timezone}'.format(
        time=(datetime.datetime.today() + datetime.timedelta(days=days,
                                                             hours=hours,
                                                             minutes=minutes,
seconds=seconds)), timezone=timezone) return timestamp def get_limit_and_offset_from_ref(ref): matches = dict(parse.parse_qsl(parse.urlparse(ref).query)) ref_limit = matches['limit'] ref_offset = matches['offset'] return ref_limit, ref_offset def get_tomorrow_timestamp(): tomorrow = (datetime.today() + datetime.timedelta(days=1)) return tomorrow.isoformat() def string_to_datetime(datetimestring, date_formats=None): date_formats = date_formats or [ '%Y-%m-%d %H:%M:%S', '%Y-%m-%d %H:%M:%S.%f', '%Y-%m-%dT%H:%M:%S.%fZ', '%Y-%m-%dT%H:%M:%S.%f', "%Y-%m-%dT%H:%M:%SZ", "%Y-%m-%dT%H:%M:%S"] for dateformat in date_formats: try: return datetime.datetime.strptime(datetimestring, dateformat) except ValueError: continue else: raise def get_id_from_ref(ref): """Returns id from reference.""" ref_id = None if ref is not None and len(ref) > 0: ref_id = path.split(ref)[1] return ref_id def generate_test_uuid(tail_value=0): """Returns a blank uuid with the given value added to the end segment.""" return '00000000-0000-0000-0000-{value:0>{pad}}'.format(value=tail_value, pad=12) def get_symmetric_key(): s = "MIICdgIBADANBgkqhkiG9w==" return s def get_triple_des_key(): s = "AQIDBAUGBwgBAgMEBQYHCAECAwQFBgcI" return s def is_cert_valid(expected, observed): c1 = crypto.load_certificate(crypto.FILETYPE_PEM, expected) c2 = crypto.load_certificate(crypto.FILETYPE_PEM, observed) return (crypto.dump_certificate(crypto.FILETYPE_PEM, c1) == crypto.dump_certificate(crypto.FILETYPE_PEM, c2)) def is_private_key_valid(expected, observed): k1 = crypto.load_privatekey(crypto.FILETYPE_PEM, expected) k2 = crypto.load_privatekey(crypto.FILETYPE_PEM, observed) return (crypto.dump_privatekey(crypto.FILETYPE_PEM, k1) == crypto.dump_privatekey(crypto.FILETYPE_PEM, k2)) def is_public_key_valid(expected, observed): # TODO(alee) fill in the relevant test here return True class DummyClassForTesting(object): pass barbican-2.0.0/barbican/tests/__init__.py0000664000567000056710000000000012701405673021422 0ustar 
jenkinsjenkins00000000000000barbican-2.0.0/barbican/tests/queue/0000775000567000056710000000000012701406024020436 5ustar jenkinsjenkins00000000000000barbican-2.0.0/barbican/tests/queue/test_server.py0000664000567000056710000004227112701405673023374 0ustar jenkinsjenkins00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import datetime import mock import six from barbican.model import models from barbican.model import repositories from barbican.queue import server from barbican.tasks import common from barbican.tests import database_utils from barbican.tests import utils class WhenUsingTransactionalDecorator(utils.BaseTestCase): """Test using the 'transactional' decorator in server.py. Note that only the 'I am a server' logic is tested here, as the alternate mode is only used for direct invocation of Task methods in the standalone server mode, which is also thoroughly tested in WhenUsingBeginTypeOrderTask below. """ def setUp(self): super(WhenUsingTransactionalDecorator, self).setUp() # Ensure we always thing we are in 'I am a server' mode. 
is_server_side_config = { 'return_value': True } self.is_server_side_patcher = mock.patch( 'barbican.queue.is_server_side', **is_server_side_config ) self.is_server_side_patcher.start() self.commit_patcher = mock.patch( 'barbican.model.repositories.commit' ) self.commit_mock = self.commit_patcher.start() self.rollback_patcher = mock.patch( 'barbican.model.repositories.rollback' ) self.rollback_mock = self.rollback_patcher.start() self.clear_patcher = mock.patch( 'barbican.model.repositories.clear' ) self.clear_mock = self.clear_patcher.start() self.args = ('foo', 'bar') self.kwargs = {'k_foo': 1, 'k_bar': 2} # Class/decorator under test. class TestClass(object): my_args = None my_kwargs = None is_exception_needed = False @server.transactional def test_method(self, *args, **kwargs): if self.is_exception_needed: raise ValueError() self.my_args = args self.my_kwargs = kwargs self.test_object = TestClass() def tearDown(self): super(WhenUsingTransactionalDecorator, self).tearDown() self.is_server_side_patcher.stop() self.commit_patcher.stop() self.rollback_patcher.stop() self.clear_patcher.stop() def test_should_commit(self): self.test_object.test_method(*self.args, **self.kwargs) self.assertEqual(self.args, self.test_object.my_args) self.assertEqual(self.kwargs, self.test_object.my_kwargs) self.assertEqual(1, self.commit_mock.call_count) self.assertEqual(0, self.rollback_mock.call_count) self.assertEqual(1, self.clear_mock.call_count) def test_should_rollback(self): self.test_object.is_exception_needed = True self.test_object.test_method(*self.args, **self.kwargs) self.assertEqual(0, self.commit_mock.call_count) self.assertEqual(1, self.rollback_mock.call_count) self.assertEqual(1, self.clear_mock.call_count) class WhenUsingRetryableOrderDecorator(utils.BaseTestCase): """Test using the 'retryable_order' decorator in server.py.""" def setUp(self): super(WhenUsingRetryableOrderDecorator, self).setUp() self.schedule_retry_tasks_patcher = mock.patch( 
'barbican.queue.server.schedule_order_retry_tasks' ) self.schedule_retry_tasks_mock = ( self.schedule_retry_tasks_patcher.start() ) self.order_id = 'order-id' self.args = ('foo', 'bar') self.kwargs = {'k_foo': 1, 'k_bar': 2} # Class/decorator under test. class TestClass(object): self.order_id = None my_args = None my_kwargs = None is_exception_needed = False result = common.FollowOnProcessingStatusDTO() @server.retryable_order def test_method(self, order_id, *args, **kwargs): if self.is_exception_needed: raise ValueError() self.order_id = order_id self.my_args = args self.my_kwargs = kwargs return self.result self.test_object = TestClass() self.test_method = TestClass.test_method def tearDown(self): super(WhenUsingRetryableOrderDecorator, self).tearDown() self.schedule_retry_tasks_patcher.stop() def test_should_successfully_schedule_a_task_for_retry(self): self.test_object.test_method(self.order_id, *self.args, **self.kwargs) self.assertEqual(self.order_id, self.test_object.order_id) self.assertEqual(self.args, self.test_object.my_args) self.assertEqual(self.kwargs, self.test_object.my_kwargs) self.assertEqual(1, self.schedule_retry_tasks_mock.call_count) self.schedule_retry_tasks_mock.assert_called_with( mock.ANY, self.test_object.result, self.order_id, *self.args, **self.kwargs) def test_retry_should_not_be_scheduled_if_exception_is_raised(self): self.test_object.is_exception_needed = True self.assertRaises( ValueError, self.test_object.test_method, self.order_id, self.args, self.kwargs, ) self.assertEqual(0, self.schedule_retry_tasks_mock.call_count) class WhenCallingScheduleOrderRetryTasks(database_utils.RepositoryTestCase): """Test calling schedule_order_retry_tasks() in server.py.""" def setUp(self): super(WhenCallingScheduleOrderRetryTasks, self).setUp() self.project = database_utils.create_project() self.order = database_utils.create_order(self.project) database_utils.get_session().commit() self.repo = repositories.OrderRetryTaskRepo() self.result = 
common.FollowOnProcessingStatusDTO() self.args = ['args-foo', 'args-bar'] self.kwargs = {'order_id': self.order.id, 'foo': 1, 'bar': 2} self.date_to_retry_at = ( datetime.datetime.utcnow() + datetime.timedelta( milliseconds=self.result.retry_msec) ) def test_should_not_schedule_task_due_to_no_result(self): retry_rpc_method = server.schedule_order_retry_tasks(None, None, None) self.assertIsNone(retry_rpc_method) def test_should_not_schedule_task_due_to_no_action_required_result(self): self.result.retry_task = common.RetryTasks.NO_ACTION_REQUIRED retry_rpc_method = server.schedule_order_retry_tasks( None, self.result, None) self.assertIsNone(retry_rpc_method) def test_should_schedule_invoking_task_for_retry(self): self.result.retry_task = common.RetryTasks.INVOKE_SAME_TASK # Schedule this test method as the passed-in 'retry' function. retry_rpc_method = server.schedule_order_retry_tasks( self.test_should_schedule_invoking_task_for_retry, self.result, None, # Not used. *self.args, **self.kwargs) database_utils.get_session().commit() # Flush to the database. self.assertEqual( 'test_should_schedule_invoking_task_for_retry', retry_rpc_method) def test_should_schedule_certificate_status_task_for_retry(self): self.result.retry_task = ( common.RetryTasks.INVOKE_CERT_STATUS_CHECK_TASK ) # Schedule this test method as the passed-in 'retry' function. retry_rpc_method = server.schedule_order_retry_tasks( None, # Should be ignored for non-self retries. self.result, None, # Not used. *self.args, **self.kwargs) database_utils.get_session().commit() # Flush to the database. self.assertEqual( 'check_certificate_status', retry_rpc_method) self._verify_retry_task_entity( 'check_certificate_status') def _verify_retry_task_entity(self, retry_task): # Retrieve the task retry entity created above and verify it. 
entities, offset, limit, total = self.repo.get_by_create_date() self.assertEqual(1, total) retry_model = entities[0] self.assertEqual(retry_task, retry_model.retry_task) self.assertEqual(self.args, retry_model.retry_args) self.assertEqual(self.kwargs, retry_model.retry_kwargs) self.assertEqual(0, retry_model.retry_count) # Compare retry_at times. # Note that the expected retry_at time is computed at setUp() time, but # the retry_at time on the task retry entity/model is computed and set # a few milliseconds after this setUp() time, hence they will vary by a # small amount of time. delta = retry_model.retry_at - self.date_to_retry_at delta_seconds = delta.seconds self.assertLessEqual(delta_seconds, 2) class WhenCallingTasksMethod(utils.BaseTestCase): """Test calling methods on the Tasks class.""" def setUp(self): super(WhenCallingTasksMethod, self).setUp() # Mock the 'am I a server process?' flag used by the decorator around # all task methods. Since this test class focuses on testing task # method behaviors, this flag is set to false to allow for direct # testing of these tasks without database transactional interference. 
is_server_side_config = { 'return_value': False } self.is_server_side_patcher = mock.patch( 'barbican.queue.is_server_side', **is_server_side_config ) self.is_server_side_patcher.start() self.tasks = server.Tasks() def tearDown(self): super(WhenCallingTasksMethod, self).tearDown() self.is_server_side_patcher.stop() @mock.patch('barbican.queue.server.schedule_order_retry_tasks') @mock.patch('barbican.tasks.resources.BeginTypeOrder') def test_should_process_begin_order(self, mock_begin_order, mock_schedule): method = mock_begin_order.return_value.process_and_suppress_exceptions method.return_value = 'result' self.tasks.process_type_order( None, self.order_id, self.external_project_id, self.request_id) mock_process = mock_begin_order.return_value mock_process.process_and_suppress_exceptions.assert_called_with( self.order_id, self.external_project_id) mock_schedule.assert_called_with( mock.ANY, 'result', None, 'order1234', 'keystone1234', 'request1234') @mock.patch('barbican.queue.server.schedule_order_retry_tasks') @mock.patch('barbican.tasks.resources.UpdateOrder') def test_should_process_update_order( self, mock_update_order, mock_schedule): method = mock_update_order.return_value.process_and_suppress_exceptions method.return_value = 'result' updated_meta = {'foo': 1} self.tasks.update_order( None, self.order_id, self.external_project_id, updated_meta, self.request_id) mock_process = mock_update_order.return_value mock_process.process_and_suppress_exceptions.assert_called_with( self.order_id, self.external_project_id, updated_meta ) mock_schedule.assert_called_with( mock.ANY, 'result', None, 'order1234', 'keystone1234', updated_meta, 'request1234') @mock.patch('barbican.queue.server.schedule_order_retry_tasks') @mock.patch('barbican.tasks.resources.CheckCertificateStatusOrder') def test_should_check_certificate_order( self, mock_check_cert, mock_schedule): method = mock_check_cert.return_value.process_and_suppress_exceptions method.return_value = 'result' 
self.tasks.check_certificate_status( None, self.order_id, self.external_project_id, self.request_id) mock_process = mock_check_cert.return_value mock_process.process_and_suppress_exceptions.assert_called_with( self.order_id, self.external_project_id ) mock_schedule.assert_called_with( mock.ANY, 'result', None, 'order1234', 'keystone1234', 'request1234') @mock.patch('barbican.tasks.resources.BeginTypeOrder') def test_process_order_catch_exception(self, mock_begin_order): """Test that BeginTypeOrder's process() handles all exceptions.""" mock_begin_order.return_value._process.side_effect = Exception() self.tasks.process_type_order(None, self.order_id, self.external_project_id, self.request_id) class WhenUsingTaskServer(database_utils.RepositoryTestCase): """Test using the asynchronous task client. This test suite performs a full-stack test of worker-side task processing (except for queue interactions, which are mocked). This includes testing database commit and session close behaviors. """ def setUp(self): super(WhenUsingTaskServer, self).setUp() # Queue target mocking setup. self.target = 'a target value here' queue_get_target_config = { 'return_value': self.target } self.queue_get_target_patcher = mock.patch( 'barbican.queue.get_target', **queue_get_target_config ) self.queue_get_target_mock = self.queue_get_target_patcher.start() # Queue server mocking setup. self.server_mock = mock.MagicMock() self.server_mock.start.return_value = None self.server_mock.stop.return_value = None queue_get_server_config = { 'return_value': self.server_mock } self.queue_get_server_patcher = mock.patch( 'barbican.queue.get_server', **queue_get_server_config ) self.queue_get_server_mock = self.queue_get_server_patcher.start() self.server = server.TaskServer() # Add an order to the in-memory database. 
self.external_id = 'keystone-id' project = database_utils.create_project( external_id=self.external_id) self.order = database_utils.create_order( project=project) self.request_id = 'request1234' def tearDown(self): super(WhenUsingTaskServer, self).tearDown() self.queue_get_target_patcher.stop() self.queue_get_server_patcher.stop() def test_should_start(self): self.server.start() self.queue_get_target_mock.assert_called_with() self.queue_get_server_mock.assert_called_with( target=self.target, endpoints=[self.server]) self.server_mock.start.assert_called_with() def test_should_stop(self): self.server.stop() self.queue_get_target_mock.assert_called_with() self.queue_get_server_mock.assert_called_with( target=self.target, endpoints=[self.server]) self.server_mock.stop.assert_called_with() def test_process_bogus_begin_type_order_should_not_rollback(self): order_id = self.order.id self.order.type = 'bogus-type' # Force error out of business logic. # Invoke process, including the transactional decorator that terminates # the session when it is done. Hence we must re-retrieve the order for # verification afterwards. self.server.process_type_order( None, self.order.id, self.external_id, self.request_id) order_repo = repositories.get_order_repository() order_result = order_repo.get(order_id, self.external_id) self.assertEqual(models.States.ERROR, order_result.status) self.assertEqual( six.u( 'Process TypeOrder failure seen - ' 'please contact site administrator.'), order_result.error_reason) self.assertEqual( six.u('500'), order_result.error_status_code) def test_process_bogus_update_type_order_should_not_rollback(self): order_id = self.order.id self.order.type = 'bogus-type' # Force error out of business logic. # Invoke process, including the transactional decorator that terminates # the session when it is done. Hence we must re-retrieve the order for # verification afterwards. 
self.server.update_order( None, self.order.id, self.external_id, None, self.request_id) order_repo = repositories.get_order_repository() order_result = order_repo.get(order_id, self.external_id) self.assertEqual(models.States.ERROR, order_result.status) self.assertEqual( six.u( 'Update Order failure seen - ' 'please contact site administrator.'), order_result.error_reason) self.assertEqual( six.u('500'), order_result.error_status_code) barbican-2.0.0/barbican/tests/queue/__init__.py0000664000567000056710000000000012701405673022546 0ustar jenkinsjenkins00000000000000barbican-2.0.0/barbican/tests/queue/test_retry_scheduler.py0000664000567000056710000001722312701405673025270 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import datetime import time import eventlet import mock import oslotest.base as oslotest from barbican.model import models from barbican.model import repositories from barbican.queue import retry_scheduler from barbican.tests import database_utils # Oslo messaging RPC server uses eventlet. eventlet.monkey_patch() INITIAL_DELAY_SECONDS = 5.0 NEXT_RETRY_SECONDS = 5.0 def is_interval_in_expected_range(interval): return NEXT_RETRY_SECONDS * .8 <= interval < NEXT_RETRY_SECONDS * 1.2 class WhenRunningPeriodicServerRetryLogic(database_utils.RepositoryTestCase): """Tests the retry logic invoked by the periodic task retry server. These tests are only concerned with the logic of the invoked periodic task method. 
Testing of whether or not the periodic tasks are actually invoked per configured schedule configuration is deferred to the tests in :class:`WhenRunningPeriodicServer`. """ def setUp(self): super(WhenRunningPeriodicServerRetryLogic, self).setUp() retry_scheduler.CONF.set_override( "initial_delay_seconds", 2 * INITIAL_DELAY_SECONDS, group='retry_scheduler', enforce_type=True) retry_scheduler.CONF.set_override( "periodic_interval_max_seconds", NEXT_RETRY_SECONDS, group='retry_scheduler', enforce_type=True) self.queue_client = mock.MagicMock() self.periodic_server = retry_scheduler.PeriodicServer( queue_resource=self.queue_client) def tearDown(self): super(WhenRunningPeriodicServerRetryLogic, self).tearDown() self.periodic_server.stop() def test_should_perform_retry_processing_no_tasks(self): interval = self.periodic_server._check_retry_tasks() self.assertTrue(is_interval_in_expected_range(interval)) def test_should_perform_retry_processing_one_task(self): # Add one retry task. args, kwargs, retry_repo = self._create_retry_task() # Retrieve this entity. entities, _, _, total = retry_repo.get_by_create_date() self.assertEqual(1, total) time.sleep(1) interval = self.periodic_server._check_retry_tasks() # Attempt to retrieve this entity, should have been deleted above. entities, _, _, total = retry_repo.get_by_create_date( suppress_exception=True) self.assertEqual(0, total) self.assertTrue(is_interval_in_expected_range(interval)) self.queue_client.test_task.assert_called_once_with( *args, **kwargs ) @mock.patch('barbican.model.repositories.commit') def test_should_fail_and_force_a_rollback(self, mock_commit): mock_commit.side_effect = Exception() # Add one retry task. args, kwargs, retry_repo = self._create_retry_task() # Retrieve this entity. entities, _, _, total = retry_repo.get_by_create_date() self.assertEqual(1, total) time.sleep(1) self.periodic_server._check_retry_tasks() # Attempt to retrieve this entity, should not have been deleted above. 
entities, _, _, total = retry_repo.get_by_create_date( suppress_exception=True) self.assertEqual(1, total) @mock.patch('barbican.model.repositories.get_order_retry_tasks_repository') def test_should_fail_process_retry(self, mock_get_repo): mock_get_repo.return_value.get_by_create_date.side_effect = \ Exception() periodic_server_with_mock_repo = retry_scheduler.PeriodicServer( queue_resource=self.queue_client) interval = periodic_server_with_mock_repo._check_retry_tasks() self.assertTrue(is_interval_in_expected_range(interval)) def _create_retry_task(self): # Add one retry task: task = 'test_task' args = ('foo', 'bar') kwargs = {'k_foo': 1, 'k_bar': 2} order = database_utils.create_order() retry = models.OrderRetryTask() retry.order_id = order.id retry.retry_at = datetime.datetime.utcnow() retry.retry_task = task retry.retry_args = args retry.retry_kwargs = kwargs retry_repo = repositories.get_order_retry_tasks_repository() retry_repo.create_from(retry) database_utils.get_session().commit() return args, kwargs, retry_repo class WhenRunningPeriodicServer(oslotest.BaseTestCase): """Tests the timing-related functionality of the periodic task retry server. These tests are only concerned with whether or not periodic tasks are actually invoked per configured schedule configuration. The logic of the invoked periodic task method itself is deferred to the tests in :class:`WhenRunningPeriodicServerRetryLogic`. 
""" def setUp(self): super(WhenRunningPeriodicServer, self).setUp() retry_scheduler.CONF.set_override( "initial_delay_seconds", INITIAL_DELAY_SECONDS, group='retry_scheduler', enforce_type=True) self.database_patcher = _DatabasePatcherHelper() self.database_patcher.start() self.periodic_server = _PeriodicServerStub(queue_resource=None) self.periodic_server.start() def tearDown(self): super(WhenRunningPeriodicServer, self).tearDown() self.periodic_server.stop() self.database_patcher.stop() def test_should_have_invoked_periodic_task_after_initial_delay(self): # Wait a bit longer than the initial delay. time.sleep(3 * INITIAL_DELAY_SECONDS / 2) self.assertEqual(1, self.periodic_server.invoke_count) def test_should_have_invoked_periodic_task_twice(self): # Wait a bit longer than the initial delay plus retry interval. time.sleep(INITIAL_DELAY_SECONDS + 2 * NEXT_RETRY_SECONDS) self.assertEqual(2, self.periodic_server.invoke_count) def test_should_have_not_invoked_periodic_task_yet(self): # Wait a short time, before the initial delay expires. time.sleep(1) self.assertEqual(0, self.periodic_server.invoke_count) class _PeriodicServerStub(retry_scheduler.PeriodicServer): """Periodic server testing stub class. This class overrides the periodic retry task so that we can track how many times it has been invoked by the Oslo periodic task process. 
""" def __init__(self, queue_resource=None): super(_PeriodicServerStub, self).__init__() self.invoke_count = 0 def _check_retry_tasks(self): """Override the periodic method, indicating we have called it.""" self.invoke_count += 1 return NEXT_RETRY_SECONDS class _DatabasePatcherHelper(object): """This test suite does not test database interactions, so just stub it.""" def __init__(self): super(_DatabasePatcherHelper, self).__init__() database_config = { 'return_value': None } self.database_patcher = mock.patch( 'barbican.model.repositories.setup_database_engine_and_factory', **database_config ) def start(self): self.database_patcher.start() def stop(self): self.database_patcher.stop() barbican-2.0.0/barbican/tests/queue/test_client.py0000664000567000056710000000644412701405673023346 0ustar jenkinsjenkins00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
import mock

from barbican import queue
from barbican.queue import client
from barbican.tests import utils


class WhenUsingAsyncTaskClient(utils.BaseTestCase):
    """Test using the asynchronous task client.

    Verifies that each TaskClient method results in a one-way RPC 'cast'
    with the expected method name and keyword arguments.
    """

    def setUp(self):
        super(WhenUsingAsyncTaskClient, self).setUp()

        # Mock out the queue get_client() call:
        self.mock_client = mock.MagicMock()
        self.mock_client.cast.return_value = None

        get_client_config = {
            'return_value': self.mock_client
        }
        self.get_client_patcher = mock.patch(
            'barbican.queue.get_client',
            **get_client_config
        )
        self.get_client_patcher.start()

        # NOTE(review): self.order_id / self.external_project_id /
        # self.request_id used by the tests below are not assigned here —
        # presumably provided by utils.BaseTestCase; confirm against that
        # base class.
        self.client = client.TaskClient()

    def tearDown(self):
        super(WhenUsingAsyncTaskClient, self).tearDown()
        self.get_client_patcher.stop()

    def test_should_process_type_order(self):
        # A process_type_order call must cast the matching RPC with an
        # empty context dict.
        self.client.process_type_order(order_id=self.order_id,
                                       project_id=self.external_project_id,
                                       request_id=self.request_id)
        self.mock_client.cast.assert_called_with(
            {}, 'process_type_order', order_id=self.order_id,
            project_id=self.external_project_id,
            request_id=self.request_id)

    def test_should_update_order(self):
        updated_meta = {}
        self.client.update_order(order_id=self.order_id,
                                 project_id=self.external_project_id,
                                 updated_meta=updated_meta,
                                 request_id=self.request_id)
        self.mock_client.cast.assert_called_with(
            {}, 'update_order', order_id=self.order_id,
            project_id=self.external_project_id,
            updated_meta=updated_meta,
            request_id=self.request_id)

    def test_should_check_certificate_order(self):
        self.client.check_certificate_status(
            order_id=self.order_id,
            project_id=self.external_project_id,
            request_id=self.request_id)
        self.mock_client.cast.assert_called_with(
            {}, 'check_certificate_status', order_id=self.order_id,
            project_id=self.external_project_id,
            request_id=self.request_id)


class WhenCreatingDirectTaskClient(utils.BaseTestCase):
    """Test using the synchronous task client (i.e.
standalone mode).""" def setUp(self): super(WhenCreatingDirectTaskClient, self).setUp() queue.get_client = mock.MagicMock(return_value=None) self.client = client.TaskClient() def test_should_use_direct_task_client(self): self.assertIsInstance(self.client._client, client._DirectTaskInvokerClient) barbican-2.0.0/barbican/tests/queue/test_keystone_listener.py0000664000567000056710000003447612701405673025644 0ustar jenkinsjenkins00000000000000# Copyright (c) 2014 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
import uuid

import mock
import oslo_messaging
from oslo_service import service

from barbican.common import config
from barbican import queue
from barbican.queue import keystone_listener
from barbican.tasks import keystone_consumer as consumer
from barbican.tests import utils


class UtilMixin(object):
    """Mixin providing oslo.config override helpers with auto-revert."""

    def __init__(self, *args, **kwargs):
        super(UtilMixin, self).__init__(*args, **kwargs)
        self.conf = config.CONF
        # dict which has item as {property: (value, group_name)}
        self.overrides = {}

    def revert_overrides(self):
        '''Reverts configuration override values after test end.'''
        for k, v in self.overrides.items():
            value, group = v
            self.conf.set_override(k, value, group)

    def setUp(self):
        super(UtilMixin, self).setUp()
        # Ensure every override recorded via opt_in_group() is undone.
        self.addCleanup(self.revert_overrides)

    def opt_in_group(self, group, **kw):
        """Override config options in *group*, remembering original values."""
        for k, v in kw.items():
            # add to local overrides if its not already set
            # we want to keep the original value from first override
            dict_value = self.overrides.get(k)
            if not dict_value:
                if group:
                    orig_value = getattr(getattr(self.conf, group), k)
                else:
                    orig_value = getattr(self.conf, k)
                self.overrides[k] = orig_value, group
            self.conf.set_override(k, v, group)


class WhenUsingNotificationTask(UtilMixin, utils.BaseTestCase):
    """Test for 'Notification' task functionality."""

    def setUp(self):
        super(WhenUsingNotificationTask, self).setUp()

        self.task = keystone_listener.NotificationTask(self.conf)
        self.payload = {'resource_info': uuid.uuid4().hex}

        # Positions of event_type and payload inside the info() arg list.
        self.type_index = 2
        self.payload_index = 3
        self.task_args = ['my_context', 'publisher_id', 'event_type',
                          self.payload, {'metadata': 'value'}]

    @mock.patch.object(keystone_listener.NotificationTask, 'process_event')
    def test_info_level_notification(self, mock_process):
        # info() must delegate to process_event() with identical args.
        self.task.info(*self.task_args)
        mock_process.assert_called_once_with(*self.task_args)

    @mock.patch.object(consumer.KeystoneEventConsumer, 'process',
                       return_value=None)
    def test_create_project_event_notification(self, mock_process):
        # Only project *delete* events are consumed; create is ignored.
        self.task_args[self.type_index] = 'identity.project.created'
        result = self.task.info(*self.task_args)
        self.assertFalse(mock_process.called, 'Should not call event consumer '
                         'for project create event')
        self.assertIsNone(result)
    @mock.patch.object(consumer.KeystoneEventConsumer, 'process',
                       return_value=None)
    def test_update_project_event_notification(self, mock_process):
        # Project *update* events are not consumed.
        self.task_args[self.type_index] = 'identity.project.updated'
        result = self.task.info(*self.task_args)
        self.assertFalse(mock_process.called, 'Should not call event consumer '
                         'for project update event')
        self.assertIsNone(result)

    @mock.patch.object(consumer.KeystoneEventConsumer, 'process',
                       return_value=None)
    def test_delete_project_event_notification_with_required_data(
            self, mock_process):
        # Happy path: a project delete event reaches the consumer and the
        # notification is reported as HANDLED.
        project_id = uuid.uuid4().hex
        self.task_args[self.type_index] = 'identity.project.deleted'
        self.task_args[self.payload_index] = {'resource_info': project_id}
        result = self.task.info(*self.task_args)
        mock_process.assert_called_once_with(project_id=project_id,
                                             operation_type='deleted',
                                             resource_type='project')
        self.assertEqual(oslo_messaging.NotificationResult.HANDLED, result)

    @mock.patch.object(consumer.KeystoneEventConsumer, 'process',
                       return_value=None)
    def test_delete_project_event_with_different_service_name_in_event_type(
            self, mock_process):
        # The service-name prefix of event_type is not validated; any
        # '<service>.project.deleted' is consumed.
        project_id = uuid.uuid4().hex
        self.task_args[self.type_index] = 'aaa.project.deleted'
        self.task_args[self.payload_index] = {'resource_info': project_id}
        result = self.task.info(*self.task_args)
        mock_process.assert_called_once_with(project_id=project_id,
                                             operation_type='deleted',
                                             resource_type='project')
        self.assertEqual(oslo_messaging.NotificationResult.HANDLED, result)

    @mock.patch.object(consumer.KeystoneEventConsumer, 'process',
                       return_value=None)
    def test_delete_project_event_with_event_type_in_different_case(
            self, mock_process):
        # event_type matching is case-insensitive.
        project_id = uuid.uuid4().hex
        self.task_args[self.type_index] = 'Identity.PROJECT.DeleteD'
        self.task_args[self.payload_index] = {'resource_info': project_id}
        result = self.task.info(*self.task_args)
        mock_process.assert_called_once_with(project_id=project_id,
                                             operation_type='deleted',
                                             resource_type='project')
        self.assertEqual(oslo_messaging.NotificationResult.HANDLED, result)

    @mock.patch.object(consumer.KeystoneEventConsumer, 'process',
                       return_value=None)
    def test_delete_project_event_with_incomplete_event_type_format(
            self, mock_process):
        # A two-part event_type (missing the service segment) is ignored.
        project_id = uuid.uuid4().hex
        self.task_args[self.type_index] = 'project.deleted'
        self.task_args[self.payload_index] = {'resource_info': project_id}
        result = self.task.info(*self.task_args)
        self.assertFalse(mock_process.called, 'Should not call event consumer '
                         'for project delete event as service name is missing '
                         'in event_type data. Expected format is '
                         ' ..')
        self.assertIsNone(result)

    @mock.patch.object(consumer.KeystoneEventConsumer, 'process',
                       return_value=None)
    def test_delete_project_event_notification_with_missing_resource_info(
            self, mock_process):
        # No project id in the payload -> event ignored.
        self.task_args[self.type_index] = 'identity.project.deleted'
        self.task_args[self.payload_index] = {'resource_info': None}
        result = self.task.info(*self.task_args)
        self.assertFalse(mock_process.called, 'Should not call event consumer '
                         'for project delete event when project_id is missing '
                         'in payload')
        self.assertIsNone(result)

    @mock.patch.object(consumer.KeystoneEventConsumer, 'process',
                       return_value=None)
    def test_delete_project_event_notification_with_missing_payload(
            self, mock_process):
        self.task_args[self.type_index] = 'identity.project.deleted'
        self.task_args[self.payload_index] = None
        result = self.task.info(*self.task_args)
        self.assertFalse(mock_process.called, 'Should not call event consumer '
                         'for project delete event when payload is missing')
        self.assertIsNone(result)

    @mock.patch.object(consumer.KeystoneEventConsumer, 'process',
                       return_value=None)
    def test_delete_project_event_notification_with_blank_payload(
            self, mock_process):
        self.task_args[self.type_index] = 'identity.project.deleted'
        self.task_args[self.payload_index] = ''
        result = self.task.info(*self.task_args)
        self.assertFalse(mock_process.called, 'Should not call event consumer '
                         'for project delete event when payload is missing')
        self.assertIsNone(result)

    @mock.patch.object(consumer.KeystoneEventConsumer, 'process',
                       return_value=None)
    def test_event_notification_with_missing_event_type(self, mock_process):
        project_id = uuid.uuid4().hex
        self.task_args[self.type_index] = None
        self.task_args[self.payload_index] = {'resource_info': project_id}
        result = self.task.info(*self.task_args)
        self.assertFalse(mock_process.called, 'Should not call event consumer '
                         'for keystone event when event_type is missing in '
                         'notification')
        self.assertIsNone(result)

    @mock.patch.object(consumer.KeystoneEventConsumer, 'process',
                       return_value=None)
    def test_event_notification_with_blank_event_type(self, mock_process):
        project_id = uuid.uuid4().hex
        self.task_args[self.type_index] = ''
        self.task_args[self.payload_index] = {'resource_info': project_id}
        result = self.task.info(*self.task_args)
        self.assertFalse(mock_process.called, 'Should not call event consumer '
                         'keystone event when event_type is blank in '
                         'notification')
        self.assertIsNone(result)

    @mock.patch.object(consumer.KeystoneEventConsumer, 'process')
    def test_event_notification_with_processing_error_requeue_disabled(
            self, mock_process):
        # When the consumer raises and requeue is disabled, the listener
        # still reports HANDLED so the message is not redelivered.
        self.opt_in_group(queue.KS_NOTIFICATIONS_GRP_NAME,
                          allow_requeue=False)
        local_task = keystone_listener.NotificationTask(self.conf)

        mock_process.side_effect = Exception('Dummy Error')
        project_id = uuid.uuid4().hex
        self.task_args[self.type_index] = 'identity.project.deleted'
        self.task_args[self.payload_index] = {'resource_info': project_id}
        result = local_task.info(*self.task_args)
        self.assertTrue(mock_process.called, 'Should call event consumer for'
                        ' project delete event')
        self.assertEqual(oslo_messaging.NotificationResult.HANDLED, result)
mock_process): self.opt_in_group(queue.KS_NOTIFICATIONS_GRP_NAME, allow_requeue=True) local_task = keystone_listener.NotificationTask(self.conf) mock_process.side_effect = Exception('Dummy Error') project_id = uuid.uuid4().hex self.task_args[self.type_index] = 'identity.project.deleted' self.task_args[self.payload_index] = {'resource_info': project_id} result = local_task.info(*self.task_args) self.assertTrue(mock_process.called, 'Should call event consumer for' ' project delete event') self.assertEqual(oslo_messaging.NotificationResult.REQUEUE, result) class WhenUsingMessageServer(UtilMixin, utils.BaseTestCase): """Test using the asynchronous task client.""" def setUp(self): super(WhenUsingMessageServer, self).setUp() queue.init(self.conf) patcher = mock.patch('oslo_messaging.server.MessageHandlingServer') mock_server_class = patcher.start() self.addCleanup(patcher.stop) self.msg_server_mock = mock_server_class() self.msg_server_mock.start.return_value = None self.msg_server_mock.stop.return_value = None self.msg_server_mock.wait.return_value = None @mock.patch.object(queue, 'get_notification_server') @mock.patch.object(queue, 'get_notification_target') def test_target_and_notification_server_invocations(self, mock_target, mock_server): target = 'a target value here' mock_target.return_value = target msg_server = keystone_listener.MessageServer(self.conf) mock_target.assert_called_once_with() mock_server.assert_called_once_with( targets=[target], endpoints=[msg_server]) def test_keystone_notification_config_used(self): topic = 'my test topic' exchange = 'my test exchange' version = ' my test version' self.opt_in_group(queue.KS_NOTIFICATIONS_GRP_NAME, topic=topic) self.opt_in_group(queue.KS_NOTIFICATIONS_GRP_NAME, control_exchange=exchange) self.opt_in_group(queue.KS_NOTIFICATIONS_GRP_NAME, version=version) self.opt_in_group(queue.KS_NOTIFICATIONS_GRP_NAME, version=version) target = queue.get_notification_target() self.assertEqual(topic, target.topic) 
self.assertEqual(exchange, target.exchange) self.assertEqual(version, target.version) @mock.patch.object(service.Service, '__init__') def test_keystone_notification_pool_size_used(self, mock_service_init): thread_pool_size = 5 self.opt_in_group(queue.KS_NOTIFICATIONS_GRP_NAME, thread_pool_size=thread_pool_size) msg_server = keystone_listener.MessageServer(self.conf) mock_service_init.assert_called_once_with(msg_server, threads=thread_pool_size) @mock.patch.object(service.Service, 'start') def test_should_start(self, mock_service): msg_server = keystone_listener.MessageServer(self.conf) msg_server.start() self.msg_server_mock.start.assert_called_with() @mock.patch.object(service.Service, 'stop') def test_should_stop(self, mock_service_stop): msg_server = keystone_listener.MessageServer(self.conf) msg_server.stop() self.msg_server_mock.stop.assert_called_with() @mock.patch.object(service.Service, 'wait') def test_should_wait(self, mock_service_wait): msg_server = keystone_listener.MessageServer(self.conf) msg_server.wait() self.assertFalse(self.msg_server_mock.stop.called, 'No need to call' 'message server wait() as Service itself creates the ' ' wait event') self.assertTrue(mock_service_wait.called, 'Expected to only call ' 'service.Service.wait() method') barbican-2.0.0/barbican/tests/plugin/0000775000567000056710000000000012701406024020610 5ustar jenkinsjenkins00000000000000barbican-2.0.0/barbican/tests/plugin/util/0000775000567000056710000000000012701406024021565 5ustar jenkinsjenkins00000000000000barbican-2.0.0/barbican/tests/plugin/util/test_translations.py0000664000567000056710000003052212701405673025732 0ustar jenkinsjenkins00000000000000# Copyright (c) 2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64

from barbican.plugin.interface import secret_store as s
from barbican.plugin.util import translations
from barbican.tests import keys
from barbican.tests import utils


@utils.parameterized_test_case
class WhenNormalizingBeforeEncryption(utils.BaseTestCase):
    """Parameterized tests for translations.normalize_before_encryption.

    NOTE(review): several dataset entries call base64.b64encode() on a str
    (e.g. b64encode('stuff')), which only works on Python 2 — confirm
    intended interpreter before porting.
    """

    # Inputs expected to raise a specific secret_store exception.
    dataset_for_raised_exceptions = {
        'non_encrypted_content': {
            'exception': s.SecretNoPayloadProvidedException,
            'unencrypted': None,
            'secret_type': s.SecretType.OPAQUE,
            'content_type': '',
            'content_encoding': ''
        },
        'invalid_content_type': {
            'exception': s.SecretContentTypeNotSupportedException,
            'unencrypted': 'stuff',
            'secret_type': s.SecretType.OPAQUE,
            'content_type': 'nope',
            'content_encoding': ''
        },
        'content_encoding_isnt_base64': {
            'exception': s.SecretContentEncodingMustBeBase64,
            'unencrypted': 'stuff',
            'secret_type': s.SecretType.OPAQUE,
            'content_type': 'application/octet-stream',
            'content_encoding': 'other_stuff',
            'enforce_text_only': True
        },
        'unsupported_content_encoding': {
            'exception': s.SecretContentEncodingNotSupportedException,
            'unencrypted': 'stuff',
            'secret_type': s.SecretType.OPAQUE,
            'content_type': 'application/octet-stream',
            'content_encoding': 'other_stuff'
        }
    }

    # Inputs that normalize successfully; 'expected' is the normalized bytes.
    dataset_for_normalization = {
        'plain_text': {
            'unencrypted': 'stuff',
            'secret_type': s.SecretType.OPAQUE,
            'content_type': 'text/plain',
            'content_encoding': '',
            'expected': base64.b64encode('stuff'.encode('utf-8'))
        },
        'binary_base64': {
            'unencrypted': base64.b64encode('stuff'),
            'secret_type': s.SecretType.OPAQUE,
            'content_type': 'application/octet-stream',
            'content_encoding': 'base64',
            'expected': base64.b64encode('stuff')
        },
        'binary': {
            'unencrypted': 'stuff',
            'secret_type': s.SecretType.OPAQUE,
            'content_type': 'application/octet-stream',
            'content_encoding': None,
            'expected': base64.b64encode('stuff')
        },
        'symmetric_base64': {
            'unencrypted': base64.b64encode('stuff'),
            'secret_type': s.SecretType.SYMMETRIC,
            'content_type': 'application/octet-stream',
            'content_encoding': 'base64',
            'expected': base64.b64encode('stuff')
        },
        'symmetric': {
            'unencrypted': 'stuff',
            'secret_type': s.SecretType.SYMMETRIC,
            'content_type': 'application/octet-stream',
            'content_encoding': None,
            'expected': base64.b64encode('stuff')
        },
        'private_base64': {
            'unencrypted': base64.b64encode(keys.get_private_key_pem()),
            'secret_type': s.SecretType.PRIVATE,
            'content_type': 'application/octet-stream',
            'content_encoding': 'base64',
            'expected': base64.b64encode(keys.get_private_key_pem())
        },
        'private': {
            'unencrypted': keys.get_private_key_pem(),
            'secret_type': s.SecretType.PRIVATE,
            'content_type': 'application/octet-stream',
            'content_encoding': None,
            'expected': base64.b64encode(keys.get_private_key_pem())
        },
        'public_base64': {
            'unencrypted': base64.b64encode(keys.get_public_key_pem()),
            'secret_type': s.SecretType.PUBLIC,
            'content_type': 'application/octet-stream',
            'content_encoding': 'base64',
            'expected': base64.b64encode(keys.get_public_key_pem())
        },
        'public': {
            'unencrypted': keys.get_public_key_pem(),
            'secret_type': s.SecretType.PUBLIC,
            'content_type': 'application/octet-stream',
            'content_encoding': None,
            'expected': base64.b64encode(keys.get_public_key_pem())
        },
        'certificate_base64': {
            'unencrypted': base64.b64encode(keys.get_certificate_pem()),
            'secret_type': s.SecretType.CERTIFICATE,
            'content_type': 'application/octet-stream',
            'content_encoding': 'base64',
            'expected': base64.b64encode(keys.get_certificate_pem())
        },
        'certificate': {
            'unencrypted': keys.get_certificate_pem(),
            'secret_type': s.SecretType.CERTIFICATE,
            'content_type': 'application/octet-stream',
            'content_encoding': None,
            'expected': base64.b64encode(keys.get_certificate_pem())
        },
    }

    def setUp(self):
        super(WhenNormalizingBeforeEncryption, self).setUp()

        # Aliasing to reduce the number of line continuations
        self.normalize = translations.normalize_before_encryption

    @utils.parameterized_dataset(dataset_for_normalization)
    def test_can_normalize(self, **kwargs):
        unencrypted, content_type = self.normalize(
            unencrypted=kwargs['unencrypted'],
            content_type=kwargs['content_type'],
            content_encoding=kwargs['content_encoding'],
            secret_type=kwargs['secret_type']
        )
        self.assertEqual(kwargs['expected'], unencrypted)
        self.assertEqual(kwargs['content_type'], content_type)

    def test_can_normalize_tmp_plain_text(self):
        unencrypted, content_type = self.normalize(
            unencrypted='stuff',
            content_type='text/plain',
            content_encoding='',
            secret_type=s.SecretType.OPAQUE
        )
        self.assertEqual(base64.b64encode('stuff'), unencrypted)
        self.assertEqual('text/plain', content_type)

    def test_null_content_encoding_gets_passed_through(self):
        unencrypted, content_type = self.normalize(
            unencrypted='bam',
            content_type='application/octet-stream',
            content_encoding=None,
            secret_type=s.SecretType.OPAQUE
        )
        self.assertEqual(base64.b64encode('bam'), unencrypted)
        self.assertEqual('application/octet-stream', content_type)

    @utils.parameterized_dataset(dataset_for_raised_exceptions)
    def test_normalize_raising_exceptions_with(self, exception, **kwargs):
        self.assertRaises(exception, self.normalize, **kwargs)


class WhenAnalyzingBeforeDecryption(utils.BaseTestCase):
    """Tests for translations.analyze_before_decryption."""

    def setUp(self):
        super(WhenAnalyzingBeforeDecryption, self).setUp()

        # Aliasing to reduce the number of line continuations
        self.analyze = translations.analyze_before_decryption

    def test_supported_content_type_doesnt_raise_exception(self):
        try:
            self.analyze('text/plain')
        except Exception as e:
            self.fail('Shouldn\'t have raised: {0}'.format(e))

    def test_unsupported_content_type_raises_exception(self):
        exception = s.SecretAcceptNotSupportedException
        kwargs = {'content_type': 'nope!'}

        self.assertRaises(exception, self.analyze, **kwargs)


@utils.parameterized_test_case
class WhenDenormalizingAfterDecryption(utils.BaseTestCase):
    """Tests for translations.denormalize_after_decryption."""

    dataset_for_pem_denormalize = {
        'private_key': {
            'encoded_pem': base64.b64encode(keys.get_private_key_pem()),
            'content_type': 'application/octet-stream'
        },
        'public_key': {
            'encoded_pem': base64.b64encode(keys.get_public_key_pem()),
            'content_type': 'application/octet-stream'
        },
        'certificate': {
            'encoded_pem': base64.b64encode(keys.get_certificate_pem()),
            'content_type': 'application/octet-stream'
        }
    }

    def setUp(self):
        super(WhenDenormalizingAfterDecryption, self).setUp()

        # Aliasing to reduce the number of line continuations
        self.denormalize = translations.denormalize_after_decryption

    def test_ascii_characters_to_utf8_with_plain_text(self):
        secret = 'bam'
        normalized_secret = secret.encode('utf-8')
        normalized_secret = base64.b64encode(normalized_secret)
        unencrypted = self.denormalize(normalized_secret, 'text/plain')
        self.assertEqual('bam', unencrypted)

    def test_ascii_characters_to_utf8_with_app_octet_stream(self):
        unencrypted = self.denormalize(base64.b64encode('bam'),
                                       'application/octet-stream')
        self.assertEqual('bam', unencrypted)

    def test_non_ascii_character_with_plain_text_raises_exception(self):
        exception = s.SecretAcceptNotSupportedException
        kwargs = {
            'unencrypted': base64.b64encode('\xff'),
            'content_type': 'text/plain'
        }
        self.assertRaises(exception, self.denormalize, **kwargs)

    def test_content_type_not_text_or_binary_raises_exception(self):
        exception = s.SecretContentTypeNotSupportedException
        kwargs = {
            'unencrypted': 'bam',
            'content_type': 'other_content_type'
        }
        self.assertRaises(exception, self.denormalize, **kwargs)

    @utils.parameterized_dataset(dataset_for_pem_denormalize)
    def test_denormalize_pem(self, encoded_pem, content_type):
        denorm_secret = self.denormalize(encoded_pem, content_type)
        self.assertEqual(base64.b64decode(encoded_pem), denorm_secret)
class WhenConvertingKeyFormats(utils.BaseTestCase):
    """Round-trip and error-path tests for PEM <-> DER conversion."""

    def setUp(self):
        super(WhenConvertingKeyFormats, self).setUp()

    def _assert_pem_to_der(self, pem_input, der_expected, secret_type):
        # Convert PEM to DER and compare against the known-good DER bytes.
        self.assertEqual(der_expected,
                         translations.convert_pem_to_der(pem_input,
                                                         secret_type))

    def _assert_der_to_pem(self, der_input, pem_expected, secret_type):
        # Convert DER to PEM and compare against the known-good PEM bytes.
        self.assertEqual(pem_expected,
                         translations.convert_der_to_pem(der_input,
                                                         secret_type))

    def test_passes_convert_private_pem_to_der(self):
        self._assert_pem_to_der(keys.get_private_key_pem(),
                                keys.get_private_key_der(),
                                s.SecretType.PRIVATE)

    def test_passes_convert_private_der_to_pem(self):
        self._assert_der_to_pem(keys.get_private_key_der(),
                                keys.get_private_key_pem(),
                                s.SecretType.PRIVATE)

    def test_passes_convert_public_pem_to_der(self):
        self._assert_pem_to_der(keys.get_public_key_pem(),
                                keys.get_public_key_der(),
                                s.SecretType.PUBLIC)

    def test_passes_convert_public_der_to_pem(self):
        self._assert_der_to_pem(keys.get_public_key_der(),
                                keys.get_public_key_pem(),
                                s.SecretType.PUBLIC)

    def test_passes_convert_certificate_pem_to_der(self):
        self._assert_pem_to_der(keys.get_certificate_pem(),
                                keys.get_certificate_der(),
                                s.SecretType.CERTIFICATE)

    def test_passes_convert_certificate_der_to_pem(self):
        self._assert_der_to_pem(keys.get_certificate_der(),
                                keys.get_certificate_pem(),
                                s.SecretType.CERTIFICATE)

    def test_passes_certificate_conversion(self):
        # Full round trip: PEM -> DER -> PEM must reproduce the input.
        original_pem = keys.get_certificate_pem()
        intermediate_der = translations.convert_pem_to_der(
            original_pem, s.SecretType.CERTIFICATE)
        round_tripped_pem = translations.convert_der_to_pem(
            intermediate_der, s.SecretType.CERTIFICATE)
        self.assertEqual(original_pem, round_tripped_pem)

    def test_should_raise_to_pem_with_bad_secret_type(self):
        self.assertRaises(s.SecretGeneralException,
                          translations.convert_der_to_pem,
                          "der",
                          "bad type")

    def test_should_raise_to_der_with_bad_secret_type(self):
        self.assertRaises(s.SecretGeneralException,
                          translations.convert_pem_to_der,
                          "pem",
                          "bad type")
# Copyright (c) 2015 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from barbican.plugin.util import utils as plugin_utils
from barbican.tests import utils as test_utils


class ExtensionStub(object):
    # Stand-in for a stevedore extension: records how plugin() was
    # invoked and can be primed to raise instead.

    def __init__(self):
        self.name = 'my_name'
        self.plugin_instance = 'my_instance'
        self.obj = None
        self.exc = None
        self.args = None
        self.kwargs = None

    def plugin(self, *args, **kwargs):
        # Raise if primed via set_raise_exception(); otherwise record the
        # call arguments and return the canned plugin instance.
        if self.exc:
            raise self.exc
        self.args = args
        self.kwargs = kwargs
        return self.plugin_instance

    def set_raise_exception(self, exc):
        self.exc = exc


class ManagerStub(object):
    # Minimal stand-in for a stevedore manager: just holds extensions.

    def __init__(self, extensions):
        self.extensions = extensions


class WhenInvokingInstantiatePlugins(test_utils.BaseTestCase):
    """Tests for plugin_utils.instantiate_plugins()."""

    def setUp(self):
        super(WhenInvokingInstantiatePlugins, self).setUp()

        self.extension = ExtensionStub()
        self.manager = ManagerStub([self.extension])

    def test_creates_plugin_instance(self):
        args = ('foo', 'bar')
        kwargs = {'foo': 1}

        plugin_utils.instantiate_plugins(
            self.manager, invoke_args=args, invoke_kwargs=kwargs)

        # The created instance is assigned to extension.obj and the invoke
        # arguments are forwarded to the plugin factory unchanged.
        self.assertEqual('my_instance', self.extension.obj)
        self.assertEqual(args, self.extension.args)
        self.assertEqual(kwargs, self.extension.kwargs)

    def test_does_not_create_plugin_instance_due_to_error(self):
        # A plugin factory that raises must be skipped, leaving obj unset.
        self.extension.set_raise_exception(ValueError())

        plugin_utils.instantiate_plugins(self.manager)

        self.assertIsNone(self.extension.obj)
# Copyright (c) 2013-2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from barbican.model import models
from barbican.plugin.util import mime_types
from barbican.tests import utils


class WhenTestingIsBase64ProcessingNeeded(utils.BaseTestCase):
    """Tests for mime_types.is_base64_processing_needed()."""

    def test_is_base64_needed(self):
        r = mime_types.is_base64_processing_needed('application/octet-stream',
                                                   'base64')
        self.assertTrue(r)

    def test_is_base64_plus_needed(self):
        # base64 listed among other encodings still triggers processing.
        r = mime_types.is_base64_processing_needed('application/octet-stream',
                                                   'base64;q=0.5, '
                                                   'gzip;q=0.6, compress')
        self.assertTrue(r)

    def test_not_base64_needed_binary(self):
        r = mime_types.is_base64_processing_needed('application/octet-stream',
                                                   None)
        self.assertFalse(r)

    def test_not_base64_needed_invalid_content_type(self):
        r = mime_types.is_base64_processing_needed('bababooey', 'base64')
        self.assertFalse(r)

    def test_not_base64_needed_text(self):
        r = mime_types.is_base64_processing_needed('text/plain', 'base64')
        self.assertFalse(r)
class WhenTestingIsBase64ProcessingSupported(utils.BaseTestCase):
    """Tests for mime_types.is_base64_encoding_supported()."""

    def test_is_base64_supported_application_octet_stream(self):
        r = mime_types.is_base64_encoding_supported('application/octet-stream')
        self.assertTrue(r)

    def test_is_base64_supported_with_unsupported_values(self):
        mimes_where_base64_is_not_supported = ['text/plain',
                                               'bogus']
        for mime in mimes_where_base64_is_not_supported:
            r = mime_types.is_base64_encoding_supported(mime)
            self.assertFalse(r)


class WhenTestingAllowBinaryContent(utils.BaseTestCase):
    """Tests for mime_types.use_binary_content_as_is()."""

    def test_binary_content_without_encoding(self):
        r = mime_types.use_binary_content_as_is('application/octet-stream',
                                                None)
        self.assertTrue(r)

    def test_binary_content_with_valid_encoding(self):
        r = mime_types.use_binary_content_as_is('application/octet-stream',
                                                'binary')
        self.assertTrue(r)

    def test_binary_content_with_encoding(self):
        r = mime_types.use_binary_content_as_is('application/octet-stream',
                                                'binary;q=0.5, '
                                                'gzip;q=0.6, compress')
        self.assertTrue(r)

    def test_binary_content_with_base64_encoding(self):
        r = mime_types.use_binary_content_as_is('application/octet-stream',
                                                'base64')
        self.assertFalse(r)

    def test_not_allow_with_invalid_content_type(self):
        # NOTE: 'invalid_conent_type' typo is intentional test data.
        r = mime_types.use_binary_content_as_is('invalid_conent_type',
                                                'binary')
        self.assertFalse(r)

    def test_plain_content_with_base64_encoding(self):
        r = mime_types.use_binary_content_as_is('text/plain', 'base64')
        self.assertFalse(r)


class WhenTestingAugmentFieldsWithContentTypes(utils.BaseTestCase):
    """Tests for mime_types.augment_fields_with_content_types()."""

    def setUp(self):
        super(WhenTestingAugmentFieldsWithContentTypes, self).setUp()

        self.secret = models.Secret({})
        self.secret.secret_id = "secret#1"
        self.datum = models.EncryptedDatum(self.secret)
        self.secret.encrypted_data = [self.datum]

    def test_static_supported_plain_text(self):
        for pt in mime_types.PLAIN_TEXT:
            self.assertEqual('text/plain', mime_types.INTERNAL_CTYPES[pt])

    def test_static_supported_binary(self):
        for bin in mime_types.BINARY:
            self.assertIn(mime_types.INTERNAL_CTYPES[bin],
                          mime_types.BINARY)

    def test_static_content_to_encodings(self):
        self.assertIn('text/plain', mime_types.CTYPES_TO_ENCODINGS)
        self.assertIsNone(mime_types.CTYPES_TO_ENCODINGS['text/plain'])

        self.assertIn('application/aes', mime_types.CTYPES_TO_ENCODINGS)
        self.assertIsNone(mime_types.CTYPES_TO_ENCODINGS['application/aes'])

        self.assertIn('application/octet-stream',
                      mime_types.CTYPES_TO_ENCODINGS)
        self.assertIn('base64', mime_types.CTYPES_TO_ENCODINGS[
            'application/octet-stream'])
        self.assertIn('binary', mime_types.CTYPES_TO_ENCODINGS[
            'application/octet-stream'])

    def test_secret_with_matching_datum(self):
        for ct in mime_types.SUPPORTED:
            self._test_secret_and_datum_for_content_type(ct)

    def test_secret_with_non_matching_datum(self):
        self.datum.content_type = "bababooey"
        fields = mime_types.augment_fields_with_content_types(self.secret)
        self.assertNotIn("bababooey", fields)

    def _test_secret_and_datum_for_content_type(self, content_type):
        self.assertIn(content_type, mime_types.INTERNAL_CTYPES)
        # TODO(rm_work): This is deprecated and should eventually be removed
        self.datum.content_type = mime_types.INTERNAL_CTYPES[content_type]
        # Set up the Secret Metadata
        content_meta = models.SecretStoreMetadatum('content_type',
                                                   self.datum.content_type)
        self.secret.secret_store_metadata['content_type'] = content_meta
        fields = mime_types.augment_fields_with_content_types(self.secret)

        self.assertIn('content_types', fields)
        content_types = fields['content_types']
        self.assertIn('default', content_types)
        self.assertEqual(self.datum.content_type, content_types['default'])


class WhenTestingNormalizationOfMIMETypes(utils.BaseTestCase):
    """Tests for mime_types.normalize_content_type()."""

    def test_plain_text_normalization(self):
        # All charset/whitespace variants of text/plain normalize the same.
        mimes = ['text/plain',
                 '   text/plain  ',
                 'text/plain;charset=utf-8',
                 'text/plain;charset=UTF-8',
                 'text/plain; charset=utf-8',
                 'text/plain; charset=UTF-8',
                 'text/plain;  charset=utf-8',
                 'text/plain;  charset=UTF-8',
                 'text/plain ; charset = utf-8',
                 'text/plain ; charset = UTF-8']
        for mime in mimes:
            self._test_plain_text_mime_type(mime)

    def _test_plain_text_mime_type(self, mime):
        r = mime_types.normalize_content_type(mime)
        self.assertEqual('text/plain', r)

    def test_unsupported_charset_in_plain_text_mime(self):
        # Unknown charsets are passed through unchanged.
        mime = 'text/plain; charset=ISO-8859-1'
        r = mime_types.normalize_content_type(mime)
        self.assertEqual(mime, r)

    def test_malformed_charset_in_plain_text_mime(self):
        mime = 'text/plain; charset is ISO-8859-1'
        r = mime_types.normalize_content_type(mime)
        self.assertEqual(mime, r)

    def test_binary_normalization(self):
        mime = 'application/octet-stream'
        r = mime_types.normalize_content_type(mime)
        self.assertEqual('application/octet-stream', r)

    def test_bogus_mime_normalization(self):
        mime = 'something/bogus'
        r = mime_types.normalize_content_type(mime)
        self.assertEqual('something/bogus', r)


@utils.parameterized_test_case
class WhenTestingIfContentTypeAndEncodingSupported(utils.BaseTestCase):
    """Tests for mime_types.is_content_type_with_encoding_supported()."""

    @utils.parameterized_dataset({
        'plaintext_none': ['text/plain', None],
        'octectstream_base64': ['application/octet-stream', 'base64'],
        'pkcs8_base64': ['application/pkcs8', 'base64'],
        'pkix_base64': ['application/pkix-cert', 'base64'],
    })
    def test_content_type_encoding_supported(self, content_type, encoding):
        self.assertTrue(mime_types.is_content_type_with_encoding_supported(
            content_type, encoding))

    @utils.parameterized_dataset({
        'plaintext_none': ['text/plain', 'base64'],
        'octectstream_no_encoding': ['application/octet-stream', None],
        'pkcs8_no_encoding': ['application/pkcs8', None],
        'pkix_no_encoding': ['application/pkix-cert', None],
        'unknown_with_valid_encoding': ['application/uknown-content-type',
                                        'base64']
    })
    def test_content_type_encoding_not_supported(self, content_type,
                                                 encoding):
        self.assertFalse(mime_types.is_content_type_with_encoding_supported(
            content_type, encoding))
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os

from Crypto.PublicKey import DSA
from Crypto.PublicKey import RSA
from Crypto.Util import asn1
from cryptography import fernet
import mock
import six

from barbican.model import models
from barbican.plugin.crypto import crypto as plugin
from barbican.plugin.crypto import simple_crypto as simple
from barbican.tests import utils


class WhenTestingSimpleCryptoPlugin(utils.BaseTestCase):
    """Unit tests for the SimpleCryptoPlugin crypto plugin."""

    def setUp(self):
        super(WhenTestingSimpleCryptoPlugin, self).setUp()
        self.plugin = simple.SimpleCryptoPlugin()

    def _get_mocked_kek_meta_dto(self):
        """Return a KEK metadata DTO bound by the plugin under test."""
        # For SimpleCryptoPlugin, per-project KEKs are stored in
        # kek_meta_dto.plugin_meta. SimpleCryptoPlugin does a get-or-create
        # on the plugin_meta field, so plugin_meta should be None initially.
        kek_meta_dto = plugin.KEKMetaDTO(mock.MagicMock())
        kek_meta_dto.plugin_meta = None
        return self.plugin.bind_kek_metadata(kek_meta_dto)

    def test_encrypt_unicode_raises_value_error(self):
        """A raw (unencoded) unicode payload must be rejected."""
        unencrypted = u'unicode_beer\U0001F37A'
        encrypt_dto = plugin.EncryptDTO(unencrypted)
        secret = mock.MagicMock()
        secret.mime_type = 'text/plain'
        kek_meta_dto = self._get_mocked_kek_meta_dto()
        self.assertRaises(
            ValueError,
            self.plugin.encrypt,
            encrypt_dto,
            kek_meta_dto,
            mock.MagicMock(),
        )

    def test_encrypt_with_unicode_kek_must_pass(self):
        """Test plan:

        Generate a kek
        Encrypt with master kek
        Convert to unicode
        call plugin.encrypt on unencrypted
        decrypt response cypher_text
        Compare with unencrypted
        """
        project_kek = fernet.Fernet.generate_key()
        encryptor = fernet.Fernet(self.plugin.master_kek)
        ENC_project_kek = encryptor.encrypt(project_kek)
        UENC_project_kek = six.u(ENC_project_kek)
        kek_meta_dto = self._get_mocked_kek_meta_dto()
        kek_meta_dto.plugin_meta = UENC_project_kek

        unencrypted = 'PlainTextSecret'
        encrypt_dto = plugin.EncryptDTO(unencrypted)
        response_dto = self.plugin.encrypt(encrypt_dto,
                                           kek_meta_dto,
                                           mock.MagicMock())

        # Decrypt with the project KEK directly to verify it was used.
        project_encryptor = fernet.Fernet(project_kek)
        decrypted = project_encryptor.decrypt(response_dto.cypher_text)
        self.assertEqual(unencrypted, decrypted)

    def test_decrypt_kek_not_created(self):
        """Decrypt must fail when the per-project KEK was never bound."""
        kek_meta_dto = mock.MagicMock()
        kek_meta_dto.plugin_meta = None
        self.assertRaises(
            ValueError,
            self.plugin.decrypt,
            mock.MagicMock(),
            kek_meta_dto,
            mock.MagicMock(),
            mock.MagicMock(),
        )

    def test_byte_string_encryption(self):
        """Round-trip a byte-string payload through encrypt/decrypt."""
        unencrypted = b'some_secret'
        encrypt_dto = plugin.EncryptDTO(unencrypted)
        kek_meta_dto = self._get_mocked_kek_meta_dto()
        response_dto = self.plugin.encrypt(encrypt_dto,
                                           kek_meta_dto,
                                           mock.MagicMock())
        decrypt_dto = plugin.DecryptDTO(response_dto.cypher_text)
        decrypted = self.plugin.decrypt(decrypt_dto, kek_meta_dto,
                                        response_dto.kek_meta_extended,
                                        mock.MagicMock())
        self.assertEqual(unencrypted, decrypted)

    def test_random_bytes_encryption(self):
        """Round-trip arbitrary binary data through encrypt/decrypt."""
        unencrypted = os.urandom(10)
        encrypt_dto = plugin.EncryptDTO(unencrypted)
        kek_meta_dto = self._get_mocked_kek_meta_dto()
        response_dto = self.plugin.encrypt(encrypt_dto,
                                           kek_meta_dto, mock.MagicMock())
        decrypt_dto = plugin.DecryptDTO(response_dto.cypher_text)
        decrypted = self.plugin.decrypt(decrypt_dto, kek_meta_dto,
                                        response_dto.kek_meta_extended,
                                        mock.MagicMock())
        self.assertEqual(unencrypted, decrypted)

    def test_generate_256_bit_key(self):
        """Generated 256-bit AES key must decrypt to 32 bytes."""
        secret = models.Secret()
        secret.bit_length = 256
        secret.algorithm = "AES"
        kek_meta_dto = self._get_mocked_kek_meta_dto()
        generate_dto = plugin.GenerateDTO(
            secret.algorithm,
            secret.bit_length,
            secret.mode, None)
        response_dto = self.plugin.generate_symmetric(
            generate_dto,
            kek_meta_dto,
            mock.MagicMock()
        )
        decrypt_dto = plugin.DecryptDTO(response_dto.cypher_text)
        key = self.plugin.decrypt(decrypt_dto, kek_meta_dto,
                                  response_dto.kek_meta_extended,
                                  mock.MagicMock())
        self.assertEqual(32, len(key))

    def test_generate_192_bit_key(self):
        """Generated 192-bit AES key must decrypt to 24 bytes."""
        secret = models.Secret()
        secret.bit_length = 192
        secret.algorithm = "AES"
        kek_meta_dto = self._get_mocked_kek_meta_dto()
        generate_dto = plugin.GenerateDTO(
            secret.algorithm,
            secret.bit_length,
            None, None)
        response_dto = self.plugin.generate_symmetric(
            generate_dto,
            kek_meta_dto,
            mock.MagicMock()
        )
        decrypt_dto = plugin.DecryptDTO(response_dto.cypher_text)
        key = self.plugin.decrypt(decrypt_dto, kek_meta_dto,
                                  response_dto.kek_meta_extended,
                                  mock.MagicMock())
        self.assertEqual(24, len(key))

    def test_generate_128_bit_key(self):
        """Generated 128-bit AES key must decrypt to 16 bytes."""
        secret = models.Secret()
        secret.bit_length = 128
        secret.algorithm = "AES"
        kek_meta_dto = self._get_mocked_kek_meta_dto()
        generate_dto = plugin.GenerateDTO(
            secret.algorithm,
            secret.bit_length,
            None, None)
        response_dto = self.plugin.generate_symmetric(
            generate_dto,
            kek_meta_dto,
            mock.MagicMock()
        )
        decrypt_dto = plugin.DecryptDTO(response_dto.cypher_text)
        key = self.plugin.decrypt(decrypt_dto, kek_meta_dto,
                                  response_dto.kek_meta_extended,
                                  mock.MagicMock())
        self.assertEqual(16, len(key))

    def test_supports_encrypt_decrypt(self):
        self.assertTrue(
            self.plugin.supports(plugin.PluginSupportTypes.ENCRYPT_DECRYPT)
        )

    def test_supports_symmetric_key_generation(self):
        # Bit length is required for symmetric generation; unsupported
        # algorithms and invalid lengths are rejected.
        self.assertTrue(
            self.plugin.supports(
                plugin.PluginSupportTypes.SYMMETRIC_KEY_GENERATION,
                'AES', 64)
        )
        self.assertFalse(
            self.plugin.supports(
                plugin.PluginSupportTypes.SYMMETRIC_KEY_GENERATION, 'AES')
        )
        self.assertTrue(
            self.plugin.supports(
                plugin.PluginSupportTypes.SYMMETRIC_KEY_GENERATION,
                'hmacsha512', 128)
        )
        self.assertFalse(
            self.plugin.supports(
                plugin.PluginSupportTypes.SYMMETRIC_KEY_GENERATION,
                'hmacsha512', 12)
        )
        self.assertFalse(
            self.plugin.supports(
                plugin.PluginSupportTypes.SYMMETRIC_KEY_GENERATION,
                'Camillia', 128)
        )

    def test_does_not_support_unknown_type(self):
        self.assertFalse(
            self.plugin.supports("SOMETHING_RANDOM")
        )

    def test_bind_kek_metadata(self):
        """bind_kek_metadata must populate the KEK algorithm metadata."""
        kek_metadata_dto = mock.MagicMock()
        kek_metadata_dto = self.plugin.bind_kek_metadata(kek_metadata_dto)

        self.assertEqual('aes', kek_metadata_dto.algorithm)
        self.assertEqual(128, kek_metadata_dto.bit_length)
        self.assertEqual('cbc', kek_metadata_dto.mode)

    def test_supports_asymmetric_key_generation(self):
        # Minimum key sizes apply: 1024 bits for both DSA and RSA.
        self.assertTrue(
            self.plugin.supports(
                plugin.PluginSupportTypes.ASYMMETRIC_KEY_GENERATION,
                'DSA', 1024)
        )
        self.assertTrue(
            self.plugin.supports(
                plugin.PluginSupportTypes.ASYMMETRIC_KEY_GENERATION,
                "RSA", 1024)
        )
        self.assertFalse(
            self.plugin.supports(
                plugin.PluginSupportTypes.ASYMMETRIC_KEY_GENERATION,
                "DSA", 512)
        )
        self.assertFalse(
            self.plugin.supports(
                plugin.PluginSupportTypes.ASYMMETRIC_KEY_GENERATION,
                "RSA", 64)
        )

    def test_generate_512_bit_RSA_key(self):
        """RSA keys shorter than 1024 bits must be refused."""
        generate_dto = plugin.GenerateDTO('rsa', 512, None, None)
        kek_meta_dto = self._get_mocked_kek_meta_dto()
        self.assertRaises(ValueError,
                          self.plugin.generate_asymmetric,
                          generate_dto,
                          kek_meta_dto,
                          mock.MagicMock())

    def test_generate_2048_bit_DSA_key(self):
        """DSA keys longer than 1024 bits must be refused."""
        generate_dto = plugin.GenerateDTO('dsa', 2048, None, None)
        kek_meta_dto = self._get_mocked_kek_meta_dto()
        self.assertRaises(ValueError, self.plugin.generate_asymmetric,
                          generate_dto,
                          kek_meta_dto,
                          mock.MagicMock())

    def test_generate_1024_bit_DSA_key_with_passphrase(self):
        """DSA generation does not support a passphrase."""
        generate_dto = plugin.GenerateDTO('dsa', 1024, None, 'Passphrase')
        kek_meta_dto = self._get_mocked_kek_meta_dto()
        self.assertRaises(ValueError, self.plugin.generate_asymmetric,
                          generate_dto,
                          kek_meta_dto,
                          mock.MagicMock())

    def test_generate_asymmetric_1024_bit_key(self):
        """Generate a 1024-bit RSA pair and verify both halves decrypt."""
        generate_dto = plugin.GenerateDTO('rsa', 1024, None, None)
        kek_meta_dto = self._get_mocked_kek_meta_dto()

        private_dto, public_dto, passwd_dto = self.plugin.generate_asymmetric(
            generate_dto, kek_meta_dto, mock.MagicMock())

        decrypt_dto = plugin.DecryptDTO(private_dto.cypher_text)
        private_dto = self.plugin.decrypt(decrypt_dto,
                                          kek_meta_dto,
                                          private_dto.kek_meta_extended,
                                          mock.MagicMock())

        decrypt_dto = plugin.DecryptDTO(public_dto.cypher_text)
        public_dto = self.plugin.decrypt(decrypt_dto,
                                         kek_meta_dto,
                                         public_dto.kek_meta_extended,
                                         mock.MagicMock())

        public_dto = RSA.importKey(public_dto)
        private_dto = RSA.importKey(private_dto)
        self.assertEqual(1023, public_dto.size())
        self.assertEqual(1023, private_dto.size())
        # Bug fix: has_private is a method; asserting the bound-method
        # object itself is always truthy, so the original check could
        # never fail. Call it, as test_generate_1024_bit_RSA_key_in_pem
        # already does.
        self.assertTrue(private_dto.has_private())

    def test_generate_1024_bit_RSA_key_in_pem(self):
        """A passphrase-protected RSA private key must import with it."""
        generate_dto = plugin.GenerateDTO('rsa', 1024, None, 'changeme')
        kek_meta_dto = self._get_mocked_kek_meta_dto()

        private_dto, public_dto, passwd_dto = self.plugin.generate_asymmetric(
            generate_dto,
            kek_meta_dto,
            mock.MagicMock()
        )

        decrypt_dto = plugin.DecryptDTO(private_dto.cypher_text)
        private_dto = self.plugin.decrypt(decrypt_dto,
                                          kek_meta_dto,
                                          private_dto.kek_meta_extended,
                                          mock.MagicMock())

        private_dto = RSA.importKey(private_dto, 'changeme')
        self.assertTrue(private_dto.has_private())

    def test_generate_1024_DSA_key_in_pem_and_reconstruct_key_der(self):
        """Reconstruct a DSA key object from the decrypted DER sequence."""
        generate_dto = plugin.GenerateDTO('dsa', 1024, None, None)
        kek_meta_dto = self._get_mocked_kek_meta_dto()

        private_dto, public_dto, passwd_dto = self.plugin.generate_asymmetric(
            generate_dto,
            kek_meta_dto,
            mock.MagicMock()
        )

        decrypt_dto = plugin.DecryptDTO(private_dto.cypher_text)
        private_dto = self.plugin.decrypt(decrypt_dto,
                                          kek_meta_dto,
                                          private_dto.kek_meta_extended,
                                          mock.MagicMock())

        # DER private-key sequence layout: version, p, q, g, y, x.
        prv_seq = asn1.DerSequence()
        prv_seq.decode(private_dto)
        p, q, g, y, x = prv_seq[1:]

        private_dto = DSA.construct((y, g, p, q, x))
        self.assertTrue(private_dto.has_private())

    def test_generate_128_bit_hmac_key(self):
        """Generated 128-bit HMAC key must decrypt to 16 bytes."""
        secret = models.Secret()
        secret.bit_length = 128
        secret.algorithm = "hmacsha256"
        kek_meta_dto = self._get_mocked_kek_meta_dto()
        generate_dto = plugin.GenerateDTO(
            secret.algorithm,
            secret.bit_length,
            None, None)
        response_dto = self.plugin.generate_symmetric(
            generate_dto,
            kek_meta_dto,
            mock.MagicMock()
        )
        decrypt_dto = plugin.DecryptDTO(response_dto.cypher_text)
        key = self.plugin.decrypt(decrypt_dto, kek_meta_dto,
                                  response_dto.kek_meta_extended,
                                  mock.MagicMock())
        self.assertEqual(16, len(key))
import mock
import six

from barbican.model import models
from barbican.plugin.crypto import crypto as plugin_import
from barbican.plugin.crypto import p11_crypto
from barbican.plugin.crypto import pkcs11
from barbican.tests import utils

if six.PY3:
    # Python 3 has no `long`; alias it so key-handle literals work on both.
    long = int


def generate_random_effect(length, session):
    # Deterministic stand-in for PKCS#11 RNG output used by the mock below.
    return b'0' * length


class WhenTestingP11CryptoPlugin(utils.BaseTestCase):
    """Unit tests for P11CryptoPlugin against a fully mocked PKCS#11 layer."""

    def setUp(self):
        super(WhenTestingP11CryptoPlugin, self).setUp()

        # Mock out the entire pkcs11 wrapper; return values mimic opaque
        # session/key handles and ciphertext structures.
        self.pkcs11 = mock.Mock()
        self.pkcs11.get_session.return_value = long(1)
        self.pkcs11.return_session.return_value = None
        self.pkcs11.generate_random.side_effect = generate_random_effect
        self.pkcs11.get_key_handle.return_value = long(2)
        self.pkcs11.encrypt.return_value = {'iv': b'0', 'ct': b'0'}
        self.pkcs11.decrypt.return_value = b'0'
        self.pkcs11.generate_key.return_value = long(3)
        self.pkcs11.wrap_key.return_value = {'iv': b'1', 'wrapped_key': b'1'}
        self.pkcs11.unwrap_key.return_value = long(4)
        self.pkcs11.compute_hmac.return_value = b'1'
        self.pkcs11.verify_hmac.return_value = None
        self.pkcs11.destroy_object.return_value = None

        # Minimal plugin configuration expected by P11CryptoPlugin.
        self.cfg_mock = mock.MagicMock(name='config mock')
        self.cfg_mock.p11_crypto_plugin.mkek_label = 'mkek_label'
        self.cfg_mock.p11_crypto_plugin.hmac_label = 'hmac_label'
        self.cfg_mock.p11_crypto_plugin.mkek_length = 32
        self.cfg_mock.p11_crypto_plugin.slot_id = 1
        self.cfg_mock.p11_crypto_plugin.rw_session = True
        self.cfg_mock.p11_crypto_plugin.pkek_length = 32
        self.cfg_mock.p11_crypto_plugin.pkek_cache_ttl = 900
        self.cfg_mock.p11_crypto_plugin.pkek_cache_limit = 10
        self.cfg_mock.p11_crypto_plugin.algorithm = 'CKM_AES_GCM'

        self.plugin = p11_crypto.P11CryptoPlugin(
            conf=self.cfg_mock,
            pkcs11=self.pkcs11
        )

    def test_invalid_library_path(self):
        # Constructing the plugin without a PKCS#11 library path must fail.
        cfg = self.cfg_mock.p11_crypto_plugin
        cfg.library_path = None
        self.assertRaises(ValueError, p11_crypto.P11CryptoPlugin,
                          conf=self.cfg_mock, pkcs11=self.pkcs11)

    def test_bind_kek_metadata_without_existing_key(self):
        # With no existing plugin_meta a new project KEK is generated,
        # wrapped under the MKEK, and HMAC'd.
        kek_datum = models.KEKDatum()
        dto = plugin_import.KEKMetaDTO(kek_datum)
        dto = self.plugin.bind_kek_metadata(dto)

        self.assertEqual(dto.algorithm, 'AES')
        self.assertEqual(dto.bit_length, 256)
        self.assertEqual(dto.mode, 'CBC')

        self.assertEqual(self.pkcs11.get_key_handle.call_count, 2)
        self.assertEqual(self.pkcs11.generate_key.call_count, 1)
        self.assertEqual(self.pkcs11.wrap_key.call_count, 1)
        self.assertEqual(self.pkcs11.compute_hmac.call_count, 1)

    def test_bind_kek_metadata_with_existing_key(self):
        # Existing plugin_meta short-circuits key generation entirely.
        kek_datum = models.KEKDatum()
        dto = plugin_import.KEKMetaDTO(kek_datum)
        dto.plugin_meta = '{}'
        dto = self.plugin.bind_kek_metadata(dto)

        self.assertEqual(self.pkcs11.generate_key.call_count, 0)
        self.assertEqual(self.pkcs11.wrap_key.call_count, 0)
        self.assertEqual(self.pkcs11.compute_hmac.call_count, 0)

    def test_encrypt(self):
        # Happy-path encrypt: verifies HMAC, unwraps the project KEK,
        # encrypts, and returns the session.
        payload = b'test payload'
        encrypt_dto = plugin_import.EncryptDTO(payload)
        kek_meta = mock.MagicMock()
        kek_meta.kek_label = 'pkek'
        kek_meta.plugin_meta = ('{"iv": "iv==",'
                                '"hmac": "hmac",'
                                '"wrapped_key": "wrappedkey==",'
                                '"mkek_label": "mkek_label",'
                                '"hmac_label": "hmac_label"}')
        response_dto = self.plugin.encrypt(encrypt_dto,
                                           kek_meta,
                                           mock.MagicMock())

        self.assertEqual(response_dto.cypher_text, b'0')
        self.assertIn('iv', response_dto.kek_meta_extended)

        self.assertEqual(self.pkcs11.get_key_handle.call_count, 2)
        self.assertEqual(self.pkcs11.get_session.call_count, 2)
        self.assertEqual(self.pkcs11.verify_hmac.call_count, 1)
        self.assertEqual(self.pkcs11.unwrap_key.call_count, 1)
        self.assertEqual(self.pkcs11.encrypt.call_count, 1)
        self.assertEqual(self.pkcs11.return_session.call_count, 1)

    def test_encrypt_bad_session(self):
        # A failed session acquisition propagates; note the session is NOT
        # returned (return_session.call_count == 0).
        self.pkcs11.get_session.return_value = mock.DEFAULT
        self.pkcs11.get_session.side_effect = pkcs11.P11CryptoPluginException(
            'Testing error handling'
        )
        payload = b'test payload'
        encrypt_dto = plugin_import.EncryptDTO(payload)
        kek_meta = mock.MagicMock()
        kek_meta.kek_label = 'pkek'
        kek_meta.plugin_meta = ('{"iv": "iv==",'
                                '"hmac": "hmac",'
                                '"wrapped_key": "wrappedkey==",'
                                '"mkek_label": "mkek_label",'
                                '"hmac_label": "hmac_label"}')
        self.assertRaises(pkcs11.P11CryptoPluginException,
                          self.plugin.encrypt,
                          encrypt_dto,
                          kek_meta,
                          mock.MagicMock())

        self.assertEqual(self.pkcs11.get_key_handle.call_count, 2)
        self.assertEqual(self.pkcs11.get_session.call_count, 2)
        self.assertEqual(self.pkcs11.verify_hmac.call_count, 1)
        self.assertEqual(self.pkcs11.unwrap_key.call_count, 1)
        self.assertEqual(self.pkcs11.encrypt.call_count, 0)
        self.assertEqual(self.pkcs11.return_session.call_count, 0)

    def test_decrypt(self):
        # Happy-path decrypt mirrors test_encrypt.
        ct = b'ctct'
        kek_meta_extended = '{"iv":"AAAA"}'
        decrypt_dto = plugin_import.DecryptDTO(ct)
        kek_meta = mock.MagicMock()
        kek_meta.kek_label = 'pkek'
        kek_meta.plugin_meta = ('{"iv": "iv==",'
                                '"hmac": "hmac",'
                                '"wrapped_key": "wrappedkey==",'
                                '"mkek_label": "mkek_label",'
                                '"hmac_label": "hmac_label"}')
        pt = self.plugin.decrypt(decrypt_dto,
                                 kek_meta,
                                 kek_meta_extended,
                                 mock.MagicMock())

        self.assertEqual(pt, b'0')

        self.assertEqual(self.pkcs11.get_key_handle.call_count, 2)
        self.assertEqual(self.pkcs11.get_session.call_count, 2)
        self.assertEqual(self.pkcs11.verify_hmac.call_count, 1)
        self.assertEqual(self.pkcs11.unwrap_key.call_count, 1)
        self.assertEqual(self.pkcs11.decrypt.call_count, 1)
        self.assertEqual(self.pkcs11.return_session.call_count, 1)

    def test_decrypt_bad_session(self):
        # Session failure during decrypt: error propagates, session leaked
        # intentionally not returned (matches encrypt_bad_session).
        self.pkcs11.get_session.return_value = mock.DEFAULT
        self.pkcs11.get_session.side_effect = pkcs11.P11CryptoPluginException(
            'Testing error handling'
        )
        ct = b'ctct'
        kek_meta_extended = '{"iv":"AAAA"}'
        decrypt_dto = plugin_import.DecryptDTO(ct)
        kek_meta = mock.MagicMock()
        kek_meta.kek_label = 'pkek'
        kek_meta.plugin_meta = ('{"iv": "iv==",'
                                '"hmac": "hmac",'
                                '"wrapped_key": "wrappedkey==",'
                                '"mkek_label": "mkek_label",'
                                '"hmac_label": "hmac_label"}')
        self.assertRaises(pkcs11.P11CryptoPluginException,
                          self.plugin.decrypt,
                          decrypt_dto,
                          kek_meta,
                          kek_meta_extended,
                          mock.MagicMock())

        self.assertEqual(self.pkcs11.get_key_handle.call_count, 2)
        self.assertEqual(self.pkcs11.get_session.call_count, 2)
        self.assertEqual(self.pkcs11.verify_hmac.call_count, 1)
        self.assertEqual(self.pkcs11.unwrap_key.call_count, 1)
        self.assertEqual(self.pkcs11.decrypt.call_count, 0)
        self.assertEqual(self.pkcs11.return_session.call_count, 0)

    def test_generate_symmetric(self):
        # Symmetric generation draws random bytes then encrypts them under
        # the unwrapped project KEK.
        secret = models.Secret()
        secret.bit_length = 128
        secret.algorithm = 'AES'
        generate_dto = plugin_import.GenerateDTO(
            secret.algorithm,
            secret.bit_length,
            None, None)
        kek_meta = mock.MagicMock()
        kek_meta.kek_label = 'pkek'
        kek_meta.plugin_meta = ('{"iv": "iv==",'
                                '"hmac": "hmac",'
                                '"wrapped_key": "wrappedkey==",'
                                '"mkek_label": "mkek_label",'
                                '"hmac_label": "hmac_label"}')
        response_dto = self.plugin.generate_symmetric(generate_dto,
                                                      kek_meta,
                                                      mock.MagicMock())

        self.assertEqual(response_dto.cypher_text, b'0')
        self.assertIn('iv', response_dto.kek_meta_extended)

        self.assertEqual(self.pkcs11.get_key_handle.call_count, 2)
        self.assertEqual(self.pkcs11.get_session.call_count, 2)
        self.assertEqual(self.pkcs11.generate_random.call_count, 1)
        self.assertEqual(self.pkcs11.verify_hmac.call_count, 1)
        self.assertEqual(self.pkcs11.unwrap_key.call_count, 1)
        self.assertEqual(self.pkcs11.encrypt.call_count, 1)
        self.assertEqual(self.pkcs11.return_session.call_count, 1)

    def test_generate_asymmetric_raises_error(self):
        # Asymmetric generation is not implemented by this plugin.
        self.assertRaises(NotImplementedError,
                          self.plugin.generate_asymmetric,
                          mock.MagicMock(),
                          mock.MagicMock(),
                          mock.MagicMock())

    def test_supports_encrypt_decrypt(self):
        self.assertTrue(
            self.plugin.supports(
                plugin_import.PluginSupportTypes.ENCRYPT_DECRYPT
            )
        )

    def test_supports_symmetric_key_generation(self):
        self.assertTrue(
            self.plugin.supports(
                plugin_import.PluginSupportTypes.SYMMETRIC_KEY_GENERATION
            )
        )

    def test_does_not_supports_asymmetric_key_generation(self):
        self.assertFalse(
            self.plugin.supports(
                plugin_import.PluginSupportTypes.ASYMMETRIC_KEY_GENERATION
            )
        )

    def test_does_not_support_unknown_type(self):
        self.assertFalse(
            self.plugin.supports('SOMETHING_RANDOM')
        )

    def test_missing_mkek(self):
        # Missing master key handle raises a dedicated exception.
        self.pkcs11.get_key_handle.return_value = None
        self.assertRaises(pkcs11.P11CryptoKeyHandleException,
                          self.plugin._get_master_key,
                          'bad_key_label')

    def test_cached_kek_expired(self):
        # Expired cache entries (ttl of 0) must read back as None.
        self.plugin.pkek_cache['expired_kek'] = p11_crypto.CachedKEK(4, 0)
        self.assertIsNone(self.plugin._pkek_cache_get('expired_kek'))

    def test_generate_mkek(self):
        self.pkcs11.get_key_handle.return_value = None
        mkek = self.plugin._generate_mkek(256, 'mkek_label_2')
        self.assertEqual(mkek, 3)
        self.assertEqual(self.pkcs11.get_key_handle.call_count, 3)
        self.assertEqual(self.pkcs11.generate_key.call_count, 1)

    def test_cached_generate_mkek(self):
        # Regenerating a label already cached in setUp must fail.
        self.assertRaises(pkcs11.P11CryptoPluginKeyException,
                          self.plugin._generate_mkek,
                          256, 'mkek_label')
        self.assertEqual(self.pkcs11.get_key_handle.call_count, 2)

    def test_existing_generate_mkek(self):
        # Regenerating a label that already exists on the token must fail.
        self.assertRaises(pkcs11.P11CryptoPluginKeyException,
                          self.plugin._generate_mkek,
                          256, 'mkek2_label')
        self.assertEqual(self.pkcs11.get_key_handle.call_count, 3)

    def test_generate_mkhk(self):
        self.pkcs11.get_key_handle.return_value = None
        mkhk = self.plugin._generate_mkhk(256, 'mkhk_label_2')
        self.assertEqual(mkhk, 3)
        self.assertEqual(self.pkcs11.get_key_handle.call_count, 3)
        self.assertEqual(self.pkcs11.generate_key.call_count, 1)

    def test_cached_generate_mkhk(self):
        self.assertRaises(pkcs11.P11CryptoPluginKeyException,
                          self.plugin._generate_mkhk,
                          256, 'hmac_label')
        self.assertEqual(self.pkcs11.get_key_handle.call_count, 2)

    def test_existing_generate_mkhk(self):
        self.assertRaises(pkcs11.P11CryptoPluginKeyException,
                          self.plugin._generate_mkhk,
                          256, 'mkhk2_label')
        self.assertEqual(self.pkcs11.get_key_handle.call_count, 3)

    def test_create_pkcs11(self):
        # Exercises the real PKCS11 wrapper construction against a stubbed
        # C library exposed through a patched ffi.dlopen.
        def _generate_random(session, buf, length):
            ffi.buffer(buf)[:] = b'0' * length
            return pkcs11.CKR_OK
        lib = mock.Mock()
        lib.C_Initialize.return_value = pkcs11.CKR_OK
        lib.C_OpenSession.return_value = pkcs11.CKR_OK
        lib.C_CloseSession.return_value = pkcs11.CKR_OK
        lib.C_GetSessionInfo.return_value = pkcs11.CKR_OK
        lib.C_Login.return_value = pkcs11.CKR_OK
        lib.C_GenerateRandom.side_effect = _generate_random
        ffi = pkcs11.build_ffi()
        setattr(ffi, 'dlopen', lambda x: lib)

        p11 = self.plugin._create_pkcs11(self.cfg_mock.p11_crypto_plugin, ffi)
        self.assertIsInstance(p11, pkcs11.PKCS11)
import mock
import six

from barbican.plugin.crypto import pkcs11
from barbican.tests import utils

if six.PY3:
    # Python 3 has no `long`; alias it so handle literals work on both.
    long = int


class WhenTestingPKCS11(utils.BaseTestCase):
    """Unit tests for the PKCS11 wrapper against a stubbed C library.

    Every C_* entry point is replaced with a Python side effect that
    mimics the library's out-parameter behavior via cffi buffers.
    """

    def setUp(self):
        super(WhenTestingPKCS11, self).setUp()

        self.lib = mock.Mock()
        self.lib.C_Initialize.return_value = pkcs11.CKR_OK
        self.lib.C_OpenSession.side_effect = self._open_session
        self.lib.C_CloseSession.return_value = pkcs11.CKR_OK
        self.lib.C_GetSessionInfo.side_effect = self._get_session_user
        self.lib.C_Login.return_value = pkcs11.CKR_OK
        self.lib.C_FindObjectsInit.return_value = pkcs11.CKR_OK
        self.lib.C_FindObjects.side_effect = self._find_objects_one
        self.lib.C_FindObjectsFinal.return_value = pkcs11.CKR_OK
        self.lib.C_GenerateKey.side_effect = self._generate_key
        self.lib.C_GenerateRandom.side_effect = self._generate_random
        self.lib.C_EncryptInit.return_value = pkcs11.CKR_OK
        self.lib.C_Encrypt.side_effect = self._encrypt
        self.lib.C_DecryptInit.return_value = pkcs11.CKR_OK
        self.lib.C_Decrypt.side_effect = self._decrypt
        self.lib.C_WrapKey.side_effect = self._wrap_key
        self.lib.C_UnwrapKey.side_effect = self._unwrap_key
        self.lib.C_SignInit.return_value = pkcs11.CKR_OK
        self.lib.C_Sign.side_effect = self._sign
        self.lib.C_VerifyInit.return_value = pkcs11.CKR_OK
        self.lib.C_Verify.side_effect = self._verify
        self.lib.C_DestroyObject.return_value = pkcs11.CKR_OK
        self.ffi = pkcs11.build_ffi()
        # Route dlopen to the mock library instead of a real .so.
        setattr(self.ffi, 'dlopen', lambda x: self.lib)

        self.cfg_mock = mock.MagicMock(name='config mock')
        self.cfg_mock.library_path = '/dev/null'
        self.cfg_mock.login_passphrase = 'foobar'
        self.cfg_mock.rw_session = False
        self.cfg_mock.slot_id = 1
        self.cfg_mock.algorithm = 'CKM_AES_GCM'

        self.pkcs11 = pkcs11.PKCS11(
            self.cfg_mock.library_path, self.cfg_mock.login_passphrase,
            self.cfg_mock.rw_session, self.cfg_mock.slot_id,
            ffi=self.ffi
        )

    # --- C-library side effects: fill out-params the way the real
    # --- PKCS#11 library would.

    def _generate_random(self, session, buf, length):
        self.ffi.buffer(buf)[:] = b'0' * length
        return pkcs11.CKR_OK

    def _get_session_public(self, session, session_info_ptr):
        # Report a public (not-logged-in) session state.
        if self.cfg_mock.rw_session:
            session_info_ptr[0].state = pkcs11.CKS_RW_PUBLIC_SESSION
        else:
            session_info_ptr[0].state = pkcs11.CKS_RO_PUBLIC_SESSION
        return pkcs11.CKR_OK

    def _get_session_user(self, session, session_info_ptr):
        # Report an already-logged-in session state.
        if self.cfg_mock.rw_session:
            session_info_ptr[0].state = pkcs11.CKS_RW_USER_FUNCTIONS
        else:
            session_info_ptr[0].state = pkcs11.CKS_RO_USER_FUNCTIONS
        return pkcs11.CKR_OK

    def _open_session(self, *args, **kwargs):
        args[4][0] = long(1)
        return pkcs11.CKR_OK

    def _find_objects_one(self, session, obj_handle_ptr, max_count, count):
        obj_handle_ptr[0] = long(2)
        count[0] = 1
        return pkcs11.CKR_OK

    def _find_objects_two(self, session, obj_handle_ptr, max_count, count):
        obj_handle_ptr[0] = long(2)
        count[0] = 2
        return pkcs11.CKR_OK

    def _find_objects_zero(self, session, obj_handle_ptr, max_count, count):
        count[0] = 0
        return pkcs11.CKR_OK

    def _generate_key(self, session, mech, attributes, attributes_len,
                      obj_handle_ptr):
        obj_handle_ptr[0] = long(3)
        return pkcs11.CKR_OK

    def _encrypt(self, session, pt, pt_len, ct, ct_len):
        # "Encrypt" by reversing the plaintext and appending a fake GCM tag.
        self.ffi.buffer(ct)[:] = pt[::-1] + b'0' * self.pkcs11.gcmtagsize
        return pkcs11.CKR_OK

    def _decrypt(self, session, ct, ct_len, pt, pt_len):
        # Inverse of _encrypt: strip the tag, reverse back.
        tmp = ct[:-self.pkcs11.gcmtagsize][::-1]
        self.ffi.buffer(pt)[:len(tmp)] = tmp
        return pkcs11.CKR_OK

    def _wrap_key(self, *args, **kwargs):
        # First call reports required length; second fills the buffer.
        wrapped_key = args[4]
        wrapped_key_len = args[5]
        wrapped_key_len[0] = long(16)
        if wrapped_key != self.ffi.NULL:
            self.ffi.buffer(wrapped_key)[:] = b'0' * 16
        return pkcs11.CKR_OK

    def _unwrap_key(self, *args, **kwargs):
        unwrapped_key = args[7]
        unwrapped_key[0] = long(1)
        return pkcs11.CKR_OK

    def _sign(self, *args, **kwargs):
        buf = args[3]
        buf_len = args[4]
        self.ffi.buffer(buf)[:] = b'0' * buf_len[0]
        return pkcs11.CKR_OK

    def _verify(self, *args, **kwargs):
        return pkcs11.CKR_OK

    # --- Actual tests.

    def test_public_get_session(self):
        # A public session triggers C_Login before being handed out.
        self.lib.C_GetSessionInfo.side_effect = self._get_session_public
        sess = self.pkcs11.get_session()

        self.assertEqual(sess, 1)

        self.assertEqual(self.lib.C_OpenSession.call_count, 2)
        self.assertEqual(self.lib.C_GetSessionInfo.call_count, 2)
        self.assertEqual(self.lib.C_Login.call_count, 1)
        self.assertEqual(self.lib.C_CloseSession.call_count, 1)

    def test_user_get_session(self):
        # An already-logged-in session needs no C_Login.
        self.pkcs11.get_session()

        self.assertEqual(self.lib.C_OpenSession.call_count, 2)
        self.assertEqual(self.lib.C_GetSessionInfo.call_count, 2)
        self.assertEqual(self.lib.C_Login.call_count, 0)

    def test_generate_random(self):
        r = self.pkcs11.generate_random(32, mock.MagicMock())
        self.assertEqual(r, b'0' * 32)
        self.assertEqual(self.lib.C_GenerateRandom.call_count, 2)

    def test_rng_self_test_fail(self):
        # An all-zero RNG output must fail the built-in self test.
        def _bad_generate_random(session, buf, length):
            self.ffi.buffer(buf)[:] = b'\x00' * length
            return pkcs11.CKR_OK
        self.lib.C_GenerateRandom.side_effect = _bad_generate_random
        self.assertRaises(pkcs11.P11CryptoPluginException,
                          self.pkcs11._rng_self_test, mock.MagicMock())

    def test_get_key_handle_one_key(self):
        key = self.pkcs11.get_key_handle('foo', mock.MagicMock())

        self.assertEqual(key, 2)

        self.assertEqual(self.lib.C_FindObjectsInit.call_count, 1)
        self.assertEqual(self.lib.C_FindObjects.call_count, 1)
        self.assertEqual(self.lib.C_FindObjectsFinal.call_count, 1)

    def test_get_key_handle_no_keys(self):
        self.lib.C_FindObjects.side_effect = self._find_objects_zero
        key = self.pkcs11.get_key_handle('foo', mock.MagicMock())

        self.assertIsNone(key)

        self.assertEqual(self.lib.C_FindObjectsInit.call_count, 1)
        self.assertEqual(self.lib.C_FindObjects.call_count, 1)
        self.assertEqual(self.lib.C_FindObjectsFinal.call_count, 1)

    def test_get_key_handle_multiple_keys(self):
        # Duplicate labels on the token are ambiguous -> error.
        self.lib.C_FindObjects.side_effect = self._find_objects_two

        self.assertRaises(pkcs11.P11CryptoPluginKeyException,
                          self.pkcs11.get_key_handle, 'foo',
                          mock.MagicMock())

        self.assertEqual(self.lib.C_FindObjectsInit.call_count, 1)
        self.assertEqual(self.lib.C_FindObjects.call_count, 1)
        self.assertEqual(self.lib.C_FindObjectsFinal.call_count, 1)

    def test_generate_session_key(self):
        key = self.pkcs11.generate_key(16, mock.MagicMock(), encrypt=True)

        self.assertEqual(key, 3)

        self.assertEqual(self.lib.C_GenerateKey.call_count, 1)

    def test_generate_master_key(self):
        key = self.pkcs11.generate_key(16, mock.MagicMock(), key_label='key',
                                       encrypt=True, master_key=True)

        self.assertEqual(key, 3)

        self.assertEqual(self.lib.C_GenerateKey.call_count, 1)

    def test_generate_key_no_flags(self):
        # Keys must be generated with at least one usage flag.
        self.assertRaises(pkcs11.P11CryptoPluginException,
                          self.pkcs11.generate_key, mock.MagicMock(),
                          mock.MagicMock())

    def test_generate_master_key_no_label(self):
        # Master keys must carry a label.
        self.assertRaises(ValueError, self.pkcs11.generate_key,
                          mock.MagicMock(), mock.MagicMock(),
                          encrypt=True, master_key=True)

    def test_encrypt(self):
        pt = b'0123456789ABCDEF'
        ct = self.pkcs11.encrypt(mock.MagicMock(), pt, mock.MagicMock())

        # Ciphertext is the stub's reversal; an IV must be supplied.
        self.assertEqual(ct['ct'][:len(pt)], pt[::-1])
        self.assertGreater(len(ct['iv']), 0)

        self.assertEqual(self.lib.C_GenerateRandom.call_count, 2)
        self.assertEqual(self.lib.C_EncryptInit.call_count, 1)
        self.assertEqual(self.lib.C_Encrypt.call_count, 1)

    def test_decrypt(self):
        ct = b'FEDCBA9876543210' + b'0' * self.pkcs11.gcmtagsize
        iv = b'0' * self.pkcs11.noncesize
        pt = self.pkcs11.decrypt(mock.MagicMock(), iv, ct, mock.MagicMock())

        pt_len = len(ct) - self.pkcs11.gcmtagsize
        self.assertEqual(pt[:pt_len], ct[:-self.pkcs11.gcmtagsize][::-1])

        self.assertEqual(self.lib.C_DecryptInit.call_count, 1)
        self.assertEqual(self.lib.C_Decrypt.call_count, 1)

    def test_decrypt_with_pad(self):
        # Leading \x03\x03\x03 is a valid PKCS7-style pad that gets removed.
        ct = b'\x03\x03\x03CBA9876543210' + b'0' * self.pkcs11.gcmtagsize
        iv = b'0' * self.pkcs11.blocksize
        pt = self.pkcs11.decrypt(mock.MagicMock(), iv, ct, mock.MagicMock())

        pt_len = len(ct) - self.pkcs11.gcmtagsize - 3
        self.assertEqual(pt[:pt_len], ct[3:-self.pkcs11.gcmtagsize][::-1])

        self.assertEqual(self.lib.C_DecryptInit.call_count, 1)
        self.assertEqual(self.lib.C_Decrypt.call_count, 1)

    def test_decrypt_with_pad_new_iv(self):
        # IV of gcmtagsize (not blocksize) disables pad stripping.
        ct = b'\x03\x03\x03CBA9876543210' + b'0' * self.pkcs11.gcmtagsize
        iv = b'0' * self.pkcs11.gcmtagsize
        pt = self.pkcs11.decrypt(mock.MagicMock(), iv, ct, mock.MagicMock())

        pt_len = len(ct) - self.pkcs11.gcmtagsize
        self.assertEqual(pt[:pt_len], ct[:-self.pkcs11.gcmtagsize][::-1])

        self.assertEqual(self.lib.C_DecryptInit.call_count, 1)
        self.assertEqual(self.lib.C_Decrypt.call_count, 1)

    def test_decrypt_with_pad_wrong_size(self):
        # Non-block-aligned ciphertext: pad is left alone.
        ct = b'\x03\x03\x03CBA987654321' + b'0' * self.pkcs11.gcmtagsize
        iv = b'0' * self.pkcs11.blocksize
        pt = self.pkcs11.decrypt(mock.MagicMock(), iv, ct, mock.MagicMock())

        pt_len = len(ct) - self.pkcs11.gcmtagsize
        self.assertEqual(pt[:pt_len], ct[:-self.pkcs11.gcmtagsize][::-1])

        self.assertEqual(self.lib.C_DecryptInit.call_count, 1)
        self.assertEqual(self.lib.C_Decrypt.call_count, 1)

    def test_decrypt_with_pad_wrong_length(self):
        # Inconsistent pad bytes: pad is left alone.
        ct = b'\x03EDCBA9876543210' + b'0' * self.pkcs11.gcmtagsize
        iv = b'0' * self.pkcs11.blocksize
        pt = self.pkcs11.decrypt(mock.MagicMock(), iv, ct, mock.MagicMock())

        pt_len = len(ct) - self.pkcs11.gcmtagsize
        self.assertEqual(pt[:pt_len], ct[:-self.pkcs11.gcmtagsize][::-1])

        self.assertEqual(self.lib.C_DecryptInit.call_count, 1)
        self.assertEqual(self.lib.C_Decrypt.call_count, 1)

    def test_decrypt_with_too_large_pad(self):
        # Pad value larger than the block size: pad is left alone.
        ct = b'\x11EDCBA9876543210' + b'0' * self.pkcs11.gcmtagsize
        iv = b'0' * self.pkcs11.blocksize
        pt = self.pkcs11.decrypt(mock.MagicMock(), iv, ct, mock.MagicMock())

        pt_len = len(ct) - self.pkcs11.gcmtagsize
        self.assertEqual(pt[:pt_len], ct[:-self.pkcs11.gcmtagsize][::-1])

        self.assertEqual(self.lib.C_DecryptInit.call_count, 1)
        self.assertEqual(self.lib.C_Decrypt.call_count, 1)

    def test_wrap_key(self):
        # C_WrapKey is called twice: once for the length, once for the data.
        wkek = self.pkcs11.wrap_key(mock.Mock(), mock.Mock(), mock.Mock())
        self.assertGreater(len(wkek['iv']), 0)
        self.assertEqual(wkek['wrapped_key'], b'0' * 16)

        self.assertEqual(self.lib.C_GenerateRandom.call_count, 2)
        self.assertEqual(self.lib.C_WrapKey.call_count, 2)

    def test_unwrap_key(self):
        kek = self.pkcs11.unwrap_key(mock.Mock(), b'0' * 16,
                                     b'0' * 16, mock.Mock())
        self.assertEqual(kek, 1)
        self.assertEqual(self.lib.C_UnwrapKey.call_count, 1)

    def test_compute_hmac(self):
        buf = self.pkcs11.compute_hmac(mock.MagicMock(), mock.MagicMock(),
                                       mock.MagicMock())
        self.assertEqual(len(buf), 32)

        self.assertEqual(self.lib.C_SignInit.call_count, 1)
        self.assertEqual(self.lib.C_Sign.call_count, 1)

    def test_verify_hmac(self):
        self.pkcs11.verify_hmac(mock.MagicMock(), mock.MagicMock(),
                                mock.MagicMock(), mock.MagicMock())

        self.assertEqual(self.lib.C_VerifyInit.call_count, 1)
        self.assertEqual(self.lib.C_Verify.call_count, 1)

    def test_destroy_object(self):
        self.pkcs11.destroy_object(mock.MagicMock(), mock.MagicMock())
        self.assertEqual(self.lib.C_DestroyObject.call_count, 1)

    def test_invalid_build_attributes(self):
        # A dict is not a valid CK_ATTRIBUTE value type.
        self.assertRaises(TypeError, self.pkcs11._build_attributes,
                          [pkcs11.Attribute(pkcs11.CKA_CLASS, {})])
import mock import threading from barbican.common import utils as common_utils from barbican.plugin.crypto import crypto from barbican.plugin.crypto import manager as cm from barbican.plugin.interface import secret_store from barbican.tests import utils class MyThread(threading.Thread): def __init__(self, index, results): threading.Thread.__init__(self) self.index = index self.results = results def run(self): self.results[self.index] = cm.get_manager() class WhenTestingManager(utils.BaseTestCase): def setUp(self): super(WhenTestingManager, self).setUp() self.plugin_returned = mock.MagicMock() self.plugin_type = crypto.PluginSupportTypes.ENCRYPT_DECRYPT self.plugin_returned.supports.return_value = True self.plugin_name = common_utils.generate_fullname_for( self.plugin_returned) self.plugin_loaded = mock.MagicMock(obj=self.plugin_returned) self.manager = cm.get_manager() self.manager.extensions = [self.plugin_loaded] def test_can_override_enabled_plugins(self): """Verify can override default configuration for plugin selection.""" # Reset manager singleton otherwise we have test execution # order problems cm._PLUGIN_MANAGER = None cm.CONF.set_override( "enabled_crypto_plugins", ['foo_plugin'], group='crypto', enforce_type=True) manager_to_test = cm.get_manager() self.assertIsInstance( manager_to_test, cm._CryptoPluginManager) self.assertListEqual(['foo_plugin'], manager_to_test._names) def test_get_plugin_store_generate(self): self.assertEqual( self.plugin_returned, self.manager.get_plugin_store_generate(self.plugin_type)) def test_raises_error_with_wrong_plugin_type(self): self.plugin_returned.supports.return_value = False self.assertRaises( secret_store.SecretStorePluginNotFound, self.manager.get_plugin_store_generate, self.plugin_type) def test_raises_error_with_no_active_store_generate_plugin(self): self.manager.extensions = [] self.assertRaises( crypto.CryptoPluginNotFound, self.manager.get_plugin_store_generate, self.plugin_type) def 
test_get_plugin_retrieve(self): self.assertEqual( self.plugin_returned, self.manager.get_plugin_retrieve(self.plugin_name)) def test_raises_error_with_wrong_plugin_name(self): self.assertRaises( secret_store.SecretStorePluginNotFound, self.manager.get_plugin_retrieve, 'other-name') def test_raises_error_with_no_active_plugin_name(self): self.manager.extensions = [] self.assertRaises( crypto.CryptoPluginNotFound, self.manager.get_plugin_retrieve, self.plugin_name) def test_get_manager_with_multi_threads(self): self.manager.extensions = [] self.manager = None results = [None] * 10 # setup 10 threads to call get_manager() at same time for i in range(10): t = MyThread(i, results) t.start() # verify all threads return one and same plugin manager for i in range(10): self.assertIsInstance(results[i], cm._CryptoPluginManager) self.assertEqual(results[0], results[i]) barbican-2.0.0/barbican/tests/plugin/__init__.py0000664000567000056710000000000012701405673022720 0ustar jenkinsjenkins00000000000000barbican-2.0.0/barbican/tests/plugin/test_store_crypto.py0000664000567000056710000007132512701405673024776 0ustar jenkinsjenkins00000000000000# Copyright (c) 2014 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
import base64 import mock import testtools from barbican.common import utils from barbican.model import models from barbican.plugin.crypto import crypto from barbican.plugin.interface import secret_store from barbican.plugin import store_crypto from barbican.tests import keys from barbican.tests import utils as test_utils def get_private_dto(): spec = secret_store.KeySpec(secret_store.KeyAlgorithm.RSA, 2048) return secret_store.SecretDTO(secret_store.SecretType.PRIVATE, base64.b64encode( keys.get_private_key_pem()), spec, 'application/pkcs8') def get_public_dto(): spec = secret_store.KeySpec(secret_store.KeyAlgorithm.RSA, 2048) return secret_store.SecretDTO(secret_store.SecretType.PUBLIC, base64.b64encode( keys.get_public_key_pem()), spec, 'application/octet-stream') def get_certificate_dto(): spec = secret_store.KeySpec(secret_store.KeyAlgorithm.RSA, 2048) return secret_store.SecretDTO(secret_store.SecretType.CERTIFICATE, base64.b64encode( keys.get_certificate_pem()), spec, 'application/pkix-cert') class TestSecretStoreBase(testtools.TestCase, test_utils.MockModelRepositoryMixin): """Define common configurations for testing store_crypto.py.""" def setUp(self): super(TestSecretStoreBase, self).setUp() self.patchers = [] # List of patchers utilized in this test class. 
self.project_id = '12345' self.content_type = 'application/octet-stream' self.content_encoding = 'base64' self.secret = base64.b64encode('secret') self.decrypted_secret = 'decrypted_secret' self.cypher_text = 'cypher_text' self.kek_meta_extended = 'kek-meta-extended' self.spec_aes = secret_store.KeySpec('AES', 64, 'CBC') self.spec_rsa = secret_store.KeySpec( 'RSA', 1024, passphrase='changeit') self.project_model = mock.MagicMock() self.project_model.id = 'project-model-id' self.project_model.external_id = self.project_id self.secret_dto = secret_store.SecretDTO( secret_store.SecretType.OPAQUE, self.secret, secret_store.KeySpec(), self.content_type ) self.response_dto = crypto.ResponseDTO( self.cypher_text, kek_meta_extended=self.kek_meta_extended) self.private_key_dto = crypto.ResponseDTO(self.cypher_text) self.public_key_dto = crypto.ResponseDTO(self.cypher_text) self.passphrase_dto = crypto.ResponseDTO(self.cypher_text) self.kek_meta_project_model = models.KEKDatum() self.kek_meta_project_model.plugin_name = 'plugin-name' self.kek_meta_project_model.kek_label = 'kek-meta-label' self.kek_meta_project_model.algorithm = 'kek-meta-algo' self.kek_meta_project_model.bit_length = 1024 self.kek_meta_project_model.mode = 'kek=meta-mode' self.kek_meta_project_model.plugin_meta = 'kek-meta-plugin-meta' self.encrypted_datum_model = models.EncryptedDatum() self.encrypted_datum_model.kek_meta_project = ( self.kek_meta_project_model) self.encrypted_datum_model.cypher_text = base64.b64encode( 'cypher_text') self.encrypted_datum_model.content_type = 'content_type' self.encrypted_datum_model.kek_meta_extended = 'extended_meta' self.secret_model = models.Secret( { 'algorithm': 'myalg', 'bit_length': 1024, 'mode': 'mymode' } ) self.secret_model.id = 'secret-model-id' self.secret_model.encrypted_data = [self.encrypted_datum_model] self.context = store_crypto.StoreCryptoContext( secret_model=self.secret_model, project_model=self.project_model, content_type=self.content_type) def 
tearDown(self): super(TestSecretStoreBase, self).tearDown() for patcher in self.patchers: patcher.stop() def init_patchers(self): self._config_get_secret_repository() self._config_get_encrypted_datum_repository() self._config_get_kek_datum_repository() def _start_patcher(self, patcher): mock = patcher.start() self.patchers.append(patcher) return mock def _config_get_secret_repository(self): """Mock the get_secret_repository() factory function.""" self.secret_repo = mock.MagicMock() self.secret_repo.create_from.return_value = self.secret_model self.setup_secret_repository_mock(self.secret_repo) def _config_get_encrypted_datum_repository(self): """Mock the get_encrypted_datum_repository() factory function.""" self.datum_repo = mock.MagicMock() self.datum_repo.create_from.return_value = None self.setup_encrypted_datum_repository_mock(self.datum_repo) def _config_get_kek_datum_repository(self): """Mock the get_kek_datum_repository() factory function.""" kek_model = self.kek_meta_project_model self.kek_repo = mock.MagicMock() self.kek_repo.find_or_create_kek_datum.return_value = kek_model self.setup_kek_datum_repository_mock(self.kek_repo) @test_utils.parameterized_test_case class WhenTestingStoreCrypto(TestSecretStoreBase): dataset_for_pem = { 'private': [get_private_dto()], 'public': [get_public_dto()], 'certificate': [get_certificate_dto()] } def setUp(self): super(WhenTestingStoreCrypto, self).setUp() self.init_patchers() self._config_crypto_plugin() self._config_private_methods() self.plugin_to_test = store_crypto.StoreCryptoAdapterPlugin() def test_store_secret_with_context_type(self): """Test storing a secret.""" response_dict = self.plugin_to_test.store_secret( self.secret_dto, self.context) self.assertIsNone(response_dict) # Verify encrypt plugin and method where invoked. 
encrypt_mock = self.encrypting_plugin.encrypt self.assertEqual(1, encrypt_mock.call_count) args, kwargs = encrypt_mock.call_args test_encrypt_dto, test_kek_meta_dto, test_project_id = tuple(args) self.assertIsInstance(test_encrypt_dto, crypto.EncryptDTO) self.assertEqual('secret', test_encrypt_dto.unencrypted) self.assertEqual(self.kek_meta_dto, test_kek_meta_dto) self.assertEqual(self.project_id, test_project_id) def test_store_secret_without_context_type(self): """Test storing a secret.""" self.context.content_type = None self.plugin_to_test.store_secret( self.secret_dto, self.context) self.assertEqual(self.content_type, self.context.content_type) @test_utils.parameterized_dataset(dataset_for_pem) def test_store_pem_secret(self, secret_dto): """Test storing a secret that is PEM encoded.""" response_dict = self.plugin_to_test.store_secret( secret_dto, self.context) self.assertIsNone(response_dict) raw_content = base64.b64decode(secret_dto.secret) # Verify encrypt plugin and method where invoked. encrypt_mock = self.encrypting_plugin.encrypt self.assertEqual(1, encrypt_mock.call_count) args, kwargs = encrypt_mock.call_args test_encrypt_dto, test_kek_meta_dto, test_project_id = tuple(args) self.assertIsInstance(test_encrypt_dto, crypto.EncryptDTO) self.assertEqual(raw_content, test_encrypt_dto.unencrypted) self.assertEqual(self.kek_meta_dto, test_kek_meta_dto) self.assertEqual(self.project_id, test_project_id) def test_get_secret(self): """Test getting a secret.""" secret_dto = self.plugin_to_test.get_secret( secret_store.SecretType.OPAQUE, None, # Secret metadata is not relevant to store_crypto process. self.context) # Verify response. 
self.assertIsInstance(secret_dto, secret_store.SecretDTO) self.assertEqual(secret_store.SecretType.OPAQUE, secret_dto.type) self.assertEqual( base64.encodestring(self.decrypted_secret).rstrip('\n'), secret_dto.secret) self.assertEqual( self.encrypted_datum_model.content_type, secret_dto.content_type) self.assertIsInstance(secret_dto.key_spec, secret_store.KeySpec) self.assertEqual( self.secret_model.algorithm, secret_dto.key_spec.alg) self.assertEqual( self.secret_model.bit_length, secret_dto.key_spec.bit_length) self.assertEqual( self.secret_model.mode, secret_dto.key_spec.mode) # Verify decrypt plugin and method where invoked. decrypt_mock = self.retrieving_plugin.decrypt self.assertEqual(1, decrypt_mock.call_count) args, kwargs = decrypt_mock.call_args ( test_decrypt, test_kek_meta, test_kek_meta_extended, test_project_id ) = tuple(args) self.assertIsInstance(test_decrypt, crypto.DecryptDTO) self.assertEqual( base64.b64decode(self.encrypted_datum_model.cypher_text), test_decrypt.encrypted) self.assertIsInstance(test_kek_meta, crypto.KEKMetaDTO) self.assertEqual( self.kek_meta_project_model.plugin_name, test_kek_meta.plugin_name) self.assertEqual( self.encrypted_datum_model.kek_meta_extended, test_kek_meta_extended) self.assertEqual(self.project_id, test_project_id) @test_utils.parameterized_dataset(dataset_for_pem) def test_get_secret_encoding(self, input_secret_dto): """Test getting a secret that should be returend in PEM format.""" secret = input_secret_dto.secret key_spec = input_secret_dto.key_spec secret_type = input_secret_dto.type decrypt_mock = self.retrieving_plugin.decrypt decrypt_mock.return_value = base64.decodestring(secret) secret_model = self.context.secret_model secret_model.algorithm = key_spec.alg secret_model.bit_length = key_spec.bit_length secret_model.mode = key_spec.mode secret_dto = self.plugin_to_test.get_secret( secret_type, None, # Secret metadata is not relevant to store_crypto process. self.context) # Verify response. 
self.assertIsInstance(secret_dto, secret_store.SecretDTO) self.assertEqual(secret, secret_dto.secret) self.assertEqual(secret_type, secret_dto.type) self.assertIsInstance(secret_dto.key_spec, secret_store.KeySpec) self.assertEqual( secret_model.algorithm, secret_dto.key_spec.alg) self.assertEqual( secret_model.bit_length, secret_dto.key_spec.bit_length) self.assertEqual( secret_model.mode, secret_dto.key_spec.mode) def test_generate_symmetric_key(self): """test symmetric secret generation.""" generation_type = crypto.PluginSupportTypes.SYMMETRIC_KEY_GENERATION self._config_determine_generation_type_private_method( generation_type) response_dict = self.plugin_to_test.generate_symmetric_key( self.spec_aes, self.context) self.assertIsNone(response_dict) # Verify KEK objects finder was invoked. method_target = self.find_or_create_kek_objects_patcher.target method_mock = method_target._find_or_create_kek_objects self.assertEqual(1, method_mock.call_count) # Verify generating plugin and method where invoked. self._verify_generating_plugin_args( self.generating_plugin.generate_symmetric, self.spec_aes.alg, self.spec_aes.bit_length) # Verify secret save was invoked. method_target = self.store_secret_and_datum_patcher.target method_mock = method_target._store_secret_and_datum self.assertEqual(1, method_mock.call_count) def test_generate_asymmetric_key_with_passphrase(self): """test asymmetric secret generation with passphrase.""" self._test_generate_asymmetric_key(passphrase='passphrase') def test_generate_asymmetric_key_without_passphrase(self): """test asymmetric secret generation with passphrase.""" self._test_generate_asymmetric_key(passphrase=None) def test_generate_supports(self): """test generate_supports.""" # False return if KeySpec == None self.assertFalse(self.plugin_to_test.generate_supports(None)) # AES KeySpec should be supported. 
key_spec = secret_store.KeySpec(alg='AES', bit_length=64, mode='CBC') self.assertTrue(self.plugin_to_test.generate_supports(key_spec)) key_spec = secret_store.KeySpec(alg='aes', bit_length=64, mode='CBC') self.assertTrue(self.plugin_to_test.generate_supports(key_spec)) # RSA KeySpec should be supported. key_spec = secret_store.KeySpec(alg='RSA', bit_length=2048) self.assertTrue(self.plugin_to_test.generate_supports(key_spec)) # Camellia KeySpec should not be supported. self.key_spec = secret_store.KeySpec('Camellia', 64) self.assertFalse(self.plugin_to_test.generate_supports(self.key_spec)) # Bogus KeySpec should not be supported. key_spec = secret_store.KeySpec(alg='bogus', bit_length=2048) self.assertFalse(self.plugin_to_test.generate_supports(key_spec)) def test_store_secret_supports(self): # All spec types are supported for storage. key_spec = secret_store.KeySpec( alg='anyalg', bit_length=64, mode='CBC') self.assertTrue(self.plugin_to_test.store_secret_supports(key_spec)) def test_delete_secret(self): """Delete is not implemented, so just verify passes.""" self.plugin_to_test.delete_secret(None) def test_should_raise_secret_not_found_get_secret_with_no_model(self): self.context.secret_model = None self.assertRaises( secret_store.SecretNotFoundException, self.plugin_to_test.get_secret, secret_store.SecretType.OPAQUE, None, # get_secret() doesn't use the secret metadata argument self.context ) def test_should_raise_secret_not_found_get_secret_no_encrypted_data(self): self.context.secret_model.encrypted_data = [] self.assertRaises( secret_store.SecretNotFoundException, self.plugin_to_test.get_secret, secret_store.SecretType.OPAQUE, None, # get_secret() doesn't use the secret metadata argument self.context ) def test_should_raise_algorithm_not_supported_generate_symmetric_key(self): generation_type = crypto.PluginSupportTypes.ASYMMETRIC_KEY_GENERATION self._config_determine_generation_type_private_method( generation_type) self.assertRaises( 
secret_store.SecretAlgorithmNotSupportedException, self.plugin_to_test.generate_symmetric_key, self.spec_aes, self.context ) def test_should_raise_algo_not_supported_generate_asymmetric_key(self): generation_type = crypto.PluginSupportTypes.SYMMETRIC_KEY_GENERATION self._config_determine_generation_type_private_method( generation_type) self.assertRaises( secret_store.SecretAlgorithmNotSupportedException, self.plugin_to_test.generate_asymmetric_key, self.spec_rsa, self.context ) def _test_generate_asymmetric_key(self, passphrase=None): """test asymmetric secret generation with passphrase parameter.""" self.spec_rsa.passphrase = passphrase generation_type = crypto.PluginSupportTypes.ASYMMETRIC_KEY_GENERATION self._config_determine_generation_type_private_method( generation_type) response_dto = self.plugin_to_test.generate_asymmetric_key( self.spec_rsa, self.context) # Verify response. self.assertIsInstance( response_dto, secret_store.AsymmetricKeyMetadataDTO) self.assertIsNone(response_dto.private_key_meta) self.assertIsNone(response_dto.public_key_meta) self.assertIsNone(response_dto.passphrase_meta) # Verify KEK objects finder was invoked. method_target = self.find_or_create_kek_objects_patcher.target method_mock = method_target._find_or_create_kek_objects self.assertEqual(1, method_mock.call_count) # Verify generating plugin and method where invoked. self._verify_generating_plugin_args( self.generating_plugin.generate_asymmetric, self.spec_rsa.alg, self.spec_rsa.bit_length) # Assert the secret save was called the proper number of times. 
call_count = 2 if passphrase: call_count = 3 method_target = self.store_secret_and_datum_patcher.target method_mock = method_target._store_secret_and_datum self.assertEqual(call_count, method_mock.call_count) def _verify_generating_plugin_args(self, generate_mock, alg, bit_length): """Verify generating plugin and method where invoked.""" self.assertEqual(1, generate_mock.call_count) args, kwargs = generate_mock.call_args test_generate_dto, test_kek_meta_dto, test_project_id = tuple(args) self.assertIsInstance(test_generate_dto, crypto.GenerateDTO) self.assertEqual(alg, test_generate_dto.algorithm) self.assertEqual(bit_length, test_generate_dto.bit_length) self.assertEqual(self.kek_meta_dto, test_kek_meta_dto) self.assertEqual(self.project_id, test_project_id) return generate_mock def _config_crypto_plugin(self): """Mock the crypto plugin.""" # Create encrypting and generating plugins (the same plugin does both) response_dto = self.response_dto self.generating_plugin = mock.MagicMock() self.encrypting_plugin = self.generating_plugin self.generating_plugin.encrypt.return_value = response_dto self.generating_plugin.generate_symmetric.return_value = response_dto self.generating_plugin.generate_asymmetric.return_value = ( self.private_key_dto, self.public_key_dto, self.passphrase_dto ) # Create secret retrieving plugin self.retrieving_plugin = mock.MagicMock() self.retrieving_plugin.decrypt.return_value = self.decrypted_secret gen_plugin_config = { 'get_plugin_store_generate.return_value': self.generating_plugin, 'get_plugin_retrieve.return_value': self.retrieving_plugin, } self.gen_plugin_patcher = mock.patch( 'barbican.plugin.crypto.manager._PLUGIN_MANAGER', **gen_plugin_config ) self._start_patcher(self.gen_plugin_patcher) def _config_private_methods(self): """Mock store_crypto's private methods.""" # Mock _find_or_create_kek_objects(). 
self.kek_meta_dto = mock.MagicMock() find_or_create_kek_objects_config = { 'return_value': ( self.kek_meta_project_model, self.kek_meta_dto), } self.find_or_create_kek_objects_patcher = mock.patch( 'barbican.plugin.store_crypto._find_or_create_kek_objects', **find_or_create_kek_objects_config ) self._start_patcher(self.find_or_create_kek_objects_patcher) # Mock _store_secret_and_datum(). self.store_secret_and_datum_patcher = mock.patch( 'barbican.plugin.store_crypto._store_secret_and_datum' ) self._start_patcher(self.store_secret_and_datum_patcher) def _config_determine_generation_type_private_method(self, type_to_return): """Mock _determine_generation_type().""" determine_generation_type_config = { 'return_value': type_to_return, } self.determine_generation_type_patcher = mock.patch( 'barbican.plugin.store_crypto._determine_generation_type', **determine_generation_type_config ) self._start_patcher(self.determine_generation_type_patcher) class WhenTestingStoreCryptoDetermineGenerationType(testtools.TestCase): """Tests store_crypto.py's _determine_generation_type() function.""" def test_symmetric_algorithms(self): for algorithm in crypto.PluginSupportTypes.SYMMETRIC_ALGORITHMS: self.assertEqual( crypto.PluginSupportTypes.SYMMETRIC_KEY_GENERATION, store_crypto._determine_generation_type(algorithm)) # Case doesn't matter. self.assertEqual( crypto.PluginSupportTypes.SYMMETRIC_KEY_GENERATION, store_crypto._determine_generation_type('AeS')) def test_asymmetric_algorithms(self): for algorithm in crypto.PluginSupportTypes.ASYMMETRIC_ALGORITHMS: self.assertEqual( crypto.PluginSupportTypes.ASYMMETRIC_KEY_GENERATION, store_crypto._determine_generation_type(algorithm)) # Case doesn't matter. 
self.assertEqual( crypto.PluginSupportTypes.ASYMMETRIC_KEY_GENERATION, store_crypto._determine_generation_type('RsA')) def test_should_raise_not_supported_no_algorithm(self): self.assertRaises( secret_store.SecretAlgorithmNotSupportedException, store_crypto._determine_generation_type, None ) def test_should_raise_not_supported_bogus_algorithm(self): self.assertRaises( secret_store.SecretAlgorithmNotSupportedException, store_crypto._determine_generation_type, 'bogus' ) class WhenTestingStoreCryptoFindOrCreateKekObjects(TestSecretStoreBase): """Tests store_crypto.py's _find_or_create_kek_objects() function.""" def setUp(self): super(WhenTestingStoreCryptoFindOrCreateKekObjects, self).setUp() self.init_patchers() self._config_private_methods() def test_kek_bind_completed(self): self.kek_meta_project_model.bind_completed = True plugin_inst = self kek_model, kek_meta_dto = store_crypto._find_or_create_kek_objects( plugin_inst, self.project_model) # Verify returns. self.assertEqual(self.kek_meta_project_model, kek_model) self.assertIsInstance(kek_meta_dto, crypto.KEKMetaDTO) # Verify the KEK repository interactions. self._verify_kek_repository_interactions(plugin_inst) def test_kek_bind_not_completed(self): self.kek_meta_project_model.bind_completed = False test_kek_metadata = 'metadata' plugin_inst = mock.MagicMock() plugin_inst.bind_kek_metadata.return_value = test_kek_metadata kek_model, kek_meta_dto = store_crypto._find_or_create_kek_objects( plugin_inst, self.project_model) # Verify returns. self.assertEqual(self.kek_meta_project_model, kek_model) self.assertEqual(test_kek_metadata, kek_meta_dto) # Verify the KEK repository interactions. self._verify_kek_repository_interactions(plugin_inst) # Verify bind operations. 
self.assertEqual( 1, plugin_inst.bind_kek_metadata.call_count) self.assertEqual( 1, self.bind_completed_mock.call_count) self.assertEqual( 1, self.kek_repo.save.call_count) args, kwargs = self.kek_repo.save.call_args kek_model = args[0] self.assertEqual(self.kek_meta_project_model, kek_model) def test_kek_raise_no_kek_bind_not_completed(self): self.kek_meta_project_model.bind_completed = False plugin_inst = mock.MagicMock() plugin_inst.bind_kek_metadata.return_value = None self.assertRaises( crypto.CryptoKEKBindingException, store_crypto._find_or_create_kek_objects, plugin_inst, self.project_model) def _verify_kek_repository_interactions(self, plugin_inst): """Verify the KEK repository interactions.""" self.assertEqual( 1, self.kek_repo.find_or_create_kek_datum.call_count) args, kwargs = self.kek_repo.find_or_create_kek_datum.call_args test_project_model = args[0] test_full_plugin_name = args[1] self.assertEqual(self.project_model, test_project_model) plugin_name = utils.generate_fullname_for(plugin_inst) self.assertEqual(plugin_name, test_full_plugin_name) def _config_private_methods(self): """Mock store_crypto's private methods.""" # Mock _indicate_bind_completed(). indicate_bind_completed_config = { 'return_value': None } self.indicate_bind_completed_patcher = mock.patch( 'barbican.plugin.store_crypto._indicate_bind_completed', **indicate_bind_completed_config) self.bind_completed_mock = self._start_patcher( self.indicate_bind_completed_patcher) class WhenTestingStoreCryptoStoreSecretAndDatum(TestSecretStoreBase): """Tests store_crypto.py's _store_secret_and_datum() function.""" def setUp(self): super(WhenTestingStoreCryptoStoreSecretAndDatum, self).setUp() self.init_patchers() def test_without_existing_secret(self): self.secret_model.id = None store_crypto._store_secret_and_datum( self.context, self.secret_model, self.kek_meta_project_model, self.response_dto) # Verify the repository interactions. 
self._verify_secret_repository_interactions() self._verify_encrypted_datum_repository_interactions() def test_with_existing_secret(self): store_crypto._store_secret_and_datum( self.context, self.secret_model, self.kek_meta_project_model, self.response_dto) # Verify the repository interactions. self._verify_encrypted_datum_repository_interactions() # Verify **not** these repository interactions. self.assertEqual( 0, self.secret_repo.create_from.call_count) def _verify_secret_repository_interactions(self): """Verify the secret repository interactions.""" self.assertEqual( 1, self.secret_repo.create_from.call_count) args, kwargs = self.secret_repo.create_from.call_args test_secret_model = args[0] self.assertEqual(self.secret_model, test_secret_model) def _verify_encrypted_datum_repository_interactions(self): """Verify the encrypted datum repository interactions.""" self.assertEqual( 1, self.datum_repo.create_from.call_count) args, kwargs = self.datum_repo.create_from.call_args test_datum_model = args[0] self.assertIsInstance(test_datum_model, models.EncryptedDatum) self.assertEqual( self.content_type, test_datum_model.content_type) self.assertEqual( base64.encodestring(self.cypher_text).rstrip('\n'), test_datum_model.cypher_text) self.assertEqual( self.response_dto.kek_meta_extended, test_datum_model.kek_meta_extended) class WhenTestingStoreCryptoIndicateBindCompleted(TestSecretStoreBase): """Tests store_crypto.py's _indicate_bind_completed() function.""" def test_bind_operation(self): kek_meta_dto = crypto.KEKMetaDTO(self.kek_meta_project_model) self.kek_meta_project_model.bind_completed = False store_crypto._indicate_bind_completed( kek_meta_dto, self.kek_meta_project_model) self.assertTrue(self.kek_meta_project_model.bind_completed) self.assertEqual( self.kek_meta_project_model.algorithm, kek_meta_dto.algorithm) self.assertEqual( self.kek_meta_project_model.bit_length, kek_meta_dto.bit_length) self.assertEqual( self.kek_meta_project_model.mode, kek_meta_dto.mode) 
self.assertEqual( self.kek_meta_project_model.plugin_meta, kek_meta_dto.plugin_meta) barbican-2.0.0/barbican/tests/plugin/test_dogtag.py0000664000567000056710000010445212701405673023505 0ustar jenkinsjenkins00000000000000# Copyright (c) 2014 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import base64 import datetime import os import tempfile from Crypto.PublicKey import RSA import mock from requests import exceptions as request_exceptions import testtools from barbican.tests import keys from barbican.tests import utils try: import barbican.plugin.dogtag as dogtag_import import barbican.plugin.interface.certificate_manager as cm import barbican.plugin.interface.secret_store as sstore import pki import pki.cert as dogtag_cert import pki.key as dogtag_key imports_ok = True except ImportError: # dogtag imports probably not available imports_ok = False @testtools.skipIf(not imports_ok, "Dogtag imports not available") class WhenTestingDogtagKRAPlugin(utils.BaseTestCase): def setUp(self): super(WhenTestingDogtagKRAPlugin, self).setUp() self.keyclient_mock = mock.MagicMock(name="KeyClient mock") self.patcher = mock.patch('pki.crypto.NSSCryptoProvider') self.patcher.start() # create nss db for test only self.nss_dir = tempfile.mkdtemp() self.cfg_mock = mock.MagicMock(name='config mock') self.cfg_mock.dogtag_plugin = mock.MagicMock( nss_db_path=self.nss_dir) self.plugin = dogtag_import.DogtagKRAPlugin(self.cfg_mock) self.plugin.keyclient = self.keyclient_mock def 
tearDown(self): super(WhenTestingDogtagKRAPlugin, self).tearDown() self.patcher.stop() os.rmdir(self.nss_dir) def test_generate_symmetric_key(self): key_spec = sstore.KeySpec(sstore.KeyAlgorithm.AES, 128) self.plugin.generate_symmetric_key(key_spec) self.keyclient_mock.generate_symmetric_key.assert_called_once_with( mock.ANY, sstore.KeyAlgorithm.AES.upper(), 128, mock.ANY) def test_generate_asymmetric_key(self): key_spec = sstore.KeySpec(sstore.KeyAlgorithm.RSA, 2048) self.plugin.generate_asymmetric_key(key_spec) self.keyclient_mock.generate_asymmetric_key.assert_called_once_with( mock.ANY, sstore.KeyAlgorithm.RSA.upper(), 2048, mock.ANY) def test_generate_non_supported_algorithm(self): key_spec = sstore.KeySpec(sstore.KeyAlgorithm.EC, 192) self.assertRaises( dogtag_import.DogtagPluginAlgorithmException, self.plugin.generate_symmetric_key, key_spec ) def test_raises_error_with_no_pem_path(self): m = mock.MagicMock() m.dogtag_plugin = mock.MagicMock(pem_path=None, nss_db_path='/tmp') self.assertRaises( ValueError, dogtag_import.DogtagKRAPlugin, m, ) def test_store_secret(self): payload = 'encrypt me!!' 
key_spec = mock.MagicMock() content_type = mock.MagicMock() transport_key = None secret_dto = sstore.SecretDTO(sstore.SecretType.SYMMETRIC, payload, key_spec, content_type, transport_key) self.plugin.store_secret(secret_dto) self.keyclient_mock.archive_key.assert_called_once_with( mock.ANY, "passPhrase", payload, key_algorithm=None, key_size=None) def test_store_secret_with_tkey_id(self): payload = 'data wrapped in PKIArchiveOptions object' key_spec = mock.MagicMock() content_type = mock.MagicMock() transport_key = mock.MagicMock() secret_dto = sstore.SecretDTO(sstore.SecretType.SYMMETRIC, payload, key_spec, content_type, transport_key) self.plugin.store_secret(secret_dto) self.keyclient_mock.archive_pki_options.assert_called_once_with( mock.ANY, "passPhrase", payload, key_algorithm=None, key_size=None) def test_get_secret(self): secret_metadata = { dogtag_import.DogtagKRAPlugin.ALG: sstore.KeyAlgorithm.AES, dogtag_import.DogtagKRAPlugin.BIT_LENGTH: 256, dogtag_import.DogtagKRAPlugin.KEY_ID: 'key1' } self.plugin.get_secret(sstore.SecretType.SYMMETRIC, secret_metadata) self.keyclient_mock.retrieve_key.assert_called_once_with('key1', None) def test_get_secret_with_twsk(self): twsk = mock.MagicMock() secret_metadata = { dogtag_import.DogtagKRAPlugin.ALG: sstore.KeyAlgorithm.AES, dogtag_import.DogtagKRAPlugin.BIT_LENGTH: 256, dogtag_import.DogtagKRAPlugin.KEY_ID: 'key1', 'trans_wrapped_session_key': twsk } self.plugin.get_secret(sstore.SecretType.SYMMETRIC, secret_metadata) self.keyclient_mock.retrieve_key.assert_called_once_with('key1', twsk) def test_get_private_key(self): test_key = RSA.generate(2048) key_data = dogtag_key.KeyData() key_data.data = test_key.exportKey('DER') self.keyclient_mock.retrieve_key.return_value = key_data secret_metadata = { dogtag_import.DogtagKRAPlugin.ALG: sstore.KeyAlgorithm.RSA, dogtag_import.DogtagKRAPlugin.BIT_LENGTH: 2048, dogtag_import.DogtagKRAPlugin.KEY_ID: 'key1', dogtag_import.DogtagKRAPlugin.CONVERT_TO_PEM: 'true' } result = 
self.plugin.get_secret(sstore.SecretType.PRIVATE, secret_metadata) self.assertEqual(test_key.exportKey('PEM').encode('utf-8'), result.secret) def test_get_public_key(self): test_public_key = RSA.generate(2048).publickey() key_info = dogtag_key.KeyInfo() key_info.public_key = test_public_key.exportKey('DER') self.keyclient_mock.get_key_info.return_value = key_info secret_metadata = { dogtag_import.DogtagKRAPlugin.ALG: sstore.KeyAlgorithm.RSA, dogtag_import.DogtagKRAPlugin.BIT_LENGTH: 2048, dogtag_import.DogtagKRAPlugin.KEY_ID: 'key1', dogtag_import.DogtagKRAPlugin.CONVERT_TO_PEM: 'true' } result = self.plugin.get_secret(sstore.SecretType.PUBLIC, secret_metadata) self.assertEqual(test_public_key.exportKey('PEM').encode('utf-8'), result.secret) def test_store_passphrase_for_using_in_private_key_retrieval(self): key_spec = sstore.KeySpec(sstore.KeyAlgorithm.RSA, 2048, passphrase="password123") # Mock the response for passphrase archival request_response = dogtag_key.KeyRequestResponse() request_info = dogtag_key.KeyRequestInfo() request_info.key_url = "https://example_url/1" request_response.request_info = request_info self.keyclient_mock.archive_key.return_value = request_response asym_key_DTO = self.plugin.generate_asymmetric_key(key_spec) self.assertEqual( '1', asym_key_DTO.private_key_meta[ dogtag_import.DogtagKRAPlugin.PASSPHRASE_KEY_ID] ) self.keyclient_mock.generate_asymmetric_key.assert_called_once_with( mock.ANY, sstore.KeyAlgorithm.RSA.upper(), 2048, mock.ANY) def test_supports_symmetric_aes_key_generation(self): key_spec = sstore.KeySpec(sstore.KeyAlgorithm.AES, 256) self.assertTrue( self.plugin.generate_supports(key_spec) ) def test_supports_asymmetric_rsa_key_generation(self): key_spec = sstore.KeySpec(sstore.KeyAlgorithm.RSA, 2048) self.assertTrue( self.plugin.generate_supports(key_spec) ) def test_supports_asymmetric_ec_key_generation(self): key_spec = sstore.KeySpec(sstore.KeyAlgorithm.EC, 156) self.assertFalse( self.plugin.generate_supports(key_spec) ) 
def test_supports_symmetric_dh_key_generation(self):
    # Diffie-Hellman generation is not supported by this plugin.
    key_spec = sstore.KeySpec(sstore.KeyAlgorithm.DIFFIE_HELLMAN, 156)
    self.assertFalse(
        self.plugin.generate_supports(key_spec)
    )

def test_does_not_support_unknown_type(self):
    # An unrecognized algorithm name must be rejected.
    key_spec = sstore.KeySpec("SOMETHING_RANDOM", 156)
    self.assertFalse(
        self.plugin.generate_supports(key_spec)
    )


@testtools.skipIf(not imports_ok, "Dogtag imports not available")
class WhenTestingDogtagCAPlugin(utils.BaseTestCase):
    """Tests for the Dogtag CA certificate plugin.

    The pki client connection and crypto provider are patched out, and the
    plugin's CertClient is replaced by a MagicMock, so no Dogtag server is
    needed.  Temporary files back the plugin's NSS DB, CA expiration data
    and host-CA aid so the real config-file code paths are exercised.
    """

    def setUp(self):
        super(WhenTestingDogtagCAPlugin, self).setUp()
        self.certclient_mock = mock.MagicMock(name="CertClient mock")
        self.patcher = mock.patch('pki.crypto.NSSCryptoProvider')
        self.patcher2 = mock.patch('pki.client.PKIConnection')
        self.patcher.start()
        self.patcher2.start()

        # create nss db for test only
        self.nss_dir = tempfile.mkdtemp()

        # create expiration file for test
        # NOTE(review): os.write() with a str argument is Python-2 only;
        # Python 3 would require bytes here — confirm target interpreter.
        fh, self.expiration_data_path = tempfile.mkstemp()
        exp_time = datetime.datetime.utcnow() + datetime.timedelta(days=2)
        os.write(fh, exp_time.strftime(
            "%Y-%m-%d %H:%M:%S.%f"))
        os.close(fh)

        # create host CA file for test
        fh, self.host_ca_path = tempfile.mkstemp()
        os.write(fh, "host_ca_aid")
        os.close(fh)

        self.approved_profile_id = "caServerCert"
        CONF = dogtag_import.CONF
        CONF.dogtag_plugin.nss_db_path = self.nss_dir
        CONF.dogtag_plugin.ca_expiration_data_path = self.expiration_data_path
        CONF.dogtag_plugin.ca_host_aid_path = self.host_ca_path
        CONF.dogtag_plugin.auto_approved_profiles = [self.approved_profile_id]
        CONF.dogtag_plugin.dogtag_host = "localhost"
        CONF.dogtag_plugin.dogtag_port = "8443"
        CONF.dogtag_plugin.simple_cmc_profile = "caOtherCert"
        self.cfg = CONF

        self.plugin = dogtag_import.DogtagCAPlugin(CONF)
        self.plugin.certclient = self.certclient_mock
        self.order_id = mock.MagicMock()
        self.profile_id = mock.MagicMock()

        # request generated
        self.request_id_mock = mock.MagicMock()
        self.request = dogtag_cert.CertRequestInfo()
        self.request.request_id = self.request_id_mock
        self.request.request_status = dogtag_cert.CertRequestStatus.COMPLETE
        self.cert_id_mock = mock.MagicMock()
        self.request.cert_id = self.cert_id_mock

        # cert generated
        self.cert = mock.MagicMock()
        self.cert.encoded = keys.get_certificate_pem()
        self.cert.pkcs7_cert_chain = keys.get_certificate_der()

        # for cancel/modify
        self.review_response = mock.MagicMock()

        # modified request
        self.modified_request = mock.MagicMock()
        self.modified_request_id_mock = mock.MagicMock()
        self.modified_request.request_id = self.modified_request_id_mock
        self.modified_request.request_status = (
            dogtag_cert.CertRequestStatus.COMPLETE)
        self.modified_request.cert_id = self.cert_id_mock

        self.barbican_meta_dto = cm.BarbicanMetaDTO()

    def tearDown(self):
        # Undo the pki patches and remove the temporary files/dirs created
        # in setUp.
        super(WhenTestingDogtagCAPlugin, self).tearDown()
        self.patcher2.stop()
        self.patcher.stop()
        os.rmdir(self.nss_dir)
        os.remove(self.host_ca_path)
        os.remove(self.expiration_data_path)

    def _process_approved_profile_request(self, order_meta, plugin_meta):
        # Shared driver for the auto-approved-profile path: the CA enrolls
        # synchronously and a generated certificate comes straight back.
        enrollment_result = dogtag_cert.CertEnrollmentResult(
            self.request, self.cert)
        enrollment_results = [enrollment_result]
        self.certclient_mock.enroll_cert.return_value = enrollment_results

        result_dto = self.plugin.issue_certificate_request(
            self.order_id, order_meta, plugin_meta, self.barbican_meta_dto)

        self.certclient_mock.enroll_cert.assert_called_once_with(
            self.approved_profile_id, order_meta)

        self.assertEqual(cm.CertificateStatus.CERTIFICATE_GENERATED,
                         result_dto.status,
                         "result_dto status incorrect")

        self.assertEqual(base64.b64encode(keys.get_certificate_pem()),
                         result_dto.certificate)

        # The Dogtag request id must be stashed in plugin_meta for later
        # status/cancel/modify calls.
        self.assertEqual(
            self.request_id_mock,
            plugin_meta.get(dogtag_import.DogtagCAPlugin.REQUEST_ID)
        )

    def _process_non_approved_profile_request(self, order_meta, plugin_meta,
                                              profile_id, inputs=None):
        # Shared driver for profiles that need agent approval: the plugin
        # creates and submits an enrollment request and the order is left
        # waiting on the CA.
        if inputs is None:
            inputs = {
                'cert_request_type': 'pkcs10',
                'cert_request': base64.b64decode(
                    order_meta.get('request_data'))
            }

        # mock CertRequestInfo
        enrollment_result = dogtag_cert.CertRequestInfo()
        enrollment_result.request_id = self.request_id_mock
        enrollment_result.request_status = (
            dogtag_cert.CertRequestStatus.PENDING)

        # mock CertRequestInfoCollection
        enrollment_results = dogtag_cert.CertRequestInfoCollection()
        enrollment_results.cert_request_info_list = (
            [enrollment_result])

        self.certclient_mock.create_enrollment_request.return_value = (
            enrollment_result)
        self.certclient_mock.submit_enrollment_request.return_value = (
            enrollment_results)

        result_dto = self.plugin.issue_certificate_request(
            self.order_id, order_meta, plugin_meta, self.barbican_meta_dto)

        self.certclient_mock.create_enrollment_request.assert_called_once_with(
            profile_id, inputs)

        self.certclient_mock.submit_enrollment_request.assert_called_once_with(
            enrollment_result)

        self.assertEqual(cm.CertificateStatus.WAITING_FOR_CA,
                         result_dto.status,
                         "result_dto status incorrect")

        self.assertEqual(
            self.request_id_mock,
            plugin_meta.get(dogtag_import.DogtagCAPlugin.REQUEST_ID)
        )

    def test_issue_simple_cmc_request(self):
        # Simple CMC requests go through the configured simple_cmc_profile.
        order_meta = {
            cm.REQUEST_TYPE: cm.CertificateRequestType.SIMPLE_CMC_REQUEST,
            'request_data': base64.b64encode(keys.get_csr_pem())
        }
        plugin_meta = {}
        self._process_non_approved_profile_request(
            order_meta,
            plugin_meta,
            self.cfg.dogtag_plugin.simple_cmc_profile)

    def test_issue_full_cmc_request(self):
        # Full CMC is not implemented by the plugin.
        order_meta = {
            cm.REQUEST_TYPE: cm.CertificateRequestType.FULL_CMC_REQUEST,
            'request_data': 'Full CMC data ...'
        }
        plugin_meta = {}
        self.assertRaises(
            dogtag_import.DogtagPluginNotSupportedException,
            self.plugin.issue_certificate_request,
            self.order_id,
            order_meta,
            plugin_meta,
            self.barbican_meta_dto
        )

    def test_issue_stored_key_request(self):
        # Stored-key requests reuse the simple CMC profile path.
        order_meta = {
            cm.REQUEST_TYPE: cm.CertificateRequestType.STORED_KEY_REQUEST,
            'request_data': base64.b64encode(keys.get_csr_pem())
        }
        plugin_meta = {}
        self._process_non_approved_profile_request(
            order_meta,
            plugin_meta,
            self.cfg.dogtag_plugin.simple_cmc_profile)

    def test_issue_custom_key_request(self):
        # Custom requests with an auto-approved profile complete immediately.
        order_meta = {
            cm.REQUEST_TYPE: cm.CertificateRequestType.CUSTOM_REQUEST,
            dogtag_import.DogtagCAPlugin.PROFILE_ID: self.approved_profile_id,
        }
        plugin_meta = {}
        self._process_approved_profile_request(order_meta, plugin_meta)

    def test_issue_no_cert_request_type_provided(self):
        # Omitting the request type falls back to the custom-request path.
        order_meta = {
            dogtag_import.DogtagCAPlugin.PROFILE_ID: self.approved_profile_id}
        plugin_meta = {}
        self._process_approved_profile_request(order_meta, plugin_meta)

    def test_issue_bad_cert_request_type_provided(self):
        # Unknown request types are rejected outright.
        order_meta = {
            cm.REQUEST_TYPE: 'BAD_REQUEST_TYPE',
            dogtag_import.DogtagCAPlugin.PROFILE_ID: self.profile_id,
        }
        plugin_meta = {}
        self.assertRaises(
            dogtag_import.DogtagPluginNotSupportedException,
            self.plugin.issue_certificate_request,
            self.order_id,
            order_meta,
            plugin_meta,
            self.barbican_meta_dto
        )

    def test_issue_return_data_error_with_no_profile_id(self):
        # Missing profile_id is a client-data problem, not an exception.
        order_meta = {}
        plugin_meta = {}

        result_dto = self.plugin.issue_certificate_request(
            self.order_id, order_meta, plugin_meta, self.barbican_meta_dto)

        self.assertEqual(result_dto.status,
                         cm.CertificateStatus.CLIENT_DATA_ISSUE_SEEN,
                         "result_dto status incorrect")

        self.assertEqual(result_dto.status_message,
                         "No profile_id specified")

    def test_issue_return_data_error_with_request_rejected(self):
        # A REJECTED CA request maps to CLIENT_DATA_ISSUE_SEEN.
        order_meta = {
            dogtag_import.DogtagCAPlugin.PROFILE_ID: self.approved_profile_id}
        plugin_meta = {}
        self.request.request_status = dogtag_cert.CertRequestStatus.REJECTED

        enrollment_result = dogtag_cert.CertEnrollmentResult(
            self.request, None)
        enrollment_results = [enrollment_result]
        self.certclient_mock.enroll_cert.return_value = enrollment_results

        result_dto = self.plugin.issue_certificate_request(
            self.order_id, order_meta, plugin_meta, self.barbican_meta_dto)

        self.certclient_mock.enroll_cert.assert_called_once_with(
            self.approved_profile_id, order_meta)

        self.assertEqual(cm.CertificateStatus.CLIENT_DATA_ISSUE_SEEN,
                         result_dto.status,
                         "result_dto status incorrect")

        self.assertEqual(
            self.request_id_mock,
            plugin_meta.get(dogtag_import.DogtagCAPlugin.REQUEST_ID))

    def test_issue_return_canceled_with_request_canceled(self):
        # A CANCELED CA request maps to REQUEST_CANCELED.
        order_meta = {
            dogtag_import.DogtagCAPlugin.PROFILE_ID: self.approved_profile_id}
        plugin_meta = {}
        self.request.request_status = dogtag_cert.CertRequestStatus.CANCELED

        enrollment_result = dogtag_cert.CertEnrollmentResult(
            self.request, None)
        enrollment_results = [enrollment_result]
        self.certclient_mock.enroll_cert.return_value = enrollment_results

        result_dto = self.plugin.issue_certificate_request(
            self.order_id, order_meta, plugin_meta, self.barbican_meta_dto)

        self.certclient_mock.enroll_cert.assert_called_once_with(
            self.approved_profile_id, order_meta)

        self.assertEqual(cm.CertificateStatus.REQUEST_CANCELED,
                         result_dto.status,
                         "result_dto status incorrect")

        self.assertEqual(
            self.request_id_mock,
            plugin_meta.get(dogtag_import.DogtagCAPlugin.REQUEST_ID),
        )

    def test_issue_return_waiting_with_request_pending(self):
        # Non-approved profiles submit an enrollment request and wait;
        # the inputs dict here carries the raw CSR and profile id.
        order_meta = {
            dogtag_import.DogtagCAPlugin.PROFILE_ID: "otherProfile",
            'cert_request': base64.b64encode(keys.get_csr_pem())}
        plugin_meta = {}
        inputs = {
            'cert_request': keys.get_csr_pem(),
            dogtag_import.DogtagCAPlugin.PROFILE_ID: "otherProfile"
        }
        self._process_non_approved_profile_request(
            order_meta, plugin_meta, "otherProfile", inputs)

    def test_issue_raises_error_request_complete_no_cert(self):
        # COMPLETE status with no certificate attached is an internal error.
        order_meta = {
            dogtag_import.DogtagCAPlugin.PROFILE_ID: self.approved_profile_id}
        plugin_meta = {}

        enrollment_result = dogtag_cert.CertEnrollmentResult(
            self.request, None)
        enrollment_results = [enrollment_result]
        self.certclient_mock.enroll_cert.return_value = enrollment_results

        self.assertRaises(
            cm.CertificateGeneralException,
            self.plugin.issue_certificate_request,
            self.order_id,
            order_meta,
            plugin_meta,
            self.barbican_meta_dto
        )

        self.assertEqual(
            self.request_id_mock,
            plugin_meta.get(dogtag_import.DogtagCAPlugin.REQUEST_ID)
        )

    def test_issue_raises_error_request_unknown_status(self):
        # A status value outside the CertRequestStatus enum is an error.
        order_meta = {
            dogtag_import.DogtagCAPlugin.PROFILE_ID: self.approved_profile_id}
        plugin_meta = {}
        self.request.request_status = "unknown_status"

        enrollment_result = dogtag_cert.CertEnrollmentResult(
            self.request, None)
        enrollment_results = [enrollment_result]
        self.certclient_mock.enroll_cert.return_value = enrollment_results

        self.assertRaises(
            cm.CertificateGeneralException,
            self.plugin.issue_certificate_request,
            self.order_id,
            order_meta,
            plugin_meta,
            self.barbican_meta_dto
        )

        self.assertEqual(
            self.request_id_mock,
            plugin_meta.get(dogtag_import.DogtagCAPlugin.REQUEST_ID)
        )

    def test_issue_return_client_error_bad_request_exception(self):
        # pki.BadRequestException from the CA maps to a client data issue.
        order_meta = {
            dogtag_import.DogtagCAPlugin.PROFILE_ID: self.approved_profile_id}
        plugin_meta = {}

        self.certclient_mock.enroll_cert.side_effect = (
            pki.BadRequestException("bad request"))

        result_dto = self.plugin.issue_certificate_request(
            self.order_id, order_meta, plugin_meta, self.barbican_meta_dto)

        self.certclient_mock.enroll_cert.assert_called_once_with(
            self.approved_profile_id, order_meta)

        self.assertEqual(cm.CertificateStatus.CLIENT_DATA_ISSUE_SEEN,
                         result_dto.status,
                         "result_dto status incorrect")

    def test_issue_raises_error_pki_exception(self):
        # A generic pki.PKIException propagates as a general exception.
        order_meta = {
            dogtag_import.DogtagCAPlugin.PROFILE_ID: self.approved_profile_id}
        plugin_meta = {}

        self.certclient_mock.enroll_cert.side_effect = (
            pki.PKIException("generic enrollment error"))

        self.assertRaises(
            cm.CertificateGeneralException,
            self.plugin.issue_certificate_request,
            self.order_id,
            order_meta,
            plugin_meta,
            self.barbican_meta_dto
        )

    def test_issue_return_ca_unavailable(self):
        # Transport-level failures map to CA_UNAVAILABLE_FOR_REQUEST.
        order_meta = {
            dogtag_import.DogtagCAPlugin.PROFILE_ID: self.approved_profile_id}
        plugin_meta = {}

        self.certclient_mock.enroll_cert.side_effect = (
            request_exceptions.RequestException())

        result_dto = self.plugin.issue_certificate_request(
            self.order_id, order_meta, plugin_meta, self.barbican_meta_dto)

        self.certclient_mock.enroll_cert.assert_called_once_with(
            self.approved_profile_id, order_meta)

        self.assertEqual(cm.CertificateStatus.CA_UNAVAILABLE_FOR_REQUEST,
                         result_dto.status,
                         "result_dto status incorrect")

    def test_cancel_request(self):
        # Happy-path cancel: review the request, then cancel it.
        order_meta = mock.ANY
        plugin_meta = {dogtag_import.DogtagCAPlugin.REQUEST_ID:
                       self.request_id_mock}
        self.certclient_mock.cancel_request.return_value = None
        self.certclient_mock.review_request.return_value = self.review_response

        result_dto = self.plugin.cancel_certificate_request(
            self.order_id, order_meta, plugin_meta, self.barbican_meta_dto)

        self.certclient_mock.cancel_request.assert_called_once_with(
            self.request_id_mock, self.review_response)

        self.assertEqual(cm.CertificateStatus.REQUEST_CANCELED,
                         result_dto.status,
                         "result_dto_status incorrect")

    def test_cancel_no_request_found(self):
        # Unknown request id on cancel is treated as a client data issue.
        order_meta = mock.ANY
        plugin_meta = {dogtag_import.DogtagCAPlugin.REQUEST_ID:
                       self.request_id_mock}

        self.certclient_mock.review_request.side_effect = (
            pki.RequestNotFoundException("request_not_found"))

        result_dto = self.plugin.cancel_certificate_request(
            self.order_id, order_meta, plugin_meta, self.barbican_meta_dto)

        self.certclient_mock.review_request.assert_called_once_with(
            self.request_id_mock)

        self.assertEqual(cm.CertificateStatus.CLIENT_DATA_ISSUE_SEEN,
                         result_dto.status,
                         "result_dto_status incorrect")

    def test_cancel_conflicting_operation(self):
        # A conflicting operation on the CA side yields INVALID_OPERATION.
        order_meta = mock.ANY
        plugin_meta = {dogtag_import.DogtagCAPlugin.REQUEST_ID:
                       self.request_id_mock}
        self.certclient_mock.review_request.return_value = self.review_response
        self.certclient_mock.cancel_request.side_effect = (
            pki.ConflictingOperationException("conflicting_operation"))

        result_dto = self.plugin.cancel_certificate_request(
            self.order_id, order_meta, plugin_meta, self.barbican_meta_dto)

        self.certclient_mock.cancel_request.assert_called_once_with(
            self.request_id_mock, self.review_response)

        self.assertEqual(cm.CertificateStatus.INVALID_OPERATION,
                         result_dto.status,
                         "result_dto_status incorrect")

    def test_cancel_ca_unavailable(self):
        # Transport failure during review -> CA unavailable.
        order_meta = mock.ANY
        plugin_meta = {dogtag_import.DogtagCAPlugin.REQUEST_ID:
                       self.request_id_mock}

        self.certclient_mock.review_request.side_effect = (
            request_exceptions.RequestException("request_exception"))

        result_dto = self.plugin.cancel_certificate_request(
            self.order_id, order_meta, plugin_meta, self.barbican_meta_dto)

        self.assertEqual(cm.CertificateStatus.CA_UNAVAILABLE_FOR_REQUEST,
                         result_dto.status,
                         "result_dto_status incorrect")

    def test_cancel_raise_error_no_request_id(self):
        # Cancel without a stored request id is a programming error.
        order_meta = mock.ANY
        plugin_meta = {}

        self.assertRaises(
            cm.CertificateGeneralException,
            self.plugin.cancel_certificate_request,
            self.order_id,
            order_meta,
            plugin_meta,
            self.barbican_meta_dto
        )

    def test_check_status(self):
        # COMPLETE request with a cert attached -> CERTIFICATE_GENERATED,
        # and the PEM body is returned in the DTO.
        order_meta = mock.ANY
        plugin_meta = {dogtag_import.DogtagCAPlugin.REQUEST_ID:
                       self.request_id_mock}
        self.certclient_mock.get_request.return_value = self.request
        self.certclient_mock.get_cert.return_value = self.cert

        result_dto = self.plugin.check_certificate_status(
            self.order_id, order_meta, plugin_meta, self.barbican_meta_dto)

        self.certclient_mock.get_request.assert_called_once_with(
            self.request_id_mock)

        self.certclient_mock.get_cert.assert_called_once_with(
            self.cert_id_mock)

        self.assertEqual(cm.CertificateStatus.CERTIFICATE_GENERATED,
                         result_dto.status,
                         "result_dto_status incorrect")

        self.assertEqual(keys.get_certificate_pem(),
                         result_dto.certificate)

    def test_check_status_raise_error_no_request_id(self):
        # Status check without a stored request id is a programming error.
        order_meta = mock.ANY
        plugin_meta = {}

        self.assertRaises(
            cm.CertificateGeneralException,
            self.plugin.check_certificate_status,
            self.order_id,
            order_meta,
            plugin_meta,
            self.barbican_meta_dto
        )

    def test_check_status_rejected(self):
        # REJECTED -> CLIENT_DATA_ISSUE_SEEN, no certificate in the DTO.
        order_meta = mock.ANY
        plugin_meta = {dogtag_import.DogtagCAPlugin.REQUEST_ID:
                       self.request_id_mock}
        self.request.request_status = dogtag_cert.CertRequestStatus.REJECTED
        self.certclient_mock.get_request.return_value = self.request

        result_dto = self.plugin.check_certificate_status(
            self.order_id, order_meta, plugin_meta, self.barbican_meta_dto)

        self.certclient_mock.get_request.assert_called_once_with(
            self.request_id_mock)

        self.assertEqual(cm.CertificateStatus.CLIENT_DATA_ISSUE_SEEN,
                         result_dto.status,
                         "result_dto_status incorrect")

        self.assertIsNone(result_dto.certificate)

    def test_check_status_canceled(self):
        # CANCELED -> REQUEST_CANCELED, no certificate in the DTO.
        order_meta = mock.ANY
        plugin_meta = {dogtag_import.DogtagCAPlugin.REQUEST_ID:
                       self.request_id_mock}
        self.request.request_status = dogtag_cert.CertRequestStatus.CANCELED
        self.certclient_mock.get_request.return_value = self.request

        result_dto = self.plugin.check_certificate_status(
            self.order_id, order_meta, plugin_meta, self.barbican_meta_dto)

        self.certclient_mock.get_request.assert_called_once_with(
            self.request_id_mock)

        self.assertEqual(cm.CertificateStatus.REQUEST_CANCELED,
                         result_dto.status,
                         "result_dto_status incorrect")

        self.assertIsNone(result_dto.certificate)

    def test_check_status_pending(self):
        # PENDING -> WAITING_FOR_CA, no certificate in the DTO.
        order_meta = mock.ANY
        plugin_meta = {dogtag_import.DogtagCAPlugin.REQUEST_ID:
                       self.request_id_mock}
        self.request.request_status = dogtag_cert.CertRequestStatus.PENDING
        self.certclient_mock.get_request.return_value = self.request

        result_dto = self.plugin.check_certificate_status(
            self.order_id, order_meta, plugin_meta, self.barbican_meta_dto)

        self.certclient_mock.get_request.assert_called_once_with(
            self.request_id_mock)

        self.assertEqual(cm.CertificateStatus.WAITING_FOR_CA,
                         result_dto.status,
                         "result_dto_status incorrect")

        self.assertIsNone(result_dto.certificate)

    def test_check_status_raises_error_complete_no_cert(self):
        # COMPLETE but no cert retrievable -> internal error.
        order_meta = mock.ANY
        plugin_meta = {dogtag_import.DogtagCAPlugin.REQUEST_ID:
                       self.request_id_mock}
        self.certclient_mock.get_request.return_value = self.request
        self.certclient_mock.get_cert.return_value = None

        self.assertRaises(
            cm.CertificateGeneralException,
            self.plugin.check_certificate_status,
            self.order_id,
            order_meta,
            plugin_meta,
            self.barbican_meta_dto
        )

    def test_modify_request(self):
        # Modify = cancel the pending request, then resubmit; the order
        # returns to the waiting-for-CA state.
        order_meta = {
            cm.REQUEST_TYPE: cm.CertificateRequestType.SIMPLE_CMC_REQUEST,
            'request_data': base64.b64encode(keys.get_csr_pem())
        }
        plugin_meta = {dogtag_import.DogtagCAPlugin.REQUEST_ID:
                       self.request_id_mock}
        self._process_non_approved_profile_request(
            order_meta,
            plugin_meta,
            self.cfg.dogtag_plugin.simple_cmc_profile)
        self.certclient_mock.cancel_request.return_value = None
        self.certclient_mock.review_request.return_value = self.review_response

        result_dto = self.plugin.modify_certificate_request(
            self.order_id, order_meta, plugin_meta, self.barbican_meta_dto)

        self.certclient_mock.cancel_request.assert_called_once_with(
            self.request_id_mock, self.review_response)

        self.assertEqual(cm.CertificateStatus.WAITING_FOR_CA,
                         result_dto.status,
                         "result_dto_status incorrect")

    def test_modify_no_request_found(self):
        # Unknown request id on modify is treated as a client data issue.
        order_meta = mock.ANY
        plugin_meta = {dogtag_import.DogtagCAPlugin.REQUEST_ID:
                       self.request_id_mock}

        self.certclient_mock.review_request.side_effect = (
            pki.RequestNotFoundException("request_not_found"))

        result_dto = self.plugin.modify_certificate_request(
            self.order_id, order_meta, plugin_meta, self.barbican_meta_dto)

        self.certclient_mock.review_request.assert_called_once_with(
            self.request_id_mock)

        self.assertEqual(cm.CertificateStatus.CLIENT_DATA_ISSUE_SEEN,
                         result_dto.status,
                         "result_dto_status incorrect")

    def test_modify_conflicting_operation(self):
        # Conflicting operation during the cancel phase -> INVALID_OPERATION.
        order_meta = mock.ANY
        plugin_meta = {dogtag_import.DogtagCAPlugin.REQUEST_ID:
                       self.request_id_mock}
        self.certclient_mock.review_request.return_value = self.review_response
        self.certclient_mock.cancel_request.side_effect = (
            pki.ConflictingOperationException("conflicting_operation"))

        result_dto = self.plugin.modify_certificate_request(
            self.order_id, order_meta, plugin_meta, self.barbican_meta_dto)

        self.certclient_mock.cancel_request.assert_called_once_with(
            self.request_id_mock, self.review_response)

        self.assertEqual(cm.CertificateStatus.INVALID_OPERATION,
                         result_dto.status,
                         "result_dto_status incorrect")

    def test_modify_ca_unavailable(self):
        # Transport failure during review -> CA unavailable.
        order_meta = mock.ANY
        plugin_meta = {dogtag_import.DogtagCAPlugin.REQUEST_ID:
                       self.request_id_mock}

        self.certclient_mock.review_request.side_effect = (
            request_exceptions.RequestException("request_exception"))

        result_dto = self.plugin.modify_certificate_request(
            self.order_id, order_meta, plugin_meta, self.barbican_meta_dto)

        self.assertEqual(cm.CertificateStatus.CA_UNAVAILABLE_FOR_REQUEST,
                         result_dto.status,
                         "result_dto_status incorrect")

    def test_modify_raise_error_no_request_id(self):
        # Modify without a stored request id is a programming error.
        order_meta = mock.ANY
        plugin_meta = {}

        self.assertRaises(
            cm.CertificateGeneralException,
            self.plugin.modify_certificate_request,
            self.order_id,
            order_meta,
            plugin_meta,
            self.barbican_meta_dto
        )
barbican-2.0.0/barbican/tests/plugin/test_symantec.py0000664000567000056710000000655012701405673024063 0ustar jenkinsjenkins00000000000000# Copyright (c) 2013-2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock import testtools try: import barbican.plugin.interface.certificate_manager as cm import barbican.plugin.symantec as sym imports_ok = True except ImportError: # Symantec imports probably not available imports_ok = False from barbican.tests import utils @testtools.skipIf(not imports_ok, "Symantec imports not available") class WhenTestingSymantecPlugin(utils.BaseTestCase): def setUp(self): super(WhenTestingSymantecPlugin, self).setUp() self.order_meta = { 'cert_type': 'ssl123', 'organization': 'Shinra Corp', 'phone': '555-555-5555', 'so many things...': 'more...' } self.error_msg = 'Error Message Here' self.symantec = sym.SymantecCertificatePlugin() self.barbican_plugin_dto = cm.BarbicanMetaDTO() self.symantec_patcher = mock.patch( 'barbican.plugin.symantec._ca_create_order' ) self.mock_create_order = self.symantec_patcher.start() def tearDown(self): super(WhenTestingSymantecPlugin, self).tearDown() if hasattr(self, 'mock_create_order'): self.mock_create_order.stop() def test_successful_issue_certificate_request(self): self.mock_create_order.return_value = (True, None, None) order_id = '1234' plugin_meta = {} result = self.symantec.issue_certificate_request( order_id, self.order_meta, plugin_meta, self.barbican_plugin_dto ) self.assertEqual("waiting for CA", result.status) def test_unsuccessful_certificate_request_can_retry(self): self.mock_create_order.return_value = (False, self.error_msg, True) order_id = '1234' plugin_meta = {} result = self.symantec.issue_certificate_request( order_id, self.order_meta, plugin_meta, self.barbican_plugin_dto ) self.assertEqual("client data issue seen", result.status) def test_unsuccessful_certificate_request_no_retry(self): self.mock_create_order.return_value = (False, self.error_msg, False) order_id = '12345' plugin_meta = {} result = self.symantec.issue_certificate_request( order_id, self.order_meta, plugin_meta, self.barbican_plugin_dto ) self.assertEqual("CA unavailable for request", result.status) def 
test_should_raise_unsupported_certificate_request(self): order_id = '1234' plugin_meta = {} self.assertRaises( NotImplementedError, self.symantec.check_certificate_status, order_id, self.order_meta, plugin_meta, self.barbican_plugin_dto ) barbican-2.0.0/barbican/tests/plugin/test_simple_certificate_manager.py0000664000567000056710000000636112701405673027565 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import testtools import barbican.plugin.interface.certificate_manager as cm import barbican.plugin.simple_certificate_manager as simple class WhenTestingSimpleCertificateManagerPlugin(testtools.TestCase): def setUp(self): super(WhenTestingSimpleCertificateManagerPlugin, self).setUp() self.plugin = simple.SimpleCertificatePlugin() def test_issue_certificate_request(self): result = self.plugin.issue_certificate_request(None, None, None, None) self.assertEqual(cm.CertificateStatus.WAITING_FOR_CA, result.status) def test_check_certificate_status(self): result = self.plugin.check_certificate_status(None, None, None, None) self.assertEqual( cm.CertificateStatus.CERTIFICATE_GENERATED, result.status) def test_modify_certificate_request(self): result = self.plugin.modify_certificate_request(None, None, None, None) self.assertEqual(cm.CertificateStatus.WAITING_FOR_CA, result.status) def test_cancel_certificate_request(self): result = self.plugin.cancel_certificate_request(None, None, None, None) self.assertEqual(cm.CertificateStatus.REQUEST_CANCELED, 
result.status) def test_supports(self): result = self.plugin.supports(None) self.assertTrue(result) def test_get_ca_info(self): result = self.plugin.get_ca_info() name = self.plugin.get_default_ca_name() self.assertIn(name, result) self.assertEqual(name, result[name][cm.INFO_NAME]) self.assertEqual(self.plugin.get_default_signing_cert(), result[name][cm.INFO_CA_SIGNING_CERT]) def test_supported_request_types(self): result = self.plugin.supported_request_types() supported_list = [cm.CertificateRequestType.CUSTOM_REQUEST, cm.CertificateRequestType.SIMPLE_CMC_REQUEST, cm.CertificateRequestType.FULL_CMC_REQUEST, cm.CertificateRequestType.STORED_KEY_REQUEST] self.assertEqual(supported_list, result) class WhenTestingSimpleCertificateEventManagerPlugin(testtools.TestCase): def setUp(self): super(WhenTestingSimpleCertificateEventManagerPlugin, self).setUp() self.plugin = simple.SimpleCertificateEventPlugin() def test_notify_ca_is_unavailable(self): # Test that eventing plugin method does not have side effects such as # raising exceptions. self.plugin.notify_ca_is_unavailable(None, None, None, None) def test_notify_certificate_is_ready(self): # Test that eventing plugin method does not have side effects such as # raising exceptions. self.plugin.notify_certificate_is_ready(None, None, None) barbican-2.0.0/barbican/tests/plugin/test_kmip.py0000664000567000056710000012042012701405673023171 0ustar jenkinsjenkins00000000000000# Copyright (c) 2014 Johns Hopkins University Applied Physics Laboratory # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. 
# See the License for the specific language governing permissions and
# limitations under the License.

import base64
import socket
import stat

import mock

from barbican.plugin.interface import secret_store
from barbican.tests import keys
from barbican.tests import utils

from kmip.core import attributes as attr
from kmip.core import enums
from kmip.core.factories import attributes
from kmip.core.messages import contents
from kmip.core import misc
from kmip.core import objects
from kmip.core import secrets
from kmip.core.secrets import OpaqueObject as Opaque
from kmip.services import kmip_client as proxy
from kmip.services import results

from barbican.plugin import kmip_secret_store as kss


def get_sample_opaque_secret():
    # Build a KMIP OpaqueObject wrapping the shared test symmetric key bytes.
    opaque_type = Opaque.OpaqueDataType(enums.OpaqueDataType.NONE)
    opaque_value = Opaque.OpaqueDataValue(base64.b64decode(
        utils.get_symmetric_key()))
    return Opaque(opaque_type, opaque_value)


def get_sample_symmetric_key(key_b64=utils.get_symmetric_key(),
                             key_length=128,
                             algorithm=enums.CryptographicAlgorithm.AES):
    # Build a KMIP SymmetricKey (RAW format) from base64-encoded key bytes.
    # NOTE(review): the defaults are evaluated once at import time, which is
    # fine here because they are immutable values.
    key_material = objects.KeyMaterial(base64.b64decode(key_b64))
    key_value = objects.KeyValue(key_material)
    key_block = objects.KeyBlock(
        key_format_type=misc.KeyFormatType(enums.KeyFormatType.RAW),
        key_compression_type=None,
        key_value=key_value,
        cryptographic_algorithm=attr.CryptographicAlgorithm(algorithm),
        cryptographic_length=attr.CryptographicLength(key_length),
        key_wrapping_data=None)
    return secrets.SymmetricKey(key_block)


def get_sample_public_key(pkcs1=False):
    # Build a KMIP PublicKey from the shared test RSA key: PKCS#1 DER when
    # pkcs1 is True, otherwise X.509 SubjectPublicKeyInfo DER.
    if pkcs1:
        public_key = kss.get_public_key_der_pkcs1(keys.get_public_key_pem())
        key_format_type = misc.KeyFormatType(enums.KeyFormatType.PKCS_1)
    else:
        public_key = keys.get_public_key_der()
        key_format_type = misc.KeyFormatType(enums.KeyFormatType.X_509)

    key_material = objects.KeyMaterial(public_key)
    key_value = objects.KeyValue(key_material)
    key_block = objects.KeyBlock(
        key_format_type=key_format_type,
        key_compression_type=None,
        key_value=key_value,
        cryptographic_algorithm=attr.CryptographicAlgorithm(
            enums.CryptographicAlgorithm.RSA),
        cryptographic_length=attr.CryptographicLength(2048),
        key_wrapping_data=None)
    return secrets.PublicKey(key_block)


def get_sample_private_key(pkcs1=False):
    # Build a KMIP PrivateKey from the shared test RSA key: PKCS#1 DER when
    # pkcs1 is True, otherwise PKCS#8 DER.
    if pkcs1:
        private_key = kss.get_private_key_der_pkcs1(keys.get_private_key_pem())
        key_format_type = misc.KeyFormatType(enums.KeyFormatType.PKCS_1)
    else:
        private_key = keys.get_private_key_der()
        key_format_type = misc.KeyFormatType(enums.KeyFormatType.PKCS_8)

    key_material = objects.KeyMaterial(private_key)
    key_value = objects.KeyValue(key_material)
    key_block = objects.KeyBlock(
        key_format_type=key_format_type,
        key_compression_type=None,
        key_value=key_value,
        cryptographic_algorithm=attr.CryptographicAlgorithm(
            enums.CryptographicAlgorithm.RSA),
        cryptographic_length=attr.CryptographicLength(2048),
        key_wrapping_data=None)
    return secrets.PrivateKey(key_block)


def get_sample_certificate():
    # Build a KMIP X.509 Certificate from the shared test cert DER bytes.
    return secrets.Certificate(
        certificate_type=enums.CertificateTypeEnum.X_509,
        certificate_value=keys.get_certificate_der())


@utils.parameterized_test_case
class WhenTestingKMIPSecretStore(utils.BaseTestCase):
    """Test using the KMIP server backend for SecretStore."""

    def setUp(self):
        super(WhenTestingKMIPSecretStore, self).setUp()

        self.expected_username = "sample_username"
        self.expected_password = "sample_password"

        CONF = kss.CONF
        CONF.kmip_plugin.username = self.expected_username
        CONF.kmip_plugin.password = self.expected_password
        CONF.kmip_plugin.keyfile = None
        CONF.kmip_plugin.pkcs1_only = False

        self.secret_store = kss.KMIPSecretStore(CONF)
        self.credential = self.secret_store.credential
        self.symmetric_type = secret_store.SecretType.SYMMETRIC

        self.sample_secret_features = {
            'key_format_type': enums.KeyFormatType.RAW,
            'key_value': {
                'bytes': bytearray(b'\x00\x00\x00')
            },
            'cryptographic_algorithm': enums.CryptographicAlgorithm.AES,
            'cryptographic_length': 128
        }

        self.symmetric_key_uuid = 'dde870ad-cea3-41a3-9bb9-e8ab579a2f91'
        self.public_key_uuid = 'cb908abb-d363-4d9f-8ef2-5e84d27dd25c'
        self.private_key_uuid = '2d4c0544-4ec6-45b7-81cd-b23c75744eac'

        self.sample_secret = get_sample_symmetric_key()

        # Every KMIP proxy operation is replaced by a MagicMock that returns
        # a canned SUCCESS result carrying the fixture uuids/secret above.
        self.secret_store.client.proxy.open = mock.MagicMock(
            proxy.KMIPProxy().open)
        self.secret_store.client.proxy.close = mock.MagicMock(
            proxy.KMIPProxy().close)

        self.secret_store.client.proxy.create = mock.MagicMock(
            proxy.KMIPProxy().create, return_value=results.CreateResult(
                contents.ResultStatus(enums.ResultStatus.SUCCESS),
                uuid=attr.UniqueIdentifier(
                    self.symmetric_key_uuid)))

        self.secret_store.client.proxy.create_key_pair = mock.MagicMock(
            proxy.KMIPProxy().create_key_pair,
            return_value=results.CreateKeyPairResult(
                contents.ResultStatus(enums.ResultStatus.SUCCESS),
                private_key_uuid=attr.UniqueIdentifier(self.private_key_uuid),
                public_key_uuid=attr.UniqueIdentifier(self.public_key_uuid)))

        self.secret_store.client.proxy.register = mock.MagicMock(
            proxy.KMIPProxy().register,
            return_value=results.RegisterResult(
                contents.ResultStatus(enums.ResultStatus.SUCCESS),
                uuid=attr.UniqueIdentifier('uuid')))

        self.secret_store.client.proxy.destroy = mock.MagicMock(
            proxy.KMIPProxy().destroy,
            return_value=results.DestroyResult(
                contents.ResultStatus(enums.ResultStatus.SUCCESS)))

        self.secret_store.client.proxy.get = mock.MagicMock(
            proxy.KMIPProxy().get, return_value=results.GetResult(
                contents.ResultStatus(enums.ResultStatus.SUCCESS),
                object_type=attr.ObjectType(enums.ObjectType.SYMMETRIC_KEY),
                secret=self.sample_secret))

        self.attribute_factory = attributes.AttributeFactory()

    # --------------- TEST CONFIG OPTIONS ---------------------------------

    def test_enable_pkcs1_only_config_option(self):
        CONF = kss.CONF
        CONF.kmip_plugin.pkcs1_only = True
        secret_store = kss.KMIPSecretStore(CONF)
        self.assertTrue(secret_store.pkcs1_only)

    # --------------- TEST GENERATE_SUPPORTS ---------------------------------

    def test_generate_supports_aes(self):
        # AES is supported at each of its standard key sizes.
        key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.AES,
                                        None, 'mode')
        for x in [128, 192, 256]:
            key_spec.bit_length = x
            self.assertTrue(self.secret_store.generate_supports(key_spec))

    def test_generate_supports_des(self):
        key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.DES,
                                        None, 'mode')
        for x in [56]:
            key_spec.bit_length = x
            self.assertTrue(self.secret_store.generate_supports(key_spec))

    def test_generate_supports_desede(self):
        key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.DESEDE,
                                        None, 'mode')
        for x in [56, 112, 168]:
            key_spec.bit_length = x
            self.assertTrue(self.secret_store.generate_supports(key_spec))

    def test_generate_supports_rsa(self):
        key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.RSA,
                                        None, 'mode')
        for x in [2048, 3072, 4096]:
            key_spec.bit_length = x
            self.assertTrue(self.secret_store.generate_supports(key_spec))

    def test_generate_supports_dsa(self):
        key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.DSA,
                                        None, 'mode')
        for x in [2048, 3072]:
            key_spec.bit_length = x
            self.assertTrue(self.secret_store.generate_supports(key_spec))

    def test_generate_supports_with_invalid_alg(self):
        key_spec = secret_store.KeySpec('invalid_alg', 56, 'mode')
        self.assertFalse(self.secret_store.generate_supports(key_spec))

    def test_generate_supports_with_valid_alg_invalid_bit_length(self):
        # 56 bits is not a valid AES key size.
        key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.AES,
                                        56, 'mode')
        self.assertFalse(self.secret_store.generate_supports(key_spec))

    # ------------ TEST GENERATE_SYMMETRIC -----------------------------------

    def test_generate_symmetric_key_assert_called(self):
        key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.AES,
                                        128, 'mode')
        self.secret_store.generate_symmetric_key(key_spec)

        self.secret_store.client.proxy.create.assert_called_once_with(
            enums.ObjectType.SYMMETRIC_KEY, mock.ANY)

    def test_generate_symmetric_key_return_value(self):
        key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.AES,
                                        128, 'mode')
        return_value = self.secret_store.generate_symmetric_key(key_spec)
        # The plugin must hand back the KMIP-assigned uuid as its metadata.
        expected = {kss.KMIPSecretStore.KEY_UUID: self.symmetric_key_uuid}

        self.assertEqual(expected, return_value)

    def test_generate_symmetric_key_server_error_occurs(self):
        # A non-SUCCESS KMIP result surfaces as SecretGeneralException.
        self.secret_store.client.proxy.create = mock.MagicMock(
            proxy.KMIPProxy().create, return_value=results.CreateResult(
                contents.ResultStatus(enums.ResultStatus.OPERATION_FAILED)))
        key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.AES,
                                        128, 'mode')

        self.assertRaises(
            secret_store.SecretGeneralException,
            self.secret_store.generate_symmetric_key,
            key_spec)

    def test_generate_symmetric_key_invalid_algorithm(self):
        key_spec = secret_store.KeySpec('invalid_algorithm',
                                        128, 'mode')

        self.assertRaises(
            secret_store.SecretAlgorithmNotSupportedException,
            self.secret_store.generate_symmetric_key,
            key_spec)

    def test_generate_symmetric_key_valid_algorithm_invalid_bit_length(self):
        key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.AES,
                                        56, 'mode')

        self.assertRaises(
            secret_store.SecretAlgorithmNotSupportedException,
            self.secret_store.generate_symmetric_key,
            key_spec)

    def test_generate_symmetric_key_not_symmetric_algorithm(self):
        # Asking the symmetric path to generate RSA is a plugin-level error.
        key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.RSA,
                                        2048, 'mode')

        self.assertRaises(
            kss.KMIPSecretStoreError,
            self.secret_store.generate_symmetric_key,
            key_spec)

    def test_generate_symmetric_key_error_opening_connection(self):
        # Socket failures while connecting map to SecretGeneralException.
        self.secret_store.client.proxy.open = mock.Mock(
            side_effect=socket.error)

        key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.AES,
                                        128, 'mode')

        self.assertRaises(
            secret_store.SecretGeneralException,
            self.secret_store.generate_symmetric_key,
            key_spec)

    # ---------------- TEST GENERATE_ASYMMETRIC ------------------------------

    def test_generate_asymmetric_key_assert_called(self):
        key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.RSA,
                                        2048, 'mode')
        self.secret_store.generate_asymmetric_key(key_spec)

        self.secret_store.client.proxy.create_key_pair.assert_called_once_with(
            common_template_attribute=mock.ANY)

    def test_generate_asymmetric_key_return_value(self):
        key_spec =
secret_store.KeySpec(secret_store.KeyAlgorithm.RSA, 2048, 'mode') return_value = self.secret_store.generate_asymmetric_key(key_spec) expected_private_key_meta = { kss.KMIPSecretStore.KEY_UUID: self.private_key_uuid} expected_public_key_meta = { kss.KMIPSecretStore.KEY_UUID: self.public_key_uuid} expected_passphrase_meta = None self.assertEqual( expected_private_key_meta, return_value.private_key_meta) self.assertEqual( expected_public_key_meta, return_value.public_key_meta) self.assertEqual( expected_passphrase_meta, return_value.passphrase_meta) def test_generate_asymmetric_key_server_error_occurs(self): self.secret_store.client.proxy.create_key_pair = mock.MagicMock( proxy.KMIPProxy().create_key_pair, return_value=results.CreateKeyPairResult( contents.ResultStatus(enums.ResultStatus.OPERATION_FAILED))) key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.RSA, 2048, 'mode') self.assertRaises( secret_store.SecretGeneralException, self.secret_store.generate_asymmetric_key, key_spec) def test_generate_asymmetric_key_invalid_algorithm(self): key_spec = secret_store.KeySpec('invalid_algorithm', 160, 'mode') self.assertRaises( secret_store.SecretAlgorithmNotSupportedException, self.secret_store.generate_asymmetric_key, key_spec) def test_generate_asymmetric_key_valid_algorithm_invalid_bit_length(self): key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.RSA, 56, 'mode') self.assertRaises( secret_store.SecretAlgorithmNotSupportedException, self.secret_store.generate_asymmetric_key, key_spec) def test_generate_asymmetric_key_not_asymmetric_algorithm(self): key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.AES, 128, 'mode') self.assertRaises( kss.KMIPSecretStoreError, self.secret_store.generate_asymmetric_key, key_spec) def test_generate_asymmetric_key_check_for_passphrase(self): key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.RSA, 2048, 'mode', 'passphrase') self.assertRaises( kss.KMIPSecretStoreError, 
self.secret_store.generate_asymmetric_key, key_spec) def test_generate_asymmetric_key_error_opening_connection(self): self.secret_store.client.proxy.open = mock.Mock( side_effect=socket.error) key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.RSA, 2048, 'mode') self.assertRaises( secret_store.SecretGeneralException, self.secret_store.generate_asymmetric_key, key_spec) # ----------------- TEST STORE ------------------------------------------- def test_store_symmetric_secret_assert_called(self): key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.AES, 128, 'mode') sym_key = utils.get_symmetric_key() secret_dto = secret_store.SecretDTO(secret_store.SecretType.SYMMETRIC, sym_key, key_spec, 'content_type', transport_key=None) self.secret_store.store_secret(secret_dto) self.secret_store.client.proxy.register.assert_called_once_with( enums.ObjectType.SYMMETRIC_KEY, mock.ANY, mock.ANY) register_mock = self.secret_store.client.proxy.register register_call_args, _ = register_mock.call_args actual_secret = register_call_args[2] self.assertEqual( 128, actual_secret.key_block.cryptographic_length.value) self.assertEqual( attr.CryptographicAlgorithm(enums.CryptographicAlgorithm.AES), actual_secret.key_block.cryptographic_algorithm) self.assertEqual( base64.b64decode(sym_key), actual_secret.key_block.key_value.key_material.value) def test_store_symmetric_secret_return_value(self): key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.AES, 128, 'mode') sym_key = utils.get_symmetric_key() secret_dto = secret_store.SecretDTO(secret_store.SecretType.SYMMETRIC, sym_key, key_spec, 'content_type', transport_key=None) return_value = self.secret_store.store_secret(secret_dto) expected = {kss.KMIPSecretStore.KEY_UUID: 'uuid'} self.assertEqual(expected, return_value) def test_store_passphrase_secret_assert_called(self): key_spec = secret_store.KeySpec(None, None, None) passphrase = "supersecretpassphrase" secret_dto = 
secret_store.SecretDTO(secret_store.SecretType.PASSPHRASE, base64.b64encode(passphrase), key_spec, 'content_type', transport_key=None) self.secret_store.store_secret(secret_dto) self.secret_store.client.proxy.register.assert_called_once_with( enums.ObjectType.SECRET_DATA, mock.ANY, mock.ANY) proxy = self.secret_store.client.proxy register_call_args, _ = proxy.register.call_args actual_secret = register_call_args[2] self.assertEqual( None, actual_secret.key_block.cryptographic_length) self.assertEqual( None, actual_secret.key_block.cryptographic_algorithm) self.assertEqual( passphrase, actual_secret.key_block.key_value.key_material.value) def test_store_passphrase_secret_return_value(self): key_spec = secret_store.KeySpec(None, None, None) passphrase = "supersecretpassphrase" secret_dto = secret_store.SecretDTO(secret_store.SecretType.PASSPHRASE, base64.b64encode(passphrase), key_spec, 'content_type', transport_key=None) return_value = self.secret_store.store_secret(secret_dto) expected = {kss.KMIPSecretStore.KEY_UUID: 'uuid'} self.assertEqual(0, cmp(expected, return_value)) def test_store_opaque_secret_assert_called(self): key_spec = secret_store.KeySpec(None, None, None) opaque = ('\x00\x01\x02\x03\x04\x05\x06\x07') secret_dto = secret_store.SecretDTO(secret_store.SecretType.OPAQUE, base64.b64encode(opaque), key_spec, 'content_type', transport_key=None) self.secret_store.store_secret(secret_dto) self.secret_store.client.proxy.register.assert_called_once_with( enums.ObjectType.OPAQUE_DATA, mock.ANY, mock.ANY) proxy = self.secret_store.client.proxy register_call_args, _ = proxy.register.call_args actual_secret = register_call_args[2] self.assertEqual( Opaque.OpaqueDataType(enums.OpaqueDataType.NONE), actual_secret.opaque_data_type) self.assertEqual( Opaque.OpaqueDataValue(opaque), actual_secret.opaque_data_value) def test_store_opaque_secret_return_value(self): key_spec = secret_store.KeySpec(None, None, None) opaque = ('\x00\x01\x02\x03\x04\x05\x06\x07') secret_dto 
= secret_store.SecretDTO(secret_store.SecretType.OPAQUE, base64.b64encode(opaque), key_spec, 'content_type', transport_key=None) return_value = self.secret_store.store_secret(secret_dto) expected = {kss.KMIPSecretStore.KEY_UUID: 'uuid'} self.assertEqual(0, cmp(expected, return_value)) @utils.parameterized_dataset({ 'private_pkcs8': [secret_store.SecretType.PRIVATE, keys.get_private_key_pem(), enums.ObjectType.PRIVATE_KEY, keys.get_private_key_der(), False], 'private_pkcs1': [secret_store.SecretType.PRIVATE, keys.get_private_key_pem(), enums.ObjectType.PRIVATE_KEY, kss.get_private_key_der_pkcs1( keys.get_private_key_pem()), True], 'public_pkcs8': [secret_store.SecretType.PUBLIC, keys.get_public_key_pem(), enums.ObjectType.PUBLIC_KEY, keys.get_public_key_der(), False], 'public_pkcs1': [secret_store.SecretType.PUBLIC, keys.get_public_key_pem(), enums.ObjectType.PUBLIC_KEY, kss.get_public_key_der_pkcs1( keys.get_public_key_pem()), True], }) def test_store_asymmetric_key_secret_assert_called(self, barbican_type, barbican_key, kmip_type, kmip_key, pkcs1_only): key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.RSA, 2048) secret_dto = secret_store.SecretDTO(barbican_type, base64.b64encode(barbican_key), key_spec, 'content_type') self.secret_store.pkcs1_only = pkcs1_only self.secret_store.store_secret(secret_dto) self.secret_store.client.proxy.register.assert_called_once_with( kmip_type, mock.ANY, mock.ANY) proxy = self.secret_store.client.proxy register_call_args, _ = proxy.register.call_args actual_secret = register_call_args[2] self.assertEqual( 2048, actual_secret.key_block.cryptographic_length.value) self.assertEqual( attr.CryptographicAlgorithm(enums.CryptographicAlgorithm.RSA), actual_secret.key_block.cryptographic_algorithm) self.assertEqual( kmip_key, actual_secret.key_block.key_value.key_material.value) @utils.parameterized_dataset({ 'private_pkcs8': [secret_store.SecretType.PRIVATE, keys.get_private_key_pem(), False], 'private_pkcs1': 
[secret_store.SecretType.PRIVATE, keys.get_private_key_pem(), True], 'public_pkcs8': [secret_store.SecretType.PUBLIC, keys.get_public_key_pem(), False], 'public_pkcs1': [secret_store.SecretType.PUBLIC, keys.get_public_key_pem(), True], }) def test_store_asymmetric_key_secret_return_value(self, barbican_type, barbican_key, pkcs1_only): key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.RSA, 2048) secret_dto = secret_store.SecretDTO(barbican_type, base64.b64encode(barbican_key), key_spec, 'content_type') self.secret_store.pkcs1_only = pkcs1_only return_value = self.secret_store.store_secret(secret_dto) expected = {kss.KMIPSecretStore.KEY_UUID: 'uuid'} self.assertEqual(expected, return_value) @utils.parameterized_dataset({ 'rsa': [secret_store.KeyAlgorithm.RSA, 2048], 'no_key_spec': [None, None] }) def test_store_certificate_secret_assert_called( self, algorithm, bit_length): key_spec = secret_store.KeySpec(algorithm, bit_length) secret_dto = secret_store.SecretDTO( secret_store.SecretType.CERTIFICATE, base64.b64encode(keys.get_certificate_pem()), key_spec, 'content_type') self.secret_store.store_secret(secret_dto) self.secret_store.client.proxy.register.assert_called_once_with( enums.ObjectType.CERTIFICATE, mock.ANY, mock.ANY) proxy = self.secret_store.client.proxy register_call_args, _ = proxy.register.call_args actual_secret = register_call_args[2] self.assertEqual( keys.get_certificate_der(), actual_secret.certificate_value.value) def test_store_certificate_secret_return_value(self): key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.RSA, 2048) secret_dto = secret_store.SecretDTO( secret_store.SecretType.CERTIFICATE, base64.b64encode(keys.get_certificate_pem()), key_spec, 'content_type') return_value = self.secret_store.store_secret(secret_dto) expected = {kss.KMIPSecretStore.KEY_UUID: 'uuid'} self.assertEqual(expected, return_value) def test_store_secret_server_error_occurs(self): self.secret_store.client.proxy.register = mock.MagicMock( 
proxy.KMIPProxy().register, return_value=results.RegisterResult( contents.ResultStatus(enums.ResultStatus.OPERATION_FAILED))) key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.AES, 128, 'mode') secret_dto = secret_store.SecretDTO(secret_store.SecretType.SYMMETRIC, utils.get_symmetric_key(), key_spec, 'content_type', transport_key=None) self.assertRaises( secret_store.SecretGeneralException, self.secret_store.store_secret, secret_dto) def test_store_secret_invalid_algorithm(self): key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.DSA, 128, 'mode') secret_dto = secret_store.SecretDTO(secret_store.SecretType.SYMMETRIC, "AAAA", key_spec, 'content_type', transport_key=None) self.assertRaises( secret_store.SecretAlgorithmNotSupportedException, self.secret_store.store_secret, secret_dto) def test_store_secret_valid_algorithm_invalid_bit_length(self): key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.AES, 56, 'mode') secret_dto = secret_store.SecretDTO(secret_store.SecretType.SYMMETRIC, "AAAA", key_spec, 'content_type', transport_key=None) self.assertRaises( secret_store.SecretAlgorithmNotSupportedException, self.secret_store.store_secret, secret_dto) def test_store_secret_error_opening_connection(self): self.secret_store.client.proxy.open = mock.Mock( side_effect=socket.error) key_spec = secret_store.KeySpec(secret_store.KeyAlgorithm.AES, 128, 'mode') secret_dto = secret_store.SecretDTO(secret_store.SecretType.SYMMETRIC, utils.get_symmetric_key(), key_spec, 'content_type', transport_key=None) self.assertRaises( secret_store.SecretGeneralException, self.secret_store.store_secret, secret_dto) # --------------- TEST GET ----------------------------------------------- @utils.parameterized_dataset({ 'symmetric': [get_sample_symmetric_key(), secret_store.SecretType.SYMMETRIC, enums.ObjectType.SYMMETRIC_KEY, misc.KeyFormatType(enums.KeyFormatType.RAW), utils.get_symmetric_key(), False], 'hmac_sha1': [get_sample_symmetric_key( 
algorithm=enums.CryptographicAlgorithm.HMAC_SHA1), secret_store.SecretType.SYMMETRIC, enums.ObjectType.SYMMETRIC_KEY, misc.KeyFormatType(enums.KeyFormatType.RAW), utils.get_symmetric_key(), False], 'hmac_sha256': [get_sample_symmetric_key( algorithm=enums.CryptographicAlgorithm.HMAC_SHA256), secret_store.SecretType.SYMMETRIC, enums.ObjectType.SYMMETRIC_KEY, misc.KeyFormatType(enums.KeyFormatType.RAW), utils.get_symmetric_key(), False], 'hmac_sha384': [get_sample_symmetric_key( algorithm=enums.CryptographicAlgorithm.HMAC_SHA384), secret_store.SecretType.SYMMETRIC, enums.ObjectType.SYMMETRIC_KEY, misc.KeyFormatType(enums.KeyFormatType.RAW), utils.get_symmetric_key(), False], 'hmac_sha512': [get_sample_symmetric_key( algorithm=enums.CryptographicAlgorithm.HMAC_SHA512), secret_store.SecretType.SYMMETRIC, enums.ObjectType.SYMMETRIC_KEY, misc.KeyFormatType(enums.KeyFormatType.RAW), utils.get_symmetric_key(), False], 'triple_des': [get_sample_symmetric_key( key_b64=utils.get_triple_des_key(), key_length=192, algorithm=enums.CryptographicAlgorithm.TRIPLE_DES), secret_store.SecretType.SYMMETRIC, enums.ObjectType.SYMMETRIC_KEY, misc.KeyFormatType(enums.KeyFormatType.RAW), utils.get_triple_des_key(), False], 'opaque': [get_sample_opaque_secret(), secret_store.SecretType.OPAQUE, enums.ObjectType.OPAQUE_DATA, misc.KeyFormatType(enums.KeyFormatType.RAW), utils.get_symmetric_key(), False], 'public_key': [get_sample_public_key(), secret_store.SecretType.PUBLIC, enums.ObjectType.PUBLIC_KEY, misc.KeyFormatType(enums.KeyFormatType.X_509), base64.b64encode(keys.get_public_key_pem()), False], 'public_key_pkcs1': [get_sample_public_key(pkcs1=True), secret_store.SecretType.PUBLIC, enums.ObjectType.PUBLIC_KEY, misc.KeyFormatType(enums.KeyFormatType.PKCS_1), base64.b64encode(keys.get_public_key_pem()), True], 'private_key': [get_sample_private_key(), secret_store.SecretType.PRIVATE, enums.ObjectType.PRIVATE_KEY, misc.KeyFormatType(enums.KeyFormatType.PKCS_8), 
base64.b64encode(keys.get_private_key_pem()), False], 'private_key_pkcs1': [get_sample_private_key(pkcs1=True), secret_store.SecretType.PRIVATE, enums.ObjectType.PRIVATE_KEY, misc.KeyFormatType(enums.KeyFormatType.PKCS_1), base64.b64encode(keys.get_private_key_pem()), True], 'certificate': [get_sample_certificate(), secret_store.SecretType.CERTIFICATE, enums.ObjectType.CERTIFICATE, None, base64.b64encode(keys.get_certificate_pem()), False] }) def test_get_secret(self, kmip_secret, secret_type, kmip_type, key_format_type, expected_secret, pkcs1_only): self.secret_store.pkcs1_only = pkcs1_only self.secret_store.client.proxy.get = mock.MagicMock( proxy.KMIPProxy().get, return_value=results.GetResult( contents.ResultStatus(enums.ResultStatus.SUCCESS), object_type=attr.ObjectType(kmip_type), secret=kmip_secret)) uuid = utils.generate_test_uuid(0) metadata = {kss.KMIPSecretStore.KEY_UUID: uuid} secret_dto = self.secret_store.get_secret(secret_type, metadata) self.secret_store.client.proxy.get.assert_called_once_with(uuid) self.assertEqual(secret_store.SecretDTO, type(secret_dto)) self.assertEqual(secret_type, secret_dto.type) self.assertEqual(expected_secret, secret_dto.secret) def test_get_secret_symmetric_return_value_invalid_key_material_type(self): sample_secret = self.sample_secret sample_secret.key_block.key_value.key_material = 'invalid_type' self.secret_store.client.proxy.get = mock.MagicMock( proxy.KMIPProxy().get, return_value=results.GetResult( contents.ResultStatus(enums.ResultStatus.SUCCESS), object_type=attr.ObjectType(enums.ObjectType.SYMMETRIC_KEY), secret=sample_secret)) metadata = {kss.KMIPSecretStore.KEY_UUID: self.symmetric_key_uuid} self.assertRaises( secret_store.SecretGeneralException, self.secret_store.get_secret, self.symmetric_type, metadata) def test_get_secret_symmetric_server_error_occurs(self): self.secret_store.client.proxy.get = mock.MagicMock( proxy.KMIPProxy().get, return_value=results.GetResult( 
contents.ResultStatus(enums.ResultStatus.OPERATION_FAILED))) metadata = {kss.KMIPSecretStore.KEY_UUID: self.symmetric_key_uuid} self.assertRaises( secret_store.SecretGeneralException, self.secret_store.get_secret, self.symmetric_type, metadata) def test_get_secret_symmetric_error_opening_connection(self): self.secret_store.client.proxy.open = mock.Mock( side_effect=socket.error) metadata = {kss.KMIPSecretStore.KEY_UUID: self.symmetric_key_uuid} self.assertRaises( secret_store.SecretGeneralException, self.secret_store.get_secret, self.symmetric_type, metadata) # ---------------- TEST DELETE ------------------------------------------- def test_delete_with_null_metadata_values(self): metadata = {kss.KMIPSecretStore.KEY_UUID: None} self.assertIsNone(self.secret_store.delete_secret(metadata)) def test_delete_secret_assert_called(self): metadata = {kss.KMIPSecretStore.KEY_UUID: self.symmetric_key_uuid} self.secret_store.delete_secret(metadata) self.secret_store.client.proxy.destroy.assert_called_once_with( self.symmetric_key_uuid) def test_delete_secret_return_value(self): metadata = {kss.KMIPSecretStore.KEY_UUID: self.symmetric_key_uuid} return_value = self.secret_store.delete_secret(metadata) self.assertIsNone(return_value) def test_delete_secret_server_error_occurs(self): self.secret_store.client.proxy.destroy = mock.MagicMock( proxy.KMIPProxy().destroy, return_value=results.DestroyResult( contents.ResultStatus(enums.ResultStatus.OPERATION_FAILED))) metadata = {kss.KMIPSecretStore.KEY_UUID: self.symmetric_key_uuid} self.assertRaises( secret_store.SecretGeneralException, self.secret_store.delete_secret, metadata) def test_delete_secret_error_opening_connection(self): self.secret_store.client.proxy.open = mock.Mock( side_effect=socket.error) metadata = {kss.KMIPSecretStore.KEY_UUID: self.symmetric_key_uuid} self.assertRaises( secret_store.SecretGeneralException, self.secret_store.delete_secret, metadata) # -------------- TEST HELPER FUNCTIONS 
----------------------------------- def test_credential(self): actual_credential = self.secret_store.credential self.assertEqual( self.expected_username, actual_credential.credential_value.username.value) self.assertEqual( self.expected_password, actual_credential.credential_value.password.value) def test_credential_None(self): CONF = kss.CONF CONF.kmip_plugin.username = None CONF.kmip_plugin.password = None CONF.kmip_plugin.keyfile = None secret_store = kss.KMIPSecretStore(CONF) self.assertIsNone(secret_store.credential) def test_map_type_ss_to_kmip_valid_type(self): ss_types = [secret_store.SecretType.SYMMETRIC, secret_store.SecretType.PUBLIC, secret_store.SecretType.PRIVATE] for ss_type in ss_types: self.assertIsNotNone( self.secret_store._map_type_ss_to_kmip(ss_type)) def test_map_type_ss_to_kmip_invalid_type(self): object_type, key_format_type = ( self.secret_store._map_type_ss_to_kmip('bad_type')) self.assertIsNone(object_type) self.assertIsNone(key_format_type) def test_validate_keyfile_permissions_good(self): config = {'return_value.st_mode': (stat.S_IRUSR | stat.S_IFREG)} with mock.patch('os.stat', **config): self.assertIsNone( self.secret_store._validate_keyfile_permissions('/some/path/')) def test_check_keyfile_permissions_bad(self): config = {'return_value.st_mode': (stat.S_IWOTH | stat.S_IFREG)} with mock.patch('os.stat', **config): self.assertRaises( kss.KMIPSecretStoreError, self.secret_store._validate_keyfile_permissions, '/some/path/') def test_checks_keyfile_permissions(self): config = {'return_value': True} func = ("barbican.plugin.kmip_secret_store." "KMIPSecretStore._validate_keyfile_permissions") with mock.patch(func, **config) as m: CONF = kss.CONF CONF.kmip_plugin.keyfile = '/some/path' kss.KMIPSecretStore(CONF) self.assertEqual(1, len(m.mock_calls)) barbican-2.0.0/barbican/tests/plugin/test_resource.py0000664000567000056710000002100712701405673024061 0ustar jenkinsjenkins00000000000000# Copyright (c) 2014 Red Hat, Inc. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import base64 import mock import testtools from barbican.model import models from barbican.plugin.interface import secret_store from barbican.plugin import resources from barbican.plugin import store_crypto from barbican.tests import utils @utils.parameterized_test_case class WhenTestingPluginResource(testtools.TestCase, utils.MockModelRepositoryMixin): def setUp(self): super(WhenTestingPluginResource, self).setUp() self.plugin_resource = resources self.spec = {'algorithm': 'RSA', 'bit_length': 1024, 'passphrase': 'changeit' } self.content_type = 'application/octet-stream' self.project_model = mock.MagicMock() asymmetric_meta_dto = secret_store.AsymmetricKeyMetadataDTO() # Mock plug-in self.moc_plugin = mock.MagicMock() self.moc_plugin.generate_asymmetric_key.return_value = ( asymmetric_meta_dto) self.moc_plugin.store_secret.return_value = {} moc_plugin_config = { 'return_value.get_plugin_generate.return_value': self.moc_plugin, 'return_value.get_plugin_store.return_value': self.moc_plugin, 'return_value.get_plugin_retrieve_delete.return_value': self.moc_plugin } self.moc_plugin_patcher = mock.patch( 'barbican.plugin.interface.secret_store.get_manager', **moc_plugin_config ) self.moc_plugin_manager = self.moc_plugin_patcher.start() self.addCleanup(self.moc_plugin_patcher.stop) self.setup_project_repository_mock() self.secret_repo = mock.MagicMock() self.secret_repo.create_from.return_value = None 
self.setup_secret_repository_mock(self.secret_repo) self.container_repo = mock.MagicMock() self.container_repo.create_from.return_value = None self.setup_container_repository_mock(self.container_repo) self.container_secret_repo = mock.MagicMock() self.container_secret_repo.create_from.return_value = None self.setup_container_secret_repository_mock( self.container_secret_repo) self.secret_meta_repo = mock.MagicMock() self.secret_meta_repo.create_from.return_value = None self.setup_secret_meta_repository_mock(self.secret_meta_repo) def tearDown(self): super(WhenTestingPluginResource, self).tearDown() def test_store_secret_dto(self): spec = {'algorithm': 'AES', 'bit_length': 256, 'secret_type': 'symmetric'} secret = base64.b64encode('ABCDEFABCDEFABCDEFABCDEF') self.plugin_resource.store_secret( unencrypted_raw=secret, content_type_raw=self.content_type, content_encoding='base64', secret_model=models.Secret(spec), project_model=self.project_model) dto = self.moc_plugin.store_secret.call_args_list[0][0][0] self.assertEqual("symmetric", dto.type) self.assertEqual(secret, dto.secret) self.assertEqual(spec['algorithm'], dto.key_spec.alg) self.assertEqual(spec['bit_length'], dto.key_spec.bit_length) self.assertEqual(self.content_type, dto.content_type) @utils.parameterized_dataset({ 'general_secret_store': { 'moc_plugin': None }, 'store_crypto': { 'moc_plugin': mock.MagicMock(store_crypto.StoreCryptoAdapterPlugin) } }) def test_get_secret_dto(self, moc_plugin): def mock_secret_store_store_secret(dto): self.secret_dto = dto def mock_secret_store_get_secret(secret_type, secret_metadata): return self.secret_dto def mock_store_crypto_store_secret(dto, context): self.secret_dto = dto def mock_store_crypto_get_secret( secret_type, secret_metadata, context): return self.secret_dto if moc_plugin: self.moc_plugin = moc_plugin self.moc_plugin.store_secret.return_value = {} self.moc_plugin.store_secret.side_effect = ( mock_store_crypto_store_secret) 
self.moc_plugin.get_secret.side_effect = ( mock_store_crypto_get_secret) moc_plugin_config = { 'return_value.get_plugin_store.return_value': self.moc_plugin, 'return_value.get_plugin_retrieve_delete.return_value': self.moc_plugin } self.moc_plugin_manager.configure_mock(**moc_plugin_config) else: self.moc_plugin.store_secret.side_effect = ( mock_secret_store_store_secret) self.moc_plugin.get_secret.side_effect = ( mock_secret_store_get_secret) raw_secret = 'ABCDEFABCDEFABCDEFABCDEF' spec = {'name': 'testsecret', 'algorithm': 'AES', 'bit_length': 256, 'secret_type': 'symmetric'} self.plugin_resource.store_secret( unencrypted_raw=base64.b64encode(raw_secret), content_type_raw=self.content_type, content_encoding='base64', secret_model=models.Secret(spec), project_model=self.project_model) secret = self.plugin_resource.get_secret( 'application/octet-stream', models.Secret(spec), None) self.assertEqual(raw_secret, secret) def test_generate_asymmetric_with_passphrase(self): """test asymmetric secret generation with passphrase.""" secret_container = self.plugin_resource.generate_asymmetric_secret( self.spec, self.content_type, self.project_model, ) self.assertEqual("rsa", secret_container.type) self.assertEqual(self.moc_plugin. generate_asymmetric_key.call_count, 1) self.assertEqual(self.container_repo. create_from.call_count, 1) self.assertEqual(self.container_secret_repo. 
create_from.call_count, 3) def test_generate_asymmetric_without_passphrase(self): """test asymmetric secret generation without passphrase.""" del self.spec['passphrase'] secret_container = self.plugin_resource.generate_asymmetric_secret( self.spec, self.content_type, self.project_model, ) self.assertEqual("rsa", secret_container.type) self.assertEqual(1, self.moc_plugin.generate_asymmetric_key.call_count) self.assertEqual(1, self.container_repo.create_from.call_count) self.assertEqual(2, self.container_secret_repo.create_from.call_count) def test_delete_secret_w_metadata(self): project_id = "some_id" secret_model = mock.MagicMock() secret_meta = mock.MagicMock() self.secret_meta_repo.get_metadata_for_secret.return_value = ( secret_meta) self.plugin_resource.delete_secret(secret_model=secret_model, project_id=project_id) self.secret_meta_repo.get_metadata_for_secret.assert_called_once_with( secret_model.id) self.moc_plugin.delete_secret.assert_called_once_with(secret_meta) self.secret_repo.delete_entity_by_id.assert_called_once_with( entity_id=secret_model.id, external_project_id=project_id) def test_delete_secret_w_out_metadata(self): project_id = "some_id" secret_model = mock.MagicMock() self.secret_meta_repo.get_metadata_for_secret.return_value = None self.plugin_resource.delete_secret(secret_model=secret_model, project_id=project_id) self.secret_meta_repo.get_metadata_for_secret.assert_called_once_with( secret_model.id) self.secret_repo.delete_entity_by_id.assert_called_once_with( entity_id=secret_model.id, external_project_id=project_id) barbican-2.0.0/barbican/tests/plugin/test_snakeoil_ca.py0000664000567000056710000004315012701405673024505 0ustar jenkinsjenkins00000000000000# Copyright 2014 Hewlett-Packard Development Company, L.P. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
# You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import base64
import os

from Crypto.Util import asn1
import fixtures
import mock
from OpenSSL import crypto
from oslo_config import fixture as oslo_fixture

import barbican.plugin.interface.certificate_manager as cm
from barbican.plugin import snakeoil_ca
from barbican.tests import certificate_utils
from barbican.tests import utils


class BaseTestCase(utils.BaseTestCase):
    """Shared fixture: snakeoil CA config override plus a temp directory."""

    def setUp(self):
        super(BaseTestCase, self).setUp()
        # The Config fixture restores overridden options automatically.
        self.conf = self.useFixture(oslo_fixture.Config(
            conf=snakeoil_ca.CONF)).conf
        self.tmp_dir = self.useFixture(fixtures.TempDir()).path

    def tearDown(self):
        super(BaseTestCase, self).tearDown()


class CaTestCase(BaseTestCase):
    """Tests for generating self-signed (snakeoil) CA certificates."""

    def test_gen_cacert_no_file_storage(self):
        subject_dn = (
            'cn=Test CN,o=Test O,L=Test L,st=Test ST,ou=Test OU'
        )
        ca = snakeoil_ca.SnakeoilCA(cert_path=None, key_path=None,
                                    key_size=512, subject_dn=subject_dn)
        subject = ca.cert.get_subject()
        self.assertNotEqual(ca.key, None)
        self.assertEqual("Test ST", subject.ST)
        self.assertEqual("Test L", subject.L)
        self.assertEqual("Test O", subject.O)
        self.assertEqual("Test CN", subject.CN)
        self.assertEqual("Test OU", subject.OU)
        # With no chain file configured, the chain is the CA cert itself.
        self.assertEqual(
            ca.chain, crypto.dump_certificate(crypto.FILETYPE_PEM, ca.cert))

    def test_gen_cacert_with_file_storage(self):
        cert_path = self.tmp_dir + 'cert.pem'
        key_path = self.tmp_dir + 'key.pem'
        chain_path = self.tmp_dir + 'cert.chain'
        pkcs7_path = self.tmp_dir + 'cert.p7b'
        subject_dn = 'cn=Test CN,o=Test O,L=Test L,st=Test ST'
        ca = snakeoil_ca.SnakeoilCA(
            cert_path=cert_path,
            key_path=key_path,
            chain_path=chain_path,
            pkcs7_path=pkcs7_path,
            key_size=2048,
            subject_dn=subject_dn)
        subject = ca.cert.get_subject()
        self.assertEqual(
            ca.chain,
            crypto.dump_certificate(crypto.FILETYPE_PEM, ca.cert))
        self.assertNotEqual(None, ca.key)
        self.assertEqual("Test ST", subject.ST)
        self.assertEqual("Test L", subject.L)
        self.assertEqual("Test O", subject.O)
        self.assertEqual("Test CN", subject.CN)

        # Make sure we preserve existing keypairs
        ca = snakeoil_ca.SnakeoilCA(
            cert_path=cert_path,
            key_path=key_path,
            chain_path=chain_path,
            pkcs7_path=pkcs7_path
        )
        subject = ca.cert.get_subject()
        self.assertEqual("Test ST", subject.ST)
        self.assertEqual("Test L", subject.L)
        self.assertEqual("Test O", subject.O)
        self.assertEqual("Test CN", subject.CN)

    def test_gen_sub_cacert_with_file_storage(self):
        cert_path = self.tmp_dir + 'cert.pem'
        key_path = self.tmp_dir + 'key.pem'
        chain_path = self.tmp_dir + 'cert.chain'
        pkcs7_path = self.tmp_dir + 'cert.p7b'
        subject_dn = 'cn=Test CN,o=Test O,L=Test L,st=Test ST'
        parent_ca = snakeoil_ca.SnakeoilCA(
            cert_path=cert_path,
            key_path=key_path,
            chain_path=chain_path,
            pkcs7_path=pkcs7_path,
            key_size=2048,
            subject_dn=subject_dn)
        self.assertIsNotNone(parent_ca)

        # create a sub-ca
        subject_dn = 'cn=Sub CA Test CN,o=Test O,L=Test L,st=Test ST'
        cert_path = self.tmp_dir + 'sub_cert.pem'
        key_path = self.tmp_dir + 'sub_key.pem'
        chain_path = self.tmp_dir + 'sub_cert.chain'
        pkcs7_path = self.tmp_dir + 'sub_cert.p7b'
        sub_ca = snakeoil_ca.SnakeoilCA(
            cert_path=cert_path,
            key_path=key_path,
            chain_path=chain_path,
            pkcs7_path=pkcs7_path,
            key_size=2048,
            subject_dn=subject_dn,
            parent_chain_path=parent_ca.chain_path,
            signing_dn=parent_ca.subject_dn,
            signing_key=parent_ca.key
        )
        subject = sub_ca.cert.get_subject()
        self.assertEqual("Test ST", subject.ST)
        self.assertEqual("Test L", subject.L)
        self.assertEqual("Test O", subject.O)
        self.assertEqual("Sub CA Test CN", subject.CN)


class CertManagerTestCase(BaseTestCase):
    """Tests for signing certificates with a snakeoil CA."""

    def setUp(self):
        super(CertManagerTestCase, self).setUp()
        subject_dn = 'cn=Test CN,o=Test O,L=Test L,st=Test ST'
        self.ca = snakeoil_ca.SnakeoilCA(cert_path=None, key_path=None,
                                         key_size=512,
                                         subject_dn=subject_dn)

    def verify_sig(self, encoded_cert):
        # Extract the signature bit string from the DER certificate and
        # verify it against the test CA's key.
        der = asn1.DerSequence()
        der.decode(encoded_cert)
        der_sig = asn1.DerObject()
        der_sig.decode(der[2])
        sig = der_sig.payload
        # NOTE(review): assertIs compares identity; this relies on CPython
        # interning single-character strings (Python 2 semantics) --
        # assertEqual would be the safer assertion.
        self.assertIs('\x00', sig[0])
        crypto.verify(self.ca.cert, sig[1:], der[0], 'sha256')

    def test_gen_cert_no_file_storage(self):
        req = certificate_utils.get_valid_csr_object()
        # NOTE(review): local name 'cm' shadows the module-level alias for
        # certificate_manager imported above.
        cm = snakeoil_ca.CertManager(self.ca)
        cert = cm.make_certificate(req)
        first_serial = cert.get_serial_number()
        cert_enc = crypto.dump_certificate(crypto.FILETYPE_ASN1, cert)
        self.verify_sig(cert_enc)

        # A second certificate from the same CSR must get a new serial.
        cert = cm.make_certificate(req)
        self.assertNotEqual(first_serial, cert.get_serial_number())
        self.verify_sig(cert_enc)

        cm = snakeoil_ca.CertManager(self.ca)
        cert = cm.make_certificate(req)

    def test_gen_cert_with_file_storage(self):
        req = certificate_utils.get_valid_csr_object()
        cm = snakeoil_ca.CertManager(self.ca)
        cert = cm.make_certificate(req)
        cert_enc = crypto.dump_certificate(crypto.FILETYPE_ASN1, cert)
        first_serial = cert.get_serial_number()
        self.verify_sig(cert_enc)

        # A fresh CertManager over the same CA must continue the serial
        # sequence rather than reuse serials.
        cm = snakeoil_ca.CertManager(self.ca)
        cert = cm.make_certificate(req)
        self.assertNotEqual(first_serial, cert.get_serial_number())


class SnakeoilCAPluginTestCase(BaseTestCase):
    """Tests for the snakeoil certificate plugin entry points."""

    def setUp(self):
        super(SnakeoilCAPluginTestCase, self).setUp()
        self.ca_cert_path = os.path.join(self.tmp_dir, 'ca.cert')
        self.ca_key_path = os.path.join(self.tmp_dir, 'ca.key')
        self.ca_chain_path = os.path.join(self.tmp_dir, 'ca.chain')
        self.ca_pkcs7_path = os.path.join(self.tmp_dir, 'ca.pkcs7')
        self.db_dir = self.tmp_dir

        self.conf.snakeoil_ca_plugin.subca_cert_key_directory = os.path.join(
            self.tmp_dir, 'subca_cert_key_dir')
        self.subca_cert_key_directory = (
            self.conf.snakeoil_ca_plugin.subca_cert_key_directory)

        self.plugin = snakeoil_ca.SnakeoilCACertificatePlugin(
            self.conf)
        self.order_id = mock.MagicMock()
        self.barbican_meta_dto = cm.BarbicanMetaDTO()

    def test_issue_certificate_request(self):
        req = certificate_utils.get_valid_csr_object()
        req_enc = crypto.dump_certificate_request(crypto.FILETYPE_PEM, req)
        req_enc = base64.b64encode(req_enc)
        order_meta = {'request_data': req_enc}
        resp = self.plugin.issue_certificate_request(self.order_id,
                                                     order_meta,
                                                     {},
                                                     self.barbican_meta_dto)
        # NOTE(review): str.decode('base64') is Python 2 only.
        crypto.load_certificate(
            crypto.FILETYPE_PEM, resp.certificate.decode('base64'))

    def test_issue_certificate_request_with_ca_id(self):
        req = certificate_utils.get_valid_csr_object()
        req_enc = crypto.dump_certificate_request(crypto.FILETYPE_PEM, req)
        req_enc = base64.b64encode(req_enc)
        order_meta = {'request_data': req_enc}
        plugin_meta = {'plugin_ca_id': self.plugin.get_default_ca_name()}
        self.barbican_meta_dto.plugin_ca_id = self.plugin.get_default_ca_name()
        resp = self.plugin.issue_certificate_request(self.order_id,
                                                     order_meta,
                                                     plugin_meta,
                                                     self.barbican_meta_dto)
        crypto.load_certificate(
            crypto.FILETYPE_PEM, resp.certificate.decode('base64'))

    def test_issue_raises_with_invalid_ca_id(self):
        req = certificate_utils.get_valid_csr_object()
        req_enc = crypto.dump_certificate_request(crypto.FILETYPE_PEM, req)
        req_enc = base64.b64encode(req_enc)
        order_meta = {'request_data': req_enc}
        plugin_meta = {'plugin_ca_id': "invalid_ca_id"}
        self.barbican_meta_dto.plugin_ca_id = "invalid_ca_id"
        self.assertRaises(
            cm.CertificateGeneralException,
            self.plugin.issue_certificate_request,
            self.order_id,
            order_meta,
            plugin_meta,
            self.barbican_meta_dto)

    def test_issue_certificate_request_set_subject(self):
        req = certificate_utils.get_valid_csr_object()
        subj = req.get_subject()
        subj.countryName = 'US'
        subj.stateOrProvinceName = 'OR'
        subj.localityName = 'Testlandia'
        subj.organizationName = 'Testers Anon'
        subj.organizationalUnitName = 'Testers OU'
        subj.commonName = 'Testing'
        req_enc = crypto.dump_certificate_request(crypto.FILETYPE_PEM, req)
        req_enc = base64.b64encode(req_enc)
        order_meta = {'request_data': req_enc}
        resp = self.plugin.issue_certificate_request(self.order_id,
                                                     order_meta,
                                                     {},
                                                     self.barbican_meta_dto)
        cert = crypto.load_certificate(
            crypto.FILETYPE_PEM, resp.certificate.decode('base64'))
        cert_subj = cert.get_subject()
        self.assertEqual('US', cert_subj.C)
        self.assertEqual('OR', cert_subj.ST)
        self.assertEqual('Testlandia', cert_subj.L)
        self.assertEqual('Testers Anon', cert_subj.O)
        self.assertEqual('Testers OU', cert_subj.OU)
        self.assertEqual('Testing', cert_subj.CN)

    def test_issue_certificate_request_stored_key(self):
        req = certificate_utils.get_valid_csr_object()
        req_enc = crypto.dump_certificate_request(crypto.FILETYPE_PEM, req)
        self.barbican_meta_dto.generated_csr = req_enc
        resp = self.plugin.issue_certificate_request(
            self.order_id, {}, {}, self.barbican_meta_dto)
        crypto.load_certificate(
            crypto.FILETYPE_PEM, resp.certificate.decode('base64'))

    def test_no_request_data(self):
        res = self.plugin.issue_certificate_request(
            self.order_id, {}, {}, self.barbican_meta_dto)
        self.assertIs(cm.CertificateStatus.CLIENT_DATA_ISSUE_SEEN,
                      res.status)
        self.assertEqual("No request_data specified", res.status_message)

    def test_get_default_ca_name(self):
        self.assertEqual("Snakeoil CA", self.plugin.get_default_ca_name())

    def test_get_default_signing_cert(self):
        ca_cert = self.plugin.get_default_signing_cert()
        self.assertEqual(
            crypto.dump_certificate(crypto.FILETYPE_PEM, self.plugin.ca.cert),
            ca_cert)

    def test_get_default_intermediates_none(self):
        intermediates = self.plugin.get_default_intermediates()
        self.assertIsNone(intermediates)

    def test_not_implemented(self):
        # Optional plugin operations the snakeoil plugin does not support.
        self.assertRaises(NotImplementedError,
                          self.plugin.modify_certificate_request,
                          '', {}, {}, {})
        self.assertRaises(NotImplementedError,
                          self.plugin.cancel_certificate_request,
                          '', {}, {}, {})
        self.assertRaises(NotImplementedError,
                          self.plugin.check_certificate_status,
                          '', {}, {}, {})

    def test_support_request_types(self):
        manager = cm.CertificatePluginManager()
        manager.extensions = [mock.MagicMock(obj=self.plugin)]
        cert_spec = {
            cm.REQUEST_TYPE: cm.CertificateRequestType.CUSTOM_REQUEST}
        self.assertEqual(self.plugin, manager.get_plugin(cert_spec))
        self.assertTrue(self.plugin.supports(cert_spec))
        cert_spec = {
            cm.REQUEST_TYPE: cm.CertificateRequestType.STORED_KEY_REQUEST}
        self.assertEqual(self.plugin, manager.get_plugin(cert_spec))
        self.assertTrue(self.plugin.supports(cert_spec))
        cert_spec = {
            cm.REQUEST_TYPE: cm.CertificateRequestType.FULL_CMC_REQUEST}
        self.assertRaises(cm.CertificatePluginNotFound,
                          manager.get_plugin, cert_spec)
        self.assertFalse(self.plugin.supports(cert_spec))

    def test_supports_create_ca(self):
        self.assertTrue(self.plugin.supports_create_ca())

    def _create_subca(self):
        # Helper: create a subordinate CA under the plugin's default CA.
        create_ca_dto = cm.CACreateDTO(
            name="sub ca1",
            description="subordinate ca",
            subject_dn="cn=subordinate ca signing cert, o=example.com",
            parent_ca_id=self.plugin.get_default_ca_name()
        )
        return self.plugin.create_ca(create_ca_dto)

    def test_create_ca(self):
        subca_dict = self._create_subca()
        self.assertEqual("sub ca1", subca_dict.get(cm.INFO_NAME))
        self.assertIsNotNone(subca_dict.get(cm.INFO_EXPIRATION))
        self.assertIsNotNone(subca_dict.get(cm.PLUGIN_CA_ID))

        ca_cert = subca_dict.get(cm.INFO_CA_SIGNING_CERT)
        self.assertIsNotNone(ca_cert)

        intermediates = subca_dict.get(cm.INFO_INTERMEDIATES)
        self.assertIsNotNone(intermediates)

        cacert = crypto.load_certificate(crypto.FILETYPE_PEM, ca_cert)
        subject = cacert.get_subject()
        self.assertEqual(
            "subordinate ca signing cert", subject.CN)

        pkcs7 = crypto.load_pkcs7_data(crypto.FILETYPE_PEM, intermediates)
        self.assertTrue(pkcs7.type_is_signed())

        # TODO(alee) Verify that ca cert is signed by parent CA

    def test_issue_certificate_request_with_subca_id(self):
        subca_dict = self._create_subca()
        req = certificate_utils.get_valid_csr_object()
        req_enc = crypto.dump_certificate_request(crypto.FILETYPE_PEM, req)
        req_enc = base64.b64encode(req_enc)
        order_meta = {'request_data': req_enc}
        plugin_meta = {'plugin_ca_id': subca_dict.get(cm.PLUGIN_CA_ID)}
        self.barbican_meta_dto.plugin_ca_id = subca_dict.get(cm.PLUGIN_CA_ID)
        resp = self.plugin.issue_certificate_request(self.order_id,
                                                     order_meta,
                                                     plugin_meta,
                                                     self.barbican_meta_dto)
        new_cert = crypto.load_certificate(
            crypto.FILETYPE_PEM, resp.certificate.decode('base64'))
        signing_cert = crypto.load_certificate(
            crypto.FILETYPE_PEM, subca_dict['ca_signing_certificate'])
        # The issued cert must be chained to the sub-CA, not the root.
        self.assertEqual(signing_cert.get_subject(), new_cert.get_issuer())

    def test_delete_ca(self):
        subca_dict = self._create_subca()
        ca_id = subca_dict.get(cm.PLUGIN_CA_ID)
        self.assertIsNotNone(ca_id)

        cert_path = os.path.join(self.subca_cert_key_directory,
                                 ca_id + ".cert")
        key_path = os.path.join(self.subca_cert_key_directory,
                                ca_id + ".key")
        self.assertTrue(os.path.exists(cert_path))
        self.assertTrue(os.path.exists(key_path))

        self.plugin.delete_ca(ca_id)
        # Deleting a sub-CA must remove its on-disk keypair and its entry.
        self.assertFalse(os.path.exists(cert_path))
        self.assertFalse(os.path.exists(key_path))

        cas = self.plugin.get_ca_info()
        self.assertNotIn(ca_id, cas.keys())

    def test_raises_no_parent_id_passed_in(self):
        create_ca_dto = cm.CACreateDTO(
            name="sub ca1",
            description="subordinate ca",
            subject_dn="cn=subordinate ca signing cert, o=example.com",
        )
        self.assertRaises(
            cm.CertificateGeneralException,
            self.plugin.create_ca,
            create_ca_dto
        )

    def test_raises_invalid_parent_id_passed_in(self):
        create_ca_dto = cm.CACreateDTO(
            name="sub ca1",
            description="subordinate ca",
            subject_dn="cn=subordinate ca signing cert, o=example.com",
            parent_ca_id="foo"
        )
        self.assertRaises(
            cm.CertificateGeneralException,
            self.plugin.create_ca,
            create_ca_dto
        )

    def test_get_ca_info(self):
        ca_info = self.plugin.get_ca_info()
        ca_dict = ca_info.get(self.plugin.ca.name)
        self.assertIsNotNone(ca_dict)
        self.assertEqual(self.plugin.ca.name, ca_dict.get(cm.INFO_NAME))
        self.assertIsNotNone(ca_dict.get(cm.INFO_CA_SIGNING_CERT))
        self.assertEqual(str, type(ca_dict.get(cm.INFO_EXPIRATION)))

    def test_get_ca_info_with_subca(self):
        subca_dict = self._create_subca()
        subca_id = subca_dict.get(cm.PLUGIN_CA_ID)
        ca_info = self.plugin.get_ca_info()
        self.assertIn(subca_id, ca_info.keys())
        self.assertIn(self.plugin.get_default_ca_name(), ca_info.keys())
        self.assertEqual(str, type(subca_dict.get(cm.INFO_EXPIRATION)))
# Copyright (c) 2014 Johns Hopkins University Applied Physics Laboratory
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock

from barbican.common import utils as common_utils
# Fix: the alias used to be "str", shadowing the builtin str type for the
# whole module; renamed to "ss" (module-local, behavior unchanged).
from barbican.plugin.interface import secret_store as ss
from barbican.tests import utils


class TestSecretStore(ss.SecretStoreBase):
    """Secret store plugin for testing support."""

    def __init__(self, supported_alg_list):
        super(TestSecretStore, self).__init__()
        self.alg_list = supported_alg_list

    def generate_symmetric_key(self, key_spec):
        raise NotImplementedError  # pragma: no cover

    def generate_asymmetric_key(self, key_spec):
        raise NotImplementedError  # pragma: no cover

    def store_secret(self, secret_dto):
        raise NotImplementedError  # pragma: no cover

    def get_secret(self, secret_metadata):
        raise NotImplementedError  # pragma: no cover

    def generate_supports(self, key_spec):
        # Supported iff the algorithm was listed at construction time.
        return key_spec.alg in self.alg_list

    def delete_secret(self, secret_metadata):
        raise NotImplementedError  # pragma: no cover

    def store_secret_supports(self, key_spec):
        return key_spec.alg in self.alg_list


class TestSecretStoreWithTransportKey(ss.SecretStoreBase):
    """Secret store plugin for testing support.

    This plugin will override the relevant methods for key wrapping.
    """

    def __init__(self, supported_alg_list):
        super(TestSecretStoreWithTransportKey, self).__init__()
        self.alg_list = supported_alg_list

    def generate_symmetric_key(self, key_spec):
        raise NotImplementedError  # pragma: no cover

    def generate_asymmetric_key(self, key_spec):
        raise NotImplementedError  # pragma: no cover

    def store_secret(self, secret_dto):
        raise NotImplementedError  # pragma: no cover

    def get_secret(self, secret_metadata):
        raise NotImplementedError  # pragma: no cover

    def generate_supports(self, key_spec):
        return key_spec.alg in self.alg_list

    def delete_secret(self, secret_metadata):
        raise NotImplementedError  # pragma: no cover

    def store_secret_supports(self, key_spec):
        return key_spec.alg in self.alg_list

    def get_transport_key(self):
        return "transport key"

    def is_transport_key_current(self, transport_key):
        return True


class WhenTestingSecretStorePluginManager(utils.BaseTestCase):
    """Tests for SecretStorePluginManager plugin selection logic."""

    def setUp(self):
        super(WhenTestingSecretStorePluginManager, self).setUp()
        self.manager = ss.SecretStorePluginManager()

    def test_get_store_supported_plugin_no_plugin_name(self):
        plugin = TestSecretStore([ss.KeyAlgorithm.AES])
        plugin_mock = mock.MagicMock(obj=plugin)
        self.manager.extensions = [plugin_mock]
        keySpec = ss.KeySpec(ss.KeyAlgorithm.AES, 128)
        self.assertEqual(plugin,
                         self.manager.get_plugin_store(keySpec))

    def test_get_store_supported_plugin_with_plugin_name(self):
        plugin = TestSecretStore([ss.KeyAlgorithm.AES])
        plugin_mock = mock.MagicMock(obj=plugin)
        self.manager.extensions = [plugin_mock]
        plugin_found = self.manager.get_plugin_store(
            None, plugin_name=common_utils.generate_fullname_for(plugin))
        self.assertEqual(plugin, plugin_found)

    def test_get_generate_supported_plugin(self):
        plugin = TestSecretStore([ss.KeyAlgorithm.AES])
        plugin_mock = mock.MagicMock(obj=plugin)
        self.manager.extensions = [plugin_mock]
        keySpec = ss.KeySpec(ss.KeyAlgorithm.AES, 128)
        self.assertEqual(plugin,
                         self.manager.get_plugin_generate(keySpec))

    def test_get_store_no_plugin_found(self):
        self.manager.extensions = []
        keySpec = ss.KeySpec(ss.KeyAlgorithm.AES, 128)
        self.assertRaises(
            ss.SecretStorePluginsNotConfigured,
            self.manager.get_plugin_store,
            keySpec,
        )

    def test_get_store_no_plugin_found_by_name(self):
        plugin = TestSecretStore([ss.KeyAlgorithm.AES])
        plugin_mock = mock.MagicMock(obj=plugin)
        self.manager.extensions = [plugin_mock]
        keySpec = ss.KeySpec(ss.KeyAlgorithm.AES, 128)
        plugin_name = 'plugin'
        exception_result = self.assertRaises(
            ss.SecretStorePluginNotFound,
            self.manager.get_plugin_store,
            keySpec,
            plugin_name=plugin_name
        )
        self.assertEqual(
            'Secret store plugin "{name}" not found.'.format(name=plugin_name),
            exception_result.message)

    def test_get_generate_no_plugin_found(self):
        self.manager.extensions = []
        keySpec = ss.KeySpec(ss.KeyAlgorithm.AES, 128)
        self.assertRaises(
            ss.SecretStorePluginsNotConfigured,
            self.manager.get_plugin_generate,
            keySpec,
        )

    def test_get_store_no_supported_plugin(self):
        plugin = TestSecretStore([])
        plugin_mock = mock.MagicMock(obj=plugin)
        self.manager.extensions = [plugin_mock]
        keySpec = ss.KeySpec(ss.KeyAlgorithm.AES, 128)
        self.assertRaises(
            ss.SecretStoreSupportedPluginNotFound,
            self.manager.get_plugin_store,
            keySpec,
        )

    def test_get_generate_no_supported_plugin(self):
        plugin = TestSecretStore([])
        plugin_mock = mock.MagicMock(obj=plugin)
        self.manager.extensions = [plugin_mock]
        keySpec = ss.KeySpec(ss.KeyAlgorithm.AES, 128)
        self.assertRaises(
            ss.SecretStoreSupportedPluginNotFound,
            self.manager.get_plugin_generate,
            keySpec,
        )

    def test_get_store_no_plugin_with_tkey_and_no_supports_storage(self):
        plugin = TestSecretStore([])
        plugin_mock = mock.MagicMock(obj=plugin)
        self.manager.extensions = [plugin_mock]
        keySpec = ss.KeySpec(ss.KeyAlgorithm.AES, 128)
        self.assertRaises(
            ss.SecretStoreSupportedPluginNotFound,
            self.manager.get_plugin_store,
            key_spec=keySpec,
            transport_key_needed=True,
        )

    def test_get_store_plugin_with_tkey_and_no_supports_storage(self):
        plugin = TestSecretStoreWithTransportKey([])
        plugin_mock = mock.MagicMock(obj=plugin)
        self.manager.extensions = [plugin_mock]
        keySpec = ss.KeySpec(ss.KeyAlgorithm.AES, 128)
        self.assertRaises(
            ss.SecretStoreSupportedPluginNotFound,
            self.manager.get_plugin_store,
            key_spec=keySpec,
            transport_key_needed=True,
        )

    def test_get_store_plugin_with_no_tkey_and_supports_storage(self):
        plugin = TestSecretStore([ss.KeyAlgorithm.AES])
        plugin_mock = mock.MagicMock(obj=plugin)
        self.manager.extensions = [plugin_mock]
        keySpec = ss.KeySpec(ss.KeyAlgorithm.AES, 128)
        self.assertRaises(
            ss.SecretStoreSupportedPluginNotFound,
            self.manager.get_plugin_store,
            key_spec=keySpec,
            transport_key_needed=True,
        )

    @mock.patch('barbican.common.utils.generate_fullname_for')
    def test_get_retrieve_plugin_raises_when_not_available(
            self, generate_full_name_for):
        plugin = TestSecretStore([ss.KeyAlgorithm.AES])
        plugin_mock = mock.MagicMock(obj=plugin)
        self.manager.extensions = [plugin_mock]
        generate_full_name_for.return_value = "another plugin name"
        plugin_name = 'plugin name searched for'

        exception_result = self.assertRaises(
            ss.StorePluginNotAvailableOrMisconfigured,
            self.manager.get_plugin_retrieve_delete,
            plugin_name=plugin_name,
        )
        self.assertIn(plugin_name,
                      exception_result.message)

    def test_get_store_plugin_with_tkey_and_supports_storage(self):
        plugin1 = TestSecretStore([ss.KeyAlgorithm.AES])
        plugin1_mock = mock.MagicMock(obj=plugin1)
        plugin2 = TestSecretStoreWithTransportKey([ss.KeyAlgorithm.AES])
        plugin2_mock = mock.MagicMock(obj=plugin2)
        self.manager.extensions = [plugin1_mock, plugin2_mock]
        keySpec = ss.KeySpec(ss.KeyAlgorithm.AES, 128)
        # Only the transport-key-capable plugin qualifies.
        self.assertEqual(plugin2,
                         self.manager.get_plugin_store(
                             key_spec=keySpec,
                             transport_key_needed=True))
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import datetime

import mock
import testtools

from barbican.common import utils as common_utils
from barbican.model import models
from barbican.plugin.interface import certificate_manager as cm
from barbican.tests import database_utils
from barbican.tests import utils


class WhenTestingCertificateEventPluginManager(testtools.TestCase):
    """Tests for the certificate *event* plugin manager notifications."""

    def setUp(self):
        super(WhenTestingCertificateEventPluginManager, self).setUp()
        self.project_id = '1234'
        self.order_ref = 'http://www.mycerts.com/v1/orders/123456'
        self.container_ref = 'http://www.mycerts.com/v1/containers/654321'
        self.error_msg = 'Something is broken'
        self.retry_in_msec = 5432

        self.plugin_returned = mock.MagicMock()
        self.plugin_name = common_utils.generate_fullname_for(
            self.plugin_returned)
        self.plugin_loaded = mock.MagicMock(obj=self.plugin_returned)
        # NOTE(review): this mutates the process-global EVENT_PLUGIN_MANAGER;
        # tests rely on each setUp re-seeding 'extensions'.
        self.manager = cm.EVENT_PLUGIN_MANAGER
        self.manager.extensions = [self.plugin_loaded]

    def test_get_plugin_by_name(self):
        self.assertEqual(self.plugin_returned,
                         self.manager.get_plugin_by_name(self.plugin_name))

    def test_notify_ca_is_unavailable(self):
        self.manager.notify_ca_is_unavailable(
            self.project_id,
            self.order_ref,
            self.error_msg,
            self.retry_in_msec)
        self.plugin_returned.notify_ca_is_unavailable.assert_called_once_with(
            self.project_id,
            self.order_ref,
            self.error_msg,
            self.retry_in_msec)

    def test_notify_certificate_is_ready(self):
        self.manager.notify_certificate_is_ready(
            self.project_id,
            self.order_ref,
            self.container_ref)
        pr = self.plugin_returned
        pr.notify_certificate_is_ready.assert_called_once_with(
            self.project_id,
            self.order_ref,
            self.container_ref)

    def test_invoke_certificate_plugins(self):
        self.manager._invoke_certificate_plugins(
            'test_invoke_certificate_plugins',
            self.project_id,
            self.order_ref,
            self.container_ref)

        # The _invoke_certificate_plugins method should invoke on
        # self.plugin_returned the same method by name as the function
        # that invoked it...in this case it is this test method.
        pr = self.plugin_returned
        pr.test_invoke_certificate_plugins.assert_called_once_with(
            self.project_id,
            self.order_ref,
            self.container_ref)

    def test_raises_error_with_no_plugin_by_name_found(self):
        self.manager.extensions = []
        self.assertRaises(
            cm.CertificateEventPluginNotFound,
            self.manager.get_plugin_by_name,
            'any-name-here'
        )

    def test_raises_error_with_no_plugin_for_invoke_certificate_plugins(self):
        self.manager.extensions = []
        self.assertRaises(
            cm.CertificateEventPluginNotFound,
            self.manager._invoke_certificate_plugins,
            self.project_id,
            self.order_ref,
            self.error_msg,
            self.retry_in_msec,
        )


class WhenTestingCertificatePluginManager(database_utils.RepositoryTestCase,
                                          utils.MockModelRepositoryMixin):
    """Tests for CertificatePluginManager plugin selection and CA refresh."""

    def setUp(self):
        super(WhenTestingCertificatePluginManager, self).setUp()
        self.cert_spec = {}

        self.plugin_returned = mock.MagicMock()
        self.plugin_name = common_utils.generate_fullname_for(
            self.plugin_returned)
        types_list = [cm.CertificateRequestType.SIMPLE_CMC_REQUEST,
                      cm.CertificateRequestType.CUSTOM_REQUEST]
        self.plugin_returned.supported_request_types.return_value = types_list
        self.plugin_returned.supports.return_value = True
        self.plugin_loaded = mock.MagicMock(obj=self.plugin_returned)

        expiration = (datetime.datetime.utcnow() + datetime.timedelta(
            days=cm.CA_INFO_DEFAULT_EXPIRATION_DAYS))
        ca_info = {
            cm.INFO_NAME: "my_ca",
            cm.INFO_DESCRIPTION: "Certificate Authority my_ca",
            cm.INFO_CA_SIGNING_CERT: "Undefined",
            cm.INFO_INTERMEDIATES: "Undefined",
            cm.INFO_EXPIRATION: expiration.isoformat()
        }
        self.plugin_returned.get_ca_info.return_value = {
            'plugin_ca_id1': ca_info
        }

        parsed_ca = {
            'plugin_name': self.plugin_name,
            'plugin_ca_id': 'plugin_ca_id1',
            'name': self.plugin_name,
            'description': 'Master CA for default plugin',
            'ca_signing_certificate': 'ZZZZZ',
            'intermediates': 'YYYYY'
        }
        self.ca = models.CertificateAuthority(parsed_ca)
        self.ca.id = 'ca_id'

        self.ca_repo = mock.MagicMock()
        self.ca_repo.get_by_create_date.return_value = (
            self.ca, 0, 1, 1)
        self.ca_repo.create_from.return_value = None
        self.ca_repo.get.return_value = self.ca

        self.project = models.Project()
        self.project.id = '12345'

        self.setup_ca_repository_mock(self.ca_repo)

        # NOTE(review): duplicate assignment -- plugin_loaded was already
        # set identically above; one of the two could be removed.
        self.plugin_loaded = mock.MagicMock(obj=self.plugin_returned)
        self.manager = cm.CertificatePluginManager()
        self.manager.extensions = [self.plugin_loaded]

    def test_get_plugin_by_name(self):
        self.assertEqual(self.plugin_returned,
                         self.manager.get_plugin_by_name(self.plugin_name))

    def test_get_plugin_by_ca_id(self):
        self.assertEqual(self.plugin_returned,
                         self.manager.get_plugin_by_ca_id('ca_id'))

    def test_raises_error_with_no_plugin_by_ca_id_found(self):
        self.ca_repo.get.return_value = None
        self.assertRaises(
            cm.CertificatePluginNotFoundForCAID,
            self.manager.get_plugin_by_ca_id,
            'any-name-here'
        )

    def test_raises_error_with_no_plugin_by_name_found(self):
        self.manager.extensions = []
        self.assertRaises(
            cm.CertificatePluginNotFound,
            self.manager.get_plugin_by_name,
            'any-name-here'
        )

    def test_get_plugin_no_request_type_provided(self):
        # no request_type defaults to "custom"
        self.assertEqual(self.plugin_returned,
                         self.manager.get_plugin(self.cert_spec))

    def test_get_plugin_request_type_supported(self):
        self.cert_spec = {
            cm.REQUEST_TYPE: cm.CertificateRequestType.SIMPLE_CMC_REQUEST}
        self.assertEqual(self.plugin_returned,
                         self.manager.get_plugin(self.cert_spec))

    def test_raises_error_get_plugin_request_type_not_supported(self):
        self.cert_spec = {
            cm.REQUEST_TYPE: cm.CertificateRequestType.FULL_CMC_REQUEST}
        self.assertRaises(
            cm.CertificatePluginNotFound,
            self.manager.get_plugin,
            self.cert_spec
        )

    def test_raises_error_with_no_plugin_found(self):
        self.manager.extensions = []
        self.assertRaises(
            cm.CertificatePluginNotFound,
            self.manager.get_plugin,
            self.cert_spec
        )

    def test_get_plugin_with_ca_to_be_added(self):
        # Repo reports no CA rows yet; get_plugin should trigger an add.
        self.ca_repo.get_by_create_date.return_value = (
            None, 0, 1, 0)
        self.assertEqual(self.plugin_returned,
                         self.manager.get_plugin(self.cert_spec))

    def test_refresh_ca_list(self):
        # Exercises all three refresh paths: update an existing (expired)
        # CA, delete a CA the plugin no longer reports, and add a new one.
        utc_now = datetime.datetime.utcnow()
        expired_time = utc_now - datetime.timedelta(days=1)
        expiration = utc_now + datetime.timedelta(days=1)
        ca1_info = {
            cm.INFO_NAME: "expired_ca_to_be_modified",
            cm.INFO_DESCRIPTION: "expired_ca to be modified",
            cm.INFO_CA_SIGNING_CERT: "XXXXXXX-expired-XXXXXX",
            cm.INFO_INTERMEDIATES: "YYYYYYY-expired-YYYYYYY",
            cm.INFO_EXPIRATION: expired_time.isoformat()
        }
        ca1_modified_info = {
            cm.INFO_NAME: "expired_ca_to_be_modified",
            cm.INFO_DESCRIPTION: "expired_ca to be modified",
            cm.INFO_CA_SIGNING_CERT: "XXXXXXX-no-longer-expired-XXXXXX",
            cm.INFO_INTERMEDIATES: "YYYYYYY-no-longer-expired-YYYYYYY",
            cm.INFO_EXPIRATION: expiration.isoformat()
        }
        ca2_info = {
            cm.INFO_NAME: "expired_ca_to_be_deleted",
            cm.INFO_DESCRIPTION: "expired ca to be deleted",
            cm.INFO_CA_SIGNING_CERT: "XXXX-expired-to-be-deleted-XXXX",
            cm.INFO_INTERMEDIATES: "YYYY-expired-to-be-deleted-YYYY",
            cm.INFO_EXPIRATION: expired_time.isoformat()
        }
        ca3_info = {
            cm.INFO_NAME: "new-ca-to-be-added",
            cm.INFO_DESCRIPTION: "new-ca-to-be-added",
            cm.INFO_CA_SIGNING_CERT: "XXXX-to-be-addeed-XXXX",
            cm.INFO_INTERMEDIATES: "YYYY-to-be-added-YYYY",
            cm.INFO_EXPIRATION: expiration.isoformat()
        }
        self.plugin_returned.get_ca_info.return_value = {
            'plugin_ca_id_ca1': ca1_modified_info,
            'plugin_ca_id_ca3': ca3_info
        }
        parsed_ca1 = dict(ca1_info)
        parsed_ca1[cm.PLUGIN_CA_ID] = 'plugin_ca_id_ca1'
        parsed_ca1['plugin_name'] = self.plugin_name
        ca1 = models.CertificateAuthority(parsed_ca1)
        ca1.id = "ca1_id"

        parsed_ca2 = dict(ca2_info)
        parsed_ca2[cm.PLUGIN_CA_ID] = 'plugin_ca_id_ca2'
        parsed_ca2['plugin_name'] = self.plugin_name
        ca2 = models.CertificateAuthority(parsed_ca2)
        ca2.id = "ca2_id"

        side_effect = [(None, 0, 4, 0), ([ca1, ca2], 0, 4, 2)]
        self.ca_repo.get_by_create_date.side_effect = side_effect

        self.manager.refresh_ca_table()
        self.plugin_returned.get_ca_info.assert_called_once_with()
        self.ca_repo.update_entity.assert_called_once_with(
            ca1, ca1_modified_info)
        self.ca_repo.delete_entity_by_id.assert_called_once_with(
            ca2.id, None)
        self.ca_repo.create_from.assert_has_calls([])

    def test_refresh_ca_list_plugin_when_get_ca_info_raises(self):
        # A plugin failure during refresh must not propagate.
        self.ca_repo.get_by_create_date.return_value = (None, 0, 4, 0)
        self.plugin_returned.get_ca_info.side_effect = Exception()

        self.manager.refresh_ca_table()
        self.plugin_returned.get_ca_info.assert_called_once_with()

    def test_refresh_ca_list_with_bad_ca_returned_from_plugin(self):
        ca3_info = {
            cm.INFO_DESCRIPTION: "PLUGIN FAIL: this-ca-has-no-info",
        }
        self.plugin_returned.get_ca_info.return_value = {
            'plugin_ca_id_ca3': ca3_info
        }
        self.ca_repo.get_by_create_date.return_value = (None, 0, 4, 0)
        self.ca_repo.create_from.side_effect = Exception()

        self.manager.refresh_ca_table()
        self.plugin_returned.get_ca_info.assert_called_once_with()
        self.ca_repo.create_from.assert_has_calls([])
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Supports database/repositories oriented unit testing. Warning: Do not merge this content with the utils.py module, as doing so will break the DevStack functional test discovery process. """ import datetime import oslotest.base as oslotest from sqlalchemy.engine import Engine from sqlalchemy import event from barbican.model import models from barbican.model import repositories @event.listens_for(Engine, "connect") def set_foreign_key_constraint(dbapi_connection, connection_record): # Ensure that foreign key constraints are enforced during tests dbapi_connection.execute("PRAGMA foreign_keys=ON") def setup_in_memory_db(): # Ensure we are using in-memory SQLite database, and creating tables. repositories.CONF.set_override("sql_connection", "sqlite:///:memory:", enforce_type=True) repositories.CONF.set_override("db_auto_create", True, enforce_type=True) repositories.CONF.set_override("debug", True, enforce_type=True) # Ensure the connection is completely closed, so any previous in-memory # database can be removed prior to starting the next test run. repositories.hard_reset() # Start the in-memory database, creating required tables. 
repositories.start() def in_memory_cleanup(): repositories.clear() def get_session(): return repositories.get_session() def create_project(external_id="my keystone id", session=None): project = models.Project() project.external_id = external_id project_repo = repositories.get_project_repository() project_repo.create_from(project, session=session) return project def create_order(project=None, session=None, secret=None, container=None): if not project: project = create_project(session=session) order = models.Order() order.project_id = project.id if secret: order.secret_id = secret.id if container: order.container_id = container.id order_repo = repositories.get_order_repository() order_repo.create_from(order, session=session) return order def create_secret(project=None, session=None): secret = models.Secret() secret.project_id = project.id secret_repo = repositories.get_secret_repository() secret_repo.create_from(secret, session=session) return secret def create_transport_key(plugin_name="plugin", transport_key="tkey", session=None): transport_key = models.TransportKey(plugin_name, transport_key) transport_key_repo = repositories.get_transport_key_repository() transport_key_repo.create_from(transport_key, session=session) return transport_key def create_secret_metadatum(secret=None, key="key", value="value", session=None): secret_meta = models.SecretStoreMetadatum(key, value) secret_meta.secret_id = secret.id secret_meta_repo = repositories.get_secret_meta_repository() secret_meta_repo.create_from(secret_meta, session=session) return secret_meta def create_secret_user_metadatum(secret=None, key="user_key", value="user_value", session=None): secret_user_metadatum = models.SecretUserMetadatum(key, value) secret_user_metadatum.secret_id = secret.id secret_user_metadatum_repo = repositories.get_secret_user_meta_repository() secret_user_metadatum_repo.create_from(secret_user_metadatum, session=session) return secret_user_metadatum def create_container(project=None, 
def create_container(project=None, session=None):
    """Persist and return a Container owned by *project*."""
    container = models.Container()
    container.project_id = project.id
    repo = repositories.get_container_repository()
    repo.create_from(container, session=session)
    return container


def create_container_secret(container=None, secret=None, session=None):
    """Persist and return the link row joining *container* and *secret*."""
    container_secret = models.ContainerSecret()
    container_secret.container_id = container.id
    container_secret.secret_id = secret.id
    repo = repositories.get_container_secret_repository()
    repo.create_from(container_secret, session=session)
    return container_secret


def create_kek_datum(project=None, plugin_name="plugin", session=None):
    """Persist and return a KEKDatum for *project* and *plugin_name*."""
    kek_datum = models.KEKDatum()
    kek_datum.plugin_name = plugin_name
    kek_datum.project_id = project.id
    repo = repositories.get_kek_datum_repository()
    repo.create_from(kek_datum, session=session)
    return kek_datum


def create_encrypted_datum(secret=None, kek_datum=None, session=None):
    """Persist and return an EncryptedDatum linking *secret* and *kek_datum*."""
    enc_datum = models.EncryptedDatum()
    enc_datum.secret_id = secret.id
    enc_datum.kek_id = kek_datum.id
    repo = repositories.get_encrypted_datum_repository()
    repo.create_from(enc_datum, session=session)
    return enc_datum


def create_order_meta_datum(order=None, key="key", value="value",
                            session=None):
    """Persist and return an OrderBarbicanMetadatum tied to *order*."""
    meta_datum = models.OrderBarbicanMetadatum(key, value)
    meta_datum.order_id = order.id
    repo = repositories.get_order_barbican_meta_repository()
    repo.create_from(meta_datum, session=session)
    return meta_datum


def create_order_retry(order=None, retry_task="", retry_args=None,
                       retry_kwargs=None, retry_at=None, session=None):
    """Persist and return an OrderRetryTask for *order*.

    ``retry_args``/``retry_kwargs`` default to an empty list/dict.  ``None``
    sentinels are used instead of literal ``[]``/``{}`` defaults so the
    containers are not shared between calls (mutable-default pitfall).
    A supplied ``retry_at`` is now actually stored; previously it was only
    tested for truthiness and then discarded.
    """
    order_retry = models.OrderRetryTask()
    order_retry.retry_task = retry_task
    order_retry.retry_args = [] if retry_args is None else retry_args
    order_retry.retry_kwargs = {} if retry_kwargs is None else retry_kwargs
    order_retry.retry_at = retry_at or datetime.datetime.utcnow()
    order_retry.order_id = order.id
    repo = repositories.get_order_retry_tasks_repository()
    repo.create_from(order_retry, session)
    return order_retry


def create_order_plugin_metadatum(order=None, key="key", value="value",
                                  session=None):
    """Persist and return an OrderPluginMetadatum tied to *order*."""
    plugin_metadatum = models.OrderPluginMetadatum(key, value)
    plugin_metadatum.order_id = order.id
    repo = repositories.get_order_plugin_meta_repository()
    repo.create_from(plugin_metadatum, session=session)
    return plugin_metadatum


def create_container_consumer_meta(container=None, parsed_request=None,
                                   session=None):
    """Persist and return a ContainerConsumerMetadatum for *container*."""
    if not parsed_request:
        parsed_request = {"name": "name", "URL": "URL"}

    consumer_meta = models.ContainerConsumerMetadatum(
        container_id=container.id,
        project_id=container.project_id,
        parsed_request=parsed_request,
    )
    repo = repositories.get_container_consumer_repository()
    repo.create_from(consumer_meta, session=session)
    return consumer_meta


def create_certificate_authority(project=None, parsed_ca_in=None,
                                 session=None):
    """Persist and return a CertificateAuthority for *project*."""
    if not parsed_ca_in:
        # NOTE(review): the 'expiration:' key carries a stray trailing colon
        # — presumably a typo for 'expiration', but kept as-is since fixture
        # consumers may depend on the literal key; confirm before changing.
        parsed_ca_in = {'plugin_name': 'plugin_name',
                        'plugin_ca_id': 'plugin_ca_id',
                        'expiration:': 'expiration',
                        'creator_id': 'creator_id',
                        'project_id': project.id}
    certificate_authority = models.CertificateAuthority(
        parsed_ca_in=parsed_ca_in)
    repo = repositories.get_ca_repository()
    repo.create_from(certificate_authority, session=session)
    return certificate_authority


def create_preferred_cert_authority(cert_authority, session=None):
    """Persist and return a PreferredCertificateAuthority for the CA."""
    preferred_cert_authority = models.PreferredCertificateAuthority(
        ca_id=cert_authority.id,
        project_id=cert_authority.project_id)
    repo = repositories.get_preferred_ca_repository()
    repo.create_from(preferred_cert_authority, session=session)
    return preferred_cert_authority


def create_project_cert_authority(certificate_authority=None, session=None):
    """Persist and return a ProjectCertificateAuthority for the CA."""
    project_cert_authority = models.ProjectCertificateAuthority(
        ca_id=certificate_authority.id,
        project_id=certificate_authority.project_id)
    repo = repositories.get_project_ca_repository()
    repo.create_from(project_cert_authority, session=session)
    return project_cert_authority
project_cert_repo = repositories.get_project_ca_repository() project_cert_repo.create_from(project_cert_authority, session=session) return project_cert_authority def create_project_quotas(project=None, parsed_project_quotas=None, session=None): project_quota = models.ProjectQuotas( project_id=project.id, parsed_project_quotas=parsed_project_quotas) project_quota_repo = repositories.get_project_quotas_repository() project_quota_repo.create_from(project_quota, session=session) return project_quota def create_acl_secret(secret=None, user_ids=[], session=None): acl_secret = models.SecretACL(secret.id, "read") acl_secret.secret_id = secret.id acl_secret_repo = repositories.get_secret_acl_repository() acl_secret_repo.create_from(acl_secret, session=session) return acl_secret class RepositoryTestCase(oslotest.BaseTestCase): """Base test case class for in-memory database unit tests. Database/Repository oriented unit tests should *not* modify the global state in the barbican/model/repositories.py module, as this can lead to hard to debug errors. Instead only utilize methods in this fixture. Also, database-oriented unit tests extending this class MUST NO INVOKE the repositories.start()/clear()/hard_reset() methods!*, otherwise *VERY* hard to debug 'Broken Pipe' errors could result! """ def setUp(self): super(RepositoryTestCase, self).setUp() setup_in_memory_db() # Clean up once tests are completed. self.addCleanup(in_memory_cleanup) barbican-2.0.0/barbican/tests/cmd/0000775000567000056710000000000012701406024020055 5ustar jenkinsjenkins00000000000000barbican-2.0.0/barbican/tests/cmd/test_barbican_manage.py0000664000567000056710000001541512701405673024556 0ustar jenkinsjenkins00000000000000# Copyright (c) 2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import fixtures
import mock

from barbican.cmd import barbican_manage as manager
from barbican.tests import utils


class TestBarbicanManageBase(utils.BaseTestCase):
    """Shared fixture that isolates barbican-manage CONF state per test."""

    def setUp(self):
        super(TestBarbicanManageBase, self).setUp()

        def clear_conf():
            manager.CONF.reset()
            manager.CONF.unregister_opt(manager.category_opt)
        clear_conf()
        self.addCleanup(clear_conf)

        # Silence oslo.log setup so main() can run without real logging.
        self.useFixture(fixtures.MonkeyPatch(
            'oslo_log.log.setup', lambda barbican_test, version='test': None))
        manager.CONF.set_override('sql_connection', 'mockdburl')

    def _main_test_helper(self, argv, func_name=None, *exp_args, **exp_kwargs):
        """Run manager.main() with *argv* and assert the mocked call."""
        self.useFixture(fixtures.MonkeyPatch('sys.argv', argv))
        manager.main()
        func_name.assert_called_once_with(*exp_args, **exp_kwargs)


class TestBarbicanManage(TestBarbicanManageBase):
    """Test barbican-manage functionality."""

    @mock.patch('barbican.model.migration.commands.generate')
    def test_db_revision(self, mock_generate):
        self._main_test_helper(
            ['barbican.cmd.barbican_manage', 'db', 'revision', '--db-url',
             'mockdb', '--message', 'mockmsg'], mock_generate,
            autogenerate=False, message='mockmsg', sql_url='mockdb')

    @mock.patch('barbican.model.migration.commands.generate')
    def test_db_revision_autogenerate(self, mock_generate):
        self._main_test_helper(
            ['barbican.cmd.barbican_manage', 'db', 'revision', '--db-url',
             'mockdb', '--message', 'mockmsg', '--autogenerate'],
            mock_generate, autogenerate=True, message='mockmsg',
            sql_url='mockdb')

    @mock.patch('barbican.model.migration.commands.generate')
    def test_db_revision_no_dburl(self, mock_generate):
        # Without --db-url the CONF default 'mockdburl' must be used.
        self._main_test_helper(
            ['barbican.cmd.barbican_manage', 'db', 'revision', '--message',
             'mockmsg'], mock_generate, autogenerate=False, message='mockmsg',
            sql_url='mockdburl')

    @mock.patch('barbican.model.migration.commands.upgrade')
    def test_db_upgrade(self, mock_upgrade):
        self._main_test_helper(
            ['barbican.cmd.barbican_manage', 'db', 'upgrade', '--db-url',
             'mockdb'], mock_upgrade, to_version='head', sql_url='mockdb')

    @mock.patch('barbican.model.migration.commands.upgrade')
    def test_db_upgrade_no_dburl(self, mock_upgrade):
        self._main_test_helper(
            ['barbican.cmd.barbican_manage', 'db', 'upgrade'],
            mock_upgrade, to_version='head', sql_url='mockdburl')

    @mock.patch('barbican.model.migration.commands.history')
    def test_db_history(self, mock_history):
        self._main_test_helper(
            ['barbican.cmd.barbican_manage', 'db', 'history', '--db-url',
             'mockdb'], mock_history, False, sql_url='mockdb')

    @mock.patch('barbican.model.migration.commands.history')
    def test_db_history_no_dburl(self, mock_history):
        self._main_test_helper(
            ['barbican.cmd.barbican_manage', 'db', 'history'],
            mock_history, False, sql_url='mockdburl')

    @mock.patch('barbican.model.clean.clean_command')
    def test_db_clean_no_args(self, mock_clean_command):
        manager.CONF.set_override('log_file', 'mock_log_file')
        self._main_test_helper(
            ['barbican.cmd.barbican_manage', 'db', 'clean'],
            func_name=mock_clean_command,
            sql_url='mockdburl',
            min_num_days=90,
            do_clean_unassociated_projects=False,
            do_soft_delete_expired_secrets=False,
            verbose=False,
            log_file='mock_log_file')
        manager.CONF.clear_override('log_file')

    @mock.patch('barbican.model.clean.clean_command')
    def test_db_clean_with_args(self, mock_clean_command):
        manager.CONF.set_override('log_file', 'mock_log_file')
        self._main_test_helper(
            ['barbican.cmd.barbican_manage', 'db', 'clean', '--db-url',
             'somewhere', '--min-days', '180',
             '--clean-unassociated-projects',
             '--soft-delete-expired-secrets', '--verbose', '--log-file',
             '/tmp/whatevs'],
            func_name=mock_clean_command,
            sql_url='somewhere',
            min_num_days=180,
            do_clean_unassociated_projects=True,
            do_soft_delete_expired_secrets=True,
            verbose=True,
            log_file='/tmp/whatevs')
        manager.CONF.clear_override('log_file')

    @mock.patch('barbican.model.migration.commands.current')
    def test_db_current(self, mock_current):
        self._main_test_helper(
            ['barbican.cmd.barbican_manage', 'db', 'current', '--db-url',
             'mockdb'], mock_current, False, sql_url='mockdb')

    @mock.patch('barbican.model.migration.commands.current')
    def test_db_current_no_dburl(self, mock_current):
        self._main_test_helper(
            ['barbican.cmd.barbican_manage', 'db', 'current'],
            mock_current, False, sql_url='mockdburl')

    @mock.patch('barbican.plugin.crypto.pkcs11.PKCS11')
    def test_hsm_gen_mkek(self, mock_pkcs11):
        # Plain int literals instead of the Python-2-only long(): values are
        # equal under py2 (1 == long(1)) and keep the test valid on py3.
        mock_pkcs11.return_value.get_session.return_value = 1
        mock_pkcs11.return_value.get_key_handle.return_value = None
        mock_pkcs11.return_value.generate_key.return_value = 0
        mock_genkey = mock_pkcs11.return_value.generate_key
        self._main_test_helper(
            ['barbican.cmd.barbican_manage', 'hsm', 'gen_mkek',
             '--library-path', 'mocklib', '--passphrase', 'mockpassewd',
             '--label', 'mocklabel'],
            mock_genkey, 32, 1, 'mocklabel',
            encrypt=True, wrap=True, master_key=True)

    @mock.patch('barbican.plugin.crypto.pkcs11.PKCS11')
    def test_hsm_gen_hmac(self, mock_pkcs11):
        # See test_hsm_gen_mkek for why ints are used instead of long().
        mock_pkcs11.return_value.get_session.return_value = 1
        mock_pkcs11.return_value.get_key_handle.return_value = None
        mock_pkcs11.return_value.generate_key.return_value = 0
        mock_genkey = mock_pkcs11.return_value.generate_key
        self._main_test_helper(
            ['barbican.cmd.barbican_manage', 'hsm', 'gen_hmac',
             '--library-path', 'mocklib', '--passphrase', 'mockpassewd',
             '--label', 'mocklabel'],
            mock_genkey, 32, 1, 'mocklabel', sign=True, master_key=True)
jenkinsjenkins00000000000000# Copyright (c) 2016 IBM # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from barbican.model import clean from barbican.model import models from barbican.model import repositories as repos from barbican.tests import database_utils as utils from sqlalchemy.exc import IntegrityError import datetime import mock def _create_project(project_name): """Wrapper to create a project and clean""" def project_decorator(test_func): def project_wrapper(self, *args, **kwargs): project = utils.create_project(external_id=project_name) kwargs['project'] = project test_result = test_func(self, *args, **kwargs) project.delete() return test_result return project_wrapper return project_decorator def _entry_exists(entry): """Check to see if entry should exist in the database""" model = entry.__class__ entry_id = entry.id session = repos.get_session() query = session.query(model).filter(model.id == entry_id) count = query.count() return count >= 1 def _entry_is_soft_deleted(entry): model = entry.__class__ entry_id = entry.id session = repos.get_session() query = session.query(model) result = query.filter(model.id == entry_id).first().deleted return result def _setup_entry(name, *args, **kwargs): func_name = "create_" + name if not hasattr(utils, func_name): raise Exception("Cannot create an entry called %s", name) func = getattr(utils, func_name) kwargs['session'] = repos.get_session() entry = func(*args, **kwargs) return entry class 
class WhenTestingDBCleanUpCommand(utils.RepositoryTestCase):
    """Exercises barbican.model.clean against an in-memory database."""

    def setUp(self):
        super(WhenTestingDBCleanUpCommand, self).setUp()

    def tearDown(self):
        super(WhenTestingDBCleanUpCommand, self).tearDown()
        # Discard any uncommitted changes a test left behind.
        repos.rollback()

    @_create_project("my keystone id")
    def test_soft_deleted_secret_orders(self, project):
        """Test that secrets without child order get deleted"""
        # Create a secret tied to an order and one secret that is not
        secret1 = _setup_entry('secret', project=project)
        secret2 = _setup_entry('secret', project=project)
        order = _setup_entry('order', project=project, secret=secret1)

        # Delete secrets
        secret1.delete()
        secret2.delete()
        clean.cleanup_parent_with_no_child(models.Secret, models.Order)

        # Assert that only secret2 is removed
        self.assertTrue(_entry_exists(secret1))
        self.assertFalse(_entry_exists(secret2))

        # delete order and secret
        order.delete()
        clean.cleanup_all()
        self.assertFalse(_entry_exists(order))
        self.assertFalse(_entry_exists(secret2))

    def test_cleanup_soft_deletes_transport_keys(self):
        """Test Cleaning up soft deleted transport keys"""
        # create transport key
        transport_key = _setup_entry('transport_key')

        # delete transport key
        transport_key.delete()
        clean.cleanup_all()
        self.assertFalse(_entry_exists(transport_key))

    @_create_project("my keystone id")
    def test_cleanup_soft_deletes_secrets(self, project):
        """Test cleaning up secrets and secret_meta"""
        # create secret and secret_meta
        secret = _setup_entry('secret', project=project)
        secret_metadatum = _setup_entry('secret_metadatum', secret=secret)
        secret_user_metadatum = _setup_entry('secret_user_metadatum',
                                             secret=secret)
        kek_datum = _setup_entry('kek_datum', project=project)
        enc_datum = _setup_entry('encrypted_datum', secret=secret,
                                 kek_datum=kek_datum)

        # delete secret, it should automatically delete
        # secret_metadatum, enc_datum, and secret_user_metadatum
        # kek_datum should still exist
        secret.delete()
        clean.cleanup_all()
        self.assertFalse(_entry_exists(secret))
        self.assertFalse(_entry_exists(secret_metadatum))
        self.assertFalse(_entry_exists(secret_user_metadatum))
        self.assertFalse(_entry_exists(enc_datum))
        self.assertTrue(_entry_exists(kek_datum))

    @_create_project("my keystone id")
    def test_cleanup_soft_deletes_containers(self, project):
        """Test cleaning up containers and secrets"""
        # create container, secret, and container_secret
        container = _setup_entry('container', project=project)
        secret = _setup_entry('secret', project=project)
        container_secret = _setup_entry('container_secret',
                                        container=container, secret=secret)

        # delete container secret and container
        container.delete()
        clean.cleanup_all()

        # check that container secret and container are deleted
        # but secret still exists
        self.assertFalse(_entry_exists(container_secret))
        self.assertFalse(_entry_exists(container))
        self.assertTrue(_entry_exists(secret))

        # cleanup secrets
        secret.delete()
        clean.cleanup_all()
        self.assertFalse(_entry_exists(secret))

    @_create_project("my keystone id")
    def test_cleanup_container_with_order_child(self, project):
        container = _setup_entry('container', project=project)
        secret = _setup_entry('secret', project=project)
        secret_container = _setup_entry('container_secret',
                                        container=container, secret=secret)
        order = _setup_entry('order', project=project, secret=secret,
                             container=container)

        container.delete()
        clean.cleanup_all()

        # only the secret_container should be removed from the database
        # since it is a child of the container
        self.assertFalse(_entry_exists(secret_container))
        self.assertTrue(_entry_exists(secret))
        self.assertTrue(_entry_exists(order))
        # container should still exist since child order still exists
        self.assertTrue(_entry_exists(container))

        order.delete()
        clean.cleanup_all()

        # assert that only the secret exists
        self.assertFalse(_entry_exists(order))
        self.assertFalse(_entry_exists(container))
        self.assertTrue(_entry_exists(secret))

        secret.delete()
        clean.cleanup_all()

        # the secret should now be able to be removed
        self.assertFalse(_entry_exists(secret))

    @_create_project("my clean order keystone id")
    def test_cleanup_orders(self, project):
        """Test cleaning up an order and it's children"""
        # create order, order meta, and plugin meta, and retry task
        order = _setup_entry('order', project=project)
        order_barbican_meta_data = _setup_entry('order_meta_datum',
                                                order=order)
        order_plugin_metadata = _setup_entry('order_plugin_metadatum',
                                             order=order)
        order_retry_task = _setup_entry('order_retry', order=order)

        # soft delete order and retry task,
        # it should automatically delete the children
        order.delete()
        order_retry_task.delete()
        clean.cleanup_all()

        # assert everything has been cleaned up
        self.assertFalse(_entry_exists(order))
        self.assertFalse(_entry_exists(order_plugin_metadata))
        self.assertFalse(_entry_exists(order_retry_task))
        self.assertFalse(_entry_exists(order_barbican_meta_data))

    @_create_project("my clean order with child keystone id")
    def test_cleanup_order_with_child(self, project):
        """Test cleaning up an order with a child"""
        # create order and retry task
        order = _setup_entry('order', project=project)
        order_retry_task = _setup_entry('order_retry', order=order)

        # soft delete order and retry task,
        # it should automatically delete the children
        order.delete()
        clean.cleanup_all()

        # assert that the order was not cleaned due to child
        self.assertTrue(_entry_exists(order))
        self.assertTrue(_entry_exists(order_retry_task))

        order_retry_task.delete()
        clean.cleanup_all()

        # assert everything has been cleaned up
        self.assertFalse(_entry_exists(order))
        self.assertFalse(_entry_exists(order_retry_task))

    @_create_project("my keystone id")
    def test_cleanup_soft_deletion_date(self, project):
        """Test cleaning up entries within date"""
        secret = _setup_entry('secret', project=project)
        # (was "order = order = ..." — a duplicated assignment)
        order = _setup_entry('order', project=project, secret=secret)
        current_time = datetime.datetime.utcnow()
        tomorrow = current_time + datetime.timedelta(days=1)
        yesterday = current_time - datetime.timedelta(days=1)
        secret.delete()
        order.delete()

        # Assert that nothing is deleted due to date
        clean.cleanup_softdeletes(models.Order, threshold_date=yesterday)
        clean.cleanup_parent_with_no_child(models.Secret, models.Order,
                                           threshold_date=yesterday)
        self.assertTrue(_entry_exists(secret))
        self.assertTrue(_entry_exists(order))

        # Assert that everything is deleted due to date
        clean.cleanup_softdeletes(models.Order, threshold_date=tomorrow)
        clean.cleanup_parent_with_no_child(models.Secret, models.Order,
                                           threshold_date=tomorrow)
        self.assertFalse(_entry_exists(secret))
        self.assertFalse(_entry_exists(order))

    @_create_project("my keystone id")
    def test_soft_deleting_expired_secrets(self, project):
        """Test soft deleting secrets that are expired"""
        current_time = datetime.datetime.utcnow()
        tomorrow = current_time + datetime.timedelta(days=1)
        yesterday = current_time - datetime.timedelta(days=1)

        not_expired_secret = _setup_entry('secret', project=project)
        expired_secret = _setup_entry('secret', project=project)
        not_expired_secret.expiration = tomorrow
        expired_secret.expiration = yesterday

        # Create children for expired secret
        expired_secret_store_metadatum = _setup_entry('secret_metadatum',
                                                      secret=expired_secret)
        expired_secret_user_metadatum = _setup_entry('secret_user_metadatum',
                                                     secret=expired_secret)
        kek_datum = _setup_entry('kek_datum', project=project)
        expired_enc_datum = _setup_entry('encrypted_datum',
                                         secret=expired_secret,
                                         kek_datum=kek_datum)
        container = _setup_entry('container', project=project)
        expired_container_secret = _setup_entry('container_secret',
                                                container=container,
                                                secret=expired_secret)
        expired_acl_secret = _setup_entry('acl_secret', secret=expired_secret,
                                          user_ids=["fern", "chris"])

        clean.soft_delete_expired_secrets(current_time)
        self.assertTrue(_entry_is_soft_deleted(expired_secret))
        self.assertFalse(_entry_is_soft_deleted(not_expired_secret))

        # Make sure the children of the expired secret are soft deleted as
        # well; ACLs are hard deleted rather than soft deleted.
        self.assertTrue(_entry_is_soft_deleted(expired_enc_datum))
        self.assertTrue(_entry_is_soft_deleted(expired_container_secret))
        self.assertTrue(
            _entry_is_soft_deleted(expired_secret_store_metadatum))
        self.assertTrue(
            _entry_is_soft_deleted(expired_secret_user_metadatum))
        self.assertFalse(_entry_exists(expired_acl_secret))

    def test_cleaning_unassociated_projects(self):
        """Test cleaning projects that have no child entries"""
        childless_project = _setup_entry('project',
                                         external_id="childless project")
        project_with_children = _setup_entry(
            'project', external_id="project with children")

        project_children_list = list()
        project_children_list.append(
            _setup_entry('kek_datum', project=project_with_children))
        project_children_list.append(
            _setup_entry('secret', project=project_with_children))

        container = _setup_entry('container', project=project_with_children)
        project_children_list.append(container)
        project_children_list.append(
            _setup_entry('container_consumer_meta', container=container))

        cert_authority = _setup_entry('certificate_authority',
                                      project=project_with_children)
        project_children_list.append(cert_authority)
        project_children_list.append(
            _setup_entry('preferred_cert_authority',
                         cert_authority=cert_authority))
        project_children_list.append(
            _setup_entry('project_cert_authority',
                         certificate_authority=cert_authority))
        project_children_list.append(
            _setup_entry('project_quotas', project=project_with_children))

        clean.cleanup_unassociated_projects()
        self.assertTrue(_entry_exists(project_with_children))
        self.assertFalse(_entry_exists(childless_project))

        container.delete()
        # Use an explicit loop: map() is lazy on Python 3, so the deletes
        # would silently never run there.
        for child in project_children_list:
            child.delete()
        clean.cleanup_all()
        clean.cleanup_unassociated_projects()
        self.assertFalse(_entry_exists(project_with_children))

    @mock.patch('barbican.model.clean.cleanup_all')
    @mock.patch('barbican.model.clean.soft_delete_expired_secrets')
    @mock.patch('barbican.model.clean.cleanup_unassociated_projects')
    @mock.patch('barbican.model.clean.repo')
    @mock.patch('barbican.model.clean.log')
    @mock.patch('barbican.model.clean.CONF')
    def test_clean_up_command(self, mock_conf, mock_log, mock_repo,
                              mock_clean_unc_projects,
                              mock_soft_del_expire_secrets, mock_clean_all):
        """Tests the clean command"""
        test_sql_url = \
            "mysql+pymysql://notrealuser:datab@127.0.0.1/barbican't"
        min_num_days = 91
        do_clean_unassociated_projects = True
        do_soft_delete_expired_secrets = True
        verbose = True
        test_log_file = "/tmp/sometempfile"
        clean.clean_command(test_sql_url, min_num_days,
                            do_clean_unassociated_projects,
                            do_soft_delete_expired_secrets, verbose,
                            test_log_file)
        set_calls = [mock.call('debug', True),
                     mock.call('log_file', test_log_file),
                     mock.call('sql_connection', test_sql_url)]
        mock_conf.set_override.assert_has_calls(set_calls)
        clear_calls = [mock.call('debug'), mock.call('log_file'),
                       mock.call('sql_connection')]
        mock_conf.clear_override.assert_has_calls(clear_calls)

        self.assertTrue(mock_repo.setup_database_engine_and_factory.called)
        self.assertTrue(mock_repo.commit.called)
        self.assertTrue(mock_repo.clear.called)
        self.assertTrue(mock_clean_unc_projects.called)
        # Assert on .called — the previous assertions passed the Mock object
        # itself, which is always truthy, so they could never fail.
        self.assertTrue(mock_soft_del_expire_secrets.called)
        self.assertTrue(mock_clean_all.called)

    @mock.patch('barbican.model.clean.cleanup_all')
    @mock.patch('barbican.model.clean.soft_delete_expired_secrets')
    @mock.patch('barbican.model.clean.cleanup_unassociated_projects')
    @mock.patch('barbican.model.clean.repo')
    @mock.patch('barbican.model.clean.log')
    @mock.patch('barbican.model.clean.CONF')
    def test_clean_up_command_with_false_args(
            self, mock_conf, mock_log, mock_repo, mock_clean_unc_projects,
            mock_soft_del_expire_secrets, mock_clean_all):
        """Tests the clean command with false args"""
        test_sql_url = None
        min_num_days = -1
        do_clean_unassociated_projects = False
        do_soft_delete_expired_secrets = False
        verbose = None
        test_log_file = None
        clean.clean_command(test_sql_url, min_num_days,
                            do_clean_unassociated_projects,
                            do_soft_delete_expired_secrets, verbose,
                            test_log_file)
        mock_conf.set_override.assert_not_called()
        mock_conf.clear_override.assert_not_called()
        self.assertTrue(mock_repo.setup_database_engine_and_factory.called)
        self.assertTrue(mock_repo.commit.called)
        self.assertTrue(mock_repo.clear.called)
        # cleanup_all always runs; check .called (a bare Mock is always
        # truthy, so the previous assertion was vacuous).
        self.assertTrue(mock_clean_all.called)
        self.assertFalse(mock_clean_unc_projects.called)
        self.assertFalse(mock_soft_del_expire_secrets.called)

    @mock.patch('barbican.model.clean.cleanup_all',
                side_effect=IntegrityError("", "", "", ""))
    @mock.patch('barbican.model.clean.repo')
    @mock.patch('barbican.model.clean.log')
    @mock.patch('barbican.model.clean.CONF')
    def test_clean_up_command_with_exception(
            self, mock_conf, mock_log, mock_repo, mock_clean_all):
        """Tests that the clean command throws exceptions"""
        args = ("sql", 2, False, False, False, "/tmp/nope")
        self.assertRaises(IntegrityError, clean.clean_command, *args)
        self.assertTrue(mock_repo.rollback.called)

    @_create_project("my integrity error keystone id")
    def test_db_cleanup_raise_integrity_error(self, project):
        """Test that an integrity error is thrown

        This test tests the invalid scenario where
        the secret meta was not marked for deletion during the secret
        deletion. We want to make sure an integrity error is thrown
        during clean up.
        """
        # create secret
        secret = _setup_entry('secret', project=project)
        secret_metadatum = _setup_entry('secret_metadatum', secret=secret)

        # delete parent but not child and assert integrity error
        secret.deleted = True
        secret_metadatum.deleted = False

        self.assertRaises(IntegrityError, clean.cleanup_all)
# See the License for the specific language governing permissions and # limitations under the License. import mock from barbican.cmd import retry_scheduler from barbican.cmd import worker from barbican.tests.queue import test_keystone_listener from barbican.tests import utils class WhenInvokingRetryServiceCommand(utils.BaseTestCase): """Test the retry scheduler functionality.""" def setUp(self): super(WhenInvokingRetryServiceCommand, self).setUp() @mock.patch('barbican.common.config') @mock.patch('barbican.queue.init') @mock.patch('oslo_service.service.launch') @mock.patch('barbican.queue.retry_scheduler.PeriodicServer') def test_should_launch_service( self, mock_periodic_server, mock_service_launch, mock_queue_init, mock_config): retry_scheduler.main() self.assertEqual(1, mock_queue_init.call_count) self.assertEqual(1, mock_service_launch.call_count) self.assertEqual(1, mock_periodic_server.call_count) @mock.patch('oslo_log.log.setup') @mock.patch('sys.exit') def test_should_fail_run_command( self, mock_sys_exit, mock_log_setup): mock_log_setup.side_effect = RuntimeError() retry_scheduler.main() self.assertEqual(1, mock_sys_exit.call_count) class WhenInvokingWorkerCommand(test_keystone_listener.UtilMixin, utils.BaseTestCase): """Test the asynchronous worker functionality.""" def setUp(self): super(WhenInvokingWorkerCommand, self).setUp() @mock.patch('barbican.queue.init') @mock.patch('barbican.queue.get_server') @mock.patch('oslo_service.service.launch') def test_should_launch_service( self, mock_service_launch, mock_queue_task_server, mock_queue_init): self.opt_in_group('queue', asynchronous_workers=3) worker.main() self.assertEqual(1, mock_queue_init.call_count) self.assertEqual(1, mock_service_launch.call_count) # check keyword argument for number of worker matches workers_kwarg = {'workers': 3} self.assertEqual(workers_kwarg, mock_service_launch.call_args[1]) barbican-2.0.0/barbican/tests/tasks/0000775000567000056710000000000012701406024020437 5ustar 
jenkinsjenkins00000000000000barbican-2.0.0/barbican/tests/tasks/test_common.py0000664000567000056710000000334012701405673023351 0ustar jenkinsjenkins00000000000000# Copyright (c) 2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from barbican import i18n as u from barbican.tasks import common from barbican.tests import utils class WhenUsingFollowOnProcessingStatusDTO(utils.BaseTestCase): """Test using the :class:`WhenUsingFollowOnProcessingStatusDTO` class.""" def setUp(self): super(WhenUsingFollowOnProcessingStatusDTO, self).setUp() self.target = common.FollowOnProcessingStatusDTO() def test_should_have_expected_defaults(self): self.assertEqual( common.RetryTasks.NO_ACTION_REQUIRED, self.target.retry_task) self.assertEqual(u._('Unknown'), self.target.status) self.assertEqual(u._('Unknown'), self.target.status_message) self.assertEqual(common.RETRY_MSEC_DEFAULT, self.target.retry_msec) self.assertFalse(self.target.is_follow_on_needed()) def test_should_indicate_no_follow_on_with_no_retry_task(self): self.target.retry_task = None self.assertFalse(self.target.is_follow_on_needed()) def test_should_indicate_follow_on_when_retry_task_provided(self): self.target.retry_task = common.RetryTasks.INVOKE_SAME_TASK self.assertTrue(self.target.is_follow_on_needed()) barbican-2.0.0/barbican/tests/tasks/__init__.py0000664000567000056710000000000012701405673022547 0ustar 
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import uuid

import mock
import sqlalchemy

from barbican.common import exception
from barbican.common import resources as c_resources
from barbican.model import models
from barbican.model import repositories as rep
from barbican.plugin.crypto import manager
from barbican.plugin import resources as plugin
from barbican.tasks import keystone_consumer as consumer
from barbican.tests import database_utils


class InitializeDatabaseMixin(object):
    """Mixin that seeds two projects and can generate secrets for them."""

    def _init_memory_db_setup(self):
        # Force a refresh of the singleton plugin manager for each test.
        manager._PLUGIN_MANAGER = None
        manager.CONF.set_override('enabled_crypto_plugins',
                                  ['simple_crypto'],
                                  group='crypto',
                                  enforce_type=True)

        self.project_id1 = uuid.uuid4().hex
        self.project_id2 = uuid.uuid4().hex

        self.project1_data = c_resources.get_or_create_project(
            self.project_id1)
        self.assertIsNotNone(self.project1_data)

        self.project2_data = c_resources.get_or_create_project(
            self.project_id2)
        self.assertIsNotNone(self.project2_data)

    def _create_secret_for_project(self, project_data):
        # Generate a random-named AES-256-CBC secret for the given project.
        secret_info = {"name": uuid.uuid4().hex,
                       "algorithm": "aes",
                       "bit_length": 256,
                       "mode": "cbc",
                       "payload_content_type": "application/octet-stream"}
        return plugin.generate_secret(
            secret_info, secret_info.get('payload_content_type'),
            project_data)
""" def setUp(self): super(WhenUsingKeystoneEventConsumer, self).setUp() self.kek_repo = rep.get_kek_datum_repository() self.project_repo = rep.get_project_repository() self.secret_meta_repo = rep.get_secret_meta_repository() self.secret_repo = rep.get_secret_repository() self.transport_key_repo = rep.get_transport_key_repository() def test_get_project_entities_lookup_call(self): self._init_memory_db_setup() secret = self._create_secret_for_project(self.project2_data) project2_id = self.project2_data.id self.assertIsNotNone(secret) db_secrets = self.secret_repo.get_project_entities(project2_id) self.assertEqual(1, len(db_secrets)) self.assertEqual(secret.id, db_secrets[0].id) db_kek = self.kek_repo.get_project_entities(project2_id) self.assertEqual(1, len(db_kek)) # secret_meta_repo does not implement function # _build_get_project_entities_query, so it should raise error self.assertRaises(NotImplementedError, self.secret_meta_repo.get_project_entities, project2_id) # transport_key_repo does not implement function # _build_get_project_entities_query, so it should raise error self.assertRaises(NotImplementedError, self.transport_key_repo.get_project_entities, project2_id) @mock.patch.object(models.Project, 'delete', side_effect=sqlalchemy.exc.SQLAlchemyError) def test_delete_project_entities_alchemy_error_suppress_exception_true( self, mock_entity_delete): self._init_memory_db_setup() secret = self._create_secret_for_project(self.project1_data) self.assertIsNotNone(secret) project1_id = self.project1_data.id # sqlalchemy error is suppressed here no_error = self.project_repo.delete_project_entities( project1_id, suppress_exception=True) self.assertIsNone(no_error) @mock.patch.object(models.Project, 'delete', side_effect=sqlalchemy.exc.SQLAlchemyError) def test_delete_project_entities_alchemy_error_suppress_exception_false( self, mock_entity_delete): self._init_memory_db_setup() secret = self._create_secret_for_project(self.project1_data) self.assertIsNotNone(secret) 
project1_id = self.project1_data.id # sqlalchemy error is not suppressed here self.assertRaises(exception.BarbicanException, self.project_repo.delete_project_entities, project1_id, suppress_exception=False) def test_delete_project_entities_not_impl_error_suppress_exception_true( self): self._init_memory_db_setup() secret = self._create_secret_for_project(self.project1_data) self.assertIsNotNone(secret) project1_id = self.project1_data.id # NotImplementedError is not suppressed regardless of related flag self.assertRaises(NotImplementedError, self.secret_meta_repo.delete_project_entities, project1_id, suppress_exception=True) def test_delete_project_entities_not_impl_error_suppress_exception_false( self): self._init_memory_db_setup() secret = self._create_secret_for_project(self.project1_data) self.assertIsNotNone(secret) project1_id = self.project1_data.id # NotImplementedError is not suppressed regardless of related flag self.assertRaises(NotImplementedError, self.secret_meta_repo.delete_project_entities, project1_id, suppress_exception=False) def test_invoke_handle_error(self): task = consumer.KeystoneEventConsumer() project = mock.MagicMock() project.project_id = 'project_id' status = 'status' message = 'message' exception_test = ValueError('Abort!') resource_type = 'type' operation_type = 'operation' task.handle_error( project, status, message, exception_test, project_id=None, resource_type=resource_type, operation_type=operation_type) class WhenUsingKeystoneEventConsumerProcessMethod( database_utils.RepositoryTestCase, InitializeDatabaseMixin): """Test only the process() method on KeystoneEventConsumer class. For unit testing all but the process() method, use the WhenUsingKeystoneEventConsumer class. """ def setUp(self): super(WhenUsingKeystoneEventConsumerProcessMethod, self).setUp() # Override the database start function as repositories.start() is # already invoked by the RepositoryTestCase base class setUp(). # Similarly, override the clear function. 
self.task = consumer.KeystoneEventConsumer( db_start=mock.MagicMock(), db_clear=mock.MagicMock() ) def test_project_entities_cleanup_for_no_matching_barbican_project(self): self._init_memory_db_setup() result = self.task.process(project_id=self.project_id1, resource_type='project', operation_type='deleted') self.assertIsNone(result, 'No return is expected as result') def test_project_entities_cleanup_for_missing_barbican_project(self): self._init_memory_db_setup() result = self.task.process(project_id=None, resource_type='project', operation_type='deleted') self.assertIsNone(result, 'No return is expected as result') @mock.patch.object(consumer.KeystoneEventConsumer, 'handle_success') def test_existing_project_entities_cleanup_for_plain_secret( self, mock_handle_success): self._init_memory_db_setup() secret = self._create_secret_for_project(self.project1_data) self.assertIsNotNone(secret) secret_id = secret.id project1_id = self.project1_data.id secret_repo = rep.get_secret_repository() db_secrets = secret_repo.get_project_entities(project1_id) self.assertEqual(1, len(db_secrets)) self.assertEqual(secret.id, db_secrets[0].id) # Get secret_store_metadata for related secret self.assertGreater(len(db_secrets[0].secret_store_metadata), 0) secret_metadata_id = list(db_secrets[0]. 
secret_store_metadata.values())[0].id self.assertIsNotNone(secret_metadata_id) # Get db entry for secret_store_metadata by id to make sure its # presence before removing via delete project task secret_meta_repo = rep.get_secret_meta_repository() db_secret_store_meta = secret_meta_repo.get( entity_id=secret_metadata_id) self.assertIsNotNone(db_secret_store_meta) kek_repo = rep.get_kek_datum_repository() db_kek = kek_repo.get_project_entities(project1_id) self.assertEqual(1, len(db_kek)) # task = consumer.KeystoneEventConsumer() result = self.task.process(project_id=self.project_id1, resource_type='project', operation_type='deleted') self.assertIsNone(result, 'No return is expected as result') mock_handle_success.assert_has_calls([]) _, kwargs = mock_handle_success.call_args self.assertEqual(self.project_id1, kwargs['project_id']) self.assertEqual('project', kwargs['resource_type']) self.assertEqual('deleted', kwargs['operation_type']) # After project entities delete, make sure secret is not found ex = self.assertRaises(exception.NotFound, secret_repo.get, entity_id=secret_id, external_project_id=self.project_id1) self.assertIn(secret_id, str(ex)) # After project entities delete, make sure kek data is not found entities = kek_repo.get_project_entities(project1_id) self.assertEqual(0, len(entities)) project_repo = rep.get_project_repository() db_project = project_repo.get_project_entities(project1_id) self.assertEqual(0, len(db_project)) # Should have deleted SecretStoreMetadatum via children delete self.assertRaises(exception.NotFound, secret_meta_repo.get, entity_id=secret_metadata_id) @mock.patch.object(consumer.KeystoneEventConsumer, 'handle_error') @mock.patch.object(rep.ProjectRepo, 'delete_project_entities', side_effect=exception.BarbicanException) def test_rollback_with_error_during_project_cleanup(self, mock_delete, mock_handle_error): self._init_memory_db_setup() secret = self._create_secret_for_project(self.project1_data) self.assertIsNotNone(secret) 
secret_id = secret.id project1_id = self.project1_data.id secret_repo = rep.get_secret_repository() db_secrets = secret_repo.get_project_entities(project1_id) self.assertEqual(1, len(db_secrets)) self.assertEqual(secret.id, db_secrets[0].id) kek_repo = rep.get_kek_datum_repository() db_kek = kek_repo.get_project_entities(project1_id) self.assertEqual(1, len(db_kek)) # Commit changes made so far before creating rollback scenario rep.commit() handle_error_mock = mock.MagicMock() self.task.handler_error = handle_error_mock self.assertRaises(exception.BarbicanException, self.task.process, project_id=self.project_id1, resource_type='project', operation_type='deleted') mock_handle_error.assert_called_once_with( self.project1_data, 500, mock.ANY, mock.ANY, operation_type='deleted', project_id=mock.ANY, resource_type='project', ) args, kwargs = mock_handle_error.call_args self.assertEqual(500, args[1]) self.assertEqual(self.project_id1, kwargs['project_id']) self.assertEqual('project', kwargs['resource_type']) self.assertEqual('deleted', kwargs['operation_type']) # Make sure entities are still present after rollback db_secrets = secret_repo.get_project_entities(project1_id) self.assertEqual(1, len(db_secrets)) self.assertEqual(secret_id, db_secrets[0].id) db_kek = kek_repo.get_project_entities(project1_id) self.assertEqual(1, len(db_kek)) project_repo = rep.get_project_repository() db_project = project_repo.get_project_entities(project1_id) self.assertEqual(1, len(db_project)) barbican-2.0.0/barbican/tests/tasks/test_resources.py0000664000567000056710000004651412701405673024105 0ustar jenkinsjenkins00000000000000# Copyright (c) 2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import mock from oslo_utils import timeutils import six from barbican import i18n as u from barbican.model import models from barbican.tasks import common from barbican.tasks import resources from barbican.tests import utils class BaseOrderTestCase(utils.BaseTestCase, utils.MockModelRepositoryMixin): def setUp(self): super(BaseOrderTestCase, self).setUp() self.requestor = 'requestor1234' self.order = models.Order() self.order.id = "id1" self.order.requestor = self.requestor self.order.type = "key" self.meta = {'name': 'name', 'payload_content_type': 'application/octet-stream', 'algorithm': 'AES', 'bit_length': 256, 'expiration': timeutils.utcnow(), 'mode': 'CBC'} self.order.meta = self.meta self.external_project_id = 'keystone1234' self.project_id = 'projectid1234' self.project = models.Project() self.project.id = self.project_id self.project.external_id = self.external_project_id self.project_repo = mock.MagicMock() self.project_repo.get.return_value = self.project self.setup_project_repository_mock(self.project_repo) self.order.status = models.States.PENDING self.order.id = 'orderid1234' self.order.project_id = self.project_id self.order_repo = mock.MagicMock() self.order_repo.get.return_value = self.order self.setup_order_repository_mock(self.order_repo) self.setup_order_plugin_meta_repository_mock() self.setup_order_barbican_meta_repository_mock() self.secret = models.Secret() self.secret_repo = mock.MagicMock() self.secret_repo.create_from.return_value = None self.setup_secret_repository_mock(self.secret_repo) self.datum_repo = mock.MagicMock() 
self.datum_repo.create_from.return_value = None self.setup_encrypted_datum_repository_mock(self.datum_repo) self.setup_kek_datum_repository_mock() self.setup_secret_meta_repository_mock() self.container_repo = mock.MagicMock() self.container_repo.create_from.return_value = None self.setup_container_repository_mock(self.container_repo) self.container_secret_repo = mock.MagicMock() self.container_secret_repo.create_from.return_value = None self.setup_container_secret_repository_mock(self.container_secret_repo) self.container = models.Container() class WhenUsingOrderTaskHelper(BaseOrderTestCase): def setUp(self): super(WhenUsingOrderTaskHelper, self).setUp() self.result = common.FollowOnProcessingStatusDTO() self.helper = resources._OrderTaskHelper() def test_should_retrieve_entity(self): order_model = self.helper.retrieve_entity( self.order.id, self.external_project_id) self.assertEqual(self.order.id, order_model.id) self.order_repo.get.assert_called_once_with( entity_id=self.order.id, external_project_id=self.external_project_id) def test_should_handle_error(self): self.helper.handle_error(self.order, 'status_code', 'reason', ValueError()) self.assertEqual(models.States.ERROR, self.order.status) self.assertEqual('status_code', self.order.error_status_code) self.assertEqual('reason', self.order.error_reason) self.order_repo.save.assert_called_once_with(self.order) def test_should_handle_success_no_result(self): self.helper.handle_success(self.order, None) self.assertEqual(models.States.ACTIVE, self.order.status) self.assertIsNone(self.order.sub_status) self.assertIsNone(self.order.sub_status_message) self.order_repo.save.assert_called_once_with(self.order) def test_should_handle_success_result_no_follow_on_needed(self): self.helper.handle_success(self.order, self.result) self.assertEqual(models.States.ACTIVE, self.order.status) self.assertEqual('Unknown', self.order.sub_status) self.assertEqual('Unknown', self.order.sub_status_message) 
self.order_repo.save.assert_called_once_with(self.order) def test_should_handle_success_result_follow_on_needed(self): self.result.retry_task = common.RetryTasks.INVOKE_SAME_TASK self.result.status = 'status' self.result.status_message = 'status_message' self.helper.handle_success(self.order, self.result) self.assertEqual(models.States.PENDING, self.order.status) self.assertEqual('status', self.order.sub_status) self.assertEqual('status_message', self.order.sub_status_message) self.order_repo.save.assert_called_once_with(self.order) def test_should_handle_success_result_large_statuses_clipped(self): sub_status = 'z' * (models.SUB_STATUS_LENGTH + 1) sub_status_message = 'z' * (models.SUB_STATUS_MESSAGE_LENGTH + 1) self.result.status = sub_status self.result.status_message = sub_status_message self.helper.handle_success(self.order, self.result) self.assertEqual(sub_status[:-1], self.order.sub_status) self.assertEqual( sub_status_message[:-1], self.order.sub_status_message) self.order_repo.save.assert_called_once_with(self.order) class WhenBeginningKeyTypeOrder(BaseOrderTestCase): def setUp(self): super(WhenBeginningKeyTypeOrder, self).setUp() self.resource = resources.BeginTypeOrder() @mock.patch('barbican.plugin.resources.generate_secret') def test_should_process_key_order(self, mock_generate_secret): mock_generate_secret.return_value = self.secret self.resource.process(self.order.id, self.external_project_id) self.order_repo.get.assert_called_once_with( entity_id=self.order.id, external_project_id=self.external_project_id) self.assertEqual(self.order.status, models.States.ACTIVE) secret_info = self.order.to_dict_fields()['meta'] mock_generate_secret.assert_called_once_with( secret_info, secret_info.get('payload_content_type', 'application/octet-stream'), self.project ) def test_should_fail_during_retrieval(self): # Force an error during the order retrieval phase. 
self.order_repo.get = mock.MagicMock(return_value=None, side_effect=ValueError()) self.assertRaises( ValueError, self.resource.process, self.order.id, self.external_project_id, ) # Order state doesn't change because can't retrieve it to change it. self.assertEqual(models.States.PENDING, self.order.status) def test_should_fail_during_processing(self): # Force an error during the processing handler phase. self.project_repo.get = mock.MagicMock(return_value=None, side_effect=ValueError()) self.assertRaises( ValueError, self.resource.process, self.order.id, self.external_project_id, ) self.assertEqual(models.States.ERROR, self.order.status) self.assertEqual(500, self.order.error_status_code) self.assertEqual(u._('Process TypeOrder failure seen - please contact ' 'site administrator.'), self.order.error_reason) @mock.patch('barbican.plugin.resources.generate_secret') def test_should_fail_during_success_report_fail(self, mock_generate_secret): mock_generate_secret.return_value = self.secret # Force an error during the processing handler phase. self.order_repo.save = mock.MagicMock(return_value=None, side_effect=ValueError()) self.assertRaises( ValueError, self.resource.process, self.order.id, self.external_project_id, ) def test_should_fail_during_error_report_fail(self): # Force an error during the error-report handling after # error in processing handler phase. # Force an error during the processing handler phase. self.project_repo.get = mock.MagicMock(return_value=None, side_effect=TypeError()) # Force exception in the error-reporting phase. self.order_repo.save = mock.MagicMock(return_value=None, side_effect=ValueError()) # Should see the original exception (TypeError) instead of the # secondary one (ValueError). 
self.assertRaises( TypeError, self.resource.process, self.order.id, self.external_project_id, ) self.project_repo.get.assert_called_once_with(self.project_id) self.order_repo.save.assert_called_once_with(self.order) class WhenBeginningCertificateTypeOrder(BaseOrderTestCase): def setUp(self): super(WhenBeginningCertificateTypeOrder, self).setUp() self.order.type = models.OrderType.CERTIFICATE self.resource = resources.BeginTypeOrder() @mock.patch( 'barbican.tasks.certificate_resources.issue_certificate_request') def test_should_process_order_no_container( self, mock_issue_cert_request): mock_issue_cert_request.return_value = None result = self.resource.process_and_suppress_exceptions( self.order.id, self.external_project_id) self.order_repo.get.assert_called_once_with( entity_id=self.order.id, external_project_id=self.external_project_id) self.assertEqual(self.order.status, models.States.ACTIVE) mock_issue_cert_request.assert_called_once_with( self.order, self.project, mock.ANY ) self.assertIsNone(self.order.container_id) self.assertIsInstance(result, common.FollowOnProcessingStatusDTO) @mock.patch( 'barbican.tasks.certificate_resources.issue_certificate_request') def test_should_process_order_with_container( self, mock_issue_cert_request): mock_issue_cert_request.return_value = self.container result = self.resource.process( self.order.id, self.external_project_id) self.order_repo.get.assert_called_once_with( entity_id=self.order.id, external_project_id=self.external_project_id) self.assertEqual(self.order.status, models.States.ACTIVE) mock_issue_cert_request.assert_called_once_with( self.order, self.project, mock.ANY ) self.assertEqual(self.container.id, self.order.container_id) self.assertIsInstance(result, common.FollowOnProcessingStatusDTO) class WhenUpdatingOrder(BaseOrderTestCase): def setUp(self): super(WhenUpdatingOrder, self).setUp() self.updated_meta = 'updated' self.resource = resources.UpdateOrder() @mock.patch( 
'barbican.tasks.certificate_resources.modify_certificate_request') def test_should_update_certificate_order(self, mock_modify_cert_request): self.order.type = models.OrderType.CERTIFICATE self.resource.process_and_suppress_exceptions( self.order.id, self.external_project_id, self.updated_meta) self.assertEqual(self.order.status, models.States.ACTIVE) mock_modify_cert_request.assert_called_once_with( self.order, self.updated_meta ) @mock.patch( 'barbican.tasks.certificate_resources.modify_certificate_request') def test_should_fail_during_processing(self, mock_mod_cert): mock_mod_cert.side_effect = ValueError('Abort!') self.order.type = models.OrderType.CERTIFICATE exception = self.assertRaises( ValueError, self.resource.process, self.order_id, self.external_project_id, self.meta ) self.assertEqual('Abort!', six.text_type(exception)) mock_mod_cert.assert_called_once_with(self.order, self.meta) self.assertEqual(models.States.ERROR, self.order.status) self.assertEqual(500, self.order.error_status_code) self.assertEqual(u._('Update Order failure seen - please contact ' 'site administrator.'), self.order.error_reason) class WhenBeginningAsymmetricTypeOrder(BaseOrderTestCase): def setUp(self): super(WhenBeginningAsymmetricTypeOrder, self).setUp() self.order.type = "asymmetric" self.resource = resources.BeginTypeOrder() @mock.patch('barbican.plugin.resources.generate_asymmetric_secret') def test_should_process_asymmetric_order(self, mock_generate_asymmetric_secret): mock_generate_asymmetric_secret.return_value = self.container self.resource.process(self.order.id, self.external_project_id) self.order_repo.get.assert_called_once_with( entity_id=self.order.id, external_project_id=self.external_project_id) self.assertEqual(self.order.status, models.States.ACTIVE) secret_info = self.order.to_dict_fields()['meta'] mock_generate_asymmetric_secret.assert_called_once_with( secret_info, secret_info.get('payload_content_type', 'application/octet-stream'), self.project ) def 
test_should_fail_during_retrieval(self): # Force an error during the order retrieval phase. self.order_repo.get = mock.MagicMock(return_value=None, side_effect=ValueError()) self.assertRaises( ValueError, self.resource.process, self.order.id, self.external_project_id, ) # Order state doesn't change because can't retrieve it to change it. self.assertEqual(models.States.PENDING, self.order.status) def test_should_fail_during_processing(self): # Force an error during the processing handler phase. self.project_repo.get = mock.MagicMock(return_value=None, side_effect=ValueError()) self.assertRaises( ValueError, self.resource.process, self.order.id, self.external_project_id, ) self.assertEqual(models.States.ERROR, self.order.status) self.assertEqual(500, self.order.error_status_code) self.assertEqual(u._('Process TypeOrder failure seen - please contact ' 'site administrator.'), self.order.error_reason) @mock.patch('barbican.plugin.resources.generate_asymmetric_secret') def test_should_fail_during_success_report_fail(self, mock_generate_asym_secret): mock_generate_asym_secret.return_value = self.container # Force an error during the processing handler phase. self.order_repo.save = mock.MagicMock(return_value=None, side_effect=ValueError()) self.assertRaises( ValueError, self.resource.process, self.order.id, self.external_project_id, ) def test_should_fail_during_error_report_fail(self): # Force an error during the error-report handling after # error in processing handler phase. # Force an error during the processing handler phase. self.project_repo.get = mock.MagicMock(return_value=None, side_effect=TypeError()) # Force exception in the error-reporting phase. self.order_repo.save = mock.MagicMock(return_value=None, side_effect=ValueError()) # Should see the original exception (TypeError) instead of the # secondary one (ValueError). 
self.assertRaises( TypeError, self.resource.process, self.order.id, self.external_project_id, ) self.project_repo.get.assert_called_once_with(self.project_id) self.order_repo.save.assert_called_once_with(self.order) class WhenCheckingCertificateStatus(BaseOrderTestCase): def setUp(self): super(WhenCheckingCertificateStatus, self).setUp() self.order.type = models.OrderType.CERTIFICATE self.resource = resources.CheckCertificateStatusOrder() @mock.patch( 'barbican.tasks.certificate_resources.check_certificate_request') def test_should_process_order_no_container( self, mock_check_cert_request): mock_check_cert_request.return_value = None result = self.resource.process_and_suppress_exceptions( self.order.id, self.external_project_id) self.order_repo.get.assert_called_once_with( entity_id=self.order.id, external_project_id=self.external_project_id) self.assertEqual(self.order.status, models.States.ACTIVE) mock_check_cert_request.assert_called_once_with( self.order, self.project, mock.ANY ) self.assertIsNone(self.order.container_id) self.assertIsInstance(result, common.FollowOnProcessingStatusDTO) @mock.patch( 'barbican.tasks.certificate_resources.check_certificate_request') def test_should_process_order_with_container( self, mock_check_cert_request): mock_check_cert_request.return_value = self.container self.resource.process(self.order.id, self.external_project_id) self.order_repo.get.assert_called_once_with( entity_id=self.order.id, external_project_id=self.external_project_id) self.assertEqual(self.order.status, models.States.ACTIVE) mock_check_cert_request.assert_called_once_with( self.order, self.project, mock.ANY ) self.assertEqual(self.container.id, self.order.container_id) def test_should_fail_with_bogus_order_type(self): self.order.type = 'bogus-type' self.assertRaises( NotImplementedError, self.resource.process, self.order.id, self.external_project_id, ) # Order state should be set to ERROR. 
self.assertEqual(models.States.ERROR, self.order.status) self.assertEqual( six.u('Check Certificate Order Status failure seen - ' 'please contact site administrator.'), self.order.error_reason) self.assertEqual(500, self.order.error_status_code) barbican-2.0.0/barbican/tests/tasks/test_certificate_resources.py0000664000567000056710000011674512701405673026453 0ustar jenkinsjenkins00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import base64 import datetime from Crypto.PublicKey import RSA import mock from OpenSSL import crypto from barbican.common import exception as excep from barbican.common import hrefs from barbican.common import resources as res from barbican.model import models from barbican.model import repositories from barbican.plugin.interface import certificate_manager as cert_man from barbican.plugin.interface import secret_store from barbican.tasks import certificate_resources as cert_res from barbican.tasks import common from barbican.tests import database_utils from barbican.tests import utils container_repo = repositories.get_container_repository() secret_repo = repositories.get_secret_repository() ca_repo = repositories.get_ca_repository() project_ca_repo = repositories.get_project_ca_repository() preferred_ca_repo = repositories.get_preferred_ca_repository() project_repo = repositories.get_project_repository() order_repo = repositories.get_order_repository() class 
WhenPerformingPrivateOperations(utils.BaseTestCase, utils.MockModelRepositoryMixin): """Tests private methods within certificate_resources.py.""" def setUp(self): super(WhenPerformingPrivateOperations, self).setUp() self.order_plugin_meta_repo = mock.MagicMock() self.setup_order_plugin_meta_repository_mock( self.order_plugin_meta_repo) self.order_barbican_meta_repo = mock.MagicMock() self.setup_order_barbican_meta_repository_mock( self.order_barbican_meta_repo) def test_get_plugin_meta(self): class Value(object): def __init__(self, value): self.value = value class OrderModel(object): id = mock.ANY order_plugin_metadata = { "foo": Value(1), "bar": Value(2), } order_model = OrderModel() self.order_plugin_meta_repo.get_metadata_for_order.return_value = ( order_model.order_plugin_metadata ) result = cert_res._get_plugin_meta(order_model) self._assert_dict_equal(order_model.order_plugin_metadata, result) def test_get_plugin_meta_with_empty_dict(self): result = cert_res._get_plugin_meta(None) self._assert_dict_equal({}, result) def test_save_plugin_meta_w_mock_meta(self): # Test dict for plugin meta data. test_order_model = 'My order model' test_plugin_meta = {"foo": 1} cert_res._save_plugin_metadata( test_order_model, test_plugin_meta) self.order_plugin_meta_repo.save.assert_called_once_with( test_plugin_meta, test_order_model) def test_save_plugin_w_null_meta(self): test_order_model = 'My order model' # Test None for plugin meta data. cert_res._save_plugin_metadata( test_order_model, None) self.order_plugin_meta_repo.save.assert_called_once_with( {}, test_order_model) def test_get_barbican_meta_with_empty_dict(self): result = cert_res._get_barbican_meta(None) self._assert_dict_equal({}, result) def test_save_barbican_w_null_meta(self): test_order_model = 'My order model' # Test None for plugin meta data. 
cert_res._save_barbican_metadata( test_order_model, None) self.order_barbican_meta_repo.save.assert_called_once_with( {}, test_order_model) def _assert_dict_equal(self, expected, test): self.assertIsInstance(expected, dict) self.assertIsInstance(test, dict) if expected != test: if len(expected) != len(test): self.fail('Expected dict not same size as test dict') unmatched_items = set(expected.items()) ^ set(test.items()) if len(unmatched_items): self.fail('One or more items different ' 'between the expected and test dicts') class BaseCertificateRequestsTestCase(database_utils.RepositoryTestCase): """Base Certificate Case Test function """ def setUp(self): super(BaseCertificateRequestsTestCase, self).setUp() self.external_project_id = "56789" self.project = res.get_or_create_project(self.external_project_id) project_repo.save(self.project) self.barbican_meta_dto = mock.MagicMock() self.order_meta = {} self.plugin_meta = {} self.barbican_meta = {} self.result = cert_man.ResultDTO( cert_man.CertificateStatus.WAITING_FOR_CA ) self.result_follow_on = common.FollowOnProcessingStatusDTO() self.cert_plugin = mock.MagicMock() self.cert_plugin.issue_certificate_request.return_value = self.result self.cert_plugin.check_certificate_status.return_value = self.result self.store_plugin = mock.MagicMock() parsed_ca = { 'plugin_name': "cert_plugin", 'plugin_ca_id': "XXXX", 'name': "test ca", 'description': 'Test CA', 'ca_signing_certificate': 'ZZZZZ', 'intermediates': 'YYYYY' } self.ca = models.CertificateAuthority(parsed_ca) ca_repo.create_from(self.ca) self.ca_id = self.ca.id # second ca for testing parsed_ca = { 'plugin_name': "cert_plugin", 'plugin_ca_id': "XXXX2", 'name': "test ca2", 'description': 'Test CA2', 'ca_signing_certificate': 'ZZZZZ2', 'intermediates': 'YYYYY2' } self.ca2 = models.CertificateAuthority(parsed_ca) ca_repo.create_from(self.ca2) self.ca_id2 = self.ca2.id # data for preferred CA and global preferred CA tests # add those to the repo in those tests 
self.pref_ca = models.PreferredCertificateAuthority( self.project.id, self.ca_id) self.global_pref_ca = models.PreferredCertificateAuthority( self.project.id, self.ca_id) # data for stored key cases self.private_key = models.Secret() self.private_key.secret_type = 'PRIVATE' self.private_key.project_id = self.project.id secret_repo.create_from(self.private_key) self.public_key = models.Secret() self.public_key.secret_type = 'PUBLIC' self.public_key.project_id = self.project.id secret_repo.create_from(self.public_key) self.passphrase = models.Secret() self.passphrase.secret_type = 'PASSPHRASE' self.passphrase.project_id = self.project.id secret_repo.create_from(self.passphrase) self.private_key_value = None self.public_key_value = "public_key" self.passphrase_value = None self.parsed_container_with_passphrase = { 'name': 'container name', 'type': 'rsa', 'secret_refs': [ {'name': 'private_key', 'secret_ref': 'https://localhost/secrets/' + self.private_key.id}, {'name': 'public_key', 'secret_ref': 'https://localhost/secrets/' + self.public_key.id}, {'name': 'private_key_passphrase', 'secret_ref': 'https://localhost/secrets/' + self.passphrase.id} ] } self.parsed_container = { 'name': 'container name', 'type': 'rsa', 'secret_refs': [ {'name': 'private_key', 'secret_ref': 'https://localhost/secrets/' + self.private_key.id}, {'name': 'public_key', 'secret_ref': 'https://localhost/secrets/' + self.public_key.id} ] } self.container_with_passphrase = models.Container( self.parsed_container_with_passphrase) self.container_with_passphrase.project_id = self.project.id container_repo.create_from(self.container_with_passphrase) self.container = models.Container(self.parsed_container) self.container.project_id = self.project.id container_repo.create_from(self.container) repositories.commit() self.stored_key_meta = { cert_man.REQUEST_TYPE: cert_man.CertificateRequestType.STORED_KEY_REQUEST, "container_ref": "https://localhost/containers/" + self.container.id, "subject_dn": 
"cn=host.example.com,ou=dev,ou=us,o=example.com" } self.order = models.Order() self.order.meta = self.order_meta self.order.project_id = self.project.id self.order.order_barbican_meta = self.barbican_meta self.order.type = 'certificate' order_repo.create_from(self.order) self._config_cert_plugin() self._config_store_plugin() self._config_cert_event_plugin() self._config_save_meta_plugin() self._config_get_meta_plugin() self._config_save_barbican_meta_plugin() self._config_get_barbican_meta_plugin() self._config_barbican_meta_dto() def tearDown(self): super(BaseCertificateRequestsTestCase, self).tearDown() self.cert_plugin_patcher.stop() self.save_plugin_meta_patcher.stop() self.get_plugin_meta_patcher.stop() self.cert_event_plugin_patcher.stop() self.barbican_meta_dto_patcher.stop() self.save_barbican_barbican_meta_patcher.stop() self.get_barbican_plugin_meta_patcher.stop() self.store_plugin_patcher.stop() def stored_key_side_effect(self, *args, **kwargs): if args[0] == 'PRIVATE': return secret_store.SecretDTO( secret_store.SecretType.PRIVATE, self.private_key_value, None, 'application/octet-string', None) elif args[0] == 'PASSPHRASE': return secret_store.SecretDTO( secret_store.SecretType.PASSPHRASE, self.passphrase_value, None, 'application/octet-string', None) elif args[0] == 'PUBLIC': return secret_store.SecretDTO( secret_store.SecretType.PUBLIC, self.public_key_value, None, 'application/octet-string', None) else: return None def _test_should_return_waiting_for_ca(self, method_to_test): self.result.status = cert_man.CertificateStatus.WAITING_FOR_CA method_to_test( self.order, self.project, self.result_follow_on) self.assertEqual( common.RetryTasks.INVOKE_CERT_STATUS_CHECK_TASK, self.result_follow_on.retry_task) self.assertEqual( cert_res.ORDER_STATUS_REQUEST_PENDING.id, self.result_follow_on.status) self.assertEqual( cert_res.ORDER_STATUS_REQUEST_PENDING.message, self.result_follow_on.status_message) def _test_should_return_certificate_generated(self, 
method_to_test): self.result.status = cert_man.CertificateStatus.CERTIFICATE_GENERATED method_to_test( self.order, self.project, self.result_follow_on) self.assertEqual( common.RetryTasks.NO_ACTION_REQUIRED, self.result_follow_on.retry_task) self.assertEqual( cert_res.ORDER_STATUS_CERT_GENERATED.id, self.result_follow_on.status) self.assertEqual( cert_res.ORDER_STATUS_CERT_GENERATED.message, self.result_follow_on.status_message) def _test_should_raise_client_data_issue_seen(self, method_to_test): self.result.status = cert_man.CertificateStatus.CLIENT_DATA_ISSUE_SEEN self.assertRaises( cert_man.CertificateStatusClientDataIssue, method_to_test, self.order, self.project, self.result_follow_on ) def _test_should_raise_status_not_supported(self, method_to_test): self.result.status = "Legend of Link" self.assertRaises( cert_man.CertificateStatusNotSupported, method_to_test, self.order, self.project, self.result_follow_on ) def _config_cert_plugin(self): """Mock the certificate plugin manager.""" cert_plugin_config = { 'return_value.get_plugin.return_value': self.cert_plugin, 'return_value.get_plugin_by_name.return_value': self.cert_plugin, 'return_value.get_plugin_by_ca_id.return_value': self.cert_plugin } self.cert_plugin_patcher = mock.patch( 'barbican.plugin.interface.certificate_manager' '.CertificatePluginManager', **cert_plugin_config ) self.cert_plugin_patcher.start() def _config_store_plugin(self): """Mock the secret store plugin manager.""" store_plugin_config = { 'return_value.get_plugin_retrieve_delete.return_value': self.store_plugin } self.store_plugin_patcher = mock.patch( 'barbican.plugin.interface.secret_store' '.get_manager', **store_plugin_config ) self.store_plugin_patcher.start() def _config_cert_event_plugin(self): """Mock the certificate event plugin manager.""" self.cert_event_plugin_patcher = mock.patch( 'barbican.plugin.interface.certificate_manager' '.EVENT_PLUGIN_MANAGER' ) self.cert_event_plugin_patcher.start() def 
_config_save_meta_plugin(self): """Mock the save plugin meta function.""" self.save_plugin_meta_patcher = mock.patch( 'barbican.tasks.certificate_resources._save_plugin_metadata' ) self.mock_save_plugin = self.save_plugin_meta_patcher.start() def _config_get_meta_plugin(self): """Mock the get plugin meta function.""" get_plugin_config = {'return_value': self.plugin_meta} self.get_plugin_meta_patcher = mock.patch( 'barbican.tasks.certificate_resources._get_plugin_meta', **get_plugin_config ) self.get_plugin_meta_patcher.start() def _config_save_barbican_meta_plugin(self): """Mock the save barbican plugin meta function.""" self.save_barbican_barbican_meta_patcher = mock.patch( 'barbican.tasks.certificate_resources._save_barbican_metadata' ) self.mock_barbican_save_plugin = ( self.save_barbican_barbican_meta_patcher.start() ) def _config_get_barbican_meta_plugin(self): """Mock the get barbican plugin meta function.""" get_barbican_plugin_config = {'return_value': self.barbican_meta} self.get_barbican_plugin_meta_patcher = mock.patch( 'barbican.tasks.certificate_resources._get_barbican_meta', **get_barbican_plugin_config ) self.get_barbican_plugin_meta_patcher.start() def _config_barbican_meta_dto(self): """Mock the BarbicanMetaDTO.""" get_plugin_config = {'return_value': self.barbican_meta_dto} self.barbican_meta_dto_patcher = mock.patch( 'barbican.plugin.interface.certificate_manager' '.BarbicanMetaDTO', **get_plugin_config ) self.barbican_meta_dto_patcher.start() class WhenIssuingCertificateRequests(BaseCertificateRequestsTestCase): """Tests the 'issue_certificate_request()' function.""" def setUp(self): super(WhenIssuingCertificateRequests, self).setUp() def tearDown(self): super(WhenIssuingCertificateRequests, self).tearDown() def test_should_return_waiting_for_ca(self): self._test_should_return_waiting_for_ca( cert_res.issue_certificate_request) self._verify_issue_certificate_plugins_called() def test_should_return_waiting_for_ca_as_retry(self): # For a retry, 
the plugin-name to look up would have already been # saved into the barbican metadata for the order, so just make sure # we can retrieve it. self.barbican_meta.update({'plugin_name': 'foo-plugin'}) self._test_should_return_waiting_for_ca( cert_res.issue_certificate_request) self._verify_issue_certificate_plugins_called() def test_should_return_certificate_generated(self): self._test_should_return_certificate_generated( cert_res.issue_certificate_request) self._verify_issue_certificate_plugins_called() def test_should_raise_client_data_issue_seen(self): self._test_should_raise_client_data_issue_seen( cert_res.issue_certificate_request) def _do_pyopenssl_stored_key_request(self): self.order_meta.update(self.stored_key_meta) pkey = crypto.PKey() pkey.generate_key(crypto.TYPE_RSA, 2048) key_pem = crypto.dump_privatekey( crypto.FILETYPE_PEM, pkey) self.private_key_value = base64.b64encode(key_pem) self.public_key_value = "public_key" self.passphrase_value = None self.store_plugin.get_secret.side_effect = self.stored_key_side_effect self._test_should_return_waiting_for_ca( cert_res.issue_certificate_request) def test_should_return_for_pyopenssl_stored_key(self): self._do_pyopenssl_stored_key_request() self._verify_issue_certificate_plugins_called() self.assertIsNotNone( self.order.order_barbican_meta.get('generated_csr')) # TODO(alee-3) Add tests to validate the request based on the validator # code that dave-mccowan is adding. 
def test_should_return_for_openssl_stored_key_ca_id_passed_in(self): self.stored_key_meta['ca_id'] = self.ca_id2 self._do_pyopenssl_stored_key_request() self._verify_issue_certificate_plugins_called() self.assertIsNotNone( self.order.order_barbican_meta['generated_csr']) def test_should_return_for_openssl_stored_key_pref_ca_defined(self): preferred_ca_repo.create_from(self.pref_ca) self._do_pyopenssl_stored_key_request() self._verify_issue_certificate_plugins_called() self.assertIsNotNone( self.order.order_barbican_meta['generated_csr']) def test_should_return_for_openssl_stored_key_global_ca_defined(self): preferred_ca_repo.create_from(self.global_pref_ca) self._do_pyopenssl_stored_key_request() self._verify_issue_certificate_plugins_called() self.assertIsNotNone( self.order.order_barbican_meta['generated_csr']) def test_should_return_for_pyopenssl_stored_key_with_passphrase(self): self.order_meta.update(self.stored_key_meta) self.order_meta['container_ref'] = ( "https://localhost/containers/" + self.container_with_passphrase.id ) passphrase = "my secret passphrase" pkey = crypto.PKey() pkey.generate_key(crypto.TYPE_RSA, 2048) key_pem = crypto.dump_privatekey( crypto.FILETYPE_PEM, pkey, passphrase=passphrase.encode('utf-8') ) self.private_key_value = base64.b64encode(key_pem) self.public_key_value = "public_key" self.passphrase_value = base64.b64encode(passphrase.encode('utf-8')) self.store_plugin.get_secret.side_effect = self.stored_key_side_effect self._test_should_return_waiting_for_ca( cert_res.issue_certificate_request) self._verify_issue_certificate_plugins_called() self.assertIsNotNone( self.order.order_barbican_meta['generated_csr']) # TODO(alee-3) Add tests to validate the request based on the validator # code that dave-mccowan is adding. 
def test_should_return_for_pycrypto_stored_key_with_passphrase(self): self.order_meta.update(self.stored_key_meta) self.order_meta['container_ref'] = ( "https://localhost/containers/" + self.container_with_passphrase.id ) passphrase = "my secret passphrase" private_key = RSA.generate(2048, None, None, 65537) public_key = private_key.publickey() private_key_pem = private_key.exportKey('PEM', passphrase, 8) self.private_key_value = base64.b64encode(private_key_pem) public_key_pem = public_key.exportKey() self.public_key_value = base64.b64encode(public_key_pem) self.passphrase_value = base64.b64encode(passphrase.encode('utf-8')) self.store_plugin.get_secret.side_effect = self.stored_key_side_effect self._test_should_return_waiting_for_ca( cert_res.issue_certificate_request) self._verify_issue_certificate_plugins_called() self.assertIsNotNone( self.order.order_barbican_meta['generated_csr']) # TODO(alee-3) Add tests to validate the request based on the validator # code that dave-mccowan is adding. def test_should_return_for_pycrypto_stored_key_without_passphrase(self): self.order_meta.update(self.stored_key_meta) private_key = RSA.generate(2048, None, None, 65537) public_key = private_key.publickey() self.private_key_value = base64.b64encode( private_key.exportKey('PEM', None, 8)) self.public_key_value = base64.b64encode(public_key.exportKey()) self.store_plugin.get_secret.side_effect = self.stored_key_side_effect self._test_should_return_waiting_for_ca( cert_res.issue_certificate_request) self._verify_issue_certificate_plugins_called() self.assertIsNotNone( self.order.order_barbican_meta['generated_csr']) # TODO(alee-3) Add tests to validate the request based on the validator # code that dave-mccowan is adding. 
def test_should_raise_for_pycrypto_stored_key_no_container(self): self.order_meta.update(self.stored_key_meta) private_key = RSA.generate(2048, None, None, 65537) public_key = private_key.publickey() self.private_key_value = private_key.exportKey('PEM', None, 8) self.public_key_value = public_key.exportKey() self.store_plugin.get_secret.side_effect = self.stored_key_side_effect self.result.status = cert_man.CertificateStatus.WAITING_FOR_CA container_repo.delete_project_entities(self.project.id) self.assertRaises(excep.StoredKeyContainerNotFound, cert_res.issue_certificate_request, self.order, self.project, self.result_follow_on) def test_should_raise_for_pycrypto_stored_key_no_private_key(self): self.order_meta.update(self.stored_key_meta) private_key = RSA.generate(2048, None, None, 65537) public_key = private_key.publickey() self.private_key_value = base64.b64encode( private_key.exportKey('PEM', None, 8)) self.public_key_value = base64.b64encode( public_key.exportKey()) self.store_plugin.get_secret.side_effect = self.stored_key_side_effect self.result.status = cert_man.CertificateStatus.WAITING_FOR_CA secret_repo.delete_entity_by_id( self.private_key.id, self.external_project_id) self.assertRaises(excep.StoredKeyPrivateKeyNotFound, cert_res.issue_certificate_request, self.order, self.project, self.result_follow_on) def test_should_return_for_pyopenssl_stored_key_with_extensions(self): self.order_meta.update(self.stored_key_meta) pkey = crypto.PKey() pkey.generate_key(crypto.TYPE_RSA, 2048) self.private_key_value = base64.b64encode(crypto.dump_privatekey( crypto.FILETYPE_PEM, pkey)) self.store_plugin.get_secret.side_effect = self.stored_key_side_effect self.order_meta['extensions'] = 'my ASN.1 extensions structure here' # TODO(alee-3) Add real extensions data here self.result.status = cert_man.CertificateStatus.WAITING_FOR_CA cert_res.issue_certificate_request(self.order, self.project, self.result_follow_on) self._verify_issue_certificate_plugins_called() 
self.assertIsNotNone(self.order.order_barbican_meta['generated_csr']) # TODO(alee-3) Add tests to validate the request based on the validator # code that dave-mccowan is adding. # TODO(alee-3) Add tests to validate the extensions in the request def test_should_raise_invalid_operation_seen(self): self.result.status = cert_man.CertificateStatus.INVALID_OPERATION self.assertRaises( cert_man.CertificateStatusInvalidOperation, cert_res.issue_certificate_request, self.order, self.project, self.result_follow_on ) def test_should_return_ca_unavailable_for_request(self): retry_msec = 123 status_msg = 'Test status' self.result.status = ( cert_man.CertificateStatus.CA_UNAVAILABLE_FOR_REQUEST) self.result.retry_msec = retry_msec self.result.status_message = status_msg order_ref = hrefs.convert_order_to_href(self.order.id) cert_res.issue_certificate_request(self.order, self.project, self.result_follow_on) self._verify_issue_certificate_plugins_called() epm = self.cert_event_plugin_patcher.target.EVENT_PLUGIN_MANAGER epm.notify_ca_is_unavailable.assert_called_once_with( self.project.id, order_ref, status_msg, retry_msec ) self._verify_issue_certificate_plugins_called() self.assertEqual( common.RetryTasks.INVOKE_SAME_TASK, self.result_follow_on.retry_task) self.assertEqual( cert_res.ORDER_STATUS_CA_UNAVAIL_FOR_ISSUE.id, self.result_follow_on.status) self.assertEqual( cert_res.ORDER_STATUS_CA_UNAVAIL_FOR_ISSUE.message, self.result_follow_on.status_message) def test_should_raise_status_not_supported(self): self._test_should_raise_status_not_supported( cert_res.issue_certificate_request) def _verify_issue_certificate_plugins_called(self): self.cert_plugin.issue_certificate_request.assert_called_once_with( self.order.id, self.order_meta, self.plugin_meta, self.barbican_meta_dto ) self.mock_save_plugin.assert_called_once_with( self.order, self.plugin_meta ) self.mock_barbican_save_plugin.assert_called_once_with( self.order, self.barbican_meta ) class 
WhenCheckingCertificateRequests(BaseCertificateRequestsTestCase): """Tests the 'check_certificate_request()' function.""" def setUp(self): super(WhenCheckingCertificateRequests, self).setUp() def tearDown(self): super(WhenCheckingCertificateRequests, self).tearDown() def test_should_return_waiting_for_ca(self): self._test_should_return_waiting_for_ca( cert_res.check_certificate_request) self._verify_check_certificate_plugins_called() def test_should_return_certificate_generated(self): self._test_should_return_certificate_generated( cert_res.check_certificate_request) self._verify_check_certificate_plugins_called() def test_should_raise_client_data_issue_seen(self): self._test_should_raise_client_data_issue_seen( cert_res.check_certificate_request) def test_should_raise_status_not_supported(self): self._test_should_raise_status_not_supported( cert_res.check_certificate_request) def test_should_return_ca_unavailable_for_request(self): retry_msec = 123 status_msg = 'Test status' self.result.status = ( cert_man.CertificateStatus.CA_UNAVAILABLE_FOR_REQUEST) self.result.retry_msec = retry_msec self.result.status_message = status_msg order_ref = hrefs.convert_order_to_href(self.order.id) cert_res.check_certificate_request(self.order, self.project, self.result_follow_on) self._verify_check_certificate_plugins_called() epm = self.cert_event_plugin_patcher.target.EVENT_PLUGIN_MANAGER epm.notify_ca_is_unavailable.assert_called_once_with( self.project.id, order_ref, status_msg, retry_msec ) self.assertEqual( common.RetryTasks.INVOKE_SAME_TASK, self.result_follow_on.retry_task) self.assertEqual( cert_res.ORDER_STATUS_CA_UNAVAIL_FOR_CHECK.id, self.result_follow_on.status) self.assertEqual( cert_res.ORDER_STATUS_CA_UNAVAIL_FOR_CHECK.message, self.result_follow_on.status_message) def _do_pyopenssl_stored_key_request(self): self.order_meta.update(self.stored_key_meta) pkey = crypto.PKey() pkey.generate_key(crypto.TYPE_RSA, 2048) key_pem = crypto.dump_privatekey( 
crypto.FILETYPE_PEM, pkey) self.private_key_value = base64.b64encode(key_pem) self.public_key_value = "public_key" self.passphrase_value = None self.store_plugin.get_secret.side_effect = self.stored_key_side_effect self._test_should_return_waiting_for_ca( cert_res.issue_certificate_request) self._test_should_return_certificate_generated( cert_res.check_certificate_request) def test_should_return_for_pyopenssl_stored_key(self): self._do_pyopenssl_stored_key_request() self._verify_check_certificate_plugins_called() self.assertIsNotNone( self.order.order_barbican_meta.get('generated_csr')) def _verify_check_certificate_plugins_called(self): self.cert_plugin.check_certificate_status.assert_called_once_with( self.order.id, self.order_meta, self.plugin_meta, self.barbican_meta_dto ) self.mock_save_plugin.assert_called_with( self.order, self.plugin_meta ) class WhenCreatingSubordinateCAs(database_utils.RepositoryTestCase): """Tests the 'create_subordinate_ca()' function.""" def setUp(self): super(WhenCreatingSubordinateCAs, self).setUp() self.project = res.get_or_create_project('12345') self.project2 = res.get_or_create_project('56789') self.subject_name = "cn=subca1 signing certificate, o=example.com" self.creator_id = "user12345" self.name = "Subordinate CA #1" self.description = "This is a test subordinate CA" self.plugin_name = "dogtag_plugin" # create parent ca expiration = (datetime.datetime.utcnow() + datetime.timedelta(minutes=10)) parsed_ca = {'plugin_name': self.plugin_name, 'plugin_ca_id': 'ca_master', 'expiration': expiration.isoformat(), 'name': 'Dogtag CA', 'description': 'Master CA for Dogtag plugin', 'ca_signing_certificate': 'XXXXX', 'intermediates': 'YYYYY'} self.parent_ca = models.CertificateAuthority(parsed_ca) ca_repo.create_from(self.parent_ca) self.parent_ca_ref = 'https://localhost:6311/cas/' + self.parent_ca.id self.new_ca_dict = { 'plugin_ca_id': 'ca_subordinate', 'expiration': expiration.isoformat(), 'name': 'Dogtag Subordinate CA', 
'description': 'Subordinate CA for Dogtag plugin', 'ca_signing_certificate': 'XXXXX', 'intermediates': 'YYYYY', } # mock plugin and calls to plugin self.cert_plugin = mock.MagicMock() self.cert_plugin.supports_create_ca.return_value = True self.cert_plugin.create_ca.return_value = self.new_ca_dict self._config_cert_plugin() def tearDown(self): super(WhenCreatingSubordinateCAs, self).tearDown() self.cert_plugin_patcher.stop() def test_should_create_subordinate_ca(self): subca = cert_res.create_subordinate_ca( project_model=self.project, name=self.name, description=self.description, subject_dn=self.subject_name, parent_ca_ref=self.parent_ca_ref, creator_id=self.creator_id ) self.assertIsInstance(subca, models.CertificateAuthority) self.assertEqual(self.project.id, subca.project_id) self.assertEqual(self.creator_id, subca.creator_id) self.assertEqual(self.plugin_name, subca.plugin_name) def test_should_raise_invalid_parent_ca(self): self.parent_ca_ref = 'https://localhost:6311/cas/' + "BAD-CA-REF" self.assertRaises( excep.InvalidParentCA, cert_res.create_subordinate_ca, project_model=self.project, name=self.name, description=self.description, subject_dn=self.subject_name, parent_ca_ref=self.parent_ca_ref, creator_id=self.creator_id ) def test_should_raise_unauthorized_parent_ca(self): subca = cert_res.create_subordinate_ca( project_model=self.project2, name=self.name, description=self.description, subject_dn=self.subject_name, parent_ca_ref=self.parent_ca_ref, creator_id=self.creator_id ) subca_ref = hrefs.convert_certificate_authority_to_href(subca.id) self.assertRaises( excep.UnauthorizedSubCA, cert_res.create_subordinate_ca, project_model=self.project, name=self.name, description=self.description, subject_dn=self.subject_name, parent_ca_ref=subca_ref, creator_id=self.creator_id) def test_should_raise_subcas_not_supported(self): self.cert_plugin.supports_create_ca.return_value = False self.assertRaises( excep.SubCAsNotSupported, cert_res.create_subordinate_ca, 
project_model=self.project, name=self.name, description=self.description, subject_dn=self.subject_name, parent_ca_ref=self.parent_ca_ref, creator_id=self.creator_id ) def test_should_raise_subcas_not_created(self): self.cert_plugin.create_ca.return_value = None self.assertRaises( excep.SubCANotCreated, cert_res.create_subordinate_ca, project_model=self.project, name=self.name, description=self.description, subject_dn=self.subject_name, parent_ca_ref=self.parent_ca_ref, creator_id=self.creator_id ) def test_should_delete_subca(self): subca = cert_res.create_subordinate_ca( project_model=self.project, name=self.name, description=self.description, subject_dn=self.subject_name, parent_ca_ref=self.parent_ca_ref, creator_id=self.creator_id ) self.assertIsInstance(subca, models.CertificateAuthority) cert_res.delete_subordinate_ca(self.project.external_id, subca) self.cert_plugin.delete_ca.assert_called_once_with(subca.plugin_ca_id) def test_should_delete_subca_and_all_related_db_entities(self): subca = cert_res.create_subordinate_ca( project_model=self.project, name=self.name, description=self.description, subject_dn=self.subject_name, parent_ca_ref=self.parent_ca_ref, creator_id=self.creator_id ) project_ca = models.ProjectCertificateAuthority( self.project.id, subca.id ) project_ca_repo.create_from(project_ca) preferred_ca = models.PreferredCertificateAuthority( self.project.id, subca.id) preferred_ca_repo.create_from(preferred_ca) cert_res.delete_subordinate_ca(self.project.external_id, subca) self.cert_plugin.delete_ca.assert_called_once_with(subca.plugin_ca_id) def test_should_raise_when_delete_pref_subca_with_other_project_ca(self): subca = cert_res.create_subordinate_ca( project_model=self.project, name=self.name, description=self.description, subject_dn=self.subject_name, parent_ca_ref=self.parent_ca_ref, creator_id=self.creator_id ) project_ca = models.ProjectCertificateAuthority( self.project.id, subca.id ) project_ca_repo.create_from(project_ca) preferred_ca = 
models.PreferredCertificateAuthority( self.project.id, subca.id) preferred_ca_repo.create_from(preferred_ca) subca2 = cert_res.create_subordinate_ca( project_model=self.project, name=self.name, description=self.description, subject_dn=self.subject_name, parent_ca_ref=self.parent_ca_ref, creator_id=self.creator_id ) project_ca2 = models.ProjectCertificateAuthority( self.project.id, subca2.id ) project_ca_repo.create_from(project_ca2) self.assertRaises( excep.CannotDeletePreferredCA, cert_res.delete_subordinate_ca, self.project.external_id, subca ) def test_should_raise_cannot_delete_base_ca(self): self.assertRaises( excep.CannotDeleteBaseCA, cert_res.delete_subordinate_ca, self.project.external_id, self.parent_ca ) def test_should_raise_unauthorized_subca_delete(self): subca = cert_res.create_subordinate_ca( project_model=self.project, name=self.name, description=self.description, subject_dn=self.subject_name, parent_ca_ref=self.parent_ca_ref, creator_id=self.creator_id ) self.assertRaises( excep.UnauthorizedSubCA, cert_res.delete_subordinate_ca, self.project2.external_id, subca ) def _config_cert_plugin(self): """Mock the certificate plugin manager.""" cert_plugin_config = { 'return_value.get_plugin.return_value': self.cert_plugin, 'return_value.get_plugin_by_name.return_value': self.cert_plugin, 'return_value.get_plugin_by_ca_id.return_value': self.cert_plugin } self.cert_plugin_patcher = mock.patch( 'barbican.plugin.interface.certificate_manager' '.CertificatePluginManager', **cert_plugin_config ) self.cert_plugin_patcher.start() barbican-2.0.0/barbican/tests/model/0000775000567000056710000000000012701406024020412 5ustar jenkinsjenkins00000000000000barbican-2.0.0/barbican/tests/model/__init__.py0000664000567000056710000000000012701405673022522 0ustar jenkinsjenkins00000000000000barbican-2.0.0/barbican/tests/model/test_models.py0000664000567000056710000007044412701405673023330 0ustar jenkinsjenkins00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import datetime import unittest from barbican.common import exception from barbican.model import models from barbican.plugin.interface import secret_store from barbican.tests import utils class WhenCreatingNewSecret(utils.BaseTestCase): def setUp(self): super(WhenCreatingNewSecret, self).setUp() self.parsed_secret = {'name': 'name', 'secret_type': secret_store.SecretType.OPAQUE, 'algorithm': 'algorithm', 'bit_length': 512, 'mode': 'mode', 'plain_text': 'not-encrypted', 'creator_id': 'creator12345'} self.parsed_order = {'secret': self.parsed_secret} def test_new_secret_is_created_from_dict(self): date_time = datetime.datetime.now().isoformat() self.parsed_secret['expiration'] = date_time secret = models.Secret(self.parsed_secret) self.assertEqual(self.parsed_secret['name'], secret.name) self.assertEqual(self.parsed_secret['secret_type'], secret.secret_type) self.assertEqual(self.parsed_secret['algorithm'], secret.algorithm) self.assertEqual(self.parsed_secret['bit_length'], secret.bit_length) self.assertEqual(self.parsed_secret['mode'], secret.mode) self.assertIsInstance(secret.expiration, datetime.datetime) self.assertEqual(self.parsed_secret['creator_id'], secret.creator_id) self.assertEqual(secret.created_at, secret.updated_at) fields = secret.to_dict_fields() self.assertEqual(self.parsed_secret['secret_type'], fields['secret_type']) self.assertEqual(self.parsed_secret['algorithm'], fields['algorithm']) 
self.assertEqual(self.parsed_secret['creator_id'], fields['creator_id']) def test_new_secret_is_created_with_default_secret_type(self): secret_spec = dict(self.parsed_secret) date_time = datetime.datetime.now().isoformat() secret_spec['expiration'] = date_time del secret_spec['secret_type'] secret = models.Secret(secret_spec) self.assertEqual(self.parsed_secret['secret_type'], secret.secret_type) class WhenCreatingNewSecretMetadata(utils.BaseTestCase): def setUp(self): super(WhenCreatingNewSecretMetadata, self).setUp() self.key = 'dog' self.value = 'poodle' self.metadata = { 'key': self.key, 'value': self.value } def test_new_secret_metadata_is_created_from_dict(self): secret_meta = models.SecretUserMetadatum(self.key, self.value) self.assertEqual(self.key, secret_meta.key) self.assertEqual(self.value, secret_meta.value) fields = secret_meta.to_dict_fields() self.assertEqual(self.metadata['key'], fields['key']) self.assertEqual(self.metadata['value'], fields['value']) def test_should_raise_exception_metadata_with_no_key(self): self.assertRaises(exception.MissingArgumentError, models.SecretUserMetadatum, None, self.value) def test_should_raise_exception_metadata_with_no_value(self): self.assertRaises(exception.MissingArgumentError, models.SecretUserMetadatum, self.key, None) class WhenCreatingNewOrder(utils.BaseTestCase): def setUp(self): super(WhenCreatingNewOrder, self).setUp() self.parsed_order = { 'type': 'certificate', 'meta': { 'email': 'email@email.com' }, 'sub_status': 'Pending', 'sub_status_message': 'Waiting for instructions...', 'creator_id': 'creator12345' } def test_new_order_is_created(self): order = models.Order(self.parsed_order) self.assertEqual(self.parsed_order['type'], order.type) self.assertEqual(self.parsed_order['meta'], order.meta) self.assertEqual(self.parsed_order['sub_status'], order.sub_status) self.assertEqual(self.parsed_order['creator_id'], order.creator_id) self.assertEqual( self.parsed_order['sub_status_message'], 
order.sub_status_message ) fields = order.to_dict_fields() self.assertEqual(self.parsed_order['sub_status'], fields['sub_status']) self.assertEqual(self.parsed_order['type'], fields['type']) self.assertEqual(self.parsed_order['creator_id'], fields['creator_id']) class WhenCreatingNewContainer(utils.BaseTestCase): def setUp(self): super(WhenCreatingNewContainer, self).setUp() self.parsed_container = {'name': 'name', 'type': 'generic', 'secret_refs': [ {'name': 'test secret 1', 'secret_ref': '123'}, {'name': 'test secret 2', 'secret_ref': '123'}, {'name': 'test secret 3', 'secret_ref': '123'} ], 'creator_id': 'creator123456'} def test_new_container_is_created_from_dict(self): container = models.Container(self.parsed_container) self.assertEqual(self.parsed_container['name'], container.name) self.assertEqual(self.parsed_container['type'], container.type) self.assertEqual(self.parsed_container['creator_id'], container.creator_id) self.assertEqual(len(self.parsed_container['secret_refs']), len(container.container_secrets)) self.assertEqual(self.parsed_container['secret_refs'][0]['name'], container.container_secrets[0].name) self.assertEqual(self.parsed_container['secret_refs'][0]['secret_ref'], container.container_secrets[0].secret_id) self.assertEqual(self.parsed_container['secret_refs'][1]['name'], container.container_secrets[1].name) self.assertEqual(self.parsed_container['secret_refs'][1]['secret_ref'], container.container_secrets[1].secret_id) self.assertEqual(self.parsed_container['secret_refs'][2]['name'], container.container_secrets[2].name) self.assertEqual(self.parsed_container['secret_refs'][2]['secret_ref'], container.container_secrets[2].secret_id) fields = container.to_dict_fields() self.assertEqual(self.parsed_container['name'], fields['name']) self.assertEqual(self.parsed_container['type'], fields['type']) self.assertEqual(self.parsed_container['creator_id'], fields['creator_id']) def test_new_certificate_container_is_created_from_dict(self): 
self.parsed_container['type'] = 'certificate' container = models.Container(self.parsed_container) self.assertEqual(self.parsed_container['name'], container.name) self.assertEqual(self.parsed_container['type'], container.type) self.assertEqual(self.parsed_container['creator_id'], container.creator_id) self.assertEqual(len(self.parsed_container['secret_refs']), len(container.container_secrets)) self.assertEqual(self.parsed_container['secret_refs'][0]['name'], container.container_secrets[0].name) self.assertEqual(self.parsed_container['secret_refs'][0]['secret_ref'], container.container_secrets[0].secret_id) self.assertEqual(self.parsed_container['secret_refs'][1]['name'], container.container_secrets[1].name,) self.assertEqual(self.parsed_container['secret_refs'][1]['secret_ref'], container.container_secrets[1].secret_id) self.assertEqual(self.parsed_container['secret_refs'][2]['name'], container.container_secrets[2].name) self.assertEqual(self.parsed_container['secret_refs'][2]['secret_ref'], container.container_secrets[2].secret_id) def test_parse_secret_ref_uri(self): self.parsed_container['secret_refs'][0]['secret_ref'] = ( 'http://localhost:9110/123/secrets/123456') container = models.Container(self.parsed_container) self.assertEqual('123456', container.container_secrets[0].secret_id) self.parsed_container['secret_refs'][0]['secret_ref'] = ( 'http://localhost:9110/123/secrets/123456/') container = models.Container(self.parsed_container) self.assertEqual('123456', container.container_secrets[0].secret_id) class WhenCreatingNewConsumer(utils.BaseTestCase): def setUp(self): super(WhenCreatingNewConsumer, self).setUp() self.parsed_consumer = {'name': 'name', 'URL': 'URL'} self.project_id = '12345project' self.container_id = '12345container' def test_new_consumer_is_created_from_dict(self): consumer = models.ContainerConsumerMetadatum(self.container_id, self.project_id, self.parsed_consumer) self.assertEqual(self.parsed_consumer['name'], consumer.name) 
self.assertEqual(self.parsed_consumer['URL'], consumer.URL) self.assertEqual(models.States.ACTIVE, consumer.status) def test_new_consumer_has_correct_hash(self): consumer_one = models.ContainerConsumerMetadatum(self.container_id, self.project_id, self.parsed_consumer) consumer_two = models.ContainerConsumerMetadatum(self.container_id, self.project_id, self.parsed_consumer) different_container = '67890container' consumer_three = models.ContainerConsumerMetadatum( different_container, self.project_id, self.parsed_consumer) self.assertEqual(consumer_one.data_hash, consumer_two.data_hash) self.assertNotEqual(consumer_one.data_hash, consumer_three.data_hash) class WhenProcessingJsonBlob(utils.BaseTestCase): def setUp(self): super(WhenProcessingJsonBlob, self).setUp() self.json_blob = models.JsonBlob() def test_process_bind_param_w_dict(self): res = self.json_blob.process_bind_param({'test': True}, None) self.assertEqual('{"test": true}', res) def test_process_result_value_w_json_str(self): res = self.json_blob.process_result_value('{"test": true}', None) self.assertTrue(res.get('test')) class WhenCreatingOrderRetryTask(utils.BaseTestCase): def test_create_new_order_task(self): order = models.Order({ 'type': 'certificate', 'meta': { 'email': 'email@email.com' }, 'sub_status': 'Pending', 'sub_status_message': 'Waiting for instructions...' 
}) at = datetime.datetime.utcnow() order_retry_task = models.OrderRetryTask() order_retry_task.order_id = order.id order_retry_task.retry_task = "foobar" order_retry_task.retry_at = at order_retry_task.retry_args = ["one", "two"] order_retry_task.retry_kwargs = {"three": "four"} self.assertEqual(order.id, order_retry_task.order_id) self.assertEqual("foobar", order_retry_task.retry_task) self.assertEqual(at, order_retry_task.retry_at) self.assertEqual( ["one", "two"], order_retry_task.retry_args, ) self.assertEqual( {"three": "four"}, order_retry_task.retry_kwargs, ) class WhenCreatingNewCertificateAuthority(utils.BaseTestCase): def setUp(self): super(WhenCreatingNewCertificateAuthority, self).setUp() expiration = (datetime.datetime.utcnow() + datetime.timedelta(minutes=10)) self.parsed_ca = {'plugin_name': 'dogtag_plugin', 'plugin_ca_id': 'ca_master', 'expiration': expiration.isoformat(), 'name': 'Dogtag CA', 'description': 'Master CA for Dogtag plugin', 'ca_signing_certificate': 'XXXXX', 'intermediates': 'YYYYY', 'creator_id': 'user12345', 'parent_ca_id': '12330-223-22', 'project_id': '12345'} def test_new_ca_is_created_from_dict(self): ca = models.CertificateAuthority(self.parsed_ca) self.assertEqual(self.parsed_ca['plugin_name'], ca.plugin_name) self.assertEqual(self.parsed_ca['plugin_ca_id'], ca.plugin_ca_id) self.assertEqual(self.parsed_ca['name'], ca.ca_meta['name'].value) self.assertEqual(self.parsed_ca['description'], ca.ca_meta['description'].value) self.assertEqual(self.parsed_ca['ca_signing_certificate'], ca.ca_meta['ca_signing_certificate'].value) self.assertEqual(self.parsed_ca['intermediates'], ca.ca_meta['intermediates'].value) self.assertIsInstance(ca.expiration, datetime.datetime) self.assertEqual(ca.created_at, ca.updated_at) self.assertEqual(self.parsed_ca['creator_id'], ca.creator_id) self.assertEqual(self.parsed_ca['project_id'], ca.project_id) class WhenCreatingNewProjectCertificateAuthority(utils.BaseTestCase): def setUp(self): 
super(WhenCreatingNewProjectCertificateAuthority, self).setUp() expiration = (datetime.datetime.utcnow() + datetime.timedelta(minutes=10)) self.parsed_ca = {'plugin_name': 'dogtag_plugin', 'plugin_ca_id': 'ca_master', 'expiration': expiration.isoformat(), 'name': 'Dogtag CA', 'description': 'Master CA for Dogtag plugin', 'ca_signing_certificate': 'XXXXX', 'intermediates': 'YYYYY'} def test_create_new_project_ca(self): ca = models.CertificateAuthority(self.parsed_ca) ca.id = '67890' project = models.Project() project.id = '12345' project_ca = models.ProjectCertificateAuthority(project.id, ca.id) self.assertEqual(ca.id, project_ca.ca_id) self.assertEqual(project.id, project_ca.project_id) class WhenCreatingNewPreferredCertificateAuthority(utils.BaseTestCase): def setUp(self): super(WhenCreatingNewPreferredCertificateAuthority, self).setUp() expiration = (datetime.datetime.utcnow() + datetime.timedelta(minutes=10)) self.parsed_ca = {'plugin_name': 'dogtag_plugin', 'plugin_ca_id': 'ca_master', 'expiration': expiration.isoformat(), 'name': 'Dogtag CA', 'description': 'Master CA for Dogtag plugin', 'ca_signing_certificate': 'XXXXX', 'intermediates': 'YYYYY'} def test_create_new_preferred_ca(self): ca = models.CertificateAuthority(self.parsed_ca) ca.id = '67890' project = models.Project() project.id = '12345' preferred_ca = models.PreferredCertificateAuthority(project.id, ca.id) self.assertEqual(ca.id, preferred_ca.ca_id) self.assertEqual(project.id, preferred_ca.project_id) class WhenCreatingNewSecretACL(utils.BaseTestCase): def setUp(self): super(WhenCreatingNewSecretACL, self).setUp() self.secret_id = 'secret123456' self.user_ids = ['user12345', 'user67890'] self.operation = 'read' self.project_access = True def test_new_secretacl_for_given_all_input(self): acl = models.SecretACL(self.secret_id, self.operation, self.project_access, self.user_ids) self.assertEqual(self.secret_id, acl.secret_id) self.assertEqual(self.operation, acl.operation) 
self.assertEqual(self.project_access, acl.project_access) self.assertTrue(all(acl_user.user_id in self.user_ids for acl_user in acl.acl_users)) def test_new_secretacl_check_to_dict_fields(self): acl = models.SecretACL(self.secret_id, self.operation, self.project_access, self.user_ids) self.assertEqual(self.secret_id, acl.to_dict_fields()['secret_id']) self.assertEqual(self.operation, acl.to_dict_fields()['operation']) self.assertEqual(self.project_access, acl.to_dict_fields()['project_access']) self.assertTrue(all(user_id in self.user_ids for user_id in acl.to_dict_fields()['users'])) self.assertIsNone(acl.to_dict_fields()['acl_id']) def test_new_secretacl_for_bare_minimum_input(self): acl = models.SecretACL(self.secret_id, self.operation, None, None) self.assertEqual(self.secret_id, acl.secret_id) self.assertEqual(0, len(acl.acl_users)) self.assertEqual(self.operation, acl.operation) self.assertIsNone(acl.project_access) def test_new_secretacl_with_duplicate_userids_input(self): user_ids = list(self.user_ids) user_ids = user_ids * 2 # duplicate ids acl = models.SecretACL(self.secret_id, self.operation, None, user_ids=user_ids) self.assertEqual(self.secret_id, acl.secret_id) self.assertEqual(self.operation, acl.operation) self.assertIsNone(acl.project_access) self.assertEqual(2, len(acl.acl_users)) def test_should_throw_exception_missing_secret_id(self): self.assertRaises(exception.MissingArgumentError, models.SecretACL, None, 'read', ['user246'], None) def test_should_throw_exception_missing_operation(self): self.assertRaises(exception.MissingArgumentError, models.SecretACL, self.secret_id, None, None, ['user246']) def test_new_secretacl_expect_user_ids_as_list(self): acl = models.SecretACL(self.secret_id, self.operation, None, {'aUser': '12345'}) self.assertEqual(0, len(acl.acl_users)) class WhenCreatingNewContainerACL(utils.BaseTestCase): def setUp(self): super(WhenCreatingNewContainerACL, self).setUp() self.container_id = 'container123456' self.user_ids = 
['user12345', 'user67890'] self.operation = 'read' self.project_access = True def test_new_containeracl_for_given_all_input(self): acl = models.ContainerACL(self.container_id, self.operation, self.project_access, self.user_ids) self.assertEqual(self.container_id, acl.container_id) self.assertEqual(self.operation, acl.operation) self.assertEqual(self.project_access, acl.project_access) self.assertTrue(all(acl_user.user_id in self.user_ids for acl_user in acl.acl_users)) def test_new_containeracl_check_to_dict_fields(self): acl = models.ContainerACL(self.container_id, self.operation, self.project_access, self.user_ids) self.assertEqual(self.container_id, acl.to_dict_fields()['container_id']) self.assertEqual(self.operation, acl.to_dict_fields()['operation']) self.assertEqual(self.project_access, acl.to_dict_fields()['project_access']) self.assertTrue(all(user_id in self.user_ids for user_id in acl.to_dict_fields()['users'])) self.assertIsNone(acl.to_dict_fields()['acl_id']) def test_new_containeracl_for_bare_minimum_input(self): acl = models.ContainerACL(self.container_id, self.operation, None, None) self.assertEqual(self.container_id, acl.container_id) self.assertEqual(0, len(acl.acl_users)) self.assertEqual(self.operation, acl.operation) self.assertIsNone(acl.project_access) def test_new_containeracl_with_duplicate_userids_input(self): user_ids = list(self.user_ids) user_ids = user_ids * 2 # duplicate ids acl = models.ContainerACL(self.container_id, self.operation, True, user_ids=user_ids) self.assertEqual(self.container_id, acl.container_id) self.assertEqual(self.operation, acl.operation) self.assertTrue(acl.project_access) self.assertEqual(2, len(acl.acl_users)) def test_should_throw_exception_missing_container_id(self): self.assertRaises(exception.MissingArgumentError, models.ContainerACL, None, 'read', None, ['user246']) def test_should_throw_exception_missing_operation(self): self.assertRaises(exception.MissingArgumentError, models.ContainerACL, 
self.container_id, None, None, ['user246']) def test_new_containeracl_expect_user_ids_as_list(self): acl = models.ContainerACL(self.container_id, self.operation, None, {'aUser': '12345'}) self.assertEqual(0, len(acl.acl_users)) class WhenCreatingNewSecretACLUser(utils.BaseTestCase): def setUp(self): super(WhenCreatingNewSecretACLUser, self).setUp() self.secret_acl_id = 'secret_acl_123456' self.user_ids = ['user12345', 'user67890'] def test_new_secretacl_user_for_given_all_input(self): acl_user = models.SecretACLUser(self.secret_acl_id, self.user_ids[0]) self.assertEqual(self.secret_acl_id, acl_user.acl_id) self.assertEqual(self.user_ids[0], acl_user.user_id) self.assertEqual(models.States.ACTIVE, acl_user.status) def test_new_secretacl_user_check_to_dict_fields(self): acl_user = models.SecretACLUser(self.secret_acl_id, self.user_ids[1]) self.assertEqual(self.secret_acl_id, acl_user.to_dict_fields()['acl_id']) self.assertEqual(self.user_ids[1], acl_user.to_dict_fields()['user_id']) self.assertEqual(models.States.ACTIVE, acl_user.to_dict_fields()['status']) def test_should_throw_exception_missing_user_id(self): self.assertRaises(exception.MissingArgumentError, models.SecretACLUser, self.secret_acl_id, None) class WhenCreatingNewContainerACLUser(utils.BaseTestCase): def setUp(self): super(WhenCreatingNewContainerACLUser, self).setUp() self.container_acl_id = 'container_acl_123456' self.user_ids = ['user12345', 'user67890'] def test_new_secretacl_user_for_given_all_input(self): acl_user = models.ContainerACLUser(self.container_acl_id, self.user_ids[0]) self.assertEqual(self.container_acl_id, acl_user.acl_id) self.assertEqual(self.user_ids[0], acl_user.user_id) self.assertEqual(models.States.ACTIVE, acl_user.status) def test_new_secretacl_user_check_to_dict_fields(self): acl_user = models.ContainerACLUser(self.container_acl_id, self.user_ids[1]) self.assertEqual(self.container_acl_id, acl_user.to_dict_fields()['acl_id']) self.assertEqual(self.user_ids[1], 
acl_user.to_dict_fields()['user_id']) self.assertEqual(models.States.ACTIVE, acl_user.to_dict_fields()['status']) def test_should_throw_exception_missing_user_id(self): self.assertRaises(exception.MissingArgumentError, models.ContainerACLUser, self.container_acl_id, None) class WhenCreatingNewProjectQuotas(utils.BaseTestCase): def setUp(self): super(WhenCreatingNewProjectQuotas, self).setUp() def test_create_new_project_quotas(self): project = models.Project() project.id = '12345' project.external_id = '67890' parsed_project_quotas = { 'secrets': 101, 'orders': 102, 'containers': 103, 'consumers': 105, 'cas': 106} project_quotas = models.ProjectQuotas(project.id, parsed_project_quotas) self.assertEqual('12345', project_quotas.project_id) self.assertEqual(101, project_quotas.secrets) self.assertEqual(102, project_quotas.orders) self.assertEqual(103, project_quotas.containers) self.assertEqual(105, project_quotas.consumers) self.assertEqual(106, project_quotas.cas) def test_create_new_project_quotas_with_all_default_quotas(self): project = models.Project() project.id = '12345' project.external_id = '67890' project_quotas = models.ProjectQuotas(project.id, None) self.assertEqual('12345', project_quotas.project_id) self.assertIsNone(project_quotas.secrets) self.assertIsNone(project_quotas.orders) self.assertIsNone(project_quotas.containers) self.assertIsNone(project_quotas.consumers) self.assertIsNone(project_quotas.cas) def test_create_new_project_quotas_with_some_default_quotas(self): project = models.Project() project.id = '12345' project.external_id = '67890' parsed_project_quotas = { 'secrets': 101, 'containers': 103, 'consumers': 105} project_quotas = models.ProjectQuotas(project.id, parsed_project_quotas) self.assertEqual('12345', project_quotas.project_id) self.assertEqual(101, project_quotas.secrets) self.assertIsNone(project_quotas.orders) self.assertEqual(103, project_quotas.containers) self.assertEqual(105, project_quotas.consumers) 
self.assertIsNone(project_quotas.cas) def test_should_throw_exception_missing_project_id(self): self.assertRaises(exception.MissingArgumentError, models.ProjectQuotas, None, None) def test_project_quotas_check_to_dict_fields(self): project = models.Project() project.id = '12345' project.external_id = '67890' parsed_project_quotas = { 'secrets': 101, 'orders': 102, 'containers': 103, 'consumers': 105, 'cas': 106} project_quotas = models.ProjectQuotas(project.id, parsed_project_quotas) self.assertEqual(project.id, project_quotas.to_dict_fields()['project_id']) self.assertEqual(101, project_quotas.to_dict_fields()['secrets']) self.assertEqual(102, project_quotas.to_dict_fields()['orders']) self.assertEqual(103, project_quotas.to_dict_fields()['containers']) self.assertEqual(105, project_quotas.to_dict_fields()['consumers']) self.assertEqual(106, project_quotas.to_dict_fields()['cas']) if __name__ == '__main__': unittest.main() barbican-2.0.0/barbican/tests/model/repositories/0000775000567000056710000000000012701406024023141 5ustar jenkinsjenkins00000000000000barbican-2.0.0/barbican/tests/model/repositories/test_repositories_acls.py0000664000567000056710000005473612701405673030333 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
from barbican.common import exception from barbican.model import models from barbican.model import repositories from barbican.tests import database_utils class TestACLMixin(object): def _assert_acl_users(self, user_ids, acls, acl_id, check_size=True): """Checks that all input users are present in matching acl users data. It also checks if number of acl users are same as input users when check_size flag is True. """ acls_map = self._map_id_to_acl(acls) acl_users = acls_map[acl_id].to_dict_fields()['users'] if check_size: self.assertEqual(len(user_ids), len(acl_users)) self.assertTrue(all(user_id in user_ids for user_id in acl_users)) def _map_id_to_acl(self, acls): """Provides dictionary of id and acl from acls list.""" m = {} for acl in acls: m[acl.id] = acl return m class WhenTestingSecretACLRepository(database_utils.RepositoryTestCase, TestACLMixin): def setUp(self): super(WhenTestingSecretACLRepository, self).setUp() self.acl_repo = repositories.get_secret_acl_repository() def _create_base_secret(self, project_id=None): # Setup the secret and needed base relationship secret_repo = repositories.get_secret_repository() session = secret_repo.get_session() if project_id is None: # don't re-create project if it created earlier project = models.Project() project.external_id = "keystone_project_id" project.save(session=session) project_id = project.id secret_model = models.Secret() secret_model.project_id = project_id secret = secret_repo.create_from(secret_model, session=session) secret.save(session=session) session.commit() return secret def test_get_by_secret_id(self): session = self.acl_repo.get_session() secret = self._create_base_secret() acls = self.acl_repo.get_by_secret_id(secret.id, session) self.assertEqual(0, len(acls)) acl1 = self.acl_repo.create_from(models.SecretACL(secret.id, 'read', True, ['u1', 'u2']), session) acls = self.acl_repo.get_by_secret_id(secret.id, session) self.assertEqual(1, len(acls)) self.assertEqual(acl1.id, acls[0].id) 
self.assertEqual('read', acls[0].operation) self._assert_acl_users(['u2', 'u1'], acls, acl1.id) def test_get_by_entity_id(self): session = self.acl_repo.get_session() secret = self._create_base_secret() acl1 = self.acl_repo.create_from(models.SecretACL( secret.id, 'read', True, ['u1', 'u2']), session) acl = self.acl_repo.get(acl1.id, session) self.assertIsNotNone(acl) self.assertEqual(acl1.id, acl.id) self.assertEqual('read', acl.operation) self._assert_acl_users(['u1', 'u2'], [acl], acl1.id) self.acl_repo.delete_entity_by_id(acl1.id, session) acl = self.acl_repo.get(acl1.id, session, suppress_exception=True) self.assertIsNone(acl) def test_should_raise_notfound_exception_get_by_entity_id(self): self.assertRaises(exception.NotFound, self.acl_repo.get, "invalid_id", suppress_exception=False) def test_create_or_replace_from_for_new_acls(self): """Check create_or_replace_from and get count call. It creates new acls with users and make sure that same users are returned when acls are queries by secret id. It uses get count to assert expected number of acls for that secret. 
""" session = self.acl_repo.get_session() secret = self._create_base_secret() acl1 = self.acl_repo.create_from(models.SecretACL( secret.id, 'read'), session) self.acl_repo.create_or_replace_from( secret, acl1, user_ids=['u1', 'u2'], session=session) acl2 = self.acl_repo.create_from(models.SecretACL( secret.id, 'write', False), session) self.acl_repo.create_or_replace_from( secret, acl2, user_ids=['u1', 'u2', 'u3'], session=session) acl3 = self.acl_repo.create_from(models.SecretACL( secret.id, 'delete'), session) self.acl_repo.create_or_replace_from( secret, acl3, user_ids=[], session=session) acls = self.acl_repo.get_by_secret_id(secret.id, session) self.assertEqual(3, len(acls)) id_map = self._map_id_to_acl(acls) self.assertTrue(id_map[acl1.id].project_access) self.assertFalse(id_map[acl2.id].project_access) self.assertEqual('read', id_map[acl1.id].operation) self.assertEqual('write', id_map[acl2.id].operation) self.assertEqual('delete', id_map[acl3.id].operation) # order of input users should not matter self._assert_acl_users(['u1', 'u2'], acls, acl1.id) self._assert_acl_users(['u2', 'u1'], acls, acl1.id) self._assert_acl_users(['u2', 'u1', 'u3'], acls, acl2.id) count = self.acl_repo.get_count(secret.id, session) self.assertEqual(3, count) self.assertEqual(count, len(acls)) def test_create_or_replace_from_with_none_or_blank_users(self): session = self.acl_repo.get_session() secret = self._create_base_secret() acl1 = self.acl_repo.create_from(models.SecretACL( secret.id, 'read'), session) self.acl_repo.create_or_replace_from( secret, acl1, user_ids=None, session=session) acl2 = self.acl_repo.create_from(models.SecretACL( secret.id, 'list'), session) self.acl_repo.create_or_replace_from( secret, acl1, user_ids=[], session=session) acls = self.acl_repo.get_by_secret_id(secret.id, session) id_map = self._map_id_to_acl(acls) self.assertIsNone(id_map[acl1.id].to_dict_fields().get('users')) self.assertIsNone(id_map[acl2.id].to_dict_fields().get('users')) def 
test_create_or_replace_from_for_existing_acls(self): """Check create_or_replace_from and get count call. It modifies existing acls with users and make sure that updated users and project_access flag changes are returned when acls are queries by secret id. It uses get count to assert expected number of acls for that secret. """ session = self.acl_repo.get_session() secret = self._create_base_secret() acl1 = self.acl_repo.create_from(models.SecretACL( secret.id, 'read'), session) self.acl_repo.create_or_replace_from( secret, acl1, user_ids=['u1', 'u2'], session=session) acl2 = self.acl_repo.create_from(models.SecretACL( secret.id, 'write'), session) self.acl_repo.create_or_replace_from( secret, acl2, user_ids=['u1', 'u2', 'u3'], session=session) acl3 = self.acl_repo.create_from(models.SecretACL( secret.id, 'list'), session) self.acl_repo.create_or_replace_from( secret, acl3, user_ids=[], session=session) acls = self.acl_repo.get_by_secret_id(secret.id, session) self.assertEqual(3, len(acls)) id_map = self._map_id_to_acl(acls) # replace users in existing acls id_map[acl1.id].project_access = False self.acl_repo.create_or_replace_from( secret, id_map[acl1.id], user_ids=['u5'], session=session) self.acl_repo.create_or_replace_from( secret, id_map[acl2.id], user_ids=['u1', 'u2', 'u3', 'u4'], session=session) self.acl_repo.create_or_replace_from( secret, id_map[acl3.id], user_ids=['u1', 'u2', 'u4'], session=session) session.commit() # commit the changes made so far acls = self.acl_repo.get_by_secret_id(secret.id, session) id_map = self._map_id_to_acl(acls) self.assertEqual(3, len(acls)) self.assertFalse(id_map[acl1.id].project_access) self.assertTrue(id_map[acl2.id].project_access) self.assertTrue(id_map[acl3.id].project_access) self._assert_acl_users(['u5'], acls, acl1.id) self._assert_acl_users(['u1', 'u2', 'u3', 'u4'], acls, acl2.id) self._assert_acl_users(['u1', 'u2', 'u4'], acls, acl3.id) def test_get_count(self): session = self.acl_repo.get_session() secret1 = 
self._create_base_secret() acl1 = self.acl_repo.create_from(models.SecretACL(secret1.id, 'read', None, ['u1', 'u2']), session) self.acl_repo.create_or_replace_from(secret1, acl1) secret2 = self._create_base_secret(secret1.project.id) acl21 = self.acl_repo.create_from(models.SecretACL(secret2.id, 'read', None, ['u3', 'u4']), session) self.acl_repo.create_or_replace_from(secret2, acl21) acl22 = self.acl_repo.create_from(models.SecretACL(secret2.id, 'write', None, ['u5', 'u6']), session) self.acl_repo.create_or_replace_from(secret2, acl22) self.assertEqual(1, self.acl_repo.get_count(secret1.id)) self.assertEqual(2, self.acl_repo.get_count(secret2.id)) def test_delete_single_acl_and_count(self): session = self.acl_repo.get_session() secret = self._create_base_secret() acl1 = self.acl_repo.create_from(models.SecretACL(secret.id, 'read', None, ['u1', 'u2']), session) self.acl_repo.create_or_replace_from(secret, acl1) acl2 = self.acl_repo.create_from( models.SecretACL(secret.id, 'write'), session) self.acl_repo.create_or_replace_from( secret, acl2, user_ids=['u1', 'u2', 'u3']) acl3 = self.acl_repo.create_from(models.SecretACL( secret.id, 'list'), session) self.acl_repo.create_or_replace_from(secret, acl3, user_ids=['u1', 'u3']) count = self.acl_repo.get_count(secret.id) self.assertEqual(3, count) self.acl_repo.delete_entity_by_id(acl2.id, None) session.commit() self.assertEqual(2, len(secret.secret_acls)) deleted_acl = self.acl_repo.get(acl2.id, suppress_exception=True) self.assertIsNone(deleted_acl) acls = self.acl_repo.get_by_secret_id(secret.id) self.assertEqual(2, len(acls)) count = self.acl_repo.get_count(secret.id) self.assertEqual(2, count) def test_delete_acls_for_secret(self): session = self.acl_repo.get_session() secret = self._create_base_secret() acl1 = self.acl_repo.create_from(models.SecretACL( secret.id, 'read'), session) self.acl_repo.create_or_replace_from( secret, acl1, user_ids=['u1', 'u2'], session=session) acl2 = 
self.acl_repo.create_from(models.SecretACL( secret.id, 'write'), session) self.acl_repo.create_or_replace_from( secret, acl2, user_ids=['u1', 'u2', 'u3'], session=session) self.acl_repo.delete_acls_for_secret(secret) acls = self.acl_repo.get_by_secret_id(secret.id) self.assertEqual(0, len(acls)) class WhenTestingContainerACLRepository(database_utils.RepositoryTestCase, TestACLMixin): def setUp(self): super(WhenTestingContainerACLRepository, self).setUp() self.acl_repo = repositories.get_container_acl_repository() def _create_base_container(self, project_id=None): # Setup the container and needed base relationship container_repo = repositories.get_container_repository() session = container_repo.get_session() if project_id is None: project = models.Project() project.external_id = "keystone_project_id" project.save(session=session) project_id = project.id container = models.Container() container.project_id = project_id container.save(session=session) session.commit() return container def test_get_by_container_id(self): session = self.acl_repo.get_session() container = self._create_base_container() acls = self.acl_repo.get_by_container_id(container.id, session) self.assertEqual(0, len(acls)) acl1 = self.acl_repo.create_from(models.ContainerACL(container.id, 'read', True, ['u1', 'u2']), session) acls = self.acl_repo.get_by_container_id(container.id, session) self.assertEqual(1, len(acls)) self.assertEqual(acl1.id, acls[0].id) self.assertEqual('read', acls[0].operation) self._assert_acl_users(['u1', 'u2'], acls, acl1.id) def test_get_by_entity_id(self): session = self.acl_repo.get_session() container = self._create_base_container() acl1 = self.acl_repo.create_from(models.ContainerACL( container.id, 'read', True, ['u1', 'u2']), session) acl = self.acl_repo.get(acl1.id, session) self.assertIsNotNone(acl) self.assertEqual(acl1.id, acl.id) self.assertEqual('read', acl.operation) self._assert_acl_users(['u1', 'u2'], [acl], acl1.id) self.acl_repo.delete_entity_by_id(acl1.id, 
session) acl = self.acl_repo.get(acl1.id, session, suppress_exception=True) self.assertIsNone(acl) def test_should_raise_notfound_exception_get_by_entity_id(self): self.assertRaises(exception.NotFound, self.acl_repo.get, "invalid_id", suppress_exception=False) def test_create_or_replace_from_for_new_acls(self): """Check create_or_replace_from and get count call. It creates new acls with users and make sure that same users are returned when acls are queries by secret id. It uses get count to assert expected number of acls for that secret. """ session = self.acl_repo.get_session() container = self._create_base_container() acl1 = self.acl_repo.create_from(models.ContainerACL( container.id, 'read'), session) self.acl_repo.create_or_replace_from( container, acl1, user_ids=['u1', 'u2'], session=session) acl2 = self.acl_repo.create_from(models.ContainerACL( container.id, 'write', False), session) self.acl_repo.create_or_replace_from( container, acl2, user_ids=['u1', 'u2', 'u3'], session=session) acl3 = self.acl_repo.create_from(models.ContainerACL( container.id, 'list'), session) self.acl_repo.create_or_replace_from( container, acl3, user_ids=[], session=session) acls = self.acl_repo.get_by_container_id(container.id, session) self.assertEqual(3, len(acls)) id_map = self._map_id_to_acl(acls) self.assertTrue(id_map[acl1.id].project_access) self.assertFalse(id_map[acl2.id].project_access) self.assertEqual('read', id_map[acl1.id].operation) self.assertEqual('write', id_map[acl2.id].operation) self.assertEqual('list', id_map[acl3.id].operation) # order of input users should not matter self._assert_acl_users(['u1', 'u2'], acls, acl1.id) self._assert_acl_users(['u2', 'u1'], acls, acl1.id) self._assert_acl_users(['u2', 'u1', 'u3'], acls, acl2.id) count = self.acl_repo.get_count(container.id, session) self.assertEqual(3, count) self.assertEqual(count, len(acls)) def test_create_or_replace_from_with_none_or_blank_users(self): session = self.acl_repo.get_session() container = 
self._create_base_container() acl1 = self.acl_repo.create_from(models.ContainerACL( container.id, 'read'), session) self.acl_repo.create_or_replace_from( container, acl1, user_ids=None, session=session) acl2 = self.acl_repo.create_from(models.ContainerACL( container.id, 'write'), session) self.acl_repo.create_or_replace_from( container, acl1, user_ids=[], session=session) acls = self.acl_repo.get_by_container_id(container.id, session) id_map = self._map_id_to_acl(acls) self.assertIsNone(id_map[acl1.id].to_dict_fields().get('users')) self.assertIsNone(id_map[acl2.id].to_dict_fields().get('users')) def test_create_or_replace_from_for_existing_acls(self): """Check create_or_replace_from and get count call. It modifies existing acls with users and make sure that updated users and project_access flag changes are returned when acls are queries by secret id. It uses get count to assert expected number of acls for that secret. """ session = self.acl_repo.get_session() container = self._create_base_container() acl1 = self.acl_repo.create_from(models.ContainerACL( container.id, 'read'), session) self.acl_repo.create_or_replace_from( container, acl1, user_ids=['u1', 'u2'], session=session) acl2 = self.acl_repo.create_from(models.ContainerACL( container.id, 'write'), session) self.acl_repo.create_or_replace_from( container, acl2, user_ids=['u1', 'u2', 'u3'], session=session) acl3 = self.acl_repo.create_from(models.ContainerACL( container.id, 'list'), session) self.acl_repo.create_or_replace_from( container, acl3, user_ids=[], session=session) acls = self.acl_repo.get_by_container_id(container.id, session) self.assertEqual(3, len(acls)) id_map = self._map_id_to_acl(acls) # replace users in existing acls id_map[acl1.id].project_access = False self.acl_repo.create_or_replace_from( container, id_map[acl1.id], user_ids=['u5'], session=session) self.acl_repo.create_or_replace_from( container, id_map[acl2.id], user_ids=['u1', 'u2', 'u3', 'u4'], session=session) 
self.acl_repo.create_or_replace_from( container, id_map[acl3.id], user_ids=['u1', 'u2', 'u4'], session=session) session.commit() acls = self.acl_repo.get_by_container_id(container.id, session) id_map = self._map_id_to_acl(acls) self.assertEqual(3, len(acls)) self.assertFalse(id_map[acl1.id].project_access) self.assertTrue(id_map[acl2.id].project_access) self.assertTrue(id_map[acl3.id].project_access) self._assert_acl_users(['u5'], acls, acl1.id) self._assert_acl_users(['u1', 'u2', 'u3', 'u4'], acls, acl2.id) self._assert_acl_users(['u1', 'u2', 'u4'], acls, acl3.id) def test_get_count(self): session = self.acl_repo.get_session() container1 = self._create_base_container() acl1 = self.acl_repo.create_from(models.ContainerACL( container1.id, 'read', None, ['u1', 'u2']), session) self.acl_repo.create_or_replace_from(container1, acl1) container2 = self._create_base_container(container1.project_id) acl21 = self.acl_repo.create_from(models.ContainerACL( container2.id, 'read', None, ['u3', 'u4']), session) self.acl_repo.create_or_replace_from(container2, acl21) acl22 = self.acl_repo.create_from(models.ContainerACL( container2.id, 'write', None, ['u5', 'u6']), session) self.acl_repo.create_or_replace_from(container2, acl22) self.assertEqual(1, self.acl_repo.get_count(container1.id)) self.assertEqual(2, self.acl_repo.get_count(container2.id)) def test_delete_single_acl_and_count(self): session = self.acl_repo.get_session() container = self._create_base_container() acl1 = self.acl_repo.create_from(models.ContainerACL( container.id, 'read'), session) self.acl_repo.create_or_replace_from(container, acl1, user_ids=['u1', 'u2']) acl2 = self.acl_repo.create_from(models.ContainerACL( container.id, 'write'), session) self.acl_repo.create_or_replace_from(container, acl2, user_ids=['u1', 'u2', 'u3']) acl3 = self.acl_repo.create_from(models.ContainerACL( container.id, 'list'), session) self.acl_repo.create_or_replace_from(container, acl3, user_ids=['u1', 'u3']) count = 
self.acl_repo.get_count(container.id) self.assertEqual(3, count) self.acl_repo.delete_entity_by_id(acl2.id, None) session.commit() # commit the changes made so far self.assertEqual(2, len(container.container_acls)) deleted_acl = self.acl_repo.get(acl2.id, suppress_exception=True) self.assertIsNone(deleted_acl) acls = self.acl_repo.get_by_container_id(container.id) self.assertEqual(2, len(acls)) count = self.acl_repo.get_count(container.id) self.assertEqual(2, count) def test_delete_acls_for_secret(self): session = self.acl_repo.get_session() container = self._create_base_container() acl1 = self.acl_repo.create_from(models.ContainerACL( container.id, 'read'), session) self.acl_repo.create_or_replace_from( container, acl1, user_ids=['u1', 'u2'], session=session) acl2 = self.acl_repo.create_from(models.ContainerACL( container.id, 'write'), session) self.acl_repo.create_or_replace_from( container, acl2, user_ids=['u1', 'u2', 'u3'], session=session) self.acl_repo.delete_acls_for_container(container) acls = self.acl_repo.get_by_container_id(container.id) self.assertEqual(0, len(acls)) barbican-2.0.0/barbican/tests/model/repositories/__init__.py0000664000567000056710000000000012701405673025251 0ustar jenkinsjenkins00000000000000barbican-2.0.0/barbican/tests/model/repositories/test_repositories_transport_keys.py0000664000567000056710000000267412701405673032472 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
from barbican.common import exception from barbican.model import repositories from barbican.tests import database_utils class WhenTestingTransportKeyRepository(database_utils.RepositoryTestCase): def setUp(self): super(WhenTestingTransportKeyRepository, self).setUp() self.repo = repositories.TransportKeyRepo() def test_should_raise_no_result_found_with_plugin_name(self): session = self.repo.get_session() self.assertRaises( exception.NotFound, self.repo.get_by_create_date, plugin_name="plugin", session=session, suppress_exception=False) def test_should_raise_no_result_found_no_plugin_name(self): session = self.repo.get_session() self.assertRaises( exception.NotFound, self.repo.get_by_create_date, session=session, suppress_exception=False) barbican-2.0.0/barbican/tests/model/repositories/test_repositories_consumers.py0000664000567000056710000001526512701405673031421 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
from barbican.common import exception from barbican.model import models from barbican.model import repositories from barbican.tests import database_utils as utils class WhenTestingContainerConsumerRepository(utils.RepositoryTestCase): def setUp(self): super(WhenTestingContainerConsumerRepository, self).setUp() self.repo = repositories.ContainerConsumerRepo() self.repo_container = repositories.ContainerRepo() def test_should_update_with_duplicate_consumer(self): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) container = models.Container() container.project_id = project.id container.save(session=session) # Create a consumer. consumer = models.ContainerConsumerMetadatum( container.id, project.id, {'name': 'name', 'URL': 'www.foo.com'}) consumer.save(session=session) # Commit things so far, because the 'create_or_update_from' call below # will handle consumer metadata with same composite key items already # existing, and then rollback this session's transaction, which would # remove the items added above and result in a not-found error below. session.commit() # Try to create a consumer on the container...should re-use the # one added above. consumer2 = models.ContainerConsumerMetadatum( container.id, project.id, {'name': 'name', 'URL': 'www.foo.com'}) self.repo.create_or_update_from(consumer2, container, session=session) container2 = self.repo_container.get( container.id, project.external_id, session=session) self.assertEqual(1, len(container2.consumers)) def test_should_raise_duplicate_create_same_composite_key_no_id(self): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) container = models.Container() container.project_id = project.id container.save(session=session) # Create a consumer. 
consumer = models.ContainerConsumerMetadatum( container.id, project.id, {'name': 'name', 'URL': 'www.foo.com'}) consumer.save(session=session) # Commit things so far, because the 'create_from' call below will # handle consumer metadata with same composite key items already # existing, and then rollback this session's transaction, which would # remove the items added above and result in a not-found error below. session.commit() # Create a new entity with the same composite key as the first one. consumer2 = models.ContainerConsumerMetadatum( container.id, project.id, {'name': 'name', 'URL': 'www.foo.com'}) exception_result = self.assertRaises( exception.Duplicate, self.repo.create_from, consumer2, session=session) self.assertEqual( "Entity 'ContainerConsumer' already exists", exception_result.message) def test_should_raise_no_result_found_get_container_id(self): session = self.repo.get_session() self.assertRaises( exception.NotFound, self.repo.get_by_container_id, "my container id", session=session, suppress_exception=False) def test_should_raise_no_result_found_get_by_values_no_deleted(self): session = self.repo.get_session() self.assertRaises( exception.NotFound, self.repo.get_by_values, "my container id", "name", "url", session=session, suppress_exception=False, show_deleted=False) def test_should_raise_no_result_found_get_by_values_show_deleted(self): session = self.repo.get_session() self.assertRaises( exception.NotFound, self.repo.get_by_values, "my container id", "name", "url", session=session, suppress_exception=False, show_deleted=True) def test_should_get_count_zero(self): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) session.commit() count = self.repo.get_count(project.id, session=session) self.assertEqual(0, count) def test_should_get_count_one(self): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" 
project.save(session=session) container = models.Container() container.project_id = project.id container.save(session=session) consumer = models.ContainerConsumerMetadatum( container.id, project.id, {'name': 'name', 'URL': 'www.foo.com'}) consumer.save(session=session) session.commit() count = self.repo.get_count(project.id, session=session) self.assertEqual(1, count) def test_should_get_count_one_after_delete(self): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) container = models.Container() container.project_id = project.id container.save(session=session) consumer = models.ContainerConsumerMetadatum( container.id, project.id, {'name': 'name1', 'URL': 'www.foo.com'}) consumer.save(session=session) consumer = models.ContainerConsumerMetadatum( container.id, project.id, {'name': 'name2', 'URL': 'www.foo.com'}) consumer.save(session=session) session.commit() count = self.repo.get_count(project.id, session=session) self.assertEqual(2, count) self.repo.delete_entity_by_id(consumer.id, "my keystone id", session=session) session.commit() count = self.repo.get_count(project.id, session=session) self.assertEqual(1, count) barbican-2.0.0/barbican/tests/model/repositories/test_repositories_certificate_authorities.py0000664000567000056710000004372112701405673034303 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
import datetime from barbican.common import exception from barbican.common import resources as res from barbican.model import models from barbican.model import repositories from barbican.tests import database_utils class WhenTestingCertificateAuthorityRepo(database_utils.RepositoryTestCase): def setUp(self): super(WhenTestingCertificateAuthorityRepo, self).setUp() self.ca_repo = repositories.CertificateAuthorityRepo() expiration = (datetime.datetime.utcnow() + datetime.timedelta(minutes=10)) self.parsed_ca = {'plugin_name': 'dogtag_plugin', 'plugin_ca_id': 'ca_master', 'name': 'Dogtag CA', 'expiration': expiration.isoformat(), 'description': 'Master CA for Dogtag plugin', 'ca_signing_certificate': 'XXXXX', 'intermediates': 'YYYYY'} self.parsed_ca2 = {'plugin_name': 'symantec_plugin', 'plugin_ca_id': 'ca_master_2', 'name': 'Symantec CA2', 'expiration': expiration.isoformat(), 'description': 'Master CA for Dogtag plugin2', 'ca_signing_certificate': 'XXXXX', 'intermediates': 'YYYYY'} def _add_ca(self, parsed_ca, session): ca = self.ca_repo.create_from(models.CertificateAuthority(parsed_ca), session=session) return ca def test_get_by_create_date(self): session = self.ca_repo.get_session() ca = self._add_ca(self.parsed_ca, session) session.commit() retrieved_cas, offset, limit, total = self.ca_repo.get_by_create_date( session=session ) self.assertEqual([ca.id], [s.id for s in retrieved_cas]) self.assertEqual([ca.plugin_name], [s.plugin_name for s in retrieved_cas]) self.assertEqual( [self.parsed_ca['ca_signing_certificate']], [s.ca_meta['ca_signing_certificate'].value for s in retrieved_cas]) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(1, total) def test_get_by_create_date_with_plugin_name_filter(self): session = self.ca_repo.get_session() ca1 = self._add_ca(self.parsed_ca, session) self._add_ca(self.parsed_ca2, session) retrieved_cas, offset, limit, total = self.ca_repo.get_by_create_date( session=session, 
plugin_name=self.parsed_ca['plugin_name'] ) self.assertEqual([ca1.id], [s.id for s in retrieved_cas]) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(1, total) def test_get_by_create_date_with_plugin_ca_id_filter(self): session = self.ca_repo.get_session() ca1 = self._add_ca(self.parsed_ca, session) self._add_ca(self.parsed_ca2, session) retrieved_cas, offset, limit, total = self.ca_repo.get_by_create_date( session=session, plugin_ca_id=self.parsed_ca['plugin_ca_id'] ) self.assertEqual([ca1.id], [s.id for s in retrieved_cas]) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(1, total) def test_get_by_create_date_nothing(self): session = self.ca_repo.get_session() retrieved_cas, offset, limit, total = self.ca_repo.get_by_create_date( session=session, suppress_exception=True ) self.assertEqual([], retrieved_cas) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(0, total) def test_do_entity_name(self): self.assertEqual("CertificateAuthority", self.ca_repo._do_entity_name()) def test_should_raise_no_result_found(self): session = self.ca_repo.get_session() self.assertRaises( exception.NotFound, self.ca_repo.get_by_create_date, session=session, suppress_exception=False) def test_get_count_should_return_zero(self): session = self.ca_repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) session.commit() count = self.ca_repo.get_count(project.id, session=session) self.assertEqual(0, count) def test_get_count_should_return_one(self): session = self.ca_repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) ca_model = models.CertificateAuthority(self.parsed_ca) ca_model.project_id = project.id self.ca_repo.create_from(ca_model, session=session) session.commit() count = self.ca_repo.get_count(project.id, session=session) self.assertEqual(1, count) def 
test_get_count_should_return_one_after_delete(self): session = self.ca_repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) ca_model = models.CertificateAuthority(self.parsed_ca) ca_model.project_id = project.id self.ca_repo.create_from(ca_model, session=session) ca_model = models.CertificateAuthority(self.parsed_ca) ca_model.project_id = project.id self.ca_repo.create_from(ca_model, session=session) session.commit() count = self.ca_repo.get_count(project.id, session=session) self.assertEqual(2, count) self.ca_repo.delete_entity_by_id(ca_model.id, "my keystone id", session=session) session.commit() count = self.ca_repo.get_count(project.id, session=session) self.assertEqual(1, count) class WhenTestingProjectCARepo(database_utils.RepositoryTestCase): def setUp(self): super(WhenTestingProjectCARepo, self).setUp() self.ca_repo = repositories.CertificateAuthorityRepo() self.project_ca_repo = repositories.ProjectCertificateAuthorityRepo() expiration = (datetime.datetime.utcnow() + datetime.timedelta(minutes=10)) self.parsed_ca = {'plugin_name': 'dogtag_plugin', 'plugin_ca_id': 'ca_master', 'expiration': expiration.isoformat(), 'name': 'Dogtag CA', 'description': 'Master CA for Dogtag plugin', 'ca_signing_certificate': 'XXXXX', 'intermediates': 'YYYYY'} self.parsed_ca2 = {'plugin_name': 'symantec_plugin', 'plugin_ca_id': 'ca_master_2', 'expiration': expiration.isoformat(), 'name': 'Symantec CA2', 'description': 'Master CA for Dogtag plugin2', 'ca_signing_certificate': 'XXXXX', 'intermediates': 'YYYYY'} def _add_ca(self, parsed_ca, session): ca = self.ca_repo.create_from(models.CertificateAuthority(parsed_ca), session=session) return ca def _add_project(self, project_id, session): project = models.Project() project.external_id = project_id project.save(session=session) return project def _add_project_ca(self, project_id, ca_id, session): project_ca = self.project_ca_repo.create_from( 
models.ProjectCertificateAuthority(project_id, ca_id), session) return project_ca def test_get_by_create_date(self): session = self.ca_repo.get_session() ca = self._add_ca(self.parsed_ca, session) project = self._add_project("project_1", session) self._add_project_ca(project.id, ca.id, session) session.commit() retrieved_pcas, offset, limit, total = ( self.project_ca_repo.get_by_create_date(session=session)) self.assertEqual([ca.id], [s.ca_id for s in retrieved_pcas]) self.assertEqual([project.id], [s.project_id for s in retrieved_pcas]) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(1, total) def test_get_project_entities(self): session = self.ca_repo.get_session() ca = self._add_ca(self.parsed_ca, session) project = self._add_project("project_1", session) self._add_project_ca(project.id, ca.id, session) session.commit() retrieved_pcas = self.project_ca_repo.get_project_entities( project.id, session) self.assertEqual([ca.id], [s.ca_id for s in retrieved_pcas]) def test_get_by_create_date_with_ca_id_filter(self): session = self.ca_repo.get_session() ca = self._add_ca(self.parsed_ca, session) project = self._add_project("project_1", session) project_ca = self._add_project_ca(project.id, ca.id, session) ca2 = self._add_ca(self.parsed_ca2, session) project2 = self._add_project("project_2", session) self._add_project_ca(project2.id, ca2.id, session) session.commit() retrieved_pcas, offset, limit, total = ( self.project_ca_repo.get_by_create_date( session=session, ca_id=ca.id)) self.assertEqual([project_ca.id], [s.id for s in retrieved_pcas]) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(1, total) def test_get_by_create_date_nothing(self): session = self.project_ca_repo.get_session() retrieved_pcas, offset, limit, total = ( self.project_ca_repo.get_by_create_date( session=session, suppress_exception=True)) self.assertEqual([], retrieved_pcas) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(0, 
total) def test_do_entity_name(self): self.assertEqual("ProjectCertificateAuthority", self.project_ca_repo._do_entity_name()) def test_should_raise_no_result_found(self): session = self.project_ca_repo.get_session() self.assertRaises( exception.NotFound, self.project_ca_repo.get_by_create_date, session=session, suppress_exception=False) class WhenTestingPreferredCARepo(database_utils.RepositoryTestCase): def setUp(self): super(WhenTestingPreferredCARepo, self).setUp() self.ca_repo = repositories.CertificateAuthorityRepo() self.preferred_ca_repo = ( repositories.PreferredCertificateAuthorityRepo()) expiration = (datetime.datetime.utcnow() + datetime.timedelta(minutes=10)) expiration_later = (datetime.datetime.utcnow() + datetime.timedelta(days=10)) self.parsed_ca = {'plugin_name': 'dogtag_plugin', 'plugin_ca_id': 'ca_master', 'expiration': expiration.isoformat(), 'name': 'Dogtag CA', 'description': 'Master CA for Dogtag plugin', 'ca_signing_certificate': 'XXXXX', 'intermediates': 'YYYYY'} self.parsed_ca2 = {'plugin_name': 'symantec_plugin', 'plugin_ca_id': 'ca_master_2', 'expiration': expiration.isoformat(), 'name': 'Symantec CA2', 'description': 'Master CA for Dogtag plugin2', 'ca_signing_certificate': 'XXXXX', 'intermediates': 'YYYYY'} self.parsed_modified_ca = { 'plugin_name': 'dogtag_plugin', 'plugin_ca_id': 'ca_master', 'expiration': expiration_later.isoformat(), 'name': 'Dogtag CA', 'description': 'Updated Master CA for Dogtag plugin', 'ca_signing_certificate': 'XXXXX-updated-XXXXX', 'intermediates': 'YYYYY'} self.global_project = res.get_or_create_global_preferred_project() def _add_ca(self, parsed_ca, session): ca = self.ca_repo.create_from(models.CertificateAuthority(parsed_ca), session=session) return ca def _add_project(self, project_id, session): project = models.Project() project.external_id = project_id project.save(session=session) return project def _add_preferred_ca(self, project_id, ca_id, session): preferred_ca = 
self.preferred_ca_repo.create_from( models.PreferredCertificateAuthority(project_id, ca_id), session) return preferred_ca def _add_global_preferred_ca(self, ca_id, session): preferred_ca = self.preferred_ca_repo.create_from( models.PreferredCertificateAuthority( self.global_project.id, ca_id), session) return preferred_ca def test_get_by_create_date(self): session = self.ca_repo.get_session() ca = self._add_ca(self.parsed_ca, session) self._add_ca(self.parsed_ca2, session) project = self._add_project("project_1", session) self._add_preferred_ca(project.id, ca.id, session) session.commit() pca, offset, limit, total = self.preferred_ca_repo.get_by_create_date( session=session ) self.assertEqual([ca.id], [s.ca_id for s in pca]) self.assertEqual([project.id], [s.project_id for s in pca]) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(1, total) def test_get_by_create_date_with_params(self): session = self.ca_repo.get_session() ca = self._add_ca(self.parsed_ca, session) self._add_ca(self.parsed_ca2, session) project = self._add_project("project_1", session) self._add_preferred_ca(project.id, ca.id, session) session.commit() pca, offset, limit, total = self.preferred_ca_repo.get_by_create_date( session=session, project_id=project.id, ca_id=ca.id ) self.assertEqual([ca.id], [s.ca_id for s in pca]) self.assertEqual([project.id], [s.project_id for s in pca]) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(1, total) def test_get_project_entities(self): session = self.ca_repo.get_session() ca = self._add_ca(self.parsed_ca, session) self._add_ca(self.parsed_ca2, session) project = self._add_project("project_1", session) self._add_preferred_ca(project.id, ca.id, session) session.commit() pca = self.preferred_ca_repo.get_project_entities(project.id, session) self.assertEqual([ca.id], [s.ca_id for s in pca]) def test_get_nothing(self): session = self.preferred_ca_repo.get_session() retrieved_pcas, offset, limit, total = ( 
self.preferred_ca_repo.get_by_create_date( session=session, suppress_exception=True)) self.assertEqual([], retrieved_pcas) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(0, total) def test_do_entity_name(self): self.assertEqual("PreferredCertificateAuthority", self.preferred_ca_repo._do_entity_name()) def test_should_raise_no_result_found(self): session = self.preferred_ca_repo.get_session() self.assertRaises( exception.NotFound, self.preferred_ca_repo.get_by_create_date, session=session, suppress_exception=False) def test_should_raise_duplicate_entries(self): session = self.ca_repo.get_session() ca = self._add_ca(self.parsed_ca, session) ca2 = self._add_ca(self.parsed_ca2, session) project = self._add_project("project_1", session) self._add_preferred_ca(project.id, ca.id, session) self.assertRaises( exception.Duplicate, self._add_preferred_ca, project.id, ca2.id, session) def test_set_global_preferred_ca(self): session = self.ca_repo.get_session() ca = self._add_ca(self.parsed_ca, session) self._add_global_preferred_ca(ca.id, session) session.commit() pca = self.preferred_ca_repo.get_project_entities( self.global_project.id, session) self.assertEqual([ca.id], [s.ca_id for s in pca]) def test_should_create(self): session = self.ca_repo.get_session() ca = self._add_ca(self.parsed_ca, session) project = self._add_project("project_1", session) self.preferred_ca_repo.create_or_update_by_project_id( project.id, ca.id) session.commit() def test_should_update(self): session = self.ca_repo.get_session() ca1 = self._add_ca(self.parsed_ca, session) ca2 = self._add_ca(self.parsed_ca2, session) project = self._add_project("project_1", session) self.preferred_ca_repo.create_or_update_by_project_id( project.id, ca1.id) session.commit() self.preferred_ca_repo.create_or_update_by_project_id( project.id, ca2.id) session.commit() barbican-2.0.0/barbican/tests/model/repositories/test_repositories_quotas.py0000664000567000056710000002522312701405673030712 
0ustar jenkinsjenkins00000000000000# Copyright (c) 2015 Cisco Systems # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import unittest from barbican.common import exception from barbican.model import models from barbican.model import repositories from barbican.tests import database_utils class WhenTestingProjectQuotasRepo(database_utils.RepositoryTestCase): def setUp(self): super(WhenTestingProjectQuotasRepo, self).setUp() self.project_quotas_repo = repositories.ProjectQuotasRepo() self.session = self.project_quotas_repo.get_session() self.project_1 = models.Project() self.project_1.id = '11111' self.project_1.external_id = '44444' self.project_1.save(session=self.session) self.project_2 = models.Project() self.project_2.id = '22222' self.project_2.external_id = '55555' self.project_2.save(session=self.session) self.project_3 = models.Project() self.project_3.id = '33333' self.project_3.external_id = '66666' self.project_3.save(session=self.session) self.parsed_project_quotas_1 = { 'secrets': 101, 'orders': 102, 'containers': 103, 'consumers': 105, 'cas': 106} self.parsed_project_quotas_2 = { 'secrets': 201, 'orders': 202, 'containers': 203, 'consumers': 205, 'cas': 206} self.parsed_project_quotas_3 = { 'secrets': 301, 'containers': 303, 'consumers': 305} def test_get_list_of_one_project_quotas(self): self.project_quotas_repo.create_or_update_by_project_id( self.project_1.id, self.parsed_project_quotas_1, session=self.session) self.session.commit() retrieved_project_quotas, offset, 
limit, total =\ self.project_quotas_repo.get_by_create_date(session=self.session) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(1, total) self.assertEqual([self.project_1.id], [s.project_id for s in retrieved_project_quotas]) self.assertEqual([self.project_1.external_id], [s.project.external_id for s in retrieved_project_quotas]) self.assertEqual([101], [s.secrets for s in retrieved_project_quotas]) self.assertEqual([102], [s.orders for s in retrieved_project_quotas]) self.assertEqual([103], [s.containers for s in retrieved_project_quotas]) self.assertEqual([105], [s.consumers for s in retrieved_project_quotas]) self.assertEqual([106], [s.cas for s in retrieved_project_quotas]) def test_get_list_of_two_project_quotas(self): self.project_quotas_repo.create_or_update_by_project_id( self.project_1.id, self.parsed_project_quotas_1, session=self.session) self.project_quotas_repo.create_or_update_by_project_id( self.project_2.id, self.parsed_project_quotas_2, session=self.session) self.session.commit() retrieved_project_quotas, offset, limit, total =\ self.project_quotas_repo.get_by_create_date(session=self.session) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(2, total) self.assertItemsEqual([self.project_1.id, self.project_2.id], [s.project_id for s in retrieved_project_quotas]) self.assertItemsEqual([self.project_1.external_id, self.project_2.external_id], [s.project.external_id for s in retrieved_project_quotas]) self.assertItemsEqual([101, 201], [s.secrets for s in retrieved_project_quotas]) self.assertItemsEqual([102, 202], [s.orders for s in retrieved_project_quotas]) self.assertItemsEqual([103, 203], [s.containers for s in retrieved_project_quotas]) self.assertItemsEqual([105, 205], [s.consumers for s in retrieved_project_quotas]) self.assertItemsEqual([106, 206], [s.cas for s in retrieved_project_quotas]) def test_should_raise_get_list_of_zero_project_quotas(self): self.assertRaises( exception.NotFound, 
self.project_quotas_repo.get_by_create_date, session=self.session, suppress_exception=False) def test_should_suppress_get_list_of_zero_project_quotas(self): retrieved_project_quotas, offset, limit, total =\ self.project_quotas_repo.get_by_create_date( session=self.session, suppress_exception=True) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(0, total) def test_get_specific_project_quotas(self): self.project_quotas_repo.create_or_update_by_project_id( self.project_1.id, self.parsed_project_quotas_1, session=self.session) self.session.commit() retrieved_project_quotas =\ self.project_quotas_repo.get_by_external_project_id( self.project_1.external_id, session=self.session) self.assertEqual(self.project_1.id, retrieved_project_quotas.project_id) self.assertEqual(self.project_1.external_id, retrieved_project_quotas.project.external_id) self.assertEqual(101, retrieved_project_quotas.secrets) self.assertEqual(102, retrieved_project_quotas.orders) self.assertEqual(103, retrieved_project_quotas.containers) self.assertEqual(105, retrieved_project_quotas.consumers) self.assertEqual(106, retrieved_project_quotas.cas) def test_project_quotas_with_some_defaults(self): self.project_quotas_repo.create_or_update_by_project_id( self.project_3.id, self.parsed_project_quotas_3, session=self.session) self.session.commit() retrieved_project_quotas =\ self.project_quotas_repo.get_by_external_project_id( self.project_3.external_id, session=self.session) self.assertEqual(self.project_3.id, retrieved_project_quotas.project_id) self.assertEqual(self.project_3.external_id, retrieved_project_quotas.project.external_id) self.assertEqual(301, retrieved_project_quotas.secrets) self.assertIsNone(retrieved_project_quotas.orders) self.assertEqual(303, retrieved_project_quotas.containers) self.assertEqual(305, retrieved_project_quotas.consumers) self.assertIsNone(retrieved_project_quotas.cas) def test_update_specific_project_quotas(self): 
self.project_quotas_repo.create_or_update_by_project_id( self.project_1.id, self.parsed_project_quotas_1, session=self.session) self.session.commit() self.project_quotas_repo.create_or_update_by_project_id( self.project_1.id, self.parsed_project_quotas_2, session=self.session) self.session.commit() retrieved_project_quotas =\ self.project_quotas_repo.get_by_external_project_id( self.project_1.external_id, session=self.session) self.assertEqual(self.project_1.id, retrieved_project_quotas.project_id) self.assertEqual(self.project_1.external_id, retrieved_project_quotas.project.external_id) self.assertEqual(201, retrieved_project_quotas.secrets) self.assertEqual(202, retrieved_project_quotas.orders) self.assertEqual(203, retrieved_project_quotas.containers) self.assertEqual(205, retrieved_project_quotas.consumers) self.assertEqual(206, retrieved_project_quotas.cas) def test_should_raise_get_missing_specific_project_quotas(self): self.assertRaises( exception.NotFound, self.project_quotas_repo.get_by_external_project_id, 'dummy', suppress_exception=False, session=self.session) def test_should_suppress_get_missing_specific_project_quotas(self): retrieved_project_quotas =\ self.project_quotas_repo.get_by_external_project_id( 'dummy', suppress_exception=True, session=self.session) self.assertIsNone(retrieved_project_quotas) def test_get_by_create_date_nothing(self): retrieved_project_quotas, offset, limit, total =\ self.project_quotas_repo.get_by_create_date( session=self.session, suppress_exception=True) self.assertEqual([], retrieved_project_quotas) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(0, total) def test_should_delete(self): self.project_quotas_repo.create_or_update_by_project_id( self.project_1.id, self.parsed_project_quotas_1, session=self.session) self.session.commit() self.project_quotas_repo.delete_by_external_project_id( self.project_1.external_id, session=self.session) def test_should_raise_delete_not_found(self): 
self.assertRaises( exception.NotFound, self.project_quotas_repo.delete_by_external_project_id, 'dummy', session=self.session) def test_should_suppress_delete_not_found(self): self.project_quotas_repo.delete_by_external_project_id( 'dummy', suppress_exception=True, session=self.session) def test_do_entity_name(self): self.assertEqual("ProjectQuotas", self.project_quotas_repo._do_entity_name()) def test_should_raise_not_found_get_by_entity_id(self): self.assertRaises( exception.NotFound, self.project_quotas_repo.get, 'dummy', session=self.session) if __name__ == '__main__': unittest.main() barbican-2.0.0/barbican/tests/model/repositories/test_repositories_containers.py0000664000567000056710000000726112701405673031545 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
from barbican.common import exception from barbican.model import models from barbican.model import repositories from barbican.tests import database_utils class WhenTestingContainerRepository(database_utils.RepositoryTestCase): def setUp(self): super(WhenTestingContainerRepository, self).setUp() self.repo = repositories.ContainerRepo() def test_should_raise_no_result_found(self): session = self.repo.get_session() self.assertRaises( exception.NotFound, self.repo.get_by_create_date, "my keystone id", session=session, suppress_exception=False) def test_get_container_by_id(self): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) container = models.Container() container.project_id = project.id container.save(session=session) session.commit() db_container = self.repo.get_container_by_id(container.id) self.assertIsNotNone(db_container) def test_should_raise_notfound_exception(self): self.assertRaises(exception.NotFound, self.repo.get_container_by_id, "invalid_id", suppress_exception=False) def test_should_suppress_notfound_exception(self): self.assertIsNone(self.repo.get_container_by_id( "invalid_id", suppress_exception=True)) def test_should_get_count_zero(self): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) session.commit() count = self.repo.get_count(project.id, session=session) self.assertEqual(0, count) def test_should_get_count_one(self): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) container_model = models.Container() container_model.project_id = project.id self.repo.create_from(container_model, session=session) session.commit() count = self.repo.get_count(project.id, session=session) self.assertEqual(1, count) def test_should_get_count_one_after_delete(self): session = self.repo.get_session() project = models.Project() 
project.external_id = "my keystone id" project.save(session=session) container_model = models.Container() container_model.project_id = project.id self.repo.create_from(container_model, session=session) container_model = models.Container() container_model.project_id = project.id self.repo.create_from(container_model, session=session) session.commit() count = self.repo.get_count(project.id, session=session) self.assertEqual(2, count) self.repo.delete_entity_by_id(container_model.id, "my keystone id", session=session) session.commit() count = self.repo.get_count(project.id, session=session) self.assertEqual(1, count) barbican-2.0.0/barbican/tests/model/repositories/test_repositories_order_retry_tasks.py0000664000567000056710000001157412701405673033147 0ustar jenkinsjenkins00000000000000# Copyright (c) 2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
import datetime import time from barbican.common import config from barbican.common import exception from barbican.model import models from barbican.model import repositories from barbican.tests import database_utils class WhenTestingOrderRetryTaskRepository(database_utils.RepositoryTestCase): def setUp(self): super(WhenTestingOrderRetryTaskRepository, self).setUp() self.date_time_now = datetime.datetime.utcnow() self.test_args = ['test', 'args'] self.test_kwargs = {'test': 1, 'kwargs': 2} self.repo = repositories.OrderRetryTaskRepo() self.order_repo = repositories.OrderRepo() def test_get_order_retry_task(self): session = self.repo.get_session() order_retry_task = self._create_retry_task(session) order_retry_task_from_get = self.repo.get( order_retry_task.id, session=session, ) self.assertEqual(order_retry_task.id, order_retry_task_from_get.id) self.assertEqual( self.date_time_now, order_retry_task_from_get.retry_at) self.assertEqual(u'retry-task', order_retry_task_from_get.retry_task) self.assertEqual(self.test_args, order_retry_task_from_get.retry_args) self.assertEqual(self.test_kwargs, order_retry_task_from_get.retry_kwargs) def test_get_order_retry_task_filtered_by_retry_time(self): session = self.repo.get_session() future_seconds = 3 date_time_future = ( self.date_time_now + datetime.timedelta(seconds=future_seconds) ) order_retry_task = self._create_retry_task( session, retry_at=date_time_future) # A retrieve by the current time should return no entries, as the only # retry record is set into the future. entities, offset, limit, total = self.repo.get_by_create_date( only_at_or_before_this_date=self.date_time_now, session=session, suppress_exception=True ) self.assertEqual(0, total) self.assertEqual([], entities) # Wait until the future time is the current time. time.sleep(2 * future_seconds) # Now, a retrieve by the current time should return our entry. 
entities, offset, limit, total = self.repo.get_by_create_date( only_at_or_before_this_date=datetime.datetime.utcnow(), session=session, suppress_exception=True ) self.assertEqual(1, total) # Verify that retry task record is what we put in originally. order_retry_task_from_get = entities[0] self.assertEqual(order_retry_task.id, order_retry_task_from_get.id) self.assertEqual(date_time_future, order_retry_task_from_get.retry_at) self.assertEqual(u'retry-task', order_retry_task_from_get.retry_task) self.assertEqual(self.test_args, order_retry_task_from_get.retry_args) self.assertEqual(self.test_kwargs, order_retry_task_from_get.retry_kwargs) def test_should_raise_no_result_found_no_exception(self): session = self.repo.get_session() entities, offset, limit, total = self.repo.get_by_create_date( session=session, suppress_exception=True) self.assertEqual([], entities) self.assertEqual(0, offset) self.assertEqual(config.CONF.default_limit_paging, limit) self.assertEqual(0, total) def test_should_raise_no_result_found_with_exceptions(self): session = self.repo.get_session() self.assertRaises( exception.NotFound, self.repo.get_by_create_date, session=session, suppress_exception=False) def _create_retry_task(self, session, retry_at=None): project = database_utils.create_project(session=session) order = database_utils.create_order(project, session=session) order_retry_task = models.OrderRetryTask() order_retry_task.order_id = order.id order_retry_task.retry_task = u'retry-task' order_retry_task.retry_at = retry_at or self.date_time_now order_retry_task.retry_args = self.test_args order_retry_task.retry_kwargs = self.test_kwargs self.repo.create_from(order_retry_task, session=session) session.commit() return order_retry_task barbican-2.0.0/barbican/tests/model/repositories/test_repositories_secrets.py0000664000567000056710000001625612701405673031054 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file 
except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from barbican.common import exception from barbican.model import models from barbican.model import repositories from barbican.plugin.interface import secret_store as ss from barbican.tests import database_utils from barbican.tests import utils @utils.parameterized_test_case class WhenTestingSecretRepository(database_utils.RepositoryTestCase): dataset_for_filter_tests = { 'query_by_name': { 'secret_1_dict': dict(name="name1"), 'secret_2_dict': dict(name="name2"), 'query_dict': dict(name="name1") }, 'query_by_algorithm': { 'secret_1_dict': dict(algorithm="algorithm1"), 'secret_2_dict': dict(algorithm="algorithm2"), 'query_dict': dict(alg="algorithm1") }, 'query_by_mode': { 'secret_1_dict': dict(mode="mode1"), 'secret_2_dict': dict(mode="mode2"), 'query_dict': dict(mode="mode1") }, 'query_by_bit_length': { 'secret_1_dict': dict(bit_length=1024), 'secret_2_dict': dict(bit_length=2048), 'query_dict': dict(bits=1024) }, 'query_by_secret_type': { 'secret_1_dict': dict(secret_type=ss.SecretType.SYMMETRIC), 'secret_2_dict': dict(secret_type=ss.SecretType.OPAQUE), 'query_dict': dict(secret_type=ss.SecretType.SYMMETRIC) }, } def setUp(self): super(WhenTestingSecretRepository, self).setUp() self.repo = repositories.SecretRepo() def test_get_by_create_date(self): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) secret_model = models.Secret() secret_model.project_id = project.id secret = self.repo.create_from(secret_model, session=session) session.commit() 
secrets, offset, limit, total = self.repo.get_by_create_date( "my keystone id", session=session, ) self.assertEqual([secret.id], [s.id for s in secrets]) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(1, total) def test_get_secret_by_id(self): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) secret_model = models.Secret() secret_model.project_id = project.id secret = self.repo.create_from(secret_model, session=session) session.commit() db_secret = self.repo.get_secret_by_id(secret.id) self.assertIsNotNone(db_secret) def test_should_raise_notfound_exception(self): self.assertRaises(exception.NotFound, self.repo.get_secret_by_id, "invalid_id", suppress_exception=False) def test_should_suppress_notfound_exception(self): self.assertIsNone(self.repo.get_secret_by_id("invalid_id", suppress_exception=True)) @utils.parameterized_dataset(dataset_for_filter_tests) def test_get_by_create_date_with_filter(self, secret_1_dict, secret_2_dict, query_dict): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) secret_1_dict['project_id'] = project.id secret1 = self.repo.create_from( models.Secret(secret_1_dict), session=session, ) secret_2_dict['project_id'] = project.id secret2 = self.repo.create_from( models.Secret(secret_2_dict), session=session, ) session.commit() secrets, offset, limit, total = self.repo.get_by_create_date( "my keystone id", session=session, **query_dict ) resulting_secret_ids = [s.id for s in secrets] self.assertIn(secret1.id, resulting_secret_ids) self.assertNotIn(secret2.id, resulting_secret_ids) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(1, total) def test_get_by_create_date_nothing(self): session = self.repo.get_session() secrets, offset, limit, total = self.repo.get_by_create_date( "my keystone id", bits=1024, session=session, 
suppress_exception=True ) self.assertEqual([], secrets) self.assertEqual(0, offset) self.assertEqual(10, limit) self.assertEqual(0, total) def test_do_entity_name(self): self.assertEqual("Secret", self.repo._do_entity_name()) def test_should_raise_no_result_found(self): session = self.repo.get_session() self.assertRaises( exception.NotFound, self.repo.get_by_create_date, "my keystone id", session=session, suppress_exception=False) def test_should_get_count_zero(self): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) session.commit() count = self.repo.get_count(project.id, session=session) self.assertEqual(0, count) def test_should_get_count_one(self): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) secret_model = models.Secret() secret_model.project_id = project.id self.repo.create_from(secret_model, session=session) session.commit() count = self.repo.get_count(project.id, session=session) self.assertEqual(1, count) def test_should_get_count_one_after_delete(self): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) secret_model = models.Secret() secret_model.project_id = project.id self.repo.create_from(secret_model, session=session) secret_model = models.Secret() secret_model.project_id = project.id self.repo.create_from(secret_model, session=session) session.commit() count = self.repo.get_count(project.id, session=session) self.assertEqual(2, count) self.repo.delete_entity_by_id(secret_model.id, "my keystone id", session=session) session.commit() count = self.repo.get_count(project.id, session=session) self.assertEqual(1, count) barbican-2.0.0/barbican/tests/model/repositories/test_repositories_projects.py0000664000567000056710000000336512701405673031232 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, 
Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from barbican.common import exception from barbican.model import models from barbican.model import repositories from barbican.tests import database_utils class WhenTestingProjectRepository(database_utils.RepositoryTestCase): def setUp(self): super(WhenTestingProjectRepository, self).setUp() self.repo = repositories.ProjectRepo() def test_should_create_retrieve_deleted_project(self): session = self.repo.get_session() project = models.Project() project.keystone_id = 'my keystone id' project.status = models.States.ACTIVE self.repo.create_from(project, session=session) self.assertIsNotNone(project.id) self.assertFalse(project.deleted) project_get = self.repo.get(project.id) self.assertEqual(project.id, project_get.id) self.repo.delete_entity_by_id(project.id, 'my keystone id') self.assertTrue(project.deleted) def test_should_raise_no_result_found(self): session = self.repo.get_session() self.assertRaises( exception.NotFound, self.repo.find_by_external_project_id, "my keystone id", session=session, suppress_exception=False) barbican-2.0.0/barbican/tests/model/repositories/test_repositories_secret_metadata.py0000664000567000056710000001054112701405673032520 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from barbican.model import models from barbican.model import repositories from barbican.tests import database_utils from barbican.tests import utils @utils.parameterized_test_case class WhenTestingSecretMetadataRepository(database_utils.RepositoryTestCase): def setUp(self): super(WhenTestingSecretMetadataRepository, self).setUp() self.repo = repositories.SecretUserMetadatumRepo() self.test_metadata = { "dog": "poodle", "cat": "siamese" } def _create_base_secret(self, project_id=None): # Setup the secret and needed base relationship secret_repo = repositories.get_secret_repository() session = secret_repo.get_session() if project_id is None: # don't re-create project if it created earlier project = models.Project() project.external_id = "keystone_project_id" project.save(session=session) project_id = project.id secret_model = models.Secret() secret_model.project_id = project_id secret = secret_repo.create_from(secret_model, session=session) secret.save(session=session) session.commit() return secret def test_create_and_get_metadata_for_secret(self): secret = self._create_base_secret() self.repo.create_replace_user_metadata(secret.id, self.test_metadata) metadata = self.repo.get_metadata_for_secret(secret.id) self.assertEqual(self.test_metadata, metadata) def test_get_metadata_invalid_secret(self): metadata = self.repo.get_metadata_for_secret("invalid_id") self.assertEqual({}, metadata) def test_create_user_metadatum(self): secret = self._create_base_secret() self.repo.create_replace_user_metadata(secret.id, self.test_metadata) # adds a new key 
self.repo.create_replace_user_metadatum(secret.id, 'lizard', 'green anole') self.test_metadata['lizard'] = 'green anole' metadata = self.repo.get_metadata_for_secret(secret.id) self.assertEqual(self.test_metadata, metadata) def test_replace_user_metadatum(self): secret = self._create_base_secret() self.repo.create_replace_user_metadata(secret.id, self.test_metadata) # updates existing key self.repo.create_replace_user_metadatum(secret.id, 'dog', 'rat terrier') self.test_metadata['dog'] = 'rat terrier' metadata = self.repo.get_metadata_for_secret(secret.id) self.assertEqual(self.test_metadata, metadata) def test_delete_user_metadatum(self): secret = self._create_base_secret() self.repo.create_replace_user_metadata(secret.id, self.test_metadata) # deletes existing key self.repo.delete_metadatum(secret.id, 'cat') del self.test_metadata['cat'] metadata = self.repo.get_metadata_for_secret(secret.id) self.assertEqual(self.test_metadata, metadata) def test_delete_secret_deletes_secret_metadata(self): secret = self._create_base_secret() self.repo.create_replace_user_metadata(secret.id, self.test_metadata) metadata = self.repo.get_metadata_for_secret(secret.id) self.assertEqual(self.test_metadata, metadata) # deletes existing secret secret.delete() metadata = self.repo.get_metadata_for_secret(secret.id) self.assertEqual({}, metadata) barbican-2.0.0/barbican/tests/model/repositories/test_repositories.py0000664000567000056710000003220512701405673027314 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and # limitations under the License. import mock import sqlalchemy from alembic import script as alembic_script from barbican.common import config from barbican.common import exception from barbican.model.migration import commands as migration from barbican.model import models from barbican.model import repositories from barbican.tests import database_utils from barbican.tests import utils from oslo_config import cfg class WhenCleaningRepositoryPagingParameters(utils.BaseTestCase): def setUp(self): super(WhenCleaningRepositoryPagingParameters, self).setUp() self.CONF = config.CONF self.default_limit = self.CONF.default_limit_paging def test_parameters_not_assigned(self): """The cleaner should use defaults when params are not specified.""" clean_offset, clean_limit = repositories.clean_paging_values() self.assertEqual(0, clean_offset) self.assertEqual(self.default_limit, clean_limit) def test_limit_as_none(self): """When Limit is set to None it should use the default limit.""" offset = 0 clean_offset, clean_limit = repositories.clean_paging_values( offset_arg=offset, limit_arg=None) self.assertEqual(offset, clean_offset) self.assertEqual(self.default_limit, clean_limit) def test_offset_as_none(self): """When Offset is set to None it should use an offset of 0.""" clean_offset, clean_limit = repositories.clean_paging_values( offset_arg=None, limit_arg=self.default_limit) self.assertEqual(0, clean_offset) self.assertEqual(self.default_limit, clean_limit) def test_limit_as_uncastable_str(self): """When Limit cannot be cast to an int, expect the default.""" clean_offset, clean_limit = repositories.clean_paging_values( offset_arg=0, limit_arg='boom') self.assertEqual(0, clean_offset) self.assertEqual(self.default_limit, clean_limit) def test_offset_as_uncastable_str(self): """When Offset cannot be cast to an int, it should be zero.""" clean_offset, clean_limit = repositories.clean_paging_values( 
offset_arg='boom', limit_arg=self.default_limit) self.assertEqual(0, clean_offset) self.assertEqual(self.default_limit, clean_limit) def test_limit_is_less_than_one(self): """Offset should default to 1.""" limit = -1 clean_offset, clean_limit = repositories.clean_paging_values( offset_arg=1, limit_arg=limit) self.assertEqual(1, clean_offset) self.assertEqual(1, clean_limit) def test_limit_is_too_big(self): """Limit should max out at configured value.""" limit = self.CONF.max_limit_paging + 10 clean_offset, clean_limit = repositories.clean_paging_values( offset_arg=1, limit_arg=limit) self.assertEqual(self.CONF.max_limit_paging, clean_limit) def test_offset_is_too_big(self): """When Offset exceeds sys.maxsize, it should be zero.""" clean_offset, clean_limit = repositories.clean_paging_values( offset_arg=265613988875874769338781322035779626829233452653394495, limit_arg=self.default_limit) self.assertEqual(0, clean_offset) self.assertEqual(self.default_limit, clean_limit) class WhenInvokingExceptionMethods(utils.BaseTestCase): def setUp(self): super(WhenInvokingExceptionMethods, self).setUp() self.CONF = config.CONF self.entity_id = '123456' self.entity_name = 'test_entity' def test_should_raise_for_entity_not_found(self): exception_result = self.assertRaises( exception.NotFound, repositories._raise_entity_not_found, self.entity_name, self.entity_id) self.assertEqual( "No test_entity found with ID 123456", exception_result.message) def test_should_raise_for_entity_id_not_found(self): exception_result = self.assertRaises( exception.NotFound, repositories._raise_entity_id_not_found, self.entity_id) self.assertEqual( "Entity ID 123456 not found", exception_result.message) def test_should_raise_for_no_entities_found(self): exception_result = self.assertRaises( exception.NotFound, repositories._raise_no_entities_found, self.entity_name) self.assertEqual( "No entities of type test_entity found", exception_result.message) def 
test_should_raise_for_entity_already_exists(self): exception_result = self.assertRaises( exception.Duplicate, repositories._raise_entity_already_exists, self.entity_name) self.assertEqual( "Entity 'test_entity' already exists", exception_result.message) class WhenTestingBaseRepository(database_utils.RepositoryTestCase): def setUp(self): super(WhenTestingBaseRepository, self).setUp() self.repo = repositories.BaseRepo() def test_should_raise_invalid_create_from_no_entity(self): exception_result = self.assertRaises( exception.Invalid, self.repo.create_from, None) self.assertEqual( "Must supply non-None Entity.", exception_result.message) def test_should_raise_invalid_create_from_entity_with_id(self): entity = models.ModelBase() entity.id = '1234' exception_result = self.assertRaises( exception.Invalid, self.repo.create_from, entity) self.assertEqual( "Must supply Entity with id=None (i.e. new entity).", exception_result.message) def test_should_raise_invalid_do_validate_no_status(self): exception_result = self.assertRaises( exception.Invalid, self.repo._do_validate, {}) self.assertEqual( "Entity status is required.", exception_result.message) def test_should_raise_invalid_do_validate_bad_status(self): exception_result = self.assertRaises( exception.Invalid, self.repo._do_validate, dict(status='BOGUS_STATUS')) self.assertEqual( "Invalid status 'BOGUS_STATUS' for Entity.", exception_result.message) class WhenTestingWrapDbError(utils.BaseTestCase): def setUp(self): super(WhenTestingWrapDbError, self).setUp() repositories.CONF.set_override("sql_max_retries", 0, enforce_type=True) repositories.CONF.set_override("sql_retry_interval", 0, enforce_type=True) @mock.patch('barbican.model.repositories.is_db_connection_error') def test_should_raise_operational_error_is_connection_error( self, mock_is_db_error): mock_is_db_error.return_value = True @repositories.wrap_db_error def test_function(): raise sqlalchemy.exc.OperationalError( 'statement', 'params', 'orig') 
self.assertRaises( sqlalchemy.exc.OperationalError, test_function) class WhenTestingGetEnginePrivate(utils.BaseTestCase): def setUp(self): super(WhenTestingGetEnginePrivate, self).setUp() repositories.CONF.set_override("sql_connection", "connection", enforce_type=True) @mock.patch('barbican.model.repositories._create_engine') def test_should_raise_value_exception_engine_create_failure( self, mock_create_engine): engine = mock.MagicMock() engine.connect.side_effect = ValueError('Abort!') mock_create_engine.return_value = engine exception_result = self.assertRaises( exception.BarbicanException, repositories._get_engine, None) self.assertEqual( 'Error configuring registry database with supplied ' 'sql_connection. Got error: Abort!', exception_result.message) @mock.patch('barbican.model.repositories._create_engine') def test_should_complete_with_no_alembic_create_default_configs( self, mock_create_engine): repositories.CONF.set_override("db_auto_create", False, enforce_type=True) engine = mock.MagicMock() mock_create_engine.return_value = engine # Invoke method under test. repositories._get_engine(None) engine.connect.assert_called_once_with() mock_create_engine.assert_called_once_with( 'connection', pool_recycle=3600, convert_unicode=True, echo=False ) @mock.patch('barbican.model.repositories._create_engine') def test_should_complete_with_no_alembic_create_pool_configs( self, mock_create_engine): repositories.CONF.set_override("db_auto_create", False, enforce_type=True) repositories.CONF.set_override( "sql_pool_class", "QueuePool", enforce_type=True) repositories.CONF.set_override("sql_pool_size", 22, enforce_type=True) repositories.CONF.set_override("sql_pool_max_overflow", 11, enforce_type=True) engine = mock.MagicMock() mock_create_engine.return_value = engine # Invoke method under test. 
repositories._get_engine(None) engine.connect.assert_called_once_with() mock_create_engine.assert_called_once_with( 'connection', pool_recycle=3600, convert_unicode=True, echo=False, poolclass=sqlalchemy.pool.QueuePool, pool_size=22, max_overflow=11 ) class WhenTestingAutoGenerateTables(utils.BaseTestCase): @mock.patch('barbican.model.migration.commands.upgrade') def test_should_complete_with_alembic_database_update( self, mock_commands_upgrade): tables = dict( alembic_version='version') # Mimic tables already created. engine = 'engine' # Invoke method under test. repositories._auto_generate_tables(engine, tables) mock_commands_upgrade.assert_called_once_with() class WhenTestingIsDbConnectionError(utils.BaseTestCase): def test_should_return_false_no_error_code_in_args(self): args = mock.MagicMock() args.find.return_value = -1 result = repositories.is_db_connection_error(args) self.assertFalse(result) def test_should_return_true_error_code_found_in_args(self): args = mock.MagicMock() args.find.return_value = 1 result = repositories.is_db_connection_error(args) self.assertTrue(result) class WhenTestingMigrations(utils.BaseTestCase): def setUp(self): super(WhenTestingMigrations, self).setUp() repositories.CONF.set_override("sql_connection", "connection", enforce_type=True) self.alembic_config = migration.init_config() self.alembic_config.barbican_config = cfg.CONF def test_no_downgrade(self): script_dir = alembic_script.ScriptDirectory.from_config( self.alembic_config) versions = [v for v in script_dir.walk_revisions(base='base', head='heads')] failed_revisions = [] for version in versions: if hasattr(version.module, 'downgrade'): failed_revisions.append(version.revision) if failed_revisions: self.fail('Migrations %s have downgrade' % failed_revisions) class DummyRepo(repositories.BaseRepo): """Repository for the increasing code coverage of unit tests.""" def get_session(self, session=None): return None def _do_entity_name(self): return "Dummy" def 
_do_build_get_query(self, entity_id, external_project_id, session): return None def _do_validate(self, values): pass def _build_get_project_entities_query(self, project_id, session): return None class WhenIncreasingRepositoryTestCoverage(utils.BaseTestCase): def test_get_count_should_return_zero(self): dummy_repo = DummyRepo() count = dummy_repo.get_count('dummy_project_id') self.assertEqual(0, count) def test_get_project_entities_should_return_empty(self): dummy_repo = DummyRepo() count = dummy_repo.get_project_entities('dummy_project_id') self.assertEqual([], count) barbican-2.0.0/barbican/tests/model/repositories/test_repositories_orders.py0000664000567000056710000000752512701405673030701 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
from barbican.common import config from barbican.common import exception from barbican.model import models from barbican.model import repositories from barbican.tests import database_utils class WhenTestingOrderRepository(database_utils.RepositoryTestCase): def setUp(self): super(WhenTestingOrderRepository, self).setUp() self.repo = repositories.OrderRepo() def test_should_raise_no_result_found_no_exception(self): session = self.repo.get_session() entities, offset, limit, total = self.repo.get_by_create_date( "my keystone id", session=session, suppress_exception=True) self.assertEqual([], entities) self.assertEqual(0, offset) self.assertEqual(config.CONF.default_limit_paging, limit) self.assertEqual(0, total) def test_should_raise_no_result_found_with_exceptions(self): session = self.repo.get_session() self.assertRaises( exception.NotFound, self.repo.get_by_create_date, "my keystone id", session=session, suppress_exception=False) def test_get_order(self): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) order = models.Order() order.project_id = project.id self.repo.create_from(order, session=session) session.commit() order_from_get = self.repo.get( order.id, external_project_id="my keystone id", session=session, ) self.assertEqual(order.id, order_from_get.id) def test_should_get_count_zero(self): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) session.commit() count = self.repo.get_count(project.id, session=session) self.assertEqual(0, count) def test_should_get_count_one(self): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) order_model = models.Order() order_model.project_id = project.id self.repo.create_from(order_model, session=session) session.commit() count = self.repo.get_count(project.id, session=session) 
self.assertEqual(1, count) def test_should_get_count_one_after_delete(self): session = self.repo.get_session() project = models.Project() project.external_id = "my keystone id" project.save(session=session) order_model = models.Order() order_model.project_id = project.id self.repo.create_from(order_model, session=session) order_model = models.Order() order_model.project_id = project.id self.repo.create_from(order_model, session=session) session.commit() count = self.repo.get_count(project.id, session=session) self.assertEqual(2, count) self.repo.delete_entity_by_id(order_model.id, "my keystone id", session=session) session.commit() count = self.repo.get_count(project.id, session=session) self.assertEqual(1, count) barbican-2.0.0/barbican/tests/common/0000775000567000056710000000000012701406024020602 5ustar jenkinsjenkins00000000000000barbican-2.0.0/barbican/tests/common/test_hrefs.py0000664000567000056710000000234212701405673023334 0ustar jenkinsjenkins00000000000000# Copyright (c) 2015, Cisco Systems # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
from barbican.common import hrefs from barbican.tests import utils as test_utils class WhenTestingGetContainerID(test_utils.BaseTestCase): def setUp(self): super(WhenTestingGetContainerID, self).setUp() def test_get_container_id_passes(self): test_ref = 'https://localhost/v1/containers/good_container_ref' result = hrefs.get_container_id_from_ref(test_ref) self.assertEqual('good_container_ref', result) def test_get_container_id_raises(self): test_ref = 'bad_container_ref' self.assertRaises(IndexError, hrefs.get_container_id_from_ref, test_ref) barbican-2.0.0/barbican/tests/common/test_quota.py0000664000567000056710000002734212701405673023365 0ustar jenkinsjenkins00000000000000# Copyright (c) 2015 Cisco Systems # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
import unittest

from barbican.common import exception as excep
from barbican.common import quota
from barbican.model import models
from barbican.tests import database_utils


class WhenTestingQuotaDriverFunctions(database_utils.RepositoryTestCase):
    """Tests for QuotaDriver: defaults, effective quotas, CRUD and listing."""

    def setUp(self):
        super(WhenTestingQuotaDriverFunctions, self).setUp()
        self.quota_driver = quota.QuotaDriver()

    def test_get_defaults(self):
        # All resource classes default to -1 (unlimited).
        quotas = self.quota_driver._get_defaults()
        self.assertEqual(-1, quotas['secrets'])
        self.assertEqual(-1, quotas['orders'])
        self.assertEqual(-1, quotas['containers'])
        self.assertEqual(-1, quotas['consumers'])
        self.assertEqual(-1, quotas['cas'])

    def test_compute_effective_quotas_using_some_defaults(self):
        # None entries are replaced by the configured defaults (-1 here).
        configured_quotas = {'consumers': None, 'containers': 66,
                             'orders': None, 'secrets': 55,
                             'cas': None}
        quotas = self.quota_driver._compute_effective_quotas(configured_quotas)
        expected_quotas = {'consumers': -1, 'containers': 66,
                           'orders': -1, 'secrets': 55,
                           'cas': -1}
        self.assertEqual(expected_quotas, quotas)

    def test_compute_effective_quotas_using_all_defaults(self):
        configured_quotas = {'consumers': None, 'containers': None,
                             'orders': None, 'secrets': None,
                             'cas': None}
        quotas = self.quota_driver._compute_effective_quotas(configured_quotas)
        expected_quotas = {'consumers': -1, 'containers': -1,
                           'orders': -1, 'secrets': -1,
                           'cas': -1}
        self.assertEqual(expected_quotas, quotas)

    def test_is_unlimited_true(self):
        self.assertTrue(self.quota_driver.is_unlimited_value(-1))

    def test_is_unlimited_false(self):
        self.assertFalse(self.quota_driver.is_unlimited_value(1))

    def test_is_disabled_true(self):
        self.assertTrue(self.quota_driver.is_disabled_value(0))

    def test_is_disabled_false(self):
        self.assertFalse(self.quota_driver.is_disabled_value(1))

    def test_should_get_project_quotas(self):
        self.create_a_test_project_quotas()
        project_quotas = self.quota_driver.get_project_quotas(
            self.get_test_project_id())
        self.assertEqual({'project_quotas':
                          self.get_test_parsed_project_quotas()},
                         project_quotas)

    def test_should_return_not_found_get_project_quotas(self):
        # Unknown project yields None rather than an exception.
        project_quotas = self.quota_driver.get_project_quotas('dummy')
        self.assertIsNone(project_quotas)

    def test_should_get_project_quotas_list(self):
        self.create_a_test_project_quotas()
        project_quotas = self.quota_driver.get_project_quotas_list()
        self.assertEqual({'project_quotas': [{
            'project_id': u'project1',
            'project_quotas': {'consumers': 105,
                               'containers': 103,
                               'orders': 102,
                               'secrets': 101,
                               'cas': 106}}],
            'total': 1},
            project_quotas)

    def test_should_get_empty_project_quotas_list(self):
        project_quotas = self.quota_driver.get_project_quotas_list()
        self.assertEqual({'total': 0, 'project_quotas': []}, project_quotas)

    def test_should_delete_project_quotas(self):
        self.create_a_test_project_quotas()
        self.quota_driver.delete_project_quotas(
            self.get_test_project_id())

    def test_should_raise_not_found_delete_project_quotas(self):
        self.assertRaises(
            excep.NotFound,
            self.quota_driver.delete_project_quotas,
            'dummy')

    def test_get_project_quotas_with_partial_definition(self):
        self.create_a_test_project_quotas('partial')
        project_quotas = self.quota_driver.get_project_quotas(
            self.get_test_project_id('partial'))
        self.assertEqual({'project_quotas':
                          self.get_test_response_project_quotas('partial')},
                         project_quotas)

    def test_get_project_quotas_using_empty_definition(self):
        self.create_a_test_project_quotas('none')
        project_quotas = self.quota_driver.get_project_quotas(
            self.get_test_project_id('none'))
        self.assertEqual({'project_quotas':
                          self.get_test_response_project_quotas('none')},
                         project_quotas)

    def test_get_quotas_using_some_defaults(self):
        # get_quotas merges stored project quotas with defaults.
        self.create_a_test_project_quotas('partial')
        quotas = self.quota_driver.get_quotas(
            self.get_test_project_id('partial'))
        expected_quotas = {'quotas': {'consumers': -1,
                                      'containers': 66,
                                      'orders': -1,
                                      'secrets': 55,
                                      'cas': -1}}
        self.assertEqual(expected_quotas, quotas)

    def test_get_quotas_using_all_defaults(self):
        quotas = self.quota_driver.get_quotas('not_configured')
        expected_quotas = {'quotas': {'consumers': -1,
                                      'containers': -1,
                                      'orders': -1,
                                      'secrets': -1,
                                      'cas': -1}}
        self.assertEqual(expected_quotas, quotas)

    # ----------------------- Helper Functions ---------------------------
    def get_test_project_id(self, index=1):
        # index may be an int, or the strings 'partial'/'none' which select
        # special fixture variants used by the tests above.
        if index == 'partial':
            return 'project_partial'
        elif index == 'none':
            return 'project_none'
        else:
            return 'project' + str(index)

    def get_test_parsed_project_quotas(self, index=1):
        # Quotas as they would be parsed from a set-quotas request.
        if index == 'partial':
            parsed_project_quotas = {
                'secrets': 55,
                'containers': 66}
        elif index == 'none':
            parsed_project_quotas = {}
        else:
            parsed_project_quotas = {
                'secrets': index * 100 + 1,
                'orders': index * 100 + 2,
                'containers': index * 100 + 3,
                'consumers': index * 100 + 5,
                'cas': index * 100 + 6}
        return parsed_project_quotas

    def get_test_response_project_quotas(self, index=1):
        # Quotas as returned by get_project_quotas: unset values are None.
        if index == 'partial':
            response_project_quotas = {
                'secrets': 55,
                'orders': None,
                'containers': 66,
                'consumers': None,
                'cas': None}
        elif index == 'none':
            response_project_quotas = {
                'secrets': None,
                'orders': None,
                'containers': None,
                'consumers': None,
                'cas': None}
        else:
            response_project_quotas = {
                'secrets': index * 100 + 1,
                'orders': index * 100 + 2,
                'containers': index * 100 + 3,
                'consumers': index * 100 + 5,
                'cas': index * 100 + 6}
        return response_project_quotas

    def create_a_test_project_quotas(self, index=1):
        project_id = self.get_test_project_id(index)
        parsed_project_quotas = self.get_test_parsed_project_quotas(index)
        self.quota_driver.set_project_quotas(project_id,
                                             parsed_project_quotas)

    def create_project_quotas(self):
        for index in [1, 2, 3]:
            self.create_a_test_project_quotas(index)


class DummyRepoForTestingQuotaEnforcement(object):
    """Stand-in repo whose get_count always returns a fixed value."""

    def __init__(self, get_count_return_value):
        self.get_count_return_value = get_count_return_value

    def get_count(self, internal_project_id):
        return self.get_count_return_value


class WhenTestingQuotaEnforcingFunctions(database_utils.RepositoryTestCase):
    """Tests for QuotaEnforcer.enforce against configured project quotas."""

    def setUp(self):
        super(WhenTestingQuotaEnforcingFunctions, self).setUp()
        self.quota_driver = quota.QuotaDriver()
        self.project = models.Project()
        self.project.id = 'my_internal_id'
        self.project.external_id = 'my_keystone_id'

    def test_should_pass_default_unlimited(self):
        # No project quotas configured -> default unlimited -> no raise.
        test_repo = DummyRepoForTestingQuotaEnforcement(0)
        quota_enforcer = quota.QuotaEnforcer('secrets', test_repo)
        quota_enforcer.enforce(self.project)

    def test_should_raise_disabled_value(self):
        # Quota 0 disables the resource entirely, even at zero usage.
        test_repo = DummyRepoForTestingQuotaEnforcement(0)
        quota_enforcer = quota.QuotaEnforcer('secrets', test_repo)
        disabled_project_quotas = {'consumers': 0, 'containers': 0,
                                   'orders': 0, 'secrets': 0, 'cas': 0}
        self.quota_driver.set_project_quotas(self.project.external_id,
                                             disabled_project_quotas)
        exception = self.assertRaises(
            excep.QuotaReached,
            quota_enforcer.enforce,
            self.project
        )
        self.assertIn('Quota reached for project', exception.message)
        self.assertIn('my_keystone_id', exception.message)
        self.assertIn('secrets', exception.message)
        self.assertIn(str(0), exception.message)

    def test_should_pass_below_limit(self):
        test_repo = DummyRepoForTestingQuotaEnforcement(4)
        quota_enforcer = quota.QuotaEnforcer('secrets', test_repo)
        five_project_quotas = {'consumers': 5, 'containers': 5,
                               'orders': 5, 'secrets': 5, 'cas': 5}
        self.quota_driver.set_project_quotas(self.project.external_id,
                                             five_project_quotas)
        quota_enforcer.enforce(self.project)

    def test_should_raise_equal_limit(self):
        # Usage equal to the limit already counts as reached.
        test_repo = DummyRepoForTestingQuotaEnforcement(5)
        quota_enforcer = quota.QuotaEnforcer('secrets', test_repo)
        five_project_quotas = {'consumers': 5, 'containers': 5,
                               'orders': 5, 'secrets': 5, 'cas': 5}
        self.quota_driver.set_project_quotas(self.project.external_id,
                                             five_project_quotas)
        exception = self.assertRaises(
            excep.QuotaReached,
            quota_enforcer.enforce,
            self.project
        )
        self.assertIn('Quota reached for project', exception.message)
        self.assertIn('my_keystone_id', exception.message)
        self.assertIn('secrets', exception.message)
        self.assertIn(str(5), exception.message)

    def test_should_raise_above_limit(self):
        test_repo = DummyRepoForTestingQuotaEnforcement(6)
        quota_enforcer = quota.QuotaEnforcer('secrets', test_repo)
        five_project_quotas = {'consumers': 5, 'containers': 5,
                               'orders': 5, 'secrets': 5, 'cas': 5}
        self.quota_driver.set_project_quotas(self.project.external_id,
                                             five_project_quotas)
        exception = self.assertRaises(
            excep.QuotaReached,
            quota_enforcer.enforce,
            self.project
        )
        self.assertIn('Quota reached for project', exception.message)
        self.assertIn('my_keystone_id', exception.message)
        self.assertIn('secrets', exception.message)
        self.assertIn(str(5), exception.message)


if __name__ == '__main__':
    unittest.main()
barbican-2.0.0/barbican/tests/common/__init__.py0000664000567000056710000000000012701405673022712 0ustar jenkinsjenkins00000000000000barbican-2.0.0/barbican/tests/common/test_utils.py0000664000567000056710000001405112701405673023365 0ustar jenkinsjenkins00000000000000# Copyright (c) 2013-2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from oslo_config import cfg
import six

from barbican.common import config
from barbican.common import utils
from barbican.tests import utils as test_utils


class WhenTestingHostnameForRefsGetter(test_utils.BaseTestCase):
    """Tests for utils.hostname_for_refs URI construction."""

    def setUp(self):
        super(WhenTestingHostnameForRefsGetter, self).setUp()
        self.host = 'host'
        self.version = 'version'
        self.external_project_id = 'external_project_id'
        self.resource = 'resource'

        # Save and override global config/module state; restored in tearDown.
        self._old_host = utils.CONF.host_href
        self._old_version = utils.API_VERSION
        utils.CONF.set_override('host_href', self.host, enforce_type=True)
        utils.API_VERSION = self.version

    def tearDown(self):
        super(WhenTestingHostnameForRefsGetter, self).tearDown()
        utils.CONF.clear_override('host_href')
        utils.API_VERSION = self._old_version

    def test_hostname_for_refs(self):
        uri = utils.hostname_for_refs(resource=self.resource)
        self.assertEqual("{0}/{1}/{2}".format(self.host, self.version,
                                              self.resource), uri)

    def test_hostname_for_refs_no_resource(self):
        uri = utils.hostname_for_refs()
        self.assertEqual("{0}/{1}".format(self.host, self.version), uri)


class WhenTestingAcceptEncodingGetter(test_utils.BaseTestCase):
    """Tests for utils.get_accepted_encodings header parsing."""

    def setUp(self):
        super(WhenTestingAcceptEncodingGetter, self).setUp()

        self.req = mock.Mock()

    def test_parses_accept_encoding_header(self):
        self.req.get_header.return_value = '*'
        ae = utils.get_accepted_encodings(self.req)
        self.req.get_header.assert_called_once_with('Accept-Encoding')
        self.assertEqual(['*'], ae)

    def test_returns_none_for_empty_encoding(self):
        self.req.get_header.return_value = None
        ae = utils.get_accepted_encodings(self.req)
        self.assertIsNone(ae)

    def test_parses_single_accept_with_quality_value(self):
        self.req.get_header.return_value = 'base64;q=0.7'
        ae = utils.get_accepted_encodings(self.req)
        self.assertEqual(['base64'], ae)

    def test_parses_more_than_one_encoding(self):
        self.req.get_header.return_value = 'base64, gzip'
        ae = utils.get_accepted_encodings(self.req)
        self.assertEqual(['base64', 'gzip'], ae)

    def test_can_sort_by_quality_value(self):
        # Encodings come back ordered by descending q-value (no q means 1.0).
        self.req.get_header.return_value = 'base64;q=0.5, gzip;q=0.6, compress'
        ae = utils.get_accepted_encodings(self.req)
        self.assertEqual(['compress', 'gzip', 'base64'], ae)

    def test_returns_none_on_invalid_quality_type(self):
        self.req.get_header.return_value = 'base64;q=three'
        ae = utils.get_accepted_encodings(self.req)
        self.assertIsNone(ae)

    def test_returns_none_on_quality_too_large(self):
        # q-values must be within [0, 1].
        self.req.get_header.return_value = 'base64;q=1.1'
        ae = utils.get_accepted_encodings(self.req)
        self.assertIsNone(ae)

    def test_returns_none_on_quality_too_small(self):
        self.req.get_header.return_value = 'base64;q=-0.1'
        ae = utils.get_accepted_encodings(self.req)
        self.assertIsNone(ae)

    def test_ignores_encoding_with_zero_quality_value(self):
        # q=0.0 means "not acceptable" and is dropped from the result.
        self.req.get_header.return_value = 'base64;q=0.5, gzip;q=0.0, compress'
        ae = utils.get_accepted_encodings(self.req)
        self.assertEqual(['compress', 'base64'], ae)


class WhenTestingGenerateFullClassnameForInstance(test_utils.BaseTestCase):
    """Tests for utils.generate_fullname_for (module.Class name strings)."""

    def setUp(self):
        super(WhenTestingGenerateFullClassnameForInstance, self).setUp()

        self.instance = test_utils.DummyClassForTesting()

    def test_get_fullname_for_null_instance_raises_exception(self):
        self.assertRaises(ValueError, utils.generate_fullname_for, None)

    def test_get_fullname_for_string_doesnt_include_module(self):
        # Builtins (here str) should not be qualified with a module name.
        test_string = "foo"
        fullname = utils.generate_fullname_for(test_string)
        self.assertEqual(0, fullname.count("."))
        self.assertNotIn(six.moves.builtins.__name__, fullname)

    def test_returns_class_name_on_null_module(self):
        self.instance.__class__.__module__ = None
        name = utils.generate_fullname_for(self.instance)
        self.assertEqual('DummyClassForTesting', name)

    def test_returns_qualified_name(self):
        self.instance.__class__.__module__ = 'dummy'
        name = utils.generate_fullname_for(self.instance)
        self.assertEqual('dummy.DummyClassForTesting', name)


class TestConfigValues(test_utils.BaseTestCase):

    def setUp(self):
        super(TestConfigValues, self).setUp()
        self.barbican_config = config.CONF
        self.oslo_config = cfg.CONF

    def test_barbican_conf_values_made_visible_to_oslo_conf(self):
        """In this, checking oslo CONF values are same as barbican config

        This tests shows that after the change values referenced via
        oslo_config.cfg.CONF value are same as barbican.common.config.CONF.
        """

        # Checking that 'admin_role' value referred via
        # barbican.common.config.CONF is same as oslo_config.cfg.CONF
        self.assertEqual('admin', self.barbican_config._get('admin_role'))
        self.assertEqual('admin', self.barbican_config.admin_role)
        self.assertEqual('admin', self.oslo_config._get('admin_role'))
        self.assertEqual('admin', self.oslo_config.admin_role)

        # No error in getting 'project' value from both config reading
        # mechanism
        self.assertEqual('barbican', self.barbican_config.project)
        self.assertEqual('barbican', self.oslo_config.project)
barbican-2.0.0/barbican/tests/common/test_validators.py0000664000567000056710000021024212701405673024375 0ustar jenkinsjenkins00000000000000# Copyright (c) 2013-2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64 import datetime import six import unittest import testtools from barbican.common import exception as excep from barbican.common import validators from barbican.tests import certificate_utils as certs from barbican.tests import keys from barbican.tests import utils VALID_EXTENSIONS = "valid extensions" VALID_FULL_CMC = "valid CMC" def get_symmetric_key_req(): return {'name': 'mysymmetrickey', 'payload_content_type': 'application/octet-stream', 'payload_content_encoding': 'base64', 'algorithm': 'aes', 'bit_length': 256, 'secret_type': 'symmetric', 'payload': 'gF6+lLoF3ohA9aPRpt+6bQ=='} def get_private_key_req(): return {'name': 'myprivatekey', 'payload_content_type': 'application/pkcs8', 'payload_content_encoding': 'base64', 'algorithm': 'rsa', 'bit_length': 2048, 'secret_type': 'private', 'payload': base64.b64encode(keys.get_private_key_pem())} def get_public_key_req(): return {'name': 'mypublickey', 'payload_content_type': 'application/octet-stream', 'payload_content_encoding': 'base64', 'algorithm': 'rsa', 'bit_length': 2048, 'secret_type': 'public', 'payload': base64.b64encode(keys.get_public_key_pem())} def get_certificate_req(): return {'name': 'mycertificate', 'payload_content_type': 'application/pkix-cert', 'payload_content_encoding': 'base64', 'algorithm': 'rsa', 'bit_length': 2048, 'secret_type': 'certificate', 'payload': base64.b64encode(keys.get_certificate_pem())} def get_passphrase_req(): return {'name': 'mypassphrase', 'payload_content_type': 'text/plain', 'secret_type': 'passphrase', 'payload': 'mysecretpassphrase'} def suite(): suite = unittest.TestSuite() suite.addTest(WhenTestingSecretValidator()) return suite class WhenTestingValidatorsFunctions(utils.BaseTestCase): def test_secret_too_big_is_false_for_small_secrets(self): data = b'\xb0' is_too_big = validators.secret_too_big(data) self.assertFalse(is_too_big) def test_secret_too_big_is_true_for_big_secrets(self): data = b'\x01' * validators.CONF.max_allowed_secret_in_bytes data += 
b'\x01' is_too_big = validators.secret_too_big(data) self.assertTrue(is_too_big) def test_secret_too_big_is_true_for_big_unicode_secrets(self): beer = u'\U0001F37A' data = beer * (validators.CONF.max_allowed_secret_in_bytes / 4) data += u'1' is_too_big = validators.secret_too_big(data) self.assertTrue(is_too_big) @utils.parameterized_test_case class WhenTestingSecretValidator(utils.BaseTestCase): def setUp(self): super(WhenTestingSecretValidator, self).setUp() self.name = 'name' self.payload = b'not-encrypted' self.payload_content_type = 'text/plain' self.secret_algorithm = 'algo' self.secret_bit_length = 512 self.secret_type = 'opaque' self.secret_mode = 'cytype' self.secret_req = {'name': self.name, 'payload_content_type': self.payload_content_type, 'algorithm': self.secret_algorithm, 'bit_length': self.secret_bit_length, 'secret_type': self.secret_type, 'mode': self.secret_mode, 'payload': self.payload} self.validator = validators.NewSecretValidator() def test_should_validate_all_fields(self): self.validator.validate(self.secret_req) def test_should_validate_no_name(self): del self.secret_req['name'] self.validator.validate(self.secret_req) def test_should_validate_empty_name(self): self.secret_req['name'] = ' ' self.validator.validate(self.secret_req) def test_should_validate_null_name(self): self.secret_req['name'] = None self.validator.validate(self.secret_req) def test_should_validate_no_payload(self): del self.secret_req['payload'] del self.secret_req['payload_content_type'] result = self.validator.validate(self.secret_req) self.assertNotIn('payload', result) def test_should_validate_payload_with_whitespace(self): self.secret_req['payload'] = ' ' + self.payload + ' ' result = self.validator.validate(self.secret_req) self.assertEqual(self.payload, result['payload']) def test_should_validate_future_expiration(self): self.secret_req['expiration'] = '2114-02-28T19:14:44.180394' result = self.validator.validate(self.secret_req) self.assertIn('expiration', 
result) self.assertIsInstance(result['expiration'], datetime.datetime) def test_should_validate_future_expiration_no_t(self): self.secret_req['expiration'] = '2114-02-28 19:14:44.180394' result = self.validator.validate(self.secret_req) self.assertIn('expiration', result) self.assertIsInstance(result['expiration'], datetime.datetime) def test_should_validate_expiration_with_z(self): expiration = '2114-02-28 19:14:44.180394Z' self.secret_req['expiration'] = expiration result = self.validator.validate(self.secret_req) self.assertIn('expiration', result) self.assertIsInstance(result['expiration'], datetime.datetime) self.assertEqual(expiration[:-1], str(result['expiration'])) def test_should_validate_expiration_with_tz(self): expiration = '2114-02-28 12:14:44.180394-05:00' self.secret_req['expiration'] = expiration result = self.validator.validate(self.secret_req) self.assertIn('expiration', result) self.assertIsInstance(result['expiration'], datetime.datetime) expected = expiration[:-6].replace('12', '17', 1) self.assertEqual(expected, str(result['expiration'])) def test_should_validate_expiration_extra_whitespace(self): expiration = '2114-02-28 12:14:44.180394-05:00 ' self.secret_req['expiration'] = expiration result = self.validator.validate(self.secret_req) self.assertIn('expiration', result) self.assertIsInstance(result['expiration'], datetime.datetime) expected = expiration[:-12].replace('12', '17', 1) self.assertEqual(expected, str(result['expiration'])) def test_should_validate_empty_expiration(self): self.secret_req['expiration'] = ' ' result = self.validator.validate(self.secret_req) self.assertIn('expiration', result) self.assertTrue(not result['expiration']) def test_should_raise_numeric_name(self): self.secret_req['name'] = 123 exception = self.assertRaises( excep.InvalidObject, self.validator.validate, self.secret_req, ) self.assertEqual('name', exception.invalid_property) def test_should_raise_name_length_is_greater_than_max(self): 
self.secret_req['name'] = 'a' * 256 exception = self.assertRaises( excep.InvalidObject, self.validator.validate, self.secret_req, ) self.assertEqual('name', exception.invalid_property) def test_should_raise_negative_bit_length(self): self.secret_req['bit_length'] = -23 exception = self.assertRaises( excep.InvalidObject, self.validator.validate, self.secret_req, ) self.assertEqual('bit_length', exception.invalid_property) self.assertIn('bit_length', exception.message) def test_should_raise_non_integer_bit_length(self): self.secret_req['bit_length'] = "23" exception = self.assertRaises( excep.InvalidObject, self.validator.validate, self.secret_req, ) self.assertEqual('bit_length', exception.invalid_property) self.assertIn('bit_length', exception.message) def test_should_raise_bit_length_less_than_min(self): self.secret_req['bit_length'] = 0 exception = self.assertRaises( excep.InvalidObject, self.validator.validate, self.secret_req, ) self.assertEqual('bit_length', exception.invalid_property) self.assertIn('bit_length', exception.message) def test_should_raise_bit_length_greater_than_max(self): self.secret_req['bit_length'] = 32768 exception = self.assertRaises( excep.InvalidObject, self.validator.validate, self.secret_req, ) self.assertEqual('bit_length', exception.invalid_property) self.assertIn('bit_length', exception.message) def test_should_raise_mode_length_greater_than_max(self): self.secret_req['mode'] = 'a' * 256 exception = self.assertRaises( excep.InvalidObject, self.validator.validate, self.secret_req, ) self.assertEqual('mode', exception.invalid_property) self.assertIn('mode', exception.message) def test_should_raise_mode_is_non_string(self): self.secret_req['mode'] = 123 exception = self.assertRaises( excep.InvalidObject, self.validator.validate, self.secret_req, ) self.assertEqual('mode', exception.invalid_property) self.assertIn('mode', exception.message) def test_validation_should_raise_with_empty_payload(self): self.secret_req['payload'] = ' ' 
exception = self.assertRaises( excep.InvalidObject, self.validator.validate, self.secret_req, ) self.assertEqual('payload', exception.invalid_property) self.assertIn('payload', exception.message) def test_should_raise_already_expired(self): self.secret_req['expiration'] = '2004-02-28T19:14:44.180394' exception = self.assertRaises( excep.InvalidObject, self.validator.validate, self.secret_req, ) self.assertEqual('expiration', exception.invalid_property) self.assertIn('expiration', exception.message) def test_should_raise_expiration_nonsense(self): self.secret_req['expiration'] = 'nonsense' exception = self.assertRaises( excep.InvalidObject, self.validator.validate, self.secret_req, ) self.assertEqual('expiration', exception.invalid_property) self.assertIn('expiration', exception.message) def test_should_raise_expiration_is_non_string(self): self.secret_req['expiration'] = 123 exception = self.assertRaises( excep.InvalidObject, self.validator.validate, self.secret_req, ) self.assertEqual('expiration', exception.invalid_property) self.assertIn('expiration', exception.message) def test_should_raise_expiration_greater_than_max(self): self.secret_req['expiration'] = 'a' * 256 exception = self.assertRaises( excep.InvalidObject, self.validator.validate, self.secret_req, ) self.assertEqual('expiration', exception.invalid_property) self.assertIn('expiration', exception.message) def test_should_raise_algorithm_is_non_string(self): self.secret_req['algorithm'] = 123 exception = self.assertRaises( excep.InvalidObject, self.validator.validate, self.secret_req, ) self.assertEqual('algorithm', exception.invalid_property) self.assertIn('algorithm', exception.message) def test_should_raise_algorithm_greater_than_max(self): self.secret_req['algorithm'] = 'a' * 256 exception = self.assertRaises( excep.InvalidObject, self.validator.validate, self.secret_req, ) self.assertEqual('algorithm', exception.invalid_property) self.assertIn('algorithm', exception.message) def 
test_should_raise_all_nulls(self): self.secret_req = {'name': None, 'algorithm': None, 'bit_length': None, 'mode': None} self.assertRaises( excep.InvalidObject, self.validator.validate, self.secret_req, ) def test_should_raise_all_empties(self): self.secret_req = {'name': '', 'algorithm': '', 'bit_length': '', 'mode': ''} self.assertRaises( excep.InvalidObject, self.validator.validate, self.secret_req, ) def test_should_raise_no_payload_content_type(self): del self.secret_req['payload_content_type'] self.assertRaises( excep.InvalidObject, self.validator.validate, self.secret_req, ) def test_should_raise_with_message_w_bad_payload_content_type(self): self.secret_req['payload_content_type'] = 'plain/text' try: self.validator.validate(self.secret_req) except excep.InvalidObject as e: self.assertNotEqual(str(e), 'None') self.assertIsNotNone(e.message) self.assertNotEqual(e.message, 'None') else: self.fail('No validation exception was raised') def test_should_validate_mixed_case_payload_content_type(self): self.secret_req['payload_content_type'] = 'TeXT/PlaiN' self.validator.validate(self.secret_req) def test_should_validate_upper_case_payload_content_type(self): self.secret_req['payload_content_type'] = 'TEXT/PLAIN' self.validator.validate(self.secret_req) def test_should_raise_with_mixed_case_wrong_payload_content_type(self): self.secret_req['payload_content_type'] = 'TeXT/PlaneS' self.assertRaises( excep.InvalidObject, self.validator.validate, self.secret_req, ) def test_should_raise_with_upper_case_wrong_payload_content_type(self): self.secret_req['payload_content_type'] = 'TEXT/PLANE' self.assertRaises( excep.InvalidObject, self.validator.validate, self.secret_req, ) def test_should_raise_payload_content_type_greater_than_max(self): self.secret_req['payload_content_type'] = 'a' * 256 exception = self.assertRaises( excep.InvalidObject, self.validator.validate, self.secret_req, ) self.assertEqual('payload_content_type', exception.invalid_property) 
self.assertIn('payload_content_type', exception.message) def test_should_raise_with_payload_content_encoding_greater_than_max(self): self.secret_req['payload_content_encoding'] = 'a' * 256 exception = self.assertRaises( excep.InvalidObject, self.validator.validate, self.secret_req, ) self.assertEqual('payload_content_encoding', exception.invalid_property) self.assertIn('payload_content_encoding', exception.message) def test_should_raise_with_plain_text_and_encoding(self): self.secret_req['payload_content_encoding'] = 'base64' self.assertRaises( excep.InvalidObject, self.validator.validate, self.secret_req, ) def test_should_raise_with_wrong_encoding(self): self.secret_req['payload_content_type'] = 'application/octet-stream' self.secret_req['payload_content_encoding'] = 'unsupported' self.assertRaises( excep.InvalidObject, self.validator.validate, self.secret_req, ) def test_should_validate_with_supported_encoding(self): self.secret_req['payload_content_type'] = 'application/octet-stream' self.secret_req['payload_content_encoding'] = 'base64' self.secret_req['payload'] = 'bXktc2VjcmV0LWhlcmU=' self.validator.validate(self.secret_req) def test_validation_should_validate_with_good_base64_payload(self): self.secret_req['payload_content_type'] = 'application/octet-stream' self.secret_req['payload_content_encoding'] = 'base64' self.secret_req['payload'] = 'bXktc2VjcmV0LWhlcmU=' self.validator.validate(self.secret_req) def test_validation_should_raise_with_bad_base64_payload(self): self.secret_req['payload_content_type'] = 'application/octet-stream' self.secret_req['payload_content_encoding'] = 'base64' self.secret_req['payload'] = 'bad base 64' exception = self.assertRaises( excep.InvalidObject, self.validator.validate, self.secret_req, ) self.assertEqual('payload', exception.invalid_property) def test_validation_should_raise_with_unicode_payload(self): self.secret_req['payload_content_type'] = 'application/octet-stream' self.secret_req['payload_content_encoding'] = 
'base64' self.secret_req['payload'] = unichr(0x0080) exception = self.assertRaises( excep.InvalidObject, self.validator.validate, self.secret_req, ) self.assertEqual('payload', exception.invalid_property) def test_should_pass_with_no_secret_type(self): request = dict(self.secret_req) del request['secret_type'] self.validator.validate(request) def test_should_fail_with_unknown_secret_type(self): self.secret_req['secret_type'] = 'unknown_type' self.assertRaises( excep.InvalidObject, self.validator.validate, self.secret_req, ) @utils.parameterized_dataset({ 'symmetric': [get_symmetric_key_req()], 'private': [get_private_key_req()], 'public': [get_public_key_req()], 'certificate': [get_certificate_req()], 'passphrase': [get_passphrase_req()], }) def test_should_pass_with_secret_type(self, request): self.validator.validate(request) @utils.parameterized_dataset({ 'symmetric': [get_symmetric_key_req(), 'foo'], 'private': [get_private_key_req(), 'foo'], 'public': [get_public_key_req(), 'foo'], 'certificate': [get_certificate_req(), 'foo'], 'passphrase': [get_passphrase_req(), 'base64'], }) def test_should_fail_with_bad_encoding(self, request, content_encoding): request['payload_content_encoding'] = content_encoding self.assertRaises( excep.InvalidObject, self.validator.validate, request, ) @utils.parameterized_dataset({ 'symmetric': [get_symmetric_key_req(), 'text/plain'], 'private': [get_private_key_req(), 'text/plain'], 'public': [get_public_key_req(), 'text/plain'], 'certificate': [get_certificate_req(), 'text/plain'], 'passphrase': [get_passphrase_req(), 'application/octet-stream'], }) def test_should_fail_with_bad_content_type(self, request, content_type): request['payload_content_type'] = content_type self.assertRaises( excep.InvalidObject, self.validator.validate, request, ) class WhenTestingContainerValidator(utils.BaseTestCase): def setUp(self): super(WhenTestingContainerValidator, self).setUp() validators.CONF.set_override("host_href", "http://localhost:9311", 
class WhenTestingContainerValidator(utils.BaseTestCase):
    """Tests for validators.ContainerValidator with 'generic' containers.

    setUp builds a minimal valid container request (two named secret
    refs pointing at the configured host); individual tests mutate it
    to exercise one validation rule each.
    """

    def setUp(self):
        super(WhenTestingContainerValidator, self).setUp()
        # secret_ref hostnames must match the configured host_href,
        # so pin it for every test in this class.
        validators.CONF.set_override("host_href", "http://localhost:9311",
                                     enforce_type=True)

        self.name = 'name'
        self.type = 'generic'
        self.secret_refs = [
            {
                'name': 'testname',
                'secret_ref': 'http://localhost:9311/1231'
            },
            {
                'name': 'testname2',
                'secret_ref': 'http://localhost:9311/1232'
            }
        ]
        self.container_req = {'name': self.name,
                              'type': self.type,
                              'secret_refs': self.secret_refs}

        self.validator = validators.ContainerValidator()

    def test_should_validate_all_fields(self):
        self.validator.validate(self.container_req)

    def test_should_validate_no_name(self):
        # name is optional for containers.
        del self.container_req['name']
        self.validator.validate(self.container_req)

    def test_should_validate_empty_name(self):
        self.container_req['name'] = '    '
        self.validator.validate(self.container_req)

    def test_should_raise_name_length_greater_than_max(self):
        self.container_req['name'] = 'a' * 256
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )
        self.assertEqual('name', exception.invalid_property)
        self.assertIn('name', exception.message)

    def test_should_raise_nonstring_secret_name(self):
        self.secret_refs[0]["name"] = 5
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )
        self.assertEqual('secret_refs', exception.invalid_property)

    def test_should_raise_secret_name_too_long(self):
        self.secret_refs[0]['name'] = 'a' * 256
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )
        self.assertEqual('secret_refs', exception.invalid_property)

    def test_should_raise_numeric_name(self):
        self.container_req['name'] = 123
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )
        self.assertEqual('name', exception.invalid_property)
        self.assertIn('name', exception.message)

    def test_should_raise_no_type(self):
        del self.container_req['type']
        self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )
        # TODO(hgedikli): figure out why invalid_property is null here
        # self.assertEqual('type', e.exception.invalid_property)

    def test_should_raise_empty_type(self):
        self.container_req['type'] = ''
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )
        self.assertEqual('type', exception.invalid_property)

    def test_should_raise_not_supported_type(self):
        self.container_req['type'] = 'testtype'
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )
        self.assertEqual('type', exception.invalid_property)

    def test_should_raise_all_nulls(self):
        self.container_req = {'name': None,
                              'type': None,
                              'bit_length': None,
                              'secret_refs': None}
        self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )

    def test_should_raise_all_empties(self):
        self.container_req = {'name': '',
                              'type': '',
                              'secret_refs': []}
        self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )

    def test_should_validate_empty_secret_refs(self):
        # A container with no secrets is still a valid container.
        self.container_req['secret_refs'] = []
        self.validator.validate(self.container_req)

    def test_should_raise_no_secret_ref_in_secret_refs(self):
        del self.container_req['secret_refs'][0]['secret_ref']
        self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )

    def test_should_raise_empty_secret_ref_in_secret_refs(self):
        self.container_req['secret_refs'][0]['secret_ref'] = ''
        self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )

    def test_should_raise_numeric_secret_ref_in_secret_refs(self):
        self.container_req['secret_refs'][0]['secret_ref'] = 123
        self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )

    def test_should_raise_duplicate_names_in_secret_refs(self):
        self.container_req['secret_refs'].append(
            self.container_req['secret_refs'][0])
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )
        self.assertEqual('secret_refs', exception.invalid_property)

    def test_should_raise_duplicate_secret_ids_in_secret_refs(self):
        # Same secret_ref under a different name is still a duplicate.
        secret_ref = self.container_req['secret_refs'][0]
        secret_ref['name'] = 'testname3'
        self.container_req['secret_refs'].append(secret_ref)
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )
        self.assertEqual('secret_refs', exception.invalid_property)

    def test_should_raise_duplicate_secret_ref_format_ids_in_secret_refs(self):
        """Test duplicate secret_id presence as part of single container.

        Here secret_id is represented in different format and secret_id is
        extracted from there.
        """
        secret_refs = [
            {
                'name': 'testname',
                'secret_ref': 'http://localhost:9311/v1/12345/secrets/1231'
            },
            {
                'name': 'testname2',
                'secret_ref': 'http://localhost:9311/v1/12345/secrets//1232'
            },
            {
                'name': 'testname3',
                'secret_ref': 'http://localhost:9311/v1/12345/secrets//1231/'
            }
        ]

        container_req = {'name': 'name',
                         'type': 'generic',
                         'secret_refs': secret_refs}

        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            container_req,
        )
        self.assertEqual('secret_refs', exception.invalid_property)

    def test_ensure_unconfigured_secret_ref_hostname_cannot_be_passed_in(self):
        # Attempt to add some bogus secret refs.
        secret_refs = [
            {
                'name': 'super-secret-beer-ingredient',
                'secret_ref': 'http://kegsarecool.com:9311/1234/secrets/57890'
            },
            {
                'name': 'iShouldNotBeAbleToExist',
                'secret_ref': 'http://invalid.fqdn:9311/v1/secrets/FAD23'
            }
        ]
        container_req = {
            'name': 'test-container',
            'type': 'generic',
            'secret_refs': secret_refs
        }
        self.assertRaises(
            excep.UnsupportedField,
            self.validator.validate,
            container_req,
        )
class WhenTestingRSAContainerValidator(utils.BaseTestCase):
    """Tests for ContainerValidator with 'rsa' containers.

    RSA containers require exactly the well-known ref names
    public_key / private_key / private_key_passphrase.
    """

    def setUp(self):
        super(WhenTestingRSAContainerValidator, self).setUp()
        validators.CONF.set_override("host_href", "http://localhost:9311",
                                     enforce_type=True)

        self.name = 'name'
        self.type = 'rsa'
        self.secret_refs = [
            {
                'name': 'public_key',
                'secret_ref': 'http://localhost:9311/1231'
            },
            {
                'name': 'private_key',
                'secret_ref': 'http://localhost:9311/1232'
            },
            {
                'name': 'private_key_passphrase',
                'secret_ref': 'http://localhost:9311/1233'
            }
        ]
        self.container_req = {'name': self.name,
                              'type': self.type,
                              'secret_refs': self.secret_refs}

        self.validator = validators.ContainerValidator()

    def test_should_validate_all_fields(self):
        self.validator.validate(self.container_req)

    def test_should_raise_no_names_in_secret_refs(self):
        # RSA containers require every ref to carry a role name.
        del self.container_req['secret_refs'][0]['name']
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )
        self.assertEqual('secret_refs', exception.invalid_property)

    def test_should_raise_empty_names_in_secret_refs(self):
        self.container_req['secret_refs'][0]['name'] = ''
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )
        self.assertEqual('secret_refs', exception.invalid_property)

    def test_should_raise_unsupported_names_in_secret_refs(self):
        self.container_req['secret_refs'][0]['name'] = 'testttt'
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )
        self.assertEqual('secret_refs', exception.invalid_property)

    def test_should_raise_duplicate_secret_id_in_secret_refs(self):
        self.container_req['secret_refs'][0]['secret_ref'] = (
            self.container_req['secret_refs'][2]['secret_ref'])
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )
        self.assertEqual('secret_refs', exception.invalid_property)

    def test_should_raise_more_than_3_secret_refs_for_rsa_type(self):
        new_secret_ref = {
            'name': 'new secret ref',
            'secret_ref': 'http://localhost:9311/234234'
        }
        self.container_req['secret_refs'].append(new_secret_ref)

        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req,
        )
        self.assertEqual('secret_refs', exception.invalid_property)

    def test_should_raise_if_required_name_missing(self):
        name = 'name'
        type = 'certificate'
        secret_refs = [
            {
                'name': 'private_key',
                'secret_ref': 'http://localhost:9311/123'
            },
            {
                'name': 'private_key_passphrase',
                'secret_ref': 'http://localhost:9311/123'
            }
        ]
        container_req = {'name': name, 'type': type,
                         'secret_refs': secret_refs}
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            container_req)
        self.assertEqual('secret_refs', exception.invalid_property)


class WhenTestingCertificateContainerValidator(utils.BaseTestCase):
    """Tests for ContainerValidator with 'certificate' containers.

    Certificate containers accept up to four refs named certificate /
    private_key / private_key_passphrase / intermediates.
    """

    def setUp(self):
        super(WhenTestingCertificateContainerValidator, self).setUp()
        validators.CONF.set_override("host_href", "http://localhost:9311",
                                     enforce_type=True)

        self.name = 'name'
        self.type = 'certificate'
        self.secret_refs = [
            {
                'name': 'certificate',
                'secret_ref': 'http://localhost:9311/S4dfsdrf'
            },
            {
                'name': 'private_key',
                'secret_ref': 'http://localhost:9311/1231'
            },
            {
                'name': 'private_key_passphrase',
                'secret_ref': 'http://localhost:9311/1232'
            },
            {
                'name': 'intermediates',
                'secret_ref': 'http://localhost:9311/1233'
            }
        ]
        self.container_req = {'name': self.name,
                              'type': self.type,
                              'secret_refs': self.secret_refs}

        self.validator = validators.ContainerValidator()

    def test_should_validate_all_fields(self):
        self.validator.validate(self.container_req)

    def test_should_raise_more_than_4_secret_refs_for_cert_type(self):
        new_secret_ref = {
            'name': 'new secret ref',
            'secret_ref': 'http://localhost:9311/234234'
        }
        self.container_req['secret_refs'].append(new_secret_ref)

        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req)
        self.assertEqual('secret_refs', exception.invalid_property)

    def test_should_raise_unsupported_names_in_secret_refs(self):
        # public_key is an RSA-container name, not a certificate one.
        self.container_req['secret_refs'][0]['name'] = 'public_key'
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.container_req)
        self.assertEqual('secret_refs', exception.invalid_property)

    def test_should_raise_if_required_name_missing(self):
        name = 'name'
        type = 'certificate'
        secret_refs = [
            {
                'name': 'private_key',
                'secret_ref': '123'
            },
            {
                'name': 'intermediates',
                'secret_ref': '123'
            }
        ]
        container_req = {'name': name, 'type': type,
                         'secret_refs': secret_refs}
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            container_req)
        self.assertEqual('secret_refs', exception.invalid_property)
class WhenTestingTransportKeyValidator(utils.BaseTestCase):
    """Tests for validators.NewTransportKeyValidator.

    A valid request carries a non-empty string 'transport_key' and a
    string 'plugin_name'; each test corrupts one of the two fields.
    """

    def setUp(self):
        super(WhenTestingTransportKeyValidator, self).setUp()
        self.plugin_name = 'name'
        self.transport_key = 'abcdef'
        self.transport_req = {'plugin_name': self.plugin_name,
                              'transport_key': self.transport_key}

        self.validator = validators.NewTransportKeyValidator()

    def _assert_invalid(self, bad_property):
        # Validate the (already mutated) request and verify which
        # property was reported as invalid.
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.transport_req
        )
        self.assertEqual(bad_property, exception.invalid_property)

    def test_should_raise_with_invalid_json_data_type(self):
        # A list is not an acceptable request body at all.
        self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            []
        )

    def test_should_raise_with_empty_transport_key(self):
        self.transport_req['transport_key'] = ''
        self._assert_invalid('transport_key')

    def test_should_raise_transport_key_is_non_string(self):
        self.transport_req['transport_key'] = 123
        self._assert_invalid('transport_key')

    def test_should_raise_transport_key_is_missing(self):
        del self.transport_req['transport_key']
        self._assert_invalid('transport_key')

    def test_should_raise_plugin_name_is_non_string(self):
        self.transport_req['plugin_name'] = 123
        self._assert_invalid('plugin_name')

    def test_should_raise_plugin_name_is_missing(self):
        del self.transport_req['plugin_name']
        self._assert_invalid('plugin_name')
class WhenTestingConsumerValidator(utils.BaseTestCase):
    """Tests for validators.ContainerConsumerValidator.

    A consumer registration requires both a 'name' and a 'URL'
    (upper-case key, per the consumer schema used in setUp).
    """

    def setUp(self):
        super(WhenTestingConsumerValidator, self).setUp()

        self.name = 'name'
        self.URL = 'http://my.url/resource/UUID'
        self.consumer_req = {'name': self.name,
                             'URL': self.URL}
        self.validator = validators.ContainerConsumerValidator()

    def test_should_raise_with_invalid_json_data_type(self):
        # A list is not an acceptable request body at all.
        self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            []
        )

    def test_should_raise_with_missing_name(self):
        consumer_req = {'URL': self.URL}
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            consumer_req
        )
        self.assertIn('\'name\'', exception.args[0])

    def test_should_raise_with_missing_URL(self):
        consumer_req = {'name': self.name}
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            consumer_req
        )
        self.assertIn('\'URL\'', exception.args[0])

    def test_should_validate_all_fields(self):
        self.validator.validate(self.consumer_req)

    def test_name_too_long_should_raise_with_invalid_object(self):
        # Negative test to make sure our maxLength parameter for the
        # name field raises the proper exception when a value greater
        # than 255 in this case is passed in.
        #
        # FIX: the key was previously lowercase 'url', so the request
        # was rejected for the *missing required 'URL' field* (see
        # test_should_raise_with_missing_URL) rather than for the
        # over-long name, and the maxLength rule was never exercised.
        # Using 'URL' makes the name length the only invalid property.
        longname = 'a' * 256
        consumer_req = {'name': longname, 'URL': self.URL}
        self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            consumer_req
        )
class WhenTestingKeyTypeOrderValidator(utils.BaseTestCase):
    """Tests for validators.TypeOrderValidator with 'key' orders.

    setUp builds a valid symmetric-key order (AES/256/cbc); tests
    mutate 'type' or the 'meta' sub-document.
    """

    def setUp(self):
        super(WhenTestingKeyTypeOrderValidator, self).setUp()
        self.type = 'key'
        self.meta = {"name": "secretname",
                     "algorithm": "AES",
                     "bit_length": 256,
                     "mode": "cbc"}

        self.key_order_req = {'type': self.type,
                              'meta': self.meta}

        self.validator = validators.TypeOrderValidator()

    def test_should_pass_with_certificate_type_in_order_refs(self):
        self.key_order_req['type'] = 'certificate'
        result = self.validator.validate(self.key_order_req)
        self.assertEqual('certificate', result['type'])

    def test_should_pass_with_null_content_type_in_meta(self):
        self.key_order_req['meta']['payload_content_type'] = None
        result = self.validator.validate(self.key_order_req)
        self.assertIsNone(result['meta']['payload_content_type'])

    def test_should_pass_good_bit_meta_in_order_refs(self):
        self.key_order_req['meta']['algorithm'] = 'AES'
        self.key_order_req['meta']['bit_length'] = 256
        result = self.validator.validate(self.key_order_req)
        # expiration defaults to None when not supplied.
        self.assertIsNone(result['meta']['expiration'])

    def test_should_pass_good_exp_meta_in_order_refs(self):
        self.key_order_req['meta']['algorithm'] = 'AES'
        # A date one year in the future must parse into a datetime.
        ony_year_factor = datetime.timedelta(days=1 * 365)
        date_after_year = datetime.datetime.now() + ony_year_factor
        date_after_year_str = date_after_year.strftime('%Y-%m-%d %H:%M:%S')
        self.key_order_req['meta']['expiration'] = date_after_year_str
        result = self.validator.validate(self.key_order_req)

        self.assertIn('expiration', result['meta'])
        self.assertIsInstance(result['meta']['expiration'],
                              datetime.datetime)

    def test_should_raise_with_no_type_in_order_refs(self):
        del self.key_order_req['type']
        exception = self.assertRaises(excep.InvalidObject,
                                      self.validator.validate,
                                      self.key_order_req)
        self.assertEqual('type', exception.invalid_property)

    def test_should_raise_with_bad_type_in_order_refs(self):
        self.key_order_req['type'] = 'badType'
        exception = self.assertRaises(excep.InvalidObject,
                                      self.validator.validate,
                                      self.key_order_req)
        self.assertEqual('type', exception.invalid_property)

    def test_should_raise_with_no_meta_in_order_refs(self):
        del self.key_order_req['meta']
        exception = self.assertRaises(excep.InvalidObject,
                                      self.validator.validate,
                                      self.key_order_req)
        self.assertEqual('meta', exception.invalid_property)

    def test_should_raise_with_no_algorithm_in_order_refs(self):
        del self.key_order_req['meta']['algorithm']
        self.assertRaises(excep.InvalidObject,
                          self.validator.validate,
                          self.key_order_req)

    def test_should_raise_with_no_bit_length_in_order_refs(self):
        del self.key_order_req['meta']['bit_length']
        exception = self.assertRaises(excep.InvalidObject,
                                      self.validator.validate,
                                      self.key_order_req)
        self.assertIn("bit_length' is required field for key type order",
                      six.text_type(exception))

    def test_should_raise_with_zero_bit_length_in_order_refs(self):
        self.key_order_req['meta']['bit_length'] = 0
        exception = self.assertRaises(excep.InvalidObject,
                                      self.validator.validate,
                                      self.key_order_req)
        self.assertEqual('bit_length', exception.invalid_property)

    def test_should_raise_with_negative_bit_length_in_order_refs(self):
        self.key_order_req['meta']['bit_length'] = -1
        exception = self.assertRaises(excep.InvalidObject,
                                      self.validator.validate,
                                      self.key_order_req)
        self.assertEqual('bit_length', exception.invalid_property)

    def test_should_raise_with_wrong_exp_meta_in_order_refs(self):
        # ISO timestamp with microseconds is not an accepted format here.
        self.key_order_req['meta']['algorithm'] = 'AES'
        self.key_order_req['meta']['expiration'] = '2014-02-28T19:14:44.180394'
        exception = self.assertRaises(excep.InvalidObject,
                                      self.validator.validate,
                                      self.key_order_req)
        self.assertEqual('expiration', exception.invalid_property)

    def test_should_not_raise_correct_hmac_order_refs(self):
        # HMAC keys have no cipher mode, so 'mode' may be omitted.
        self.key_order_req['meta']['algorithm'] = 'hmacsha1'
        del self.key_order_req['meta']['mode']

        result = self.validator.validate(self.key_order_req)
        self.assertIsNotNone(result)
        self.assertEqual('hmacsha1', result['meta']['algorithm'])

    def test_should_raise_with_payload_in_order(self):
        # Orders generate their secret; a caller-supplied payload is invalid.
        self.key_order_req['meta']['payload'] = 'payload'
        self.assertRaises(excep.InvalidObject,
                          self.validator.validate,
                          self.key_order_req)
class WhenTestingAsymmetricTypeOrderValidator(utils.BaseTestCase):
    """Tests for validators.TypeOrderValidator with 'asymmetric' orders."""

    def setUp(self):
        super(WhenTestingAsymmetricTypeOrderValidator, self).setUp()
        self.type = 'asymmetric'
        self.meta = {"name": "secretname",
                     "algorithm": "RSA",
                     "bit_length": 256}
        self.asymmetric_order_req = {'type': self.type, 'meta': self.meta}
        self.validator = validators.TypeOrderValidator()

    def test_should_pass_good_meta_in_order_refs(self):
        validated = self.validator.validate(self.asymmetric_order_req)
        # expiration is defaulted to None when absent from the request.
        self.assertIsNone(validated['meta']['expiration'])

    def test_should_raise_with_no_algorithm_in_order_refs(self):
        del self.asymmetric_order_req['meta']['algorithm']
        self.assertRaises(excep.InvalidObject,
                          self.validator.validate,
                          self.asymmetric_order_req)

    def test_should_raise_with_payload_in_order(self):
        self.asymmetric_order_req['meta']['payload'] = 'payload'
        self.assertRaises(excep.InvalidObject,
                          self.validator.validate,
                          self.asymmetric_order_req)

    def test_should_pass_with_wrong_algorithm_in_asymmetric_order_refs(self):
        # Note (atiwari): the validator deliberately does not check the
        # algorithm value; that is the crypto plugin supports() method's
        # responsibility.
        self.asymmetric_order_req['meta']['algorithm'] = 'aes'
        validated = self.validator.validate(self.asymmetric_order_req)
        self.assertIsNone(validated['meta']['expiration'])

    def test_should_raise_with_no_bit_length_in_asymmetric_order_refs(self):
        del self.asymmetric_order_req['meta']['bit_length']
        err = self.assertRaises(excep.InvalidObject,
                                self.validator.validate,
                                self.asymmetric_order_req)
        self.assertIn(
            "bit_length' is required field for asymmetric key type order",
            six.text_type(err))

    def test_should_raise_with_zero_bit_length_in_asymmetric_order_refs(self):
        self.asymmetric_order_req['meta']['bit_length'] = 0
        err = self.assertRaises(excep.InvalidObject,
                                self.validator.validate,
                                self.asymmetric_order_req)
        self.assertEqual("bit_length", err.invalid_property)

    def test_should_raise_with_negative_bit_len_in_asymmetric_order_refs(self):
        self.asymmetric_order_req['meta']['bit_length'] = -1
        err = self.assertRaises(excep.InvalidObject,
                                self.validator.validate,
                                self.asymmetric_order_req)
        self.assertEqual("bit_length", err.invalid_property)
class WhenTestingSimpleCMCOrderValidator(utils.BaseTestCase):
    """Tests for certificate orders with request_type 'simple-cmc'.

    request_data must be base64-encoded PKCS#10 (CSR) data; the certs
    test helper generates good/bad/unsigned/wrongly-signed CSRs.
    """

    def setUp(self):
        super(WhenTestingSimpleCMCOrderValidator, self).setUp()
        self.type = 'certificate'
        self.meta = {'request_type': 'simple-cmc',
                     'request_data': base64.b64encode(
                         certs.create_good_csr()),
                     'requestor_name': 'Barbican User',
                     'requestor_email': 'barbican_user@example.com',
                     'requestor_phone': '555-1212'}
        self._set_order()
        self.validator = validators.TypeOrderValidator()

    def _set_order(self):
        # Rebuild the order dict after any mutation of self.meta.
        self.order_req = {'type': self.type,
                          'meta': self.meta}

    def test_should_pass_good_data(self):
        self.validator.validate(self.order_req)

    def test_should_raise_with_no_metadata(self):
        self.order_req = {'type': self.type}
        self.assertRaises(excep.InvalidObject,
                          self.validator.validate,
                          self.order_req)

    def test_should_raise_with_bad_request_type(self):
        self.meta['request_type'] = 'bad_request_type'
        self._set_order()
        self.assertRaises(excep.InvalidCertificateRequestType,
                          self.validator.validate,
                          self.order_req)

    def test_should_raise_with_no_request_data(self):
        del self.meta['request_data']
        self._set_order()
        self.assertRaises(excep.MissingMetadataField,
                          self.validator.validate,
                          self.order_req)

    def test_should_raise_with_pkcs10_data_with_bad_base64(self):
        # Raw (un-encoded) CSR bytes are not valid base64 input.
        self.meta['request_data'] = certs.create_bad_csr()
        self._set_order()
        self.assertRaises(excep.PayloadDecodingError,
                          self.validator.validate,
                          self.order_req)

    def test_should_raise_with_bad_pkcs10_data(self):
        self.meta['request_data'] = base64.b64encode(certs.create_bad_csr())
        self._set_order()
        self.assertRaises(excep.InvalidPKCS10Data,
                          self.validator.validate,
                          self.order_req)

    def test_should_raise_with_signed_wrong_key_pkcs10_data(self):
        self.meta['request_data'] = base64.b64encode(
            certs.create_csr_signed_with_wrong_key())
        self._set_order()
        self.assertRaises(excep.InvalidPKCS10Data,
                          self.validator.validate,
                          self.order_req)

    def test_should_raise_with_unsigned_pkcs10_data(self):
        self.meta['request_data'] = base64.b64encode(
            certs.create_csr_that_has_not_been_signed())
        self._set_order()
        self.assertRaises(excep.InvalidPKCS10Data,
                          self.validator.validate,
                          self.order_req)

    def test_should_raise_with_payload_in_order(self):
        self.meta['payload'] = 'payload'
        self.assertRaises(excep.InvalidObject,
                          self.validator.validate,
                          self.order_req)


class WhenTestingFullCMCOrderValidator(utils.BaseTestCase):
    """Tests for certificate orders with request_type 'full-cmc'.

    Full CMC is not implemented yet; validation must raise
    FullCMCNotSupported, and the real tests are skipped until then.
    """

    def setUp(self):
        super(WhenTestingFullCMCOrderValidator, self).setUp()
        self.type = 'certificate'
        self.meta = {'request_type': 'full-cmc',
                     'request_data': VALID_FULL_CMC,
                     'requestor_name': 'Barbican User',
                     'requestor_email': 'barbican_user@example.com',
                     'requestor_phone': '555-1212'}
        self._set_order()
        self.validator = validators.TypeOrderValidator()

    def _set_order(self):
        self.order_req = {'type': self.type,
                          'meta': self.meta}

    def test_should_raise_not_yet_implemented(self):
        self.assertRaises(excep.FullCMCNotSupported,
                          self.validator.validate,
                          self.order_req)

    @testtools.skip("Feature not yet implemented")
    def test_should_pass_good_data(self):
        self.validator.validate(self.order_req)

    @testtools.skip("Feature not yet implemented")
    def test_should_raise_with_no_request_data(self):
        del self.meta['request_data']
        self._set_order()
        self.assertRaises(excep.MissingMetadataField,
                          self.validator.validate,
                          self.order_req)

    @testtools.skip("Not yet implemented")
    def test_should_raise_with_bad_cmc_data(self):
        self.meta['request_data'] = 'Bad CMC Data'
        self._set_order()
        self.assertRaises(excep.InvalidCMCData,
                          self.validator.validate,
                          self.order_req)


class WhenTestingCustomOrderValidator(utils.BaseTestCase):
    """Tests for certificate orders with request_type 'custom'.

    Custom orders pass arbitrary ca_param_* values through unchecked.
    """

    def setUp(self):
        super(WhenTestingCustomOrderValidator, self).setUp()
        self.type = 'certificate'
        self.meta = {'request_type': 'custom',
                     'ca_param_1': 'value_1',
                     'ca_param_2': 'value_2',
                     'requestor_name': 'Barbican User',
                     'requestor_email': 'barbican_user@example.com',
                     'requestor_phone': '555-1212'}
        self._set_order()
        self.validator = validators.TypeOrderValidator()

    def _set_order(self):
        self.order_req = {'type': self.type,
                          'meta': self.meta}

    def test_should_pass_good_data(self):
        self.validator.validate(self.order_req)

    def test_should_pass_with_no_request_type(self):
        # defaults to custom
        del self.meta['request_type']
        self._set_order()
        self.validator.validate(self.order_req)


class WhenTestingStoredKeyOrderValidator(utils.BaseTestCase):
    """Tests for certificate orders with request_type 'stored-key'.

    Stored-key orders require a container_ref and a parseable
    subject_dn; extensions are not supported yet.
    """

    def setUp(self):
        super(WhenTestingStoredKeyOrderValidator, self).setUp()
        self.type = 'certificate'
        self.meta = {'request_type': 'stored-key',
                     'container_ref':
                         'https://localhost/v1/containers/good_container_ref',
                     'subject_dn': 'cn=barbican-server,o=example.com',
                     'requestor_name': 'Barbican User',
                     'requestor_email': 'barbican_user@example.com',
                     'requestor_phone': '555-1212'}
        self.order_req = {'type': self.type,
                          'meta': self.meta}
        self.validator = validators.TypeOrderValidator()

    def test_should_pass_good_data(self):
        self.validator.validate(self.order_req)

    def test_should_raise_with_no_container_ref(self):
        del self.meta['container_ref']
        self.assertRaises(excep.MissingMetadataField,
                          self.validator.validate,
                          self.order_req)

    def test_should_raise_with_no_subject_dn(self):
        del self.meta['subject_dn']
        self.assertRaises(excep.MissingMetadataField,
                          self.validator.validate,
                          self.order_req)

    def test_should_pass_with_profile_and_ca_id(self):
        self.meta['ca_id'] = 'my_ca_id'
        self.meta['profile'] = 'my_profile'
        self.validator.validate(self.order_req)

    def test_should_raise_with_profile_and_no_ca_id(self):
        # A profile only makes sense relative to a specific CA.
        self.meta['profile'] = 'my_profile'
        self.assertRaises(excep.MissingMetadataField,
                          self.validator.validate,
                          self.order_req)

    def test_should_raise_with_extensions_data(self):
        self.meta['extensions'] = VALID_EXTENSIONS
        self.assertRaises(excep.CertificateExtensionsNotSupported,
                          self.validator.validate,
                          self.order_req)

    @testtools.skip("Not yet implemented")
    def test_should_raise_with_bad_extensions_data(self):
        self.meta['extensions'] = 'Bad extensions data'
        self.assertRaises(excep.InvalidExtensionsData,
                          self.validator.validate,
                          self.order_req)

    def test_should_pass_with_one_cn_in_dn(self):
        self.meta['subject_dn'] = "CN=example1"
        self.validator.validate(self.order_req)

    def test_should_pass_with_two_cn_in_dn(self):
        self.meta['subject_dn'] = "CN=example1,CN=example2"
        self.validator.validate(self.order_req)

    def test_should_raise_with_blank_dn(self):
        self.meta['subject_dn'] = ""
        self.assertRaises(excep.InvalidSubjectDN,
                          self.validator.validate,
                          self.order_req)

    def test_should_raise_with_bad_subject_dn(self):
        self.meta['subject_dn'] = "Bad subject DN data"
        self.assertRaises(excep.InvalidSubjectDN,
                          self.validator.validate,
                          self.order_req)

    def test_should_raise_with_payload_in_order(self):
        self.meta['payload'] = 'payload'
        self.assertRaises(excep.InvalidObject,
                          self.validator.validate,
                          self.order_req)
@utils.parameterized_test_case
class WhenTestingAclValidator(utils.BaseTestCase):
    """Tests for validators.ACLValidator.

    Only the 'read' operation is supported today; write/list/delete
    are reserved for the future and must be rejected.
    """

    def setUp(self):
        super(WhenTestingAclValidator, self).setUp()
        self.validator = validators.ACLValidator()

    @utils.parameterized_dataset({
        'one_reader': [{'read': {'users': ['reader'],
                                 'project-access': True}}],
        'two_reader': [{'read': {'users': ['r1', 'r2'],
                                 'project-access': True}}],
        'private': [{'read': {'users': [],
                              'project-access': False}}],
        'default_users': [{'read': {'project-access': False}}],
        'default_creator': [{'read': {'users': ['reader']}}],
        'almost_empty': [{'read': {}}],
        'empty': [{}],
    })
    def test_should_validate(self, acl_req):
        self.validator.validate(acl_req)

    @utils.parameterized_dataset({
        'foo': ['foo'],
        'bad_op': [{'bad_op': {'users': ['reader'],
                               'project-access': True}}],
        'bad_field': [{'read': {'bad_field': ['reader'],
                                'project-access': True}}],
        'bad_user': [{'read': {'users': [27],
                               'project-access': True}}],
        'missing_op': [{'project-access': False}],
    })
    def test_should_raise(self, acl_req):
        self.assertRaises(excep.InvalidObject,
                          self.validator.validate,
                          acl_req)

    @utils.parameterized_dataset({
        'write': [{'write': {'users': ['writer'],
                             'project-access': True}}],
        'list': [{'list': {'users': ['lister'],
                           'project-access': True}}],
        'delete': [{'delete': {'users': ['deleter'],
                               'project-access': True}}],
    })
    def test_should_raise_future(self, acl_req):
        # Operations that may exist some day are still invalid today.
        self.assertRaises(excep.InvalidObject,
                          self.validator.validate,
                          acl_req)


class WhenTestingProjectQuotasValidator(utils.BaseTestCase):
    """Tests for validators.ProjectQuotaValidator.

    Each quota value (secrets/orders/containers/cas) must be an int;
    an empty project_quotas object is allowed, a missing one is not.
    """

    def setUp(self):
        super(WhenTestingProjectQuotasValidator, self).setUp()
        self.good_project_quotas = {"project_quotas":
                                    {"secrets": 50,
                                     "orders": 10,
                                     "containers": 20,
                                     "cas": 30}}
        self.bad_project_quotas = {"bad key": "bad value"}
        self.validator = validators.ProjectQuotaValidator()

    def test_should_pass_good_data(self):
        self.validator.validate(self.good_project_quotas)

    def test_should_pass_empty_properties(self):
        self.validator.validate({"project_quotas": {}})

    def test_should_raise_bad_data(self):
        self.assertRaises(excep.InvalidObject,
                          self.validator.validate,
                          self.bad_project_quotas)

    def test_should_raise_empty_dict(self):
        self.assertRaises(excep.InvalidObject,
                          self.validator.validate,
                          {})

    def test_should_raise_secrets_non_int(self):
        self.good_project_quotas['project_quotas']['secrets'] = "abc"
        self.assertRaises(excep.InvalidObject,
                          self.validator.validate,
                          self.good_project_quotas)

    def test_should_raise_orders_non_int(self):
        self.good_project_quotas['project_quotas']['orders'] = "abc"
        self.assertRaises(excep.InvalidObject,
                          self.validator.validate,
                          self.good_project_quotas)

    def test_should_raise_containers_non_int(self):
        self.good_project_quotas['project_quotas']['containers'] = "abc"
        self.assertRaises(excep.InvalidObject,
                          self.validator.validate,
                          self.good_project_quotas)

    def test_should_raise_cas_non_int(self):
        self.good_project_quotas['project_quotas']['cas'] = "abc"
        self.assertRaises(excep.InvalidObject,
                          self.validator.validate,
                          self.good_project_quotas)


@utils.parameterized_test_case
class WhenTestingNewCAValidator(utils.BaseTestCase):
    """Tests for validators.NewCAValidator (subordinate CA creation)."""

    def setUp(self):
        super(WhenTestingNewCAValidator, self).setUp()
        self.new_ca_req = {'name': 'New CA',
                           'subject_dn': 'cn=barbican-server,o=example.com',
                           'parent_ca_ref':
                               'https://localhost/v1/cas/parent_ca_id',
                           'description': 'This is a subCA'}
        self.validator = validators.NewCAValidator()

    def test_should_raise_with_empty_data(self):
        self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            {}
        )

    # NOTE(review): the two parameterized methods below do not start
    # with 'test_', so unittest/testtools discovery never runs them —
    # presumably unintentional; confirm and rename if so.
    @utils.parameterized_dataset({
        'name': ['name'],
        'subject_dn': ['subject_dn'],
        'parent_ca_ref': ['parent_ca_ref'],
    })
    def should_raise_if_any_required_parameter_is_missing(self, parameter):
        del self.new_ca_req[parameter]
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.new_ca_req
        )
        self.assertEqual(parameter, exception.invalid_property)

    @utils.parameterized_dataset({
        'name': ['name'],
        'subject_dn': ['subject_dn'],
        'parent_ca_ref': ['parent_ca_ref'],
    })
    def should_raise_if_any_required_parameter_is_empty(self, parameter):
        self.new_ca_req[parameter] = ''
        exception = self.assertRaises(
            excep.InvalidObject,
            self.validator.validate,
            self.new_ca_req
        )
        self.assertEqual(parameter, exception.invalid_property)

    def test_should_pass_with_valid_data(self):
        self.validator.validate(self.new_ca_req)

    def test_should_raise_with_invalid_subject_dn(self):
        self.new_ca_req['subject_dn'] = 'I am an invalid subject_dn!'
        self.assertRaises(
            excep.InvalidSubjectDN,
            self.validator.validate,
            self.new_ca_req
        )
@utils.parameterized_test_case
class WhenTestingSecretMetadataValidator(utils.BaseTestCase):
    """Tests for validators.NewSecretMetadataValidator.

    The request wraps a 'metadata' dict; keys and values must be
    strings, keys must be URL safe, and keys are lower-cased.
    """

    def setUp(self):
        super(WhenTestingSecretMetadataValidator, self).setUp()

        self.top_key = 'metadata'

        self.key1 = 'city'
        self.value1 = 'Austin'
        self.key2 = 'state'
        self.value2 = 'Texas'
        self.key3 = 'country'
        self.value3 = 'USA'

        self.metadata_req = {
            self.top_key: {
                self.key1: self.value1,
                self.key2: self.value2,
                self.key3: self.value3
            }
        }
        self.validator = validators.NewSecretMetadataValidator()

    def test_should_validate_all_fields(self):
        self.validator.validate(self.metadata_req)

    def test_should_validate_all_fields_and_make_key_lowercase(self):
        self.key1 = "DOgg"
        self.value1 = "poodle"
        self.metadata_req = {
            self.top_key: {
                self.key1: self.value1,
                self.key2: self.value2,
                self.key3: self.value3
            }
        }
        metadata = self.validator.validate(self.metadata_req)
        # Keys come back lower-cased.
        self.assertNotIn("DOgg", metadata.keys())
        self.assertIn("dogg", metadata.keys())

    def test_should_validate_no_keys(self):
        # An empty metadata dict is valid.
        del self.metadata_req[self.top_key][self.key1]
        del self.metadata_req[self.top_key][self.key2]
        del self.metadata_req[self.top_key][self.key3]
        self.validator.validate(self.metadata_req)

    def test_should_raise_invalid_key_no_metadata(self):
        del self.metadata_req[self.top_key]
        exception = self.assertRaises(excep.InvalidObject,
                                      self.validator.validate,
                                      self.metadata_req)
        self.assertIn("metadata' is a required property",
                      six.text_type(exception))

    def test_should_raise_invalid_key_non_string(self):
        self.key1 = 0
        metadata_req = {
            self.top_key: {
                self.key1: self.value1
            }
        }
        exception = self.assertRaises(excep.InvalidMetadataRequest,
                                      self.validator.validate,
                                      metadata_req)
        self.assertIn("Invalid Metadata. Keys and Values must be Strings.",
                      six.text_type(exception))

    def test_should_raise_invalid_key_non_url_safe_string(self):
        self.key1 = "key/01"
        metadata_req = {
            self.top_key: {
                self.key1: self.value1
            }
        }
        exception = self.assertRaises(excep.InvalidMetadataKey,
                                      self.validator.validate,
                                      metadata_req)
        self.assertIn("Invalid Key. Key must be URL safe.",
                      six.text_type(exception))

    def test_should_raise_invalid_value_non_string(self):
        self.value1 = 0
        metadata_req = {
            self.top_key: {
                self.key1: self.value1
            }
        }
        exception = self.assertRaises(excep.InvalidMetadataRequest,
                                      self.validator.validate,
                                      metadata_req)
        self.assertIn("Invalid Metadata. Keys and Values must be Strings.",
                      six.text_type(exception))


@utils.parameterized_test_case
class WhenTestingSecretMetadatumValidator(utils.BaseTestCase):
    """Tests for validators.NewSecretMetadatumValidator.

    A metadatum request is exactly {'key': ..., 'value': ...} with
    string members; the key is lower-cased and must be URL safe.
    """

    def setUp(self):
        super(WhenTestingSecretMetadatumValidator, self).setUp()

        self.key1 = 'key'
        self.value1 = 'city'
        self.key2 = 'value'
        self.value2 = 'Austin'

        self.metadata_req = {
            self.key1: self.value1,
            self.key2: self.value2
        }
        self.validator = validators.NewSecretMetadatumValidator()

    def test_should_validate_all_fields(self):
        self.validator.validate(self.metadata_req)

    def test_should_validate_all_fields_and_make_key_lowercase(self):
        self.value1 = "DOgg"
        self.value2 = "poodle"
        self.metadata_req = {
            self.key1: self.value1,
            self.key2: self.value2
        }
        metadata = self.validator.validate(self.metadata_req)
        self.assertEqual("dogg", metadata['key'])

    def test_should_raise_invalid_empty(self):
        del self.metadata_req[self.key1]
        del self.metadata_req[self.key2]
        exception = self.assertRaises(excep.InvalidObject,
                                      self.validator.validate,
                                      self.metadata_req)
        self.assertIn("Provided object does not match schema "
                      "'SecretMetadatum'",
                      six.text_type(exception))

    def test_should_raise_invalid_key_no_key(self):
        del self.metadata_req[self.key2]
        exception = self.assertRaises(excep.InvalidObject,
                                      self.validator.validate,
                                      self.metadata_req)
        self.assertIn("Provided object does not match schema "
                      "'SecretMetadatum'",
                      six.text_type(exception))

    def test_should_raise_invalid_key_no_value(self):
        del self.metadata_req[self.key1]
        exception = self.assertRaises(excep.InvalidObject,
                                      self.validator.validate,
                                      self.metadata_req)
        self.assertIn("Provided object does not match schema "
                      "'SecretMetadatum'",
                      six.text_type(exception))

    def test_should_raise_invalid_key_non_string(self):
        self.value1 = 0
        metadata_req = {
            self.key1: self.value1,
            self.key2: self.value2
        }
        exception = self.assertRaises(excep.InvalidObject,
                                      self.validator.validate,
                                      metadata_req)
        self.assertIn("Provided object does not match schema "
                      "'SecretMetadatum'",
                      six.text_type(exception))

    def test_should_raise_invalid_key_non_url_safe_string(self):
        self.value1 = "key/01"
        metadata_req = {
            self.key1: self.value1,
            self.key2: self.value2
        }
        exception = self.assertRaises(excep.InvalidMetadataKey,
                                      self.validator.validate,
                                      metadata_req)
        self.assertIn("Invalid Key. Key must be URL safe.",
                      six.text_type(exception))

    def test_should_raise_invalid_value_non_string(self):
        self.value2 = 0
        metadata_req = {
            self.key1: self.value1,
            self.key2: self.value2
        }
        exception = self.assertRaises(excep.InvalidObject,
                                      self.validator.validate,
                                      metadata_req)
        self.assertIn("Provided object does not match schema "
                      "'SecretMetadatum'",
                      six.text_type(exception))

    def test_should_raise_invalid_extra_sent_key(self):
        # additionalProperties are rejected by the schema.
        self.value2 = 0
        metadata_req = {
            self.key1: self.value1,
            self.key2: self.value2,
            "extra_key": "extra_value"
        }
        exception = self.assertRaises(excep.InvalidObject,
                                      self.validator.validate,
                                      metadata_req)
        self.assertIn("Provided object does not match schema "
                      "'SecretMetadatum'",
                      six.text_type(exception))
jenkinsjenkins00000000000000barbican-2.0.0/barbican/tests/api/test_transport_keys_resource.py0000664000567000056710000002620712701405673026502 0ustar jenkinsjenkins00000000000000# Copyright (c) 2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This test module focuses on typical-flow business logic tests with the
transport key resource classes.
"""
import mock
import pecan
from six import moves
import webtest

from barbican.api import app
from barbican.api import controllers
from barbican.common import exception as excep
import barbican.context
from barbican.model import models
from barbican.tests import utils


def get_barbican_env(external_project_id):
    """Build a WSGI extra_environ carrying an admin RequestContext.

    Policy enforcement is stubbed out with a no-op enforcer so the
    controller logic can be exercised without real policy files.
    """
    class NoopPolicyEnforcer(object):
        def enforce(self, *args, **kwargs):
            return

    kwargs = {'roles': None,
              'user': None,
              'project': external_project_id,
              'is_admin': True,
              'policy_enforcer': NoopPolicyEnforcer()}
    barbican_env = {'barbican.context':
                    barbican.context.RequestContext(**kwargs)}
    return barbican_env


# A PEM-encoded certificate used as the transport key fixture throughout
# these tests (content is opaque to the code under test).
SAMPLE_TRANSPORT_KEY = """
-----BEGIN CERTIFICATE-----
MIIDlDCCAnygAwIBAgIBGDANBgkqhkiG9w0BAQsFADBCMR8wHQYDVQQKDBZ0b21j
YXQgMjggZG9tYWluIHRyeSAzMR8wHQYDVQQDDBZDQSBTaWduaW5nIENlcnRpZmlj
YXRlMB4XDTE0MDMyNzA0MTU0OFoXDTE2MDMxNjA0MTU0OFowRTEfMB0GA1UECgwW
dG9tY2F0IDI4IGRvbWFpbiB0cnkgMzEiMCAGA1UEAwwZRFJNIFRyYW5zcG9ydCBD
ZXJ0aWZpY2F0ZTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANEjiTHn
xWKKnzgBzR8kHo5YKXDbYi01ar0pAiJQ8Xx4MXj3Uf6ckfxvJ7Icb2PhigAgINLe
td8butAXM0164kHeEMJWI2TG/+2f42Kla2KeU0bdgKbw1egyZreDvhGk/3P46LQt
LtRBCb5eQWS2gTFocgA5phzRQnmSS4BRTh1MnGxaFLZsPOXqZKptAYaeXyLG63vL
woBwFVGoodHrRrpYpCd+D6JABBdUEgSCaYG9JBDC5ElSjJnBlCNrUZ2kxokxbsQp
UHm70LV9c+5n0o1VLJSqnUDuOkoovVWytlKbz0dw0KiTUDjkb4F4D6s+IePV1ufJ
6cXvXCLLSQa42AcCAwEAAaOBkTCBjjAfBgNVHSMEGDAWgBSiQq7mBrAcTqqsPRvn
l8pk4uZCWTBGBggrBgEFBQcBAQQ6MDgwNgYIKwYBBQUHMAGGKmh0dHA6Ly9hbGVl
LXdvcmtwYy5yZWRoYXQuY29tOjgyODAvY2Evb2NzcDAOBgNVHQ8BAf8EBAMCBPAw
EwYDVR0lBAwwCgYIKwYBBQUHAwIwDQYJKoZIhvcNAQELBQADggEBALmAtjactFHA
d4nBFpwpwh3tGhkfwoSCuKThX54UXsJawQrx5gaxP0JE7YVLDRe4jn+RHjkXxdxX
Xt4IugdTsPNq0nvWVAzwZwoGlJZjqghHpD3AB4E5DEoOnVnmJRLFLF0Xg/R5Sw3F
j9wdVE/hGShrF+fOqNZhTG2Mf4f9TUR1Y8PtoBmtkwnFUoeiaI+Nq6Dd1Qw8ysar
i/sOzOOjou4vcbYnrKnn2hlSgF6toza0BCGVA8fMyGBh16JtTR1REL7Bf0m3ZQDy
4hjmPjvUTN3YO2RlLVZXArhhmqcQzCl94P37pAEN/JhAIYvQ2PPM/ofK9XHc9u9j
rQJGkMpu7ck=
-----END CERTIFICATE-----"""


def create_transport_key(id_ref="id",
                         plugin_name="default_plugin",
                         transport_key=None):
    """Generate a transport cert entity instance."""
    tkey = models.TransportKey(plugin_name, transport_key)
    tkey.id = id_ref
    return tkey


class FunctionalTest(utils.BaseTestCase):
    """Base case wiring a pecan app around each test's root controller."""

    def setUp(self):
        super(FunctionalTest, self).setUp()
        root = self.root
        config = {'app': {'root': root}}
        pecan.set_config(config, overwrite=True)
        self.app = webtest.TestApp(pecan.make_app(root))

    def tearDown(self):
        super(FunctionalTest, self).tearDown()
        pecan.set_config({}, overwrite=True)

    @property
    def root(self):
        return controllers.versions.VersionController()


class WhenGettingTransKeysListUsingTransportKeysResource(FunctionalTest):
    def setUp(self):
        super(
            WhenGettingTransKeysListUsingTransportKeysResource, self
        ).setUp()
        self.app = webtest.TestApp(app.build_wsgi_app(self.root))
        self.app.extra_environ = get_barbican_env(self.external_project_id)

    @property
    def root(self):
        self._init()

        class RootController(object):
            transport_keys = controllers.transportkeys.TransportKeysController(
                self.repo)

        return RootController()

    def _init(self):
        """Set up fixtures: ten transport keys behind a mocked repo."""
        self.plugin_name = "default_plugin"
        self.external_project_id = 'keystoneid1234'
        self.params = {'offset': 2, 'limit': 2}

        self.transport_key = SAMPLE_TRANSPORT_KEY
        self.num_keys = 10
        self.offset = 2
        self.limit = 2

        tk_params = {'plugin_name': self.plugin_name,
                     'transport_key': self.transport_key}
        self.tkeys = [create_transport_key(
            id_ref='id' + str(tkid), **tk_params)
            for tkid in moves.range(self.num_keys)]
        self.total = len(self.tkeys)
        self.repo = mock.MagicMock()
        self.repo.get_by_create_date.return_value = (self.tkeys,
                                                     self.offset,
                                                     self.limit,
                                                     self.total)
        self.params = {
            'offset': self.offset,
            'limit': self.limit
        }

    def test_should_get_list_transport_keys(self):
        resp = self.app.get('/transport_keys/', self.params)

        self.repo.get_by_create_date.assert_called_once_with(
            plugin_name=None,
            offset_arg=u'{0}'.format(self.offset),
            limit_arg=u'{0}'.format(self.limit),
            suppress_exception=True
        )

        self.assertIn('previous', resp.namespace)
        self.assertIn('next', resp.namespace)

        url_nav_next = self._create_url(self.external_project_id,
                                        self.offset + self.limit, self.limit)
        self.assertEqual(1, resp.body.count(url_nav_next))

        url_nav_prev = self._create_url(self.external_project_id,
                                        0, self.limit)
        self.assertEqual(1, resp.body.count(url_nav_prev))

        # Each key renders one href, plus the prev/next navigation links.
        url_hrefs = self._create_url(self.external_project_id)
        self.assertEqual((self.num_keys + 2), resp.body.count(url_hrefs))

    def test_response_should_include_total(self):
        resp = self.app.get('/transport_keys/', self.params)
        self.assertIn('total', resp.namespace)
        self.assertEqual(self.total, resp.namespace['total'])

    def test_should_handle_no_transport_keys(self):
        del self.tkeys[:]
        resp = self.app.get('/transport_keys/', self.params)

        self.repo.get_by_create_date.assert_called_once_with(
            plugin_name=None,
            offset_arg=u'{0}'.format(self.offset),
            limit_arg=u'{0}'.format(self.limit),
            suppress_exception=True
        )

        # Empty lists carry no pagination links.
        self.assertNotIn('previous', resp.namespace)
        self.assertNotIn('next', resp.namespace)

    def _create_url(self, external_project_id, offset_arg=None,
                    limit_arg=None):
        if limit_arg:
            offset = int(offset_arg)
            limit = int(limit_arg)
            return '/transport_keys?limit={0}&offset={1}'.format(
                limit, offset)
        else:
            return '/transport_keys'


class WhenCreatingTransKeysListUsingTransportKeysResource(FunctionalTest):
    def setUp(self):
        super(
            WhenCreatingTransKeysListUsingTransportKeysResource, self
        ).setUp()
        self.app = webtest.TestApp(app.build_wsgi_app(self.root))
        self.app.extra_environ = get_barbican_env(self.external_project_id)

    @property
    def root(self):
        self._init()

        class RootController(object):
            transport_keys = controllers.transportkeys.TransportKeysController(
                self.repo)

        return RootController()

    def _init(self):
        self.plugin_name = "default_plugin"
        self.external_project_id = 'keystoneid1234'
        self.repo = mock.MagicMock()
        self.transport_key_req = {
            'plugin_name': self.plugin_name,
            'transport_key': SAMPLE_TRANSPORT_KEY
        }

    def test_should_add_new_transport_key(self):
        resp = self.app.post_json(
            '/transport_keys/',
            self.transport_key_req
        )
        self.assertEqual(201, resp.status_int)

        # Verify the repo was handed a TransportKey entity.
        args, kwargs = self.repo.create_from.call_args
        order = args[0]
        self.assertIsInstance(order, models.TransportKey)

    def test_should_raise_add_new_transport_key_no_secret(self):
        resp = self.app.post_json(
            '/transport_keys/',
            {},
            expect_errors=True
        )
        self.assertEqual(400, resp.status_int)

    def test_should_raise_add_new_transport_key_bad_json(self):
        resp = self.app.post(
            '/transport_keys/',
            '',
            expect_errors=True,
            content_type='application/json'
        )
        self.assertEqual(400, resp.status_int)

    def test_should_raise_add_new_transport_key_no_content_type_header(self):
        resp = self.app.post(
            '/transport_keys/',
            self.transport_key_req,
            expect_errors=True,
        )
        self.assertEqual(415, resp.status_int)


class WhenGettingOrDeletingTransKeyUsingTransportKeyResource(FunctionalTest):
    def setUp(self):
        super(
            WhenGettingOrDeletingTransKeyUsingTransportKeyResource, self
        ).setUp()
        self.app = webtest.TestApp(app.build_wsgi_app(self.root))
        self.app.extra_environ = get_barbican_env(self.external_project_id)

    @property
    def root(self):
        self._init()

        class RootController(object):
            transport_keys = controllers.transportkeys.TransportKeysController(
                self.repo)

        return RootController()

    def _init(self):
        self.external_project_id = 'keystoneid1234'
        self.transport_key = SAMPLE_TRANSPORT_KEY
        self.tkey_id = "id1"
        self.tkey = create_transport_key(
            id_ref=self.tkey_id,
            plugin_name="default_plugin",
            transport_key=self.transport_key)
        self.repo = mock.MagicMock()
        self.repo.get.return_value = self.tkey

    def test_should_get_transport_key(self):
        self.app.get('/transport_keys/{0}/'.format(self.tkey.id))
        self.repo.get.assert_called_once_with(entity_id=self.tkey.id)

    def test_should_throw_exception_for_get_when_trans_key_not_found(self):
        self.repo.get.return_value = None
        resp = self.app.get(
            '/transport_keys/{0}/'.format(self.tkey.id),
            expect_errors=True
        )
        self.assertEqual(404, resp.status_int)

    def test_should_delete_transport_key(self):
        self.app.delete('/transport_keys/{0}/'.format(self.tkey.id))
        self.repo.delete_entity_by_id.assert_called_once_with(
            entity_id=self.tkey.id,
            external_project_id=self.external_project_id)

    def test_should_throw_exception_for_delete_when_trans_key_not_found(self):
        self.repo.delete_entity_by_id.side_effect = excep.NotFound(
            "Test not found exception")
        resp = self.app.delete(
            '/transport_keys/{0}/'.format(self.tkey.id),
            expect_errors=True
        )
        self.assertEqual(404, resp.status_int)
barbican-2.0.0/barbican/tests/api/controllers/0000775000567000056710000000000012701406024022431 5ustar jenkinsjenkins00000000000000barbican-2.0.0/barbican/tests/api/controllers/test_containers.py0000664000567000056710000002707412701405673026222 0ustar jenkinsjenkins00000000000000# Copyright (c) 2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import uuid

from barbican.common import config
from barbican.common import exception
from barbican.model import repositories
from barbican.tests.api.controllers import test_secrets as secret_helper
from barbican.tests import utils

containers_repo = repositories.get_container_repository()


class SuccessfulContainerCreateMixin(object):
    """Shared assertion helper for container-creation responses."""

    def _assert_successful_container_create(self, resp, container_uuid):
        self.assertEqual(201, resp.status_int)
        # this will raise if the container uuid is not proper
        uuid.UUID(container_uuid)


class WhenCreatingContainersUsingContainersResource(
        utils.BarbicanAPIBaseTestCase,
        SuccessfulContainerCreateMixin):

    def test_should_add_new_empty_container(self):
        container_name = 'test container name'
        container_type = 'generic'
        resp, container_uuid = create_container(
            self.app,
            name=container_name,
            container_type=container_type
        )
        self._assert_successful_container_create(resp, container_uuid)

        # Verify the container was actually persisted.
        container = containers_repo.get(container_uuid, self.project_id)
        self.assertEqual(container_name, container.name)
        self.assertEqual(container_type, container.type)

    def test_should_add_new_populated_container(self):
        # Create a secret first so the container can reference it.
        secret_name = 'test secret 1'
        resp, _ = secret_helper.create_secret(
            self.app,
            name=secret_name
        )
        self.assertEqual(201, resp.status_int)
        secret_ref = resp.json.get('secret_ref')

        container_name = 'test container name'
        container_type = 'generic'
        secret_refs = [
            {
                'name': secret_name,
                'secret_ref': secret_ref
            }
        ]
        resp, container_uuid = create_container(
            self.app,
            name=container_name,
            container_type=container_type,
            secret_refs=secret_refs
        )
        self._assert_successful_container_create(resp, container_uuid)

        container = containers_repo.get(container_uuid, self.project_id)
        self.assertEqual(container_name, container.name)
        self.assertEqual(container_type, container.type)

    def test_should_create_container_w_empty_name(self):
        # Name key missing
        container_type = 'generic'
        resp, container_uuid = create_container(
            self.app,
            container_type=container_type
        )
        self._assert_successful_container_create(resp, container_uuid)

        # Name key is null
        request = {
            'name': None,
            'type': container_type,
        }
        resp = self.app.post_json(
            '/containers/',
            request,
        )
        container_ref = resp.json.get('container_ref', '')
        _, container_uuid = os.path.split(container_ref)
        self._assert_successful_container_create(resp, container_uuid)

    def test_should_raise_container_bad_json(self):
        resp, container_uuid = create_container(
            self.app,
            expect_errors=True
        )
        self.assertEqual(400, resp.status_int)

    def test_should_raise_container_bad_content_type_header(self):
        resp, container_uuid = create_container(
            self.app,
            name='test container name',
            container_type='generic',
            expect_errors=True,
            headers={'Content-Type': 'bad_content_type'}
        )
        self.assertEqual(415, resp.status_int)

    def test_should_sanitize_location_from_response_header(self):
        resp, container_uuid = create_container(
            self.app,
            name='test container name',
            container_type='generic'
        )
        self._assert_successful_container_create(resp, container_uuid)
        # The project id must not leak into the Location header.
        self.assertNotIn(self.project_id, resp.headers['Location'])

    def test_should_throw_exception_when_secret_ref_doesnt_exist(self):
        config.CONF.set_override("host_href", "http://localhost:9311",
                                 enforce_type=True)
        secret_refs = [
            {
                'name': 'bad secret',
                'secret_ref': 'http://localhost:9311/secrets/does_not_exist'
            }
        ]
        resp, container_uuid = create_container(
            self.app,
            name='test container name',
            container_type='generic',
            secret_refs=secret_refs,
            expect_errors=True,
        )
        self.assertEqual(404, resp.status_int)
        config.CONF.clear_override('host_href')


class WhenGettingContainersListUsingContainersResource(
        utils.BarbicanAPIBaseTestCase,
        SuccessfulContainerCreateMixin):

    def setUp(self):
        super(WhenGettingContainersListUsingContainersResource, self).setUp()

        self.num_containers = 10
        self.offset = 2
        self.limit = 2

        self.params = {
            'offset': self.offset,
            'limit': self.limit
        }

    def _create_containers(self):
        for i in range(self.num_containers):
            resp, container_uuid = create_container(
                self.app,
                name='test container name {num}'.format(num=i),
                container_type='generic'
            )
            self._assert_successful_container_create(resp, container_uuid)

    def _create_url(self, offset_arg=None, limit_arg=None):
        if limit_arg:
            offset = int(offset_arg)
            limit = int(limit_arg)
            return '/containers?limit={limit}&offset={offset}'.format(
                limit=limit, offset=offset)
        else:
            return '/containers'

    def test_should_get_list_containers(self):
        self._create_containers()

        resp = self.app.get(
            '/containers/',
            self.params
        )
        self.assertEqual(200, resp.status_int)
        self.assertIn('previous', resp.namespace)
        self.assertIn('next', resp.namespace)

        url_nav_next = self._create_url(self.offset + self.limit, self.limit)
        self.assertEqual(1, resp.body.decode('utf-8').count(url_nav_next))

        url_nav_prev = self._create_url(0, self.limit)
        self.assertEqual(1, resp.body.decode('utf-8').count(url_nav_prev))

        # One href per page entry, plus the prev/next navigation links.
        url_hrefs = self._create_url()
        self.assertEqual((self.limit + 2),
                         resp.body.decode('utf-8').count(url_hrefs))

    def test_response_should_include_total(self):
        self._create_containers()

        resp = self.app.get(
            '/containers/',
            self.params
        )
        self.assertIn('total', resp.namespace)
        self.assertEqual(self.num_containers, resp.namespace['total'])

    def test_should_handle_no_containers(self):
        resp = self.app.get(
            '/containers/',
            self.params
        )
        self.assertEqual(0, resp.namespace['total'])
        self.assertNotIn('previous', resp.namespace)
        self.assertNotIn('next', resp.namespace)


class WhenGettingOrDeletingContainerUsingContainerResource(
        utils.BarbicanAPIBaseTestCase,
        SuccessfulContainerCreateMixin):

    def test_should_get_container(self):
        container_name = 'test container name'
        container_type = 'generic'
        resp, container_uuid = create_container(
            self.app,
            name=container_name,
            container_type=container_type
        )
        self._assert_successful_container_create(resp, container_uuid)

        resp = self.app.get('/containers/{container_id}/'.format(
            container_id=container_uuid
        ))
        self.assertEqual(200, resp.status_int)
        self.assertEqual(container_name, resp.json.get('name', ''))
        self.assertEqual(container_type, resp.json.get('type', ''))

    def test_should_delete_container(self):
        resp, container_uuid = create_container(
            self.app,
            name='test container name',
            container_type='generic'
        )
        self._assert_successful_container_create(resp, container_uuid)

        resp = self.app.delete('/containers/{container_id}/'.format(
            container_id=container_uuid
        ))
        self.assertEqual(204, resp.status_int)
        # After deletion, a repo lookup must fail.
        self.assertRaises(exception.NotFound, containers_repo.get,
                          container_uuid, self.project_id)

    def test_should_throw_exception_for_get_when_container_not_found(self):
        resp = self.app.get('/containers/bad_id/', expect_errors=True)
        self.assertEqual(404, resp.status_int)

    def test_should_throw_exception_for_delete_when_container_not_found(self):
        resp = self.app.delete('/containers/bad_id/', expect_errors=True)
        self.assertEqual(404, resp.status_int)
        # Error response should have json content type
        self.assertEqual("application/json", resp.content_type)


class WhenPerformingUnallowedOperationsOnContainers(
        utils.BarbicanAPIBaseTestCase,
        SuccessfulContainerCreateMixin):

    # Request body used for the disallowed-verb probes below.
    container_req = [
        {
            'name': 'test container name',
            'type': 'generic',
            'secret_refs': []
        }
    ]

    def test_should_not_allow_put_on_containers(self):
        resp = self.app.put_json(
            '/containers/',
            self.container_req,
            expect_errors=True
        )
        self.assertEqual(405, resp.status_int)

    def test_should_not_allow_post_on_container_by_id(self):
        resp, container_uuid = create_container(
            self.app,
            name='test container name',
            container_type='generic'
        )
        self._assert_successful_container_create(resp, container_uuid)

        resp = self.app.post_json(
            '/containers/{container_id}/'.format(container_id=container_uuid),
            self.container_req,
            expect_errors=True
        )
        self.assertEqual(405, resp.status_int)

    def test_should_not_allow_put_on_container_by_id(self):
        resp, container_uuid = create_container(
            self.app,
            name='test container name',
            container_type='generic'
        )
        self._assert_successful_container_create(resp, container_uuid)

        resp = self.app.put_json(
            '/containers/{container_id}/'.format(container_id=container_uuid),
            self.container_req,
            expect_errors=True
        )
        self.assertEqual(405, resp.status_int)


# ----------------------- Helper Functions ---------------------------
def create_container(app, name=None, container_type=None, secret_refs=None,
                     expect_errors=False, headers=None):
    """POST a container and return (response, created uuid or None)."""
    request = {
        'name': name,
        'type': container_type,
        'secret_refs': secret_refs if secret_refs else []
    }
    # Drop None-valued keys so absence can be tested distinctly from null.
    cleaned_request = {key: val for key, val in request.items()
                       if val is not None}

    resp = app.post_json(
        '/containers/',
        cleaned_request,
        expect_errors=expect_errors,
        headers=headers
    )

    created_uuid = None
    if resp.status_int == 201:
        container_ref = resp.json.get('container_ref', '')
        _, created_uuid = os.path.split(container_ref)

    return (resp, created_uuid)
barbican-2.0.0/barbican/tests/api/controllers/__init__.py0000664000567000056710000000000012701405673024541 0ustar jenkinsjenkins00000000000000barbican-2.0.0/barbican/tests/api/controllers/test_secrets.py0000664000567000056710000005572612701405674025533 0ustar jenkinsjenkins00000000000000# Copyright (c) 2015 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import os

import mock
from oslo_utils import timeutils

from barbican.common import validators
from barbican.model import models
from barbican.model import repositories
from barbican.tests import utils

project_repo = repositories.get_project_repository()
secrets_repo = repositories.get_secret_repository()
tkey_repo = repositories.get_transport_key_repository()


@utils.parameterized_test_case
class WhenTestingSecretsResource(utils.BarbicanAPIBaseTestCase):
    """Creation-path tests for the /secrets resource.

    NOTE: create_secret is a module-level helper defined later in this
    file (past this chunk).
    """

    def test_can_create_new_secret_one_step(self):
        resp, secret_uuid = create_secret(
            self.app,
            payload=b'not-encrypted',
            content_type='text/plain'
        )
        self.assertEqual(201, resp.status_int)
        self.assertIsNotNone(secret_uuid)

    def test_can_create_new_secret_without_payload(self):
        resp, secret_uuid = create_secret(self.app, name='test')
        self.assertEqual(201, resp.status_int)

        # Metadata-only secret: no encrypted data stored yet.
        secret = secrets_repo.get(secret_uuid, self.project_id)
        self.assertEqual('test', secret.name)
        self.assertEqual([], secret.encrypted_data)

    def test_can_create_new_secret_if_project_doesnt_exist(self):
        # Build new context
        new_project_context = self._build_context('test_project_id')
        self.app.extra_environ = {'barbican.context': new_project_context}

        # Create a generic secret
        resp, _ = create_secret(self.app, name='test_secret')
        self.assertEqual(201, resp.status_int)

        # Verify the new project was created
        project = project_repo.find_by_external_project_id('test_project_id')
        self.assertIsNotNone(project)

    def test_can_create_new_secret_with_payload_just_under_max(self):
        large_payload = b'A' * (validators.DEFAULT_MAX_SECRET_BYTES - 8)
        resp, _ = create_secret(
            self.app,
            payload=large_payload,
            content_type='text/plain'
        )

        self.assertEqual(201, resp.status_int)

    def test_creating_new_secret_with_oversized_payload_should_fail(self):
        oversized_payload = b'A' * (validators.DEFAULT_MAX_SECRET_BYTES + 10)
        resp, _ = create_secret(
            self.app,
            payload=oversized_payload,
            content_type='text/plain',
            expect_errors=True
        )
        # 413: Request Entity Too Large.
        self.assertEqual(413, resp.status_int)

    def test_create_new_secret_with_empty_payload_should_fail(self):
        resp, _ = create_secret(
            self.app,
            payload='',
            content_type='text/plain',
            expect_errors=True
        )
        self.assertEqual(400, resp.status_int)

    def test_expiration_should_be_normalized_with_new_secret(self):
        target_expiration = '2114-02-28 12:14:44.180394-05:00'
        resp, secret_uuid = create_secret(
            self.app,
            expiration=target_expiration
        )

        self.assertEqual(201, resp.status_int)

        # Verify that the system normalizes time to UTC
        secret = secrets_repo.get(secret_uuid, self.project_id)
        local_datetime = timeutils.parse_isotime(target_expiration)
        datetime_utc = timeutils.normalize_time(local_datetime)

        self.assertEqual(datetime_utc, secret.expiration)

    @mock.patch('barbican.plugin.resources.store_secret')
    def test_can_create_new_secret_meta_w_transport_key(self, mocked_store):
        transport_key_model = models.TransportKey('default_plugin', 'tkey1234')

        # TODO(jvrbanac): Look into removing this patch
        mocked_store.return_value = models.Secret(), transport_key_model

        # Make sure to add the transport key
        tkey_repo.create_from(transport_key_model)
        transport_key_id = transport_key_model.id

        resp, secret_uuid = create_secret(
            self.app,
            name='test',
            transport_key_needed='true'
        )
        self.assertEqual(201, resp.status_int)
        self.assertIsNotNone(secret_uuid)
        self.assertIn(transport_key_id, resp.json.get('transport_key_ref'))

    @mock.patch('barbican.plugin.resources.store_secret')
    def test_can_create_new_secret_with_transport_key(self, mocked_store):
        # TODO(jvrbanac): Look into removing this patch
        mocked_store.return_value = models.Secret(), None

        # Create Transport Key (keeping for session scoping reasons)
        transport_key_model = models.TransportKey('default_plugin', 'tkey1234')
        transport_key_id = transport_key_model.id
        tkey_repo.create_from(transport_key_model)

        # Create a normal secret with the TransportKey
        resp, secret_uuid = create_secret(
            self.app,
            payload=b'not-encrypted',
            content_type='text/plain',
            transport_key_id=transport_key_id
        )

        self.assertEqual(201, resp.status_int)

        # We're interested in the transport key values
        mocked_store.assert_called_once_with(
            unencrypted_raw='not-encrypted',
            content_type_raw='text/plain',
            content_encoding=None,
            secret_model=mock.ANY,
            project_model=mock.ANY,
            transport_key_id=transport_key_id,
            transport_key_needed=False
        )

    def test_new_secret_fails_with_invalid_transport_key_ref(self):
        resp, _ = create_secret(
            self.app,
            payload=b'superdupersecret',
            content_type='text/plain',
            transport_key_id="non_existing_transport_key_id",
            transport_key_needed="true",
            expect_errors=True
        )
        self.assertEqual(400, resp.status_int)

    def test_new_secret_w_unsupported_content_type_should_fail(self):
        resp, _ = create_secret(
            self.app,
            payload=b'something_here',
            content_type='bogus_content_type',
            expect_errors=True
        )
        self.assertEqual(400, resp.status_int)

    @utils.parameterized_dataset({
        'no_encoding': [None, 'application/octet-stream'],
        'bad_encoding': ['purple', 'application/octet-stream'],
        'no_content_type': ['base64', None]
    })
    def test_new_secret_fails_with_binary_payload_and(self, encoding=None,
                                                      content_type=None):
        resp, _ = create_secret(
            self.app,
            payload=b'lOtfqHaUUpe6NqLABgquYQ==',
            content_type=content_type,
            content_encoding=encoding,
            expect_errors=True
        )
        self.assertEqual(400, resp.status_int)

    def test_new_secret_fails_with_bad_payload(self):
        # Payload claims base64 encoding but is not valid base64.
        resp, _ = create_secret(
            self.app,
            payload='AAAAAAAAA',
            content_type='application/octet-stream',
            content_encoding='base64',
            expect_errors=True
        )
        self.assertEqual(400, resp.status_int)


class WhenGettingSecretsList(utils.BarbicanAPIBaseTestCase):
    """Listing/pagination tests for GET /secrets."""

    def test_list_secrets_by_name(self):
        # Creating a secret to be retrieved later
        create_resp, _ = create_secret(
            self.app,
            name='secret mission'
        )
        self.assertEqual(201, create_resp.status_int)

        params = {'name': 'secret mission'}

        get_resp = self.app.get('/secrets/', params)
        self.assertEqual(200, get_resp.status_int)
        secret_list = get_resp.json.get('secrets')
        self.assertEqual('secret mission', secret_list[0].get('name'))

    def test_list_secrets(self):
        # Creating a secret to be retrieved later
        create_resp, _ = create_secret(
            self.app,
            name='James Bond'
        )
        self.assertEqual(201, create_resp.status_int)

        get_resp = self.app.get('/secrets/')
        self.assertEqual(200, get_resp.status_int,)

        self.assertIn('total', get_resp.json)
        secret_list = get_resp.json.get('secrets')
        self.assertGreater(len(secret_list), 0)

    def test_pagination_attributes(self):
        # Create a list of secrets greater than default limit (10)
        for _ in range(11):
            create_resp, _ = create_secret(self.app, name='Sterling Archer')
            self.assertEqual(201, create_resp.status_int)

        params = {'limit': '2', 'offset': '2'}

        get_resp = self.app.get('/secrets/', params)
        self.assertEqual(200, get_resp.status_int)

        self.assertIn('previous', get_resp.json)
        self.assertIn('next', get_resp.json)

        previous_ref = get_resp.json.get('previous')
        next_ref = get_resp.json.get('next')

        self.assertIn('offset=0', previous_ref)
        self.assertIn('offset=4', next_ref)

    def test_empty_list_of_secrets(self):
        params = {'name': 'Austin Powers'}

        get_resp = self.app.get('/secrets/', params)
        self.assertEqual(200, get_resp.status_int)

        secret_list = get_resp.json.get('secrets')
        self.assertEqual(0, len(secret_list))

        # These should never exist in this scenario
        self.assertNotIn('previous', get_resp.json)
        self.assertNotIn('next', get_resp.json)


class WhenGettingPuttingOrDeletingSecret(utils.BarbicanAPIBaseTestCase):
    """GET/PUT content-negotiation and error-path tests for a secret."""

    def test_get_secret_as_plain(self):
        payload = 'this message will self destruct in 10 seconds'
        resp, secret_uuid = create_secret(
            self.app,
            payload=payload,
            content_type='text/plain'
        )
        self.assertEqual(201, resp.status_int)

        headers = {'Accept': 'text/plain'}
        get_resp = self.app.get(
            '/secrets/{0}'.format(secret_uuid),
            headers=headers
        )
        self.assertEqual(200, get_resp.status_int)
        self.assertEqual(payload, get_resp.body)

    def test_get_secret_payload_with_pecan_default_accept_header(self):
        payload = 'a very interesting string'
        resp, secret_uuid = create_secret(
            self.app,
            payload=payload,
            content_type='text/plain'
        )
        self.assertEqual(201, resp.status_int)

        headers = {'Accept': '*/*'}
        get_resp = self.app.get(
            '/secrets/{0}/payload'.format(secret_uuid),
            headers=headers
        )
        self.assertEqual(200, get_resp.status_int)
        self.assertEqual(payload, get_resp.body)

    def test_get_secret_payload_with_blank_accept_header(self):
        payload = 'a very interesting string'
        resp, secret_uuid = create_secret(
            self.app,
            payload=payload,
            content_type='text/plain'
        )
        self.assertEqual(201, resp.status_int)

        headers = {'Accept': ''}
        get_resp = self.app.get(
            '/secrets/{0}/payload'.format(secret_uuid),
            headers=headers
        )
        self.assertEqual(200, get_resp.status_int)
        self.assertEqual(payload, get_resp.body)

    def test_get_secret_payload_with_no_accept_header(self):
        payload = 'a very interesting string'
        resp, secret_uuid = create_secret(
            self.app,
            payload=payload,
            content_type='text/plain'
        )
        self.assertEqual(201, resp.status_int)

        headers = {}
        get_resp = self.app.get(
            '/secrets/{0}/payload'.format(secret_uuid),
            headers=headers
        )
        self.assertEqual(200, get_resp.status_int)
        self.assertEqual(payload, get_resp.body)

    def test_get_secret_is_decoded_for_binary(self):
        payload = 'a123'
        resp, secret_uuid = create_secret(
            self.app,
            payload=payload,
            content_type='application/octet-stream',
            content_encoding='base64'
        )
        headers = {
            'Accept': 'application/octet-stream',
        }
        get_resp = self.app.get(
            '/secrets/{0}'.format(secret_uuid),
            headers=headers
        )
        # 'a123' base64-decoded is the raw bytes below.
        decoded = 'k]\xb7'
        self.assertEqual(decoded, get_resp.body)

    def test_returns_404_on_get_when_not_found(self):
        get_resp = self.app.get(
            '/secrets/98c876d9-aaac-44e4-8ea8-441932962b05',
            headers={'Accept': 'application/json'},
            expect_errors=True
        )
        self.assertEqual(404, get_resp.status_int)

    def test_returns_404_on_get_with_bad_uuid(self):
        get_resp = self.app.get(
            '/secrets/98c876d9-aaac-44e4-8ea8-441932962b05X',
            headers={'Accept': 'application/json'},
            expect_errors=True
        )
        self.assertEqual(404, get_resp.status_int)

    def test_returns_406_with_get_bad_accept_header(self):
        resp, secret_uuid = create_secret(
            self.app,
            payload='blah',
            content_type='text/plain'
        )
        self.assertEqual(201, resp.status_int)

        get_resp = self.app.get(
            '/secrets/{0}'.format(secret_uuid),
            headers={'Accept': 'golden gun', 'Accept-Encoding': 'gzip'},
            expect_errors=True
        )
        self.assertEqual(406, get_resp.status_int)

    def test_put_plain_text_secret(self):
        # Two-step flow: create metadata-only, then PUT the payload.
        resp, secret_uuid = create_secret(
            self.app
        )
        self.assertEqual(201, resp.status_int)

        message = 'Babou! Serpentine!'
        put_resp = self.app.put(
            '/secrets/{0}'.format(secret_uuid),
            message,
            headers={'Content-Type': 'text/plain'}
        )
        self.assertEqual(204, put_resp.status_int)

        get_resp = self.app.get(
            '/secrets/{0}'.format(secret_uuid),
            headers={'Accept': 'text/plain'}
        )
        self.assertEqual(200, get_resp.status_int)
        self.assertEqual(message, get_resp.body)

    def test_put_binary_secret(self):
        resp, secret_uuid = create_secret(
            self.app
        )
        self.assertEqual(201, resp.status_int)

        binary_string = b'a binary string'
        put_resp = self.app.put(
            '/secrets/{0}'.format(secret_uuid),
            binary_string,
            headers={'Content-Type': 'application/octet-stream'}
        )
        self.assertEqual(204, put_resp.status_int)

        get_resp = self.app.get(
            '/secrets/{0}'.format(secret_uuid),
            headers={'Accept': 'application/octet-stream'}
        )
        self.assertEqual(200, get_resp.status_int)
        self.assertEqual(binary_string, get_resp.body)

    def test_put_base64_secret(self):
        resp, secret_uuid = create_secret(
            self.app
        )
        self.assertEqual(201, resp.status_int)

        payload = base64.b64encode('I had something for this')
        put_resp = self.app.put(
            '/secrets/{0}'.format(secret_uuid),
            payload,
            headers={
                'Content-Type': 'application/octet-stream',
                'Content-Encoding': 'base64'
            }
        )
        self.assertEqual(204, put_resp.status_int)

        get_resp = self.app.get(
            '/secrets/{0}'.format(secret_uuid),
            headers={
                'Accept': 'application/octet-stream',
                'Content-Encoding': 'base64'
            }
        )
        self.assertEqual(200, get_resp.status_int)
        self.assertEqual(base64.b64decode(payload), get_resp.body)

    def test_returns_400_with_put_unknown_encoding(self):
        resp, secret_uuid = create_secret(
            self.app
        )
        self.assertEqual(201, resp.status_int)

        payload = base64.b64encode('I had something for this')
        put_resp = self.app.put(
            '/secrets/{0}'.format(secret_uuid),
            payload,
            headers={
                'Accept': 'text/plain',
                'Content-Type': 'application/octet-stream',
                'Content-Encoding': 'unknownencoding'
            },
            expect_errors=True
        )
        self.assertEqual(400, put_resp.status_int)

    def test_returns_415_with_put_unsupported_media_type(self):
        resp, secret_uuid = create_secret(
            self.app
        )
        self.assertEqual(201, resp.status_int)

        put_resp = self.app.put(
            '/secrets/{0}'.format(secret_uuid),
            'rampage',
            headers={
                'Content-Type': 'application/json'
            },
            expect_errors=True
        )
        self.assertEqual(415, put_resp.status_int)

    def test_returns_415_with_put_no_media_type(self):
        resp, secret_uuid = create_secret(
            self.app
        )
        self.assertEqual(201, resp.status_int)

        put_resp = self.app.put(
            '/secrets/{0}'.format(secret_uuid),
            'rampage again',
            headers={
                'Content-Type': ''
            },
            expect_errors=True
        )
        self.assertEqual(415, put_resp.status_int)

    def test_returns_404_put_secret_not_found(self):
        put_resp = self.app.put(
            '/secrets/98c876d9-aaac-44e4-8ea8-441932962b05',
            'some text',
            headers={'Content-Type': 'text/plain'},
            expect_errors=True
        )
        self.assertEqual(404, put_resp.status_int)

    def test_returns_409_put_to_existing_secret(self):
        resp, secret_uuid = create_secret(
            self.app,
            payload='blah',
            content_type='text/plain'
        )
        self.assertEqual(201, resp.status_int)

        # A secret's payload may only be set once.
        put_resp = self.app.put(
            '/secrets/{0}'.format(secret_uuid),
            'do not want',
            headers={'Content-Type': 'text/plain'},
            expect_errors=True
        )
        self.assertEqual(409, put_resp.status_int)

    def test_returns_400_put_no_payload(self):
        resp, secret_uuid = create_secret(
            self.app
        )
        self.assertEqual(201, resp.status_int)

        put_resp =
self.app.put( '/secrets/{0}'.format(secret_uuid), headers={'Content-Type': 'text/plain'}, expect_errors=True ) self.assertEqual(400, put_resp.status_int) def test_returns_400_put_with_empty_payload(self): resp, secret_uuid = create_secret( self.app ) self.assertEqual(201, resp.status_int) put_resp = self.app.put( '/secrets/{0}'.format(secret_uuid), '', headers={'Content-Type': 'text/plain'}, expect_errors=True ) self.assertEqual(400, put_resp.status_int) def test_returns_413_put_with_text_too_large(self): resp, secret_uuid = create_secret( self.app ) self.assertEqual(201, resp.status_int) text_too_big = 'x' * 10050 put_resp = self.app.put( '/secrets/{0}'.format(secret_uuid), text_too_big, headers={'Content-Type': 'text/plain'}, expect_errors=True ) self.assertEqual(413, put_resp.status_int) def test_delete_secret(self): resp, secret_uuid = create_secret( self.app ) self.assertEqual(201, resp.status_int) delete_resp = self.app.delete( '/secrets/{0}/'.format(secret_uuid) ) self.assertEqual(204, delete_resp.status_int) def test_raise_404_for_delete_secret_not_found(self): delete_resp = self.app.delete( '/secrets/98c876d9-aaac-44e4-8ea8-441932962b05', expect_errors=True ) self.assertEqual(404, delete_resp.status_int) self.assertEqual('application/json', delete_resp.content_type) def test_delete_with_json_accept_header(self): resp, secret_uuid = create_secret( self.app ) self.assertEqual(201, resp.status_int) delete_resp = self.app.delete( '/secrets/{0}/'.format(secret_uuid), headers={'Accept': 'application/json'} ) self.assertEqual(204, delete_resp.status_int) @utils.parameterized_test_case class WhenPerformingUnallowedOperations(utils.BarbicanAPIBaseTestCase): def test_returns_405_for_put_json_on_secrets(self): test_json = { 'name': 'Barry', 'algorithm': 'AES', 'bit_length': 256, 'mode': 'CBC' } resp = self.app.put_json( '/secrets/', test_json, expect_errors=True ) self.assertEqual(405, resp.status_int) def test_returns_405_for_delete_on_secrets(self): resp = 
self.app.delete( '/secrets/', expect_errors=True ) self.assertEqual(405, resp.status_int) def test_returns_405_for_get_payload(self): created_resp, secret_uuid = create_secret( self.app ) resp = self.app.post( '/secrets/{0}/payload'.format(secret_uuid), 'Do you want ants? This is how you get ants!', headers={'Content-Type': 'text/plain'}, expect_errors=True ) self.assertEqual(405, resp.status_int) @utils.parameterized_dataset({ 'delete': ['delete'], 'put': ['put'], 'post': ['post'] }) def test_returns_405_for_calling_secret_payload_uri_with( self, http_verb=None ): created_resp, secret_uuid = create_secret( self.app ) self.assertEqual(201, created_resp.status_int) operation = getattr(self.app, http_verb) resp = operation( '/secrets/{0}/payload'.format(secret_uuid), 'boop', expect_errors=True ) self.assertEqual(405, resp.status_int) # ----------------------- Helper Functions --------------------------- def create_secret(app, name=None, algorithm=None, bit_length=None, mode=None, expiration=None, payload=None, content_type=None, content_encoding=None, transport_key_id=None, transport_key_needed=None, expect_errors=False): # TODO(chellygel): Once test resources is split out, refactor this # and similar functions into a generalized helper module and reduce # duplication. 
request = { 'name': name, 'algorithm': algorithm, 'bit_length': bit_length, 'mode': mode, 'expiration': expiration, 'payload': payload, 'payload_content_type': content_type, 'payload_content_encoding': content_encoding, 'transport_key_id': transport_key_id, 'transport_key_needed': transport_key_needed } cleaned_request = {key: val for key, val in request.items() if val is not None} resp = app.post_json( '/secrets/', cleaned_request, expect_errors=expect_errors ) created_uuid = None if resp.status_int == 201: secret_ref = resp.json.get('secret_ref', '') _, created_uuid = os.path.split(secret_ref) return (resp, created_uuid) barbican-2.0.0/barbican/tests/api/controllers/test_acls.py0000664000567000056710000013052012701405673024776 0ustar jenkinsjenkins00000000000000# Copyright (c) 2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import os import uuid from barbican.api.controllers import acls from barbican.model import repositories from barbican.tests.api import test_resources_policy as test_policy from barbican.tests import utils class TestACLsWithContextMixin(test_policy.BaseTestCase): """Mixin for performing common acls operation used with policy logic.""" def _create_secret_with_creator_user(self, app, creator_user_id): # define creator user for new secret entry. 
app.extra_environ = { 'barbican.context': self._build_context(self.project_id, user=creator_user_id) } secret_id, _ = create_secret(app) return secret_id def _create_container_with_creator_user(self, app, creator_user_id): # define creator user for new container entry. app.extra_environ = { 'barbican.context': self._build_context(self.project_id, user=creator_user_id) } container_id, _ = create_container(app) return container_id def _set_acls_with_context(self, app, entity_type=None, op_type=None, entity_id=None, roles=None, user=None, enforce_policy=True, expect_errors=False): """Perform acl create/update/delete operation with policy logic. Before performing acl create/update/delete, provided input is used for setting custom barbican context. Operation is done under policy enforcement logic. """ policy_enforcer = self.policy_enforcer if enforce_policy else None app.extra_environ = { 'barbican.context': self._build_context( self.project_id, roles=roles, user=user, is_admin=False, policy_enforcer=policy_enforcer) } resp = None if op_type == 'create': resp = create_acls(app, entity_type, entity_id, read_user_ids=['u1', 'u2'], expect_errors=expect_errors) elif op_type == 'update': resp = update_acls(app, entity_type, entity_id, read_user_ids=['u1', 'u2'], partial_update=True, expect_errors=expect_errors) elif op_type == 'delete': resp = app.delete('/{0}/{1}/acl'.format(entity_type, entity_id), expect_errors=expect_errors) return resp class WhenTestingSecretACLsResource(utils.BarbicanAPIBaseTestCase, TestACLsWithContextMixin): def test_can_create_new_secret_acls(self): """Create secret acls and compare stored values with request data.""" secret_uuid, _ = create_secret(self.app) resp = create_acls( self.app, 'secrets', secret_uuid, read_user_ids=['u1', 'u2']) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertIn('/secrets/{0}/acl'.format(secret_uuid), resp.json['acl_ref']) acl_map = _get_acl_map(secret_uuid, is_secret=True) # Check 
project_access is True when not provided self.assertTrue(acl_map['read']['project_access']) def test_who_can_create_new_secret_acls(self): """Test who can create new secret ACLs as per policy rules. New secret ACLs can be created by user who created the secret. Other user with 'creator' role in secret project cannot create ACL if user is not creator of the secret. User with 'admin' role in secret project can create ACL for that secret. """ creator_user_id = 'creatorUserId' secret_uuid = self._create_secret_with_creator_user( self.app, creator_user_id) secret_uuid2 = self._create_secret_with_creator_user( self.app, creator_user_id) resp = self._set_acls_with_context( self.app, entity_type='secrets', op_type='create', entity_id=secret_uuid, roles=['creator'], user='NotSecretCreator', expect_errors=True) self.assertEqual(403, resp.status_int) resp = self._set_acls_with_context( self.app, entity_type='secrets', op_type='create', entity_id=secret_uuid, roles=['creator'], user=creator_user_id, expect_errors=False) self.assertEqual(200, resp.status_int) # test for user with 'admin' role in secret project resp = self._set_acls_with_context( self.app, entity_type='secrets', op_type='create', entity_id=secret_uuid2, roles=['admin'], user='AdminUser', expect_errors=False) self.assertEqual(200, resp.status_int) def test_create_new_secret_acls_with_project_access_false(self): """Should allow creating acls for a new secret with project-access.""" secret_uuid, _ = create_secret(self.app) resp = create_acls( self.app, 'secrets', secret_uuid, read_project_access=False) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertIn('/secrets/{0}/acl'.format(secret_uuid), resp.json['acl_ref']) acl_map = _get_acl_map(secret_uuid, is_secret=True) self.assertFalse(acl_map['read']['project_access']) def test_new_secret_acls_with_invalid_project_access_value_should_fail( self): """Should fail if project-access flag is provided as string value.""" secret_uuid, _ = 
create_secret(self.app) resp = create_acls( self.app, 'secrets', secret_uuid, read_project_access="False", read_user_ids=['u1', 'u3', 'u4'], expect_errors=True) self.assertEqual(400, resp.status_int) resp = create_acls( self.app, 'secrets', secret_uuid, read_project_access="None", expect_errors=True) self.assertEqual(400, resp.status_int) def test_get_secret_acls_with_complete_acl_data(self): """Read existing acls for a with complete acl data.""" secret_id, _ = create_secret(self.app) create_acls( self.app, 'secrets', secret_id, read_user_ids=['u1', 'u3'], read_project_access=False) resp = self.app.get( '/secrets/{0}/acl'.format(secret_id), expect_errors=False) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertIn('read', resp.json) self.assertFalse(resp.json['read']['project-access']) self.assertIsNotNone(resp.json['read']['created']) self.assertIsNotNone(resp.json['read']['updated']) self.assertEqual(set(['u1', 'u3']), set(resp.json['read']['users'])) def test_get_secret_acls_with_project_access_data(self): """Read existing acls for acl when only project-access flag is set.""" secret_id, _ = create_secret(self.app) create_acls( self.app, 'secrets', secret_id, read_project_access=False) resp = self.app.get( '/secrets/{0}/acl'.format(secret_id), expect_errors=False) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertEqual([], resp.json['read']['users']) self.assertFalse(resp.json['read']['project-access']) self.assertIsNotNone(resp.json['read']['created']) self.assertIsNotNone(resp.json['read']['updated']) def test_get_secret_acls_invalid_secret_should_fail(self): """Get secret acls should fail for invalid secret id. This test applies to all secret ACLs methods as secret entity is populated in same manner for get, put, patch, delete methods. 
""" secret_id, _ = create_secret(self.app) create_acls( self.app, 'secrets', secret_id, read_project_access=True, read_user_ids=['u1', 'u3', 'u4']) resp = self.app.get( '/secrets/{0}/acl'.format(uuid.uuid4().hex), expect_errors=True) self.assertEqual(404, resp.status_int) def test_get_secret_acls_no_acls_defined_return_default_acl(self): """Get secret acls should pass when no acls defined for a secret.""" secret_id, _ = create_secret(self.app) resp = self.app.get( '/secrets/{0}/acl'.format(secret_id), expect_errors=True) self.assertEqual(200, resp.status_int) self.assertEqual(acls.DEFAULT_ACL, resp.json) def test_get_secret_acls_with_incorrect_uri_should_fail(self): """Get secret acls should fail when no acls defined for a secret.""" secret_id, _ = create_secret(self.app) resp = self.app.get( '/secrets/{0}/incorrect_acls'.format(secret_id), expect_errors=True) self.assertEqual(405, resp.status_int) def test_full_update_secret_acls_modify_project_access_value(self): """ACLs full update with userids where project-access flag modified.""" secret_uuid, _ = create_secret(self.app) create_acls( self.app, 'secrets', secret_uuid, read_user_ids=['u1', 'u2'], read_project_access=False) # update acls with no user input so it should delete existing users resp = update_acls( self.app, 'secrets', secret_uuid, partial_update=False, read_project_access=True) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertIn('/secrets/{0}/acl'.format(secret_uuid), resp.json['acl_ref']) acl_map = _get_acl_map(secret_uuid, is_secret=True) self.assertTrue(acl_map['read']['project_access']) self.assertIsNone(acl_map['read'].to_dict_fields().get('users')) def test_full_update_secret_acls_modify_users_only(self): """ACLs full update where specific operation acl is modified.""" secret_uuid, _ = create_secret(self.app) create_acls( self.app, 'secrets', secret_uuid, read_user_ids=['u1', 'u2'], read_project_access=False) resp = update_acls( self.app, 'secrets', secret_uuid, 
partial_update=False, read_user_ids=['u1', 'u3', 'u5']) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertIn('/secrets/{0}/acl'.format(secret_uuid), resp.json['acl_ref']) acl_map = _get_acl_map(secret_uuid, is_secret=True) self.assertTrue(acl_map['read']['project_access']) self.assertNotIn('u2', acl_map['read'].to_dict_fields()['users']) self.assertEqual(set(['u1', 'u3', 'u5']), set(acl_map['read'].to_dict_fields()['users'])) def test_full_update_secret_acls_with_read_users_only(self): """Acls full update where specific operation acl is modified.""" secret_uuid, _ = create_secret(self.app) create_acls( self.app, 'secrets', secret_uuid, read_user_ids=['u1', 'u2']) acl_map = _get_acl_map(secret_uuid, is_secret=True) # ACL api does not support 'list' operation so making direct db update # in acl operation data to make sure full update removes this existing # ACL. secret_acl = acl_map['read'] secret_acl.operation = 'list' secret_acl.save() acl_map = _get_acl_map(secret_uuid, is_secret=True) # check 'list' operation is there in db self.assertIn('list', acl_map) resp = update_acls( self.app, 'secrets', secret_uuid, partial_update=False, read_user_ids=['u1', 'u3', 'u5']) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertIn('/secrets/{0}/acl'.format(secret_uuid), resp.json['acl_ref']) acl_map = _get_acl_map(secret_uuid, is_secret=True) # make sure 'list' operation is no longer after full update self.assertNotIn('list', acl_map) self.assertTrue(acl_map['read']['project_access']) self.assertEqual(set(['u1', 'u3', 'u5']), set(acl_map['read'].to_dict_fields()['users'])) self.assertNotIn('u2', acl_map['read'].to_dict_fields()['users']) def test_partial_update_secret_acls_with_read_users_only(self): """Acls update where specific operation acl is modified.""" secret_uuid, _ = create_secret(self.app) create_acls( self.app, 'secrets', secret_uuid, read_user_ids=['u1', 'u2']) acl_map = _get_acl_map(secret_uuid, 
is_secret=True) secret_acl = acl_map['read'] secret_acl.operation = 'list' secret_acl.save() acl_map = _get_acl_map(secret_uuid, is_secret=True) # check 'list' operation is there in db self.assertIn('list', acl_map) resp = update_acls( self.app, 'secrets', secret_uuid, partial_update=True, read_user_ids=['u1', 'u3', 'u5']) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertIn('/secrets/{0}/acl'.format(secret_uuid), resp.json['acl_ref']) acl_map = _get_acl_map(secret_uuid, is_secret=True) # For partial update, existing other operation ACL is not tocuhed. self.assertIn('list', acl_map) self.assertEqual(set(['u1', 'u2']), set(acl_map['list'].to_dict_fields()['users'])) self.assertTrue(acl_map['read']['project_access']) self.assertEqual(set(['u1', 'u3', 'u5']), set(acl_map['read'].to_dict_fields()['users'])) def test_partial_update_secret_acls_when_no_acls_defined_should_pass(self): """Acls partial update pass when no acls are defined for a secret. Partial update (PATCH) is applicable even when no explicit ACL has been set as by default every secret has implicit acl definition. If PUT is used, then new ACL is created instead. """ secret_id, _ = create_secret(self.app) resp = update_acls( self.app, 'secrets', secret_id, partial_update=True, read_user_ids=['u1', 'u3', 'u5'], expect_errors=False) self.assertEqual(200, resp.status_int) acl_map = _get_acl_map(secret_id, is_secret=True) self.assertTrue(acl_map['read']['project_access']) def test_who_can_update_secret_acls(self): """Test PATCH update existing secret ACLs as per policy rules. Existing secret ACLs can be updated by user who created the secret. Other user with 'creator' role in secret project cannot update ACL if user is not creator of the secret. User with 'admin' role in secret project can update ACL for that secret. 
""" creator_user_id = 'creatorUserId' secret_uuid = self._create_secret_with_creator_user( self.app, creator_user_id) self._set_acls_with_context( self.app, entity_type='secrets', op_type='create', entity_id=secret_uuid, enforce_policy=False) resp = self._set_acls_with_context( self.app, entity_type='secrets', op_type='update', entity_id=secret_uuid, roles=['creator'], user='NotSecretCreator', expect_errors=True) self.assertEqual(403, resp.status_int) resp = self._set_acls_with_context( self.app, entity_type='secrets', op_type='update', entity_id=secret_uuid, roles=['creator'], user=creator_user_id) self.assertEqual(200, resp.status_int) # test for user with 'admin' role in secret project resp = self._set_acls_with_context( self.app, entity_type='secrets', op_type='update', entity_id=secret_uuid, roles=['admin'], user='AdminUser') self.assertEqual(200, resp.status_int) def test_partial_update_secret_acls_modify_project_access_values(self): """Acls partial update where project-access flag is modified.""" secret_uuid, _ = create_secret(self.app) create_acls( self.app, 'secrets', secret_uuid, read_user_ids=['u1', 'u2'], read_project_access=False) resp = update_acls( self.app, 'secrets', secret_uuid, partial_update=True, read_project_access=True) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertIn('/secrets/{0}/acl'.format(secret_uuid), resp.json['acl_ref']) acl_map = _get_acl_map(secret_uuid, is_secret=True) self.assertTrue(acl_map['read']['project_access']) self.assertEqual(set(['u1', 'u2']), set(acl_map['read'].to_dict_fields()['users'])) def test_delete_secret_acls_with_valid_secret_id(self): """Delete existing acls for a given secret.""" secret_id, _ = create_secret(self.app) create_acls( self.app, 'secrets', secret_id, read_project_access=True) resp = self.app.delete( '/secrets/{0}/acl'.format(secret_id), expect_errors=False) content = resp.json self.assertIsNone(content) # make sure there is no response self.assertEqual(200, 
resp.status_int) acl_map = _get_acl_map(secret_id, is_secret=True) self.assertFalse(acl_map) def test_delete_secret_acls_no_acl_defined_should_pass(self): """Delete acls should pass when no acls are defined for a secret.""" secret_id, _ = create_secret(self.app) resp = self.app.delete( '/secrets/{0}/acl'.format(secret_id), expect_errors=False) self.assertEqual(200, resp.status_int) def test_who_can_delete_secret_acls(self): """Test who can delete existing secret ACLs as per policy rules. Existing secret ACLs can be deleted by user who created the secret. Other user with 'creator' role in secret project cannot delete ACL if user is not creator of the secret. User with 'admin' role in secret project can delete ACL for that secret. """ creator_user_id = 'creatorUserId' secret_uuid = self._create_secret_with_creator_user( self.app, creator_user_id) self._set_acls_with_context( self.app, entity_type='secrets', op_type='create', entity_id=secret_uuid, enforce_policy=False) resp = self._set_acls_with_context( self.app, entity_type='secrets', op_type='delete', entity_id=secret_uuid, roles=['creator'], user='NotSecretCreator', expect_errors=True) self.assertEqual(403, resp.status_int) resp = self._set_acls_with_context( self.app, entity_type='secrets', op_type='delete', entity_id=secret_uuid, roles=['creator'], user=creator_user_id) self.assertEqual(200, resp.status_int) # Create new secret ACLs again. 
self._set_acls_with_context( self.app, entity_type='secrets', op_type='create', entity_id=secret_uuid, enforce_policy=False) # test for user with 'admin' role in secret project resp = self._set_acls_with_context( self.app, entity_type='secrets', op_type='delete', entity_id=secret_uuid, roles=['admin'], user='AdminUser') self.assertEqual(200, resp.status_int) def test_invoke_secret_acls_head_should_fail(self): """Should fail as put request to secret acls URI is not supported.""" secret_id, _ = create_secret(self.app) resp = self.app.head( '/secrets/{0}/acl'.format(secret_id), expect_errors=True) self.assertEqual(405, resp.status_int) def test_list_secrets_with_no_acls_and_acl_only_should_be_empty(self): """Return list should be empty""" creator_user_id = 'creatorUserID' self._create_secret_with_creator_user( self.app, creator_user_id) resp = self.app.get( '/secrets/?acl_only=TRUE') self.assertEqual(200, resp.status_int) self.assertEqual([], resp.json['secrets']) def test_list_secrets_with_acls(self): """Return List should not include secrets with no ACL for user""" creator_user_id = 'creatorUserID' secret_uuid_acl_1 = self._create_secret_with_creator_user( self.app, creator_user_id) secret_uuid_acl_2 = self._create_secret_with_creator_user( self.app, creator_user_id) secret_uuid_no_acl = self._create_secret_with_creator_user( self.app, creator_user_id) create_acls( self.app, 'secrets', secret_uuid_acl_1, read_user_ids=[creator_user_id], read_project_access=False) create_acls( self.app, 'secrets', secret_uuid_acl_2, read_user_ids=[creator_user_id], read_project_access=False) resp = self.app.get( '/secrets/?acl_only=TrUe') self.assertEqual(200, resp.status_int) secret_list = resp.json.get('secrets') self.assertEqual(2, len(secret_list)) self.assertNotIn(secret_uuid_no_acl, secret_list) class WhenTestingContainerAclsResource(utils.BarbicanAPIBaseTestCase, TestACLsWithContextMixin): def test_can_create_new_container_acls(self): """Create container acls and compare db 
values with request data.""" container_id, _ = create_container(self.app) resp = create_acls( self.app, 'containers', container_id, read_user_ids=['u1', 'u2']) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertIn('/containers/{0}/acl'.format(container_id), resp.json['acl_ref']) acl_map = _get_acl_map(container_id, is_secret=False) # Check project_access is True when not provided self.assertTrue(acl_map['read']['project_access']) self.assertEqual(set(['u1', 'u2']), set(acl_map['read'].to_dict_fields()['users'])) def test_who_can_create_new_container_acls(self): """Test who can create new container ACLs as per policy rules. New container ACLs can be created by user who created the container. Other user with 'creator' role in container project cannot create ACL if user is not creator of the container. User with 'admin' role in container project can create ACL for that container. """ creator_user_id = 'creatorUserId' container_id = self._create_container_with_creator_user( self.app, creator_user_id) container_id2 = self._create_container_with_creator_user( self.app, creator_user_id) resp = self._set_acls_with_context( self.app, entity_type='containers', op_type='create', entity_id=container_id, roles=['creator'], user='NotContainerCreator', expect_errors=True) self.assertEqual(403, resp.status_int) resp = self._set_acls_with_context( self.app, entity_type='containers', op_type='create', entity_id=container_id, roles=['creator'], user=creator_user_id, expect_errors=False) self.assertEqual(200, resp.status_int) # test for user with 'admin' role in container project resp = self._set_acls_with_context( self.app, entity_type='containers', op_type='create', entity_id=container_id2, roles=['admin'], user='AdminUser', expect_errors=False) self.assertEqual(200, resp.status_int) def test_create_new_container_acls_with_project_access_true(self): """Should allow creating acls for new container with project-access.""" container_id, _ = 
create_container(self.app) resp = create_acls( self.app, 'containers', container_id, read_project_access=True, read_user_ids=['u1', 'u3', 'u4']) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertIn('/containers/{0}/acl'.format(container_id), resp.json['acl_ref']) acl_map = _get_acl_map(container_id, is_secret=False) self.assertTrue(acl_map['read']['project_access']) def test_create_new_container_acls_with_project_access_false(self): """Should allow creating acls for new container with project-access.""" container_id, _ = create_container(self.app) resp = create_acls( self.app, 'containers', container_id, read_project_access=False, read_user_ids=['u1', 'u3', 'u4']) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertIn('/containers/{0}/acl'.format(container_id), resp.json['acl_ref']) acl_map = _get_acl_map(container_id, is_secret=False) self.assertFalse(acl_map['read']['project_access']) def test_container_acls_with_invalid_project_access_value_fail(self): """Should fail if project-access flag is provided as string value.""" container_id, _ = create_container(self.app) resp = create_acls( self.app, 'containers', container_id, read_project_access="False", read_user_ids=['u1', 'u3', 'u4'], expect_errors=True) self.assertEqual(400, resp.status_int) resp = create_acls( self.app, 'containers', container_id, read_project_access="None", expect_errors=True) self.assertEqual(400, resp.status_int) def test_get_container_acls_with_complete_acl_data(self): """Read existing acls for a with complete acl data.""" container_id, _ = create_container(self.app) create_acls( self.app, 'containers', container_id, read_user_ids=['u1', 'u3'], read_project_access=False) resp = self.app.get( '/containers/{0}/acl'.format(container_id), expect_errors=False) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertIn('read', resp.json) self.assertFalse(resp.json['read']['project-access']) 
self.assertIsNotNone(resp.json['read']['created']) self.assertIsNotNone(resp.json['read']['updated']) self.assertEqual(set(['u1', 'u3']), set(resp.json['read']['users'])) def test_get_container_acls_with_project_access_data(self): """Read existing acls for acl when only project-access flag is set.""" container_id, _ = create_container(self.app) create_acls( self.app, 'containers', container_id, read_project_access=False) resp = self.app.get( '/containers/{0}/acl'.format(container_id), expect_errors=False) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertEqual([], resp.json['read']['users']) self.assertFalse(resp.json['read']['project-access']) self.assertIsNotNone(resp.json['read']['created']) self.assertIsNotNone(resp.json['read']['updated']) def test_get_container_acls_invalid_container_id_should_fail(self): """Get container acls should fail for invalid secret id. This test applies to all container ACLs methods as secret entity is populated in same manner for get, put, patch, delete methods. 
""" container_id, _ = create_container(self.app) create_acls( self.app, 'containers', container_id, read_project_access=True) resp = self.app.get( '/containers/{0}/acl'.format(uuid.uuid4().hex), expect_errors=True) self.assertEqual(404, resp.status_int) def test_get_container_acls_invalid_non_uuid_secret_should_fail(self): """Get container acls should fail for invalid (non-uuid) id.""" container_id, _ = create_container(self.app) create_acls( self.app, 'containers', container_id, read_project_access=True) resp = self.app.get( '/containers/{0}/acl'.format('my_container_id'), expect_errors=True) self.assertEqual(404, resp.status_int) def test_get_container_acls_no_acls_defined_return_default_acl(self): """Get container acls should pass when no acls defined for a secret.""" container_id, _ = create_container(self.app) resp = self.app.get( '/containers/{0}/acl'.format(container_id), expect_errors=True) self.assertEqual(200, resp.status_int) self.assertEqual(acls.DEFAULT_ACL, resp.json) def test_full_update_container_acls_modify_all_acls(self): """Acls update where only user ids list is modified.""" container_id, _ = create_container(self.app) create_acls( self.app, 'containers', container_id, read_project_access=False, read_user_ids=['u1', 'u2']) resp = update_acls( self.app, 'containers', container_id, partial_update=False, read_user_ids=['u1', 'u2', 'u5']) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertIn('/containers/{0}/acl'.format(container_id), resp.json['acl_ref']) acl_map = _get_acl_map(container_id, is_secret=False) # Check project_access is True when not provided self.assertTrue(acl_map['read']['project_access']) self.assertIn('u5', acl_map['read'].to_dict_fields()['users']) def test_full_update_container_acls_modify_project_access_values(self): """Acls update where user ids and project-access flag is modified.""" container_id, _ = create_container(self.app) create_acls( self.app, 'containers', container_id, 
read_user_ids=['u1', 'u2']) resp = update_acls( self.app, 'containers', container_id, partial_update=False, read_project_access=False) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertIn('/containers/{0}/acl'.format(container_id), resp.json['acl_ref']) acl_map = _get_acl_map(container_id, is_secret=False) self.assertFalse(acl_map['read']['project_access']) self.assertIsNone(acl_map['read'].to_dict_fields().get('users')) def test_full_update_container_acls_with_read_users_only(self): """Acls full update where specific operation acl is modified.""" container_id, _ = create_container(self.app) create_acls( self.app, 'containers', container_id, read_user_ids=['u1', 'u2']) acl_map = _get_acl_map(container_id, is_secret=False) # ACL api does not support 'list' operation so making direct db update # in acl operation data to make sure full update removes this existing # ACL. container_acl = acl_map['read'] container_acl.operation = 'list' container_acl.save() acl_map = _get_acl_map(container_id, is_secret=False) # check 'list' operation is there in db self.assertIn('list', acl_map) resp = update_acls( self.app, 'containers', container_id, partial_update=False, read_user_ids=['u1', 'u3', 'u5']) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertIn('/containers/{0}/acl'.format(container_id), resp.json['acl_ref']) acl_map = _get_acl_map(container_id, is_secret=False) # make sure 'list' operation is no longer after full update self.assertNotIn('list', acl_map) self.assertTrue(acl_map['read']['project_access']) self.assertEqual(set(['u1', 'u3', 'u5']), set(acl_map['read'].to_dict_fields()['users'])) self.assertNotIn('u2', acl_map['read'].to_dict_fields()['users']) def test_partial_update_container_acls_with_read_users_only(self): """Acls update where specific operation acl is modified.""" container_id, _ = create_container(self.app) create_acls( self.app, 'containers', container_id, read_user_ids=['u1', 'u2']) 
acl_map = _get_acl_map(container_id, is_secret=False) secret_acl = acl_map['read'] secret_acl.operation = 'list' secret_acl.save() acl_map = _get_acl_map(container_id, is_secret=False) # check 'list' operation is there in db self.assertIn('list', acl_map) resp = update_acls( self.app, 'containers', container_id, partial_update=True, read_user_ids=['u1', 'u3', 'u5']) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertIn('/containers/{0}/acl'.format(container_id), resp.json['acl_ref']) acl_map = _get_acl_map(container_id, is_secret=False) # For partial update, existing other operation ACL is not tocuhed. self.assertIn('list', acl_map) self.assertEqual(set(['u1', 'u2']), set(acl_map['list'].to_dict_fields()['users'])) self.assertTrue(acl_map['read']['project_access']) self.assertEqual(set(['u1', 'u3', 'u5']), set(acl_map['read'].to_dict_fields()['users'])) def test_partial_update_container_acls_when_no_acls_defined(self): """Acls partial update pass when no acls are defined for container. Partial update (PATCH) is applicable even when no explicit ACL has been set as by default every container has implicit acl definition. If PUT is used, then new ACL is created instead. 
""" container_id, _ = create_container(self.app) resp = update_acls( self.app, 'containers', container_id, partial_update=True, read_user_ids=['u1', 'u3', 'u5'], expect_errors=False) self.assertEqual(200, resp.status_int) acl_map = _get_acl_map(container_id, is_secret=False) self.assertTrue(acl_map['read']['project_access']) def test_partial_update_container_acls_modify_project_access_values(self): """Acls partial update where project-access flag is modified.""" container_id, _ = create_container(self.app) create_acls( self.app, 'containers', container_id, read_user_ids=['u1', 'u2'], read_project_access=False) resp = update_acls( self.app, 'containers', container_id, partial_update=True, read_project_access=True) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.json) self.assertIn('/containers/{0}/acl'.format(container_id), resp.json['acl_ref']) acl_map = _get_acl_map(container_id, is_secret=False) self.assertTrue(acl_map['read']['project_access']) self.assertEqual(set(['u1', 'u2']), set(acl_map['read'].to_dict_fields()['users'])) def test_who_can_update_container_acls(self): """Test PATCH update existing container ACLs as per policy rules. Existing container ACLs can be updated by user who created the container. Other user with 'creator' role in container project cannot update ACL if user is not creator of the container. User with 'admin' role in container project can update ACL for that container. 
""" creator_user_id = 'creatorUserId' container_id = self._create_container_with_creator_user( self.app, creator_user_id) self._set_acls_with_context( self.app, entity_type='containers', op_type='create', entity_id=container_id, enforce_policy=False) resp = self._set_acls_with_context( self.app, entity_type='containers', op_type='update', entity_id=container_id, roles=['creator'], user='NotCreator', expect_errors=True) self.assertEqual(403, resp.status_int) resp = self._set_acls_with_context( self.app, entity_type='containers', op_type='update', entity_id=container_id, roles=['creator'], user=creator_user_id) self.assertEqual(200, resp.status_int) # test for user with 'admin' role in container project resp = self._set_acls_with_context( self.app, entity_type='containers', op_type='update', entity_id=container_id, roles=['admin'], user='AdminUser') self.assertEqual(200, resp.status_int) def test_delete_container_acls_with_valid_container_id(self): """Delete existing acls for a given container.""" container_id, _ = create_container(self.app) create_acls( self.app, 'containers', container_id, read_project_access=True) resp = self.app.delete( '/containers/{0}/acl'.format(container_id), expect_errors=False) content = resp.json self.assertIsNone(content) # make sure there is no response self.assertEqual(200, resp.status_int) acl_map = _get_acl_map(container_id, is_secret=False) self.assertFalse(acl_map) def test_delete_container_acls_no_acl_defined_should_pass(self): """Delete acls should pass when no acls are defined for a container.""" container_id, _ = create_container(self.app) resp = self.app.delete( '/containers/{0}/acl'.format(container_id), expect_errors=False) self.assertEqual(200, resp.status_int) def test_who_can_delete_container_acls(self): """Test who can delete existing container ACLs as per policy rules. Existing container ACLs can be deleted by user who created the container. 
Other user with 'creator' role in container project cannot delete ACL if user is not creator of the container. User with 'admin' role in container project can delete ACL for that container. """ creator_user_id = 'creatorUserId' container_id = self._create_container_with_creator_user( self.app, creator_user_id) self._set_acls_with_context( self.app, entity_type='containers', op_type='create', entity_id=container_id, enforce_policy=False) resp = self._set_acls_with_context( self.app, entity_type='containers', op_type='delete', entity_id=container_id, roles=['creator'], user='NotCreator', expect_errors=True) self.assertEqual(403, resp.status_int) resp = self._set_acls_with_context( self.app, entity_type='containers', op_type='delete', entity_id=container_id, roles=['creator'], user=creator_user_id) self.assertEqual(200, resp.status_int) # Create new container ACLs again. self._set_acls_with_context( self.app, entity_type='containers', op_type='create', entity_id=container_id, enforce_policy=False) # test for user with 'admin' role in container project resp = self._set_acls_with_context( self.app, entity_type='containers', op_type='delete', entity_id=container_id, roles=['admin'], user='AdminUser') self.assertEqual(200, resp.status_int) def test_invoke_container_acls_head_should_fail(self): """PUT request to container acls URI is not supported.""" container_id, _ = create_container(self.app) resp = self.app.head( '/containers/{0}/acl/'.format(container_id), expect_errors=True) self.assertEqual(405, resp.status_int) # ----------------------- Helper Functions --------------------------- def create_secret(app, name=None, algorithm=None, bit_length=None, mode=None, expiration=None, payload='not-encrypted', content_type='text/plain', content_encoding=None, transport_key_id=None, transport_key_needed=None, expect_errors=False): request = { 'name': name, 'algorithm': algorithm, 'bit_length': bit_length, 'mode': mode, 'expiration': expiration, 'payload': payload, 
'payload_content_type': content_type, 'payload_content_encoding': content_encoding, 'transport_key_id': transport_key_id, 'transport_key_needed': transport_key_needed } cleaned_request = {key: val for key, val in request.items() if val is not None} resp = app.post_json( '/secrets/', cleaned_request, expect_errors=expect_errors ) created_uuid = None if resp.status_int == 201: secret_ref = resp.json.get('secret_ref', '') _, created_uuid = os.path.split(secret_ref) return created_uuid, resp def create_container(app): _, resp = create_secret(app) secret_ref = resp.json['secret_ref'] request = { "name": "container name", "type": "generic", "secret_refs": [ { "name": "any_key", "secret_ref": secret_ref } ] } resp = app.post_json( '/containers/', request, expect_errors=False ) created_uuid = None if resp.status_int == 201: container_ref = resp.json.get('container_ref', '') _, created_uuid = os.path.split(container_ref) return created_uuid, resp def create_acls(app, entity_type, entity_id, read_user_ids=None, read_project_access=None, expect_errors=False): return manage_acls(app, entity_type, entity_id, read_user_ids=read_user_ids, read_project_access=read_project_access, is_update=False, partial_update=False, expect_errors=expect_errors) def update_acls(app, entity_type, entity_id, read_user_ids=None, read_project_access=None, partial_update=False, expect_errors=False): return manage_acls(app, entity_type, entity_id, read_user_ids=read_user_ids, read_project_access=read_project_access, is_update=True, partial_update=partial_update, expect_errors=expect_errors) def manage_acls(app, entity_type, entity_id, read_user_ids=None, read_project_access=None, is_update=False, partial_update=None, expect_errors=False): request = {} _append_acl_to_request(request, 'read', read_user_ids, read_project_access) cleaned_request = {key: val for key, val in request.items() if val is not None} if is_update and partial_update: # patch for partial update resp = app.patch_json( 
'/{0}/{1}/acl'.format(entity_type, entity_id), cleaned_request, expect_errors=expect_errors) else: # put (for create or complete update) resp = app.put_json( '/{0}/{1}/acl'.format(entity_type, entity_id), cleaned_request, expect_errors=expect_errors) return resp def _append_acl_to_request(req, operation, user_ids=None, project_access=None): op_dict = {} if user_ids is not None: op_dict['users'] = user_ids if project_access is not None: op_dict['project-access'] = project_access if op_dict: req[operation] = op_dict def _get_acl_map(entity_id, is_secret=True): """Provides map of operation: acl_entity for given entity id.""" if is_secret: acl_repo = repositories.get_secret_acl_repository() acl_map = {acl.operation: acl for acl in acl_repo.get_by_secret_id(entity_id)} else: acl_repo = repositories.get_container_acl_repository() acl_map = {acl.operation: acl for acl in acl_repo.get_by_container_id(entity_id)} return acl_map barbican-2.0.0/barbican/tests/api/controllers/test_orders.py0000664000567000056710000007044312701405673025361 0ustar jenkinsjenkins00000000000000# Copyright (c) 2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
import os import uuid import mock from barbican.common import resources from barbican.model import models from barbican.model import repositories from barbican.tests.api.controllers import test_acls from barbican.tests.api import test_resources_policy as test_policy from barbican.tests import utils order_repo = repositories.get_order_repository() project_repo = repositories.get_project_repository() ca_repo = repositories.get_ca_repository() project_ca_repo = repositories.get_project_ca_repository() container_repo = repositories.get_container_repository() generic_key_meta = { 'name': 'secretname', 'algorithm': 'AES', 'bit_length': 256, 'mode': 'cbc', 'payload_content_type': 'application/octet-stream' } class WhenCreatingOrdersUsingOrdersResource(utils.BarbicanAPIBaseTestCase): def test_can_create_a_new_order(self): resp, order_uuid = create_order( self.app, order_type='key', meta=generic_key_meta ) self.assertEqual(202, resp.status_int) # Make sure we get a valid uuid for the order uuid.UUID(order_uuid) order = order_repo.get(order_uuid, self.project_id) self.assertIsInstance(order, models.Order) def test_order_creation_should_allow_unknown_algorithm(self): meta = { 'bit_length': 128, 'algorithm': 'unknown' } resp, _ = create_order( self.app, order_type='key', meta=meta ) self.assertEqual(202, resp.status_int) def test_order_creation_should_fail_without_a_type(self): resp, _ = create_order( self.app, meta=generic_key_meta, expect_errors=True ) self.assertEqual(400, resp.status_int) def test_order_creation_should_fail_without_metadata(self): resp, _ = create_order( self.app, order_type='key', expect_errors=True ) self.assertEqual(400, resp.status_int) def test_order_create_should_fail_w_unsupported_payload_content_type(self): meta = { 'bit_length': 128, 'algorithm': 'aes', 'payload_content_type': 'something_unsupported' } resp, _ = create_order( self.app, order_type='key', meta=meta, expect_errors=True ) self.assertEqual(400, resp.status_int) def 
test_order_creation_should_fail_with_bogus_content(self): resp = self.app.post( '/orders/', 'random_stuff', headers={'Content-Type': 'application/json'}, expect_errors=True ) self.assertEqual(400, resp.status_int) def test_order_creation_should_fail_with_empty_dict(self): resp = self.app.post_json( '/orders/', {}, headers={'Content-Type': 'application/json'}, expect_errors=True ) self.assertEqual(400, resp.status_int) def test_order_creation_should_fail_without_content_type_header(self): resp = self.app.post( '/orders/', 'doesn\'t matter. headers are validated first', expect_errors=True, ) self.assertEqual(415, resp.status_int) class WhenGettingOrdersListUsingOrdersResource(utils.BarbicanAPIBaseTestCase): def test_can_get_a_list_of_orders(self): # Make sure we have atleast one order to created resp, order_uuid = create_order( self.app, order_type='key', meta=generic_key_meta ) self.assertEqual(202, resp.status_int) # Get the list of orders resp = self.app.get( '/orders/', headers={'Content-Type': 'application/json'} ) self.assertEqual(200, resp.status_int) self.assertIn('total', resp.json) self.assertGreater(len(resp.json.get('orders')), 0) def test_pagination_attributes_not_available_with_empty_order_list(self): params = {'name': 'no_orders_with_this_name'} resp = self.app.get( '/orders/', params ) self.assertEqual(200, resp.status_int) self.assertEqual(0, len(resp.json.get('orders'))) class WhenGettingOrDeletingOrders(utils.BarbicanAPIBaseTestCase): def test_can_get_order(self): # Make sure we have a order to retrieve create_resp, order_uuid = create_order( self.app, order_type='key', meta=generic_key_meta ) self.assertEqual(202, create_resp.status_int) # Retrieve the order get_resp = self.app.get('/orders/{0}/'.format(order_uuid)) self.assertEqual(200, get_resp.status_int) def test_can_delete_order(self): # Make sure we have a order to retrieve create_resp, order_uuid = create_order( self.app, order_type='key', meta=generic_key_meta ) self.assertEqual(202, 
create_resp.status_int) delete_resp = self.app.delete('/orders/{0}'.format(order_uuid)) self.assertEqual(204, delete_resp.status_int) def test_get_call_on_non_existant_order_should_give_404(self): bogus_uuid = uuid.uuid4() resp = self.app.get( '/orders/{0}'.format(bogus_uuid), expect_errors=True ) self.assertEqual(404, resp.status_int) def test_returns_404_on_get_with_bad_uuid(self): resp = self.app.get( '/orders/98c876d9-aaac-44e4-8ea8-441932962b05X', expect_errors=True ) self.assertEqual(404, resp.status_int) def test_delete_call_on_non_existant_order_should_give_404(self): bogus_uuid = uuid.uuid4() resp = self.app.delete( '/orders/{0}'.format(bogus_uuid), expect_errors=True ) self.assertEqual(404, resp.status_int) @utils.parameterized_test_case class WhenPuttingAnOrderWithMetadata(utils.BarbicanAPIBaseTestCase): def setUp(self): # Temporarily mock the queue until we can figure out a better way # TODO(jvrbanac): Remove dependence on mocks self.update_order_mock = mock.MagicMock() repositories.OrderRepo.update_order = self.update_order_mock super(WhenPuttingAnOrderWithMetadata, self).setUp() def _create_generic_order_for_put(self): """Create a real order to modify and perform PUT actions on This makes sure that a project exists for our order and that there is an order within the database. This is a little hacky due to issues testing certificate order types. 
""" # Create generic order resp, order_uuid = create_order( self.app, order_type='key', meta=generic_key_meta ) self.assertEqual(202, resp.status_int) # Modify the order in the DB to allow actions to be performed order_model = order_repo.get(order_uuid, self.project_id) order_model.type = 'certificate' order_model.status = models.States.PENDING order_model.meta = {'nope': 'nothing'} order_model.save() repositories.commit() return order_uuid def test_putting_on_a_order(self): order_uuid = self._create_generic_order_for_put() body = { 'type': 'certificate', 'meta': {'nope': 'thing'} } resp = self.app.put_json( '/orders/{0}'.format(order_uuid), body, headers={'Content-Type': 'application/json'} ) self.assertEqual(204, resp.status_int) self.assertEqual(1, self.update_order_mock.call_count) @utils.parameterized_dataset({ 'bogus_content': ['bogus'], 'bad_order_type': ['{"type": "secret", "meta": {}}'], }) def test_return_400_on_put_with(self, body): order_uuid = self._create_generic_order_for_put() resp = self.app.put( '/orders/{0}'.format(order_uuid), body, headers={'Content-Type': 'application/json'}, expect_errors=True ) self.assertEqual(400, resp.status_int) def test_return_400_on_put_when_order_is_active(self): order_uuid = self._create_generic_order_for_put() # Put the order in a active state to prevent modification order_model = order_repo.get(order_uuid, self.project_id) order_model.status = models.States.ACTIVE order_model.save() repositories.commit() resp = self.app.put_json( '/orders/{0}'.format(order_uuid), {'type': 'certificate', 'meta': {}}, headers={'Content-Type': 'application/json'}, expect_errors=True ) self.assertEqual(400, resp.status_int) class WhenCreatingOrders(utils.BarbicanAPIBaseTestCase): def test_should_add_new_order(self): order_meta = { 'name': 'secretname', 'expiration': '2114-02-28T17:14:44.180394', 'algorithm': 'AES', 'bit_length': 256, 'mode': 'cbc', 'payload_content_type': 'application/octet-stream' } create_resp, order_uuid = 
create_order( self.app, order_type='key', meta=order_meta ) self.assertEqual(202, create_resp.status_int) order = order_repo.get(order_uuid, self.project_id) self.assertIsInstance(order, models.Order) self.assertEqual('key', order.type) self.assertEqual(order_meta, order.meta) def test_should_return_400_when_creating_with_empty_json(self): resp = self.app.post_json('/orders/', {}, expect_errors=True) self.assertEqual(400, resp.status_int,) def test_should_return_415_when_creating_with_blank_body(self): resp = self.app.post('/orders/', '', expect_errors=True) self.assertEqual(415, resp.status_int) class WhenCreatingCertificateOrders(utils.BarbicanAPIBaseTestCase): def setUp(self): super(WhenCreatingCertificateOrders, self).setUp() self.certificate_meta = { 'request': 'XXXXXX' } # Make sure we have a project self.project = resources.get_or_create_project(self.project_id) # Create CA's in the db self.available_ca_ids = [] for i in range(2): ca_information = { 'plugin_name': 'plugin_name', 'plugin_ca_id': 'plugin_name ca_id1', 'name': 'plugin name', 'description': 'Master CA for default plugin', 'ca_signing_certificate': 'XXXXX', 'intermediates': 'YYYYY' } ca_model = models.CertificateAuthority(ca_information) ca = ca_repo.create_from(ca_model) self.available_ca_ids.append(ca.id) foreign_project = resources.get_or_create_project('foreign_project') foreign_ca_information = { 'project_id': foreign_project.id, 'plugin_name': 'plugin_name', 'plugin_ca_id': 'plugin_name ca_id1', 'name': 'plugin name', 'description': 'Master CA for default plugin', 'ca_signing_certificate': 'XXXXX', 'intermediates': 'YYYYY' } foreign_ca_model = models.CertificateAuthority(foreign_ca_information) foreign_ca = ca_repo.create_from(foreign_ca_model) self.foreign_ca_id = foreign_ca.id repositories.commit() def test_can_create_new_cert_order(self): create_resp, order_uuid = create_order( self.app, order_type='certificate', meta=self.certificate_meta ) self.assertEqual(202, create_resp.status_int) 
order = order_repo.get(order_uuid, self.project_id) self.assertIsInstance(order, models.Order) def test_can_add_new_cert_order_with_ca_id(self): self.certificate_meta['ca_id'] = self.available_ca_ids[0] create_resp, order_uuid = create_order( self.app, order_type='certificate', meta=self.certificate_meta ) self.assertEqual(202, create_resp.status_int) order = order_repo.get(order_uuid, self.project_id) self.assertIsInstance(order, models.Order) def test_can_add_new_cert_order_with_ca_id_project_ca_defined(self): # Create a Project CA and add it project_ca_model = models.ProjectCertificateAuthority( self.project.id, self.available_ca_ids[0] ) project_ca_repo.create_from(project_ca_model) repositories.commit() # Attempt to create an order self.certificate_meta['ca_id'] = self.available_ca_ids[0] create_resp, order_uuid = create_order( self.app, order_type='certificate', meta=self.certificate_meta ) self.assertEqual(202, create_resp.status_int) order = order_repo.get(order_uuid, self.project_id) self.assertIsInstance(order, models.Order) def test_create_w_invalid_ca_id_should_fail(self): self.certificate_meta['ca_id'] = 'bogus_ca_id' create_resp, order_uuid = create_order( self.app, order_type='certificate', meta=self.certificate_meta, expect_errors=True ) self.assertEqual(400, create_resp.status_int) def test_create_should_fail_when_ca_not_in_defined_project_ca_ids(self): # Create a Project CA and add it project_ca_model = models.ProjectCertificateAuthority( self.project.id, self.available_ca_ids[0] ) project_ca_repo.create_from(project_ca_model) repositories.commit() # Make sure we set the ca_id to an id not defined in the project self.certificate_meta['ca_id'] = self.available_ca_ids[1] create_resp, order_uuid = create_order( self.app, order_type='certificate', meta=self.certificate_meta, expect_errors=True ) self.assertEqual(403, create_resp.status_int) def test_create_with_wrong_projects_subca_should_fail(self): self.certificate_meta['ca_id'] = self.foreign_ca_id 
create_resp, order_uuid = create_order( self.app, order_type='certificate', meta=self.certificate_meta, expect_errors=True ) self.assertEqual(403, create_resp.status_int) self.assertIn("not owned", create_resp.json['description']) class WhenCreatingStoredKeyOrders(utils.BarbicanAPIBaseTestCase, test_policy.BaseTestCase): def setUp(self): super(WhenCreatingStoredKeyOrders, self).setUp() # Make sure we have a project self.project = resources.get_or_create_project(self.project_id) self.creator_user_id = 'creatorUserId' def test_can_create_new_stored_key_order(self): container_name = 'rsa container name' container_type = 'rsa' secret_refs = [] resp, container_id = create_container( self.app, name=container_name, container_type=container_type, secret_refs=secret_refs ) stored_key_meta = { 'request_type': 'stored-key', 'subject_dn': 'cn=barbican-server,o=example.com', 'container_ref': 'https://localhost/v1/containers/' + container_id } create_resp, order_uuid = create_order( self.app, order_type='certificate', meta=stored_key_meta ) self.assertEqual(202, create_resp.status_int) order = order_repo.get(order_uuid, self.project_id) self.assertIsInstance(order, models.Order) def _setup_acl_order_context_and_create_order( self, add_acls=False, read_project_access=True, order_roles=None, order_user=None, expect_errors=False): """Helper method to setup acls, order context and return created order. Create order uses actual oslo policy enforcer instead of being None. Create ACLs for container if 'add_acls' is True. Make container private when 'read_project_access' is False. 
""" container_name = 'rsa container name' container_type = 'rsa' secret_refs = [] self.app.extra_environ = { 'barbican.context': self._build_context(self.project_id, user=self.creator_user_id) } _, container_id = create_container( self.app, name=container_name, container_type=container_type, secret_refs=secret_refs ) if add_acls: test_acls.manage_acls( self.app, 'containers', container_id, read_user_ids=['u1', 'u3', 'u4'], read_project_access=read_project_access, is_update=False) self.app.extra_environ = { 'barbican.context': self._build_context( self.project_id, roles=order_roles, user=order_user, is_admin=False, policy_enforcer=self.policy_enforcer) } stored_key_meta = { 'request_type': 'stored-key', 'subject_dn': 'cn=barbican-server,o=example.com', 'container_ref': 'https://localhost/v1/containers/' + container_id } return create_order( self.app, order_type='certificate', meta=stored_key_meta, expect_errors=expect_errors ) def test_can_create_new_stored_key_order_no_acls_and_policy_check(self): """Create stored key order with actual policy enforcement logic. Order can be created as long as order project and user roles are allowed in policy. In the test, user requesting order has container project and has 'creator' role. Order should be created regardless of what user id is. """ create_resp, order_id = self._setup_acl_order_context_and_create_order( add_acls=False, read_project_access=True, order_roles=['creator'], order_user='anyUserId', expect_errors=False) self.assertEqual(202, create_resp.status_int) order = order_repo.get(order_id, self.project_id) self.assertIsInstance(order, models.Order) self.assertEqual('anyUserId', order.creator_id) def test_should_fail_for_user_observer_role_no_acls_and_policy_check(self): """Should not allow create order when user doesn't have necessary role. Order can be created as long as order project and user roles are allowed in policy. In the test, user requesting order has container project but has 'observer' role. 
Create order should fail as expected role is 'admin' or 'creator'. """ create_resp, _ = self._setup_acl_order_context_and_create_order( add_acls=False, read_project_access=True, order_roles=['observer'], order_user='anyUserId', expect_errors=True) self.assertEqual(403, create_resp.status_int) def test_can_create_order_with_private_container_and_creator_user(self): """Create order using private container with creator user. Container has been marked private via ACLs. Still creator of container should be able to create stored key order using that container successfully. """ create_resp, order_id = self._setup_acl_order_context_and_create_order( add_acls=True, read_project_access=False, order_roles=['creator'], order_user=self.creator_user_id, expect_errors=False) self.assertEqual(202, create_resp.status_int) order = order_repo.get(order_id, self.project_id) self.assertIsInstance(order, models.Order) self.assertEqual(self.creator_user_id, order.creator_id) def test_can_create_order_with_private_container_and_acl_user(self): """Create order using private container with acl user. Container has been marked private via ACLs. So *generally* project user should not be able to create stored key order using that container. But here it can create order as that user is defined in read ACL user list. Here project user means user which has 'creator' role in the container project. Order project is same as container. """ create_resp, order_id = self._setup_acl_order_context_and_create_order( add_acls=True, read_project_access=False, order_roles=['creator'], order_user='u3', expect_errors=False) self.assertEqual(202, create_resp.status_int) order = order_repo.get(order_id, self.project_id) self.assertIsInstance(order, models.Order) self.assertEqual('u3', order.creator_id) def test_should_raise_with_private_container_and_project_user(self): """Create order should fail using private container for project user. Container has been marked private via ACLs. 
So project user should not be able to create stored key order using that container. Here project user means user which has 'creator' role in the container project. Order project is same as container. If container was not marked private, this user would have been able to create order. See next test. """ create_resp, _ = self._setup_acl_order_context_and_create_order( add_acls=True, read_project_access=False, order_roles=['creator'], order_user='anyProjectUser', expect_errors=True) self.assertEqual(403, create_resp.status_int) def test_can_create_order_with_non_private_acls_and_project_user(self): """Create order using non-private container with project user. Container has not been marked private via ACLs. So project user should be able to create stored key order using that container successfully. Here project user means user which has 'creator' role in the container project. Order project is same as container. """ create_resp, order_id = self._setup_acl_order_context_and_create_order( add_acls=True, read_project_access=True, order_roles=['creator'], order_user='anyProjectUser', expect_errors=False) self.assertEqual(202, create_resp.status_int) order = order_repo.get(order_id, self.project_id) self.assertIsInstance(order, models.Order) self.assertEqual('anyProjectUser', order.creator_id) def test_can_create_order_with_non_private_acls_and_creator_user(self): """Create order using non-private container with creator user. Container has not been marked private via ACLs. So user who created container should be able to create stored key order using that container successfully. Order project is same as container. 
""" create_resp, order_id = self._setup_acl_order_context_and_create_order( add_acls=True, read_project_access=True, order_roles=['creator'], order_user=self.creator_user_id, expect_errors=False) self.assertEqual(202, create_resp.status_int) order = order_repo.get(order_id, self.project_id) self.assertIsInstance(order, models.Order) self.assertEqual(self.creator_user_id, order.creator_id) def test_should_raise_with_bad_container_ref(self): stored_key_meta = { 'request_type': 'stored-key', 'subject_dn': 'cn=barbican-server,o=example.com', 'container_ref': 'bad_ref' } create_resp, order_uuid = create_order( self.app, order_type='certificate', meta=stored_key_meta, expect_errors=True ) self.assertEqual(400, create_resp.status_int) def test_should_raise_with_container_not_found(self): stored_key_meta = { 'request_type': 'stored-key', 'subject_dn': 'cn=barbican-server,o=example.com', 'container_ref': 'https://localhost/v1/containers/not_found' } create_resp, order_uuid = create_order( self.app, order_type='certificate', meta=stored_key_meta, expect_errors=True ) self.assertEqual(400, create_resp.status_int) def test_should_raise_with_container_wrong_type(self): container_name = 'generic container name' container_type = 'generic' secret_refs = [] resp, container_id = create_container( self.app, name=container_name, container_type=container_type, secret_refs=secret_refs ) stored_key_meta = { 'request_type': 'stored-key', 'subject_dn': 'cn=barbican-server,o=example.com', 'container_ref': 'https://localhost/v1/containers/' + container_id } create_resp, order_uuid = create_order( self.app, order_type='certificate', meta=stored_key_meta, expect_errors=True ) self.assertEqual(400, create_resp.status_int) def test_should_raise_with_container_no_access(self): stored_key_meta = { 'request_type': 'stored-key', 'subject_dn': 'cn=barbican-server,o=example.com', 'container_ref': 'https://localhost/v1/containers/no_access' } create_resp, order_uuid = create_order( self.app, 
order_type='certificate', meta=stored_key_meta, expect_errors=True ) self.assertEqual(400, create_resp.status_int) class WhenPerformingUnallowedOperations(utils.BarbicanAPIBaseTestCase): def test_should_not_allow_put_orders(self): resp = self.app.put_json('/orders/', expect_errors=True) self.assertEqual(405, resp.status_int) def test_should_not_allow_delete_orders(self): resp = self.app.delete('/orders/', expect_errors=True) self.assertEqual(405, resp.status_int) def test_should_not_allow_post_order_by_id(self): # Create generic order so we don't get a 404 on POST resp, order_uuid = create_order( self.app, order_type='key', meta=generic_key_meta ) self.assertEqual(202, resp.status_int) resp = self.app.post_json( '/orders/{0}'.format(order_uuid), {}, expect_errors=True ) self.assertEqual(405, resp.status_int) # ----------------------- Helper Functions --------------------------- def create_order(app, order_type=None, meta=None, expect_errors=False): # TODO(jvrbanac): Once test resources is split out, refactor this # and similar functions into a generalized helper module and reduce # duplication. 
request = { 'type': order_type, 'meta': meta } cleaned_request = {key: val for key, val in request.items() if val is not None} resp = app.post_json( '/orders/', cleaned_request, expect_errors=expect_errors ) created_uuid = None if resp.status_int == 202: order_ref = resp.json.get('order_ref', '') _, created_uuid = os.path.split(order_ref) return (resp, created_uuid) def create_container(app, name=None, container_type=None, secret_refs=None, expect_errors=False, headers=None): request = { 'name': name, 'type': container_type, 'secret_refs': secret_refs if secret_refs else [] } cleaned_request = {key: val for key, val in request.items() if val is not None} resp = app.post_json( '/containers/', cleaned_request, expect_errors=expect_errors, headers=headers ) created_uuid = None if resp.status_int == 201: container_ref = resp.json.get('container_ref', '') _, created_uuid = os.path.split(container_ref) return (resp, created_uuid) barbican-2.0.0/barbican/tests/api/controllers/test_quotas.py0000664000567000056710000001535712701405673025402 0ustar jenkinsjenkins00000000000000# Copyright (c) 2015 Cisco Systems # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
import unittest from barbican.tests import utils class WhenTestingQuotas(utils.BarbicanAPIBaseTestCase): def test_should_get_quotas(self): params = {} resp = self.app.get('/quotas', params) self.assertEqual(200, resp.status_int) quotas_list = resp.json.get('quotas') self.assertEqual({'consumers': -1, 'containers': -1, 'orders': -1, 'secrets': -1, 'cas': -1}, quotas_list) def test_should_get_specific_project_quotas(self): params = {} self.create_a_project_quotas() resp = self.app.get( '/project-quotas/{0}'.format(self.get_test_project_id()), params) self.assertEqual(200, resp.status_int) project_quotas = resp.json.get('project_quotas') self.assertEqual({'consumers': 105, 'containers': 103, 'orders': 102, 'secrets': 101, 'cas': 106}, project_quotas) def test_should_return_not_found_get_specific_project_quotas(self): params = {} resp = self.app.get( '/project-quotas/{0}'.format(self.get_test_project_id()), params, expect_errors=True) self.assertEqual(404, resp.status_int) def test_should_get_project_quotas_list(self): self.create_project_quotas() params = {} resp = self.app.get('/project-quotas', params) self.assertEqual(200, resp.status_int) project_quotas_list = resp.json.get('project_quotas') self.assertEqual(3, len(project_quotas_list)) self.assertIn('total', resp.json) def test_should_get_empty_project_quotas_list(self): params = {} resp = self.app.get('/project-quotas', params) self.assertEqual(200, resp.status_int) project_quotas_list = resp.json.get('project_quotas') self.assertEqual([], project_quotas_list) self.assertIn('total', resp.json) def test_pagination_attributes(self): for index in range(11): self.create_a_project_quotas(index) params = {'limit': '2', 'offset': '2'} resp = self.app.get('/project-quotas', params) self.assertEqual(200, resp.status_int) self.assertIn('previous', resp.json) self.assertIn('next', resp.json) previous_ref = resp.json.get('previous') next_ref = resp.json.get('next') self.assertIn('offset=0', previous_ref) 
self.assertIn('offset=4', next_ref) def test_should_put_project_quotas(self): request = {'project_quotas': {}} resp = self.app.put_json( '/project-quotas/{0}'.format(self.project_id), request) self.assertEqual(204, resp.status_int) def test_should_return_bad_value_put_project_quotas(self): request = '{"project_quotas": {"secrets": "foo"}}' resp = self.app.put( '/project-quotas/{0}'.format(self.project_id), request, headers={'Content-Type': 'application/json'}, expect_errors=True) self.assertEqual(400, resp.status_int) def test_should_return_bad_data_put_project_quotas(self): """PUT not allowed operation for /project-quotas/{project-id}""" params = {'bad data'} resp = self.app.put( '/project-quotas/{0}'.format(self.project_id), params, expect_errors=True) self.assertEqual(400, resp.status_int) def test_should_return_no_payload_for_put_project_quotas(self): """PUT not allowed operation for /project-quotas/{project-id}""" params = {} resp = self.app.put( '/project-quotas/{0}'.format(self.project_id), params, expect_errors=True) self.assertEqual(400, resp.status_int) def test_should_delete_specific_project_quotas(self): params = {} self.create_a_project_quotas() resp = self.app.delete( '/project-quotas/{0}'.format(self.get_test_project_id()), params) self.assertEqual(204, resp.status_int) def test_should_return_not_found_delete_specific_project_quotas(self): params = {} resp = self.app.delete( '/project-quotas/{0}'.format('dummy'), params, expect_errors=True) self.assertEqual(404, resp.status_int) def test_check_put_quotas_not_allowed(self): """PuT not allowed operation for /quotas""" params = {} resp = self.app.put('/quotas/', params, expect_errors=True) self.assertEqual(405, resp.status_int) def test_check_put_project_quotas_list_not_allowed(self): """PUT not allowed operation for /project-quotas""" params = {} resp = self.app.put('/project-quotas', params, expect_errors=True) self.assertEqual(405, resp.status_int) def 
test_check_post_project_quotas_not_allowed(self): """POST not allowed operation for /project-quotas/{project-id}""" params = {} resp = self.app.post( '/project-quotas/{0}'.format(self.project_id), params, expect_errors=True) self.assertEqual(405, resp.status_int) def test_check_post_project_quotas_list_not_allowed(self): """POST not allowed operation for /project-quotas""" params = {} resp = self.app.post('/project-quotas', params, expect_errors=True) self.assertEqual(405, resp.status_int) # ----------------------- Helper Functions --------------------------- def get_test_project_id(self, index=1): return 'project' + str(index) def create_a_project_quotas(self, index=1): project_id = self.get_test_project_id(index) parsed_project_quotas = { 'secrets': index * 100 + 1, 'orders': index * 100 + 2, 'containers': index * 100 + 3, 'consumers': index * 100 + 5, 'cas': index * 100 + 6} request = {'project_quotas': parsed_project_quotas} resp = self.app.put_json( '/project-quotas/{0}'.format(project_id), request) self.assertEqual(204, resp.status_int) def create_project_quotas(self): for index in [1, 2, 3]: self.create_a_project_quotas(index) if __name__ == '__main__': unittest.main() barbican-2.0.0/barbican/tests/api/controllers/test_secretmeta.py0000664000567000056710000003216612701405673026217 0ustar jenkinsjenkins00000000000000# Copyright (c) 2017 IBM # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
import json import mock import os import uuid from barbican.tests import utils @utils.parameterized_test_case class WhenTestingSecretMetadataResource(utils.BarbicanAPIBaseTestCase): def setUp(self): super(WhenTestingSecretMetadataResource, self).setUp() self.valid_metadata = { "metadata": { "latitude": "30.393805", "longitude": "-97.724077" } } def test_create_secret_metadata(self): secret_resp, secret_uuid = create_secret(self.app) meta_resp = create_secret_metadata(self.app, self.valid_metadata, secret_resp) self.assertEqual(201, meta_resp.status_int) self.assertIsNotNone(meta_resp.json) def test_can_get_secret_metadata(self): secret_resp, secret_uuid = create_secret(self.app) meta_resp = create_secret_metadata(self.app, self.valid_metadata, secret_resp) self.assertEqual(201, meta_resp.status_int) get_resp = self.app.get('/secrets/%s/metadata' % secret_resp) self.assertEqual(200, get_resp.status_int) self.assertEqual(self.valid_metadata, get_resp.json) def test_get_secret_metadata_invalid_secret_should_fail(self): secret_resp, secret_uuid = create_secret(self.app) create_secret_metadata(self.app, self.valid_metadata, secret_resp) get_resp = self.app.get('/secrets/%s/metadata' % uuid.uuid4().hex, expect_errors=True) self.assertEqual(404, get_resp.status_int) @utils.parameterized_test_case class WhenTestingSecretMetadatumResource(utils.BarbicanAPIBaseTestCase): def setUp(self): super(WhenTestingSecretMetadatumResource, self).setUp() self.valid_metadata = { "metadata": { "latitude": "30.393805", "longitude": "-97.724077" } } self.updated_valid_metadata = { "metadata": { "latitude": "30.393805", "longitude": "-97.724077", "access-limit": "2" } } self.valid_metadatum = { 'key': 'access-limit', 'value': '2' } @mock.patch('barbican.model.repositories.SecretUserMetadatumRepo.' 
'get_metadata_for_secret') def test_can_create_secret_metadatum(self, mocked_get): secret_resp, secret_uuid = create_secret(self.app) mocked_get.return_value = self.valid_metadata['metadata'] meta_resp = create_secret_metadatum(self.app, self.valid_metadatum, secret_resp) self.assertEqual(201, meta_resp.status_int) self.assertIsNotNone(meta_resp.json) @mock.patch('barbican.model.repositories.SecretUserMetadatumRepo.' 'get_metadata_for_secret') def test_conflict_create_same_key_secret_metadatum(self, mocked_get): secret_resp, secret_uuid = create_secret(self.app) mocked_get.return_value = self.valid_metadata['metadata'] latitude_metadatum = { "key": "latitude", "value": "30.393805" } meta_resp = create_secret_metadatum(self.app, latitude_metadatum, secret_resp, expect_errors=True) self.assertEqual(409, meta_resp.status_int) self.assertIsNotNone(meta_resp.json) @mock.patch('barbican.model.repositories.SecretUserMetadatumRepo.' 'get_metadata_for_secret') def test_can_delete_secret_metadatum(self, mocked_get): secret_resp, secret_uuid = create_secret(self.app) mocked_get.return_value = self.valid_metadata['metadata'] meta_resp = create_secret_metadatum(self.app, self.valid_metadatum, secret_resp) self.assertEqual(201, meta_resp.status_int) delete_resp = self.app.delete('/secrets/%s/metadata/access-limit' % secret_resp) self.assertEqual(204, delete_resp.status_int) @mock.patch('barbican.model.repositories.SecretUserMetadatumRepo.' 
'get_metadata_for_secret') def test_can_get_secret_metadatum(self, mocked_get): secret_resp, secret_uuid = create_secret(self.app) mocked_get.return_value = self.valid_metadata['metadata'] meta_resp = create_secret_metadatum(self.app, self.valid_metadatum, secret_resp) self.assertEqual(201, meta_resp.status_int) mocked_get.return_value = self.updated_valid_metadata['metadata'] get_resp = self.app.get('/secrets/%s/metadata/access-limit' % secret_resp) self.assertEqual(200, get_resp.status_int) self.assertEqual(self.valid_metadatum, get_resp.json) @mock.patch('barbican.model.repositories.SecretUserMetadatumRepo.' 'get_metadata_for_secret') def test_get_secret_metadatum_not_found(self, mocked_get): secret_resp, secret_uuid = create_secret(self.app) mocked_get.return_value = self.valid_metadata['metadata'] meta_resp = create_secret_metadatum(self.app, self.valid_metadatum, secret_resp) self.assertEqual(201, meta_resp.status_int) mocked_get.return_value = self.updated_valid_metadata['metadata'] get_resp = self.app.get('/secrets/%s/metadata/nothere' % secret_resp, expect_errors=True) self.assertEqual(404, get_resp.status_int) @mock.patch('barbican.model.repositories.SecretUserMetadatumRepo.' 'get_metadata_for_secret') def test_can_update_secret_metadatum(self, mocked_get): secret_resp, secret_uuid = create_secret(self.app) mocked_get.return_value = self.valid_metadata['metadata'] meta_resp = create_secret_metadatum(self.app, self.valid_metadatum, secret_resp) self.assertEqual(201, meta_resp.status_int) new_metadatum = { 'key': 'access-limit', 'value': '5' } new_metadatum_json = json.dumps(new_metadatum) mocked_get.return_value = self.updated_valid_metadata['metadata'] put_resp = self.app.put('/secrets/%s/metadata/access-limit' % secret_resp, new_metadatum_json, headers={'Content-Type': 'application/json'}) self.assertEqual(200, put_resp.status_int) self.assertEqual(new_metadatum, put_resp.json) @mock.patch('barbican.model.repositories.SecretUserMetadatumRepo.' 
'get_metadata_for_secret') def test_can_update_secret_metadatum_not_found(self, mocked_get): secret_resp, secret_uuid = create_secret(self.app) mocked_get.return_value = self.valid_metadata['metadata'] meta_resp = create_secret_metadatum(self.app, self.valid_metadatum, secret_resp) self.assertEqual(201, meta_resp.status_int) new_metadatum = { 'key': 'newwwww', 'value': '5' } new_metadatum_json = json.dumps(new_metadatum) mocked_get.return_value = self.updated_valid_metadata['metadata'] put_resp = self.app.put('/secrets/%s/metadata/newwwww' % secret_resp, new_metadatum_json, headers={'Content-Type': 'application/json'}, expect_errors=True) self.assertEqual(404, put_resp.status_int) @mock.patch('barbican.model.repositories.SecretUserMetadatumRepo.' 'get_metadata_for_secret') def test_conflict_update_secret_metadatum(self, mocked_get): secret_resp, secret_uuid = create_secret(self.app) mocked_get.return_value = self.valid_metadata['metadata'] meta_resp = create_secret_metadatum(self.app, self.valid_metadatum, secret_resp) self.assertEqual(201, meta_resp.status_int) new_metadatum = { 'key': 'snoop', 'value': '5' } new_metadatum_json = json.dumps(new_metadatum) mocked_get.return_value = self.updated_valid_metadata['metadata'] put_resp = self.app.put('/secrets/%s/metadata/access-limit' % secret_resp, new_metadatum_json, headers={'Content-Type': 'application/json'}, expect_errors=True) self.assertEqual(409, put_resp.status_int) def test_returns_405_for_delete_on_metadata(self): secret_id, secret_resp = create_secret(self.app) resp = self.app.delete('/secrets/{0}/metadata/'.format(secret_id), expect_errors=True) self.assertEqual(405, resp.status_int) @mock.patch('barbican.model.repositories.SecretUserMetadatumRepo.' 
'get_metadata_for_secret') def test_returns_405_for_head_on_metadatum(self, mocked_get): secret_id, secret_resp = create_secret(self.app) mocked_get.return_value = self.valid_metadata['metadata'] meta_resp = create_secret_metadatum(self.app, self.valid_metadatum, secret_id) self.assertEqual(201, meta_resp.status_int) resp = self.app.head('/secrets/{0}/metadata/access-limit'.format( secret_id), expect_errors=True) self.assertEqual(405, resp.status_int) # ----------------------- Helper Functions --------------------------- def create_secret(app, name=None, algorithm=None, bit_length=None, mode=None, expiration=None, payload='not-encrypted', content_type='text/plain', content_encoding=None, transport_key_id=None, transport_key_needed=None, expect_errors=False): request = { 'name': name, 'algorithm': algorithm, 'bit_length': bit_length, 'mode': mode, 'expiration': expiration, 'payload': payload, 'payload_content_type': content_type, 'payload_content_encoding': content_encoding, 'transport_key_id': transport_key_id, 'transport_key_needed': transport_key_needed } cleaned_request = {key: val for key, val in request.items() if val is not None} resp = app.post_json( '/secrets/', cleaned_request, expect_errors=expect_errors ) created_uuid = None if resp.status_int == 201: secret_ref = resp.json.get('secret_ref', '') _, created_uuid = os.path.split(secret_ref) return created_uuid, resp def create_secret_metadata(app, metadata, secret_uuid, expect_errors=False): request = {} for metadatum in metadata: request[metadatum] = metadata.get(metadatum) cleaned_request = {key: val for key, val in request.items() if val is not None} url = '/secrets/%s/metadata/' % secret_uuid resp = app.put_json( url, cleaned_request, expect_errors=expect_errors ) return resp def create_secret_metadatum(app, metadata, secret_uuid, remainder=None, update=False, expect_errors=False): request = {} for metadatum in metadata: request[metadatum] = metadata.get(metadatum) cleaned_request = {key: val for key, 
val in request.items() if val is not None} url = '/secrets/%s/metadata/' % secret_uuid if remainder: url = url + remainder if update: resp = app.put_json( url, cleaned_request, expect_errors=expect_errors ) else: resp = app.post_json( url, cleaned_request, expect_errors=expect_errors ) return resp barbican-2.0.0/barbican/tests/api/controllers/test_cas.py0000664000567000056710000005333312701405673024630 0ustar jenkinsjenkins00000000000000# Copyright (c) 2015 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
import mock from six import moves from barbican.common import exception from barbican.common import hrefs from barbican.common import resources as res from barbican.model import models from barbican.model import repositories from barbican.tests import utils project_repo = repositories.get_project_repository() ca_repo = repositories.get_ca_repository() project_ca_repo = repositories.get_project_repository() preferred_ca_repo = repositories.get_preferred_ca_repository() def create_ca(parsed_ca, id_ref="id"): """Generate a CA entity instance.""" ca = models.CertificateAuthority(parsed_ca) ca.id = id_ref return ca class WhenTestingCAsResource(utils.BarbicanAPIBaseTestCase): def test_should_get_list_certificate_authorities(self): self.app.extra_environ = { 'barbican.context': self._build_context(self.project_id, user="user1") } self.create_cas(set_project_cas=False) resp = self.app.get('/cas/', self.params) self.assertEqual(self.limit, len(resp.namespace['cas'])) self.assertIn('previous', resp.namespace) self.assertIn('next', resp.namespace) url_nav_next = self._create_url(self.project_id, self.offset + self.limit, self.limit) self.assertEqual(1, resp.body.decode('utf-8').count(url_nav_next)) url_nav_prev = self._create_url(self.project_id, 0, self.limit) self.assertEqual(1, resp.body.decode('utf-8').count(url_nav_prev)) url_hrefs = self._create_url(self.project_id) self.assertEqual((self.limit + 2), resp.body.decode('utf-8').count(url_hrefs)) def test_response_should_list_subca_and_project_cas(self): self.create_cas() self.app.extra_environ = { 'barbican.context': self._build_context(self.project_id, user="user1") } self.params['limit'] = 100 self.params['offset'] = 0 resp = self.app.get('/cas/', self.params) self.assertIn('total', resp.namespace) self.assertEqual(3, resp.namespace['total']) ca_refs = list(resp.namespace['cas']) for ca_ref in ca_refs: ca_id = hrefs.get_ca_id_from_ref(ca_ref) if not ((ca_id in self.project_ca_ids) or (ca_id == self.subca.id)): 
self.fail("Invalid CA reference returned") def test_response_should_all_except_subca(self): self.create_cas() self.app.extra_environ = { 'barbican.context': self._build_context("other_project", user="user1") } self.params['limit'] = 100 self.params['offset'] = 0 self.params['plugin_name'] = self.plugin_name resp = self.app.get('/cas/', self.params) self.assertIn('total', resp.namespace) self.assertEqual(self.num_cas - 1, resp.namespace['total']) ca_refs = list(resp.namespace['cas']) for ca_ref in ca_refs: ca_id = hrefs.get_ca_id_from_ref(ca_ref) self.assertNotEqual(ca_id, self.subca.id) def test_response_should_all_except_subca_from_all_subresource(self): self.create_cas() self.app.extra_environ = { 'barbican.context': self._build_context("other_project", user="user1") } self.params['limit'] = 100 self.params['offset'] = 0 self.params['plugin_name'] = self.plugin_name resp = self.app.get('/cas/all', self.params) self.assertIn('total', resp.namespace) self.assertEqual(self.num_cas - 1, resp.namespace['total']) ca_refs = list(resp.namespace['cas']) for ca_ref in ca_refs: ca_id = hrefs.get_ca_id_from_ref(ca_ref) self.assertNotEqual(ca_id, self.subca.id) def test_response_should_all_from_all_subresource(self): self.create_cas() self.app.extra_environ = { 'barbican.context': self._build_context(self.project_id, user="user1") } self.params['limit'] = 100 self.params['offset'] = 0 self.params['plugin_name'] = self.plugin_name resp = self.app.get('/cas/all', self.params) self.assertIn('total', resp.namespace) self.assertEqual(self.num_cas, resp.namespace['total']) def test_response_should_all_cas(self): self.create_cas(set_project_cas=False) self.app.extra_environ = { 'barbican.context': self._build_context(self.project_id, user="user1") } self.params['limit'] = 100 self.params['offset'] = 0 self.params['plugin_name'] = self.plugin_name resp = self.app.get('/cas/', self.params) self.assertIn('total', resp.namespace) self.assertEqual(self.num_cas, resp.namespace['total']) 
def test_should_get_list_certificate_authorities_with_params(self): self.create_cas(set_project_cas=False) self.params['plugin_name'] = self.plugin_name self.params['plugin_ca_id'] = self.plugin_ca_id + str(1) self.params['offset'] = 0 resp = self.app.get('/cas/', self.params) self.assertNotIn('previous', resp.namespace) self.assertNotIn('next', resp.namespace) self.assertEqual(1, resp.namespace['total']) def test_should_get_with_params_on_all_resource(self): self.create_cas(set_project_cas=False) self.params['plugin_name'] = self.plugin_name self.params['plugin_ca_id'] = self.plugin_ca_id + str(1) self.params['offset'] = 0 resp = self.app.get('/cas/all', self.params) self.assertNotIn('previous', resp.namespace) self.assertNotIn('next', resp.namespace) self.assertEqual(1, resp.namespace['total']) def test_should_handle_no_cas(self): self.params = {'offset': 0, 'limit': 2, 'plugin_name': 'dummy'} resp = self.app.get('/cas/', self.params) self.assertEqual([], resp.namespace.get('cas')) self.assertEqual(0, resp.namespace.get('total')) self.assertNotIn('previous', resp.namespace) self.assertNotIn('next', resp.namespace) def test_should_get_global_preferred_ca(self): self.create_cas() resp = self.app.get('/cas/global-preferred') self.assertEqual( hrefs.convert_certificate_authority_to_href( self.global_preferred_ca.id), resp.namespace['ca_ref']) def test_should_get_no_global_preferred_ca(self): resp = self.app.get('/cas/global-preferred', expect_errors=True) self.assertEqual(404, resp.status_int) def test_should_get_preferred_ca_not_found(self): self.project = res.get_or_create_project(self.project_id) project_repo.save(self.project) resp = self.app.get('/cas/preferred', expect_errors=True) self.assertEqual(404, resp.status_int) def test_should_get_preferred_ca(self): self.create_cas() resp = self.app.get('/cas/preferred') self.assertEqual( hrefs.convert_certificate_authority_to_href( self.preferred_ca.id), resp.namespace['ca_ref']) def test_should_get_ca(self): 
self.create_cas() resp = self.app.get('/cas/{0}'.format(self.selected_ca_id)) self.assertEqual(self.selected_ca_id, resp.namespace['ca_id']) self.assertEqual(self.selected_plugin_ca_id, resp.namespace['plugin_ca_id']) def test_should_throw_exception_for_get_when_ca_not_found(self): self.create_cas() resp = self.app.get('/cas/bogus_ca_id', expect_errors=True) self.assertEqual(404, resp.status_int) def test_should_get_signing_certificate(self): self.create_cas() resp = self.app.get('/cas/{0}/cacert'.format(self.selected_ca_id)) self.assertEqual(self.selected_signing_cert, resp.body.decode('utf-8')) def test_should_raise_for_get_signing_certificate_ca_not_found(self): resp = self.app.get('/cas/bogus_ca/cacert', expect_errors=True) self.assertEqual(404, resp.status_int) def test_should_get_cert_chain(self): self.create_cas() resp = self.app.get('/cas/{0}/intermediates'.format( self.selected_ca_id)) self.assertEqual(self.selected_intermediates, resp.body.decode('utf-8')) def test_should_raise_for_get_cert_chain_ca_not_found(self): resp = self.app.get('/cas/bogus_ca/intermediates', expect_errors=True) self.assertEqual(404, resp.status_int) def test_should_raise_for_ca_attribute_not_found(self): self.create_cas() resp = self.app.get('/cas/{0}/bogus'.format(self.selected_ca_id), expect_errors=True) self.assertEqual(404, resp.status_int) def test_should_add_to_project(self): self.create_cas() resp = self.app.post('/cas/{0}/add-to-project'.format( self.selected_ca_id)) self.assertEqual(204, resp.status_int) # TODO(alee) need more detailed tests here def test_should_add_existing_project_ca_to_project(self): self.create_cas() resp = self.app.post('/cas/{0}/add-to-project'.format( self.project_ca_ids[0])) self.assertEqual(204, resp.status_int) # TODO(alee) need more detailed tests here def test_should_raise_add_to_project_on_ca_not_found(self): resp = self.app.post( '/cas/bogus_ca/add-to-project', expect_errors=True) self.assertEqual(404, resp.status_int) def 
test_should_raise_add_to_project_on_ca_not_owned_by_project(self): self.create_cas() self.app.extra_environ = { 'barbican.context': self._build_context("other_project", user="user1") } resp = self.app.post('/cas/{0}/add-to-project'.format( self.subca.id), expect_errors=True) self.assertEqual(403, resp.status_int) def test_should_raise_add_to_project_not_post(self): self.create_cas() resp = self.app.get( '/cas/{0}/add_to_project'.format(self.selected_ca_id), expect_errors=True) self.assertEqual(405, resp.status_int) def test_should_remove_from_project(self): self.create_cas() resp = self.app.post('/cas/{0}/remove-from-project'.format( self.project_ca_ids[0])) self.assertEqual(204, resp.status_int) # TODO(alee) need more detailed tests here def test_should_raise_remove_from_project_preferred_ca(self): self.create_cas() resp = self.app.post('/cas/{0}/remove-from-project'.format( self.project_ca_ids[1]), expect_errors=True) self.assertEqual(409, resp.status_int) def test_should_remove_preferred_ca_if_last_project_ca(self): self.create_cas() resp = self.app.post('/cas/{0}/remove-from-project'.format( self.project_ca_ids[0])) self.assertEqual(204, resp.status_int) resp = self.app.post('/cas/{0}/remove-from-project'.format( self.project_ca_ids[1])) self.assertEqual(204, resp.status_int) def test_should_raise_remove_from_project_not_currently_set(self): self.create_cas() resp = self.app.post( '/cas/{0}/remove-from-project'.format(self.selected_ca_id), expect_errors=True) self.assertEqual(404, resp.status_int) def test_should_raise_remove_form_project_on_ca_not_found(self): self.create_cas() resp = self.app.post('/cas/bogus_ca/remove-from-project', expect_errors=True) self.assertEqual(404, resp.status_int) def test_should_raise_remove_from_project_not_post(self): self.create_cas() resp = self.app.get( '/cas/{0}/remove-from-project'.format(self.selected_ca_id), expect_errors=True) self.assertEqual(405, resp.status_int) def test_should_set_preferred_modify_existing(self): 
self.create_cas() self.app.post( '/cas/{0}/set-preferred'.format(self.project_ca_ids[1])) def test_should_raise_set_preferred_ca_not_found(self): self.create_cas() resp = self.app.post('/cas/bogus_ca/set-preferred', expect_errors=True) self.assertEqual(404, resp.status_int) def test_should_raise_set_preferred_ca_not_in_project(self): self.create_cas() resp = self.app.post( '/cas/{0}/set-preferred'.format(self.selected_ca_id), expect_errors=True) self.assertEqual(400, resp.status_int) def test_should_raise_set_preferred_ca_not_post(self): self.create_cas() resp = self.app.get( '/cas/{0}/set-preferred'.format(self.selected_ca_id), expect_errors=True) self.assertEqual(405, resp.status_int) def test_should_set_global_preferred(self): self.create_cas() self.app.post( '/cas/{0}/set-global-preferred'.format(self.selected_ca_id)) def test_should_raise_set_global_preferred_ca_not_found(self): resp = self.app.post( '/cas/bogus_ca/set-global-preferred', expect_errors=True) self.assertEqual(404, resp.status_int) def test_should_raise_set_global_preferred_ca_not_post(self): self.create_cas() resp = self.app.get( '/cas/{0}/set-global-preferred'.format(self.selected_ca_id), expect_errors=True) self.assertEqual(405, resp.status_int) def test_should_unset_global_preferred(self): self.create_cas() resp = self.app.post( '/cas/unset-global-preferred') self.assertEqual(204, resp.status_int) def test_should_unset_global_preferred_not_post(self): self.create_cas() resp = self.app.get( '/cas/unset-global-preferred', expect_errors=True) self.assertEqual(405, resp.status_int) def test_should_get_projects(self): self.create_cas() resp = self.app.get( '/cas/{0}/projects'.format(self.project_ca_ids[0])) self.assertEqual( self.project.external_id, resp.namespace['projects'][0]) def test_should_get_no_projects(self): self.create_cas() resp = self.app.get('/cas/{0}/projects'.format(self.selected_ca_id)) self.assertEqual([], resp.namespace['projects']) def 
test_should_raise_get_projects_ca_not_found(self): self.create_cas() resp = self.app.get( '/cas/bogus_ca/projects'.format(self.project_ca_ids[0]), expect_errors=True) self.assertEqual(404, resp.status_int) @mock.patch('barbican.tasks.certificate_resources.create_subordinate_ca') def test_should_create_subca(self, mocked_task): self.create_cas() self.create_subca_request(self.selected_ca_id) mocked_task.return_value = models.CertificateAuthority( self.parsed_subca) resp = self.app.post_json( '/cas', self.subca_request, expect_errors=False) self.assertEqual(201, resp.status_int) def test_should_raise_delete_subca_not_found(self): self.create_cas() resp = self.app.delete('/cas/foobar', expect_errors=True) self.assertEqual(404, resp.status_int) @mock.patch('barbican.tasks.certificate_resources.delete_subordinate_ca') def test_should_delete_subca(self, mocked_task): self.create_cas() resp = self.app.delete('/cas/' + self.subca.id) mocked_task.assert_called_once_with(self.project_id, self.subca) self.assertEqual(204, resp.status_int) @mock.patch('barbican.tasks.certificate_resources.delete_subordinate_ca') def test_should_raise_delete_not_a_subca(self, mocked_task): self.create_cas() mocked_task.side_effect = exception.CannotDeleteBaseCA() resp = self.app.delete('/cas/' + self.subca.id, expect_errors=True) mocked_task.assert_called_once_with(self.project_id, self.subca) self.assertEqual(403, resp.status_int) @mock.patch('barbican.tasks.certificate_resources.delete_subordinate_ca') def test_should_raise_delete_not_authorized(self, mocked_task): self.create_cas() mocked_task.side_effect = exception.UnauthorizedSubCA() resp = self.app.delete('/cas/' + self.subca.id, expect_errors=True) mocked_task.assert_called_once_with(self.project_id, self.subca) self.assertEqual(403, resp.status_int) def create_subca_request(self, parent_ca_id): self.subca_request = { 'name': "Subordinate CA", 'subject_dn': 'cn=subordinate ca signing cert, o=example.com', 'parent_ca_ref': 
"https://localhost:9311/cas/" + parent_ca_id } self.parsed_subca = { 'plugin_name': self.plugin_name, 'plugin_ca_id': self.plugin_ca_id + '_subca_id', 'name': self.plugin_name, 'description': 'Subordinate CA', 'ca_signing_certificate': 'ZZZZZ...Subordinate...', 'intermediates': 'YYYYY...subordinate...', 'parent_ca_id': parent_ca_id } def create_cas(self, set_project_cas=True): self.project = res.get_or_create_project(self.project_id) self.global_project = res.get_or_create_global_preferred_project() project_repo.save(self.project) self.project_ca_ids = [] self.plugin_name = 'default_plugin' self.plugin_ca_id = 'default_plugin_ca_id_' self.ca_id = "id1" self.num_cas = 10 self.offset = 2 self.limit = 4 self.params = {'offset': self.offset, 'limit': self.limit} self._do_create_cas(set_project_cas) # create subca for DELETE testing parsed_ca = { 'plugin_name': self.plugin_name, 'plugin_ca_id': self.plugin_ca_id + "subca 1", 'name': self.plugin_name, 'description': 'Sub CA for default plugin', 'ca_signing_certificate': 'ZZZZZ' + "sub ca1", 'intermediates': 'YYYYY' + "sub ca1", 'project_id': self.project.id, 'creator_id': 'user12345' } ca = models.CertificateAuthority(parsed_ca) ca_repo.create_from(ca) ca_repo.save(ca) self.subca = ca self.num_cas += 1 def _do_create_cas(self, set_project_cas): for ca_id in moves.range(self.num_cas): parsed_ca = { 'plugin_name': self.plugin_name, 'plugin_ca_id': self.plugin_ca_id + str(ca_id), 'name': self.plugin_name, 'description': 'Master CA for default plugin', 'ca_signing_certificate': 'ZZZZZ' + str(ca_id), 'intermediates': 'YYYYY' + str(ca_id) } ca = models.CertificateAuthority(parsed_ca) ca_repo.create_from(ca) ca_repo.save(ca) if ca_id == 1: # set global preferred ca pref_ca = models.PreferredCertificateAuthority( self.global_project.id, ca.id) preferred_ca_repo.create_from(pref_ca) preferred_ca_repo.save(pref_ca) self.global_preferred_ca = ca if ca_id == 2 and set_project_cas: # set project CA project_ca = 
models.ProjectCertificateAuthority( self.project.id, ca.id) project_ca_repo.create_from(project_ca) project_ca_repo.save(project_ca) self.project_ca_ids.append(ca.id) if ca_id == 3 and set_project_cas: # set project preferred CA project_ca = models.ProjectCertificateAuthority( self.project.id, ca.id) project_ca_repo.create_from(project_ca) project_ca_repo.save(project_ca) self.project_ca_ids.append(ca.id) pref_ca = models.PreferredCertificateAuthority( self.project.id, ca.id) preferred_ca_repo.create_from(pref_ca) preferred_ca_repo.save(pref_ca) self.preferred_ca = ca if ca_id == 4: # set ca for testing GETs for a single CA self.selected_ca_id = ca.id self.selected_plugin_ca_id = self.plugin_ca_id + str(ca_id) self.selected_signing_cert = 'ZZZZZ' + str(ca_id) self.selected_intermediates = 'YYYYY' + str(ca_id) def _create_url(self, external_project_id, offset_arg=None, limit_arg=None): if limit_arg: offset = int(offset_arg) limit = int(limit_arg) return '/cas?limit={0}&offset={1}'.format( limit, offset) else: return '/cas' barbican-2.0.0/barbican/tests/api/controllers/test_versions.py0000664000567000056710000000361712701405673025732 0ustar jenkinsjenkins00000000000000# Copyright (c) 2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
from barbican.api import controllers from barbican.tests import utils class WhenTestingVersionsResource(utils.BarbicanAPIBaseTestCase): root_controller = controllers.versions.VersionsController() def test_should_return_multiple_choices_on_get(self): resp = self.app.get('/') self.assertEqual(300, resp.status_int) def test_should_return_multiple_choices_on_get_if_json_accept_header(self): headers = {'Accept': 'application/json'} resp = self.app.get('/', headers=headers) self.assertEqual(300, resp.status_int) def test_should_redirect_if_json_home_accept_header_present(self): headers = {'Accept': 'application/json-home'} resp = self.app.get('/', headers=headers) self.assertEqual(302, resp.status_int) def test_should_return_version_json(self): resp = self.app.get('/') versions_response = resp.json['versions']['values'] v1_info = versions_response[0] # NOTE(jaosorior): I used assertIn instead of assertEqual because we # might start using decimal numbers in the future. So when that happens # this test will still be valid. self.assertIn('v1', v1_info['id']) self.assertEqual(1, len(v1_info['media-types'])) self.assertEqual('application/json', v1_info['media-types'][0]['base']) barbican-2.0.0/barbican/tests/api/test_resources.py0000664000567000056710000012324012701405673023521 0ustar jenkinsjenkins00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
""" This test module focuses on typical-flow business logic tests with the API resource classes. For RBAC tests of these classes, see the 'resources_policy_test.py' module. """ import mimetypes import mock import pecan from testtools import testcase import webtest from barbican import api from barbican.api import app from barbican.api import controllers from barbican.common import exception as excep from barbican.common import hrefs from barbican.common import utils as barbican_utils import barbican.context from barbican.model import models from barbican.tests import utils def get_barbican_env(external_project_id): """Create and return a barbican.context for use with the RBAC decorator Injects the provided external_project_id. """ kwargs = {'roles': None, 'user': None, 'project': external_project_id, 'is_admin': True} ctx = barbican.context.RequestContext(**kwargs) ctx.policy_enforcer = None barbican_env = {'barbican.context': ctx} return barbican_env def create_secret(id_ref="id", name="name", algorithm=None, bit_length=None, mode=None, encrypted_datum=None, content_type=None): """Generate a Secret entity instance.""" info = {'id': id_ref, 'name': name, 'algorithm': algorithm, 'bit_length': bit_length, 'mode': mode} secret = models.Secret(info) secret.id = id_ref if encrypted_datum: secret.encrypted_data = [encrypted_datum] if content_type: content_meta = models.SecretStoreMetadatum('content_type', content_type) secret.secret_store_metadata['content_type'] = content_meta return secret def create_order_with_meta(id_ref="id", order_type="certificate", meta={}, status='PENDING'): """Generate an Order entity instance with Metadata.""" order = models.Order() order.id = id_ref order.type = order_type order.meta = meta order.status = status return order def validate_datum(test, datum): test.assertIsNone(datum.kek_meta_extended) test.assertIsNotNone(datum.kek_meta_project) test.assertTrue(datum.kek_meta_project.bind_completed) 
test.assertIsNotNone(datum.kek_meta_project.plugin_name) test.assertIsNotNone(datum.kek_meta_project.kek_label) def create_container(id_ref, project_id=None, external_project_id=None): """Generate a Container entity instance.""" container = models.Container() container.id = id_ref container.name = 'test name' container.type = 'rsa' container_secret = models.ContainerSecret() container_secret.container_id = id container_secret.secret_id = '123' container.container_secrets.append(container_secret) if project_id: project = models.Project() project.id = project_id project.external_id = external_project_id container.project = project return container def create_consumer(container_id, project_id, id_ref): """Generate a ContainerConsumerMetadatum entity instance.""" data = { 'name': 'test name', 'URL': 'http://test/url' } consumer = models.ContainerConsumerMetadatum(container_id, project_id, data) consumer.id = id_ref return consumer class SecretAllowAllMimeTypesDecoratorTest(utils.BaseTestCase): def setUp(self): super(SecretAllowAllMimeTypesDecoratorTest, self).setUp() self.mimetype_values = set(mimetypes.types_map.values()) @pecan.expose(generic=True) @barbican_utils.allow_all_content_types def _empty_pecan_exposed_function(self): pass def _empty_function(self): pass def test_mimetypes_successfully_added_to_mocked_function(self): empty_function = mock.MagicMock() empty_function._pecan = {} func = barbican_utils.allow_all_content_types(empty_function) cfg = func._pecan self.assertEqual(len(self.mimetype_values), len(cfg['content_types'])) def test_mimetypes_successfully_added_to_pecan_exposed_function(self): cfg = self._empty_pecan_exposed_function._pecan self.assertEqual(len(self.mimetype_values), len(cfg['content_types'])) def test_decorator_raises_if_function_not_pecan_exposed(self): self.assertRaises(AttributeError, barbican_utils.allow_all_content_types, self._empty_function) class FunctionalTest(utils.BaseTestCase, utils.MockModelRepositoryMixin, 
testcase.WithAttributes): def setUp(self): super(FunctionalTest, self).setUp() root = self.root config = {'app': {'root': root}} pecan.set_config(config, overwrite=True) self.app = webtest.TestApp(pecan.make_app(root)) def tearDown(self): super(FunctionalTest, self).tearDown() pecan.set_config({}, overwrite=True) @property def root(self): return controllers.versions.VersionController() class BaseSecretsResource(FunctionalTest): """Base test class for the Secrets resource.""" def setUp(self): super(BaseSecretsResource, self).setUp() self.app = webtest.TestApp(app.build_wsgi_app(self.root)) self.app.extra_environ = get_barbican_env(self.external_project_id) @property def root(self): self._init() class RootController(object): secrets = controllers.secrets.SecretsController() return RootController() def _init(self, payload=b'not-encrypted', payload_content_type='text/plain', payload_content_encoding=None): self.name = 'name' self.payload = payload self.payload_content_type = payload_content_type self.payload_content_encoding = payload_content_encoding self.secret_algorithm = 'AES' self.secret_bit_length = 256 self.secret_mode = 'CBC' self.secret_req = {'name': self.name, 'algorithm': self.secret_algorithm, 'bit_length': self.secret_bit_length, 'creator_id': None, 'mode': self.secret_mode} if payload: self.secret_req['payload'] = payload if payload_content_type: self.secret_req['payload_content_type'] = payload_content_type if payload_content_encoding: self.secret_req['payload_content_encoding'] = ( payload_content_encoding) # Set up mocked project self.external_project_id = 'keystone1234' self.project_entity_id = 'tid1234' self.project = models.Project() self.project.id = self.project_entity_id self.project.external_id = self.external_project_id # Set up mocked project repo self.project_repo = mock.MagicMock() self.project_repo.find_by_external_project_id.return_value = ( self.project) self.setup_project_repository_mock(self.project_repo) # Set up mocked secret 
self.secret = models.Secret() self.secret.id = utils.generate_test_uuid(tail_value=1) # Set up mocked secret repo self.secret_repo = mock.MagicMock() self.secret_repo.create_from.return_value = self.secret self.setup_secret_repository_mock(self.secret_repo) # Set up mocked encrypted datum repo self.datum_repo = mock.MagicMock() self.datum_repo.create_from.return_value = None self.setup_encrypted_datum_repository_mock(self.datum_repo) # Set up mocked kek datum self.kek_datum = models.KEKDatum() self.kek_datum.kek_label = "kek_label" self.kek_datum.bind_completed = False self.kek_datum.algorithm = '' self.kek_datum.bit_length = 0 self.kek_datum.mode = '' self.kek_datum.plugin_meta = '' # Set up mocked kek datum repo self.kek_repo = mock.MagicMock() self.kek_repo.find_or_create_kek_datum.return_value = self.kek_datum self.setup_kek_datum_repository_mock(self.kek_repo) # Set up mocked secret meta repo self.setup_secret_meta_repository_mock() # Set up mocked transport key self.transport_key = models.TransportKey( 'default_plugin_name', 'XXXABCDEF') self.transport_key_id = 'tkey12345' self.tkey_url = hrefs.convert_transport_key_to_href( self.transport_key.id) # Set up mocked transport key self.setup_transport_key_repository_mock() class WhenGettingPuttingOrDeletingSecretUsingSecretResource(FunctionalTest): def setUp(self): super( WhenGettingPuttingOrDeletingSecretUsingSecretResource, self ).setUp() self.app = webtest.TestApp(app.build_wsgi_app(self.root)) self.app.extra_environ = get_barbican_env(self.external_project_id) @property def root(self): self._init() class RootController(object): secrets = controllers.secrets.SecretsController() return RootController() def _init(self): self.project_id = 'projectid1234' self.external_project_id = 'keystone1234' self.name = 'name1234' secret_id = utils.generate_test_uuid(tail_value=1) datum_id = "iddatum1" kek_id = "idkek1" self.secret_algorithm = "AES" self.secret_bit_length = 256 self.secret_mode = "CBC" self.kek_project = 
models.KEKDatum() self.kek_project.id = kek_id self.kek_project.active = True self.kek_project.bind_completed = False self.kek_project.kek_label = "kek_label" self.datum = models.EncryptedDatum() self.datum.id = datum_id self.datum.secret_id = secret_id self.datum.kek_id = kek_id self.datum.kek_meta_project = self.kek_project self.datum.content_type = "text/plain" self.datum.cypher_text = "aaaa" # base64 value. self.secret = create_secret(id_ref=secret_id, name=self.name, algorithm=self.secret_algorithm, bit_length=self.secret_bit_length, mode=self.secret_mode, encrypted_datum=self.datum, content_type=self.datum.content_type) self.secret.secret_acls = [] self.secret.project = mock.MagicMock() self.secret.project.external_id = self.external_project_id # Set up mocked project self.project = models.Project() self.project.id = self.project_id self.project.external_id = self.external_project_id # Set up mocked project repo self.project_repo = mock.MagicMock() self.project_repo.get.return_value = self.project self.project_repo.find_by_external_project_id.return_value = ( self.project) self.setup_project_repository_mock(self.project_repo) # Set up mocked secret repo self.secret_repo = mock.Mock() self.secret_repo.get = mock.Mock(return_value=self.secret) self.secret_repo.get_secret_by_id = mock.Mock(return_value=self.secret) self.secret_repo.delete_entity_by_id = mock.Mock(return_value=None) self.setup_secret_repository_mock(self.secret_repo) # Set up mocked encrypted datum repo self.datum_repo = mock.MagicMock() self.datum_repo.create_from.return_value = None self.setup_encrypted_datum_repository_mock(self.datum_repo) # Set up mocked kek datum repo self.setup_kek_datum_repository_mock() # Set up mocked secret meta repo self.secret_meta_repo = mock.MagicMock() self.secret_meta_repo.get_metadata_for_secret.return_value = None self.setup_secret_meta_repository_mock(self.secret_meta_repo) # Set up mocked transport key self.transport_key_model = models.TransportKey( 
"default_plugin", "my transport key") # Set up mocked transport key repo self.transport_key_repo = mock.MagicMock() self.transport_key_repo.get.return_value = self.transport_key_model self.setup_transport_key_repository_mock(self.transport_key_repo) self.transport_key_id = 'tkey12345' @mock.patch('barbican.plugin.resources.get_transport_key_id_for_retrieval') def test_should_get_secret_as_json(self, mock_get_transport_key): mock_get_transport_key.return_value = None resp = self.app.get( '/secrets/{0}/'.format(self.secret.id), headers={'Accept': 'application/json', 'Accept-Encoding': 'gzip'} ) self.secret_repo.get_secret_by_id.assert_called_once_with( entity_id=self.secret.id, suppress_exception=True) self.assertEqual(200, resp.status_int) self.assertNotIn('content_encodings', resp.namespace) self.assertIn('content_types', resp.namespace) self.assertIn(self.datum.content_type, resp.namespace['content_types'].values()) self.assertNotIn('mime_type', resp.namespace) @testcase.attr('deprecated') @mock.patch('barbican.plugin.resources.get_secret') def test_should_get_secret_as_plain_based_on_content_type(self, mock_get_secret): data = 'unencrypted_data' mock_get_secret.return_value = data resp = self.app.get( '/secrets/{0}/payload/'.format(self.secret.id), headers={'Accept': 'text/plain'} ) self.secret_repo.get_secret_by_id.assert_called_once_with( entity_id=self.secret.id, suppress_exception=True) self.assertEqual(200, resp.status_int) self.assertEqual(data, resp.body) mock_get_secret.assert_called_once_with( 'text/plain', self.secret, self.project, None, None ) @mock.patch('barbican.plugin.resources.get_secret') def test_should_get_secret_as_plain_with_twsk(self, mock_get_secret): data = 'encrypted_data' mock_get_secret.return_value = data twsk = "trans_wrapped_session_key" resp = self.app.get( ('/secrets/{0}/payload/' '?trans_wrapped_session_key={1}&transport_key_id={2}') .format(self.secret.id, twsk, self.transport_key_id), headers={'Accept': 'text/plain'} ) 
self.secret_repo.get_secret_by_id.assert_called_once_with( entity_id=self.secret.id, suppress_exception=True) self.assertEqual(200, resp.status_int) self.assertEqual(data, resp.body) mock_get_secret.assert_called_once_with( 'text/plain', self.secret, self.project, twsk, self.transport_key_model.transport_key ) @testcase.attr('deprecated') @mock.patch('barbican.plugin.resources.get_secret') def test_should_get_secret_as_plain_with_twsk_based_on_content_type( self, mock_get_secret): data = 'encrypted_data' mock_get_secret.return_value = data twsk = "trans_wrapped_session_key" resp = self.app.get( ('/secrets/{0}/' '?trans_wrapped_session_key={1}&transport_key_id={2}') .format(self.secret.id, twsk, self.transport_key_id), headers={'Accept': 'text/plain'} ) self.secret_repo.get_secret_by_id.assert_called_once_with( entity_id=self.secret.id, suppress_exception=True) self.assertEqual(200, resp.status_int) self.assertEqual(data, resp.body) mock_get_secret.assert_called_once_with( 'text/plain', self.secret, self.project, twsk, self.transport_key_model.transport_key ) @mock.patch('barbican.plugin.resources.get_secret') def test_should_throw_exception_for_get_when_twsk_but_no_tkey_id( self, mock_get_secret): data = 'encrypted_data' mock_get_secret.return_value = data twsk = "trans_wrapped_session_key" resp = self.app.get( '/secrets/{0}/payload/?trans_wrapped_session_key={1}'.format( self.secret.id, twsk), headers={'Accept': 'text/plain'}, expect_errors=True ) self.secret_repo.get_secret_by_id.assert_called_once_with( entity_id=self.secret.id, suppress_exception=True) self.assertEqual(400, resp.status_int) @testcase.attr('deprecated') @mock.patch('barbican.plugin.resources.get_secret') def test_should_throw_exception_for_get_when_twsk_but_no_tkey_id_old_way( self, mock_get_secret): data = 'encrypted_data' mock_get_secret.return_value = data twsk = "trans_wrapped_session_key" resp = self.app.get( '/secrets/{0}/payload/?trans_wrapped_session_key={1}'.format( self.secret.id, 
twsk), headers={'Accept': 'text/plain'}, expect_errors=True ) self.secret_repo.get_secret_by_id.assert_called_once_with( entity_id=self.secret.id, suppress_exception=True) self.assertEqual(400, resp.status_int) @mock.patch('barbican.plugin.resources.get_transport_key_id_for_retrieval') def test_should_get_secret_meta_for_binary(self, mock_get_transport_key): mock_get_transport_key.return_value = None self.datum.content_type = "application/octet-stream" self.secret.secret_store_metadata['content_type'].value = ( self.datum.content_type ) self.datum.cypher_text = 'aaaa' resp = self.app.get( '/secrets/{0}/'.format(self.secret.id), headers={'Accept': 'application/json', 'Accept-Encoding': 'gzip'} ) self.secret_repo.get_secret_by_id.assert_called_once_with( entity_id=self.secret.id, suppress_exception=True) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.namespace) self.assertIn('content_types', resp.namespace) self.assertIn(self.datum.content_type, resp.namespace['content_types'].values()) @mock.patch('barbican.plugin.resources.get_transport_key_id_for_retrieval') def test_should_get_secret_meta_for_binary_with_tkey( self, mock_get_transport_key_id): mock_get_transport_key_id.return_value = self.transport_key_id self.datum.content_type = "application/octet-stream" self.secret.secret_store_metadata['content_type'].value = ( self.datum.content_type ) self.datum.cypher_text = 'aaaa' resp = self.app.get( '/secrets/{0}/?transport_key_needed=true'.format( self.secret.id), headers={'Accept': 'application/json', 'Accept-Encoding': 'gzip'} ) self.secret_repo.get_secret_by_id.assert_called_once_with( entity_id=self.secret.id, suppress_exception=True) self.assertEqual(200, resp.status_int) self.assertIsNotNone(resp.namespace) self.assertIn('content_types', resp.namespace) self.assertIn(self.datum.content_type, resp.namespace['content_types'].values()) self.assertIn('transport_key_ref', resp.namespace) self.assertEqual( 
hrefs.convert_transport_key_to_href(self.transport_key_id), resp.namespace['transport_key_ref'] ) @testcase.attr('deprecated') @mock.patch('barbican.plugin.resources.get_secret') def test_should_get_secret_as_binary_based_on_content_type( self, mock_get_secret): data = 'unencrypted_data' mock_get_secret.return_value = data self.datum.content_type = "application/octet-stream" self.datum.cypher_text = 'aaaa' resp = self.app.get( '/secrets/{0}/'.format(self.secret.id), headers={ 'Accept': 'application/octet-stream', 'Accept-Encoding': 'gzip' } ) self.assertEqual(data, resp.body) mock_get_secret.assert_called_once_with( 'application/octet-stream', self.secret, self.project, None, None ) @mock.patch('barbican.plugin.resources.store_secret') def test_should_put_secret_as_plain_with_tkey_id(self, mock_store_secret): self.secret.encrypted_data = [] self.secret.secret_store_metadata = {} resp = self.app.put( '/secrets/{0}/?transport_key_id={1}'.format( self.secret.id, self.transport_key_id), 'plain text', headers={'Accept': 'text/plain', 'Content-Type': 'text/plain'}, ) self.assertEqual(204, resp.status_int) mock_store_secret.assert_called_once_with( unencrypted_raw='plain text', content_type_raw='text/plain', content_encoding=None, secret_model=self.secret, project_model=self.project, transport_key_id=self.transport_key_id ) @mock.patch('barbican.plugin.resources.store_secret') def test_should_put_secret_as_binary_with_tkey_id(self, mock_store_secret): self.secret.encrypted_data = [] self.secret.secret_store_metadata = {} resp = self.app.put( '/secrets/{0}/?transport_key_id={1}'.format( self.secret.id, self.transport_key_id), 'plain text', headers={ 'Accept': 'text/plain', 'Content-Type': 'application/octet-stream' }, ) self.assertEqual(204, resp.status_int) mock_store_secret.assert_called_once_with( unencrypted_raw='plain text', content_type_raw='application/octet-stream', content_encoding=None, secret_model=self.secret, project_model=self.project, 
transport_key_id=self.transport_key_id ) class WhenAddingNavigationHrefs(utils.BaseTestCase): def setUp(self): super(WhenAddingNavigationHrefs, self).setUp() self.resource_name = 'orders' self.external_project_id = '12345' self.num_elements = 100 self.data = {} def test_add_nav_hrefs_adds_next_only(self): offset = 0 limit = 10 data_with_hrefs = hrefs.add_nav_hrefs( self.resource_name, offset, limit, self.num_elements, self.data) self.assertNotIn('previous', data_with_hrefs) self.assertIn('next', data_with_hrefs) def test_add_nav_hrefs_adds_both_next_and_previous(self): offset = 10 limit = 10 data_with_hrefs = hrefs.add_nav_hrefs( self.resource_name, offset, limit, self.num_elements, self.data) self.assertIn('previous', data_with_hrefs) self.assertIn('next', data_with_hrefs) def test_add_nav_hrefs_adds_previous_only(self): offset = 90 limit = 10 data_with_hrefs = hrefs.add_nav_hrefs( self.resource_name, offset, limit, self.num_elements, self.data) self.assertIn('previous', data_with_hrefs) self.assertNotIn('next', data_with_hrefs) class TestingJsonSanitization(utils.BaseTestCase): def test_json_sanitization_without_array(self): json_without_array = {"name": "name", "algorithm": "AES", "payload_content_type": " text/plain ", "mode": "CBC", "bit_length": 256, "payload": "not-encrypted"} self.assertTrue(json_without_array['payload_content_type'] .startswith(' '), "whitespace should be there") self.assertTrue(json_without_array['payload_content_type'] .endswith(' '), "whitespace should be there") api.strip_whitespace(json_without_array) self.assertFalse(json_without_array['payload_content_type'] .startswith(' '), "whitespace should be gone") self.assertFalse(json_without_array['payload_content_type'] .endswith(' '), "whitespace should be gone") def test_json_sanitization_with_array(self): json_with_array = {"name": "name", "algorithm": "AES", "payload_content_type": "text/plain", "mode": "CBC", "bit_length": 256, "payload": "not-encrypted", "an-array": [{"name": " item 
1"}, {"name": "item2 "}]} self.assertTrue(json_with_array['an-array'][0]['name'] .startswith(' '), "whitespace should be there") self.assertTrue(json_with_array['an-array'][1]['name'] .endswith(' '), "whitespace should be there") api.strip_whitespace(json_with_array) self.assertFalse(json_with_array['an-array'][0]['name'] .startswith(' '), "whitespace should be gone") self.assertFalse(json_with_array['an-array'][1]['name'] .endswith(' '), "whitespace should be gone") class WhenCreatingConsumersUsingConsumersResource(FunctionalTest): def setUp(self): super( WhenCreatingConsumersUsingConsumersResource, self ).setUp() self.app = webtest.TestApp(app.build_wsgi_app(self.root)) self.app.extra_environ = get_barbican_env(self.external_project_id) @property def root(self): self._init() class RootController(object): containers = controllers.containers.ContainersController() return RootController() def _init(self): self.name = 'test container name' self.type = 'generic' self.secret_refs = [ { 'name': 'test secret 1', 'secret_ref': '1231' }, { 'name': 'test secret 2', 'secret_ref': '1232' }, { 'name': 'test secret 3', 'secret_ref': '1233' } ] self.consumer_ref = { 'name': 'test_consumer1', 'URL': 'http://consumer/1' } self.project_internal_id = 'projectid1234' self.external_project_id = 'keystoneid1234' # Set up mocked project self.project = models.Project() self.project.id = self.project_internal_id self.project.external_id = self.external_project_id # Set up mocked project repo self.project_repo = mock.MagicMock() self.project_repo.get.return_value = self.project self.setup_project_repository_mock(self.project_repo) # Set up mocked quota enforcer self.quota_patch = mock.patch( 'barbican.common.quota.QuotaEnforcer.enforce', return_value=None) self.quota_patch.start() self.addCleanup(self.quota_patch.stop) # Set up mocked container self.container = create_container( id_ref='id1', project_id=self.project_internal_id, external_project_id=self.external_project_id) # Set up mocked 
container repo self.container_repo = mock.MagicMock() self.container_repo.get.return_value = self.container self.container_repo.get_container_by_id.return_value = self.container self.setup_container_repository_mock(self.container_repo) # Set up secret repo self.secret_repo = mock.MagicMock() self.secret_repo.create_from.return_value = None self.setup_secret_repository_mock(self.secret_repo) # Set up container consumer repo self.consumer_repo = mock.MagicMock() self.consumer_repo.create_from.return_value = None self.setup_container_consumer_repository_mock(self.consumer_repo) self.container_req = {'name': self.name, 'type': self.type, 'secret_refs': self.secret_refs} def test_should_add_new_consumer(self): resp = self.app.post_json( '/containers/{0}/consumers/'.format(self.container.id), self.consumer_ref ) self.assertEqual(200, resp.status_int) self.assertNotIn(self.external_project_id, resp.headers['Location']) args, kwargs = self.consumer_repo.create_or_update_from.call_args consumer = args[0] self.assertIsInstance(consumer, models.ContainerConsumerMetadatum) def test_should_fail_consumer_bad_json(self): resp = self.app.post( '/containers/{0}/consumers/'.format(self.container.id), '', expect_errors=True ) self.assertEqual(415, resp.status_int) def test_should_404_consumer_bad_container_id(self): self.container_repo.get.side_effect = excep.NotFound() resp = self.app.post_json( '/containers/{0}/consumers/'.format('bad_id'), self.consumer_ref, expect_errors=True ) self.container_repo.get.side_effect = None self.assertEqual(404, resp.status_int) def test_should_raise_exception_when_container_ref_doesnt_exist(self): self.container_repo.get.return_value = None resp = self.app.post_json( '/containers/{0}/consumers/'.format(self.container.id), self.consumer_ref, expect_errors=True ) self.assertEqual(404, resp.status_int) class WhenGettingOrDeletingConsumersUsingConsumerResource(FunctionalTest): def setUp(self): super( WhenGettingOrDeletingConsumersUsingConsumerResource, 
self ).setUp() self.app = webtest.TestApp(app.build_wsgi_app(self.root)) self.app.extra_environ = get_barbican_env(self.external_project_id) @property def root(self): self._init() class RootController(object): containers = controllers.containers.ContainersController() return RootController() def _init(self): self.external_project_id = 'keystoneid1234' self.project_internal_id = 'projectid1234' # Set up mocked project self.project = models.Project() self.project.id = self.project_internal_id self.project.external_id = self.external_project_id # Set up mocked project repo self.project_repo = mock.MagicMock() self.project_repo.get.return_value = self.project self.setup_project_repository_mock(self.project_repo) # Set up mocked container self.container = create_container( id_ref='id1', project_id=self.project_internal_id, external_project_id=self.external_project_id) # Set up mocked consumers self.consumer = create_consumer(self.container.id, self.project_internal_id, id_ref='id2') self.consumer2 = create_consumer(self.container.id, self.project_internal_id, id_ref='id3') self.consumer_ref = { 'name': self.consumer.name, 'URL': self.consumer.URL } # Set up mocked container repo self.container_repo = mock.MagicMock() self.container_repo.get.return_value = self.container self.container_repo.get_container_by_id.return_value = self.container self.setup_container_repository_mock(self.container_repo) # Set up mocked container consumer repo self.consumer_repo = mock.MagicMock() self.consumer_repo.get_by_values.return_value = self.consumer self.consumer_repo.delete_entity_by_id.return_value = None self.setup_container_consumer_repository_mock(self.consumer_repo) # Set up mocked secret repo self.setup_secret_repository_mock() def test_should_get_consumer(self): ret_val = ([self.consumer], 0, 0, 1) self.consumer_repo.get_by_container_id.return_value = ret_val resp = self.app.get('/containers/{0}/consumers/'.format( self.container.id )) self.assertEqual(200, resp.status_int) 
self.consumer_repo.get_by_container_id.assert_called_once_with( self.container.id, limit_arg=None, offset_arg=0, suppress_exception=True ) self.assertEqual(self.consumer.name, resp.json['consumers'][0]['name']) self.assertEqual(self.consumer.URL, resp.json['consumers'][0]['URL']) def test_should_404_with_bad_container_id(self): self.container_repo.get.side_effect = excep.NotFound() resp = self.app.get('/containers/{0}/consumers/'.format( 'bad_id' ), expect_errors=True) self.container_repo.get.side_effect = None self.assertEqual(404, resp.status_int) def test_should_get_consumer_by_id(self): self.consumer_repo.get.return_value = self.consumer resp = self.app.get('/containers/{0}/consumers/{1}/'.format( self.container.id, self.consumer.id )) self.assertEqual(200, resp.status_int) def test_should_404_with_bad_consumer_id(self): self.consumer_repo.get.return_value = None resp = self.app.get('/containers/{0}/consumers/{1}/'.format( self.container.id, 'bad_id' ), expect_errors=True) self.assertEqual(404, resp.status_int) def test_should_get_no_consumers(self): self.consumer_repo.get_by_container_id.return_value = ([], 0, 0, 0) resp = self.app.get('/containers/{0}/consumers/'.format( self.container.id )) self.assertEqual(200, resp.status_int) def test_should_delete_consumer(self): self.app.delete_json('/containers/{0}/consumers/'.format( self.container.id ), self.consumer_ref) self.consumer_repo.delete_entity_by_id.assert_called_once_with( self.consumer.id, self.external_project_id) def test_should_fail_deleting_consumer_bad_json(self): resp = self.app.delete( '/containers/{0}/consumers/'.format(self.container.id), '', expect_errors=True ) self.assertEqual(415, resp.status_int) def test_should_404_on_delete_when_consumer_not_found(self): old_return = self.consumer_repo.get_by_values.return_value self.consumer_repo.get_by_values.return_value = None resp = self.app.delete_json('/containers/{0}/consumers/'.format( self.container.id ), self.consumer_ref, expect_errors=True) 
self.consumer_repo.get_by_values.return_value = old_return self.assertEqual(404, resp.status_int) # Error response should have json content type self.assertEqual("application/json", resp.content_type) def test_should_404_on_delete_when_consumer_not_found_later(self): self.consumer_repo.delete_entity_by_id.side_effect = excep.NotFound() resp = self.app.delete_json('/containers/{0}/consumers/'.format( self.container.id ), self.consumer_ref, expect_errors=True) self.consumer_repo.delete_entity_by_id.side_effect = None self.assertEqual(404, resp.status_int) # Error response should have json content type self.assertEqual("application/json", resp.content_type) def test_should_delete_consumers_on_container_delete(self): consumers = [self.consumer, self.consumer2] ret_val = (consumers, 0, 0, 1) self.consumer_repo.get_by_container_id.return_value = ret_val resp = self.app.delete( '/containers/{0}/'.format(self.container.id) ) self.assertEqual(204, resp.status_int) # Verify consumers were deleted calls = [] for consumer in consumers: calls.append(mock.call(consumer.id, self.external_project_id)) self.consumer_repo.delete_entity_by_id.assert_has_calls( calls, any_order=True ) def test_should_pass_on_container_delete_with_missing_consumers(self): consumers = [self.consumer, self.consumer2] ret_val = (consumers, 0, 0, 1) self.consumer_repo.get_by_container_id.return_value = ret_val self.consumer_repo.delete_entity_by_id.side_effect = excep.NotFound resp = self.app.delete( '/containers/{0}/'.format(self.container.id) ) self.assertEqual(204, resp.status_int) # Verify consumers were deleted calls = [] for consumer in consumers: calls.append(mock.call(consumer.id, self.external_project_id)) self.consumer_repo.delete_entity_by_id.assert_has_calls( calls, any_order=True ) class WhenPerformingUnallowedOperationsOnConsumers(FunctionalTest): def setUp(self): super( WhenPerformingUnallowedOperationsOnConsumers, self ).setUp() self.app = webtest.TestApp(app.build_wsgi_app(self.root)) 
self.app.extra_environ = get_barbican_env(self.external_project_id) @property def root(self): self._init() class RootController(object): containers = controllers.containers.ContainersController() return RootController() def _init(self): self.name = 'test container name' self.type = 'generic' self.secret_refs = [ { 'name': 'test secret 1', 'secret_ref': '1231' }, { 'name': 'test secret 2', 'secret_ref': '1232' }, { 'name': 'test secret 3', 'secret_ref': '1233' } ] self.consumer_ref = { 'name': 'test_consumer1', 'URL': 'http://consumer/1' } self.external_project_id = 'keystoneid1234' self.project_internal_id = 'projectid1234' # Set up mocked project self.project = models.Project() self.project.id = self.project_internal_id self.project.external_id = self.external_project_id # Set up mocked project repo self.project_repo = mock.MagicMock() self.project_repo.get.return_value = self.project self.setup_project_repository_mock(self.project_repo) # Set up mocked container self.container = create_container( id_ref='id1', project_id=self.project_internal_id, external_project_id=self.external_project_id) # Set up mocked container consumers self.consumer = create_consumer(self.container.id, self.project_internal_id, id_ref='id2') self.consumer2 = create_consumer(self.container.id, self.project_internal_id, id_ref='id3') self.consumer_ref = { 'name': self.consumer.name, 'URL': self.consumer.URL } # Set up container repo self.container_repo = mock.MagicMock() self.container_repo.get.return_value = self.container self.setup_container_repository_mock(self.container_repo) # Set up container consumer repo self.consumer_repo = mock.MagicMock() self.consumer_repo.get_by_values.return_value = self.consumer self.consumer_repo.delete_entity_by_id.return_value = None self.setup_container_consumer_repository_mock(self.consumer_repo) # Set up secret repo self.setup_secret_repository_mock() def test_should_not_allow_put_on_consumers(self): ret_val = ([self.consumer], 0, 0, 1) 
self.consumer_repo.get_by_container_id.return_value = ret_val resp = self.app.put_json( '/containers/{0}/consumers/'.format(self.container.id), self.consumer_ref, expect_errors=True ) self.assertEqual(405, resp.status_int) def test_should_not_allow_post_on_consumer_by_id(self): self.consumer_repo.get.return_value = self.consumer resp = self.app.post_json( '/containers/{0}/consumers/{1}/'.format(self.container.id, self.consumer.id), self.consumer_ref, expect_errors=True ) self.assertEqual(405, resp.status_int) def test_should_not_allow_put_on_consumer_by_id(self): self.consumer_repo.get.return_value = self.consumer resp = self.app.put_json( '/containers/{0}/consumers/{1}/'.format(self.container.id, self.consumer.id), self.consumer_ref, expect_errors=True ) self.assertEqual(405, resp.status_int) def test_should_not_allow_delete_on_consumer_by_id(self): self.consumer_repo.get.return_value = self.consumer resp = self.app.delete( '/containers/{0}/consumers/{1}/'.format(self.container.id, self.consumer.id), expect_errors=True ) self.assertEqual(405, resp.status_int) barbican-2.0.0/barbican/tests/api/middleware/0000775000567000056710000000000012701406024022200 5ustar jenkinsjenkins00000000000000barbican-2.0.0/barbican/tests/api/middleware/__init__.py0000664000567000056710000000000012701405673024310 0ustar jenkinsjenkins00000000000000barbican-2.0.0/barbican/tests/api/middleware/test_context.py0000664000567000056710000000417212701405673025312 0ustar jenkinsjenkins00000000000000# Copyright (c) 2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. 
# See the License for the specific language governing permissions and # limitations under the License. import mock import oslotest.base as oslotest from barbican.api.middleware import context class TestUnauthenticatedContextMiddleware(oslotest.BaseTestCase): def setUp(self): super(TestUnauthenticatedContextMiddleware, self).setUp() self.app = mock.MagicMock() self.middleware = context.UnauthenticatedContextMiddleware(self.app) def test_role_defaults_to_admin(self): request = mock.MagicMock() request.headers = {'X-Project-Id': 'trace'} request.environ = {} with mock.patch('barbican.context.RequestContext') as rc: self.middleware.process_request(request) rc.assert_called_with( project='trace', is_admin=True, user=None, roles=['admin'], request_id=request.request_id, project_domain=None, domain=None, user_domain=None ) def test_role_used_from_header(self): request = mock.MagicMock() request.headers = {'X-Project-Id': 'trace', 'X-Roles': 'something'} request.environ = {} with mock.patch('barbican.context.RequestContext') as rc: self.middleware.process_request(request) rc.assert_called_with( project='trace', is_admin=False, user=None, roles=['something'], request_id=request.request_id, project_domain=None, domain=None, user_domain=None ) barbican-2.0.0/barbican/tests/api/middleware/test_simple.py0000664000567000056710000000161712701405673025120 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
import unittest import mock from barbican.api.middleware import simple class WhenTestingSimpleMiddleware(unittest.TestCase): def setUp(self): self.app = mock.MagicMock() self.middle = simple.SimpleFilter(self.app) self.req = mock.MagicMock() def test_should_process_request(self): self.middle.process_request(self.req) barbican-2.0.0/barbican/tests/api/test_resources_policy.py0000664000567000056710000013665212701405673025113 0ustar jenkinsjenkins00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ This test module focuses on RBAC interactions with the API resource classes. For typical-flow business logic tests of these classes, see the 'resources_test.py' module. """ import os import mock from oslo_policy import policy from webob import exc from barbican.api.controllers import consumers from barbican.api.controllers import containers from barbican.api.controllers import orders from barbican.api.controllers import secrets from barbican.api.controllers import versions from barbican.common import config from barbican import context from barbican.model import models from barbican.tests import utils # Point to the policy.json file located in source control. 
TEST_VAR_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../etc', 'barbican')) CONF = config.new_config() ENFORCER = policy.Enforcer(CONF) class TestableResource(object): def __init__(self, *args, **kwargs): self.controller = self.controller_cls(*args, **kwargs) def on_get(self, req, resp, *args, **kwargs): with mock.patch('pecan.request', req): with mock.patch('pecan.response', resp): return self.controller.on_get(*args, **kwargs) def on_post(self, req, resp, *args, **kwargs): with mock.patch('pecan.request', req): with mock.patch('pecan.response', resp): return self.controller.on_post(*args, **kwargs) def on_put(self, req, resp, *args, **kwargs): with mock.patch('pecan.request', req): with mock.patch('pecan.response', resp): return self.controller.on_put(*args, **kwargs) def on_delete(self, req, resp, *args, **kwargs): with mock.patch('pecan.request', req): with mock.patch('pecan.response', resp): return self.controller.on_delete(*args, **kwargs) class VersionsResource(TestableResource): controller_cls = versions.VersionsController class SecretsResource(TestableResource): controller_cls = secrets.SecretsController class SecretResource(TestableResource): controller_cls = secrets.SecretController class OrdersResource(TestableResource): controller_cls = orders.OrdersController class OrderResource(TestableResource): controller_cls = orders.OrderController class ContainerResource(TestableResource): controller_cls = containers.ContainerController class ConsumersResource(TestableResource): controller_cls = consumers.ContainerConsumersController class ConsumerResource(TestableResource): controller_cls = consumers.ContainerConsumerController class BaseTestCase(utils.BaseTestCase, utils.MockModelRepositoryMixin): def setUp(self): super(BaseTestCase, self).setUp() CONF(args=['--config-dir', TEST_VAR_DIR]) self.policy_enforcer = ENFORCER self.policy_enforcer.load_rules(True) self.resp = mock.MagicMock() def _generate_req(self, roles=None, accept=None, 
content_type=None, user_id=None, project_id=None): """Generate a fake HTTP request with security context added to it.""" req = mock.MagicMock() req.get_param.return_value = None kwargs = { 'user': user_id, 'project': project_id, 'roles': roles or [], 'policy_enforcer': self.policy_enforcer, } req.environ = {} req.environ['barbican.context'] = context.RequestContext(**kwargs) req.content_type = content_type if accept: req.accept.header_value.return_value = accept else: req.accept = None return req def _generate_stream_for_exit(self): """Mock HTTP stream generator, to force RBAC-pass exit. Generate a fake HTTP request stream that forces an IOError to occur, which short circuits API resource processing when RBAC checks under test here pass. """ stream = mock.MagicMock() read = mock.MagicMock(return_value=None, side_effect=IOError()) stream.read = read return stream def _assert_post_rbac_exception(self, exception, role): """Assert that we received the expected RBAC-passed exception.""" self.assertEqual(500, exception.status_int) def _generate_get_error(self): """Falcon exception generator to throw from early-exit mocks. Creates an exception that should be raised by GET tests that pass RBAC. This allows such flows to short-circuit normal post-RBAC processing that is not tested in this module. :return: Python exception that should be raised by repo get methods. """ # The 'Read Error' clause needs to match that asserted in # _assert_post_rbac_exception() above. return exc.HTTPServerError(message='Read Error') def _assert_pass_rbac(self, roles, method_under_test, accept=None, content_type=None, user_id=None, project_id=None): """Assert that RBAC authorization rules passed for the specified roles. :param roles: List of roles to check, one at a time :param method_under_test: The test method to invoke for each role. 
:param accept Optional Accept header to set on the HTTP request :return: None """ for role in roles: self.req = self._generate_req(roles=[role] if role else [], accept=accept, content_type=content_type, user_id=user_id, project_id=project_id) # Force an exception early past the RBAC passing. type(self.req).body = mock.PropertyMock(side_effect=IOError) self.req.body_file = self._generate_stream_for_exit() exception = self.assertRaises(exc.HTTPServerError, method_under_test) self._assert_post_rbac_exception(exception, role) def _assert_fail_rbac(self, roles, method_under_test, accept=None, content_type=None, user_id=None, project_id=None): """Assert that RBAC rules failed for one of the specified roles. :param roles: List of roles to check, one at a time :param method_under_test: The test method to invoke for each role. :param accept Optional Accept header to set on the HTTP request :return: None """ for role in roles: self.req = self._generate_req(roles=[role] if role else [], accept=accept, content_type=content_type, user_id=user_id, project_id=project_id) exception = self.assertRaises(exc.HTTPForbidden, method_under_test) self.assertEqual(403, exception.status_int) class WhenTestingVersionsResource(BaseTestCase): """RBAC tests for the barbican.api.resources.VersionsResource class.""" def setUp(self): super(WhenTestingVersionsResource, self).setUp() self.resource = VersionsResource() def test_rules_should_be_loaded(self): self.assertIsNotNone(self.policy_enforcer.rules) def test_should_pass_get_versions(self): # Can't use base method that short circuits post-RBAC processing here, # as version GET is trivial for role in ['admin', 'observer', 'creator', 'audit']: self.req = self._generate_req(roles=[role] if role else []) self._invoke_on_get() def test_should_pass_get_versions_with_bad_roles(self): self.req = self._generate_req(roles=[None, 'bunkrolehere']) self._invoke_on_get() def test_should_pass_get_versions_with_no_roles(self): self.req = self._generate_req() 
self._invoke_on_get() def test_should_pass_get_versions_multiple_roles(self): self.req = self._generate_req(roles=['admin', 'observer', 'creator', 'audit']) self._invoke_on_get() def _invoke_on_get(self): self.resource.on_get(self.req, self.resp) class WhenTestingSecretsResource(BaseTestCase): """RBAC tests for the barbican.api.resources.SecretsResource class.""" def setUp(self): super(WhenTestingSecretsResource, self).setUp() self.external_project_id = '12345' # Force an error on GET calls that pass RBAC, as we are not testing # such flows in this test module. self.secret_repo = mock.MagicMock() get_by_create_date = mock.MagicMock(return_value=None, side_effect=self ._generate_get_error()) self.secret_repo.get_by_create_date = get_by_create_date self.setup_secret_repository_mock(self.secret_repo) self.setup_encrypted_datum_repository_mock() self.setup_kek_datum_repository_mock() self.setup_project_repository_mock() self.setup_secret_meta_repository_mock() self.setup_transport_key_repository_mock() self.resource = SecretsResource() def test_rules_should_be_loaded(self): self.assertIsNotNone(self.policy_enforcer.rules) def test_should_pass_create_secret(self): self._assert_pass_rbac(['admin', 'creator'], self._invoke_on_post, content_type='application/json') def test_should_raise_create_secret(self): self._assert_fail_rbac([None, 'audit', 'observer', 'bogus'], self._invoke_on_post, content_type='application/json') def test_should_pass_get_secrets(self): self._assert_pass_rbac(['admin', 'observer', 'creator'], self._invoke_on_get, content_type='application/json') def test_should_raise_get_secrets(self): self._assert_fail_rbac([None, 'audit', 'bogus'], self._invoke_on_get, content_type='application/json') def _invoke_on_post(self): self.resource.on_post(self.req, self.resp) def _invoke_on_get(self): self.resource.on_get(self.req, self.resp) class WhenTestingSecretResource(BaseTestCase): """RBAC tests for SecretController class.""" def setUp(self): 
super(WhenTestingSecretResource, self).setUp() self.external_project_id = '12345project' self.secret_id = '12345secret' self.user_id = '123456user' self.creator_user_id = '123456CreatorUser' # Force an error on GET and DELETE calls that pass RBAC, # as we are not testing such flows in this test module. self.secret_repo = mock.MagicMock() fail_method = mock.MagicMock(return_value=None, side_effect=self._generate_get_error()) self.secret_repo.get = fail_method self.secret_repo.delete_entity_by_id = fail_method self.setup_secret_repository_mock(self.secret_repo) self.setup_encrypted_datum_repository_mock() self.setup_kek_datum_repository_mock() self.setup_project_repository_mock() self.setup_secret_meta_repository_mock() self.setup_transport_key_repository_mock() acl_read = models.SecretACL(secret_id=self.secret_id, operation='read', project_access=True, user_ids=[self.user_id, 'anyRandomId']) self.acl_list = [acl_read] secret = mock.MagicMock() secret.secret_acls.__iter__.return_value = self.acl_list secret.project.external_id = self.external_project_id secret.creator_id = self.creator_user_id self.resource = SecretResource(secret) # self.resource.controller.get_acl_tuple = mock.MagicMock( # return_value=(None, None)) def test_rules_should_be_loaded(self): self.assertIsNotNone(self.policy_enforcer.rules) def test_should_pass_decrypt_secret(self): self._assert_pass_rbac(['admin', 'observer', 'creator'], self._invoke_on_get, accept='notjsonaccepttype', content_type='application/json', user_id=self.user_id, project_id=self.external_project_id) def test_should_raise_decrypt_secret(self): self._assert_fail_rbac([None, 'audit', 'bogus'], self._invoke_on_get, accept='notjsonaccepttype') def test_should_pass_decrypt_secret_for_same_project_with_no_acl(self): """Token and secret project needs to be same in no ACL defined case.""" self.acl_list.pop() # remove read acl from default setup self._assert_pass_rbac(['admin', 'observer', 'creator'], self._invoke_on_get, 
accept='notjsonaccepttype', content_type='application/json', user_id=self.user_id, project_id=self.external_project_id) def test_should_raise_decrypt_secret_with_project_access_disabled(self): """Should raise authz error as secret is marked private. As secret is private so project users should not be able to access the secret. Admin project user can still access it. """ self.acl_list.pop() # remove read acl from default setup acl_read = models.SecretACL(secret_id=self.secret_id, operation='read', project_access=False, user_ids=['anyRandomUserX', 'aclUser1']) self.acl_list.append(acl_read) self._assert_fail_rbac(['observer', 'creator', 'audit'], self._invoke_on_get, accept='notjsonaccepttype', content_type='application/json', user_id=self.user_id, project_id=self.external_project_id) def test_pass_decrypt_secret_for_admin_user_project_access_disabled(self): """Should pass authz for admin role user as secret is marked private. Even when secret is private, admin user should still have access to the secret. """ self.acl_list.pop() # remove read acl from default setup acl_read = models.SecretACL(secret_id=self.secret_id, operation='read', project_access=False, user_ids=['anyRandomUserX', 'aclUser1']) self.acl_list.append(acl_read) self._assert_pass_rbac(['admin'], self._invoke_on_get, accept='notjsonaccepttype', content_type='application/json', user_id=self.user_id, project_id=self.external_project_id) def test_should_raise_decrypt_secret_for_with_project_access_nolist(self): """Should raise authz error as secret is marked private. As secret is private so project users should not be able to access the secret. This test passes user_ids as empty list, which is a valid and common case. Admin project user can still access it. 
""" self.acl_list.pop() # remove read acl from default setup acl_read = models.SecretACL(secret_id=self.secret_id, operation='read', project_access=False, user_ids=[]) self.acl_list.append(acl_read) self._assert_fail_rbac(['observer', 'creator', 'audit'], self._invoke_on_get, accept='notjsonaccepttype', content_type='application/json', user_id=self.user_id, project_id=self.external_project_id) def test_should_pass_decrypt_secret_private_enabled_with_read_acl(self): """Should pass authz as user has read acl for private secret. Even though secret is private, user with read acl should be able to access the secret. """ self.acl_list.pop() # remove read acl from default setup acl_read = models.SecretACL(secret_id=self.secret_id, operation='read', project_access=False, user_ids=['anyRandomUserX', 'aclUser1']) self.acl_list.append(acl_read) self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit', 'bogusRole'], self._invoke_on_get, accept='notjsonaccepttype', content_type='application/json', user_id='aclUser1', project_id=self.external_project_id) def test_should_pass_decrypt_secret_different_user_valid_read_acl(self): self.acl_list.pop() # remove read acl from default setup acl_read = models.SecretACL(secret_id=self.secret_id, operation='read', project_access=True, user_ids=['anyRandomUserX', 'aclUser1']) self.acl_list.append(acl_read) # token project_id is different from secret's project id but another # user (from different project) has read acl for secret so should pass self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit', 'bogusRole'], self._invoke_on_get, accept='notjsonaccepttype', content_type='application/json', user_id='aclUser1', project_id='different_project_id') def test_should_raise_decrypt_secret_for_different_user_no_read_acl(self): self.acl_list.pop() # remove read acl from default setup acl_read = models.SecretACL(secret_id=self.secret_id, operation='write', project_access=True, user_ids=['anyRandomUserX', 'aclUser1']) 
self.acl_list.append(acl_read) # token project_id is different from secret's project id but another # user (from different project) has read acl for secret so should pass self._assert_fail_rbac(['admin', 'observer', 'creator', 'audit'], self._invoke_on_get, accept='notjsonaccepttype', content_type='application/json', user_id='aclUser1', project_id='different_project_id') def test_fail_decrypt_secret_for_creator_user_with_different_project(self): """Check for creator user rule for secret decrypt/get call. If token's user is creator of secret but its scoped to different project, then he/she is not allowed access to secret when project is marked private. """ self.acl_list.pop() # remove read acl from default setup acl_read = models.SecretACL(secret_id=self.secret_id, operation='write', project_access=True, user_ids=['anyRandomUserX', 'aclUser1']) self.acl_list.append(acl_read) self.resource.controller.secret.creator_id = 'creatorUserX' # token user is creator but scoped to project different from secret # project so don't allow decrypt secret call to creator of that secret self._assert_fail_rbac(['admin', 'observer', 'creator', 'audit', 'bogusRole'], self._invoke_on_get, accept='notjsonaccepttype', content_type='application/json', user_id='creatorUserX', project_id='different_project_id') def test_should_pass_get_secret(self): self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit'], self._invoke_on_get, user_id=self.user_id, project_id=self.external_project_id) def test_should_pass_get_secret_with_no_context(self): """In unauthenticated flow, get secret should work.""" self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit'], self._invoke_on_get_without_context, user_id=self.user_id, project_id=self.external_project_id) def test_should_raise_get_secret_for_different_project_no_acl(self): """Should raise error when secret and token's project is different.""" self.acl_list.pop() # remove read acl from default setup # token project_id is different from 
secret's project id so should fail self._assert_fail_rbac(['admin', 'observer', 'creator', 'audit'], self._invoke_on_get, user_id=self.user_id, project_id='different_id') def test_should_pass_get_secret_for_same_project_but_different_user(self): # user id should not matter as long token and secret's project match self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit'], self._invoke_on_get, user_id='different_user_id', project_id=self.external_project_id) def test_should_pass_get_secret_for_same_project_with_no_acl(self): self.acl_list.pop() # remove read acl from default setup self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit'], self._invoke_on_get, user_id=self.user_id, project_id=self.external_project_id) def test_should_raise_get_secret_for_with_project_access_disabled(self): """Should raise authz error as secret is marked private. As secret is private so project users should not be able to access the secret. """ self.acl_list.pop() # remove read acl from default setup acl_read = models.SecretACL(secret_id=self.secret_id, operation='read', project_access=False, user_ids=['anyRandomUserX', 'aclUser1']) self.acl_list.append(acl_read) self._assert_fail_rbac(['observer', 'creator', 'audit'], self._invoke_on_get, user_id=self.user_id, project_id=self.external_project_id) def test_pass_get_secret_for_admin_user_with_project_access_disabled(self): """Should pass authz for admin user as secret is marked private. Even when secret is private, admin user should have access the secret. """ self.acl_list.pop() # remove read acl from default setup acl_read = models.SecretACL(secret_id=self.secret_id, operation='read', project_access=False, user_ids=['anyRandomUserX', 'aclUser1']) self.acl_list.append(acl_read) self._assert_pass_rbac(['admin'], self._invoke_on_get, user_id=self.user_id, project_id=self.external_project_id) def test_should_pass_get_secret_for_private_enabled_with_read_acl(self): """Should pass authz as user has read acl for private secret. 
Even though secret is private, user with read acl should be able to access the secret. """ self.acl_list.pop() # remove read acl from default setup acl_read = models.SecretACL(secret_id=self.secret_id, operation='read', project_access=False, user_ids=['anyRandomUserX', 'aclUser1']) self.acl_list.append(acl_read) self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit', 'bogusRole'], self._invoke_on_get, user_id='aclUser1', project_id=self.external_project_id) def test_should_pass_get_secret_different_user_with_valid_read_acl(self): """Should allow when read ACL is defined for a user. Secret's own project and token's project is different but read is allowed because of valid read ACL. """ self.acl_list.pop() # remove read acl from default setup acl_read = models.SecretACL(secret_id=self.secret_id, operation='read', project_access=True, user_ids=['anyRandomUserX', 'aclUser1']) self.acl_list.append(acl_read) # token project_id is different from secret's project id but another # user (from different project) has read acl for secret so should pass self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit', 'bogusRole'], self._invoke_on_get, user_id='aclUser1', project_id='different_project_id') def test_should_raise_get_secret_for_different_user_with_no_read_acl(self): """Get secret fails when no read acl is defined. With different secret and token's project, read is not allowed without a read ACL. 
""" self.acl_list.pop() # remove read acl from default setup acl_read = models.SecretACL(secret_id=self.secret_id, operation='write', project_access=True, user_ids=['anyRandomUserX', 'aclUser1']) self.acl_list.append(acl_read) # token project_id is different from secret's project id but another # user (from different project) has read acl for secret so should pass self._assert_fail_rbac(['admin', 'observer', 'creator', 'audit'], self._invoke_on_get, user_id='aclUser1', project_id='different_project_id') def test_fail_get_secret_for_creator_user_with_different_project(self): """Check for creator user rule for secret get call. If token's user is creator of secret but its scoped to different project, then he/she is not allowed access to secret when project is marked private. """ self.acl_list.pop() # remove read acl from default setup self.resource.controller.secret.creator_id = 'creatorUserX' self._assert_fail_rbac(['admin', 'observer', 'creator', 'audit', 'bogusRole'], self._invoke_on_get, user_id='creatorUserX', project_id='different_project_id') def test_should_raise_get_secret(self): self._assert_fail_rbac([None, 'bogus'], self._invoke_on_get) def test_should_pass_put_secret(self): self._assert_pass_rbac(['admin', 'creator'], self._invoke_on_put, content_type="application/octet-stream", user_id=self.user_id, project_id=self.external_project_id) def test_should_raise_put_secret(self): self._assert_fail_rbac([None, 'audit', 'observer', 'bogus'], self._invoke_on_put, content_type="application/octet-stream") def test_should_pass_delete_secret(self): self._assert_pass_rbac(['admin'], self._invoke_on_delete, user_id=self.user_id, project_id=self.external_project_id) def test_should_raise_delete_secret(self): self._assert_fail_rbac([None, 'audit', 'observer', 'creator', 'bogus'], self._invoke_on_delete) # @mock.patch.object(secrets.SecretController, 'get_acl_tuple', # return_value=(None, None)) def _invoke_on_get(self): self.resource.on_get(self.req, self.resp) def 
_invoke_on_get_without_context(self): # Adding this to get code coverage around context check lines self.req.environ.pop('barbican.context') self.resource.on_get(self.req, self.resp, self.external_project_id) def _invoke_on_put(self): self.resource.on_put(self.req, self.resp) def _invoke_on_delete(self): self.resource.on_delete(self.req, self.resp) class WhenTestingContainerResource(BaseTestCase): """RBAC tests for ContainerController class. Container controller tests are quite similar to SecretController as policy logic is same. Just adding them here to make sure logic related to acl gathering data works as expected. """ def setUp(self): super(WhenTestingContainerResource, self).setUp() self.external_project_id = '12345project' self.container_id = '12345secret' self.user_id = '123456user' self.creator_user_id = '123456CreatorUser' # Force an error on GET and DELETE calls that pass RBAC, # as we are not testing such flows in this test module. self.container_repo = mock.MagicMock() fail_method = mock.MagicMock(return_value=None, side_effect=self._generate_get_error()) self.container_repo.get = fail_method self.container_repo.delete_entity_by_id = fail_method acl_read = models.ContainerACL( container_id=self.container_id, operation='read', project_access=True, user_ids=[self.user_id, 'anyRandomId']) self.acl_list = [acl_read] container = mock.MagicMock() container.to_dict_fields = mock.MagicMock(side_effect=IOError) container.id = self.container_id container.container_acls.__iter__.return_value = self.acl_list container.project.external_id = self.external_project_id container.creator_id = self.creator_user_id self.container_repo.get_container_by_id.return_value = container self.setup_container_repository_mock(self.container_repo) self.resource = ContainerResource(container) def test_rules_should_be_loaded(self): self.assertIsNotNone(self.policy_enforcer.rules) def test_should_pass_get_container(self): self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit'], 
self._invoke_on_get, user_id=self.user_id, project_id=self.external_project_id) def test_should_pass_get_container_with_no_context(self): """In unauthenticated flow, get container should work.""" self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit'], self._invoke_on_get_without_context, user_id=self.user_id, project_id=self.external_project_id) def test_should_raise_get_container_for_different_project_no_acl(self): """Raise error when container and token's project is different.""" self.acl_list.pop() # remove read acl from default setup # token project_id is different from secret's project id so should fail self._assert_fail_rbac(['admin', 'observer', 'creator', 'audit'], self._invoke_on_get, user_id=self.user_id, project_id='different_id') def test_should_pass_get_container_for_same_project_but_different_user( self): """Should pass if token and secret's project match. User id should not matter as long token and container's project match. """ self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit'], self._invoke_on_get, user_id='different_user_id', project_id=self.external_project_id) def test_should_pass_get_container_for_same_project_with_no_acl(self): self.acl_list.pop() # remove read acl from default setup self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit'], self._invoke_on_get, user_id=self.user_id, project_id=self.external_project_id) def test_should_raise_get_container_for_with_project_access_disabled(self): """Should raise authz error as container is marked private. As container is private so project users should not be able to access the secret (other than admin user). 
""" self.acl_list.pop() # remove read acl from default setup acl_read = models.ContainerACL( container_id=self.container_id, operation='read', project_access=False, user_ids=['anyRandomUserX', 'aclUser1']) self.acl_list.append(acl_read) self._assert_fail_rbac(['observer', 'creator', 'audit'], self._invoke_on_get, user_id=self.user_id, project_id=self.external_project_id) def test_pass_get_container_for_admin_user_project_access_disabled(self): """Should pass authz for admin user when container is marked private. For private container, admin user should still be able to access the secret. """ self.acl_list.pop() # remove read acl from default setup acl_read = models.ContainerACL( container_id=self.container_id, operation='read', project_access=False, user_ids=['anyRandomUserX', 'aclUser1']) self.acl_list.append(acl_read) self._assert_pass_rbac(['admin'], self._invoke_on_get, user_id=self.user_id, project_id=self.external_project_id) def test_should_pass_get_container_for_private_enabled_with_read_acl(self): """Should pass authz as user has read acl for private container. Even though container is private, user with read acl should be able to access the container. """ self.acl_list.pop() # remove read acl from default setup acl_read = models.ContainerACL( container_id=self.container_id, operation='read', project_access=False, user_ids=['anyRandomUserX', 'aclUser1']) self.acl_list.append(acl_read) self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit', 'bogusRole'], self._invoke_on_get, user_id='aclUser1', project_id=self.external_project_id) def test_should_pass_get_container_different_user_with_valid_read_acl( self): """Should allow when read ACL is defined for a user. Container's own project and token's project is different but read is allowed because of valid read ACL. User can read regardless of what is token's project as it has necessary ACL. 
""" self.acl_list.pop() # remove read acl from default setup acl_read = models.ContainerACL( container_id=self.container_id, operation='read', project_access=True, user_ids=['anyRandomUserX', 'aclUser1']) self.acl_list.append(acl_read) self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit', 'bogusRole'], self._invoke_on_get, user_id='aclUser1', project_id='different_project_id') def test_should_raise_get_container_for_different_user_with_no_read_acl( self): """Get secret fails when no read acl is defined. With different container and token's project, read is not allowed without a read ACL. """ self.acl_list.pop() # remove read acl from default setup acl_read = models.ContainerACL( container_id=self.container_id, operation='write', project_access=True, user_ids=['anyRandomUserX', 'aclUser1']) self.acl_list.append(acl_read) # token project_id is different from secret's project id but another # user (from different project) has read acl for secret so should pass self._assert_fail_rbac(['admin', 'observer', 'creator', 'audit'], self._invoke_on_get, user_id='aclUser1', project_id='different_project_id') def test_fail_get_container_for_creator_user_different_project(self): """Check for creator user rule for container get call. If token's user is creator of container but its scoped to different project, then he/she is not allowed access to container when project is marked private. """ self.acl_list.pop() # remove read acl from default setup acl_read = models.ContainerACL( container_id=self.container_id, operation='read', project_access=False, user_ids=['anyRandomUserX', 'aclUser1']) self.acl_list.append(acl_read) self._assert_fail_rbac(['creator'], self._invoke_on_get, user_id=self.creator_user_id, project_id='differet_project_id') def test_pass_get_container_for_creator_user_project_access_disabled(self): """Should pass authz for creator user when container is marked private. 
As container is private so user who created the container can still access it as long as user has 'creator' role in container project. """ self.acl_list.pop() # remove read acl from default setup acl_read = models.ContainerACL( container_id=self.container_id, operation='read', project_access=False, user_ids=['anyRandomUserX', 'aclUser1']) self.acl_list.append(acl_read) self._assert_pass_rbac(['creator'], self._invoke_on_get, user_id=self.creator_user_id, project_id=self.external_project_id) def test_should_raise_get_container(self): self._assert_fail_rbac([None, 'bogus'], self._invoke_on_get) def test_should_pass_delete_container(self): self._assert_pass_rbac(['admin'], self._invoke_on_delete, user_id=self.user_id, project_id=self.external_project_id) def test_should_raise_delete_container(self): self._assert_fail_rbac([None, 'audit', 'observer', 'creator', 'bogus'], self._invoke_on_delete) def _invoke_on_get(self): self.resource.on_get(self.req, self.resp) def _invoke_on_get_without_context(self): # Adding this to get code coverage around context check lines self.req.environ.pop('barbican.context') self.resource.on_get(self.req, self.resp) def _invoke_on_put(self): self.resource.on_put(self.req, self.resp) def _invoke_on_delete(self): self.resource.on_delete(self.req, self.resp) class WhenTestingOrdersResource(BaseTestCase): """RBAC tests for the barbican.api.resources.OrdersResource class.""" def setUp(self): super(WhenTestingOrdersResource, self).setUp() self.external_project_id = '12345' # Force an error on GET calls that pass RBAC, as we are not testing # such flows in this test module. 
self.order_repo = mock.MagicMock() get_by_create_date = mock.MagicMock(return_value=None, side_effect=self ._generate_get_error()) self.order_repo.get_by_create_date = get_by_create_date self.setup_order_repository_mock(self.order_repo) self.setup_project_repository_mock() self.resource = OrdersResource(queue_resource=mock.MagicMock()) def test_rules_should_be_loaded(self): self.assertIsNotNone(self.policy_enforcer.rules) def test_should_pass_create_order(self): self._assert_pass_rbac(['admin', 'creator'], self._invoke_on_post, content_type='application/json') def test_should_raise_create_order(self): self._assert_fail_rbac([None, 'audit', 'observer', 'bogus'], self._invoke_on_post) def test_should_pass_get_orders(self): self._assert_pass_rbac(['admin', 'observer', 'creator'], self._invoke_on_get) def test_should_raise_get_orders(self): self._assert_fail_rbac([None, 'audit', 'bogus'], self._invoke_on_get) def _invoke_on_post(self): self.resource.on_post(self.req, self.resp) def _invoke_on_get(self): self.resource.on_get(self.req, self.resp) class WhenTestingOrderResource(BaseTestCase): """RBAC tests for the barbican.api.resources.OrderResource class.""" def setUp(self): super(WhenTestingOrderResource, self).setUp() self.external_project_id = '12345project' self.order_id = '12345order' # Force an error on GET and DELETE calls that pass RBAC, # as we are not testing such flows in this test module. 
self.order_repo = mock.MagicMock() fail_method = mock.MagicMock(return_value=None, side_effect=self._generate_get_error()) self.order_repo.get = fail_method self.order_repo.delete_entity_by_id = fail_method self.setup_order_repository_mock(self.order_repo) self.resource = OrderResource(self.order_id) def test_rules_should_be_loaded(self): self.assertIsNotNone(self.policy_enforcer.rules) def test_should_pass_get_order(self): self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit'], self._invoke_on_get) def test_should_raise_get_order(self): self._assert_fail_rbac([None, 'bogus'], self._invoke_on_get) def test_should_pass_delete_order(self): self._assert_pass_rbac(['admin'], self._invoke_on_delete) def test_should_raise_delete_order(self): self._assert_fail_rbac([None, 'audit', 'observer', 'creator', 'bogus'], self._invoke_on_delete) def _invoke_on_get(self): self.resource.on_get(self.req, self.resp) def _invoke_on_delete(self): self.resource.on_delete(self.req, self.resp) class WhenTestingConsumersResource(BaseTestCase): """RBAC tests for the barbican.api.resources.ConsumersResource class.""" def setUp(self): super(WhenTestingConsumersResource, self).setUp() self.external_project_id = '12345project' self.container_id = '12345container' # Force an error on GET calls that pass RBAC, as we are not testing # such flows in this test module. 
self.consumer_repo = mock.MagicMock() get_by_container_id = mock.MagicMock(return_value=None, side_effect=self ._generate_get_error()) self.consumer_repo.get_by_container_id = get_by_container_id self.setup_project_repository_mock() self.setup_container_consumer_repository_mock(self.consumer_repo) self.setup_container_repository_mock() self.resource = ConsumersResource(container_id=self.container_id) def test_rules_should_be_loaded(self): self.assertIsNotNone(self.policy_enforcer.rules) def test_should_pass_create_consumer(self): self._assert_pass_rbac(['admin'], self._invoke_on_post, content_type='application/json') def test_should_raise_create_consumer(self): self._assert_fail_rbac([None, 'audit', 'observer', 'creator', 'bogus'], self._invoke_on_post, content_type='application/json') def test_should_pass_delete_consumer(self): self._assert_pass_rbac(['admin'], self._invoke_on_delete, content_type='application/json') def test_should_raise_delete_consumer(self): self._assert_fail_rbac([None, 'audit', 'observer', 'creator', 'bogus'], self._invoke_on_delete) def test_should_pass_get_consumers(self): self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit'], self._invoke_on_get, content_type='application/json') def test_should_raise_get_consumers(self): self._assert_fail_rbac([None, 'bogus'], self._invoke_on_get, content_type='application/json') def _invoke_on_post(self): self.resource.on_post(self.req, self.resp) def _invoke_on_delete(self): self.resource.on_delete(self.req, self.resp) def _invoke_on_get(self): self.resource.on_get(self.req, self.resp) class WhenTestingConsumerResource(BaseTestCase): """RBAC tests for the barbican.api.resources.ConsumerResource class.""" def setUp(self): super(WhenTestingConsumerResource, self).setUp() self.external_project_id = '12345project' self.consumer_id = '12345consumer' # Force an error on GET calls that pass RBAC, as we are not testing # such flows in this test module. 
self.consumer_repo = mock.MagicMock() fail_method = mock.MagicMock(return_value=None, side_effect=self._generate_get_error()) self.consumer_repo.get = fail_method self.setup_project_repository_mock() self.setup_container_consumer_repository_mock(self.consumer_repo) self.resource = ConsumerResource(consumer_id=self.consumer_id) def test_rules_should_be_loaded(self): self.assertIsNotNone(self.policy_enforcer.rules) def test_should_pass_get_consumer(self): self._assert_pass_rbac(['admin', 'observer', 'creator', 'audit'], self._invoke_on_get) def test_should_raise_get_consumer(self): self._assert_fail_rbac([None, 'bogus'], self._invoke_on_get) def _invoke_on_get(self): self.resource.on_get(self.req, self.resp) barbican-2.0.0/barbican/tests/api/test_init.py0000664000567000056710000000676012701405673022461 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ This test module tests the barbican.api.__init__.py module functionality. 
""" import mock from oslo_serialization import jsonutils as json from barbican import api from barbican.common import exception from barbican.plugin.interface import secret_store from barbican.tests import utils class WhenInvokingLoadBodyFunction(utils.BaseTestCase): """Tests the load_body function.""" def setUp(self): super(WhenInvokingLoadBodyFunction, self).setUp() @mock.patch('pecan.abort') def test_should_abort_with_read_error(self, mock_pecan_abort): mock_pecan_abort.side_effect = ValueError('Abort!') req = mock.MagicMock() req.body_file = mock.MagicMock() req.body_file.read.side_effect = IOError('Dummy IOError') exception = self.assertRaises( ValueError, api.load_body, req) self.assertEqual('Abort!', exception.message) @mock.patch('pecan.abort') def test_should_abort_with_validation_unsupported_field( self, mock_pecan_abort): mock_pecan_abort.side_effect = ValueError('Abort!') body = json.dumps({'key1': 'value1'}) req = mock.MagicMock() req.body_file = mock.MagicMock() req.body_file.read.return_value = body validator = mock.MagicMock() validator.validate.side_effect = exception.UnsupportedField('Field') exception_result = self.assertRaises( ValueError, api.load_body, req, validator=validator) self.assertEqual('Abort!', exception_result.message) validator.validate.assert_called_once_with(json.loads(body)) class WhenInvokingGenerateSafeExceptionMessageFunction(utils.BaseTestCase): """Tests the generate_safe_exception_message function.""" def setUp(self): super(WhenInvokingGenerateSafeExceptionMessageFunction, self).setUp() def test_handle_secret_content_type_not_supported_exception(self): operation = 'operation' content_type = 'application/octet-stream' test_exception = secret_store.SecretContentTypeNotSupportedException( content_type) status, message = api.generate_safe_exception_message( operation, test_exception) self.assertEqual(400, status) self.assertEqual("operation issue seen - content-type of " "'application/octet-stream' not " "supported.", message) 
def test_handle_secret_content_encoding_not_supported_exception(self): operation = 'operation' content_encoding = 'application/octet-stream' test_excep = secret_store.SecretContentEncodingNotSupportedException( content_encoding) status, message = api.generate_safe_exception_message( operation, test_excep) self.assertEqual(400, status) self.assertEqual("operation issue seen - content-encoding of " "'application/octet-stream' not " "supported.", message) barbican-2.0.0/barbican/__init__.py0000664000567000056710000000123012701405673020266 0ustar jenkinsjenkins00000000000000# Copyright 2010-2011 OpenStack LLC. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Barbican module root """ barbican-2.0.0/barbican/queue/0000775000567000056710000000000012701406024017274 5ustar jenkinsjenkins00000000000000barbican-2.0.0/barbican/queue/server.py0000664000567000056710000002367412701405673021201 0ustar jenkinsjenkins00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. 
# See the License for the specific language governing permissions and # limitations under the License. """ Server-side (i.e. worker side) classes and logic. """ import datetime import functools try: import newrelic.agent from newrelic.api import application newrelic_loaded = True except ImportError: newrelic_loaded = False from oslo_service import service from barbican.common import config from barbican.common import utils from barbican import i18n as u from barbican.model import models from barbican.model import repositories from barbican import queue from barbican.tasks import common from barbican.tasks import resources if newrelic_loaded: newrelic.agent.initialize('/etc/newrelic/newrelic.ini') LOG = utils.getLogger(__name__) CONF = config.CONF # Maps the common/shared RetryTasks (returned from lower-level business logic # and plugin processing) to top-level RPC tasks in the Tasks class below. MAP_RETRY_TASKS = { common.RetryTasks.INVOKE_CERT_STATUS_CHECK_TASK: 'check_certificate_status' } def find_function_name(func, if_no_name=None): """Returns pretty-formatted function name.""" return getattr(func, '__name__', if_no_name) def retryable_order(fn): """Provides retry/scheduling support to Order-related tasks.""" @functools.wraps(fn) def wrapper(method_self, *args, **kwargs): result = fn(method_self, *args, **kwargs) retry_rpc_method = schedule_order_retry_tasks( fn, result, *args, **kwargs) if retry_rpc_method: LOG.info( u._LI("Scheduled RPC method for retry: '%s'"), retry_rpc_method) else: LOG.info( u._LI("Task '%s' did not have to be retried"), find_function_name(fn, if_no_name='???')) return wrapper def transactional(fn): """Provides request-scoped database transaction support to tasks.""" @functools.wraps(fn) def wrapper(*args, **kwargs): fn_name = find_function_name(fn, if_no_name='???') if not queue.is_server_side(): # Non-server mode directly invokes tasks. 
fn(*args, **kwargs) LOG.info(u._LI("Completed worker task: '%s'"), fn_name) else: # Manage session/transaction. try: fn(*args, **kwargs) repositories.commit() LOG.info( u._LI("Completed worker task (post-commit): '%s'"), fn_name) except Exception: """NOTE: Wrapped functions must process with care! Exceptions that reach here will revert the entire transaction, including any updates made to entities such as setting error codes and error messages. """ LOG.exception( u._LE("Problem seen processing worker task: '%s'"), fn_name ) repositories.rollback() finally: repositories.clear() return wrapper def monitored(fn): # pragma: no cover """Provides monitoring capabilities for task methods.""" # TODO(jvrbanac): Figure out how we should test third-party monitoring # Support NewRelic Monitoring if newrelic_loaded: # Create a NewRelic app instance app = application.application_instance() def newrelic_wrapper(*args, **kwargs): # Resolve real name since decorators are wrapper the method if len(args) > 0 and hasattr(args[0], fn.__name__): cls = type(args[0]) task_name = '{0}:{1}.{2}'.format( cls.__module__, cls.__name__, fn.__name__ ) else: task_name = newrelic.agent.callable_name(fn) # Execute task under a monitored context with newrelic.agent.BackgroundTask(app, task_name): fn(*args, **kwargs) return newrelic_wrapper return fn def schedule_order_retry_tasks( invoked_task, retry_result, context, *args, **kwargs): """Schedules an Order-related task for retry. :param invoked_task: The RPC method that was just invoked. :param retry_result: A :class:`FollowOnProcessingStatusDTO` if follow-on processing (such as retrying this or another task) is required, otherwise None indicates no such follow-on processing is required. :param context: Queue context, not used. :param order_id: ID of the Order entity the task to retry is for. :param args: List of arguments passed in to the just-invoked task. :param kwargs: Dict of arguments passed in to the just-invoked task. 
:return: Returns the RPC task method scheduled for a retry, None if no RPC task was scheduled. """ retry_rpc_method = None order_id = kwargs.get('order_id') if not retry_result or not order_id: pass elif common.RetryTasks.INVOKE_SAME_TASK == retry_result.retry_task: if invoked_task: retry_rpc_method = find_function_name(invoked_task) else: retry_rpc_method = MAP_RETRY_TASKS.get(retry_result.retry_task) if retry_rpc_method: LOG.debug( 'Scheduling RPC method for retry: {0}'.format(retry_rpc_method)) date_to_retry_at = datetime.datetime.utcnow() + datetime.timedelta( milliseconds=retry_result.retry_msec) retry_model = models.OrderRetryTask() retry_model.order_id = order_id retry_model.retry_task = retry_rpc_method retry_model.retry_at = date_to_retry_at retry_model.retry_args = args retry_model.retry_kwargs = kwargs retry_model.retry_count = 0 retry_repo = repositories.get_order_retry_tasks_repository() retry_repo.create_from(retry_model) return retry_rpc_method class Tasks(object): """Tasks that can be invoked asynchronously in Barbican. Only place task methods and implementations on this class, as they can be called directly from the client side for non-asynchronous standalone single-node operation. If a new method is added that can be retried, please also add its method name to MAP_RETRY_TASKS above. The TaskServer class below extends this class to implement a worker-side server utilizing Oslo messaging's RPC server. This RPC server can invoke methods on itself, which include the methods in this class. 
""" @monitored @transactional @retryable_order def process_type_order(self, context, order_id, project_id, request_id): """Process TypeOrder.""" message = u._LI( "Processing type order: " "order ID is '%(order)s' and request ID is '%(request)s'" ) LOG.info(message, {'order': order_id, 'request': request_id}) return resources.BeginTypeOrder().process_and_suppress_exceptions( order_id, project_id) @monitored @transactional @retryable_order def update_order(self, context, order_id, project_id, updated_meta, request_id): """Update Order.""" message = u._LI( "Processing update order: " "order ID is '%(order)s' and request ID is '%(request)s'" ) LOG.info(message, {'order': order_id, 'request': request_id}) return resources.UpdateOrder().process_and_suppress_exceptions( order_id, project_id, updated_meta) @monitored @transactional @retryable_order def check_certificate_status(self, context, order_id, project_id, request_id): """Check the status of a certificate order.""" message = u._LI( "Processing check certificate status on order: " "order ID is '%(order)s' and request ID is '%(request)s'" ) LOG.info(message, {'order': order_id, 'request': request_id}) check_cert_order = resources.CheckCertificateStatusOrder() return check_cert_order.process_and_suppress_exceptions( order_id, project_id) class TaskServer(Tasks, service.Service): """Server to process asynchronous tasking from Barbican API nodes. This server is an Oslo service that exposes task methods that can be invoked from the Barbican API nodes. It delegates to an Oslo RPC messaging server to invoke methods asynchronously on this class. Since this class also extends the Tasks class above, its task-based methods are hence available to the RPC messaging server. 
""" def __init__(self): super(TaskServer, self).__init__() # Setting up db engine to avoid lazy initialization repositories.setup_database_engine_and_factory() # This property must be defined for the 'endpoints' specified below, # as the oslo_messaging RPC server will ask for it. self.target = queue.get_target() # Create an oslo RPC server, that calls back on to this class # instance to invoke tasks, such as 'process_order()' on the # extended Tasks class above. self._server = queue.get_server(target=self.target, endpoints=[self]) def start(self): LOG.info(u._LI("Starting the TaskServer")) self._server.start() super(TaskServer, self).start() def stop(self): LOG.info(u._LI("Halting the TaskServer")) super(TaskServer, self).stop() self._server.stop() barbican-2.0.0/barbican/queue/retry_scheduler.py0000664000567000056710000001351412701405673023066 0ustar jenkinsjenkins00000000000000# Copyright (c) 2015 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ Retry/scheduler classes and logic. 
""" import datetime import random from oslo_service import periodic_task from oslo_service import service from barbican.common import config from barbican.common import utils from barbican import i18n as u from barbican.model import models from barbican.model import repositories from barbican.queue import client as async_client LOG = utils.getLogger(__name__) CONF = config.CONF def _compute_next_periodic_interval(): periodic_interval = ( CONF.retry_scheduler.periodic_interval_max_seconds ) # Return +- 20% of interval. return random.uniform(0.8 * periodic_interval, # nosec 1.2 * periodic_interval) class PeriodicServer(service.Service): """Server to process retry and scheduled tasks. This server is an Oslo periodic-task service (see http://docs.openstack.org/developer/oslo-incubator/api/openstack.common .periodic_task.html). On a periodic basis, this server checks for tasks that need to be retried, and then sends them up to the RPC queue for later processing by a worker node. """ def __init__(self, queue_resource=None): super(PeriodicServer, self).__init__() # Setting up db engine to avoid lazy initialization repositories.setup_database_engine_and_factory() # Connect to the worker queue, to send retry RPC tasks to it later. self.queue = queue_resource or async_client.TaskClient() # Start the task retry periodic scheduler process up. periodic_interval = ( CONF.retry_scheduler.periodic_interval_max_seconds ) self.tg.add_dynamic_timer( self._check_retry_tasks, initial_delay=CONF.retry_scheduler.initial_delay_seconds, periodic_interval_max=periodic_interval) self.order_retry_repo = repositories.get_order_retry_tasks_repository() def start(self): LOG.info("Starting the PeriodicServer") super(PeriodicServer, self).start() def stop(self, graceful=True): LOG.info("Halting the PeriodicServer") super(PeriodicServer, self).stop(graceful=graceful) @periodic_task.periodic_task def _check_retry_tasks(self): """Periodically check to see if tasks need to be scheduled. 
        :return: Return the number of seconds to wait before invoking this
            method again.
        """
        total_tasks_processed = 0
        try:
            total_tasks_processed = self._process_retry_tasks()
        except Exception:
            # Swallow and log: one failed pass must not kill the periodic
            # timer; the next pass will try again.
            LOG.exception(
                u._LE("Problem seen processing scheduled retry tasks")
            )

        # Return the next delay before this method is invoked again.
        check_again_in_seconds = _compute_next_periodic_interval()
        LOG.info(
            u._LI("Done processing '%(total)s' tasks, will check again in "
                  "'%(next)s' seconds."),
            {
                'total': total_tasks_processed,
                'next': check_again_in_seconds
            }
        )
        return check_again_in_seconds

    def _process_retry_tasks(self):
        """Scan for and then re-queue tasks that are ready to retry."""
        LOG.info(u._LI("Processing scheduled retry tasks:"))

        # Retrieve tasks to retry.
        entities, total = self._retrieve_tasks()

        # Create RPC tasks for each retry task found.
        for task in entities:
            self._enqueue_task(task)

        return total

    def _retrieve_tasks(self):
        """Retrieve a list of tasks to retry.

        :return: Tuple of (entities, total) -- retry-task records whose
            scheduled retry time is at or before now, and their count.
        """
        repositories.start()
        try:
            entities, _, _, total = self.order_retry_repo.get_by_create_date(
                only_at_or_before_this_date=datetime.datetime.utcnow(),
                suppress_exception=True)
        finally:
            repositories.clear()

        return entities, total

    def _enqueue_task(self, task):
        """Re-enqueue the specified task.

        Runs in its own database transaction so a failure for one task does
        not roll back work done for any other task in the same pass.
        """
        # Defaults so the exception log below is meaningful even if the
        # failure occurs before the task fields are read.
        retry_task_name = 'N/A'
        retry_args = 'N/A'
        retry_kwargs = 'N/A'

        # Start a new isolated database transaction just for this task.
        repositories.start()
        try:
            # Invoke queue client to place retried RPC task on queue.
            retry_task_name = task.retry_task
            retry_args = task.retry_args
            retry_kwargs = task.retry_kwargs
            retry_method = getattr(self.queue, retry_task_name)
            retry_method(*retry_args, **retry_kwargs)

            # Remove the retry record from the queue.
task.status = models.States.ACTIVE self.order_retry_repo.delete_entity_by_id(task.id, None) repositories.commit() LOG.debug( "(Enqueued method '{0}' with args '{1}' and " "kwargs '{2}')".format( retry_task_name, retry_args, retry_kwargs)) except Exception: LOG.exception( u._LE( "Problem enqueuing method '%(name)s' with args '%(args)s' " "and kwargs '%(kwargs)s'."), { 'name': retry_task_name, 'args': retry_args, 'kwargs': retry_kwargs } ) repositories.rollback() finally: repositories.clear() barbican-2.0.0/barbican/queue/__init__.py0000664000567000056710000000723412701405673021424 0ustar jenkinsjenkins00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
""" Queue objects for Barbican """ import oslo_messaging as messaging from oslo_messaging.notify import dispatcher as notify_dispatcher from oslo_messaging import server as msg_server from barbican.common import config from barbican.common import exception from barbican.common import utils LOG = utils.getLogger(__name__) # Constant at one place if this needs to be changed later KS_NOTIFICATIONS_GRP_NAME = config.KS_NOTIFICATIONS_GRP_NAME CONF = config.CONF TRANSPORT = None IS_SERVER_SIDE = True ALLOWED_EXMODS = [ exception.__name__, ] def get_allowed_exmods(): return ALLOWED_EXMODS def init(conf, is_server_side=True): global TRANSPORT, IS_SERVER_SIDE exmods = get_allowed_exmods() IS_SERVER_SIDE = is_server_side TRANSPORT = messaging.get_transport(conf, allowed_remote_exmods=exmods) def is_server_side(): return IS_SERVER_SIDE def cleanup(): global TRANSPORT TRANSPORT.cleanup() TRANSPORT = None def get_target(): return messaging.Target(topic=CONF.queue.topic, namespace=CONF.queue.namespace, version=CONF.queue.version, server=CONF.queue.server_name) def get_client(target=None, version_cap=None, serializer=None): if not CONF.queue.enable: return None queue_target = target or get_target() return messaging.RPCClient(TRANSPORT, target=queue_target, version_cap=version_cap, serializer=serializer) def get_server(target, endpoints, serializer=None): return messaging.get_rpc_server(TRANSPORT, target, endpoints, executor='eventlet', serializer=serializer) def get_notification_target(): conf_opts = getattr(CONF, KS_NOTIFICATIONS_GRP_NAME) return messaging.Target(exchange=conf_opts.control_exchange, topic=conf_opts.topic, version=conf_opts.version, fanout=True) def get_notification_server(targets, endpoints, serializer=None): """Retrieve notification server This Notification server uses same transport configuration as used by other barbican functionality like async order processing. 
Assumption is that messaging infrastructure is going to be shared (same)
among different barbican features.
"""
    # Requeue support is transport-dependent; verify the configured driver
    # supports it before wiring it into the dispatcher.
    allow_requeue = getattr(getattr(CONF, KS_NOTIFICATIONS_GRP_NAME),
                            'allow_requeue')
    TRANSPORT._require_driver_features(requeue=allow_requeue)
    dispatcher = notify_dispatcher.NotificationDispatcher(targets, endpoints,
                                                          serializer,
                                                          allow_requeue)
    # we don't want blocking executor so use eventlet as executor choice
    return msg_server.MessageHandlingServer(TRANSPORT, dispatcher,
                                            executor='eventlet')
barbican-2.0.0/barbican/queue/keystone_listener.py0000664000567000056710000001511412701405673023427 0ustar jenkinsjenkins00000000000000# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Server-side (i.e. worker side) Keystone notification related classes and
logic.
"""
import oslo_messaging
from oslo_service import service

from barbican.common import utils
from barbican import queue
from barbican.tasks import keystone_consumer

LOG = utils.getLogger(__name__)


class NotificationTask(object):
    """Task which exposes the API for consuming priority based notifications.

    The Oslo notification framework delivers notifications based on priority
    to matching callback APIs as defined in its notification listener
    endpoint list.

    Currently from Keystone perspective, `info` API is sufficient as
    Keystone send notifications at `info` priority ONLY. Other priority
    level APIs (warn, error, critical, audit, debug) are not needed here.
    """

    def __init__(self, conf):
        # conf: barbican configuration object; only the
        # keystone_notifications.allow_requeue option is read here (when a
        # task fails in process_event).
        self.conf = conf

    def info(self, ctxt, publisher_id, event_type, payload, metadata):
        """Receives notification at info level."""
        return self.process_event(ctxt, publisher_id, event_type, payload,
                                  metadata)

    def process_event(self, ctxt, publisher_id, event_type, payload,
                      metadata):
        """Process Keystone Event based on event_type and payload data.

        Parses notification data to identify if the event is related to delete
        project or not. In case of delete project event, it passes project_id
        to KeystoneEventConsumer logic for further processing. Barbican
        service is not interested in other events so in that case it just
        returns None as acknowledgment.

        Messaging server considers message is acknowledged when either return
        value is `oslo_messaging.NotificationResult.HANDLED` or None.

        In case of successful processing of notification, the returned value
        is `oslo_messaging.NotificationResult.HANDLED`

        In case of notification processing error, the value returned
        is oslo_messaging.NotificationResult.REQUEUE when transport
        supports this feature otherwise
        `oslo_messaging.NotificationResult.HANDLED` is returned.
        """
        LOG.debug("Input keystone event publisher_id = %s", publisher_id)
        LOG.debug("Input keystone event payload = %s", payload)
        LOG.debug("Input keystone event type = %s", event_type)
        LOG.debug("Input keystone event metadata = %s", metadata)
        project_id = self._parse_payload_for_project_id(payload)
        resource_type, operation_type = self._parse_event_type(event_type)
        LOG.debug('Keystone Event: resource type={0}, operation type={1}, '
                  'keystone id={2}'.format(resource_type, operation_type,
                                           project_id))

        # Only project deletion events are acted upon; everything else is
        # acknowledged implicitly by returning None below.
        if (project_id and resource_type == 'project' and
                operation_type == 'deleted'):
            task = keystone_consumer.KeystoneEventConsumer()
            try:
                task.process(project_id=project_id,
                             resource_type=resource_type,
                             operation_type=operation_type)
                return oslo_messaging.NotificationResult.HANDLED
            except Exception:
                # No need to log message here as task process method has
                # already logged it
                # TODO(john-wood-w) This really should be retried on a
                # schedule and really only if the database is down, not
                # for any exception otherwise tasks will be re-queued
                # repeatedly. Revisit as part of the retry task work later.
                if self.conf.keystone_notifications.allow_requeue:
                    return oslo_messaging.NotificationResult.REQUEUE
                else:
                    return oslo_messaging.NotificationResult.HANDLED
        return None  # in case event is not project delete

    def _parse_event_type(self, event_type):
        """Parses event type provided as part of notification.

        Parses to identify what operation is performed and on which Keystone
        resource.

        A few event type sample values are provided below::

            identity.project.deleted
            identity.role.created
            identity.domain.updated
            identity.authenticate

        :returns: (resource_type, operation_type) tuple, both lowercased;
            (None, None) when event_type is empty or has fewer than two dots
            (e.g. 'identity.authenticate').
        """
        resource_type = None
        operation_type = None
        if event_type:
            type_list = event_type.split('.')
            # 2 is min. number of dot delimiters expected in event_type value.
            if len(type_list) > 2:
                resource_type = type_list[-2].lower()
                operation_type = type_list[-1].lower()

        return resource_type, operation_type

    def _parse_payload_for_project_id(self, payload_s):
        """Gets project resource identifier from payload

        Sample payload is provided below::

            {'resource_info': u'2b99a94ad02741978e613fb52dd1f4cd'}

        Returns None when the payload is empty/None.
        """
        if payload_s:
            return payload_s.get('resource_info')


class MessageServer(NotificationTask, service.Service):
    """Server to retrieve messages from queue used by Keystone.

    This is used to send public notifications for openstack service
    consumption.

    This server is an Oslo notification server that exposes set of standard
    APIs for events consumption based on event priority.

    Some of messaging server configuration needs to match with Keystone
    deployment notification configuration e.g. exchange name, topic name
    """
    def __init__(self, conf):
        pool_size = conf.keystone_notifications.thread_pool_size
        NotificationTask.__init__(self, conf)
        service.Service.__init__(self, threads=pool_size)

        self.target = queue.get_notification_target()
        self._msg_server = queue.get_notification_server(targets=[self.target],
                                                         endpoints=[self])

    def start(self):
        # Start consuming notifications before starting the service threads.
        self._msg_server.start()
        super(MessageServer, self).start()

    def stop(self):
        # Reverse order of start(): stop service threads, then the message
        # server, then tear down shared messaging resources.
        super(MessageServer, self).stop()
        self._msg_server.stop()
        queue.cleanup()
barbican-2.0.0/barbican/queue/client.py0000664000567000056710000000674512701405673021141 0ustar jenkinsjenkins00000000000000# Copyright (c) 2013-2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Client-side (i.e. API side) classes and logic.
"""
from barbican.common import utils
from barbican import queue
from barbican.queue import server

LOG = utils.getLogger(__name__)


class TaskClient(object):
    """API-side client interface to asynchronous queuing services.

    The class delegates calls to the oslo_messaging RPC framework.
    """

    def __init__(self):
        super(TaskClient, self).__init__()

        # Establish either an asynchronous messaging/queuing client
        # interface (via Oslo's RPC messaging) or else allow for
        # synchronously invoking worker processes in support of a
        # standalone single-node mode for Barbican.
        self._client = queue.get_client() or _DirectTaskInvokerClient()

    def process_type_order(self, order_id, project_id, request_id):
        """Process TypeOrder."""
        self._cast('process_type_order', order_id=order_id,
                   project_id=project_id, request_id=request_id)

    def update_order(self, order_id, project_id, updated_meta, request_id):
        """Update Order."""
        self._cast('update_order', order_id=order_id,
                   project_id=project_id, updated_meta=updated_meta,
                   request_id=request_id)

    def check_certificate_status(self, order_id, project_id, request_id):
        """Check the status of a certificate order."""
        self._cast('check_certificate_status', order_id=order_id,
                   project_id=project_id, request_id=request_id)

    def _cast(self, name, **kwargs):
        """Asynchronous call handler. Barbican probably only needs casts.

        :param name: Method name to invoke.
        :param kwargs: Arguments for the method invocation.
        :return:
        """
        return self._client.cast({}, name, **kwargs)

    def _call(self, name, **kwargs):
        """Synchronous call handler. Barbican probably *never* uses calls."""
        return self._client.call({}, name, **kwargs)


class _DirectTaskInvokerClient(object):
    """Allows for direct invocation of queue.server Tasks.

    This class supports a standalone single-node mode of operation for
    Barbican, whereby typically asynchronous requests to Barbican are
    handled synchronously.
    """

    def __init__(self):
        super(_DirectTaskInvokerClient, self).__init__()

        self._tasks = server.Tasks()

    def cast(self, context, method_name, **kwargs):
        # Mirrors the oslo_messaging cast() signature but runs the task
        # inline; any exception is logged and swallowed so callers get the
        # same fire-and-forget semantics as a true asynchronous cast.
        try:
            getattr(self._tasks, method_name)(context, **kwargs)
        except Exception:
            LOG.exception(">>>>> Task exception seen for synchronous task "
                          "invocation, so handling exception to mimic "
                          "asynchronous behavior.")

    def call(self, context, method_name, **kwargs):
        # Two-way synchronous calls are not supported in direct mode.
        raise ValueError("No support for call() client methods.")
barbican-2.0.0/barbican/plugin/0000775000567000056710000000000012701406024017446 5ustar jenkinsjenkins00000000000000barbican-2.0.0/barbican/plugin/resources.py0000664000567000056710000004025112701405673022045 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from barbican.common import exception
from barbican.common import utils
from barbican.model import models
from barbican.model import repositories as repos
from barbican.plugin.interface import secret_store
from barbican.plugin import store_crypto
from barbican.plugin.util import translations as tr


def _get_transport_key_model(key_spec, transport_key_needed):
    """Return a current transport key model for a plugin supporting them.

    Returns None when transport_key_needed is False. Otherwise finds a
    store plugin that supports transport keys, reuses its latest stored key
    if still current, or creates and persists a fresh one.
    """
    key_model = None
    if transport_key_needed:
        # get_plugin_store() will throw an exception if no suitable
        # plugin with transport key is found
        plugin_manager = secret_store.get_manager()
        store_plugin = plugin_manager.get_plugin_store(
            key_spec=key_spec, transport_key_needed=True)
        plugin_name = utils.generate_fullname_for(store_plugin)

        key_repo = repos.get_transport_key_repository()
        key_model = key_repo.get_latest_transport_key(plugin_name)

        if not key_model or not store_plugin.is_transport_key_current(
                key_model.transport_key):
            # transport key does not exist or is not current.
            # need to get a new transport key
            transport_key = store_plugin.get_transport_key()
            new_key_model = models.TransportKey(plugin_name, transport_key)
            key_model = key_repo.create_from(new_key_model)
    return key_model


def _get_plugin_name_and_transport_key(transport_key_id):
    """Resolve a transport key ID into (plugin_name, transport_key).

    Both values are None when transport_key_id is None.

    :raises exception.ProvidedTransportKeyNotFound: unknown ID
    :raises ValueError: stored record is missing its plugin name
    """
    plugin_name = None
    transport_key = None
    if transport_key_id is not None:
        transport_key_repo = repos.get_transport_key_repository()
        try:
            transport_key_model = transport_key_repo.get(
                entity_id=transport_key_id)
        except exception.NotFound:
            raise exception.ProvidedTransportKeyNotFound(
                str(transport_key_id))

        plugin_name = transport_key_model.plugin_name
        if plugin_name is None:
            raise ValueError("Invalid plugin name for transport key")

        transport_key = transport_key_model.transport_key

    return plugin_name, transport_key


def store_secret(unencrypted_raw, content_type_raw, content_encoding,
                 secret_model, project_model,
                 transport_key_needed=False,
                 transport_key_id=None):
    """Store a provided secret into secure backend.

    Two-phase capable: when unencrypted_raw is empty the Secret entity is
    created (optionally with a transport key) and returned for a later call
    that supplies the actual data.

    :returns: (secret_model, transport_key_model_or_None) tuple
    :raises ValueError: the secret already has stored data
    """
    if _secret_already_has_stored_data(secret_model):
        raise ValueError('Secret already has encrypted data stored for it.')

    # Create a KeySpec to find a plugin that will support storing the secret
    key_spec = secret_store.KeySpec(alg=secret_model.algorithm,
                                    bit_length=secret_model.bit_length,
                                    mode=secret_model.mode)

    # If there is no secret data to store, then just create Secret entity and
    # leave. A subsequent call to this method should provide both the Secret
    # entity created here *and* the secret data to store into it.
    if not unencrypted_raw:
        key_model = _get_transport_key_model(key_spec, transport_key_needed)

        _save_secret_in_repo(secret_model, project_model)
        return secret_model, key_model

    plugin_name, transport_key = _get_plugin_name_and_transport_key(
        transport_key_id)

    unencrypted, content_type = tr.normalize_before_encryption(
        unencrypted_raw, content_type_raw, content_encoding,
        secret_model.secret_type, enforce_text_only=True)

    plugin_manager = secret_store.get_manager()
    store_plugin = plugin_manager.get_plugin_store(key_spec=key_spec,
                                                  plugin_name=plugin_name)

    secret_dto = secret_store.SecretDTO(type=secret_model.secret_type,
                                        secret=unencrypted,
                                        key_spec=key_spec,
                                        content_type=content_type,
                                        transport_key=transport_key)

    secret_metadata = _store_secret_using_plugin(store_plugin, secret_dto,
                                                 secret_model, project_model)
    _save_secret_in_repo(secret_model, project_model)

    _save_secret_metadata_in_repo(secret_model, secret_metadata,
                                  store_plugin, content_type)

    return secret_model, None


def get_secret(requesting_content_type, secret_model, project_model,
               twsk=None, transport_key=None):
    """Retrieve and decrypt/denormalize a secret from the secure backend.

    :param twsk: optional trans-wrapped session key for transport-key
        wrapped retrieval; requires transport_key as well.
    :returns: the secret payload in the requested content type
    """
    secret_metadata = _get_secret_meta(secret_model)

    # NOTE: */* is the pecan default meaning no content type sent in. In this
    # case we should use the mime type stored in the metadata.
    if requesting_content_type == '*/*':
        requesting_content_type = secret_metadata['content_type']

    tr.analyze_before_decryption(requesting_content_type)

    if twsk is not None:
        secret_metadata['trans_wrapped_session_key'] = twsk
        secret_metadata['transport_key'] = transport_key

    # Locate a suitable plugin to store the secret.
    plugin_manager = secret_store.get_manager()
    retrieve_plugin = plugin_manager.get_plugin_retrieve_delete(
        secret_metadata.get('plugin_name'))

    # Retrieve the secret.
    secret_dto = _get_secret(
        retrieve_plugin, secret_metadata, secret_model, project_model)

    # Remove the session-key entries again so they are not persisted as
    # ordinary secret metadata.
    if twsk is not None:
        del secret_metadata['transport_key']
        del secret_metadata['trans_wrapped_session_key']

    # Denormalize the secret.
    return tr.denormalize_after_decryption(secret_dto.secret,
                                           requesting_content_type)


def get_transport_key_id_for_retrieval(secret_model):
    """Return a transport key ID for retrieval if the plugin supports it."""
    secret_metadata = _get_secret_meta(secret_model)

    plugin_manager = secret_store.get_manager()
    retrieve_plugin = plugin_manager.get_plugin_retrieve_delete(
        secret_metadata.get('plugin_name'))

    transport_key_id = retrieve_plugin.get_transport_key()
    return transport_key_id


def generate_secret(spec, content_type, project_model):
    """Generate a secret and store into a secure backend."""
    # Locate a suitable plugin to store the secret.
    key_spec = secret_store.KeySpec(alg=spec.get('algorithm'),
                                    bit_length=spec.get('bit_length'),
                                    mode=spec.get('mode'))
    plugin_manager = secret_store.get_manager()
    generate_plugin = plugin_manager.get_plugin_generate(key_spec)

    # Create secret model to eventually save metadata to.
    secret_model = models.Secret(spec)
    secret_model['secret_type'] = secret_store.SecretType.SYMMETRIC

    # Generate the secret.
    secret_metadata = _generate_symmetric_key(
        generate_plugin, key_spec, secret_model, project_model, content_type)

    # Save secret and metadata.
    _save_secret_in_repo(secret_model, project_model)

    _save_secret_metadata_in_repo(secret_model, secret_metadata,
                                  generate_plugin, content_type)

    return secret_model


def generate_asymmetric_secret(spec, content_type, project_model):
    """Generate an asymmetric secret and store into a secure backend.

    Creates private/public (and optionally passphrase) Secret entities plus
    a Container that associates them, and returns the container model.
    """
    # Locate a suitable plugin to store the secret.
    key_spec = secret_store.KeySpec(alg=spec.get('algorithm'),
                                    bit_length=spec.get('bit_length'),
                                    passphrase=spec.get('passphrase'))

    plugin_manager = secret_store.get_manager()
    generate_plugin = plugin_manager.get_plugin_generate(key_spec)

    # Create secret models to eventually save metadata to.
    private_secret_model = models.Secret(spec)
    private_secret_model['secret_type'] = secret_store.SecretType.PRIVATE
    public_secret_model = models.Secret(spec)
    public_secret_model['secret_type'] = secret_store.SecretType.PUBLIC
    passphrase_secret_model = (models.Secret(spec)
                               if spec.get('passphrase') else None)
    if passphrase_secret_model:
        passphrase_type = secret_store.SecretType.PASSPHRASE
        passphrase_secret_model['secret_type'] = passphrase_type

    asymmetric_meta_dto = _generate_asymmetric_key(
        generate_plugin,
        key_spec,
        private_secret_model,
        public_secret_model,
        passphrase_secret_model,
        project_model,
        content_type
    )

    _save_secret_in_repo(private_secret_model, project_model)

    _save_secret_metadata_in_repo(private_secret_model,
                                  asymmetric_meta_dto.private_key_meta,
                                  generate_plugin,
                                  content_type)

    _save_secret_in_repo(public_secret_model, project_model)

    _save_secret_metadata_in_repo(public_secret_model,
                                  asymmetric_meta_dto.public_key_meta,
                                  generate_plugin,
                                  content_type)

    if passphrase_secret_model:
        _save_secret_in_repo(passphrase_secret_model, project_model)

        _save_secret_metadata_in_repo(passphrase_secret_model,
                                      asymmetric_meta_dto.passphrase_meta,
                                      generate_plugin,
                                      content_type)

    container_model = _create_container_for_asymmetric_secret(spec,
                                                              project_model)

    _save_asymmetric_secret_in_repo(
        container_model, private_secret_model, public_secret_model,
        passphrase_secret_model)

    return container_model


def delete_secret(secret_model, project_id):
    """Remove a secret from secure backend."""

    secret_metadata = _get_secret_meta(secret_model)

    # We should only try to delete a secret using the plugin interface if
    # there's the metadata available. This addresses bug/1377330.
    if secret_metadata:
        # Locate a suitable plugin to delete the secret from.
        plugin_manager = secret_store.get_manager()
        delete_plugin = plugin_manager.get_plugin_retrieve_delete(
            secret_metadata.get('plugin_name'))

        # Delete the secret from plugin storage.
        delete_plugin.delete_secret(secret_metadata)

    # Delete the secret from data model.
    secret_repo = repos.get_secret_repository()
    secret_repo.delete_entity_by_id(entity_id=secret_model.id,
                                    external_project_id=project_id)


def _store_secret_using_plugin(store_plugin, secret_dto, secret_model,
                               project_model):
    # Crypto-adapter plugins need a context carrying the models; plain
    # secret-store plugins take only the DTO.
    if isinstance(store_plugin, store_crypto.StoreCryptoAdapterPlugin):
        context = store_crypto.StoreCryptoContext(
            project_model, secret_model=secret_model)
        secret_metadata = store_plugin.store_secret(secret_dto, context)
    else:
        secret_metadata = store_plugin.store_secret(secret_dto)

    return secret_metadata


def _generate_symmetric_key(
        generate_plugin, key_spec, secret_model, project_model, content_type):
    # Same adapter/plain split as in _store_secret_using_plugin.
    if isinstance(generate_plugin, store_crypto.StoreCryptoAdapterPlugin):
        context = store_crypto.StoreCryptoContext(
            project_model,
            secret_model=secret_model,
            content_type=content_type)
        secret_metadata = generate_plugin.generate_symmetric_key(
            key_spec, context)
    else:
        secret_metadata = generate_plugin.generate_symmetric_key(key_spec)

    return secret_metadata


def _generate_asymmetric_key(generate_plugin, key_spec, private_secret_model,
                             public_secret_model, passphrase_secret_model,
                             project_model, content_type):
    # Same adapter/plain split as in _store_secret_using_plugin.
    if isinstance(generate_plugin, store_crypto.StoreCryptoAdapterPlugin):
        context = store_crypto.StoreCryptoContext(
            project_model,
            private_secret_model=private_secret_model,
            public_secret_model=public_secret_model,
            passphrase_secret_model=passphrase_secret_model,
            content_type=content_type)
        asymmetric_meta_dto = generate_plugin.generate_asymmetric_key(
            key_spec, context)
    else:
        asymmetric_meta_dto = generate_plugin.generate_asymmetric_key(key_spec)

    return asymmetric_meta_dto


def _get_secret(retrieve_plugin, secret_metadata, secret_model,
                project_model):
    # Same adapter/plain split as in _store_secret_using_plugin.
    if isinstance(retrieve_plugin, store_crypto.StoreCryptoAdapterPlugin):
        context = store_crypto.StoreCryptoContext(
            project_model, secret_model=secret_model)
        secret_dto = retrieve_plugin.get_secret(secret_model.secret_type,
                                                secret_metadata, context)
    else:
        secret_dto = retrieve_plugin.get_secret(secret_model.secret_type,
                                                secret_metadata)

    return secret_dto


def _get_secret_meta(secret_model):
    """Return stored plugin metadata for a secret, or {} when unavailable."""
    if secret_model:
        secret_meta_repo = repos.get_secret_meta_repository()
        return secret_meta_repo.get_metadata_for_secret(secret_model.id)
    else:
        return {}


def _save_secret_metadata_in_repo(secret_model, secret_metadata,
                                  store_plugin, content_type):
    """Add secret metadata to a secret."""
    if not secret_metadata:
        secret_metadata = {}

    secret_metadata['plugin_name'] = utils.generate_fullname_for(store_plugin)
    secret_metadata['content_type'] = content_type

    secret_meta_repo = repos.get_secret_meta_repository()
    secret_meta_repo.save(secret_metadata, secret_model)


def _save_secret_in_repo(secret_model, project_model):
    """Save a Secret entity."""
    secret_repo = repos.get_secret_repository()

    # Create Secret entities in data store.
    if not secret_model.id:
        secret_model.project_id = project_model.id
        secret_repo.create_from(secret_model)
    else:
        secret_repo.save(secret_model)


def _secret_already_has_stored_data(secret_model):
    # A secret counts as "stored" once it has either encrypted data or
    # plugin metadata attached to it.
    if not secret_model:
        return False
    return secret_model.encrypted_data or secret_model.secret_store_metadata


def _create_container_for_asymmetric_secret(spec, project_model):
    """Build (but do not persist) a Container model for a generated keypair."""
    container_model = models.Container()
    container_model.name = spec.get('name')
    container_model.type = spec.get('algorithm', '').lower()
    container_model.status = models.States.ACTIVE
    container_model.project_id = project_model.id
    container_model.creator_id = spec.get('creator_id')
    return container_model


def _save_asymmetric_secret_in_repo(container_model, private_secret_model,
                                    public_secret_model,
                                    passphrase_secret_model):
    """Persist the container and its secret associations."""
    container_repo = repos.get_container_repository()
    container_repo.create_from(container_model)

    # create container_secret for private_key
    _create_container_secret_association('private_key',
                                         private_secret_model,
                                         container_model)

    # create container_secret for public_key
    _create_container_secret_association('public_key',
                                         public_secret_model,
                                         container_model)

    if passphrase_secret_model:
        # create container_secret for passphrase
        _create_container_secret_association('private_key_passphrase',
                                             passphrase_secret_model,
                                             container_model)


def _create_container_secret_association(assoc_name, secret_model,
                                         container_model):
    """Persist one named ContainerSecret link between container and secret."""
    container_secret = models.ContainerSecret()
    container_secret.name = assoc_name
    container_secret.container_id = container_model.id
    container_secret.secret_id = secret_model.id

    container_secret_repo = repos.get_container_secret_repository()
    container_secret_repo.create_from(container_secret)
barbican-2.0.0/barbican/plugin/symantec.py0000664000567000056710000002603612701405673021657 0ustar jenkinsjenkins00000000000000# Copyright (c) 2013-2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Barbican certificate processing plugins and support.
"""
from oslo_config import cfg
from requests import exceptions as request_exceptions
from symantecssl.core import Symantec
from symantecssl import exceptions as symantec_exceptions

from barbican.common import config
from barbican import i18n as u
from barbican.plugin.interface import certificate_manager as cert

CONF = config.new_config()

symantec_plugin_group = cfg.OptGroup(name='symantec_plugin',
                                     title='Symantec Plugin Options')

symantec_plugin_opts = [
    cfg.StrOpt('username',
               help=u._('Symantec username for authentication')),
    cfg.StrOpt('password',
               help=u._('Symantec password for authentication'),
               secret=True),
    cfg.StrOpt('url',
               help=u._('Domain of Symantec API'))
]

CONF.register_group(symantec_plugin_group)
CONF.register_opts(symantec_plugin_opts, group=symantec_plugin_group)
config.parse_args(CONF)


class SymantecCertificatePlugin(cert.CertificatePluginBase):
    """Symantec certificate plugin."""

    def __init__(self, conf=CONF):
        # All three options are mandatory; fail fast at construction time
        # rather than on the first API call.
        self.username = conf.symantec_plugin.username
        self.password = conf.symantec_plugin.password
        self.url = conf.symantec_plugin.url

        if self.username is None:
            raise ValueError(u._("username is required"))

        if self.password is None:
            raise ValueError(u._("password is required"))

        if self.url is None:
            raise ValueError(u._("url is required"))

    def get_default_ca_name(self):
        return "Symantec CA"

    def get_default_signing_cert(self):
        # TODO(chellygel) Add code to get the signing cert
        return None

    def get_default_intermediates(self):
        # TODO(chellygel) Add code to get the cert chain
        return None

    def issue_certificate_request(self, order_id, order_meta, plugin_meta,
                                  barbican_meta_dto):
        """Create the initial order with CA

        :param order_id: ID associated with the order
        :param order_meta: Dict of meta-data associated with the order.
        :param plugin_meta: Plugin meta-data previously set by calls to
                            this plugin. Plugins may also update/add
                            information here which Barbican will persist
                            on their behalf.
        :param barbican_meta_dto: additional data needed to process order.
        :returns: ResultDTO
        """
        successful, error_msg, can_retry = _ca_create_order(order_meta,
                                                            plugin_meta)

        status = cert.CertificateStatus.CA_UNAVAILABLE_FOR_REQUEST
        message = None

        if successful:
            status = cert.CertificateStatus.WAITING_FOR_CA
        elif can_retry:
            status = cert.CertificateStatus.CLIENT_DATA_ISSUE_SEEN
            message = error_msg

        return cert.ResultDTO(status=status, status_message=message)

    def modify_certificate_request(self, order_id, order_meta, plugin_meta,
                                   barbican_meta_dto):
        """Update the order meta-data

        :param order_id: ID associated with the order
        :param order_meta: Dict of meta-data associated with the order.
        :param plugin_meta: Plugin meta-data previously set by calls to
                            this plugin. Plugins may also update/add
                            information here which Barbican will persist
                            on their behalf.
        :param barbican_meta_dto: additional data needed to process order.
        """
        raise NotImplementedError  # pragma: no cover

    def cancel_certificate_request(self, order_id, order_meta, plugin_meta,
                                   barbican_meta_dto):
        """Cancel the order

        :param order_id: ID associated with the order
        :param order_meta: Dict of meta-data associated with the order.
        :param plugin_meta: Plugin meta-data previously set by calls to
                            this plugin. Plugins may also update/add
                            information here which Barbican will persist
                            on their behalf.
        :param barbican_meta_dto: additional data needed to process order.
        """
        raise NotImplementedError  # pragma: no cover

    def check_certificate_status(self, order_id, order_meta, plugin_meta,
                                 barbican_meta_dto):
        """Check status of the order

        :param order_id: ID associated with the order
        :param order_meta: Dict of meta-data associated with the order.
        :param plugin_meta: Plugin meta-data previously set by calls to
                            this plugin. Plugins may also update/add
                            information here which Barbican will persist
                            on their behalf.
        :param barbican_meta_dto: additional data needed to process order.
        """
        raise NotImplementedError  # pragma: no cover

    def supports(self, certificate_spec):
        """Indicates if the plugin supports the certificate type.

        :param certificate_spec: Contains details on the certificate to
                                 generate the certificate order
        :returns: boolean indicating if the plugin supports the certificate
                  type
        """
        # TODO(chellygel): Research what certificate types are supported by
        # symantec. Returning True for testing purposes
        return True


# NOTE(review): the _ca_* helpers below are module-level yet take a `self`
# first parameter, and issue_certificate_request calls _ca_create_order with
# only two arguments (order_meta lands in `self`) — this looks broken and
# would fail on `self.username`; confirm intended usage before relying on it.
def _ca_create_order(self, order_meta, plugin_meta):
    """Creates an order with the Symantec CA.

    The PartnerOrderId and GeoTrustOrderId are returned and stored in
    plugin_meta. PartnerCode and ProductCode are also stored in plugin_meta
    for future use.

    All required order parameters must be stored as a dict in order_meta.
    Required fields are:
    PartnerCode, ProductCode, PartnerOrderId, OrganizationName,
    AddressLine1, City, Region, PostalCode, Country, OrganizationPhone
    ValidityPeriod, ServerCount, WebServerType, AdminContactFirstName,
    AdminContactLastName, AdminContactPhone, AdminContactEmail,
    AdminContactTitle, AdminContactAddressLine1, AdminContactCity,
    AdminContactRegion, AdminContactPostalCode, AdminContactCountry,
    BillingContact*, TechContact*, and CSR.

    *The Billing and Tech contact information follows the same convention
    as the AdminContact fields.

    Optional Parameters: TechSameAsAdmin, BillSameAsAdmin, more options
    can be found in Symantec's API docs. Contact Symantec for the API
    document.

    :returns: tuple with success, error message, and can retry
    """
    api = Symantec(self.username, self.password, self.url)

    try:
        order_data = api.order(**order_meta)

        # GeotrustOrderId is used to handle emails from Symantec.
        # PartnerCode and ProductCode are being stored in plugin_meta for
        # convenience when calling _ca_get_order_status, _ca_modify_order,
        # etc.
        plugin_meta["GeotrustOrderID"] = order_data["GeotrustOrderID"]
        plugin_meta["PartnerOrderID"] = order_data["PartnerOrderID"]
        plugin_meta["PartnerCode"] = order_meta["OrderDetails"]["PartnerCode"]
        plugin_meta["ProductCode"] = order_meta["OrderDetails"]["ProductCode"]

        return True, None, False
    except symantec_exceptions.SymantecError as e:
        # API-level rejection: not retryable.
        return False, e, False
    except request_exceptions.RequestException as e:
        # Network-level failure: retryable.
        return False, e, True


def _ca_get_order_status(self, plugin_meta):
    """Sends a request to the Symantec CA for details on an order.

    Parameters needed for GetOrderByPartnerOrderID:
    plugin_meta parameters: PartnerOrderId, PartnerCode

    If the order is complete, the Certificate is returned as a string.
    returns: tuple with success, error message, can retry,
    and the certificate (if available).
    """
    api = Symantec(self.username, self.password, self.url)

    order_details = {
        "PartnerOrderID": plugin_meta["PartnerOrderID"],
        "PartnerCode": plugin_meta["PartnerCode"],
        "ReturnCertificateInfo": "TRUE",
        "ReturnFulfillment": "TRUE",
        "ReturnCaCerts": "TRUE",
    }

    try:
        order_data = api.get_order_by_partner_order_id(**order_details)
        if order_data["OrderInfo"]["OrderState"] == "COMPLETED":
            ca = order_data["Fulfillment"]["CACertificates"]["CACertificate"]
            return True, None, False, ca["CACert"]
        return True, None, False, None
    except symantec_exceptions.SymantecError as e:
        return False, e, False, None
    except request_exceptions.RequestException as e:
        return False, e, True, None


def _ca_modify_order(self, order_meta, plugin_meta):
    """Sends a request to the Symantec CA to modify an order.

    Parameters needed for modifyOrder:
        PartnerOrderID - Needed to specify order
        PartnerCode - Needed to specify order
        ProductCode - Needed to specify order

    Also need a dict, order_meta with the parameters/values to modify.

    returns: tuple with success, error message, and can retry.
    """
    api = Symantec(self.username, self.password, self.url)

    order_details = {
        "PartnerOrderID": plugin_meta["PartnerOrderID"],
        "PartnerCode": plugin_meta["PartnerCode"],
        "ProductCode": plugin_meta["ProductCode"],
    }

    order_details.update(order_meta)

    try:
        api.validate_order_parameters(**order_details)
        return True, None, False
    except symantec_exceptions.SymantecError as e:
        return False, e, False
    except request_exceptions.RequestException as e:
        return False, e, True


def _ca_cancel_order(self, plugin_meta):
    """Sends a request to the Symantec CA to cancel an order.

    Parameters needed for modifyOrder:
        PartnerOrderID - Needed to specify order
        PartnerCode - Needed to specify order
        ProductCode - Needed to specify order

    returns: tuple with success, error message, and can retry.
    """
    api = Symantec(self.username, self.password, self.url)

    order_details = {
        "PartnerOrderID": plugin_meta["PartnerOrderID"],
        "PartnerCode": plugin_meta["PartnerCode"],
        "ProductCode": plugin_meta["ProductCode"],
        "ModifyOrderOperation": "CANCEL",
    }

    try:
        api.modify_order(**order_details)
        return True, None, False
    except symantec_exceptions.SymantecError as e:
        return False, e, False
    except request_exceptions.RequestException as e:
        return False, e, True
barbican-2.0.0/barbican/plugin/util/0000775000567000056710000000000012701406024020423 5ustar jenkinsjenkins00000000000000barbican-2.0.0/barbican/plugin/util/utils.py0000664000567000056710000000412112701405673022144 0ustar jenkinsjenkins00000000000000# Copyright (c) 2015 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ Utilities to support plugins and plugin managers. """ from barbican.common import utils from barbican import i18n as u LOG = utils.getLogger(__name__) def instantiate_plugins(extension_manager, invoke_args=(), invoke_kwargs={}): """Attempt to create each plugin managed by a stevedore manager. While we could have let the stevedore 'extension_manager' create our plugins by passing 'invoke_on_load=True' to its initializer, its logic handles and suppresses any root cause exceptions emanating from the plugins' initializers. This function allows those exceptions to be exposed. :param extension_manager: A :class:`NamedExtensionManager` instance that has already processed the configured plugins, but has not yet created instances of these plugins. :param invoke_args: Arguments to pass to the new plugin instance. :param invoke_kwargs: Keyword arguments to pass to the new plugin instance. """ for ext in extension_manager.extensions: if not ext.obj: try: plugin_instance = ext.plugin(*invoke_args, **invoke_kwargs) except Exception: LOG.logger.disabled = False # Ensure not suppressing logs. 
LOG.exception( u._LE("Problem seen creating plugin: '%s'"), ext.name ) else: ext.obj = plugin_instance def get_active_plugins(extension_manager): return [ext.obj for ext in extension_manager.extensions if ext.obj] barbican-2.0.0/barbican/plugin/util/__init__.py0000664000567000056710000000000012701405673022533 0ustar jenkinsjenkins00000000000000barbican-2.0.0/barbican/plugin/util/mime_types.py0000664000567000056710000001345712701405673023173 0ustar jenkinsjenkins00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ Barbican defined mime-types """ import six from barbican.common import utils # Supported content types # Note: These types may be provided by clients. PLAIN_TEXT = ['text/plain', 'text/plain;charset=utf-8', 'text/plain; charset=utf-8'] PLAIN_TEXT_CHARSETS = ['utf-8'] BINARY = ['application/octet-stream', 'application/pkcs8', 'application/pkix-cert'] SUPPORTED = PLAIN_TEXT + BINARY # Normalizes client types to internal types. INTERNAL_CTYPES = {'text/plain': 'text/plain', 'text/plain;charset=utf-8': 'text/plain', 'text/plain; charset=utf-8': 'text/plain', 'application/octet-stream': 'application/octet-stream', 'application/pkcs8': 'application/pkcs8', 'application/pkix-cert': 'application/pkix-cert', 'application/aes': 'application/aes'} # Maps mime-types used to specify secret data formats to the types that can # be requested for secrets via GET calls. 
def normalize_content_type(mime_type):
    """Normalize the supplied content-type to an internal form."""
    parts = [piece.strip() for piece in mime_type.split(';')]
    base = parts[0].lower()
    if len(parts) > 1:
        # A parameter (expected to be a charset) follows the media type.
        param = parts[1].lower()
        if '=' not in param:
            # Malformed charset parameter: hand back the input unchanged.
            return mime_type
        charset = param.split('=')[1].strip()
        if charset not in PLAIN_TEXT_CHARSETS:
            # Unsupported charset: hand back the input unchanged.
            return mime_type
    return INTERNAL_CTYPES.get(base, mime_type)
def use_binary_content_as_is(content_type, content_encoding):
    """Checks if headers are valid to allow binary content as-is."""
    accepted = utils.get_accepted_encodings_direct(content_encoding)
    if accepted:
        # An explicit transfer encoding was requested; it must include
        # 'binary' and the content type must actually support it.
        if 'binary' not in accepted:
            return False
        if is_supported(content_type):
            supported = CTYPES_TO_ENCODINGS[INTERNAL_CTYPES.get(content_type)]
            return supported and 'binary' in supported
    # No usable encoding header: fall back to whether the normalized
    # content type is inherently binary.
    return INTERNAL_CTYPES.get(content_type) in BINARY
def normalize_before_encryption(unencrypted, content_type, content_encoding,
                                secret_type, enforce_text_only=False):
    """Normalize unencrypted prior to plugin encryption processing.

    This normalizes the secrets before they are handed off to the SecretStore
    for storage. This converts all data to Base64 data. If the data is plain
    text then it encoded using utf-8 first and then Base64 encoded. Binary
    data is simply converted to Base64.

    :param str unencrypted: Raw payload
    :param str content_type: The media type for the payload
    :param str content_encoding: Transfer encoding
    :param str secret_type: The type of secret
    :param bool enforce_text_only: Require text content_type or base64
        content_encoding
    :returns: Tuple containing the normalized (base64 encoded) payload and
        the normalized media type.
    """
    if not unencrypted:
        raise s.SecretNoPayloadProvidedException()

    # Validate and normalize content-type.
    media_type = normalize_content_type(content_type)

    if media_type in mime_types.PLAIN_TEXT:
        # Plain text: utf-8 encode to bytes, then base64 encode.
        payload = base64.b64encode(unencrypted.encode('utf-8'))
        return payload, media_type

    # Binary payloads: accept raw bytes or already-base64 data.
    if not content_encoding:
        return base64.b64encode(unencrypted), media_type
    if content_encoding.lower() == 'base64':
        return unencrypted, media_type
    if enforce_text_only:
        # For text-based protocols (such as the one-step secret POST),
        # only 'base64' encoding is possible/supported.
        raise s.SecretContentEncodingMustBeBase64()
    # Any other encoding is unsupported.
    raise s.SecretContentEncodingNotSupportedException(
        content_encoding
    )
def convert_pem_to_der(pem, secret_type):
    """Convert a PEM-encoded secret to DER, dispatching on secret type."""
    converters = {
        s.SecretType.PRIVATE: _convert_private_pem_to_der,
        s.SecretType.PUBLIC: _convert_public_pem_to_der,
        s.SecretType.CERTIFICATE: _convert_certificate_pem_to_der,
    }
    converter = converters.get(secret_type)
    if converter is None:
        reason = u._("Secret type can not be converted to DER")
        raise s.SecretGeneralException(reason=reason)
    return converter(pem)
jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import collections import textwrap import cffi from barbican.common import exception from barbican.common import utils from barbican import i18n as u LOG = utils.getLogger(__name__) Attribute = collections.namedtuple("Attribute", ["type", "value"]) CKAttributes = collections.namedtuple("CKAttributes", ["template", "cffivals"]) CKMechanism = collections.namedtuple("CKMechanism", ["mech", "cffivals"]) CKR_OK = 0 CKF_RW_SESSION = (1 << 1) CKF_SERIAL_SESSION = (1 << 2) CKU_SO = 0 CKU_USER = 1 CKS_RO_PUBLIC_SESSION = 0 CKS_RO_USER_FUNCTIONS = 1 CKS_RW_PUBLIC_SESSION = 2 CKS_RW_USER_FUNCTIONS = 3 CKO_SECRET_KEY = 4 CKK_AES = 0x1f CKA_CLASS = 0 CKA_TOKEN = 1 CKA_PRIVATE = 2 CKA_LABEL = 3 CKA_APPLICATION = 0x10 CKA_VALUE = 0x11 CKA_OBJECT_ID = 0x12 CKA_CERTIFICATE_TYPE = 0x80 CKA_ISSUER = 0x81 CKA_SERIAL_NUMBER = 0x82 CKA_AC_ISSUER = 0x83 CKA_OWNER = 0x84 CKA_ATTR_TYPES = 0x85 CKA_TRUSTED = 0x86 CKA_CERTIFICATE_CATEGORY = 0x87 CKA_JAVA_MIDP_SECURITY_DOMAIN = 0x88 CKA_URL = 0x89 CKA_HASH_OF_SUBJECT_PUBLIC_KEY = 0x8a CKA_HASH_OF_ISSUER_PUBLIC_KEY = 0x8b CKA_CHECK_VALUE = 0x90 CKA_KEY_TYPE = 0x100 CKA_SUBJECT = 0x101 CKA_ID = 0x102 CKA_SENSITIVE = 0x103 CKA_ENCRYPT = 0x104 CKA_DECRYPT = 0x105 CKA_WRAP = 0x106 CKA_UNWRAP = 0x107 CKA_SIGN = 0x108 CKA_SIGN_RECOVER = 0x109 CKA_VERIFY = 0x10a CKA_VERIFY_RECOVER = 0x10b CKA_DERIVE = 0x10c CKA_START_DATE = 0x110 CKA_END_DATE = 0x111 CKA_MODULUS = 0x120 CKA_MODULUS_BITS = 
0x121 CKA_PUBLIC_EXPONENT = 0x122 CKA_PRIVATE_EXPONENT = 0x123 CKA_PRIME_1 = 0x124 CKA_PRIME_2 = 0x125 CKA_EXPONENT_1 = 0x126 CKA_EXPONENT_2 = 0x127 CKA_COEFFICIENT = 0x128 CKA_PRIME = 0x130 CKA_SUBPRIME = 0x131 CKA_BASE = 0x132 CKA_PRIME_BITS = 0x133 CKA_SUB_PRIME_BITS = 0x134 CKA_VALUE_BITS = 0x160 CKA_VALUE_LEN = 0x161 CKA_EXTRACTABLE = 0x162 CKA_LOCAL = 0x163 CKA_NEVER_EXTRACTABLE = 0x164 CKA_ALWAYS_SENSITIVE = 0x165 CKA_KEY_GEN_MECHANISM = 0x166 CKA_MODIFIABLE = 0x170 CKA_ECDSA_PARAMS = 0x180 CKA_EC_PARAMS = 0x180 CKA_EC_POINT = 0x181 CKA_SECONDARY_AUTH = 0x200 CKA_AUTH_PIN_FLAGS = 0x201 CKA_ALWAYS_AUTHENTICATE = 0x202 CKA_WRAP_WITH_TRUSTED = 0x210 CKA_HW_FEATURE_TYPE = 0x300 CKA_RESET_ON_INIT = 0x301 CKA_HAS_RESET = 0x302 CKA_PIXEL_X = 0x400 CKA_PIXEL_Y = 0x401 CKA_RESOLUTION = 0x402 CKA_CHAR_ROWS = 0x403 CKA_CHAR_COLUMNS = 0x404 CKA_COLOR = 0x405 CKA_BITS_PER_PIXEL = 0x406 CKA_CHAR_SETS = 0x480 CKA_ENCODING_METHODS = 0x481 CKA_MIME_TYPES = 0x482 CKA_MECHANISM_TYPE = 0x500 CKA_REQUIRED_CMS_ATTRIBUTES = 0x501 CKA_DEFAULT_CMS_ATTRIBUTES = 0x502 CKA_SUPPORTED_CMS_ATTRIBUTES = 0x503 CKM_SHA256_HMAC = 0x251 CKM_AES_KEY_GEN = 0x1080 CKM_AES_CBC = 0x1082 CKM_AES_CBC_PAD = 0x1085 CKM_AES_GCM = 0x1087 CKM_AES_KEY_WRAP = 0x1090 VENDOR_SAFENET_CKM_AES_GCM = 0x8000011c CKM_NAMES = { 'CKM_AES_GCM': CKM_AES_GCM, 'VENDOR_SAFENET_CKM_AES_GCM': VENDOR_SAFENET_CKM_AES_GCM } ERROR_CODES = { 1: 'CKR_CANCEL', 2: 'CKR_HOST_MEMORY', 3: 'CKR_SLOT_ID_INVALID', 5: 'CKR_GENERAL_ERROR', 6: 'CKR_FUNCTION_FAILED', 7: 'CKR_ARGUMENTS_BAD', 8: 'CKR_NO_EVENT', 9: 'CKR_NEED_TO_CREATE_THREADS', 0xa: 'CKR_CANT_LOCK', 0x10: 'CKR_ATTRIBUTE_READ_ONLY', 0x11: 'CKR_ATTRIBUTE_SENSITIVE', 0x12: 'CKR_ATTRIBUTE_TYPE_INVALID', 0x13: 'CKR_ATTRIBUTE_VALUE_INVALID', 0x20: 'CKR_DATA_INVALID', 0x21: 'CKR_DATA_LEN_RANGE', 0x30: 'CKR_DEVICE_ERROR', 0x31: 'CKR_DEVICE_MEMORY', 0x32: 'CKR_DEVICE_REMOVED', 0x40: 'CKR_ENCRYPTED_DATA_INVALID', 0x41: 'CKR_ENCRYPTED_DATA_LEN_RANGE', 0x50: 'CKR_FUNCTION_CANCELED', 0x51: 
'CKR_FUNCTION_NOT_PARALLEL', 0x54: 'CKR_FUNCTION_NOT_SUPPORTED', 0x60: 'CKR_KEY_HANDLE_INVALID', 0x62: 'CKR_KEY_SIZE_RANGE', 0x63: 'CKR_KEY_TYPE_INCONSISTENT', 0x64: 'CKR_KEY_NOT_NEEDED', 0x65: 'CKR_KEY_CHANGED', 0x66: 'CKR_KEY_NEEDED', 0x67: 'CKR_KEY_INDIGESTIBLE', 0x68: 'CKR_KEY_FUNCTION_NOT_PERMITTED', 0x69: 'CKR_KEY_NOT_WRAPPABLE', 0x6a: 'CKR_KEY_UNEXTRACTABLE', 0x70: 'CKR_MECHANISM_INVALID', 0x71: 'CKR_MECHANISM_PARAM_INVALID', 0x82: 'CKR_OBJECT_HANDLE_INVALID', 0x90: 'CKR_OPERATION_ACTIVE', 0x91: 'CKR_OPERATION_NOT_INITIALIZED', 0xa0: 'CKR_PIN_INCORRECT', 0xa1: 'CKR_PIN_INVALID', 0xa2: 'CKR_PIN_LEN_RANGE', 0xa3: 'CKR_PIN_EXPIRED', 0xa4: 'CKR_PIN_LOCKED', 0xb0: 'CKR_SESSION_CLOSED', 0xb1: 'CKR_SESSION_COUNT', 0xb3: 'CKR_SESSION_HANDLE_INVALID', 0xb4: 'CKR_SESSION_PARALLEL_NOT_SUPPORTED', 0xb5: 'CKR_SESSION_READ_ONLY', 0xb6: 'CKR_SESSION_EXISTS', 0xb7: 'CKR_SESSION_READ_ONLY_EXISTS', 0xb8: 'CKR_SESSION_READ_WRITE_SO_EXISTS', 0xc0: 'CKR_SIGNATURE_INVALID', 0xc1: 'CKR_SIGNATURE_LEN_RANGE', 0xd0: 'CKR_TEMPLATE_INCOMPLETE', 0xd1: 'CKR_TEMPLATE_INCONSISTENT', 0xe0: 'CKR_TOKEN_NOT_PRESENT', 0xe1: 'CKR_TOKEN_NOT_RECOGNIZED', 0xe2: 'CKR_TOKEN_WRITE_PROTECTED', 0xf0: 'CKR_UNWRAPPING_KEY_HANDLE_INVALID', 0xf1: 'CKR_UNWRAPPING_KEY_SIZE_RANGE', 0xf2: 'CKR_UNWRAPPING_KEY_TYPE_INCONSISTENT', 0x100: 'CKR_USER_ALREADY_LOGGED_IN', 0x101: 'CKR_USER_NOT_LOGGED_IN', 0x102: 'CKR_USER_PIN_NOT_INITIALIZED', 0x103: 'CKR_USER_TYPE_INVALID', 0x104: 'CKR_USER_ANOTHER_ALREADY_LOGGED_IN', 0x105: 'CKR_USER_TOO_MANY_TYPES', 0x110: 'CKR_WRAPPED_KEY_INVALID', 0x112: 'CKR_WRAPPED_KEY_LEN_RANGE', 0x113: 'CKR_WRAPPING_KEY_HANDLE_INVALID', 0x114: 'CKR_WRAPPING_KEY_SIZE_RANGE', 0x115: 'CKR_WRAPPING_KEY_TYPE_INCONSISTENT', 0x120: 'CKR_RANDOM_SEED_NOT_SUPPORTED', 0x121: 'CKR_RANDOM_NO_RNG', 0x130: 'CKR_DOMAIN_PARAMS_INVALID', 0x150: 'CKR_BUFFER_TOO_SMALL', 0x160: 'CKR_SAVED_STATE_INVALID', 0x170: 'CKR_INFORMATION_SENSITIVE', 0x180: 'CKR_STATE_UNSAVEABLE', 0x190: 'CKR_CRYPTOKI_NOT_INITIALIZED', 0x191: 
'CKR_CRYPTOKI_ALREADY_INITIALIZED', 0x1a0: 'CKR_MUTEX_BAD', 0x1a1: 'CKR_MUTEX_NOT_LOCKED', 0x200: 'CKR_FUNCTION_REJECTED', 1 << 31: 'CKR_VENDOR_DEFINED' } def build_ffi(): ffi = cffi.FFI() ffi.cdef(textwrap.dedent(""" typedef unsigned char CK_BYTE; typedef unsigned long CK_ULONG; typedef unsigned long CK_RV; typedef unsigned long CK_SESSION_HANDLE; typedef unsigned long CK_OBJECT_HANDLE; typedef unsigned long CK_SLOT_ID; typedef unsigned long CK_FLAGS; typedef unsigned long CK_STATE; typedef unsigned long CK_USER_TYPE; typedef unsigned char * CK_UTF8CHAR_PTR; typedef ... *CK_NOTIFY; typedef unsigned long ck_attribute_type_t; struct ck_attribute { ck_attribute_type_t type; void *value; unsigned long value_len; }; typedef struct ck_attribute CK_ATTRIBUTE; typedef CK_ATTRIBUTE *CK_ATTRIBUTE_PTR; typedef unsigned long ck_mechanism_type_t; struct ck_mechanism { ck_mechanism_type_t mechanism; void *parameter; unsigned long parameter_len; }; typedef struct ck_mechanism CK_MECHANISM; typedef CK_MECHANISM *CK_MECHANISM_PTR; typedef CK_BYTE *CK_BYTE_PTR; typedef CK_ULONG *CK_ULONG_PTR; typedef struct ck_session_info { CK_SLOT_ID slot_id; CK_STATE state; CK_FLAGS flags; unsigned long device_error; } CK_SESSION_INFO; typedef CK_SESSION_INFO *CK_SESSION_INFO_PTR; typedef struct CK_AES_GCM_PARAMS { char * pIv; unsigned long ulIvLen; unsigned long ulIvBits; char * pAAD; unsigned long ulAADLen; unsigned long ulTagBits; } CK_AES_GCM_PARAMS; """)) # FUNCTIONS ffi.cdef(textwrap.dedent(""" CK_RV C_Initialize(void *); CK_RV C_OpenSession(CK_SLOT_ID, CK_FLAGS, void *, CK_NOTIFY, CK_SESSION_HANDLE *); CK_RV C_CloseSession(CK_SESSION_HANDLE); CK_RV C_GetSessionInfo(CK_SESSION_HANDLE, CK_SESSION_INFO_PTR); CK_RV C_Login(CK_SESSION_HANDLE, CK_USER_TYPE, CK_UTF8CHAR_PTR, CK_ULONG); CK_RV C_SetAttributeValue(CK_SESSION_HANDLE, CK_OBJECT_HANDLE, CK_ATTRIBUTE *, CK_ULONG); CK_RV C_DestroyObject(CK_SESSION_HANDLE, CK_OBJECT_HANDLE); CK_RV C_FindObjectsInit(CK_SESSION_HANDLE, CK_ATTRIBUTE *, 
class P11CryptoPluginKeyException(exception.BarbicanException):
    """Raised when more than one HSM key matches a requested label."""
    message = u._("More than one key found for label")


class P11CryptoPluginException(exception.BarbicanException):
    """General PKCS#11 failure (e.g. a non-CKR_OK library return code)."""
    message = u._("General exception")


class P11CryptoKeyHandleException(exception.BarbicanException):
    """Raised when no key with the requested label exists in the HSM."""
    message = u._("No key handle was found")
= 12 self.gcmtagsize = 16 # Validate configuration and RNG session = self.get_session() self._rng_self_test(session) self.return_session(session) def get_session(self): session = self._open_session(self.slot_id) # Get session info to check user state session_info = self._get_session_info(session) if session_info.state in (CKS_RO_PUBLIC_SESSION, CKS_RW_PUBLIC_SESSION): # Login public sessions self._login(self.login_passphrase, session) return session def return_session(self, session): self._close_session(session) def generate_random(self, length, session): buf = self._generate_random(length, session) return self.ffi.buffer(buf)[:] def get_key_handle(self, label, session): attributes = self._build_attributes([ Attribute(CKA_CLASS, CKO_SECRET_KEY), Attribute(CKA_KEY_TYPE, CKK_AES), Attribute(CKA_LABEL, str(label)) ]) rv = self.lib.C_FindObjectsInit( session, attributes.template, len(attributes.template) ) self._check_error(rv) count = self.ffi.new("CK_ULONG *") obj_handle_ptr = self.ffi.new("CK_OBJECT_HANDLE[2]") rv = self.lib.C_FindObjects(session, obj_handle_ptr, 2, count) self._check_error(rv) key = None if count[0] == 1: key = obj_handle_ptr[0] rv = self.lib.C_FindObjectsFinal(session) self._check_error(rv) if count[0] > 1: raise P11CryptoPluginKeyException() return key def encrypt(self, key, pt_data, session): iv = self._generate_random(self.noncesize, session) ck_mechanism = self._build_gcm_mechanism(iv) rv = self.lib.C_EncryptInit(session, ck_mechanism.mech, key) self._check_error(rv) pt_len = len(pt_data) ct_len = self.ffi.new("CK_ULONG *", pt_len + self.gcmtagsize) ct = self.ffi.new("CK_BYTE[{0}]".format(ct_len[0])) rv = self.lib.C_Encrypt(session, pt_data, pt_len, ct, ct_len) self._check_error(rv) return { "iv": self.ffi.buffer(iv)[:], "ct": self.ffi.buffer(ct, ct_len[0])[:] } def decrypt(self, key, iv, ct_data, session): iv = self.ffi.new("CK_BYTE[{0}]".format(len(iv)), iv) ck_mechanism = self._build_gcm_mechanism(iv) rv = self.lib.C_DecryptInit(session, 
ck_mechanism.mech, key) self._check_error(rv) ct_len = len(ct_data) pt_len = self.ffi.new("CK_ULONG *", ct_len) pt = self.ffi.new("CK_BYTE[{0}]".format(pt_len[0])) rv = self.lib.C_Decrypt(session, ct_data, ct_len, pt, pt_len) self._check_error(rv) pt = self.ffi.buffer(pt, pt_len[0])[:] # Secrets stored by the old code uses 16 byte IVs, while the new code # uses 12 byte IVs to be more efficient with GCM. We can use this to # detect secrets stored by the old code and perform padding removal. # If we find a 16 byte IV, we check to make sure the decrypted plain # text is a multiple of the block size, and then that the end of the # plain text looks like padding, ie the last character is a value # between 1 and blocksize, and that there are that many consecutive # bytes of that value at the end. If all of that is true, we remove # the found padding. if len(iv) == self.blocksize and \ (len(pt) % self.blocksize) == 0 and \ 1 <= ord(pt[-1]) <= self.blocksize and \ pt.endswith(pt[-1] * ord(pt[-1])): pt = pt[:-(ord(pt[-1]))] return pt def generate_key(self, key_length, session, key_label=None, encrypt=False, sign=False, wrap=False, master_key=False): if not encrypt and not sign and not wrap: raise P11CryptoPluginException() if master_key and not key_label: raise ValueError(u._("key_label must be set for master_keys")) token = True if master_key else False extractable = False if master_key else True ck_attributes = [ Attribute(CKA_CLASS, CKO_SECRET_KEY), Attribute(CKA_KEY_TYPE, CKK_AES), Attribute(CKA_VALUE_LEN, key_length), Attribute(CKA_TOKEN, token), Attribute(CKA_PRIVATE, True), Attribute(CKA_SENSITIVE, True), Attribute(CKA_ENCRYPT, encrypt), Attribute(CKA_DECRYPT, encrypt), Attribute(CKA_SIGN, sign), Attribute(CKA_VERIFY, sign), Attribute(CKA_WRAP, wrap), Attribute(CKA_UNWRAP, wrap), Attribute(CKA_EXTRACTABLE, extractable) ] if master_key: ck_attributes.append(Attribute(CKA_LABEL, key_label)) ck_attributes = self._build_attributes(ck_attributes) mech = 
self.ffi.new("CK_MECHANISM *") mech.mechanism = CKM_AES_KEY_GEN obj_handle_ptr = self.ffi.new("CK_OBJECT_HANDLE *") rv = self.lib.C_GenerateKey( session, mech, ck_attributes.template, len(ck_attributes.template), obj_handle_ptr ) self._check_error(rv) return obj_handle_ptr[0] def wrap_key(self, wrapping_key, key_to_wrap, session): mech = self.ffi.new("CK_MECHANISM *") mech.mechanism = CKM_AES_CBC_PAD iv = self._generate_random(16, session) mech.parameter = iv mech.parameter_len = 16 # Ask for length of the wrapped key wrapped_key_len = self.ffi.new("CK_ULONG *") rv = self.lib.C_WrapKey( session, mech, wrapping_key, key_to_wrap, self.ffi.NULL, wrapped_key_len ) self._check_error(rv) # Wrap key wrapped_key = self.ffi.new("CK_BYTE[{0}]".format(wrapped_key_len[0])) rv = self.lib.C_WrapKey( session, mech, wrapping_key, key_to_wrap, wrapped_key, wrapped_key_len ) self._check_error(rv) return { 'iv': self.ffi.buffer(iv)[:], 'wrapped_key': self.ffi.buffer(wrapped_key, wrapped_key_len[0])[:] } def unwrap_key(self, wrapping_key, iv, wrapped_key, session): ck_iv = self.ffi.new("CK_BYTE[]", iv) ck_wrapped_key = self.ffi.new("CK_BYTE[]", wrapped_key) unwrapped_key = self.ffi.new("CK_OBJECT_HANDLE *") mech = self.ffi.new("CK_MECHANISM *") mech.mechanism = CKM_AES_CBC_PAD mech.parameter = ck_iv mech.parameter_len = len(iv) ck_attributes = self._build_attributes([ Attribute(CKA_CLASS, CKO_SECRET_KEY), Attribute(CKA_KEY_TYPE, CKK_AES), Attribute(CKA_TOKEN, False), Attribute(CKA_PRIVATE, True), Attribute(CKA_SENSITIVE, True), Attribute(CKA_ENCRYPT, True), Attribute(CKA_DECRYPT, True), Attribute(CKA_EXTRACTABLE, True) ]) rv = self.lib.C_UnwrapKey( session, mech, wrapping_key, ck_wrapped_key, len(wrapped_key), ck_attributes.template, len(ck_attributes.template), unwrapped_key ) self._check_error(rv) return unwrapped_key[0] def compute_hmac(self, hmac_key, data, session): mech = self.ffi.new("CK_MECHANISM *") mech.mechanism = CKM_SHA256_HMAC rv = self.lib.C_SignInit(session, mech, 
hmac_key) self._check_error(rv) ck_data = self.ffi.new("CK_BYTE[]", data) buf = self.ffi.new("CK_BYTE[32]") buf_len = self.ffi.new("CK_ULONG *", 32) rv = self.lib.C_Sign(session, ck_data, len(data), buf, buf_len) self._check_error(rv) return self.ffi.buffer(buf, buf_len[0])[:] def verify_hmac(self, hmac_key, sig, data, session): mech = self.ffi.new("CK_MECHANISM *") mech.mechanism = CKM_SHA256_HMAC rv = self.lib.C_VerifyInit(session, mech, hmac_key) self._check_error(rv) ck_data = self.ffi.new("CK_BYTE[]", data) ck_sig = self.ffi.new("CK_BYTE[]", sig) rv = self.lib.C_Verify(session, ck_data, len(data), ck_sig, len(sig)) self._check_error(rv) def destroy_object(self, obj_handle, session): rv = self.lib.C_DestroyObject(session, obj_handle) self._check_error(rv) def _check_error(self, value): if value != CKR_OK: # TODO(jkf) Expand error handling to raise different exceptions # for notable errors we want to handle programmatically raise P11CryptoPluginException(u._( "HSM returned response code: {hex_value} {code}").format( hex_value=hex(value), code=ERROR_CODES.get(value, 'CKR_????'))) def _generate_random(self, length, session): buf = self.ffi.new("CK_BYTE[{0}]".format(length)) rv = self.lib.C_GenerateRandom(session, buf, length) self._check_error(rv) return buf def _build_attributes(self, attrs): attributes = self.ffi.new("CK_ATTRIBUTE[{0}]".format(len(attrs))) val_list = [] for index, attr in enumerate(attrs): attributes[index].type = attr.type if isinstance(attr.value, bool): val_list.append(self.ffi.new("unsigned char *", int(attr.value))) attributes[index].value_len = 1 # sizeof(char) is 1 elif isinstance(attr.value, int): # second because bools are also considered ints val_list.append(self.ffi.new("CK_ULONG *", attr.value)) attributes[index].value_len = 8 elif isinstance(attr.value, str): buf = attr.value.encode('utf-8') val_list.append(self.ffi.new("char []", buf)) attributes[index].value_len = len(buf) elif isinstance(attr.value, bytes): 
val_list.append(self.ffi.new("char []", attr.value)) attributes[index].value_len = len(attr.value) else: raise TypeError(u._("Unknown attribute type provided.")) attributes[index].value = val_list[-1] return CKAttributes(attributes, val_list) def _open_session(self, slot): session_ptr = self.ffi.new("CK_SESSION_HANDLE *") flags = CKF_SERIAL_SESSION if self.rw_session: flags |= CKF_RW_SESSION rv = self.lib.C_OpenSession(slot, flags, self.ffi.NULL, self.ffi.NULL, session_ptr) self._check_error(rv) return session_ptr[0] def _close_session(self, session): rv = self.lib.C_CloseSession(session) self._check_error(rv) def _get_session_info(self, session): session_info_ptr = self.ffi.new("CK_SESSION_INFO *") rv = self.lib.C_GetSessionInfo(session, session_info_ptr) self._check_error(rv) return session_info_ptr[0] def _login(self, password, session): rv = self.lib.C_Login(session, CKU_USER, password, len(password)) self._check_error(rv) def _rng_self_test(self, session): test_random = self.generate_random(100, session) if test_random == b'\x00' * 100: raise P11CryptoPluginException( u._("Apparent RNG self-test failure.")) def _build_gcm_mechanism(self, iv): iv_len = len(iv) mech = self.ffi.new("CK_MECHANISM *") mech.mechanism = self.algorithm gcm = self.ffi.new("CK_AES_GCM_PARAMS *") gcm.pIv = iv gcm.ulIvLen = iv_len gcm.ulIvBits = iv_len * 8 gcm.ulTagBits = self.gcmtagsize * 8 mech.parameter = gcm mech.parameter_len = 48 return CKMechanism(mech, gcm) barbican-2.0.0/barbican/plugin/crypto/p11_crypto.py0000664000567000056710000003165312701405673023362 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import base64 import collections import threading import time from oslo_config import cfg from oslo_serialization import jsonutils as json from barbican.common import config from barbican.common import utils from barbican import i18n as u from barbican.plugin.crypto import crypto as plugin from barbican.plugin.crypto import pkcs11 CONF = config.new_config() LOG = utils.getLogger(__name__) CachedKEK = collections.namedtuple("CachedKEK", ["kek", "expires"]) p11_crypto_plugin_group = cfg.OptGroup(name='p11_crypto_plugin', title="PKCS11 Crypto Plugin Options") p11_crypto_plugin_opts = [ cfg.StrOpt('library_path', help=u._('Path to vendor PKCS11 library')), cfg.StrOpt('login', help=u._('Password to login to PKCS11 session'), secret=True), cfg.StrOpt('mkek_label', help=u._('Master KEK label (used in the HSM)')), cfg.IntOpt('mkek_length', help=u._('Master KEK length in bytes.')), cfg.StrOpt('hmac_label', help=u._('HMAC label (used in the HSM)')), cfg.IntOpt('slot_id', help=u._('HSM Slot ID'), default=1), cfg.BoolOpt('rw_session', help=u._('Flag for Read/Write Sessions'), default=True), cfg.IntOpt('pkek_length', help=u._('Project KEK length in bytes.'), default=32), cfg.IntOpt('pkek_cache_ttl', help=u._('Project KEK Cache Time To Live, in seconds'), default=900), cfg.IntOpt('pkek_cache_limit', help=u._('Project KEK Cache Item Limit'), default=100), cfg.StrOpt('algorithm', help=u._('Secret encryption algorithm'), default='VENDOR_SAFENET_CKM_AES_GCM'), ] CONF.register_group(p11_crypto_plugin_group) CONF.register_opts(p11_crypto_plugin_opts, group=p11_crypto_plugin_group) 
def json_dumps_compact(data):
    """Serialize *data* to JSON with no whitespace around separators."""
    compact_separators = (',', ':')
    return json.dumps(data, separators=compact_separators)
'session' in locals(): self._return_session(session) return pt_data def bind_kek_metadata(self, kek_meta_dto): if not kek_meta_dto.plugin_meta: # Generate wrapped kek and jsonify wkek = self._generate_wrapped_kek( self.pkek_length, kek_meta_dto.kek_label ) # Persisted by Barbican kek_meta_dto.plugin_meta = json_dumps_compact(wkek) kek_meta_dto.algorithm = 'AES' kek_meta_dto.bit_length = self.pkek_length * 8 kek_meta_dto.mode = 'CBC' return kek_meta_dto def generate_symmetric(self, generate_dto, kek_meta_dto, project_id): kek = self._load_kek_from_meta_dto(kek_meta_dto) byte_length = int(generate_dto.bit_length) // 8 try: session = self._get_session() buf = self.pkcs11.generate_random(byte_length, session) ct_data = self.pkcs11.encrypt(kek, buf, session) finally: if 'session' in locals(): self._return_session(session) kek_meta_extended = json_dumps_compact( {'iv': base64.b64encode(ct_data['iv'])} ) return plugin.ResponseDTO(ct_data['ct'], kek_meta_extended) def generate_asymmetric(self, generate_dto, kek_meta_dto, project_id): raise NotImplementedError(u._("Feature not implemented for PKCS11")) def supports(self, type_enum, algorithm=None, bit_length=None, mode=None): if type_enum == plugin.PluginSupportTypes.ENCRYPT_DECRYPT: return True elif type_enum == plugin.PluginSupportTypes.SYMMETRIC_KEY_GENERATION: return True elif type_enum == plugin.PluginSupportTypes.ASYMMETRIC_KEY_GENERATION: return False else: return False def _pkek_cache_add(self, kek, label): with self.pkek_cache_lock: if label in self.pkek_cache: raise ValueError('{0} is already in the cache'.format(label)) now = int(time.time()) ckek = CachedKEK(kek, now + self.pkek_cache_ttl) if len(self.pkek_cache) >= self.pkek_cache_limit: with self.caching_session_lock: session = self.caching_session self._pkek_cache_expire(now, session) # Test again if call above didn't remove any items if len(self.pkek_cache) >= self.pkek_cache_limit: (l, k) = self.pkek_cache.popitem(last=False) 
self.pkcs11.destroy_object(k.kek, session) self.pkek_cache[label] = ckek def _pkek_cache_get(self, label, default=None): kek = default with self.pkek_cache_lock: ckek = self.pkek_cache.get(label) if ckek is not None: if int(time.time()) < ckek.expires: kek = ckek.kek else: with self.caching_session_lock: self.pkcs11.destroy_object(ckek.kek, self.caching_session) del self.pkek_cache[label] return kek def _pkek_cache_expire(self, now, session): # Look for expired items, starting from oldest for (label, kek) in self.pkek_cache.items(): if now >= kek.expires: self.pkcs11.destroy_object(kek.kek, session) del self.pkek_cache[label] else: break def _create_pkcs11(self, plugin_conf, ffi): return pkcs11.PKCS11( library_path=plugin_conf.library_path, login_passphrase=plugin_conf.login, rw_session=plugin_conf.rw_session, slot_id=plugin_conf.slot_id, ffi=ffi, algorithm=plugin_conf.algorithm ) def _get_session(self): return self.pkcs11.get_session() def _return_session(self, session): self.pkcs11.return_session(session) def _get_master_key(self, label): with self.mk_cache_lock: session = self.caching_session key = self.mk_cache.get(label, None) if key is None: with self.caching_session_lock: key = self.pkcs11.get_key_handle(label, session) if key is None: raise pkcs11.P11CryptoKeyHandleException( u._("Could not find key labeled {0}").format(label) ) self.mk_cache[label] = key return key def _load_kek_from_meta_dto(self, kek_meta_dto): meta = json.loads(kek_meta_dto.plugin_meta) kek = self._load_kek( kek_meta_dto.kek_label, meta['iv'], meta['wrapped_key'], meta['hmac'], meta['mkek_label'], meta['hmac_label'] ) return kek def _load_kek(self, key_label, iv, wrapped_key, hmac, mkek_label, hmac_label): with self.pkek_cache_lock: kek = self._pkek_cache_get(key_label) if kek is None: # Decode data iv = base64.b64decode(iv) wrapped_key = base64.b64decode(wrapped_key) hmac = base64.b64decode(hmac) kek_data = iv + wrapped_key with self.caching_session_lock: session = self.caching_session 
# Get master keys mkek = self._get_master_key(mkek_label) mkhk = self._get_master_key(hmac_label) # Verify HMAC self.pkcs11.verify_hmac(mkhk, hmac, kek_data, session) # Unwrap KEK kek = self.pkcs11.unwrap_key(mkek, iv, wrapped_key, session) self._pkek_cache_add(kek, key_label) return kek def _generate_wrapped_kek(self, key_length, key_label): with self.caching_session_lock: session = self.caching_session # Get master keys mkek = self._get_master_key(self.mkek_label) mkhk = self._get_master_key(self.hmac_label) # Generate KEK kek = self.pkcs11.generate_key(key_length, session, encrypt=True) # Wrap KEK wkek = self.pkcs11.wrap_key(mkek, kek, session) # HMAC Wrapped KEK wkek_data = wkek['iv'] + wkek['wrapped_key'] wkek_hmac = self.pkcs11.compute_hmac(mkhk, wkek_data, session) # Cache KEK self._pkek_cache_add(kek, key_label) return { 'iv': base64.b64encode(wkek['iv']), 'wrapped_key': base64.b64encode(wkek['wrapped_key']), 'hmac': base64.b64encode(wkek_hmac), 'mkek_label': self.mkek_label, 'hmac_label': self.hmac_label } def _generate_mkek(self, key_length, key_label): with self.mk_cache_lock, self.caching_session_lock: session = self.caching_session if key_label in self.mk_cache or \ self.pkcs11.get_key_handle(key_label, session) is not None: raise pkcs11.P11CryptoPluginKeyException( u._("A master key with that label already exists") ) mk = self.pkcs11.generate_key( key_length, session, key_label, encrypt=True, wrap=True, master_key=True ) self.mk_cache[key_label] = mk return mk def _generate_mkhk(self, key_length, key_label): with self.mk_cache_lock, self.caching_session_lock: session = self.caching_session if key_label in self.mk_cache or \ self.pkcs11.get_key_handle(key_label, session) is not None: raise pkcs11.P11CryptoPluginKeyException( u._("A master key with that label already exists") ) mk = self.pkcs11.generate_key( key_length, session, key_label, sign=True, master_key=True ) self.mk_cache[key_label] = mk return mk 
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os

from Crypto.PublicKey import DSA
from Crypto.PublicKey import RSA
from Crypto.Util import asn1
from cryptography import fernet
from oslo_config import cfg
import six

from barbican.common import config
from barbican import i18n as u
from barbican.plugin.crypto import crypto as c

CONF = config.new_config()

# [simple_crypto_plugin] config group: only option is the (static) master KEK.
simple_crypto_plugin_group = cfg.OptGroup(name='simple_crypto_plugin',
                                          title="Simple Crypto Plugin Options")
simple_crypto_plugin_opts = [
    cfg.StrOpt('kek',
               default=b'dGhpcnR5X3R3b19ieXRlX2tleWJsYWhibGFoYmxhaGg=',
               help=u._('Key encryption key to be used by Simple Crypto '
                        'Plugin'))
]
CONF.register_group(simple_crypto_plugin_group)
CONF.register_opts(simple_crypto_plugin_opts, group=simple_crypto_plugin_group)
config.parse_args(CONF)


class SimpleCryptoPlugin(c.CryptoPluginBase):
    """Insecure implementation of the crypto plugin.

    Project KEKs are Fernet keys stored encrypted (under the configured
    master KEK) in kek_meta_dto.plugin_meta.  Intended for development and
    testing only -- the master KEK lives in the config file.
    """

    def __init__(self, conf=CONF):
        # Master KEK read once from config; used only to (de)crypt
        # per-project KEKs, never secrets directly.
        self.master_kek = conf.simple_crypto_plugin.kek

    def _get_kek(self, kek_meta_dto):
        """Return the project KEK, decrypted with the master KEK.

        :raises ValueError: if bind_kek_metadata() has not run yet
        """
        if not kek_meta_dto.plugin_meta:
            raise ValueError(u._('KEK not yet created.'))
        # the kek is stored encrypted. Need to decrypt.
        encryptor = fernet.Fernet(self.master_kek)
        # Note : If plugin_meta type is unicode, encode to byte.
        if isinstance(kek_meta_dto.plugin_meta, six.text_type):
            kek_meta_dto.plugin_meta = kek_meta_dto.plugin_meta.encode('utf-8')
        return encryptor.decrypt(kek_meta_dto.plugin_meta)

    def encrypt(self, encrypt_dto, kek_meta_dto, project_id):
        """Fernet-encrypt the raw secret bytes under the project KEK.

        :raises ValueError: if the payload is not a byte type
        """
        kek = self._get_kek(kek_meta_dto)
        unencrypted = encrypt_dto.unencrypted
        if not isinstance(unencrypted, six.binary_type):
            raise ValueError(
                u._(
                    'Unencrypted data must be a byte type, but was '
                    '{unencrypted_type}'
                ).format(
                    unencrypted_type=type(unencrypted)
                )
            )
        encryptor = fernet.Fernet(kek)
        cyphertext = encryptor.encrypt(unencrypted)
        return c.ResponseDTO(cyphertext, None)

    def decrypt(self, encrypted_dto, kek_meta_dto, kek_meta_extended,
                project_id):
        # kek_meta_extended is unused: Fernet tokens embed their own IV.
        kek = self._get_kek(kek_meta_dto)
        encrypted = encrypted_dto.encrypted
        decryptor = fernet.Fernet(kek)
        return decryptor.decrypt(encrypted)

    def bind_kek_metadata(self, kek_meta_dto):
        """Create a fresh project KEK on first use and describe it."""
        kek_meta_dto.algorithm = 'aes'
        kek_meta_dto.bit_length = 128
        kek_meta_dto.mode = 'cbc'
        if not kek_meta_dto.plugin_meta:
            # the kek is stored encrypted in the plugin_meta field
            encryptor = fernet.Fernet(self.master_kek)
            key = fernet.Fernet.generate_key()
            kek_meta_dto.plugin_meta = encryptor.encrypt(key)
        return kek_meta_dto

    def generate_symmetric(self, generate_dto, kek_meta_dto, project_id):
        # os.urandom is a CSPRNG; result is immediately encrypted for storage.
        byte_length = int(generate_dto.bit_length) // 8
        unencrypted = os.urandom(byte_length)
        return self.encrypt(c.EncryptDTO(unencrypted),
                            kek_meta_dto,
                            project_id)

    def generate_asymmetric(self, generate_dto, kek_meta_dto, project_id):
        """Generate asymmetric keys based on below rules:

        - RSA, with passphrase (supported)
        - RSA, without passphrase (supported)
        - DSA, without passphrase (supported)
        - DSA, with passphrase (not supported)

        Note: PyCrypto is not capable of serializing DSA keys and DER
        formated keys. Such keys will be serialized to Base64 PEM to store
        in DB.

        TODO (atiwari/reaperhulk): PyCrypto is not capable to serialize
        DSA keys and DER formated keys, later we need to pick better
        crypto lib.
        """
        # Default algorithm is RSA when none is given.
        if(generate_dto.algorithm is None or generate_dto
                .algorithm.lower() == 'rsa'):
            private_key = RSA.generate(
                generate_dto.bit_length, None, None, 65537)
        elif generate_dto.algorithm.lower() == 'dsa':
            private_key = DSA.generate(generate_dto.bit_length, None, None)
        else:
            raise c.CryptoPrivateKeyFailureException()

        public_key = private_key.publickey()

        # Note (atiwari): key wrapping format PEM only supported
        if generate_dto.algorithm.lower() == 'rsa':
            public_key, private_key = self._wrap_key(public_key, private_key,
                                                     generate_dto.passphrase)
        if generate_dto.algorithm.lower() == 'dsa':
            if generate_dto.passphrase:
                raise ValueError(u._('Passphrase not supported for DSA key'))
            public_key, private_key = self._serialize_dsa_key(public_key,
                                                              private_key)
        private_dto = self.encrypt(c.EncryptDTO(private_key),
                                   kek_meta_dto,
                                   project_id)
        public_dto = self.encrypt(c.EncryptDTO(public_key),
                                  kek_meta_dto,
                                  project_id)

        passphrase_dto = None
        if generate_dto.passphrase:
            if isinstance(generate_dto.passphrase, six.text_type):
                generate_dto.passphrase = generate_dto.passphrase.encode(
                    'utf-8')

            passphrase_dto = self.encrypt(c.EncryptDTO(generate_dto.
                                                       passphrase),
                                          kek_meta_dto,
                                          project_id)

        return private_dto, public_dto, passphrase_dto

    def supports(self, type_enum, algorithm=None, bit_length=None,
                 mode=None):
        if type_enum == c.PluginSupportTypes.ENCRYPT_DECRYPT:
            return True

        if type_enum == c.PluginSupportTypes.SYMMETRIC_KEY_GENERATION:
            return self._is_algorithm_supported(algorithm,
                                                bit_length)
        elif type_enum == c.PluginSupportTypes.ASYMMETRIC_KEY_GENERATION:
            return self._is_algorithm_supported(algorithm,
                                                bit_length)
        else:
            return False

    def _wrap_key(self, public_key, private_key,
                  passphrase):
        # Export RSA keys as PEM; private key in PKCS#8, optionally
        # passphrase-protected.
        pkcs = 8
        key_wrap_format = 'PEM'

        private_key = private_key.exportKey(key_wrap_format, passphrase, pkcs)
        public_key = public_key.exportKey(key_wrap_format)

        return public_key, private_key

    def _serialize_dsa_key(self, public_key, private_key):
        # Hand-rolled DER (ASN.1 SEQUENCE) serialization; leading 0 is the
        # version field. See the method docstring in generate_asymmetric.
        pub_seq = asn1.DerSequence()
        pub_seq[:] = [0, public_key.p, public_key.q,
                      public_key.g, public_key.y]
        public_key = pub_seq.encode()

        prv_seq = asn1.DerSequence()
        prv_seq[:] = [0, private_key.p, private_key.q,
                      private_key.g, private_key.y, private_key.x]
        private_key = prv_seq.encode()

        return public_key, private_key

    def _is_algorithm_supported(self, algorithm=None, bit_length=None):
        """check if algorithm and bit_length combination is supported."""
        if algorithm is None or bit_length is None:
            return False

        if (algorithm.lower() in
                c.PluginSupportTypes.SYMMETRIC_ALGORITHMS and
                bit_length in c.PluginSupportTypes.SYMMETRIC_KEY_LENGTHS):
            return True
        elif (algorithm.lower() in
                c.PluginSupportTypes.ASYMMETRIC_ALGORITHMS and
                bit_length in c.PluginSupportTypes.ASYMMETRIC_KEY_LENGTHS):
            return True
        else:
            return False
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import abc

import six

from barbican.common import exception
from barbican.common import utils
from barbican import i18n as u


LOG = utils.getLogger(__name__)


class CryptoPluginNotFound(exception.BarbicanException):
    """Raised when no plugins are installed."""
    message = u._("Crypto plugin not found.")


class CryptoKEKBindingException(exception.BarbicanException):
    """Raised when the bind_kek_metadata method from a plugin returns None."""
    def __init__(self, plugin_name=u._('Unknown')):
        super(CryptoKEKBindingException, self).__init__(
            u._('Failed to bind kek metadata for '
                'plugin: {name}').format(name=plugin_name)
        )
        self.plugin_name = plugin_name


class CryptoPrivateKeyFailureException(exception.BarbicanException):
    """Raised when could not generate private key."""
    def __init__(self):
        super(CryptoPrivateKeyFailureException, self).__init__(
            u._('Could not generate private key')
        )


# TODO(john-wood-w) Need to harmonize these lower-level constants with the
#  higher level constants in secret_store.py.
class PluginSupportTypes(object):
    """Class to hold the type enumeration that plugins may support."""
    ENCRYPT_DECRYPT = "ENCRYPT_DECRYPT"
    SYMMETRIC_KEY_GENERATION = "SYMMETRIC_KEY_GENERATION"
    # A list of symmetric algorithms that are used to determine type of key gen
    SYMMETRIC_ALGORITHMS = ['aes', 'des', '3des', 'hmacsha1',
                            'hmacsha256', 'hmacsha384', 'hmacsha512']
    SYMMETRIC_KEY_LENGTHS = [64, 128, 192, 256]

    ASYMMETRIC_KEY_GENERATION = "ASYMMETRIC_KEY_GENERATION"
    ASYMMETRIC_ALGORITHMS = ['rsa', 'dsa']
    ASYMMETRIC_KEY_LENGTHS = [1024, 2048, 4096]


class KEKMetaDTO(object):
    """Key Encryption Key Meta DTO

    Key Encryption Keys (KEKs) in Barbican are intended to represent a
    distinct key that is used to perform encryption on secrets for a particular
    project.

    ``KEKMetaDTO`` objects are provided to cryptographic backends by Barbican
    to allow plugins to persist metadata related to the project's KEK.

    For example, a plugin that interfaces with a Hardware Security Module (HSM)
    may want to use a different encryption key for each project. Such a plugin
    could use the ``KEKMetaDTO`` object to save the key ID used for that
    project.  Barbican will persist the KEK metadata and ensure that it is
    provided to the plugin every time a request from that same project is
    processed.

    .. attribute:: plugin_name

        String attribute used by Barbican to identify the plugin that is bound
        to the KEK metadata.  Plugins should not change this attribute.

    .. attribute:: kek_label

        String attribute used to label the project's KEK by the plugin.
        The value of this attribute should be meaningful to the plugin.
        Barbican does not use this value.

    .. attribute:: algorithm

        String attribute used to identify the encryption algorithm used by the
        plugin. e.g. "AES", "3DES", etc.  This value should be meaningful to
        the plugin.  Barbican does not use this value.

    .. attribute:: mode

        String attribute used to identify the algorithm mode used by the
        plugin. e.g. "CBC", "GCM", etc.  This value should be meaningful to the
        plugin.  Barbican does not use this value.

    .. attribute:: bit_length

        Integer attribute used to identify the bit length of the KEK by the
        plugin.  This value should be meaningful to the plugin.  Barbican does
        not use this value.

    .. attribute:: plugin_meta

        String attribute used to persist any additional metadata that does not
        fit in any other attribute.  The value of this attribute is defined by
        the plugin.  It could be used to store external system references, such
        as Key IDs in an HSM, URIs to an external service, or any other data
        that the plugin deems necessary to persist.  Because this is just a
        plain text field, a plug in may even choose to persist data such as key
        value pairs in a JSON object.
    """

    def __init__(self, kek_datum):
        """Plugins should not have to create their own instance of this class.

        kek_datum is typically a barbican.model.models.KEKDatum instance.
        """
        # Straight copy of the persisted KEK datum fields.
        self.kek_label = kek_datum.kek_label
        self.plugin_name = kek_datum.plugin_name
        self.algorithm = kek_datum.algorithm
        self.bit_length = kek_datum.bit_length
        self.mode = kek_datum.mode
        self.plugin_meta = kek_datum.plugin_meta


class GenerateDTO(object):
    """Secret Generation DTO

    Data Transfer Object used to pass all the necessary data for the plugin
    to generate a secret on behalf of the user.

    .. attribute:: generation_type

        String attribute used to identify the type of secret that should be
        generated. This will be either ``"symmetric"`` or ``"asymmetric"``.

    .. attribute:: algorithm

        String attribute used to specify what type of algorithm the secret
        will be used for. e.g. ``"AES"`` for a ``"symmetric"`` type, or
        ``"RSA"`` for ``"asymmetric"``.

    .. attribute:: mode

        String attribute used to specify what algorithm mode the secret will
        be used for. e.g. ``"CBC"`` for ``"AES"`` algorithm.

    .. attribute:: bit_length

        Integer attribute used to specify the bit length of the secret.  For
        example, this attribute could specify the key length for an encryption
        key to be used in AES-CBC.
    """

    def __init__(self, algorithm, bit_length, mode, passphrase=None):
        self.algorithm = algorithm
        self.bit_length = bit_length
        self.mode = mode
        self.passphrase = passphrase


class ResponseDTO(object):
    """Data transfer object for secret generation response.

    Barbican guarantees that both the ``cypher_text`` and
    ``kek_metadata_extended`` will be persisted and then given back to
    the plugin when requesting a decryption operation.

    ``kek_metadata_extended`` takes the idea of Key Encryption Key (KEK)
    metadata further by giving plugins the option to store secret-level
    KEK metadata.  One example of using secret-level KEK metadata would
    be plugins that want to use a unique KEK for every secret that is
    encrypted.  Such a plugin could use ``kek_metadata_extended`` to
    store the Key ID for the KEK used to encrypt this particular secret.

    :param cypher_text: Byte data resulting from the encryption of the
        secret data.
    :param kek_meta_extended: Optional String object to be persisted alongside
        the cyphertext.
    """
    def __init__(self, cypher_text, kek_meta_extended=None):
        self.cypher_text = cypher_text
        self.kek_meta_extended = kek_meta_extended


class DecryptDTO(object):
    """Secret Decryption DTO

    Data Transfer Object used to pass all the necessary data for the plugin
    to perform decryption of a secret.

    Currently, this DTO only contains the data produced by the plugin during
    encryption, but in the future this DTO will contain more information, such
    as a transport key for secret wrapping back to the client.

    .. attribute:: encrypted

        The data that was produced by the plugin during encryption.  For some
        plugins this will be the actual bytes that need to be decrypted to
        produce the secret.  In other implementations, this may just be a
        reference to some external system that can produce the unencrypted
        secret.
    """

    def __init__(self, encrypted):
        self.encrypted = encrypted


class EncryptDTO(object):
    """Secret Encryption DTO

    Data Transfer Object used to pass all the necessary data for the plugin
    to perform encryption of a secret.

    Currently, this DTO only contains the raw bytes to be encrypted by the
    plugin, but in the future this may contain more information.

    .. attribute:: unencrypted

        The secret data in Bytes to be encrypted by the plugin.
    """

    def __init__(self, unencrypted):
        self.unencrypted = unencrypted


@six.add_metaclass(abc.ABCMeta)
class CryptoPluginBase(object):
    """Base class for all Crypto plugins.

    Barbican requests operations by invoking the methods on an instance of the
    implementing class.  Barbican's plugin manager handles the life-cycle of
    the Data Transfer Objects (DTOs) that are passed into these methods, and
    persist the data that is assigned to these DTOs by the plugin.
    """

    @abc.abstractmethod
    def encrypt(self, encrypt_dto, kek_meta_dto, project_id):
        """Encryption handler function

        This method will be called by Barbican when requesting an encryption
        operation on a secret on behalf of a project.

        :param encrypt_dto: :class:`EncryptDTO` instance containing the raw
            secret byte data to be encrypted.
        :type encrypt_dto: :class:`EncryptDTO`
        :param kek_meta_dto: :class:`KEKMetaDTO` instance containing
            information about the project's Key Encryption Key (KEK) to be
            used for encryption.  Plugins may assume that binding via
            :meth:`bind_kek_metadata` has already taken place before this
            instance is passed in.
        :type kek_meta_dto: :class:`KEKMetaDTO`
        :param project_id: Project ID associated with the unencrypted data.
        :return: A response DTO containing the cyphertext and KEK information.
        :rtype: :class:`ResponseDTO`
        """
        raise NotImplementedError  # pragma: no cover

    @abc.abstractmethod
    def decrypt(self, decrypt_dto, kek_meta_dto, kek_meta_extended,
                project_id):
        """Decrypt encrypted_datum in the context of the provided project.

        :param decrypt_dto: data transfer object containing the cyphertext
               to be decrypted.
        :param kek_meta_dto: Key encryption key metadata to use for decryption
        :param kek_meta_extended: Optional per-secret KEK metadata to use for
            decryption.
        :param project_id: Project ID associated with the encrypted datum.
        :returns: str -- unencrypted byte data
        """
        raise NotImplementedError  # pragma: no cover

    @abc.abstractmethod
    def bind_kek_metadata(self, kek_meta_dto):
        """Key Encryption Key Metadata binding function

        Bind a key encryption key (KEK) metadata to the sub-system
        handling encryption/decryption, updating information about the
        key encryption key (KEK) metadata in the supplied 'kek_metadata'
        data-transfer-object instance, and then returning this instance.

        This method is invoked prior to the encrypt() method above.
        Implementors should fill out the supplied 'kek_meta_dto' instance
        (an instance of KEKMetadata above) as needed to completely describe
        the kek metadata and to complete the binding process. Barbican will
        persist the contents of this instance once this method returns.

        :param kek_meta_dto: Key encryption key metadata to bind, with the
               'kek_label' attribute guaranteed to be unique, and the
               and 'plugin_name' attribute already configured.
        :returns: kek_meta_dto: Returns the specified DTO, after
                  modifications.
        """
        raise NotImplementedError  # pragma: no cover

    @abc.abstractmethod
    def generate_symmetric(self, generate_dto, kek_meta_dto, project_id):
        """Generate a new key.

        :param generate_dto: data transfer object for the record
               associated with this generation request.  Some relevant
               parameters can be extracted from this object, including
               bit_length, algorithm and mode
        :param kek_meta_dto: Key encryption key metadata to use for decryption
        :param project_id: Project ID associated with the data.
        :returns: An object of type ResponseDTO containing encrypted data and
            kek_meta_extended, the former the resultant cypher text, the latter
            being optional per-secret metadata needed to decrypt (over and
            above the per-project metadata managed outside of the plugins)
        """
        raise NotImplementedError  # pragma: no cover

    @abc.abstractmethod
    def generate_asymmetric(self, generate_dto,
                            kek_meta_dto, project_id):
        """Create a new asymmetric key.

        :param generate_dto: data transfer object for the record
               associated with this generation request.  Some relevant
               parameters can be extracted from this object, including
               bit_length, algorithm and passphrase
        :param kek_meta_dto: Key encryption key metadata to use for decryption
        :param project_id: Project ID associated with the data.
        :returns: A tuple containing objects for private_key, public_key and
            optionally one for passphrase. The objects will be of type
            ResponseDTO.
            Each object containing encrypted data and kek_meta_extended, the
            former the resultant cypher text, the latter being optional
            per-secret metadata needed to decrypt (over and above the
            per-project metadata managed outside of the plugins)
        """
        raise NotImplementedError  # pragma: no cover

    @abc.abstractmethod
    def supports(self, type_enum, algorithm=None, bit_length=None,
                 mode=None):
        """Used to determine if the plugin supports the requested operation.

        :param type_enum: Enumeration from PluginSupportsType class
        :param algorithm: String algorithm name if needed
        """
        raise NotImplementedError  # pragma: no cover
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from oslo_config import cfg
from stevedore import named
import threading

from barbican.common import config
from barbican.common import utils
from barbican import i18n as u
from barbican.plugin.crypto import crypto
from barbican.plugin.interface import secret_store
from barbican.plugin.util import utils as plugin_utils


# Module-level singleton state; see get_manager().
_PLUGIN_MANAGER = None
_PLUGIN_MANAGER_LOCK = threading.RLock()

CONF = config.new_config()
DEFAULT_PLUGIN_NAMESPACE = 'barbican.crypto.plugin'
DEFAULT_PLUGINS = ['simple_crypto']

crypto_opt_group = cfg.OptGroup(name='crypto',
                                title='Crypto Plugin Options')
crypto_opts = [
    cfg.StrOpt('namespace',
               default=DEFAULT_PLUGIN_NAMESPACE,
               help=u._('Extension namespace to search for plugins.')
               ),
    cfg.MultiStrOpt('enabled_crypto_plugins',
                    default=DEFAULT_PLUGINS,
                    help=u._('List of crypto plugins to load.')
                    )
]
CONF.register_group(crypto_opt_group)
CONF.register_opts(crypto_opts, group=crypto_opt_group)
config.parse_args(CONF)


class _CryptoPluginManager(named.NamedExtensionManager):
    # NOTE(review): invoke_kwargs={} is a mutable default argument; it is
    # only read here, but confirm no caller mutates it.
    def __init__(self, conf=CONF, invoke_args=(), invoke_kwargs={}):
        """Crypto Plugin Manager

        Each time this class is initialized it will load a new instance
        of each enabled crypto plugin. This is undesirable, so rather than
        initializing a new instance of this class use the PLUGIN_MANAGER
        at the module level.
        """
        super(_CryptoPluginManager, self).__init__(
            conf.crypto.namespace,
            conf.crypto.enabled_crypto_plugins,
            invoke_on_load=False,  # Defer creating plugins to utility below.
            invoke_args=invoke_args,
            invoke_kwds=invoke_kwargs
        )

        plugin_utils.instantiate_plugins(
            self, invoke_args, invoke_kwargs)

    def get_plugin_store_generate(self, type_needed, algorithm=None,
                                  bit_length=None, mode=None):
        """Gets a secret store or generate plugin that supports provided type.

        :param type_needed: PluginSupportTypes that contains details on the
            type of plugin required
        :returns: CryptoPluginBase plugin implementation
        :raises crypto.CryptoPluginNotFound: if no plugins are active
        :raises secret_store.SecretStorePluginNotFound: if none support
            the requested type/algorithm/length/mode combination
        """
        active_plugins = plugin_utils.get_active_plugins(self)

        if not active_plugins:
            raise crypto.CryptoPluginNotFound()

        # for/else: 'else' runs only if no plugin matched (no break).
        for generating_plugin in active_plugins:
            if generating_plugin.supports(
                    type_needed, algorithm, bit_length, mode):
                break
        else:
            raise secret_store.SecretStorePluginNotFound()

        return generating_plugin

    def get_plugin_retrieve(self, plugin_name_for_store):
        """Gets a secret retrieve plugin that supports the provided type.

        :param type_needed: PluginSupportTypes that contains details on the
            type of plugin required
        :returns: CryptoPluginBase plugin implementation
        :raises crypto.CryptoPluginNotFound: if no plugins are active
        :raises secret_store.SecretStorePluginNotFound: if no active plugin
            has the given full name
        """
        active_plugins = plugin_utils.get_active_plugins(self)

        if not active_plugins:
            raise crypto.CryptoPluginNotFound()

        # Match by generated full plugin name; for/else as above.
        for decrypting_plugin in active_plugins:
            plugin_name = utils.generate_fullname_for(decrypting_plugin)
            if plugin_name == plugin_name_for_store:
                break
        else:
            raise secret_store.SecretStorePluginNotFound()

        return decrypting_plugin


def get_manager():
    """Return a singleton crypto plugin manager.

    Uses double-checked locking so the manager (and its plugin
    instantiation) happens at most once per process.
    """
    global _PLUGIN_MANAGER
    global _PLUGIN_MANAGER_LOCK

    if not _PLUGIN_MANAGER:
        with _PLUGIN_MANAGER_LOCK:
            if not _PLUGIN_MANAGER:
                _PLUGIN_MANAGER = _CryptoPluginManager()

    return _PLUGIN_MANAGER
# Copyright 2014 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import base64 import datetime import fnmatch import os import re import subprocess # nosec from tempfile import mkstemp import uuid from OpenSSL import crypto from oslo_config import cfg from barbican.common import config from barbican.common import utils from barbican import i18n as u import barbican.plugin.interface.certificate_manager as cert_manager CONF = config.new_config() LOG = utils.getLogger(__name__) snakeoil_ca_plugin_group = cfg.OptGroup(name='snakeoil_ca_plugin', title="Snakeoil CA Plugin Options") snakeoil_ca_plugin_opts = [ cfg.StrOpt('ca_cert_path', help=u._('Path to CA certicate file')), cfg.StrOpt('ca_cert_key_path', help=u._('Path to CA certificate key file')), cfg.StrOpt('ca_cert_chain_path', help=u._('Path to CA certicate chain file')), cfg.StrOpt('ca_cert_pkcs7_path', help=u._('Path to CA chain pkcs7 file')), cfg.StrOpt('subca_cert_key_directory', default='/etc/barbican/snakeoil-cas', help=u._('Directory in which to store certs/keys for subcas')), ] CONF.register_group(snakeoil_ca_plugin_group) CONF.register_opts(snakeoil_ca_plugin_opts, group=snakeoil_ca_plugin_group) config.parse_args(CONF) def set_subject_X509Name(target, dn): """Set target X509Name object with parsed dn. 
This is very basic and should certainly be replaced by something using cryptography for instance, but will do for a basic test CA """ # TODO(alee) Figure out why C (country) is not working fields = dn.split(',') for field in fields: m = re.search(r"(\w+)\s*=\s*(.+)", field.strip()) name = m.group(1) value = m.group(2) if name.lower() == 'ou': target.OU = value elif name.lower() == 'st': target.ST = value elif name.lower() == 'cn': target.CN = value elif name.lower() == 'l': target.L = value elif name.lower() == 'o': target.O = value return target class SnakeoilCA(object): def __init__(self, cert_path=None, key_path=None, chain_path=None, pkcs7_path=None, name=None, serial=1, key_size=2048, expiry_days=10 * 365, x509_version=2, subject_dn=None, signing_dn=None, signing_key=None, parent_chain_path=None): self.cert_path = cert_path self.key_path = key_path self.chain_path = chain_path self.pkcs7_path = pkcs7_path self.name = name self.serial = serial self.key_size = key_size self.expiry_days = expiry_days self.x509_version = x509_version self.subject_dn = subject_dn if signing_dn is not None: self.signing_dn = signing_dn else: self.signing_dn = subject_dn # self-signed self.signing_key = signing_key self.parent_chain_path = parent_chain_path self._cert_val = None self._key_val = None self._chain_val = None self._pkcs7_val = None @property def cert(self): self.ensure_exists() if self.cert_path: with open(self.cert_path) as cert_fh: return crypto.load_certificate(crypto.FILETYPE_PEM, cert_fh.read()) else: return crypto.load_certificate(crypto.FILETYPE_PEM, self._cert_val) @cert.setter def cert(self, val): if self.cert_path: with open(self.cert_path, 'w') as cert_fh: cert_fh.write(crypto.dump_certificate(crypto.FILETYPE_PEM, val)) else: self._cert_val = crypto.dump_certificate(crypto.FILETYPE_PEM, val) @property def key(self): self.ensure_exists() if self.key_path: with open(self.key_path) as key_fh: return crypto.load_privatekey(crypto.FILETYPE_PEM, key_fh.read()) else: 
return crypto.load_privatekey(crypto.FILETYPE_PEM, self._key_val) @key.setter def key(self, val): if self.key_path: with open(self.key_path, 'w') as key_fh: key_fh.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, val)) else: self._key_val = crypto.dump_privatekey(crypto.FILETYPE_PEM, val) @property def chain(self): self.ensure_exists() if self.chain_path: with open(self.chain_path) as chain_fh: return chain_fh.read() else: return self._chain_val @chain.setter def chain(self, val): if self.chain_path: with open(self.chain_path, 'w') as chain_fh: chain_fh.write(val) else: self._chain_val = val @property def pkcs7(self): self.ensure_exists() if self.pkcs7_path: with open(self.pkcs7_path) as pkcs7_fh: return pkcs7_fh.read() else: return self._pkcs7_val @pkcs7.setter def pkcs7(self, val): if self.pkcs7_path: with open(self.pkcs7_path, 'w') as pkcs7_fh: pkcs7_fh.write(val) else: self._pkcs7_val = val @property def exists(self): if self.cert_path is not None: cert_exists = os.path.isfile(self.cert_path) else: cert_exists = self._cert_val is not None if self.key_path is not None: key_exists = os.path.isfile(self.key_path) else: key_exists = self._key_val is not None if self.chain_path is not None: chain_exists = os.path.isfile(self.chain_path) else: chain_exists = self._chain_val is not None if self.pkcs7_path is not None: pkcs7_exists = os.path.isfile(self.pkcs7_path) else: pkcs7_exists = self._pkcs7_val is not None return (cert_exists and key_exists and pkcs7_exists and chain_exists) def ensure_exists(self): if not self.exists: LOG.debug('Keypair not found, creating new cert/key') self.cert, self.key, self.chain, self.pkcs7 = ( self.create_keypair()) def create_keypair(self): LOG.debug('Generating Snakeoil CA') key = crypto.PKey() key.generate_key(crypto.TYPE_RSA, self.key_size) cert = crypto.X509() cert.set_version(self.x509_version) cert.set_serial_number(self.serial) subject = cert.get_subject() set_subject_X509Name(subject, self.subject_dn) cert.set_subject(subject) 
cert.gmtime_adj_notBefore(0) cert.gmtime_adj_notAfter(self.expiry_days) cert.set_issuer(set_subject_X509Name( cert.get_issuer(), self.signing_dn)) cert.set_pubkey(key) cert.add_extensions([ crypto.X509Extension(b"basicConstraints", True, b"CA:TRUE, pathlen:5"), ]) if not self.signing_key: self.signing_key = key # self-signed cert.sign(self.signing_key, 'sha256') LOG.debug('Snakeoil CA cert/key generated') chain = "" if self.parent_chain_path: with open(self.parent_chain_path) as fh: chain = fh.read() chain += crypto.dump_certificate(crypto.FILETYPE_PEM, cert) pkcs7 = self._generate_pkcs7(chain) return cert, key, chain, pkcs7 def _generate_pkcs7(self, chain): fin, temp_in = mkstemp() os.write(fin, chain) os.close(fin) fout, temp_out = mkstemp() os.close(fout) subprocess.call(['/usr/bin/openssl', 'crl2pkcs7', '-nocrl', # nosec '-out', temp_out, '-certfile', temp_in], shell=False) with open(temp_out) as pkcs7_fh: pkcs7 = pkcs7_fh.read() os.remove(temp_in) os.remove(temp_out) return pkcs7 class CertManager(object): def __init__(self, ca): self.ca = ca def get_new_serial(self): return uuid.uuid4().int def make_certificate(self, csr, expires=2 * 365): cert = crypto.X509() cert.set_serial_number(self.get_new_serial()) cert.gmtime_adj_notBefore(0) cert.gmtime_adj_notAfter(expires) cert.set_issuer(self.ca.cert.get_subject()) cert.set_subject(csr.get_subject()) cert.set_pubkey(csr.get_pubkey()) cert.sign(self.ca.key, 'sha256') return cert class SnakeoilCACertificatePlugin(cert_manager.CertificatePluginBase): """Snakeoil CA certificate plugin. This is used for easily generating certificates which are not useful in a production environment. 
""" def __init__(self, conf=CONF): self.cas = {} self.ca = SnakeoilCA( cert_path=conf.snakeoil_ca_plugin.ca_cert_path, key_path=conf.snakeoil_ca_plugin.ca_cert_key_path, chain_path=conf.snakeoil_ca_plugin.ca_cert_chain_path, pkcs7_path=conf.snakeoil_ca_plugin.ca_cert_pkcs7_path, name=self.get_default_ca_name(), subject_dn="cn=Snakeoil Certificate,o=example.com" ) self.cas[self.get_default_ca_name()] = self.ca self.subca_directory = conf.snakeoil_ca_plugin.subca_cert_key_directory if self.subca_directory: if not os.path.exists(self.subca_directory): os.makedirs(self.subca_directory) # pragma: no cover else: self._reload_previously_created_subcas() self.cert_manager = CertManager(self.ca) def _reload_previously_created_subcas(self): for file in os.listdir(self.subca_directory): if fnmatch.fnmatch(file, '*.key'): ca_id, _ext = os.path.splitext(file) self.cas[ca_id] = SnakeoilCA( cert_path=os.path.join(self.subca_directory, ca_id + ".cert"), key_path=os.path.join(self.subca_directory, file), chain_path=os.path.join(self.subca_directory, ca_id + ".chain"), pkcs7_path=os.path.join(self.subca_directory, ca_id + ".p7b") ) def get_default_ca_name(self): return "Snakeoil CA" def get_default_signing_cert(self): return crypto.dump_certificate(crypto.FILETYPE_PEM, self.ca.cert) def get_default_intermediates(self): return None def supported_request_types(self): return [cert_manager.CertificateRequestType.CUSTOM_REQUEST, cert_manager.CertificateRequestType.STORED_KEY_REQUEST] def issue_certificate_request(self, order_id, order_meta, plugin_meta, barbican_meta_dto): if barbican_meta_dto.generated_csr is not None: encoded_csr = barbican_meta_dto.generated_csr else: try: encoded_csr = base64.b64decode(order_meta['request_data']) except KeyError: return cert_manager.ResultDTO( cert_manager.CertificateStatus.CLIENT_DATA_ISSUE_SEEN, status_message=u._("No request_data specified")) csr = crypto.load_certificate_request(crypto.FILETYPE_PEM, encoded_csr) ca_id = 
barbican_meta_dto.plugin_ca_id if ca_id: ca = self.cas.get(ca_id) if ca is None: raise cert_manager.CertificateGeneralException( "Invalid ca_id passed into snake oil plugin:" + ca_id) else: ca = self.ca cert_mgr = CertManager(ca) cert = cert_mgr.make_certificate(csr) cert_enc = crypto.dump_certificate(crypto.FILETYPE_PEM, cert) return cert_manager.ResultDTO( cert_manager.CertificateStatus.CERTIFICATE_GENERATED, certificate=base64.b64encode(cert_enc), intermediates=base64.b64encode(ca.pkcs7)) def modify_certificate_request(self, order_id, order_meta, plugin_meta, barbican_meta_dto): raise NotImplementedError def cancel_certificate_request(self, order_id, order_meta, plugin_meta, barbican_meta_dto): raise NotImplementedError def check_certificate_status(self, order_id, order_meta, plugin_meta, barbican_meta_dto): raise NotImplementedError def supports(self, certificate_spec): request_type = certificate_spec.get( cert_manager.REQUEST_TYPE, cert_manager.CertificateRequestType.CUSTOM_REQUEST) return request_type in self.supported_request_types() def supports_create_ca(self): return True def create_ca(self, ca_create_dto): # get the parent CA from the ca list, return error if not on list parent_ca_id = ca_create_dto.parent_ca_id if not parent_ca_id: raise cert_manager.CertificateGeneralException( "No parent id passed to snake oil plugin on create_ca") parent_ca = self.cas.get(parent_ca_id) if not parent_ca: raise cert_manager.CertificateGeneralException( "Invalid parent id passed to snake oil plugin:" + parent_ca_id) # create a new ca, passing in key and issuer from the parent new_ca_id = str(uuid.uuid4()) new_cert_path = os.path.join(self.subca_directory, new_ca_id + ".cert") new_key_path = os.path.join(self.subca_directory, new_ca_id + ".key") new_chain_path = os.path.join(self.subca_directory, new_ca_id + ".chain") new_pkcs7_path = os.path.join(self.subca_directory, new_ca_id + ".p7b") parent_chain_path = parent_ca.chain_path new_ca = 
SnakeoilCA(cert_path=new_cert_path, key_path=new_key_path, chain_path=new_chain_path, pkcs7_path=new_pkcs7_path, name=ca_create_dto.name, subject_dn=ca_create_dto.subject_dn, signing_dn=parent_ca.subject_dn, signing_key=parent_ca.key, parent_chain_path=parent_chain_path) self.cas[new_ca_id] = new_ca expiration = (datetime.datetime.utcnow() + datetime.timedelta( days=cert_manager.CA_INFO_DEFAULT_EXPIRATION_DAYS)) return { cert_manager.INFO_NAME: new_ca.name, cert_manager.INFO_CA_SIGNING_CERT: crypto.dump_certificate( crypto.FILETYPE_PEM, new_ca.cert), cert_manager.INFO_EXPIRATION: expiration.isoformat(), cert_manager.INFO_INTERMEDIATES: new_ca.pkcs7, cert_manager.PLUGIN_CA_ID: new_ca_id } def get_ca_info(self): expiration = (datetime.datetime.utcnow() + datetime.timedelta( days=cert_manager.CA_INFO_DEFAULT_EXPIRATION_DAYS)) ret = {} for ca_id, ca in self.cas.items(): ca_info = { cert_manager.INFO_NAME: ca.name, cert_manager.INFO_CA_SIGNING_CERT: crypto.dump_certificate( crypto.FILETYPE_PEM, ca.cert), cert_manager.INFO_INTERMEDIATES: ca.pkcs7, cert_manager.INFO_EXPIRATION: expiration.isoformat() } ret[ca_id] = ca_info return ret def delete_ca(self, ca_id): self.cas.pop(ca_id) ca_files = [os.path.join(self.subca_directory, ca_id + ".cert"), os.path.join(self.subca_directory, ca_id + ".key"), os.path.join(self.subca_directory, ca_id + ".chain"), os.path.join(self.subca_directory, ca_id + ".p7b")] for ca_file in ca_files: if os.path.exists(ca_file): os.remove(ca_file) barbican-2.0.0/barbican/plugin/kmip_secret_store.py0000664000567000056710000005771012701405674023565 0ustar jenkinsjenkins00000000000000# Copyright (c) 2014 Johns Hopkins University Applied Physics Laboratory # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ An implementation of the SecretStore that uses the KMIP backend. """ import base64 import os import stat from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import serialization from kmip.core import enums from kmip.core.factories import credentials from kmip.pie import client from kmip.pie import objects from oslo_config import cfg from oslo_log import log from barbican.common import config from barbican import i18n as u # noqa from barbican.plugin.interface import secret_store as ss from barbican.plugin.util import translations LOG = log.getLogger(__name__) CONF = config.new_config() kmip_opt_group = cfg.OptGroup(name='kmip_plugin', title='KMIP Plugin') kmip_opts = [ cfg.StrOpt('username', help=u._('Username for authenticating with KMIP server') ), cfg.StrOpt('password', help=u._('Password for authenticating with KMIP server'), secret=True, ), cfg.StrOpt('host', default='localhost', help=u._('Address of the KMIP server') ), cfg.StrOpt('port', default='5696', help=u._('Port for the KMIP server'), ), cfg.StrOpt('ssl_version', default='PROTOCOL_TLSv1', help=u._('SSL version, maps to the module ssl\'s constants'), ), cfg.StrOpt('ca_certs', help=u._('File path to concatenated "certification authority" ' 'certificates'), ), cfg.StrOpt('certfile', help=u._('File path to local client certificate'), ), cfg.StrOpt('keyfile', help=u._('File path to local client certificate keyfile'), ), cfg.BoolOpt('pkcs1_only', default=False, help=u._('Only support PKCS#1 encoding of asymmetric keys'), ) ] CONF.register_group(kmip_opt_group) 
CONF.register_opts(kmip_opts, group=kmip_opt_group) config.parse_args(CONF) attribute_debug_msg = "Created attribute type %s with value %s" def convert_pem_to_der(pem_pkcs1): # cryptography adds an extra '\n' to end of PEM file # added if statement so if future version removes extra \n tests will not # break if pem_pkcs1.endswith('\n'): pem_pkcs1 = pem_pkcs1[:-1] # neither PyCrypto or cryptography support export in DER format with PKCS1 # encoding so doing by hand der_pkcs1_b64 = ''.join(pem_pkcs1.split('\n')[1:-1]) der_pkcs1 = base64.b64decode(der_pkcs1_b64) return der_pkcs1 def get_public_key_der_pkcs1(pem): """Converts PEM public key to DER PKCS1""" rsa_public = serialization.load_pem_public_key( pem, backend=default_backend()) pem_pkcs1 = rsa_public.public_bytes( encoding=serialization.Encoding.PEM, format=serialization.PublicFormat.PKCS1) return convert_pem_to_der(pem_pkcs1) def get_private_key_der_pkcs1(pem): """Converts PEM private key to DER PKCS1""" rsa_private = serialization.load_pem_private_key( pem, None, backend=default_backend()) pem_pkcs1 = rsa_private.private_bytes( encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.TraditionalOpenSSL, encryption_algorithm=serialization.NoEncryption()) return convert_pem_to_der(pem_pkcs1) class KMIPSecretStoreError(Exception): def __init__(self, what): super(KMIPSecretStoreError, self).__init__(what) class KMIPSecretStore(ss.SecretStoreBase): KEY_UUID = "key_uuid" VALID_BIT_LENGTHS = "valid_bit_lengths" KMIP_ALGORITHM_ENUM = "kmip_algorithm_enum" def __init__(self, conf=CONF): """Initializes KMIPSecretStore Creates a dictionary of mappings between SecretStore enum values and pyKMIP enum values. Initializes the KMIP client with credentials needed to connect to the KMIP server. 
""" super(KMIPSecretStore, self).__init__() self.valid_alg_dict = { ss.KeyAlgorithm.AES: { KMIPSecretStore.VALID_BIT_LENGTHS: [128, 192, 256], KMIPSecretStore.KMIP_ALGORITHM_ENUM: enums.CryptographicAlgorithm.AES}, ss.KeyAlgorithm.DES: { KMIPSecretStore.VALID_BIT_LENGTHS: [56], KMIPSecretStore.KMIP_ALGORITHM_ENUM: enums.CryptographicAlgorithm.DES}, ss.KeyAlgorithm.DESEDE: { KMIPSecretStore.VALID_BIT_LENGTHS: [56, 64, 112, 128, 168, 192], KMIPSecretStore.KMIP_ALGORITHM_ENUM: enums.CryptographicAlgorithm.TRIPLE_DES}, ss.KeyAlgorithm.DSA: { KMIPSecretStore.VALID_BIT_LENGTHS: [1024, 2048, 3072], KMIPSecretStore.KMIP_ALGORITHM_ENUM: enums.CryptographicAlgorithm.DSA}, ss.KeyAlgorithm.HMACSHA1: { KMIPSecretStore.VALID_BIT_LENGTHS: [], KMIPSecretStore.KMIP_ALGORITHM_ENUM: enums.CryptographicAlgorithm.HMAC_SHA1}, ss.KeyAlgorithm.HMACSHA256: { KMIPSecretStore.VALID_BIT_LENGTHS: [], KMIPSecretStore.KMIP_ALGORITHM_ENUM: enums.CryptographicAlgorithm.HMAC_SHA256}, ss.KeyAlgorithm.HMACSHA384: { KMIPSecretStore.VALID_BIT_LENGTHS: [], KMIPSecretStore.KMIP_ALGORITHM_ENUM: enums.CryptographicAlgorithm.HMAC_SHA384}, ss.KeyAlgorithm.HMACSHA512: { KMIPSecretStore.VALID_BIT_LENGTHS: [], KMIPSecretStore.KMIP_ALGORITHM_ENUM: enums.CryptographicAlgorithm.HMAC_SHA512}, ss.KeyAlgorithm.RSA: { KMIPSecretStore.VALID_BIT_LENGTHS: [1024, 2048, 3072, 4096], KMIPSecretStore.KMIP_ALGORITHM_ENUM: enums.CryptographicAlgorithm.RSA}, } self.pkcs1_only = conf.kmip_plugin.pkcs1_only if self.pkcs1_only: LOG.debug("KMIP secret store only supports PKCS#1") del self.valid_alg_dict[ss.KeyAlgorithm.DSA] self.kmip_barbican_alg_map = { enums.CryptographicAlgorithm.AES: ss.KeyAlgorithm.AES, enums.CryptographicAlgorithm.DES: ss.KeyAlgorithm.DES, enums.CryptographicAlgorithm.TRIPLE_DES: ss.KeyAlgorithm.DESEDE, enums.CryptographicAlgorithm.DSA: ss.KeyAlgorithm.DSA, enums.CryptographicAlgorithm.HMAC_SHA1: ss.KeyAlgorithm.HMACSHA1, enums.CryptographicAlgorithm.HMAC_SHA256: ss.KeyAlgorithm.HMACSHA256, 
enums.CryptographicAlgorithm.HMAC_SHA384: ss.KeyAlgorithm.HMACSHA384, enums.CryptographicAlgorithm.HMAC_SHA512: ss.KeyAlgorithm.HMACSHA512, enums.CryptographicAlgorithm.RSA: ss.KeyAlgorithm.RSA } if conf.kmip_plugin.keyfile is not None: self._validate_keyfile_permissions(conf.kmip_plugin.keyfile) if (conf.kmip_plugin.username is None) and ( conf.kmip_plugin.password is None): self.credential = None else: credential_type = credentials.CredentialType.USERNAME_AND_PASSWORD credential_value = {'Username': conf.kmip_plugin.username, 'Password': conf.kmip_plugin.password} self.credential = ( credentials.CredentialFactory().create_credential( credential_type, credential_value)) config = conf.kmip_plugin self.client = client.ProxyKmipClient( hostname=config.host, port=config.port, cert=config.certfile, key=config.keyfile, ca=config.ca_certs, ssl_version=config.ssl_version, username=config.username, password=config.password) def generate_symmetric_key(self, key_spec): """Generate a symmetric key. Creates KMIP attribute objects based on the given KeySpec to send to the server. 
:param key_spec: KeySpec with symmetric algorithm and bit_length :returns: dictionary holding key_id returned by server :raises: SecretGeneralException, SecretAlgorithmNotSupportedException """ LOG.debug("Starting symmetric key generation with KMIP plugin") if not self.generate_supports(key_spec): raise ss.SecretAlgorithmNotSupportedException( key_spec.alg) if key_spec.alg.lower() not in ss.KeyAlgorithm.SYMMETRIC_ALGORITHMS: raise KMIPSecretStoreError( u._("An unsupported algorithm {algorithm} was passed to the " "'generate_symmetric_key' method").format( algorithm=key_spec.alg)) algorithm = self._get_kmip_algorithm(key_spec.alg.lower()) try: with self.client: LOG.debug("Opened connection to KMIP client for secret " + "generation") uuid = self.client.create(algorithm, key_spec.bit_length) LOG.debug("SUCCESS: Symmetric key generated with " "uuid: %s", uuid) return {KMIPSecretStore.KEY_UUID: uuid} except Exception as e: LOG.exception(u._LE("Error opening or writing to client")) raise ss.SecretGeneralException(str(e)) def generate_asymmetric_key(self, key_spec): """Generate an asymmetric key pair. Creates KMIP attribute objects based on the given KeySpec to send to the server. The KMIP Secret Store currently does not support protecting the private key with a passphrase. 
:param key_spec: KeySpec with asymmetric algorithm and bit_length :returns: AsymmetricKeyMetadataDTO with the key UUIDs :raises: SecretGeneralException, SecretAlgorithmNotSupportedException """ LOG.debug("Starting asymmetric key generation with KMIP plugin") if not self.generate_supports(key_spec): raise ss.SecretAlgorithmNotSupportedException( key_spec.alg) if key_spec.alg.lower() not in ss.KeyAlgorithm.ASYMMETRIC_ALGORITHMS: raise KMIPSecretStoreError( u._("An unsupported algorithm {algorithm} was passed to " "the 'generate_asymmetric_key' method").format( algorithm=key_spec.alg)) if key_spec.passphrase: raise KMIPSecretStoreError( u._('KMIP plugin does not currently support protecting the ' 'private key with a passphrase')) algorithm = self._get_kmip_algorithm(key_spec.alg.lower()) length = key_spec.bit_length try: with self.client: LOG.debug("Opened connection to KMIP client for asymmetric " + "secret generation") public_uuid, private_uuid = self.client.create_key_pair( algorithm, length) LOG.debug("SUCCESS: Asymmetric key pair generated with " "public key uuid: %s and private key uuid: %s", public_uuid, private_uuid) private_key_metadata = {KMIPSecretStore.KEY_UUID: private_uuid} public_key_metadata = {KMIPSecretStore.KEY_UUID: public_uuid} passphrase_metadata = None return ss.AsymmetricKeyMetadataDTO(private_key_metadata, public_key_metadata, passphrase_metadata) except Exception as e: LOG.exception(u._LE("Error opening or writing to client")) raise ss.SecretGeneralException(str(e)) def store_secret(self, secret_dto): """Stores a secret To store a secret in KMIP, the attributes must be known. 
:param secret_dto: SecretDTO of the secret to be stored :returns: Dictionary holding the key_uuid assigned by KMIP :raises: SecretGeneralException, SecretAlgorithmNotSupportedException """ LOG.debug("Starting secret storage with KMIP plugin") if not self.store_secret_supports(secret_dto.key_spec): raise ss.SecretAlgorithmNotSupportedException( secret_dto.key_spec.alg) secret_type = secret_dto.type object_type, key_format_type = ( self._map_type_ss_to_kmip(secret_type)) if object_type is None: raise KMIPSecretStoreError( u._('Secret object type {object_type} is ' 'not supported').format(object_type=object_type)) secret = self._get_kmip_secret(secret_dto) try: with self.client: LOG.debug("Opened connection to KMIP client") uuid = self.client.register(secret) LOG.debug("SUCCESS: Key stored with uuid: %s", uuid) return {KMIPSecretStore.KEY_UUID: uuid} except Exception as e: LOG.exception(u._LE("Error opening or writing to client")) raise ss.SecretGeneralException(str(e)) def get_secret(self, secret_type, secret_metadata): """Gets a secret :param secret_type: secret type :param secret_metadata: Dictionary of key metadata, requires: {'key_uuid': } :returns: SecretDTO of the retrieved Secret :raises: SecretGeneralException """ LOG.debug("Starting secret retrieval with KMIP plugin") uuid = str(secret_metadata[KMIPSecretStore.KEY_UUID]) try: with self.client: LOG.debug("Opened connection to KMIP client for secret " + "retrieval") managed_object = self.client.get(uuid) return self._get_barbican_secret(managed_object, secret_type) except Exception as e: LOG.exception(u._LE("Error opening or writing to client")) raise ss.SecretGeneralException(str(e)) def generate_supports(self, key_spec): """Key generation supported? Specifies whether the plugin supports key generation with the given key_spec. Currently, asymmetric key pair generation does not support encrypting the private key with a passphrase. Checks both the algorithm and the bit length. 
Only symmetric algorithms are currently supported. :param key_spec: KeySpec for secret to be generates :returns: boolean indicating if secret can be generated """ alg_dict_entry = self.valid_alg_dict.get(key_spec.alg.lower()) if alg_dict_entry: valid_bit_lengths = alg_dict_entry.get( KMIPSecretStore.VALID_BIT_LENGTHS) if (key_spec.bit_length in valid_bit_lengths or not valid_bit_lengths): return True return False def delete_secret(self, secret_metadata): """Deletes the secret whose metadata is included in the dictionary. Returns nothing if successful, raises an exception if an error occurs :param secret_metadata: Dictionary of key metadata, requires: {'key_uuid': } :raises: SecretGeneralException """ LOG.debug("Starting secret deletion with KMIP plugin") uuid = str(secret_metadata[KMIPSecretStore.KEY_UUID]) try: with self.client: LOG.debug("Opened connection to KMIP client") self.client.destroy(uuid) except Exception as e: LOG.exception(u._LE("Error opening or writing to client")) raise ss.SecretGeneralException(str(e)) def store_secret_supports(self, key_spec): """Key storage supported? Specifies whether the plugin supports storage of the secret given the attributes included in the KeySpec. For now this always returns true if the key spec's algorithm and bit length are not specified. The secret type may need to be added in the future. This must always return true if the algorithm and bit length are not specified because some secret types, like certificate, do not require algorithm and bit length, so true must always be returned for those cases. :param key_spec: KeySpec of secret to be stored :returns: boolean indicating if secret can be stored """ if key_spec.alg is not None and key_spec.bit_length is not None: return self.generate_supports(key_spec) else: return True def _get_kmip_secret(self, secret_dto): """Builds a KMIP object from a SecretDTO This is needed for register calls. 
The Barbican object needs to be converted to KMIP object before it can be stored :param secret_dto: SecretDTO of secret to be stored :returns: KMIP object """ secret_type = secret_dto.type key_spec = secret_dto.key_spec object_type, key_format_type = ( self._map_type_ss_to_kmip(secret_type)) normalized_secret = self._normalize_secret(secret_dto.secret, secret_type) kmip_object = None if object_type == enums.ObjectType.CERTIFICATE: kmip_object = objects.X509Certificate(normalized_secret) elif object_type == enums.ObjectType.OPAQUE_DATA: opaque_type = enums.OpaqueDataType.NONE kmip_object = objects.OpaqueObject(normalized_secret, opaque_type) elif object_type == enums.ObjectType.PRIVATE_KEY: algorithm = self._get_kmip_algorithm(key_spec.alg) length = key_spec.bit_length format_type = enums.KeyFormatType.PKCS_8 kmip_object = objects.PrivateKey( algorithm, length, normalized_secret, format_type) elif object_type == enums.ObjectType.PUBLIC_KEY: algorithm = self._get_kmip_algorithm(key_spec.alg) length = key_spec.bit_length format_type = enums.KeyFormatType.X_509 kmip_object = objects.PublicKey( algorithm, length, normalized_secret, format_type) elif object_type == enums.ObjectType.SYMMETRIC_KEY: algorithm = self._get_kmip_algorithm(key_spec.alg) length = key_spec.bit_length kmip_object = objects.SymmetricKey(algorithm, length, normalized_secret) elif object_type == enums.ObjectType.SECRET_DATA: data_type = enums.SecretDataType.PASSWORD kmip_object = objects.SecretData(normalized_secret, data_type) return kmip_object def _get_kmip_algorithm(self, ss_algorithm): alg_entry = self.valid_alg_dict.get(ss_algorithm) return alg_entry.get(KMIPSecretStore.KMIP_ALGORITHM_ENUM) def _get_barbican_secret(self, managed_object, secret_type): object_type = managed_object.object_type secret = managed_object.value if (object_type == enums.ObjectType.SYMMETRIC_KEY or object_type == enums.ObjectType.PRIVATE_KEY or object_type == enums.ObjectType.PUBLIC_KEY): algorithm = 
self.kmip_barbican_alg_map[ managed_object.cryptographic_algorithm] length = managed_object.cryptographic_length key_spec = ss.KeySpec(algorithm, length) else: key_spec = ss.KeySpec() secret = self._denormalize_secret(secret, secret_type) secret_dto = ss.SecretDTO( secret_type, secret, key_spec, content_type=None, transport_key=None) return secret_dto def _map_type_ss_to_kmip(self, object_type): """Map SecretType to KMIP type enum Returns None if the type is not supported. The KMIP plugin only supports symmetric and asymmetric keys for now. :param object_type: SecretType enum value :returns: KMIP type enums if supported, None if not supported """ if object_type == ss.SecretType.SYMMETRIC: return enums.ObjectType.SYMMETRIC_KEY, enums.KeyFormatType.RAW elif object_type == ss.SecretType.PRIVATE: if self.pkcs1_only: return enums.ObjectType.PRIVATE_KEY, enums.KeyFormatType.PKCS_1 else: return enums.ObjectType.PRIVATE_KEY, enums.KeyFormatType.PKCS_8 elif object_type == ss.SecretType.PUBLIC: if self.pkcs1_only: return enums.ObjectType.PUBLIC_KEY, enums.KeyFormatType.PKCS_1 else: return enums.ObjectType.PUBLIC_KEY, enums.KeyFormatType.X_509 elif object_type == ss.SecretType.CERTIFICATE: return enums.ObjectType.CERTIFICATE, enums.KeyFormatType.X_509 elif object_type == ss.SecretType.PASSPHRASE: return enums.ObjectType.SECRET_DATA, enums.KeyFormatType.RAW elif object_type == ss.SecretType.OPAQUE: return enums.ObjectType.OPAQUE_DATA, enums.KeyFormatType.RAW else: return None, None def _raise_secret_general_exception(self, result): msg = u._( "Status: {status}, Reason: {reason}, " "Message: {message}" ).format( status=result.result_status, reason=result.result_reason, message=result.result_message ) LOG.error(u._LE("ERROR from KMIP server: %s"), msg) raise ss.SecretGeneralException(msg) def _validate_keyfile_permissions(self, path): """Check that file has permissions appropriate for a sensitive key Key files are extremely sensitive, they should be owned by the user who they 
relate to. They should be readable only (to avoid accidental changes). They should not be readable or writeable by any other user. :raises: KMIPSecretStoreError """ expected = (stat.S_IRUSR | stat.S_IFREG) # 0o100400 st = os.stat(path) if st.st_mode != expected: raise KMIPSecretStoreError( u._('Bad key file permissions found, expected 400 ' 'for path: {file_path}').format(file_path=path) ) def _normalize_secret(self, secret, secret_type): """Normalizes secret for use by KMIP plugin""" data = base64.b64decode(secret) if (self.pkcs1_only and secret_type in [ss.SecretType.PUBLIC, ss.SecretType.PRIVATE]): if secret_type == ss.SecretType.PUBLIC: data = get_public_key_der_pkcs1(data) elif secret_type == ss.SecretType.PRIVATE: data = get_private_key_der_pkcs1(data) elif secret_type in [ss.SecretType.PUBLIC, ss.SecretType.PRIVATE, ss.SecretType.CERTIFICATE]: data = translations.convert_pem_to_der(data, secret_type) return data def _denormalize_secret(self, secret, secret_type): """Converts secret back to the format expected by Barbican core""" data = secret if secret_type in [ss.SecretType.PUBLIC, ss.SecretType.PRIVATE, ss.SecretType.CERTIFICATE]: data = translations.convert_der_to_pem(data, secret_type) return base64.b64encode(data) barbican-2.0.0/barbican/plugin/simple_certificate_manager.py0000664000567000056710000001566712701405673025375 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
""" Default implementation of Barbican certificate processing plugins and support. """ from barbican.common import utils from barbican import i18n as u from barbican.plugin.interface import certificate_manager as cert LOG = utils.getLogger(__name__) MSEC_UNTIL_CHECK_STATUS = 5000 class SimpleCertificatePlugin(cert.CertificatePluginBase): """Simple/default certificate plugin.""" def get_default_ca_name(self): return "Simple CA" def get_default_signing_cert(self): return "XXXXXXXXXXXXXXXXX" def get_default_intermediates(self): return "YYYYYYYYYYYYYYYY" def issue_certificate_request(self, order_id, order_meta, plugin_meta, barbican_meta_dto): """Create the initial order with CA :param order_id: ID associated with the order :param order_meta: Dict of meta-data associated with the order. :param plugin_meta: Plugin meta-data previously set by calls to this plugin. Plugins may also update/add information here which Barbican will persist on their behalf. :param barbican_meta_dto: additional data needed to process order. :returns: A :class:`ResultDTO` instance containing the result populated by the plugin implementation :rtype: :class:`ResultDTO` """ LOG.info(u._LI('Invoking issue_certificate_request()')) return cert.ResultDTO( cert.CertificateStatus.WAITING_FOR_CA, retry_msec=MSEC_UNTIL_CHECK_STATUS) def modify_certificate_request(self, order_id, order_meta, plugin_meta, barbican_meta_dto): """Update the order meta-data :param order_id: ID associated with the order :param order_meta: Dict of meta-data associated with the order. :param plugin_meta: Plugin meta-data previously set by calls to this plugin. Plugins may also update/add information here which Barbican will persist on their behalf. :param barbican_meta_dto: additional data needed to process order. 
:returns: A :class:`ResultDTO` instance containing the result populated by the plugin implementation :rtype: :class:`ResultDTO` """ LOG.info(u._LI('Invoking modify_certificate_request()')) return cert.ResultDTO(cert.CertificateStatus.WAITING_FOR_CA) def cancel_certificate_request(self, order_id, order_meta, plugin_meta, barbican_meta_dto): """Cancel the order :param order_id: ID associated with the order :param order_meta: Dict of meta-data associated with the order. :param plugin_meta: Plugin meta-data previously set by calls to this plugin. Plugins may also update/add information here which Barbican will persist on their behalf. :param barbican_meta_dto: additional data needed to process order. :returns: A :class:`ResultDTO` instance containing the result populated by the plugin implementation :rtype: :class:`ResultDTO` """ LOG.info(u._LI('Invoking cancel_certificate_request()')) return cert.ResultDTO(cert.CertificateStatus.REQUEST_CANCELED) def check_certificate_status(self, order_id, order_meta, plugin_meta, barbican_meta_dto): """Check status of the order :param order_id: ID associated with the order :param order_meta: Dict of meta-data associated with the order. :param plugin_meta: Plugin meta-data previously set by calls to this plugin. Plugins may also update/add information here which Barbican will persist on their behalf. :param barbican_meta_dto: additional data needed to process order. :returns: A :class:`ResultDTO` instance containing the result populated by the plugin implementation :rtype: :class:`ResultDTO` """ LOG.info(u._LI('Invoking check_certificate_status()')) return cert.ResultDTO(cert.CertificateStatus.CERTIFICATE_GENERATED) def supports(self, certificate_spec): """Indicates whether the plugin supports the certificate type. 
:param certificate_spec: Contains details on the certificate to generate the certificate order :returns: boolean indicating if the plugin supports the certificate type """ return True def supported_request_types(self): """Returns the request types supported by this plugin. :returns: dict containing Barbican-core defined request types supported by this plugin. """ return [cert.CertificateRequestType.CUSTOM_REQUEST, cert.CertificateRequestType.SIMPLE_CMC_REQUEST, cert.CertificateRequestType.FULL_CMC_REQUEST, cert.CertificateRequestType.STORED_KEY_REQUEST] class SimpleCertificateEventPlugin(cert.CertificateEventPluginBase): """Simple/default certificate event plugin.""" def notify_certificate_is_ready( self, project_id, order_ref, container_ref): """Notify that a certificate has been generated and is ready to use. :param project_id: Project ID associated with this certificate :param order_ref: HATEOAS reference URI to the submitted Barbican Order :param container_ref: HATEOAS reference URI to the Container storing the certificate :returns: None """ LOG.info(u._LI('Invoking notify_certificate_is_ready()')) def notify_ca_is_unavailable( self, project_id, order_ref, error_msg, retry_in_msec): """Notify that the certificate authority (CA) isn't available. :param project_id: Project ID associated with this order :param order_ref: HATEOAS reference URI to the submitted Barbican Order :param error_msg: Error message if it is available :param retry_in_msec: Delay before attempting to talk to the CA again. If this is 0, then no attempt will be made. :returns: None """ LOG.info(u._LI('Invoking notify_ca_is_unavailable()')) barbican-2.0.0/barbican/plugin/dogtag.py0000664000567000056710000015123512701405673021305 0ustar jenkinsjenkins00000000000000# Copyright (c) 2014 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import base64
import copy
import datetime
import os
import uuid

from Crypto.PublicKey import RSA
from Crypto.Util import asn1
from oslo_config import cfg

import pki

# pki.authority and pki.feature only exist in newer Dogtag client releases;
# sub-CA support is disabled when they cannot be imported.
subcas_available = True
try:
    import pki.authority as authority
    import pki.feature as feature
except ImportError:
    subcas_available = False

import pki.cert
import pki.client
import pki.crypto as cryptoutil
import pki.key as key
import pki.kra
import pki.profile
from requests import exceptions as request_exceptions

from barbican.common import config
from barbican.common import exception
from barbican.common import utils
from barbican import i18n as u
import barbican.plugin.interface.certificate_manager as cm
import barbican.plugin.interface.secret_store as sstore

CONF = config.new_config()
LOG = utils.getLogger(__name__)

dogtag_plugin_group = cfg.OptGroup(name='dogtag_plugin',
                                   title="Dogtag Plugin Options")

dogtag_plugin_opts = [
    cfg.StrOpt('pem_path',
               help=u._('Path to PEM file for authentication')),
    cfg.StrOpt('dogtag_host',
               default="localhost",
               help=u._('Hostname for the Dogtag instance')),
    cfg.StrOpt('dogtag_port',
               default="8443",
               help=u._('Port for the Dogtag instance')),
    cfg.StrOpt('nss_db_path',
               help=u._('Path to the NSS certificate database')),
    cfg.StrOpt('nss_password',
               help=u._('Password for the NSS certificate databases'),
               secret=True),
    cfg.StrOpt('simple_cmc_profile',
               help=u._('Profile for simple CMC requests')),
    # NOTE(review): declared as a StrOpt though the help text says "List";
    # the CA plugin does a membership test against this value -- confirm
    # the intended format (single profile vs. comma-separated list).
    cfg.StrOpt('auto_approved_profiles',
               default="caServerCert",
               help=u._('List of automatically approved enrollment profiles')),
    cfg.StrOpt('ca_expiration_time',
               default=cm.CA_INFO_DEFAULT_EXPIRATION_DAYS,
               help=u._('Time in days for CA entries to expire')),
    cfg.StrOpt('plugin_working_dir',
               help=u._('Working directory for Dogtag plugin'))
]

CONF.register_group(dogtag_plugin_group)
CONF.register_opts(dogtag_plugin_opts, group=dogtag_plugin_group)
config.parse_args(CONF)

CERT_HEADER = "-----BEGIN CERTIFICATE-----"
CERT_FOOTER = "-----END CERTIFICATE-----"

# Nickname under which the KRA transport cert is stored in the NSS DB.
KRA_TRANSPORT_NICK = "KRA transport cert"


def _create_nss_db_if_needed(nss_db_path, nss_password):
    """Creates NSS DB if it's not setup already

    :returns: True or False whether the database was created or not.
    """
    if not os.path.exists(nss_db_path):
        cryptoutil.NSSCryptoProvider.setup_database(
            nss_db_path, nss_password, over_write=True)
        return True
    else:
        LOG.info(u._LI("The nss_db_path provided already exists, so the "
                       "database is assumed to be already set up."))
        return False


def _setup_nss_db_services(conf):
    """Sets up NSS Crypto functions

    This sets up the NSSCryptoProvider and the database it needs for it to
    store certificates. If the path specified in the configuration is already
    existent, it will assume that the database is already setup.

    This will also import the transport cert needed by the KRA if the NSS DB
    was created.
""" nss_db_path, nss_password = (conf.dogtag_plugin.nss_db_path, conf.dogtag_plugin.nss_password) if nss_db_path is None: LOG.warning(u._LW("nss_db_path was not provided so the crypto " "provider functions were not initialized.")) return None if nss_password is None: raise ValueError(u._("nss_password is required")) nss_db_created = _create_nss_db_if_needed(nss_db_path, nss_password) crypto = cryptoutil.NSSCryptoProvider(nss_db_path, nss_password) if nss_db_created: _import_kra_transport_cert_to_nss_db(conf, crypto) return crypto def _import_kra_transport_cert_to_nss_db(conf, crypto): try: connection = create_connection(conf, 'kra') kraclient = pki.kra.KRAClient(connection, crypto) systemcert_client = kraclient.system_certs transport_cert = systemcert_client.get_transport_cert() crypto.import_cert(KRA_TRANSPORT_NICK, transport_cert, "u,u,u") except Exception as e: LOG.error("Error in importing transport cert." " KRA may not be enabled: " + str(e)) def create_connection(conf, subsystem_path): pem_path = conf.dogtag_plugin.pem_path if pem_path is None: raise ValueError(u._("pem_path is required")) connection = pki.client.PKIConnection( 'https', conf.dogtag_plugin.dogtag_host, conf.dogtag_plugin.dogtag_port, subsystem_path) connection.set_authentication_cert(pem_path) return connection crypto = _setup_nss_db_services(CONF) crypto.initialize() class DogtagPluginAlgorithmException(exception.BarbicanException): message = u._("Invalid algorithm passed in") class DogtagPluginNotSupportedException(exception.NotSupported): message = u._("Operation not supported by Dogtag Plugin") def __init__(self, msg=None): if not msg: message = self.message else: message = msg super(DogtagPluginNotSupportedException, self).__init__(message) class DogtagKRAPlugin(sstore.SecretStoreBase): """Implementation of the secret store plugin with KRA as the backend.""" # metadata constants ALG = "alg" BIT_LENGTH = "bit_length" GENERATED = "generated" KEY_ID = "key_id" SECRET_MODE = "secret_mode" 
    PASSPHRASE_KEY_ID = "passphrase_key_id"
    CONVERT_TO_PEM = "convert_to_pem"

    # string constants
    DSA_PRIVATE_KEY_HEADER = '-----BEGIN DSA PRIVATE KEY-----'
    DSA_PRIVATE_KEY_FOOTER = '-----END DSA PRIVATE KEY-----'
    DSA_PUBLIC_KEY_HEADER = '-----BEGIN DSA PUBLIC KEY-----'
    DSA_PUBLIC_KEY_FOOTER = '-----END DSA PUBLIC KEY-----'

    def __init__(self, conf=CONF):
        """Constructor - create the keyclient."""
        LOG.debug("starting DogtagKRAPlugin init")
        connection = create_connection(conf, 'kra')

        # create kraclient; 'crypto' is the module-level NSS crypto provider
        # initialized at import time.
        kraclient = pki.kra.KRAClient(connection, crypto)
        self.keyclient = kraclient.keys

        self.keyclient.set_transport_cert(KRA_TRANSPORT_NICK)

        LOG.debug("completed DogtagKRAPlugin init")

    def store_secret(self, secret_dto):
        """Store a secret in the KRA

        If secret_dto.transport_key is not None, then we expect
        secret_dto.secret to include a base64 encoded PKIArchiveOptions
        structure as defined in section 6.4 of RFC 2511. This package contains
        a transport key wrapped session key, the session key wrapped secret
        and parameters to specify the symmetric key wrapping.

        Otherwise, the data is unencrypted and we use a call to archive_key()
        to have the Dogtag KRA client generate the relevant session keys.

        The secret_dto contains additional information on the type of secret
        that is being stored.  We will use that shortly.  For, now, lets just
        assume that its all PASS_PHRASE_TYPE

        Returns a dict with the relevant metadata (which in this case is just
        the key_id
        """
        data_type = key.KeyClient.PASS_PHRASE_TYPE
        client_key_id = uuid.uuid4().hex
        if secret_dto.transport_key is not None:
            # TODO(alee-3) send the transport key with the archival request
            # once the Dogtag Client API changes.
            response = self.keyclient.archive_pki_options(
                client_key_id, data_type, secret_dto.secret,
                key_algorithm=None, key_size=None)
        else:
            response = self.keyclient.archive_key(
                client_key_id, data_type, secret_dto.secret,
                key_algorithm=None, key_size=None)

        meta_dict = {DogtagKRAPlugin.KEY_ID: response.get_key_id()}

        # Persist alg/bit_length/mode alongside the key_id for retrieval.
        self._store_secret_attributes(meta_dict, secret_dto)
        return meta_dict

    def get_secret(self, secret_type, secret_metadata):
        """Retrieve a secret from the KRA

        The secret_metadata is simply the dict returned by a store_secret()
        or get_secret() call.  We will extract the key_id from this dict.

        Note: There are two ways to retrieve secrets from the KRA.

        The first method calls retrieve_key without a wrapping key.  This
        relies on the KRA client to generate a wrapping key (and wrap it with
        the KRA transport cert), and is completely transparent to the
        Barbican server.  What is returned to the caller is the
        unencrypted secret.

        The second way is to provide a wrapping key that would be generated
        on the barbican client.  That way only the client will be
        able to unwrap the secret.  This wrapping key is provided in the
        secret_metadata by Barbican core.

        Format/Type of the secret returned in the SecretDTO object.
        -----------------------------------------------------------
        The type of the secret returned is always dependent on the way it is
        stored using the store_secret method.

        In case of strings - like passphrase/PEM strings, the return will be
        a string.

        In case of binary data - the return will be the actual binary data.

        In case of retrieving an asymmetric key that is generated using the
        dogtag plugin, then the binary representation of, the asymmetric key
        in PEM format, is returned
        """
        key_id = secret_metadata[DogtagKRAPlugin.KEY_ID]

        key_spec = sstore.KeySpec(
            alg=secret_metadata.get(DogtagKRAPlugin.ALG, None),
            bit_length=secret_metadata.get(DogtagKRAPlugin.BIT_LENGTH, None),
            mode=secret_metadata.get(DogtagKRAPlugin.SECRET_MODE, None),
            passphrase=None
        )

        generated = secret_metadata.get(DogtagKRAPlugin.GENERATED, False)

        passphrase = self._get_passphrase_for_a_private_key(
            secret_type, secret_metadata, key_spec)

        recovered_key = None
        twsk = DogtagKRAPlugin._get_trans_wrapped_session_key(secret_type,
                                                              secret_metadata)

        if DogtagKRAPlugin.CONVERT_TO_PEM in secret_metadata:
            # Case for returning the asymmetric keys generated in KRA.
            # Asymmetric keys generated in KRA are not generated in PEM
            # format. This marker DogtagKRAPlugin.CONVERT_TO_PEM is set in the
            # secret_metadata for asymmetric keys generated in KRA to
            # help convert the returned private/public keys to PEM format and
            # eventually return the binary data of the keys in PEM format.

            if secret_type == sstore.SecretType.PUBLIC:
                # Public key should be retrieved using the get_key_info method
                # as it is treated as an attribute of the asymmetric key pair
                # stored in the KRA database.
                if key_spec.alg is None:
                    raise sstore.SecretAlgorithmNotSupportedException('None')

                key_info = self.keyclient.get_key_info(key_id)
                if key_spec.alg.upper() == key.KeyClient.RSA_ALGORITHM:
                    recovered_key = (RSA.importKey(key_info.public_key)
                                     .publickey()
                                     .exportKey('PEM')).encode('utf-8')
                elif key_spec.alg.upper() == key.KeyClient.DSA_ALGORITHM:
                    pub_seq = asn1.DerSequence()
                    pub_seq[:] = key_info.public_key
                    # NOTE(review): .encode("base64") is a Python 2-only codec
                    # idiom (fails on Python 3 bytes/str) -- verify the
                    # supported interpreter version for this plugin.
                    recovered_key = (
                        ("%s\n%s%s" %
                         (DogtagKRAPlugin.DSA_PUBLIC_KEY_HEADER,
                          pub_seq.encode().encode("base64"),
                          DogtagKRAPlugin.DSA_PUBLIC_KEY_FOOTER)
                         ).encode('utf-8')
                    )
                else:
                    raise sstore.SecretAlgorithmNotSupportedException(
                        key_spec.alg.upper()
                    )

            elif secret_type == sstore.SecretType.PRIVATE:
                key_data = self.keyclient.retrieve_key(key_id)
                if key_spec.alg.upper() == key.KeyClient.RSA_ALGORITHM:
                    recovered_key = (
                        (RSA.importKey(key_data.data)
                         .exportKey('PEM', passphrase, 8))
                        .encode('utf-8')
                    )
                elif key_spec.alg.upper() == key.KeyClient.DSA_ALGORITHM:
                    pub_seq = asn1.DerSequence()
                    pub_seq[:] = key_data.data
                    recovered_key = (
                        ("%s\n%s%s" %
                         (DogtagKRAPlugin.DSA_PRIVATE_KEY_HEADER,
                          pub_seq.encode().encode("base64"),
                          DogtagKRAPlugin.DSA_PRIVATE_KEY_FOOTER)
                         ).encode('utf-8')
                    )
                else:
                    raise sstore.SecretAlgorithmNotSupportedException(
                        key_spec.alg.upper()
                    )
        else:
            # TODO(alee-3) send transport key as well when dogtag client API
            # changes in case the transport key has changed.
            key_data = self.keyclient.retrieve_key(key_id, twsk)
            if twsk:
                # The data returned is a byte array.
                recovered_key = key_data.encrypted_data
            else:
                recovered_key = key_data.data

        # TODO(alee) remove final field when content_type is removed
        # from secret_dto
        if generated:
            # Keys generated by the KRA are stored raw; Barbican expects
            # base64-encoded secrets, so encode on the way out.
            recovered_key = base64.b64encode(recovered_key)

        ret = sstore.SecretDTO(
            type=secret_type,
            secret=recovered_key,
            key_spec=key_spec,
            content_type=None,
            transport_key=None)
        return ret

    def delete_secret(self, secret_metadata):
        """Delete a secret from the KRA

        There is currently no way to delete a secret in Dogtag.
        We will be implementing such a method shortly.
        """
        pass

    def generate_symmetric_key(self, key_spec):
        """Generate a symmetric key

        This calls generate_symmetric_key() on the KRA passing in the
        algorithm, bit_length and id (used as the client_key_id) from
        the secret.  The remaining parameters are not used.

        Returns a metadata object that can be used for retrieving the secret.
        """
        usages = [key.SymKeyGenerationRequest.DECRYPT_USAGE,
                  key.SymKeyGenerationRequest.ENCRYPT_USAGE]

        client_key_id = uuid.uuid4().hex
        algorithm = self._map_algorithm(key_spec.alg.lower())

        if algorithm is None:
            raise DogtagPluginAlgorithmException
        passphrase = key_spec.passphrase
        if passphrase:
            raise DogtagPluginNotSupportedException(
                u._("Passphrase encryption is not supported for symmetric"
                    " key generating algorithms."))

        response = self.keyclient.generate_symmetric_key(
            client_key_id,
            algorithm,
            key_spec.bit_length,
            usages)

        # Barbican expects stored keys to be base 64 encoded. We need to
        # add flag to the keyclient.generate_symmetric_key() call above
        # to ensure that the key that is stored is base64 encoded.
        #
        # As a workaround until that update is available, we will store a
        # parameter "generated" to indicate that the response must be base64
        # encoded on retrieval.  Note that this will not work for transport
        # key encoded data.
        return {DogtagKRAPlugin.ALG: key_spec.alg,
                DogtagKRAPlugin.BIT_LENGTH: key_spec.bit_length,
                DogtagKRAPlugin.SECRET_MODE: key_spec.mode,
                DogtagKRAPlugin.KEY_ID: response.get_key_id(),
                DogtagKRAPlugin.GENERATED: True}

    def generate_asymmetric_key(self, key_spec):
        """Generate an asymmetric key.

        Note that barbican expects all secrets to be base64 encoded.
""" usages = [key.AsymKeyGenerationRequest.DECRYPT_USAGE, key.AsymKeyGenerationRequest.ENCRYPT_USAGE] client_key_id = uuid.uuid4().hex algorithm = self._map_algorithm(key_spec.alg.lower()) passphrase = key_spec.passphrase if algorithm is None: raise DogtagPluginAlgorithmException passphrase_key_id = None passphrase_metadata = None if passphrase: if algorithm == key.KeyClient.DSA_ALGORITHM: raise DogtagPluginNotSupportedException( u._("Passphrase encryption is not " "supported for DSA algorithm") ) stored_passphrase_info = self.keyclient.archive_key( uuid.uuid4().hex, self.keyclient.PASS_PHRASE_TYPE, base64.b64encode(passphrase)) passphrase_key_id = stored_passphrase_info.get_key_id() passphrase_metadata = { DogtagKRAPlugin.KEY_ID: passphrase_key_id } # Barbican expects stored keys to be base 64 encoded. We need to # add flag to the keyclient.generate_asymmetric_key() call above # to ensure that the key that is stored is base64 encoded. # # As a workaround until that update is available, we will store a # parameter "generated" to indicate that the response must be base64 # encoded on retrieval. Note that this will not work for transport # key encoded data. 
response = self.keyclient.generate_asymmetric_key( client_key_id, algorithm, key_spec.bit_length, usages) public_key_metadata = { DogtagKRAPlugin.ALG: key_spec.alg, DogtagKRAPlugin.BIT_LENGTH: key_spec.bit_length, DogtagKRAPlugin.KEY_ID: response.get_key_id(), DogtagKRAPlugin.CONVERT_TO_PEM: "true", DogtagKRAPlugin.GENERATED: True } private_key_metadata = { DogtagKRAPlugin.ALG: key_spec.alg, DogtagKRAPlugin.BIT_LENGTH: key_spec.bit_length, DogtagKRAPlugin.KEY_ID: response.get_key_id(), DogtagKRAPlugin.CONVERT_TO_PEM: "true", DogtagKRAPlugin.GENERATED: True } if passphrase_key_id: private_key_metadata[DogtagKRAPlugin.PASSPHRASE_KEY_ID] = ( passphrase_key_id ) return sstore.AsymmetricKeyMetadataDTO(private_key_metadata, public_key_metadata, passphrase_metadata) def generate_supports(self, key_spec): """Key generation supported? Specifies whether the plugin supports key generation with the given key_spec. For now, we will just check the algorithm. When dogtag adds a call to check the bit length as well, we will use that call to take advantage of the bit_length information """ return self._map_algorithm(key_spec.alg) is not None def store_secret_supports(self, key_spec): """Key storage supported? Specifies whether the plugin supports storage of the secret given the attributes included in the KeySpec """ return True @staticmethod def _map_algorithm(algorithm): """Map Barbican algorithms to Dogtag plugin algorithms. Note that only algorithms supported by Dogtag will be mapped. 
""" if algorithm is None: return None if algorithm.lower() == sstore.KeyAlgorithm.AES.lower(): return key.KeyClient.AES_ALGORITHM elif algorithm.lower() == sstore.KeyAlgorithm.DES.lower(): return key.KeyClient.DES_ALGORITHM elif algorithm.lower() == sstore.KeyAlgorithm.DESEDE.lower(): return key.KeyClient.DES3_ALGORITHM elif algorithm.lower() == sstore.KeyAlgorithm.DSA.lower(): return key.KeyClient.DSA_ALGORITHM elif algorithm.lower() == sstore.KeyAlgorithm.RSA.lower(): return key.KeyClient.RSA_ALGORITHM elif algorithm.lower() == sstore.KeyAlgorithm.DIFFIE_HELLMAN.lower(): # may be supported, needs to be tested return None elif algorithm.lower() == sstore.KeyAlgorithm.EC.lower(): # asymmetric keys not yet supported return None else: return None @staticmethod def _store_secret_attributes(meta_dict, secret_dto): # store the following attributes for retrieval key_spec = secret_dto.key_spec if key_spec is not None: if key_spec.alg is not None: meta_dict[DogtagKRAPlugin.ALG] = key_spec.alg if key_spec.bit_length is not None: meta_dict[DogtagKRAPlugin.BIT_LENGTH] = key_spec.bit_length if key_spec.mode is not None: meta_dict[DogtagKRAPlugin.SECRET_MODE] = key_spec.mode def _get_passphrase_for_a_private_key(self, secret_type, secret_metadata, key_spec): """Retrieve the passphrase for the private key stored in the KRA.""" if secret_type is None: return None if key_spec.alg is None: return None passphrase = None if DogtagKRAPlugin.PASSPHRASE_KEY_ID in secret_metadata: if key_spec.alg.upper() == key.KeyClient.RSA_ALGORITHM: passphrase = self.keyclient.retrieve_key( secret_metadata.get(DogtagKRAPlugin.PASSPHRASE_KEY_ID) ).data else: if key_spec.alg.upper() == key.KeyClient.DSA_ALGORITHM: raise sstore.SecretGeneralException( u._("DSA keys should not have a passphrase in the" " database, for being used during retrieval.") ) raise sstore.SecretGeneralException( u._("Secrets of type {secret_type} should not have a " "passphrase in the database, for being used during " 
"retrieval.").format(secret_type=secret_type) ) # note that Barbican expects the passphrase to be base64 encoded when # stored, so we need to decode it. if passphrase: passphrase = base64.b64decode(passphrase) return passphrase @staticmethod def _get_trans_wrapped_session_key(secret_type, secret_metadata): twsk = secret_metadata.get('trans_wrapped_session_key', None) if secret_type in [sstore.SecretType.PUBLIC, sstore.SecretType.PRIVATE]: if twsk: raise DogtagPluginNotSupportedException( u._("Encryption using session key is not supported when " "retrieving a {secret_type} " "key.").format(secret_type=secret_type) ) return twsk def _catch_request_exception(ca_related_function): def _catch_ca_unavailable(self, *args, **kwargs): try: return ca_related_function(self, *args, **kwargs) except request_exceptions.RequestException: return cm.ResultDTO( cm.CertificateStatus.CA_UNAVAILABLE_FOR_REQUEST) return _catch_ca_unavailable def _catch_enrollment_exceptions(ca_related_function): def _catch_enrollment_exception(self, *args, **kwargs): try: return ca_related_function(self, *args, **kwargs) except pki.BadRequestException as e: return cm.ResultDTO( cm.CertificateStatus.CLIENT_DATA_ISSUE_SEEN, status_message=e.message) except pki.PKIException as e: raise cm.CertificateGeneralException( u._("Exception thrown by enroll_cert: {message}").format( message=e.message)) return _catch_enrollment_exception def _catch_subca_creation_exceptions(ca_related_function): def _catch_subca_exception(self, *args, **kwargs): try: return ca_related_function(self, *args, **kwargs) except pki.BadRequestException as e: raise exception.BadSubCACreationRequest(reason=e.message) except pki.PKIException as e: raise exception.SubCACreationErrors(reason=e.message) except request_exceptions.RequestException: raise exception.SubCACreationErrors( reason="Unable to connect to CA") return _catch_subca_exception def _catch_subca_deletion_exceptions(ca_related_function): def _catch_subca_exception(self, *args, 
**kwargs): try: return ca_related_function(self, *args, **kwargs) except pki.ResourceNotFoundException as e: LOG.warning(u._LI("Sub-CA already deleted")) pass except pki.PKIException as e: raise exception.SubCADeletionErrors(reason=e.message) except request_exceptions.RequestException: raise exception.SubCACreationErrors( reason="Unable to connect to CA") return _catch_subca_exception class DogtagCAPlugin(cm.CertificatePluginBase): """Implementation of the cert plugin with Dogtag CA as the backend.""" # order_metadata fields PROFILE_ID = "profile_id" # plugin_metadata fields REQUEST_ID = "request_id" def __init__(self, conf=CONF): """Constructor - create the cert clients.""" connection = create_connection(conf, 'ca') self.certclient = pki.cert.CertClient(connection) self.simple_cmc_profile = conf.dogtag_plugin.simple_cmc_profile self.auto_approved_profiles = conf.dogtag_plugin.auto_approved_profiles self.working_dir = conf.dogtag_plugin.plugin_working_dir if not os.path.isdir(self.working_dir): os.mkdir(self.working_dir) self._expiration = None self._expiration_delta = conf.dogtag_plugin.ca_expiration_time self._expiration_data_path = os.path.join(self.working_dir, "expiration_data.txt") self._host_aid_path = os.path.join(self.working_dir, "host_aid.txt") self._host_aid = None if not os.path.isfile(self._expiration_data_path): self.expiration = datetime.datetime.utcnow() global subcas_available subcas_available = self._are_subcas_enabled_on_backend(connection) if subcas_available: self.authority_client = authority.AuthorityClient(connection) if not os.path.isfile(self._host_aid_path): self.host_aid = self.get_host_aid() @property def expiration(self): if self._expiration is None: try: with open(self._expiration_data_path) as expiration_fh: self._expiration = datetime.datetime.strptime( expiration_fh.read(), "%Y-%m-%d %H:%M:%S.%f" ) except (ValueError, TypeError): LOG.warning(u._LI("Invalid data read from expiration file")) self.expiration = datetime.utcnow() return 
self._expiration @expiration.setter def expiration(self, val): with open(self._expiration_data_path, 'w') as expiration_fh: expiration_fh.write(val.strftime("%Y-%m-%d %H:%M:%S.%f")) self._expiration = val @property def host_aid(self): if self._host_aid is None: with open(self._host_aid_path) as host_aid_fh: self._host_aid = host_aid_fh.read() return self._host_aid @host_aid.setter def host_aid(self, val): if val is not None: with open(self._host_aid_path, 'w') as host_aid_fh: host_aid_fh.write(val) self._host_aid = val def _are_subcas_enabled_on_backend(self, connection): """Check if subca feature is available SubCA creation must be supported in both the Dogtag client as well as on the back-end server. Moreover, it must be enabled on the backend server. This method sets the subcas_available global variable. :return: True/False """ global subcas_available if subcas_available: # subcas are supported in the Dogtag client try: feature_client = feature.FeatureClient(connection) authority_feature = feature_client.get_feature("authority") if authority_feature.enabled: LOG.info(u._LI("Sub-CAs are enabled by Dogtag server")) return True else: LOG.info(u._LI("Sub-CAs are not enabled by Dogtag server")) except (request_exceptions.HTTPError, pki.ResourceNotFoundException): LOG.info(u._LI("Sub-CAs are not supported by Dogtag server")) else: LOG.info(u._LI("Sub-CAs are not supported by Dogtag client")) return False def _get_request_id(self, order_id, plugin_meta, operation): request_id = plugin_meta.get(self.REQUEST_ID, None) if not request_id: raise cm.CertificateGeneralException( u._( "{request} not found for {operation} for " "order_id {order_id}" ).format( request=self.REQUEST_ID, operation=operation, order_id=order_id ) ) return request_id @_catch_request_exception def _get_request(self, request_id): try: return self.certclient.get_request(request_id) except pki.RequestNotFoundException: return None @_catch_request_exception def _get_cert(self, cert_id): try: return 
self.certclient.get_cert(cert_id) except pki.CertNotFoundException: return None def get_default_ca_name(self): return "Dogtag CA" def get_default_signing_cert(self): # TODO(alee) Add code to get the signing cert return None def get_default_intermediates(self): # TODO(alee) Add code to get the cert chain return None def check_certificate_status(self, order_id, order_meta, plugin_meta, barbican_meta_dto): """Check the status of a certificate request. :param order_id: ID of the order associated with this request :param order_meta: order_metadata associated with this order :param plugin_meta: data populated by previous calls for this order, in particular the request_id :param barbican_meta_dto: additional data needed to process order. :return: cm.ResultDTO """ request_id = self._get_request_id(order_id, plugin_meta, "checking") request = self._get_request(request_id) if not request: raise cm.CertificateGeneralException( u._( "No request found for request_id {request_id} for " "order {order_id}" ).format( request_id=request_id, order_id=order_id ) ) request_status = request.request_status if request_status == pki.cert.CertRequestStatus.REJECTED: return cm.ResultDTO( cm.CertificateStatus.CLIENT_DATA_ISSUE_SEEN, status_message=request.error_message) elif request_status == pki.cert.CertRequestStatus.CANCELED: return cm.ResultDTO( cm.CertificateStatus.REQUEST_CANCELED) elif request_status == pki.cert.CertRequestStatus.PENDING: return cm.ResultDTO( cm.CertificateStatus.WAITING_FOR_CA) elif request_status == pki.cert.CertRequestStatus.COMPLETE: # get the cert cert_id = request.cert_id if not cert_id: raise cm.CertificateGeneralException( u._( "Request {request_id} reports status_complete, but no " "cert_id has been returned" ).format( request_id=request_id ) ) cert = self._get_cert(cert_id) if not cert: raise cm.CertificateGeneralException( u._("Certificate not found for cert_id: {cert_id}").format( cert_id=cert_id ) ) return cm.ResultDTO( 
cm.CertificateStatus.CERTIFICATE_GENERATED, certificate=cert.encoded, intermediates=cert.pkcs7_cert_chain) else: raise cm.CertificateGeneralException( u._("Invalid request_status returned by CA")) @_catch_request_exception def issue_certificate_request(self, order_id, order_meta, plugin_meta, barbican_meta_dto): """Issue a certificate request to the Dogtag CA Call the relevant certificate issuance function depending on the Barbican defined request type in the order_meta. :param order_id: ID of the order associated with this request :param order_meta: dict containing all the inputs for this request. This includes the request_type. :param plugin_meta: Used to store data for status check :param barbican_meta_dto: additional data needed to process order. :return: cm.ResultDTO """ request_type = order_meta.get( cm.REQUEST_TYPE, cm.CertificateRequestType.CUSTOM_REQUEST) jump_table = { cm.CertificateRequestType.SIMPLE_CMC_REQUEST: self._issue_simple_cmc_request, cm.CertificateRequestType.FULL_CMC_REQUEST: self._issue_full_cmc_request, cm.CertificateRequestType.STORED_KEY_REQUEST: self._issue_stored_key_request, cm.CertificateRequestType.CUSTOM_REQUEST: self._issue_custom_certificate_request } if request_type not in jump_table: raise DogtagPluginNotSupportedException(u._( "Dogtag plugin does not support %s request type").format( request_type)) return jump_table[request_type](order_id, order_meta, plugin_meta, barbican_meta_dto) @_catch_enrollment_exceptions def _issue_simple_cmc_request(self, order_id, order_meta, plugin_meta, barbican_meta_dto): """Issue a simple CMC request to the Dogtag CA. 
:param order_id: :param order_meta: :param plugin_meta: :param barbican_meta_dto: :return: cm.ResultDTO """ if barbican_meta_dto.generated_csr is not None: csr = barbican_meta_dto.generated_csr else: # we expect the CSR to be base64 encoded PEM # Dogtag CA needs it to be unencoded csr = base64.b64decode(order_meta.get('request_data')) profile_id = order_meta.get('profile', self.simple_cmc_profile) inputs = { 'cert_request_type': 'pkcs10', 'cert_request': csr } return self._issue_certificate_request( profile_id, inputs, plugin_meta, barbican_meta_dto) @_catch_enrollment_exceptions def _issue_full_cmc_request(self, order_id, order_meta, plugin_meta, barbican_meta_dto): """Issue a full CMC request to the Dogtag CA. :param order_id: :param order_meta: :param plugin_meta: :param barbican_meta_dto: :return: cm.ResultDTO """ raise DogtagPluginNotSupportedException(u._( "Dogtag plugin does not support %s request type").format( cm.CertificateRequestType.FULL_CMC_REQUEST)) @_catch_enrollment_exceptions def _issue_stored_key_request(self, order_id, order_meta, plugin_meta, barbican_meta_dto): """Issue a simple CMC request to the Dogtag CA. :param order_id: :param order_meta: :param plugin_meta: :param barbican_meta_dto: :return: cm.ResultDTO """ return self._issue_simple_cmc_request( order_id, order_meta, plugin_meta, barbican_meta_dto) @_catch_enrollment_exceptions def _issue_custom_certificate_request(self, order_id, order_meta, plugin_meta, barbican_meta_dto): """Issue a custom certificate request to Dogtag CA :param order_id: ID of the order associated with this request :param order_meta: dict containing all the inputs required for a particular profile. One of these must be the profile_id. The exact fields (both optional and mandatory) depend on the profile, but they will be exposed to the user in a method to expose syntax. Depending on the profile, only the relevant fields will be populated in the request. All others will be ignored. 
:param plugin_meta: Used to store data for status check. :param barbican_meta_dto: Extra data to aid in processing. :return: cm.ResultDTO """ profile_id = order_meta.get(self.PROFILE_ID, None) if not profile_id: return cm.ResultDTO( cm.CertificateStatus.CLIENT_DATA_ISSUE_SEEN, status_message=u._("No profile_id specified")) # we expect the csr to be base64 encoded PEM data. Dogtag CA expects # PEM data though so we need to decode it. updated_meta = copy.deepcopy(order_meta) if 'cert_request' in updated_meta: updated_meta['cert_request'] = base64.b64decode( updated_meta['cert_request']) return self._issue_certificate_request( profile_id, updated_meta, plugin_meta, barbican_meta_dto) def _issue_certificate_request(self, profile_id, inputs, plugin_meta, barbican_meta_dto): """Actually send the cert request to the Dogtag CA If the profile_id is one of the auto-approved profiles, then use the convenience enroll_cert() method to create and approve the request using the Barbican agent cert credentials. If not, then submit the request and wait for approval by a CA agent on the Dogtag CA. :param profile_id: enrollment profile :param inputs: dict of request inputs :param plugin_meta: Used to store data for status check. :param barbican_meta_dto: Extra data to aid in processing. 
        :return: cm.ResultDTO
        """
        # Requests against the default (host) CA omit the ca_id argument;
        # sub-CA requests pass the plugin's authority id explicitly.
        ca_id = barbican_meta_dto.plugin_ca_id or self.get_default_ca_name()
        if profile_id in self.auto_approved_profiles:
            # Auto-approved path: enroll_cert() submits, approves and
            # retrieves the issued cert in one call using agent credentials.
            if ca_id == self.get_default_ca_name():
                results = self.certclient.enroll_cert(profile_id, inputs)
            else:
                results = self.certclient.enroll_cert(
                    profile_id, inputs, ca_id)
            return self._process_auto_enrollment_results(
                results, plugin_meta, barbican_meta_dto)
        else:
            # Manual-approval path: the request is created and submitted,
            # then waits for a CA agent to approve it.
            request = self.certclient.create_enrollment_request(
                profile_id, inputs)
            if ca_id == self.get_default_ca_name():
                results = self.certclient.submit_enrollment_request(request)
            else:
                results = self.certclient.submit_enrollment_request(
                    request, ca_id)
            return self._process_pending_enrollment_results(
                results, plugin_meta, barbican_meta_dto)

    def _process_auto_enrollment_results(self, enrollment_results,
                                         plugin_meta, barbican_meta_dto):
        """Process results received from Dogtag CA for auto-enrollment

        This processes data from enroll_cert, which submits, approves and
        gets the cert issued and returns as a list of CertEnrollment objects.

        :param enrollment_results: list of CertEnrollmentResult objects
        :param plugin_meta: metadata dict for storing plugin specific data
        :param barbican_meta_dto: object containing extra data to help process
            the request
        :return: cm.ResultDTO
        """

        # Although it is possible to create multiple certs in an invocation
        # of enroll_cert, Barbican cannot handle this case.  Assume
        # only one cert and request generated for now.
        enrollment_result = enrollment_results[0]
        request = enrollment_result.request
        if not request:
            raise cm.CertificateGeneralException(
                u._("No request returned in enrollment_results"))

        # store the request_id in the plugin metadata
        plugin_meta[self.REQUEST_ID] = request.request_id

        cert = enrollment_result.cert

        return self._create_dto(request.request_status,
                                request.request_id,
                                request.error_message,
                                cert)

    def _process_pending_enrollment_results(self, results, plugin_meta,
                                            barbican_meta_dto):
        """Process results received from Dogtag CA for pending enrollment

        This method processes data returned by submit_enrollment_request(),
        which creates requests that still need to be approved by an agent.

        :param results: CertRequestInfoCollection object
        :param plugin_meta: metadata dict for storing plugin specific data
        :param barbican_meta_dto: object containing extra data to help process
            the request
        :return: cm.ResultDTO
        """

        # Although it is possible to create multiple certs in an invocation
        # of enroll_cert, Barbican cannot handle this case.  Assume
        # only one cert and request generated for now
        cert_request_info = results.cert_request_info_list[0]
        status = cert_request_info.request_status
        # request_id/error_message may be absent on the info object,
        # hence the defensive getattr with a None default.
        request_id = getattr(cert_request_info, 'request_id', None)
        error_message = getattr(cert_request_info, 'error_message', None)

        # store the request_id in the plugin metadata
        if request_id:
            plugin_meta[self.REQUEST_ID] = request_id

        return self._create_dto(status, request_id, error_message, None)

    def _create_dto(self, request_status, request_id, error_message, cert):
        # Map a Dogtag request status onto the corresponding Barbican
        # ResultDTO; raises CertificateGeneralException for inconsistent
        # or unknown statuses.
        dto = None
        if request_status == pki.cert.CertRequestStatus.COMPLETE:
            if cert is not None:
                # Barbican is expecting base 64 encoded PEM, so we base64
                # encode below.
                #
                # Currently there is an inconsistency in what Dogtag returns
                # for certificates and intermediates.  For certs, we return
                # PEM, whereas for intermediates, we return headerless PEM.
# This is being addressed in Dogtag ticket: # https://fedorahosted.org/pki/ticket/1374 # # Until this is addressed, simply add the missing headers cert_chain = (CERT_HEADER + "\r\n" + cert.pkcs7_cert_chain + CERT_FOOTER) dto = cm.ResultDTO(cm.CertificateStatus.CERTIFICATE_GENERATED, certificate=base64.b64encode(cert.encoded), intermediates=base64.b64encode(cert_chain)) else: raise cm.CertificateGeneralException( u._("request_id {req_id} returns COMPLETE but no cert " "returned").format(req_id=request_id)) elif request_status == pki.cert.CertRequestStatus.REJECTED: dto = cm.ResultDTO(cm.CertificateStatus.CLIENT_DATA_ISSUE_SEEN, status_message=error_message) elif request_status == pki.cert.CertRequestStatus.CANCELED: dto = cm.ResultDTO(cm.CertificateStatus.REQUEST_CANCELED) elif request_status == pki.cert.CertRequestStatus.PENDING: dto = cm.ResultDTO(cm.CertificateStatus.WAITING_FOR_CA) else: raise cm.CertificateGeneralException( u._("Invalid request_status {status} for " "request_id {request_id}").format( status=request_status, request_id=request_id) ) return dto def modify_certificate_request(self, order_id, order_meta, plugin_meta, barbican_meta_dto): """Modify a certificate request. Once a certificate request is generated, it cannot be modified. The only alternative is to cancel the request (if it has not already completed) and attempt a fresh enrolment. That is what will be attempted here. :param order_id: ID for this order :param order_meta: order metadata. It is assumed that the newly modified request data will be present here. :param plugin_meta: data stored on behalf of the plugin for further operations :param barbican_meta_dto: additional data needed to process order. 
:return: ResultDTO: """ result_dto = self.cancel_certificate_request( order_id, order_meta, plugin_meta, barbican_meta_dto) if result_dto.status == cm.CertificateStatus.REQUEST_CANCELED: return self.issue_certificate_request( order_id, order_meta, plugin_meta, barbican_meta_dto) elif result_dto.status == cm.CertificateStatus.INVALID_OPERATION: return cm.ResultDTO( cm.CertificateStatus.INVALID_OPERATION, status_message=u._( "Modify request: unable to cancel: " "{message}").format(message=result_dto.status_message) ) else: # other status (ca_unavailable, client_data_issue) # return result from cancel operation return result_dto @_catch_request_exception def cancel_certificate_request(self, order_id, order_meta, plugin_meta, barbican_meta_dto): """Cancel a certificate request. :param order_id: ID for the order associated with this request :param order_meta: order metadata fdr this request :param plugin_meta: data stored by plugin for further processing. In particular, the request_id :param barbican_meta_dto: additional data needed to process order. :return: cm.ResultDTO: """ request_id = self._get_request_id(order_id, plugin_meta, "cancelling") try: review_response = self.certclient.review_request(request_id) self.certclient.cancel_request(request_id, review_response) return cm.ResultDTO(cm.CertificateStatus.REQUEST_CANCELED) except pki.RequestNotFoundException: return cm.ResultDTO( cm.CertificateStatus.CLIENT_DATA_ISSUE_SEEN, status_message=u._("no request found for this order")) except pki.ConflictingOperationException as e: return cm.ResultDTO( cm.CertificateStatus.INVALID_OPERATION, status_message=e.message) def supports(self, certificate_spec): if cm.CA_TYPE in certificate_spec: return certificate_spec[cm.CA_TYPE] == cm.CA_PLUGIN_TYPE_DOGTAG if cm.CA_PLUGIN_TYPE_SYMANTEC in certificate_spec: # TODO(alee-3) Handle case where SKI is provided pass return True def supported_request_types(self): """Returns the request_types supported by this plugin. 
        :returns: a list of the Barbican-core defined request_types
                  supported by this plugin.
        """
        return [cm.CertificateRequestType.SIMPLE_CMC_REQUEST,
                cm.CertificateRequestType.STORED_KEY_REQUEST,
                cm.CertificateRequestType.CUSTOM_REQUEST]

    def supports_create_ca(self):
        """Returns if this plugin and the backend CA supports subCAs

        :return: True/False
        """
        # subcas_available is a module-level flag set when the Dogtag
        # authority subsystem is importable.
        return subcas_available

    @_catch_subca_creation_exceptions
    def create_ca(self, ca_create_dto):
        """Creates a subordinate CA upon request

        :param ca_create_dto: Data transfer object
            :class:`CACreateDTO` containing data required
            to generate a subordinate CA.  This data includes the subject DN
            of the new CA signing certificate, a name for the new CA and
            a reference to the CA that will issue the new subordinate CA's
            signing certificate,
        :return: ca_info: Dictionary containing the data needed to create
            a models.CertificateAuthority object
        :raises exception.SubCAsNotSupported: if the backend CA has no
            subordinate CA support
        """
        if not subcas_available:
            raise exception.SubCAsNotSupported(
                "Subordinate CAs are not supported by this Dogtag CA")
        # Translate the Barbican-visible parent id into the Dogtag
        # authority id (handles the host-CA migration case).
        parent_ca_id = self._get_correct_ca_id(ca_create_dto.parent_ca_id)
        ca_data = authority.AuthorityData(
            dn=ca_create_dto.subject_dn,
            parent_aid=parent_ca_id,
            description=ca_create_dto.name)
        new_ca_data = self.authority_client.create_ca(ca_data)

        cert = self.authority_client.get_cert(new_ca_data.aid, "PEM")
        chain = self.authority_client.get_chain(new_ca_data.aid, "PEM")

        return {
            cm.INFO_NAME: new_ca_data.description,
            cm.INFO_CA_SIGNING_CERT: cert,
            cm.INFO_EXPIRATION: self.expiration.isoformat(),
            cm.INFO_INTERMEDIATES: chain,
            cm.PLUGIN_CA_ID: new_ca_data.aid
        }

    def _get_correct_ca_id(self, plugin_ca_id):
        """Returns the correct authority id

        When the Dogtag plugin updates its CA list, any subcas will
        have a plugin_ca_id that matches the authority_id (aid) as
        returned from the backend CA.

        For migration purposes, though, ie. migrating from a non-subca
        environment to a subca one, we want the host CA to keep the
        same plugin_ca_id (which is the default_ca_name) so that no
        disruption occurs.
Therefore, we need to store the host CA's authority ID (in get_ca_info) and return it here instead. """ if plugin_ca_id == self.get_default_ca_name(): return self.host_aid else: return plugin_ca_id @_catch_subca_deletion_exceptions def delete_ca(self, ca_id): """Deletes a subordinate CA :param ca_id: id for the CA as specified by the plugin :return: None """ if not subcas_available: raise exception.SubCAsNotSupported( "Subordinate CAs are not supported by this Dogtag CA") # ca must be disabled first self.authority_client.disable_ca(ca_id) self.authority_client.delete_ca(ca_id) def get_ca_info(self): if not subcas_available: return super(DogtagCAPlugin, self).get_ca_info() self.expiration = (datetime.datetime.utcnow() + datetime.timedelta( days=int(self._expiration_delta))) ret = {} cas = self.authority_client.list_cas() for ca_data in cas.ca_list: if not ca_data.enabled: continue cert = self.authority_client.get_cert(ca_data.aid, "PEM") chain = self.authority_client.get_chain(ca_data.aid, "PEM") ca_info = { cm.INFO_NAME: ca_data.description, cm.INFO_CA_SIGNING_CERT: cert, cm.INFO_INTERMEDIATES: chain, cm.INFO_EXPIRATION: self.expiration.isoformat() } # handle the migration case. The top level CA should continue # to work as before if ca_data.is_host_authority: ret[self.get_default_ca_name()] = ca_info self.host_aid = ca_data.aid else: ret[ca_data.aid] = ca_info return ret def get_host_aid(self): cas = self.authority_client.list_cas() for ca_data in cas.ca_list: if ca_data.is_host_authority: return ca_data.aid return None barbican-2.0.0/barbican/plugin/interface/0000775000567000056710000000000012701406024021406 5ustar jenkinsjenkins00000000000000barbican-2.0.0/barbican/plugin/interface/__init__.py0000664000567000056710000000000012701405673023516 0ustar jenkinsjenkins00000000000000barbican-2.0.0/barbican/plugin/interface/certificate_manager.py0000664000567000056710000007053612701405674025761 0ustar jenkinsjenkins00000000000000# Copyright (c) 2013-2014 Rackspace, Inc. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. """ SSL Certificate resources for Barbican. The resources here should be generic across all certificate-related implementations. Hence do not place vendor-specific content in this module. """ import abc import datetime from oslo_config import cfg import six from stevedore import named from barbican.common import config from barbican.common import exception import barbican.common.utils as utils from barbican import i18n as u from barbican.model import models from barbican.model import repositories as repos from barbican.plugin.util import utils as plugin_utils LOG = utils.getLogger(__name__) CONF = config.new_config() # Configuration for certificate processing plugins: DEFAULT_PLUGIN_NAMESPACE = 'barbican.certificate.plugin' DEFAULT_PLUGINS = ['simple_certificate'] cert_opt_group = cfg.OptGroup(name='certificate', title='Certificate Plugin Options') cert_opts = [ cfg.StrOpt('namespace', default=DEFAULT_PLUGIN_NAMESPACE, help=u._('Extension namespace to search for plugins.') ), cfg.MultiStrOpt('enabled_certificate_plugins', default=DEFAULT_PLUGINS, help=u._('List of certificate plugins to load.') ) ] CONF.register_group(cert_opt_group) CONF.register_opts(cert_opts, group=cert_opt_group) config.parse_args(CONF) # Configuration for certificate eventing plugins: DEFAULT_EVENT_PLUGIN_NAMESPACE = 'barbican.certificate.event.plugin' DEFAULT_EVENT_PLUGINS = ['simple_certificate_event'] cert_event_opt_group = 
cfg.OptGroup(name='certificate_event', title='Certificate Event Plugin Options') cert_event_opts = [ cfg.StrOpt('namespace', default=DEFAULT_EVENT_PLUGIN_NAMESPACE, help=u._('Extension namespace to search for eventing plugins.') ), cfg.MultiStrOpt('enabled_certificate_event_plugins', default=DEFAULT_EVENT_PLUGINS, help=u._('List of certificate plugins to load.') ) ] CONF.register_group(cert_event_opt_group) CONF.register_opts(cert_event_opts, group=cert_event_opt_group) ERROR_RETRY_MSEC = 300000 RETRY_MSEC = 3600000 CA_INFO_DEFAULT_EXPIRATION_DAYS = 1 CA_PLUGIN_TYPE_DOGTAG = "dogtag" CA_PLUGIN_TYPE_SYMANTEC = "symantec" # fields to distinguish CA types and subject key identifiers CA_TYPE = "ca_type" CA_SUBJECT_KEY_IDENTIFIER = "ca_subject_key_identifier" # field to get the certificate request type REQUEST_TYPE = "request_type" # fields for the ca_id, plugin_ca_id CA_ID = "ca_id" PLUGIN_CA_ID = "plugin_ca_id" # fields for ca_info dict keys INFO_NAME = "name" INFO_DESCRIPTION = "description" INFO_CA_SIGNING_CERT = "ca_signing_certificate" INFO_INTERMEDIATES = "intermediates" INFO_EXPIRATION = "expiration" class CertificateRequestType(object): """Constants to define the certificate request type.""" CUSTOM_REQUEST = "custom" FULL_CMC_REQUEST = "full-cmc" SIMPLE_CMC_REQUEST = "simple-cmc" STORED_KEY_REQUEST = "stored-key" class CertificatePluginNotFound(exception.BarbicanException): """Raised when no certificate plugin supporting a request is available.""" def __init__(self, plugin_name=None): if plugin_name: message = u._( 'Certificate plugin "{name}"' ' not found.').format(name=plugin_name) else: message = u._("Certificate plugin not found or configured.") super(CertificatePluginNotFound, self).__init__(message) class CertificatePluginNotFoundForCAID(exception.BarbicanException): """Raised when no certificate plugin is available for a CA_ID.""" def __init__(self, ca_id): message = u._( 'Certificate plugin not found for "{ca_id}".').format(ca_id=ca_id) 
        super(CertificatePluginNotFoundForCAID, self).__init__(message)


class CertificateEventPluginNotFound(exception.BarbicanException):
    """Raised with no certificate event plugin supporting request."""
    def __init__(self, plugin_name=None):
        # Tailor the message depending on whether a specific plugin
        # name was requested or no plugin at all could be found.
        if plugin_name:
            message = u._(
                'Certificate event plugin "{name}" '
                'not found.').format(name=plugin_name)
        else:
            message = u._("Certificate event plugin not found.")
        super(CertificateEventPluginNotFound, self).__init__(message)


class CertificateStatusNotSupported(exception.BarbicanException):
    """Raised when cert status returned is unknown."""
    def __init__(self, status):
        super(CertificateStatusNotSupported, self).__init__(
            u._("Certificate status of {status} not "
                "supported").format(status=status)
        )
        # Keep the offending status available to handlers.
        self.status = status


class CertificateGeneralException(exception.BarbicanException):
    """Raised when a system fault has occurred."""
    def __init__(self, reason=u._('Unknown')):
        super(CertificateGeneralException, self).__init__(
            u._('Problem seen during certificate processing - '
                'Reason: {reason}').format(reason=reason)
        )
        # Keep the raw reason available to handlers.
        self.reason = reason


class CertificateStatusClientDataIssue(exception.BarbicanHTTPException):
    """Raised when the CA has encountered an issue with request data."""

    client_message = ""
    status_code = 400

    def __init__(self, reason=u._('Unknown')):
        super(CertificateStatusClientDataIssue, self).__init__(
            u._('Problem with data in certificate request - '
                'Reason: {reason}').format(reason=reason)
        )
        # Expose the full message to API clients (400-level error).
        self.client_message = self.message


class CertificateStatusInvalidOperation(exception.BarbicanHTTPException):
    """Raised when an invalid operation was requested of the CA."""

    client_message = ""
    status_code = 400

    def __init__(self, reason=u._('Unknown')):
        super(CertificateStatusInvalidOperation, self).__init__(
            u._('Invalid operation requested - '
                'Reason: {reason}').format(reason=reason)
        )
        # Expose the full message to API clients (400-level error).
        self.client_message = self.message


@six.add_metaclass(abc.ABCMeta)
class CertificateEventPluginBase(object):
    """Base class for certificate
eventing plugins. This class is the base plugin contract for issuing certificate related events from Barbican. """ @abc.abstractmethod def notify_certificate_is_ready( self, project_id, order_ref, container_ref): """Notify that a certificate has been generated and is ready to use. :param project_id: Project ID associated with this certificate :param order_ref: HATEOAS reference URI to the submitted Barbican Order :param container_ref: HATEOAS reference URI to the Container storing the certificate :returns: None """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def notify_ca_is_unavailable( self, project_id, order_ref, error_msg, retry_in_msec): """Notify that the certificate authority (CA) isn't available. :param project_id: Project ID associated with this order :param order_ref: HATEOAS reference URI to the submitted Barbican Order :param error_msg: Error message if it is available :param retry_in_msec: Delay before attempting to talk to the CA again. If this is 0, then no attempt will be made. :returns: None """ raise NotImplementedError # pragma: no cover @six.add_metaclass(abc.ABCMeta) class CertificatePluginBase(object): """Base class for certificate plugins. This class is the base plugin contract for certificates. """ @abc.abstractmethod def get_default_ca_name(self): """Get the default CA name Provides a default CA name to be returned in the default get_ca_info() method. If get_ca_info() is overridden (to support multiple CAs for instance), then this method may not be called. In that case, just implement this method to return a dummy variable. If this value is used, it should be unique amongst all the CA plugins. :return: The default CA name :rtype: str """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def get_default_signing_cert(self): """Get the default CA signing cert Provides a default CA signing cert to be returned in the default get_ca_info() method. 
If get_ca_info() is overridden (to support multiple CAs for instance), then this method may not be called. In that case, just implement this method to return a dummy variable. :return: The default CA signing cert :rtype: str """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def get_default_intermediates(self): """Get the default CA certificate chain Provides a default CA certificate to be returned in the default get_ca_info() method. If get_ca_info() is overridden (to support multiple CAs for instance), then this method may not be called. In that case, just implement this method to return a dummy variable. :return: The default CA certificate chain :rtype: str """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def issue_certificate_request(self, order_id, order_meta, plugin_meta, barbican_meta_dto): """Create the initial order :param order_id: ID associated with the order :param order_meta: Dict of meta-data associated with the order :param plugin_meta: Plugin meta-data previously set by calls to this plugin. Plugins may also update/add information here which Barbican will persist on their behalf :param barbican_meta_dto: Data transfer object :class:`BarbicanMetaDTO` containing data added to the request by the Barbican server to provide additional context for processing, but which are not in the original request. For example, the plugin_ca_id :returns: A :class:`ResultDTO` instance containing the result populated by the plugin implementation :rtype: :class:`ResultDTO` """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def modify_certificate_request(self, order_id, order_meta, plugin_meta, barbican_meta_dto): """Update the order meta-data :param order_id: ID associated with the order :param order_meta: Dict of meta-data associated with the order :param plugin_meta: Plugin meta-data previously set by calls to this plugin. 
Plugins may also update/add information here which Barbican will persist on their behalf :param barbican_meta_dto: Data transfer object :class:`BarbicanMetaDTO` containing data added to the request by the Barbican server to provide additional context for processing, but which are not in the original request. For example, the plugin_ca_id :returns: A :class:`ResultDTO` instance containing the result populated by the plugin implementation :rtype: :class:`ResultDTO` """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def cancel_certificate_request(self, order_id, order_meta, plugin_meta, barbican_meta_dto): """Cancel the order :param order_id: ID associated with the order :param order_meta: Dict of meta-data associated with the order. :param plugin_meta: Plugin meta-data previously set by calls to this plugin. Plugins may also update/add information here which Barbican will persist on their behalf :param barbican_meta_dto: Data transfer object :class:`BarbicanMetaDTO` containing data added to the request by the Barbican server to provide additional context for processing, but which are not in the original request. For example, the plugin_ca_id :returns: A :class:`ResultDTO` instance containing the result populated by the plugin implementation :rtype: :class:`ResultDTO` """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def check_certificate_status(self, order_id, order_meta, plugin_meta, barbican_meta_dto): """Check status of the order :param order_id: ID associated with the order :param order_meta: Dict of meta-data associated with the order :param plugin_meta: Plugin meta-data previously set by calls to this plugin. Plugins may also update/add information here which Barbican will persist on their behalf :param barbican_meta_dto: Data transfer object :class:`BarbicanMetaDTO` containing data added to the request by the Barbican server to provide additional context for processing, but which are not in the original request. 
For example, the plugin_ca_id :returns: A :class:`ResultDTO` instance containing the result populated by the plugin implementation :rtype: :class:`ResultDTO` """ raise NotImplementedError # pragma: no cover @abc.abstractmethod def supports(self, certificate_spec): """Returns if the plugin supports the certificate type. :param certificate_spec: Contains details on the certificate to generate the certificate order :returns: boolean indicating if the plugin supports the certificate type """ raise NotImplementedError # pragma: no cover def supported_request_types(self): """Returns the request_types supported by this plugin. :returns: a list of the Barbican-core defined request_types supported by this plugin. """ return [CertificateRequestType.CUSTOM_REQUEST] # pragma: no cover def get_ca_info(self): """Returns information about the CA(s) supported by this plugin. :returns: dictionary indexed by plugin_ca_id. Each entry consists of a dictionary of key-value pairs. An example dictionary containing the current supported attributes is shown below:: { "plugin_ca_id1": { INFO_NAME : "CA name", INFO_DESCRIPTION : "CA user friendly description", INFO_CA_SIGNING_CERT : "base 64 encoded signing cert", INFO_INTERMEDIATES = "base 64 encoded certificate chain" INFO_EXPIRATION = "ISO formatted UTC datetime for when this" "data will become stale" } } """ name = self.get_default_ca_name() expiration = (datetime.datetime.utcnow() + datetime.timedelta(days=CA_INFO_DEFAULT_EXPIRATION_DAYS)) default_info = { INFO_NAME: name, INFO_DESCRIPTION: "Certificate Authority - {0}".format(name), INFO_EXPIRATION: expiration.isoformat() } signing_cert = self.get_default_signing_cert() if signing_cert is not None: default_info[INFO_CA_SIGNING_CERT] = signing_cert intermediates = self.get_default_intermediates() if intermediates is not None: default_info[INFO_INTERMEDIATES] = intermediates return {name: default_info} def supports_create_ca(self): """Returns whether the plugin supports on-the-fly 
generation of subCAs :return: boolean, True if supported, defaults to False """ return False # pragma: no cover def create_ca(self, ca_create_dto): """Creates a subordinate CA upon request This call should only be made if a plugin returns True for supports_create_ca(). :param ca_create_dto: Data transfer object :class:`CACreateDTO` containing data required to generate a subordinate CA. This data includes the subject DN of the new CA signing certificate, a name for the new CA and a reference to the CA that will issue the new subordinate CA's signing certificate, :return: ca_info: Dictionary containing the data needed to create a models.CertificateAuthority object """ raise NotImplementedError # pragma: no cover def delete_ca(self, ca_id): """Deletes a subordinate CA Like the create_ca call, this should only be made if the plugin returns Ture for supports_create_ca() :param ca_id: id for the CA as specified by the plugin :return: None """ raise NotImplementedError # pragma: no cover class CACreateDTO(object): """Class that includes data needed to create a subordinate CA """ def __init__(self, name=None, description=None, subject_dn=None, parent_ca_id=None): """Creates a new CACreateDTO object. :param name: Name for the subordinate CA :param description: Description for the subordinate CA :param subject_dn: Subject DN for the new subordinate CA's signing certificate :param parent_ca_id: ID of the CA which is supposed to sign the subordinate CA's signing certificate. This is ID as known to the plugin (not the Barbican UUID) """ self.name = name self.description = description self.subject_dn = subject_dn self.parent_ca_id = parent_ca_id class CertificateStatus(object): """Defines statuses for certificate request process. 
    In particular:
    CERTIFICATE_GENERATED - Indicates a certificate was created
    WAITING_FOR_CA - Waiting for Certificate authority (CA) to complete order
    CLIENT_DATA_ISSUE_SEEN - Problem was seen with client-provided data
    CA_UNAVAILABLE_FOR_REQUEST - CA was not available, will try again later
    REQUEST_CANCELED - The client or CA cancelled this order
    INVALID_OPERATION - Unexpected error seen processing order
    """
    CERTIFICATE_GENERATED = "certificate generated"
    WAITING_FOR_CA = "waiting for CA"
    CLIENT_DATA_ISSUE_SEEN = "client data issue seen"
    CA_UNAVAILABLE_FOR_REQUEST = "CA unavailable for request"
    REQUEST_CANCELED = "request canceled"
    INVALID_OPERATION = "invalid operation"


class ResultDTO(object):
    """Result data transfer object (DTO).

    An object of this type is returned by most certificate plugin methods, and
    is used to guide follow on processing and to provide status feedback to
    clients.
    """
    def __init__(self, status, status_message=None, certificate=None,
                 intermediates=None, retry_msec=RETRY_MSEC, retry_method=None):
        """Creates a new ResultDTO.

        :param status: Status for cert order
        :param status_message: Message to explain status type.
        :param certificate: Certificate returned from CA to be stored in
                            container
        :param intermediates: Intermediates to be stored in container
        :param retry_msec: Number of milliseconds to wait for retry
        :param retry_method: Method to be called for retry, if None then retry
                             the current method
        """
        self.status = status
        self.status_message = status_message
        self.certificate = certificate
        self.intermediates = intermediates
        # Coerce to int so callers may pass numeric strings/floats.
        self.retry_msec = int(retry_msec)
        self.retry_method = retry_method


class BarbicanMetaDTO(object):
    """Barbican meta data transfer object

    Information needed to process a certificate request that is not specified
    in the original request, and written by Barbican core, that is needed
    by the plugin to process requests.
    """

    def __init__(self, plugin_ca_id=None, generated_csr=None):
        """Creates a new BarbicanMetaDTO.
:param plugin_ca_id: ca_id as known to the plugin :param generated_csr: csr generated in the stored-key case :return: BarbicanMetaDTO """ self.plugin_ca_id = plugin_ca_id self.generated_csr = generated_csr class CertificatePluginManager(named.NamedExtensionManager): def __init__(self, conf=CONF, invoke_args=(), invoke_kwargs={}): self.ca_repo = repos.get_ca_repository() super(CertificatePluginManager, self).__init__( conf.certificate.namespace, conf.certificate.enabled_certificate_plugins, invoke_on_load=False, # Defer creating plugins to utility below. invoke_args=invoke_args, invoke_kwds=invoke_kwargs ) plugin_utils.instantiate_plugins( self, invoke_args, invoke_kwargs) def get_plugin(self, certificate_spec): """Gets a supporting certificate plugin. :param certificate_spec: Contains details on the certificate to generate the certificate order :returns: CertificatePluginBase plugin implementation """ request_type = certificate_spec.get( REQUEST_TYPE, CertificateRequestType.CUSTOM_REQUEST) for plugin in plugin_utils.get_active_plugins(self): supported_request_types = plugin.supported_request_types() if request_type not in supported_request_types: continue if plugin.supports(certificate_spec): return plugin raise CertificatePluginNotFound() def get_plugin_by_name(self, plugin_name): """Gets a supporting certificate plugin. :param plugin_name: Name of the plugin to invoke :returns: CertificatePluginBase plugin implementation """ for plugin in plugin_utils.get_active_plugins(self): if utils.generate_fullname_for(plugin) == plugin_name: return plugin raise CertificatePluginNotFound(plugin_name) def get_plugin_by_ca_id(self, ca_id): """Gets a plugin based on the ca_id. 
:param ca_id: id for CA in the CertificateAuthorities table :returns: CertificatePluginBase plugin implementation """ ca = self.ca_repo.get(ca_id, suppress_exception=True) if not ca: raise CertificatePluginNotFoundForCAID(ca_id) return self.get_plugin_by_name(ca.plugin_name) def refresh_ca_table(self): """Refreshes the CertificateAuthority table.""" updates_made = False for plugin in plugin_utils.get_active_plugins(self): plugin_name = utils.generate_fullname_for(plugin) cas, offset, limit, total = self.ca_repo.get_by_create_date( plugin_name=plugin_name, suppress_exception=True) if total < 1: # if no entries are found, then the plugin has not yet been # queried or that plugin's entries have expired. # Most of the time, this will be a no-op for plugins. self.update_ca_info(plugin) updates_made = True if updates_made: # commit to DB to avoid async issues with different threads repos.commit() def update_ca_info(self, cert_plugin): """Update the CA info for a particular plugin.""" plugin_name = utils.generate_fullname_for(cert_plugin) try: new_ca_infos = cert_plugin.get_ca_info() except Exception as e: # The plugin gave an invalid CA, log and return LOG.error(u._LE("ERROR getting CA from plugin: %s"), e.message) return old_cas, offset, limit, total = self.ca_repo.get_by_create_date( plugin_name=plugin_name, suppress_exception=True, show_expired=True) if old_cas: for old_ca in old_cas: plugin_ca_id = old_ca.plugin_ca_id if plugin_ca_id not in new_ca_infos.keys(): # remove CAs that no longer exist self._delete_ca(old_ca) else: # update those that still exist self.ca_repo.update_entity( old_ca, new_ca_infos[plugin_ca_id]) old_ids = set([ca.plugin_ca_id for ca in old_cas]) else: old_ids = set() new_ids = set(new_ca_infos.keys()) # add new CAs add_ids = new_ids - old_ids for add_id in add_ids: try: self._add_ca(plugin_name, add_id, new_ca_infos[add_id]) except Exception as e: # The plugin gave an invalid CA, log and continue LOG.error(u._LE("ERROR adding CA from plugin: 
%s"), e.message) def _add_ca(self, plugin_name, plugin_ca_id, ca_info): parsed_ca = dict(ca_info) parsed_ca['plugin_name'] = plugin_name parsed_ca['plugin_ca_id'] = plugin_ca_id new_ca = models.CertificateAuthority(parsed_ca) self.ca_repo.create_from(new_ca) def _delete_ca(self, ca): self.ca_repo.delete_entity_by_id(ca.id, None) class _CertificateEventPluginManager(named.NamedExtensionManager, CertificateEventPluginBase): """Provides services for certificate event plugins. This plugin manager differs from others in that it implements the same contract as the plugins that it manages. This allows eventing operations to occur on all installed plugins (with this class acting as a composite plugin), rather than just eventing via an individual plugin. Each time this class is initialized it will load a new instance of each enabled plugin. This is undesirable, so rather than initializing a new instance of this class use the EVENT_PLUGIN_MANAGER at the module level. """ def __init__(self, conf=CONF, invoke_args=(), invoke_kwargs={}): super(_CertificateEventPluginManager, self).__init__( conf.certificate_event.namespace, conf.certificate_event.enabled_certificate_event_plugins, invoke_on_load=False, # Defer creating plugins to utility below. invoke_args=invoke_args, invoke_kwds=invoke_kwargs ) plugin_utils.instantiate_plugins( self, invoke_args, invoke_kwargs) def get_plugin_by_name(self, plugin_name): """Gets a supporting certificate event plugin. 
:returns: CertificateEventPluginBase plugin implementation """ for plugin in plugin_utils.get_active_plugins(self): if utils.generate_fullname_for(plugin) == plugin_name: return plugin raise CertificateEventPluginNotFound(plugin_name) def notify_certificate_is_ready( self, project_id, order_ref, container_ref): self._invoke_certificate_plugins( 'notify_certificate_is_ready', project_id, order_ref, container_ref) def notify_ca_is_unavailable( self, project_id, order_ref, error_msg, retry_in_msec): self._invoke_certificate_plugins( 'notify_ca_is_unavailable', project_id, order_ref, error_msg, retry_in_msec) def _invoke_certificate_plugins(self, method, *args, **kwargs): """Invoke same function on plugins as calling function.""" active_plugins = plugin_utils.get_active_plugins(self) if not active_plugins: raise CertificateEventPluginNotFound() for plugin in active_plugins: getattr(plugin, method)(*args, **kwargs) EVENT_PLUGIN_MANAGER = _CertificateEventPluginManager() barbican-2.0.0/barbican/plugin/interface/secret_store.py0000664000567000056710000005312312701405673024476 0ustar jenkinsjenkins00000000000000# Copyright (c) 2014 Johns Hopkins University Applied Physics Laboratory # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
import abc from oslo_config import cfg import six from stevedore import named from barbican.common import config from barbican.common import exception from barbican.common import utils from barbican import i18n as u from barbican.plugin.util import utils as plugin_utils _SECRET_STORE = None CONF = config.new_config() DEFAULT_PLUGIN_NAMESPACE = 'barbican.secretstore.plugin' DEFAULT_PLUGINS = ['store_crypto'] store_opt_group = cfg.OptGroup(name='secretstore', title='Secret Store Plugin Options') store_opts = [ cfg.StrOpt('namespace', default=DEFAULT_PLUGIN_NAMESPACE, help=u._('Extension namespace to search for plugins.') ), cfg.MultiStrOpt('enabled_secretstore_plugins', default=DEFAULT_PLUGINS, help=u._('List of secret store plugins to load.') ) ] CONF.register_group(store_opt_group) CONF.register_opts(store_opts, group=store_opt_group) config.parse_args(CONF) class SecretStorePluginNotFound(exception.BarbicanHTTPException): """Raised when no plugins are installed.""" client_message = u._("No plugin was found that could support your request") status_code = 400 def __init__(self, plugin_name=None): if plugin_name: message = u._('Secret store plugin "{name}"' ' not found.').format(name=plugin_name) else: message = u._("Secret store plugin not found.") super(SecretStorePluginNotFound, self).__init__(message) class SecretStoreSupportedPluginNotFound(exception.BarbicanHTTPException): """Raised if no plugins are found that support the requested operation.""" client_message = u._("Secret store supported plugin not found.") status_code = 400 def __init__(self, plugin_name=None): message = u._("Secret store plugin not found for requested operation.") super(SecretStoreSupportedPluginNotFound, self).__init__( message) class SecretContentTypeNotSupportedException(exception.BarbicanHTTPException): """Raised when support for payload content type is not available.""" status_code = 400 def __init__(self, content_type): super(SecretContentTypeNotSupportedException, self).__init__( 
u._("A Content-Type of '{content_type}' for secrets is " "not supported").format( content_type=content_type) ) self.content_type = content_type self.client_message = u._( "content-type of '{content_type}' not supported").format( content_type=content_type) class SecretContentEncodingNotSupportedException( exception.BarbicanHTTPException): """Raised when support for payload content encoding is not available.""" status_code = 400 def __init__(self, content_encoding): super(SecretContentEncodingNotSupportedException, self).__init__( u._("Secret Content-Encoding of '{content_encoding}' " "not supported").format( content_encoding=content_encoding) ) self.content_encoding = content_encoding self.client_message = u._( "content-encoding of '{content_encoding}' not supported").format( content_encoding=content_encoding) class SecretNoPayloadProvidedException(exception.BarbicanException): """Raised when secret information is not provided.""" def __init__(self): super(SecretNoPayloadProvidedException, self).__init__( u._('No secret information provided to encrypt.') ) class SecretContentEncodingMustBeBase64(exception.BarbicanHTTPException): """Raised when encoding must be base64.""" client_message = u._("Text-based binary secret payloads must " "specify a content-encoding of 'base64'") status_code = 400 def __init__(self): super(SecretContentEncodingMustBeBase64, self).__init__( u._("Encoding type must be 'base64' for text-based payloads.") ) class SecretGeneralException(exception.BarbicanException): """Raised when a system fault has occurred.""" def __init__(self, reason=u._('Unknown')): super(SecretGeneralException, self).__init__( u._('Problem seen during crypto processing - ' 'Reason: {reason}').format(reason=reason) ) self.reason = reason class SecretPayloadDecodingError(exception.BarbicanHTTPException): """Raised when payload could not be decoded.""" client_message = u._("Problem decoding payload") status_code = 400 def __init__(self): super(SecretPayloadDecodingError, 
              self).__init__(
            u._("Problem decoding payload")
        )


class SecretAcceptNotSupportedException(exception.BarbicanException):
    """Raised when requested decrypted content-type is not available."""

    def __init__(self, accept):
        # :param accept: the unsupported Accept header value requested.
        super(SecretAcceptNotSupportedException, self).__init__(
            u._("Secret Accept of '{accept}' not supported").format(
                accept=accept)
        )
        self.accept = accept


class SecretNotFoundException(exception.BarbicanHTTPException):
    """Raised when secret information could not be located."""

    client_message = u._("Not Found. Sorry but your secret is in another "
                         "castle")
    status_code = 404

    def __init__(self):
        super(SecretNotFoundException, self).__init__(
            u._('No secret information found'))


class SecretAlgorithmNotSupportedException(exception.BarbicanHTTPException):
    """Raised when support for an algorithm is not available."""

    client_message = u._("Requested algorithm is not supported")
    status_code = 400

    def __init__(self, algorithm):
        # :param algorithm: the unsupported algorithm name requested.
        super(SecretAlgorithmNotSupportedException, self).__init__(
            u._("Secret algorithm of '{algorithm}' not supported").format(
                algorithm=algorithm)
        )
        self.algorithm = algorithm


class SecretStorePluginsNotConfigured(exception.BarbicanException):
    """Raised when there are no secret store plugins configured."""

    def __init__(self):
        super(SecretStorePluginsNotConfigured, self).__init__(
            u._('No secret store plugins have been configured')
        )


class StorePluginNotAvailableOrMisconfigured(exception.BarbicanException):
    """Raised when a plugin that was previously used can not be found."""

    def __init__(self, plugin_name):
        # :param plugin_name: name recorded in the datum's stored metadata.
        super(StorePluginNotAvailableOrMisconfigured, self).__init__(
            u._("The requested Store Plugin {plugin_name} is not "
                "currently available. This is probably a server "
                "misconfiguration.").format(
                plugin_name=plugin_name)
        )
        self.plugin_name = plugin_name


# NOTE(review): in the two constant classes below, the triple-quoted strings
# preceding each attribute are bare expression statements, not attached
# docstrings; they serve purely as in-source commentary for each constant.
class SecretType(object):

    """Constant to define the symmetric key type.

    Used by getSecret to retrieve a symmetric key.
    """
    SYMMETRIC = "symmetric"

    """Constant to define the public key type.

    Used by getSecret to retrieve a public key.
    """
    PUBLIC = "public"

    """Constant to define the private key type.

    Used by getSecret to retrieve a private key.
    """
    PRIVATE = "private"

    """Constant to define the passphrase type.

    Used by getSecret to retrieve a passphrase."""
    PASSPHRASE = "passphrase"

    """Constant to define the certificate type.

    Used by getSecret to retrieve a certificate."""
    CERTIFICATE = "certificate"

    """Constant to define the opaque date type.

    Used by getSecret to retrieve opaque data. Opaque data can be any kind
    of data. This data type signals to Barbican to just store the
    information and do not worry about the format or encoding. This is the
    default type if no type is specified by the user."""
    OPAQUE = "opaque"


class KeyAlgorithm(object):

    """Constant for the Diffie Hellman algorithm."""
    DIFFIE_HELLMAN = "diffie_hellman"

    """Constant for the DSA algorithm."""
    DSA = "dsa"

    """Constant for the RSA algorithm."""
    RSA = "rsa"

    """Constant for the Elliptic Curve algorithm."""
    EC = "ec"

    """Constant for the HMACSHA1 algorithm."""
    HMACSHA1 = "hmacsha1"

    """Constant for the HMACSHA256 algorithm."""
    HMACSHA256 = "hmacsha256"

    """Constant for the HMACSHA384 algorithm."""
    HMACSHA384 = "hmacsha384"

    """Constant for the HMACSHA512 algorithm."""
    HMACSHA512 = "hmacsha512"

    """List of asymmetric algorithms"""
    ASYMMETRIC_ALGORITHMS = [DIFFIE_HELLMAN, DSA, RSA, EC]

    """Constant for the AES algorithm."""
    AES = "aes"

    """Constant for the DES algorithm."""
    DES = "des"

    """Constant for the DESede (triple-DES) algorithm."""
    DESEDE = "desede"

    """List of symmetric algorithms"""
    SYMMETRIC_ALGORITHMS = [AES, DES, DESEDE,
                            HMACSHA1, HMACSHA256, HMACSHA384, HMACSHA512]


class KeySpec(object):
    """This object specifies the algorithm and bit length for a key."""

    def __init__(self, alg=None, bit_length=None, mode=None, passphrase=None):
        """Creates a new KeySpec.
        :param alg:algorithm for the key
        :param bit_length:bit length of the key
        :param mode:algorithm mode for the key
        :param passphrase:passphrase for the private_key
        """
        self.alg = alg
        self.bit_length = bit_length
        self.mode = mode  # TODO(john-wood-w) Paul, is 'mode' required?
        self.passphrase = passphrase


class SecretDTO(object):
    """This object is a secret data transfer object (DTO).

    This object encapsulates a key and attributes about the key. The
    attributes include a KeySpec that contains the algorithm and bit length.
    The attributes also include information on the encoding of the key.
    """

    # TODO(john-wood-w) Remove 'content_type' once secret normalization work
    # is completed.
    def __init__(self, type, secret, key_spec, content_type,
                 transport_key=None):
        """Creates a new SecretDTO.

        The secret is stored in the secret parameter. In the future this DTO
        may include compression and key wrapping information.

        NOTE: the 'type' parameter name shadows the builtin, but it is part
        of the public keyword interface and cannot be renamed safely.

        :param type: SecretType for secret
        :param secret: secret, as a base64-encoded string
        :param key_spec: KeySpec key specifications
        :param content_type: Content type of the secret, one of MIME
            types such as 'text/plain' or 'application/octet-stream'
        :param transport_key: presence of this parameter indicates that the
            secret has been encrypted using a transport key.  The transport
            key is a base64 encoded x509 transport certificate.
        """
        self.type = type or SecretType.OPAQUE
        self.secret = secret
        self.key_spec = key_spec
        self.content_type = content_type
        self.transport_key = transport_key


class AsymmetricKeyMetadataDTO(object):
    """This DTO encapsulates metadata(s) for asymmetric key components.

    These components are private_key_meta, public_key_meta and
    passphrase_meta.
    """

    def __init__(self, private_key_meta=None,
                 public_key_meta=None,
                 passphrase_meta=None):
        """Constructor for AsymmetricKeyMetadataDTO

        :param private_key_meta: private key metadata
        :param public_key_meta: public key metadata
        :param passphrase_meta: passphrase key metadata
        """
        self.private_key_meta = private_key_meta
        self.public_key_meta = public_key_meta
        self.passphrase_meta = passphrase_meta


@six.add_metaclass(abc.ABCMeta)
class SecretStoreBase(object):
    # Abstract contract all secret store plugins must implement.

    @abc.abstractmethod
    def generate_symmetric_key(self, key_spec):
        """Generate a new symmetric key and store it.

        Generates a new symmetric key and stores it in the secret store.
        A dictionary is returned that contains metadata about the newly
        created symmetric key. The dictionary of metadata is stored by
        Barbican and passed into other methods to aid the plugins. This can
        be useful for plugins that generate a unique ID in the external data
        store and use it to retrieve the key in the future. The returned
        dictionary may be empty if the SecretStore does not require it.

        :param key_spec: KeySpec that contains details on the type of key to
            generate
        :returns: an optional dictionary containing metadata about the key
        """
        raise NotImplementedError  # pragma: no cover

    @abc.abstractmethod
    def generate_asymmetric_key(self, key_spec):
        """Generate a new asymmetric key pair and store it.

        Generates a new asymmetric key pair and stores it in the secret
        store. An object of type AsymmetricKeyMetadataDTO will be returned
        containing attributes of metadata for newly created key pairs. The
        metadata is stored by Barbican and passed into other methods to aid
        the plugins. This can be useful for plugins that generate a unique
        ID in the external data store and use it to retrieve the key pairs
        in the future.

        :param key_spec: KeySpec that contains details on the type of key to
            generate
        :returns: An object of type AsymmetricKeyMetadataDTO containing
            metadata about the key pair.
        """
        raise NotImplementedError  # pragma: no cover

    @abc.abstractmethod
    def store_secret(self, secret_dto):
        """Stores a key.

        The SecretDTO contains the bytes of the secret and properties of the
        secret. The SecretStore retrieves the secret bytes, stores them, and
        returns a dictionary of metadata about the secret.  This can be
        useful for plugins that generate a unique ID in the external data
        store and use it to retrieve the secret in the future. The returned
        dictionary may be empty if the SecretStore does not require it.

        :param secret_dto: SecretDTO for secret
        :returns: an optional dictionary containing metadata about the secret
        """
        raise NotImplementedError  # pragma: no cover

    @abc.abstractmethod
    def get_secret(self, secret_type, secret_metadata):
        """Retrieves a secret from the secret store.

        Retrieves a secret from the secret store and returns a SecretDTO
        that contains the secret.

        The secret_metadata parameter is the metadata returned from one of
        the generate or store methods. This data is used by the plugins to
        retrieve the key.

        The secret_type parameter may be useful for secret stores to know
        the expected format of the secret. For instance if the type is
        SecretDTO.PRIVATE then a PKCS8 structure is returned. This way
        secret stores do not need to manage the secret type on their own.

        :param secret_type: secret type
        :param secret_metadata: secret metadata
        :returns: SecretDTO that contains secret
        """
        raise NotImplementedError  # pragma: no cover

    @abc.abstractmethod
    def generate_supports(self, key_spec):
        """Returns a boolean indicating if the secret type is supported.

        This checks if the algorithm and bit length are supported by the
        generate methods. This is useful to call before calling
        generate_symmetric_key or generate_asymetric_key to see if the key
        type is supported before trying to generate it.

        :param key_spec: KeySpec that contains details on the algorithm and
            bit length
        :returns: boolean indicating if the algorithm is supported
        """
        raise NotImplementedError  # pragma: no cover

    @abc.abstractmethod
    def delete_secret(self, secret_metadata):
        """Deletes a secret from the secret store.

        Deletes a secret from a secret store. It can no longer be referenced
        after this call.

        :param secret_metadata: secret_metadata
        """
        raise NotImplementedError  # pragma: no cover

    @abc.abstractmethod
    def store_secret_supports(self, key_spec):
        """Returns a boolean indicating if the secret can be stored.

        Checks if the secret store can store the secret, give the attributes
        of the secret in the KeySpec. For example, some plugins may need to
        know the attributes in order to store the secret, but other plugins
        may be able to store the secret as a blob if no attributes are
        given.

        :param key_spec: KeySpec for the secret
        :returns: a boolean indicating if the secret can be stored
        """
        raise NotImplementedError  # pragma: no cover

    # Concrete default: plugins without transport-key support inherit this.
    def get_transport_key(self):
        """Gets a transport key.

        Returns the current valid transport key associated with this plugin.
        The transport key is expected to be a base64 encoded x509
        certificate containing a public key. Admins are responsible for
        deleting old keys from the database using the DELETE method on the
        TransportKey resource.

        By default, returns None.  Plugins that support transport key
        wrapping should override this method.
        """
        return None

    # Concrete default: plugins without transport-key support inherit this.
    def is_transport_key_current(self, transport_key):
        """Determines if the provided transport key is the current valid key

        Returns true if the transport key is the current valid transport
        key. If the key is not valid, then barbican core will request a new
        transport key from the plugin.

        Returns False by default.  Plugins that support transport key
        wrapping should override this method.
""" return False def _enforce_extensions_configured(plugin_related_function): def _check_plugins_configured(self, *args, **kwargs): if not self.extensions: raise SecretStorePluginsNotConfigured() return plugin_related_function(self, *args, **kwargs) return _check_plugins_configured class SecretStorePluginManager(named.NamedExtensionManager): def __init__(self, conf=CONF, invoke_args=(), invoke_kwargs={}): super(SecretStorePluginManager, self).__init__( conf.secretstore.namespace, conf.secretstore.enabled_secretstore_plugins, invoke_on_load=False, # Defer creating plugins to utility below. invoke_args=invoke_args, invoke_kwds=invoke_kwargs ) plugin_utils.instantiate_plugins( self, invoke_args, invoke_kwargs) @_enforce_extensions_configured def get_plugin_store(self, key_spec, plugin_name=None, transport_key_needed=False): """Gets a secret store plugin. :param: plugin_name: set to plugin_name to get specific plugin :param: key_spec: KeySpec of key that will be stored :param: transport_key_needed: set to True if a transport key is required. :returns: SecretStoreBase plugin implementation """ active_plugins = plugin_utils.get_active_plugins(self) if plugin_name is not None: for plugin in active_plugins: if utils.generate_fullname_for(plugin) == plugin_name: return plugin raise SecretStorePluginNotFound(plugin_name) if not transport_key_needed: for plugin in active_plugins: if plugin.store_secret_supports(key_spec): return plugin else: for plugin in active_plugins: if (plugin.get_transport_key() is not None and plugin.store_secret_supports(key_spec)): return plugin raise SecretStoreSupportedPluginNotFound() @_enforce_extensions_configured def get_plugin_retrieve_delete(self, plugin_name): """Gets a secret retrieve/delete plugin. If this function is being called, it is because we are trying to retrieve or delete an already stored secret. Thus, the plugin name is actually gotten from the plugin metadata that has already been stored in the database. 
So, in this case, if this plugin is not available, this might be due to a server misconfiguration. :returns: SecretStoreBase plugin implementation :raises: StorePluginNotAvailableOrMisconfigured: If the plugin wasn't found it's because the plugin parameters were not properly configured on the database side. """ for plugin in plugin_utils.get_active_plugins(self): if utils.generate_fullname_for(plugin) == plugin_name: return plugin raise StorePluginNotAvailableOrMisconfigured(plugin_name) @_enforce_extensions_configured def get_plugin_generate(self, key_spec): """Gets a secret generate plugin. :param key_spec: KeySpec that contains details on the type of key to generate :returns: SecretStoreBase plugin implementation """ for plugin in plugin_utils.get_active_plugins(self): if plugin.generate_supports(key_spec): return plugin raise SecretStoreSupportedPluginNotFound() def get_manager(): global _SECRET_STORE if not _SECRET_STORE: _SECRET_STORE = SecretStorePluginManager() return _SECRET_STORE barbican-2.0.0/barbican/plugin/store_crypto.py0000664000567000056710000003077712701405674022604 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
import base64

from barbican.common import config
from barbican.common import utils
from barbican.model import models
from barbican.model import repositories
from barbican.plugin.crypto import crypto
from barbican.plugin.crypto import manager
from barbican.plugin.interface import secret_store as sstore

CONF = config.new_config()
config.parse_args(CONF)


class StoreCryptoContext(object):
    """Context for crypto-adapter secret store plugins.

    This context object allows access to core Barbican resources such as
    datastore models.
    """
    def __init__(
            self,
            project_model,
            secret_model=None,
            private_secret_model=None,
            public_secret_model=None,
            passphrase_secret_model=None,
            content_type=None):
        # Any of the secret models may be None depending on the operation
        # (plain store/generate vs. asymmetric generation).
        self.secret_model = secret_model
        self.private_secret_model = private_secret_model
        self.public_secret_model = public_secret_model
        self.passphrase_secret_model = passphrase_secret_model
        self.project_model = project_model
        self.content_type = content_type


class StoreCryptoAdapterPlugin(object):
    """Secret store plugin adapting to 'crypto' devices as backend.

    HSM-style 'crypto' devices perform encryption/decryption processing but
    do not actually store the encrypted information, unlike other 'secret
    store' plugins that do provide storage. Hence, this adapter bridges
    between these two plugin styles, providing Barbican persistence services
    as needed to store information.

    Note that this class does not inherit from SecretStoreBase, as it also
    requires access to lower-level datastore entities such as KEKDatum. This
    additional information is passed in via the 'context' parameter.
    """

    def __init__(self):
        super(StoreCryptoAdapterPlugin, self).__init__()

    def store_secret(self, secret_dto, context):
        """Store a secret.

        :param secret_dto: SecretDTO for secret
        :param context: StoreCryptoContext for secret
        :returns: an optional dictionary containing metadata about the secret
        """

        # Find HSM-style 'crypto' plugin.
        encrypting_plugin = manager.get_manager().get_plugin_store_generate(
            crypto.PluginSupportTypes.ENCRYPT_DECRYPT
        )

        # Find or create a key encryption key metadata.
        kek_datum_model, kek_meta_dto = _find_or_create_kek_objects(
            encrypting_plugin, context.project_model)

        # Secrets are base64 encoded before being passed to the secret
        # stores.
        secret_bytes = base64.b64decode(secret_dto.secret)

        encrypt_dto = crypto.EncryptDTO(secret_bytes)

        # Enhance the context with content_type, This is needed to build
        # datum_model to store
        if not context.content_type:
            context.content_type = secret_dto.content_type

        # Create an encrypted datum instance and add the encrypted
        # cyphertext.
        response_dto = encrypting_plugin.encrypt(
            encrypt_dto, kek_meta_dto, context.project_model.external_id
        )

        # Convert binary data into a text-based format.
        _store_secret_and_datum(
            context,
            context.secret_model,
            kek_datum_model,
            response_dto)

        return None

    def get_secret(self, secret_type, metadata, context):
        """Retrieve a secret.

        :param secret_type: secret type
        :param metadata: secret metadata
        :param context: StoreCryptoContext for secret
        :returns: SecretDTO that contains secret
        :raises sstore.SecretNotFoundException: when no encrypted data is
            associated with the context's secret model
        """
        if (not context.secret_model or
                not context.secret_model.encrypted_data):
            raise sstore.SecretNotFoundException()

        # TODO(john-wood-w) Need to revisit 1 to many datum relationship.
        datum_model = context.secret_model.encrypted_data[0]

        # Find HSM-style 'crypto' plugin.
        decrypting_plugin = manager.get_manager().get_plugin_retrieve(
            datum_model.kek_meta_project.plugin_name)

        # wrap the KEKDatum instance in our DTO
        kek_meta_dto = crypto.KEKMetaDTO(datum_model.kek_meta_project)

        # Convert from text-based storage format to binary.
        encrypted = base64.b64decode(datum_model.cypher_text)
        decrypt_dto = crypto.DecryptDTO(encrypted)

        # Decrypt the secret.
        secret = decrypting_plugin.decrypt(decrypt_dto,
                                           kek_meta_dto,
                                           datum_model.kek_meta_extended,
                                           context.project_model.external_id)
        # Secrets leave this adapter base64 encoded, mirroring store_secret.
        secret = base64.b64encode(secret)
        key_spec = sstore.KeySpec(alg=context.secret_model.algorithm,
                                  bit_length=context.secret_model.bit_length,
                                  mode=context.secret_model.mode)

        return sstore.SecretDTO(secret_type, secret, key_spec,
                                datum_model.content_type)

    def delete_secret(self, secret_metadata):
        """Delete a secret."""
        # No-op: the crypto backend holds no stored secret material to
        # remove; datastore cleanup happens elsewhere in Barbican core.
        pass

    def generate_symmetric_key(self, key_spec, context):
        """Generate a symmetric key.

        :param key_spec: KeySpec that contains details on the type of key to
            generate
        :param context: StoreCryptoContext for secret
        :returns: a dictionary that contains metadata about the key
        :raises sstore.SecretAlgorithmNotSupportedException: if key_spec.alg
            does not map to symmetric key generation
        """

        # Find HSM-style 'crypto' plugin.
        plugin_type = _determine_generation_type(key_spec.alg)
        if crypto.PluginSupportTypes.SYMMETRIC_KEY_GENERATION != plugin_type:
            raise sstore.SecretAlgorithmNotSupportedException(key_spec.alg)
        generating_plugin = manager.get_manager().get_plugin_store_generate(
            plugin_type, key_spec.alg, key_spec.bit_length, key_spec.mode)

        # Find or create a key encryption key metadata.
        kek_datum_model, kek_meta_dto = _find_or_create_kek_objects(
            generating_plugin, context.project_model)

        # Create an encrypted datum instance and add the created cypher
        # text.
        generate_dto = crypto.GenerateDTO(key_spec.alg,
                                          key_spec.bit_length,
                                          key_spec.mode, None)
        # Create the encrypted meta.
        response_dto = generating_plugin.generate_symmetric(
            generate_dto, kek_meta_dto, context.project_model.external_id)

        # Convert binary data into a text-based format.
        _store_secret_and_datum(
            context,
            context.secret_model,
            kek_datum_model,
            response_dto)

        return None

    def generate_asymmetric_key(self, key_spec, context):
        """Generates an asymmetric key.

        Returns a AsymmetricKeyMetadataDTO object containing
        metadata(s) for asymmetric key components. The metadata
        can be used to retrieve individual components of
        asymmetric key pair.
        """

        plugin_type = _determine_generation_type(key_spec.alg)
        if crypto.PluginSupportTypes.ASYMMETRIC_KEY_GENERATION != plugin_type:
            raise sstore.SecretAlgorithmNotSupportedException(key_spec.alg)

        generating_plugin = manager.get_manager().get_plugin_store_generate(
            plugin_type, key_spec.alg, key_spec.bit_length, None)

        # Find or create a key encryption key metadata.
        kek_datum_model, kek_meta_dto = _find_or_create_kek_objects(
            generating_plugin, context.project_model)

        generate_dto = crypto.GenerateDTO(key_spec.alg,
                                          key_spec.bit_length,
                                          None, key_spec.passphrase)

        # Create the encrypted meta.
        private_key_dto, public_key_dto, passwd_dto = (
            generating_plugin.generate_asymmetric(
                generate_dto, kek_meta_dto, context.project_model.external_id
            )
        )

        _store_secret_and_datum(
            context,
            context.private_secret_model,
            kek_datum_model,
            private_key_dto)

        _store_secret_and_datum(
            context,
            context.public_secret_model,
            kek_datum_model,
            public_key_dto)

        # Passphrase datum is only stored when one was both requested and
        # produced by the plugin.
        if key_spec.passphrase and passwd_dto:
            _store_secret_and_datum(
                context,
                context.passphrase_secret_model,
                kek_datum_model,
                passwd_dto)

        return sstore.AsymmetricKeyMetadataDTO()

    def generate_supports(self, key_spec):
        """Key generation supported?

        Specifies whether the plugin supports key generation with the
        given key_spec.
        """
        return (key_spec and (key_spec.alg.lower() in
                sstore.KeyAlgorithm.ASYMMETRIC_ALGORITHMS or
                key_spec.alg.lower() in
                sstore.KeyAlgorithm.SYMMETRIC_ALGORITHMS))

    def store_secret_supports(self, key_spec):
        """Key storage supported?
Specifies whether the plugin supports storage of the secret given the attributes included in the KeySpec """ return True def _determine_generation_type(algorithm): """Determines the type based on algorithm.""" if not algorithm: raise sstore.SecretAlgorithmNotSupportedException(algorithm) symmetric_algs = crypto.PluginSupportTypes.SYMMETRIC_ALGORITHMS asymmetric_algs = crypto.PluginSupportTypes.ASYMMETRIC_ALGORITHMS if algorithm.lower() in symmetric_algs: return crypto.PluginSupportTypes.SYMMETRIC_KEY_GENERATION elif algorithm.lower() in asymmetric_algs: return crypto.PluginSupportTypes.ASYMMETRIC_KEY_GENERATION else: raise sstore.SecretAlgorithmNotSupportedException(algorithm) def _find_or_create_kek_objects(plugin_inst, project_model): kek_repo = repositories.get_kek_datum_repository() # Find or create a key encryption key. full_plugin_name = utils.generate_fullname_for(plugin_inst) kek_datum_model = kek_repo.find_or_create_kek_datum(project_model, full_plugin_name) # Bind to the plugin's key management. # TODO(jwood): Does this need to be in a critical section? Should the # bind operation just be declared idempotent in the plugin contract? kek_meta_dto = crypto.KEKMetaDTO(kek_datum_model) if not kek_datum_model.bind_completed: kek_meta_dto = plugin_inst.bind_kek_metadata(kek_meta_dto) # By contract, enforce that plugins return a # (typically modified) DTO. if kek_meta_dto is None: raise crypto.CryptoKEKBindingException(full_plugin_name) _indicate_bind_completed(kek_meta_dto, kek_datum_model) kek_repo.save(kek_datum_model) return kek_datum_model, kek_meta_dto def _store_secret_and_datum( context, secret_model, kek_datum_model, generated_dto): # Create Secret entities in data store. 
if not secret_model.id: secret_model.project_id = context.project_model.id repositories.get_secret_repository().create_from(secret_model) # setup and store encrypted datum datum_model = models.EncryptedDatum(secret_model, kek_datum_model) datum_model.content_type = context.content_type datum_model.cypher_text = base64.b64encode(generated_dto.cypher_text) datum_model.kek_meta_extended = generated_dto.kek_meta_extended datum_model.secret_id = secret_model.id repositories.get_encrypted_datum_repository().create_from( datum_model) def _indicate_bind_completed(kek_meta_dto, kek_datum): """Updates the supplied kek_datum instance Updates the the kek_datum per the contents of the supplied kek_meta_dto instance. This function is typically used once plugins have had a chance to bind kek_meta_dto to their crypto systems. :param kek_meta_dto: :param kek_datum: :return: None """ kek_datum.bind_completed = True kek_datum.algorithm = kek_meta_dto.algorithm kek_datum.bit_length = kek_meta_dto.bit_length kek_datum.mode = kek_meta_dto.mode kek_datum.plugin_meta = kek_meta_dto.plugin_meta barbican-2.0.0/barbican/cmd/0000775000567000056710000000000012701406024016713 5ustar jenkinsjenkins00000000000000barbican-2.0.0/barbican/cmd/pkcs11_kek_rewrap.py0000775000567000056710000001365412701405673022626 0ustar jenkinsjenkins00000000000000#!/usr/bin/env python # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import argparse
import base64
import json
import traceback

import sqlalchemy
from sqlalchemy import orm
from sqlalchemy.orm import scoping

from barbican.common import utils
from barbican.model import models
from barbican.plugin.crypto import p11_crypto

# Use config values from p11_crypto
CONF = p11_crypto.CONF


class KekRewrap(object):
    """Re-wraps every project KEK under the currently configured MKEK.

    Each KEKDatum row stored by the PKCS#11 plugin is unwrapped with the
    master keys named in its plugin metadata, re-wrapped with the new
    master key, re-HMAC'd, and its metadata updated in the database.
    """

    def __init__(self, conf):
        self.dry_run = False
        self.db_engine = sqlalchemy.create_engine(conf.sql_connection)
        self._session_creator = scoping.scoped_session(
            orm.sessionmaker(
                bind=self.db_engine,
                autocommit=True
            )
        )
        self.crypto_plugin = p11_crypto.P11CryptoPlugin(conf)
        self.pkcs11 = self.crypto_plugin.pkcs11
        self.plugin_name = utils.generate_fullname_for(self.crypto_plugin)
        self.hsm_session = self.pkcs11.get_session()
        self.new_mkek_label = self.crypto_plugin.mkek_label
        self.new_hmac_label = self.crypto_plugin.hmac_label
        self.new_mkek = self.crypto_plugin._get_master_key(self.new_mkek_label)
        self.new_mkhk = self.crypto_plugin._get_master_key(self.new_hmac_label)

    def rewrap_kek(self, project, kek):
        """Unwrap one project KEK and re-wrap it with the new master keys.

        :param project: Project model the KEK belongs to (unused here; kept
            for interface symmetry with the caller)
        :param kek: KEKDatum model whose plugin_meta is rewritten in place
        """
        with self.db_session.begin():
            meta_dict = json.loads(kek.plugin_meta)

            if self.dry_run:
                msg = 'Would have unwrapped key with {} and rewrapped with {}'
                print(msg.format(meta_dict['mkek_label'],
                                 self.new_mkek_label))
                print('Would have updated KEKDatum in db {}'.format(kek.id))
                print('Rewrapping KEK {}'.format(kek.id))
                print('Pre-change IV: {}, Wrapped Key: {}'.format(
                    meta_dict['iv'], meta_dict['wrapped_key']))
                return

            session = self.hsm_session

            # Get KEK's master keys
            kek_mkek = self.pkcs11.get_key_handle(
                meta_dict['mkek_label'], session
            )
            kek_mkhk = self.pkcs11.get_key_handle(
                meta_dict['hmac_label'], session
            )

            # Decode data
            iv = base64.b64decode(meta_dict['iv'])
            wrapped_key = base64.b64decode(meta_dict['wrapped_key'])
            hmac = base64.b64decode(meta_dict['hmac'])

            # Verify HMAC before trusting the wrapped key material.
            kek_data = iv + wrapped_key
            self.pkcs11.verify_hmac(kek_mkhk, hmac, kek_data, session)

            # Unwrap KEK.
            # BUG FIX: this result was previously assigned to 'kek',
            # shadowing the KEKDatum model parameter; the plugin_meta update
            # at the end of this method then targeted the PKCS#11 key handle
            # instead of the DB row, so the rewrap was never persisted.
            kek_handle = self.pkcs11.unwrap_key(kek_mkek, iv, wrapped_key,
                                                session)

            # Wrap KEK with new master keys
            new_kek = self.pkcs11.wrap_key(self.new_mkek, kek_handle, session)

            # Compute HMAC for rewrapped KEK
            new_kek_data = new_kek['iv'] + new_kek['wrapped_key']
            new_hmac = self.pkcs11.compute_hmac(self.new_mkhk, new_kek_data,
                                                session)

            # Destroy unwrapped KEK handle in the HSM
            self.pkcs11.destroy_object(kek_handle, session)

            # Build updated meta dict
            updated_meta = meta_dict.copy()
            updated_meta['mkek_label'] = self.new_mkek_label
            updated_meta['hmac_label'] = self.new_hmac_label
            updated_meta['iv'] = base64.b64encode(new_kek['iv'])
            updated_meta['wrapped_key'] = base64.b64encode(
                new_kek['wrapped_key'])
            updated_meta['hmac'] = base64.b64encode(new_hmac)

            print('Post-change IV: {}, Wrapped Key: {}'.format(
                updated_meta['iv'], updated_meta['wrapped_key']))

            # Update KEK metadata in DB (on the KEKDatum model object)
            kek.plugin_meta = p11_crypto.json_dumps_compact(updated_meta)

    def get_keks_for_project(self, project):
        """Return all KEKDatum rows for this project owned by our plugin."""
        keks = []
        with self.db_session.begin() as transaction:
            print('Retrieving KEKs for Project {}'.format(project.id))
            query = transaction.session.query(models.KEKDatum)
            query = query.filter_by(project_id=project.id)
            query = query.filter_by(plugin_name=self.plugin_name)

            keks = query.all()

        return keks

    def get_projects(self):
        """Return every project in the database."""
        print('Retrieving all available projects')

        projects = []
        with self.db_session.begin() as transaction:
            projects = transaction.session.query(models.Project).all()

        return projects

    @property
    def db_session(self):
        # scoped_session gives one session per thread of execution.
        return self._session_creator()

    def execute(self, dry_run=True):
        """Rewrap every KEK of every project; per-KEK errors are logged
        and skipped so one bad row does not abort the whole run.
        """
        self.dry_run = dry_run
        if self.dry_run:
            print('-- Running in dry-run mode --')

        projects = self.get_projects()
        for project in projects:
            keks = self.get_keks_for_project(project)
            for kek in keks:
                try:
                    self.rewrap_kek(project, kek)
                except Exception:
                    print('Error occurred! SQLAlchemy automatically rolled-'
                          'back the transaction')
                    traceback.print_exc()


def main():
    script_desc = ('Utility to re-wrap project KEKs after rotating an MKEK.')

    parser = argparse.ArgumentParser(description=script_desc)
    parser.add_argument(
        '--dry-run',
        action='store_true',
        help='Displays changes that will be made (Non-destructive)'
    )
    args = parser.parse_args()

    rewrapper = KekRewrap(CONF)
    rewrapper.execute(args.dry_run)
    rewrapper.pkcs11.return_session(rewrapper.hsm_session)


if __name__ == '__main__':
    main()
possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]), os.pardir, os.pardir)) if os.path.exists(os.path.join(possible_topdir, 'barbican', '__init__.py')): sys.path.insert(0, possible_topdir) from barbican.common import config from barbican import queue from barbican.queue import retry_scheduler from oslo_log import log from oslo_service import service def fail(returncode, e): sys.stderr.write("ERROR: {0}\n".format(e)) sys.exit(returncode) def main(): try: CONF = config.CONF # Import and configure logging. log.setup(CONF, 'barbican-retry-scheduler') LOG = log.getLogger(__name__) LOG.debug("Booting up Barbican worker retry/scheduler node...") # Queuing initialization (as a client only). queue.init(CONF, is_server_side=False) service.launch( CONF, retry_scheduler.PeriodicServer() ).wait() except RuntimeError as e: fail(1, e) if __name__ == '__main__': main() barbican-2.0.0/barbican/cmd/__init__.py0000664000567000056710000000122712701405673021037 0ustar jenkinsjenkins00000000000000# Copyright 2010-2015 OpenStack LLC. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Barbican cmd module """ barbican-2.0.0/barbican/cmd/barbican_manage.py0000775000567000056710000003127612701405673022363 0ustar jenkinsjenkins00000000000000#!/usr/bin/env python # Copyright 2010-2015 OpenStack LLC. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
CLI interface for barbican management
"""

from __future__ import print_function

import argparse
import six
import sys

from oslo_config import cfg
from oslo_log import log as logging

from barbican.cmd import pkcs11_kek_rewrap as pkcs11_rewrap
from barbican.common import config
from barbican.model import clean
from barbican.model.migration import commands
from barbican.plugin.crypto import pkcs11
import barbican.version

CONF = cfg.CONF
LOG = logging.getLogger(__name__)


# Decorators for actions
def args(*args, **kwargs):
    """Attach an argparse argument spec to a command method.

    Each application prepends an (args, kwargs) tuple onto the decorated
    function's 'args' attribute; the CLI builder later feeds these tuples
    to argparse's add_argument().
    """
    def _decorator(func):
        func.__dict__.setdefault('args', []).insert(0, (args, kwargs))
        return func
    return _decorator


class DbCommands(object):
    """Class for managing barbican database"""

    description = "Subcommands for managing barbican database"

    clean_description = "Clean up soft deletions in the database"

    @args('--db-url', '-d', metavar='', dest='dburl',
          help='barbican database URL')
    @args('--min-days', '-m', metavar='', dest='min_days', type=int,
          default=90, help='minimum number of days to keep soft deletions. '
          'default is %(default)s days.')
    @args('--verbose', '-V', action='store_true', dest='verbose',
          default=False, help='Show verbose information about the clean up.')
    @args('--log-file', '-L', metavar='', type=str, default=None,
          dest='log_file', help='Set log file location. '
          'Default value for log_file can be found in barbican.conf')
    @args('--clean-unassociated-projects', '-p', action='store_true',
          dest='do_clean_unassociated_projects', default=False,
          help='Remove projects that have no '
          'associated resources.')
    @args('--soft-delete-expired-secrets', '-e', action='store_true',
          dest='do_soft_delete_expired_secrets', default=False,
          help='Soft delete secrets that are expired.')
    def clean(self, dburl=None, min_days=None, verbose=None, log_file=None,
              do_clean_unassociated_projects=None,
              do_soft_delete_expired_secrets=None):
        """Clean soft deletions in the database"""
        # Fall back to the configured connection/log file when not given
        # explicitly on the command line.
        if dburl is None:
            dburl = CONF.sql_connection
        if log_file is None:
            log_file = CONF.log_file

        clean.clean_command(
            sql_url=dburl,
            min_num_days=min_days,
            do_clean_unassociated_projects=do_clean_unassociated_projects,
            do_soft_delete_expired_secrets=do_soft_delete_expired_secrets,
            verbose=verbose,
            log_file=log_file)

    revision_description = "Create a new database version file"

    @args('--db-url', '-d', metavar='', dest='dburl',
          help='barbican database URL')
    @args('--message', '-m', metavar='', default='DB change',
          help='the message for the DB change')
    @args('--autogenerate', action="store_true", dest='autogen',
          default=False, help='autogenerate from models')
    def revision(self, dburl=None, message=None, autogen=None):
        """Process the 'revision' Alembic command."""
        if dburl is None:
            commands.generate(autogenerate=autogen, message=str(message),
                              sql_url=CONF.sql_connection)
        else:
            commands.generate(autogenerate=autogen, message=str(message),
                              sql_url=str(dburl))

    upgrade_description = "Upgrade to a future database version"

    @args('--db-url', '-d', metavar='', dest='dburl',
          help='barbican database URL')
    @args('--version', '-v', metavar='', default='head',
          help='the version to upgrade to, or else '
          'the latest/head if not specified.')
    def upgrade(self, dburl=None, version=None):
        """Process the 'upgrade' Alembic command."""
        if dburl is None:
            commands.upgrade(to_version=str(version),
                             sql_url=CONF.sql_connection)
        else:
            commands.upgrade(to_version=str(version), sql_url=str(dburl))

    # NOTE(review): 'changset' typo below is in the released user-facing
    # string; left unchanged to preserve behavior.
    history_description = "Show database changset history"

    @args('--db-url', '-d', metavar='', dest='dburl',
          help='barbican database URL')
    @args('--verbose', '-V', action='store_true', dest='verbose',
          default=False, help='Show full information about the revisions.')
    def history(self, dburl=None, verbose=None):
        """Process the 'history' Alembic command."""
        if dburl is None:
            commands.history(verbose, sql_url=CONF.sql_connection)
        else:
            commands.history(verbose, sql_url=str(dburl))

    current_description = "Show current revision of database"

    @args('--db-url', '-d', metavar='', dest='dburl',
          help='barbican database URL')
    @args('--verbose', '-V', action='store_true', dest='verbose',
          default=False, help='Show full information about the revisions.')
    def current(self, dburl=None, verbose=None):
        """Process the 'current' Alembic command."""
        if dburl is None:
            commands.current(verbose, sql_url=CONF.sql_connection)
        else:
            commands.current(verbose, sql_url=str(dburl))


# NOTE(review): this class is truncated in the visible portion of the
# source; the remainder continues beyond this chunk.
class HSMCommands(object):
    """Class for managing HSM/pkcs11 plugin"""

    description = "Subcommands for managing HSM/PKCS11"

    gen_mkek_description = "Generates a new MKEK"

    @args('--library-path', metavar='', dest='libpath',
          default='/usr/lib/libCryptoki2_64.so',
          help='Path to vendor PKCS11 library')
    @args('--slot-id', metavar='', dest='slotid', default=1,
          help='HSM Slot id (Should correspond to a configured PKCS11 slot, \
default is 1)')
    @args('--passphrase', metavar='', default=None, required=True,
          help='Password to login to PKCS11 session')
    @args('--label', '-L', metavar='